text stringlengths 0 1.05M | meta dict |
|---|---|
'''A class to store estimators once retrieved from the database.
'''
import numpy as np
import pickle
import os
from itertools import izip
from dmrg_helpers.core.dmrg_exceptions import DMRGException
from dmrg_helpers.core.dmrg_logging import logger
class XYData(object):
    """An auxiliary class to hold the numerical data from an Estimator.

    Estimators contain numerical data and you use this class to store them.

    Parameters
    ----------
    xy_list: an iterable of (x, y) tuples.
        The (site, value) pairs of the correlator. The iterable is
        materialized into a list, so one-shot iterators are safe to pass.
    """
    def __init__(self, xy_list):
        # Materialize first: the classmethod constructors may pass one-shot
        # iterators, and unzip_in_xy would otherwise exhaust them, leaving
        # self.xy_list empty for any later iteration.
        self.xy_list = list(xy_list)
        self.x_list, self.y_list = self.unzip_in_xy()

    @classmethod
    def from_lists(cls, x, y):
        """Builds an XYData from two equally-sized lists.

        Raises
        ------
        DMRGException if the two lists differ in size.
        """
        if len(x) != len(y):
            raise DMRGException('Different sizes for lists')
        return cls(zip(x, y))

    @classmethod
    def from_estimator_data(cls, estimator_data):
        """Builds an XYData from an object exposing x() and y() sequences."""
        return cls(zip(estimator_data.x(), estimator_data.y()))

    def unzip_in_xy(self):
        """Splits self.xy_list into separate x and y lists.

        Returns
        -------
        A pair of lists (x values, y values); both empty for empty input.
        """
        if not self.xy_list:
            # zip(*[]) yields nothing, which would break the two-name unpack
            # in __init__ for an empty correlator.
            return [], []
        return [list(component) for component in zip(*self.xy_list)]

    def x(self):
        """Returns x component in a numpy array.
        """
        return np.array(self.x_list)

    def y(self):
        """Returns y component in a numpy array.
        """
        return np.array(self.y_list, dtype=float)
class XYDataDict(object):
    """A class for storing data for estimators once retrieved from a database.

    You use this class to store the result of calling the function
    get_estimator in the Database class. Additionally, you can create new
    correlators by making linear combinations of other correlators.

    Parameters
    ----------
    meta_keys: a string.
        The keys from the metadata dictionary joined by the ':' delimiter.
        The keys are alphabetically ordered. It stores the metadata of the
        estimator, like the names of the parameters of the Hamiltonian.
    keys: a tuple of strings.
        Obtained from meta_keys. Used to inspect which are the keys, like
        parameters of the Hamiltonian that label your data.
    data: a dict of a string on XYData.
        Contains the actual values for the estimator. The key in the
        dictionary is given by the parameters that characterize the data,
        such as Hamiltonian parameters of the DMRG run or the system length.
        The value is an XYData object holding the numerical part.
    """
    def __init__(self, meta_keys, data):
        self.meta_keys = meta_keys
        self.keys = self.meta_keys.split(':')
        self.data = data

    @classmethod
    def from_estimator(cls, e):
        """Builds an XYDataDict from an Estimator object."""
        # zip/map behave like izip/imap did here: keys and values of a dict
        # iterate in matching order.
        return cls(e.meta_keys,
                   dict(zip(e.data.keys(),
                            map(XYData.from_estimator_data,
                                e.data.values()))))

    def get_metadata_as_dict(self, meta_val):
        """Returns a dictionary with metadata.

        Parameters
        ----------
        meta_val: one of the meta_vals (a ':'-joined string of values).
        """
        return dict(zip(self.keys, meta_val.split(':')))

    def save(self, filename, output_dir=None):
        """Saves the correlator data to a file.

        You use this function to save data for a correlator to a file using
        pickle. The correlator can be restored later using the load method.
        `output_dir` defaults to the current working directory at call time.
        """
        # Resolve the default at call time: a default of os.getcwd() in the
        # signature would be frozen at import time and go stale after chdir.
        output_dir = os.path.abspath(output_dir or os.getcwd())
        # Use a context manager so the file handle is always closed.
        with open(os.path.join(output_dir, filename), "wb") as f:
            pickle.dump(self, f)
        logger.info('Saving correlator to {} as pickle.'.format(filename))

    @classmethod
    def load(cls, filename, output_dir=None):
        """Loads the correlator data from a file.

        You use this function to load data for a correlator that was
        previously pickled to a file. `output_dir` defaults to the current
        working directory at call time.
        """
        output_dir = os.path.abspath(output_dir or os.getcwd())
        with open(os.path.join(output_dir, filename), "rb") as f:
            restored = pickle.load(f)
        logger.info('Loading correlator from {}.'.format(filename))
        return restored

    def save_as_txt(self, filename, output_dir=None):
        """Saves the correlator data to a file.

        You use this function to save data for a correlator to a file. If
        there is more than one set of data in the correlator, for example,
        because you have data for different system sizes, each set will be
        saved into a different file. The name of these files will be obtained
        by appending the names and values of the meta_data to `filename`.

        Inside the file the data is organized in two columns: the first is a
        site of the chain, and the second the value of the correlator.
        """
        output_dir = os.path.abspath(output_dir or os.getcwd())
        for key, val in self.generate_filenames(filename).items():
            rows = zip(self.data[key].x_list, self.data[key].y_list)
            saved = os.path.join(output_dir, val)
            with open(saved, 'w') as f:
                f.write('\n'.join('%s %s' % pair for pair in rows))
        logger.info('Saving correlator to {} as txt'.format(filename))

    def generate_filenames(self, filename):
        """Generates one filename per entry in data.

        Parameters
        ----------
        filename: a string.
            The filename to be created. If it has a '.dat' extension, the
            extension is stripped off.

        Returns
        -------
        A dict mapping each data key to a filename built by appending the
        sorted metadata key/value pairs to `filename`, plus '.dat'.
        """
        if filename.endswith('.dat'):
            filename = filename[:-4]
        filenames = []
        for meta_val in self.data.keys():
            meta_dict = self.get_metadata_as_dict(meta_val)
            extended_filename = filename
            for key in sorted(meta_dict.keys()):
                extended_filename += '_' + str(key) + '_' + str(meta_dict[key])
            extended_filename += '.dat'
            filenames.append(extended_filename)
        return dict(zip(self.data.keys(), filenames))

    def get_min_y(self):
        """Returns the minimum value of all 'y' data.

        Returns
        -------
        a float
        """
        return min(min(v.y()) for v in self.data.values())

    def get_max_y(self):
        """Returns the maximum value of all 'y' data.

        Returns
        -------
        a float
        """
        return max(max(v.y()) for v in self.data.values())

    def get_data_for_plots(self, function_to_make_labels):
        """Makes a dictionary with the stuff you need to plot.

        You use this function to get x, y values for plotting, plus a string
        to label the plots.

        Parameters
        ----------
        function_to_make_labels: a function which takes an XYDataDict.
            This function must return a dictionary with the same keys as the
            `data` member and the values as strings.

        Returns
        -------
        A dictionary with the same keys as the estimator and a three-tuple
        (label, x array, y array) as values.
        """
        labels = function_to_make_labels(self)
        return dict((k, (labels[k], v.x(), v.y()))
                    for k, v in self.data.items())

    def plot(self):
        """Plots the data.

        Makes a plot of the correlator data. If the correlator contains
        several sets of parameters, graphs all in the same plot.
        """
        pass
| {
"repo_name": "iglpdc/dmrg_helpers",
"path": "dmrg_helpers/view/xy_data.py",
"copies": "1",
"size": "8265",
"license": "mit",
"hash": -6900633658405344000,
"line_mean": 34.3205128205,
"line_max": 80,
"alpha_frac": 0.6047186933,
"autogenerated": false,
"ratio": 4.203967446592065,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5308686139892065,
"avg_score": null,
"num_lines": null
} |
"""A class to store named variables and a scope operator to manage sharing."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import contextlib
import six
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_shape
from tensorflow.python.framework import types
from tensorflow.python.ops import init_ops
from tensorflow.python.ops import variables
from tensorflow.python.platform import logging
class _VariableStore(object):
  """Variable store that carries a number of named Variables.

  New variable names and new variables can be created; all stored
  variables are initialized with the initializer passed to __init__.

  Attributes:
    vars: a dictionary with string names (same as passed in GetVar) as keys
      and the corresponding TensorFlow Variables as values.
  """

  def __init__(self):
    """Create a variable store."""
    self._vars = {}  # A dictionary of the stored TensorFlow variables.

  def get_variable(self, name, shape=None, dtype=types.float32,
                   initializer=None, reuse=None, trainable=True,
                   collections=None):
    """Gets an existing variable with these parameters or create a new one.

    If a variable with the given name is already stored, we return the stored
    variable. Otherwise, we create a new one.

    Set `reuse` to `True` when you only want to reuse existing Variables.
    Set `reuse` to `False` when you only want to create new Variables.
    If `reuse` is `None` (the default), both new and existing variables are
    returned.

    If initializer is `None` (the default), the default initializer passed in
    the constructor is used. If that one is `None` too, we use a new
    `UniformUnitScalingInitializer`.

    Args:
      name: the name of the new or existing variable.
      shape: shape of the new or existing variable.
      dtype: type of the new or existing variable (defaults to `DT_FLOAT`).
      initializer: initializer for the variable.
      reuse: a Boolean or `None`. Controls reuse or creation of variables.
      trainable: If `True` also add the variable to the graph collection
        `GraphKeys.TRAINABLE_VARIABLES` (see variables.Variable).
      collections: List of graph collections keys to add the Variable to.
        Defaults to `[GraphKeys.VARIABLES]` (see variables.Variable).

    Returns:
      The created or existing variable.

    Raises:
      ValueError: when creating a new variable and shape is not declared,
        when reusing a variable and specifying a conflicting shape,
        or when violating reuse during variable creation.
    """
    # reuse=None means "either mode": skip both the over- and under-sharing
    # checks below.
    should_check = reuse is not None
    # Normalize dtype/shape so the compatibility checks below compare
    # canonical objects rather than raw user input.
    dtype = types.as_dtype(dtype)
    shape = tensor_shape.as_shape(shape)
    if name in self._vars:
      # Here we handle the case when returning an existing variable.
      if should_check and not reuse:
        raise ValueError("Over-sharing: Variable %s already exists, disallowed."
                         " Did you mean to set reuse=True in VarScope?" % name)
      found_var = self._vars[name]
      # The requested shape and dtype must be compatible with the stored
      # variable's, otherwise sharing is refused.
      if not shape.is_compatible_with(found_var.get_shape()):
        raise ValueError("Trying to share variable %s, but specified shape %s"
                         " and found shape %s." % (name, shape,
                                                   found_var.get_shape()))
      if not dtype.is_compatible_with(found_var.dtype):
        dtype_str = dtype.name
        found_type_str = found_var.dtype.name
        raise ValueError("Trying to share variable %s, but specified dtype %s"
                         " and found dtype %s." % (name, dtype_str,
                                                   found_type_str))
      return found_var

    # The code below handles only the case of creating a new variable.
    if should_check and reuse:
      raise ValueError("Under-sharing: Variable %s does not exist, disallowed."
                       " Did you mean to set reuse=None in VarScope?" % name)
    # A brand-new variable needs a concrete shape to allocate storage.
    if not shape.is_fully_defined():
      raise ValueError("Shape of a new variable (%s) must be fully defined, "
                       "but instead was %s." % (name, shape))
    if initializer is None:
      initializer = init_ops.uniform_unit_scaling_initializer()
    # Group the initializer ops under "<name>/Initializer/" in the graph.
    with ops.name_scope(name + "/Initializer/"):
      init_val = initializer(shape.as_list(), dtype=dtype)
    v = variables.Variable(init_val, name=name, trainable=trainable,
                           collections=collections)
    self._vars[name] = v
    logging.info("Created variable %s with shape %s and init %s", v.name,
                 format(shape), initializer)
    return v
class _VariableScope(object):
  """Carries the defaults that get_variable consults: a name prefix, a
  reuse flag, and a default initializer.

  Instances are stashed in a graph collection; the variable_scope context
  manager swaps them in and out around user code.

  Attributes:
    name: name of the current scope, used as prefix in get_variable.
    initializer: default initializer passed to get_variable.
    reuse: Boolean or None, setting the reuse in get_variable.
  """

  def __init__(self, reuse, name="", initializer=None):
    self._reuse = reuse
    self._name = name
    self._initializer = initializer

  @property
  def name(self):
    return self._name

  @property
  def reuse(self):
    return self._reuse

  @property
  def initializer(self):
    return self._initializer

  def reuse_variables(self):
    """Switch this scope to reuse mode for later get_variable calls."""
    self._reuse = True

  def set_initializer(self, initializer):
    """Replace the default initializer used by this scope."""
    self._initializer = initializer

  def get_variable(self, var_store, name, shape=None, dtype=types.float32,
                   initializer=None, trainable=True, collections=None):
    """Gets an existing variable with this name or create a new one."""
    chosen_initializer = initializer
    if chosen_initializer is None and self._initializer:
      chosen_initializer = self._initializer
    full_name = "%s/%s" % (self.name, name) if self.name else name
    # Variable names track variable_scope (full_name) only, never the
    # surrounding name_scope, so clear the name scope during creation.
    with ops.name_scope(None):
      return var_store.get_variable(full_name, shape, dtype,
                                    chosen_initializer, self.reuse,
                                    trainable, collections)
# Graph-collection keys under which the store/scope singletons live.
# Tuples (not plain strings) so they cannot clash with user-chosen keys.
_VARSTORE_KEY = ("__variable_store",)
_VARSCOPE_KEY = ("__varscope",)
def get_variable_scope():
  """Returns the current variable scope, creating a default one if needed."""
  collected = ops.get_collection(_VARSCOPE_KEY)
  if collected:
    # The collection holds at most one element: the current default scope.
    return collected[0]
  default_scope = _VariableScope(False)
  ops.add_to_collection(_VARSCOPE_KEY, default_scope)
  return default_scope
def _get_default_variable_store():
  """Returns the graph-wide _VariableStore, creating it on first use."""
  collected = ops.get_collection(_VARSTORE_KEY)
  if collected:
    return collected[0]
  new_store = _VariableStore()
  ops.add_to_collection(_VARSTORE_KEY, new_store)
  return new_store
def get_variable(name, shape=None, dtype=types.float32, initializer=None,
                 trainable=True, collections=None):
  """Gets or creates a variable under the current variable scope.

  The variable name is prefixed with the current scope's name, and the
  scope's reuse setting controls whether an existing variable may be
  returned. See the
  [Variable Scope How To](../../how_tos/variable_scope/index.md)
  for an extensive description of how reusing works. A basic example:

  ```python
  with tf.variable_scope("foo"):
      v = tf.get_variable("v", [1])  # v.name == "foo/v:0"
      w = tf.get_variable("w", [1])  # w.name == "foo/w:0"
  with tf.variable_scope("foo", reuse=True)
      v1 = tf.get_variable("v")  # The same as v above.
  ```

  If initializer is `None` (the default), the default initializer passed in
  the constructor is used. If that one is `None` too, a
  `UniformUnitScalingInitializer` will be used.

  Args:
    name: the name of the new or existing variable.
    shape: shape of the new or existing variable.
    dtype: type of the new or existing variable (defaults to `DT_FLOAT`).
    initializer: initializer for the variable if one is created.
    trainable: If `True` also add the variable to the graph collection
      `GraphKeys.TRAINABLE_VARIABLES` (see variables.Variable).
    collections: List of graph collections keys to add the Variable to.
      Defaults to `[GraphKeys.VARIABLES]` (see variables.Variable).

  Returns:
    The created or existing variable.

  Raises:
    ValueError: when creating a new variable and shape is not declared,
      or when violating reuse during variable creation. Reuse is set inside
      `variable_scope`.
  """
  scope = get_variable_scope()
  store = _get_default_variable_store()
  return scope.get_variable(store, name, shape, dtype, initializer,
                            trainable, collections)
@contextlib.contextmanager
def variable_scope(name_or_scope, reuse=None, initializer=None):
  """Returns a context for variable scope.

  Variable scope allows to create new variables and to share already created
  ones while providing checks to not create or share by accident. For details,
  see the [Variable Scope How To](../../how_tos/variable_scope/index.md),
  here we present only a few basic examples.

  Simple example of how to create a new variable:

  ```python
  with tf.variable_scope("foo"):
    with tf.variable_scope("bar"):
      v = tf.get_variable("v", [1])
      assert v.name == "foo/bar/v:0"
  ```

  Basic example of sharing a variable:

  ```python
  with tf.variable_scope("foo"):
    v = tf.get_variable("v", [1])
  with tf.variable_scope("foo", reuse=True):
    v1 = tf.get_variable("v", [1])
  assert v1 == v
  ```

  Sharing a variable by capturing a scope and setting reuse:

  ```python
  with tf.variable_scope("foo") as scope:
    v = tf.get_variable("v", [1])
    scope.reuse_variables()
    v1 = tf.get_variable("v", [1])
  assert v1 == v
  ```

  To prevent accidental sharing of variables, we raise an exception when
  getting an existing variable in a non-reusing scope.

  ```python
  with tf.variable_scope("foo") as scope:
    v = tf.get_variable("v", [1])
    v1 = tf.get_variable("v", [1])
    #  Raises ValueError("... v already exists ...").
  ```

  Similarly, we raise an exception when trying to get a variable that
  does not exist in reuse mode.

  ```python
  with tf.variable_scope("foo", reuse=True):
    v = tf.get_variable("v", [1])
    #  Raises ValueError("... v does not exist ...").
  ```

  Note that the `reuse` flag is inherited: if we open a reusing scope,
  then all its sub-scopes become reusing as well.

  Args:
    name_or_scope: `string` or `VariableScope`: the scope to open.
    reuse: `True` or `None`; if `True`, we go into reuse mode for this scope as
      well as all sub-scopes; if `None`, we just inherit the parent scope reuse.
    initializer: default initializer for variables within this scope.

  Yields:
    A scope that can be captured and reused.

  Raises:
    ValueError: when trying to reuse within a create scope, or create within
      a reuse scope, or if reuse is not `None` or `True`.
    TypeError: when the types of some arguments are not appropriate.
  """
  if not isinstance(name_or_scope, (_VariableScope,) + six.string_types):
    raise TypeError("VariableScope: name_scope must be a string or "
                    "VariableScope.")
  if reuse not in [None, True]:
    raise ValueError("VariableScope reuse parameter must be True or None.")
  # The next two cases are legal but unusual; log so the user can notice.
  if not reuse and isinstance(name_or_scope, (_VariableScope)):
    logging.info("Passing VariableScope to a non-reusing scope, intended?")
  if reuse and isinstance(name_or_scope, six.string_types):
    logging.info("Re-using string-named scope, consider capturing as object.")
  get_variable_scope()  # Ensure that a default exists, then get a pointer.
  # Get the reference to the collection as we want to modify it in place.
  default_varscope = ops.get_collection(_VARSCOPE_KEY)
  try:
    old = default_varscope[0]
    reuse = reuse or old.reuse  # Re-using is inherited by sub-scopes.
    if isinstance(name_or_scope, _VariableScope):
      # Handler for the case when we jump to a shared scope.
      #   In this case, we leave the current name_scope unchanged.
      #   We create a new VariableScope (default_varscope[0]) that contains
      #   a copy of the provided shared scope, possibly with changed reuse
      #   and initializer, if the user requested this.
      default_varscope[0] = _VariableScope(reuse, name_or_scope.name,
                                           name_or_scope.initializer)
      if initializer:
        default_varscope[0].set_initializer(initializer)
      yield default_varscope[0]
    else:
      # Handler for the case when we just prolong current variable scope.
      #   In this case we prolong the current name_scope and create a new
      #   VariableScope with name extended by the provided one, and inherited
      #   reuse and initializer (except if the user provided values to set).
      with ops.name_scope(name_or_scope):
        new_name = old.name + "/" + name_or_scope if old.name else name_or_scope
        default_varscope[0] = _VariableScope(reuse, name=new_name,
                                             initializer=old.initializer)
        if initializer:
          default_varscope[0].set_initializer(initializer)
        yield default_varscope[0]
  finally:
    # Restore the parent scope even if the body raised into the generator.
    default_varscope[0] = old
| {
"repo_name": "arunhotra/tensorflow",
"path": "tensorflow/python/ops/variable_scope.py",
"copies": "3",
"size": "13415",
"license": "apache-2.0",
"hash": 3964003397921540600,
"line_mean": 38.6893491124,
"line_max": 80,
"alpha_frac": 0.6661200149,
"autogenerated": false,
"ratio": 4.15968992248062,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.005686429998786664,
"num_lines": 338
} |
"""A class to store named variables and a scope operator to manage sharing."""
import contextlib
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_shape
from tensorflow.python.framework import types
from tensorflow.python.ops import init_ops
from tensorflow.python.ops import variables
from tensorflow.python.platform import logging
class _VariableStore(object):
  """Variable store that carries a number of named Variables.

  New variable names and new variables can be created; all stored
  variables are initialized with the initializer passed to __init__.

  Attributes:
    vars: a dictionary with string names (same as passed in GetVar) as keys
      and the corresponding TensorFlow Variables as values.
  """

  def __init__(self):
    """Create a variable store."""
    self._vars = {}  # A dictionary of the stored TensorFlow variables.

  def get_variable(self, name, shape=None, dtype=types.float32,
                   initializer=None, reuse=None, trainable=True,
                   collections=None):
    """Gets an existing variable with these parameters or create a new one.

    If a variable with the given name is already stored, we return the stored
    variable. Otherwise, we create a new one.

    Set `reuse` to `True` when you only want to reuse existing Variables.
    Set `reuse` to `False` when you only want to create new Variables.
    If `reuse` is `None` (the default), both new and existing variables are
    returned.

    If initializer is `None` (the default), the default initializer passed in
    the constructor is used. If that one is `None` too, we use a new
    `UniformUnitScalingInitializer`.

    Args:
      name: the name of the new or existing variable.
      shape: shape of the new or existing variable.
      dtype: type of the new or existing variable (defaults to `DT_FLOAT`).
      initializer: initializer for the variable.
      reuse: a Boolean or `None`. Controls reuse or creation of variables.
      trainable: If `True` also add the variable to the graph collection
        `GraphKeys.TRAINABLE_VARIABLES` (see variables.Variable).
      collections: List of graph collections keys to add the Variable to.
        Defaults to `[GraphKeys.VARIABLES]` (see variables.Variable).

    Returns:
      The created or existing variable.

    Raises:
      ValueError: when creating a new variable and shape is not declared,
        when reusing a variable and specifying a conflicting shape,
        or when violating reuse during variable creation.
    """
    # reuse=None means "either mode": skip both the over- and under-sharing
    # checks below.
    should_check = reuse is not None
    # Normalize dtype/shape so the compatibility checks below compare
    # canonical objects rather than raw user input.
    dtype = types.as_dtype(dtype)
    shape = tensor_shape.as_shape(shape)
    if name in self._vars:
      # Here we handle the case when returning an existing variable.
      if should_check and not reuse:
        raise ValueError("Over-sharing: Variable %s already exists, disallowed."
                         " Did you mean to set reuse=True in VarScope?" % name)
      found_var = self._vars[name]
      # The requested shape and dtype must be compatible with the stored
      # variable's, otherwise sharing is refused.
      if not shape.is_compatible_with(found_var.get_shape()):
        raise ValueError("Trying to share variable %s, but specified shape %s"
                         " and found shape %s." % (name, str(shape),
                                                   str(found_var.get_shape())))
      if not dtype.is_compatible_with(found_var.dtype):
        dtype_str = dtype.name
        found_type_str = found_var.dtype.name
        raise ValueError("Trying to share variable %s, but specified dtype %s"
                         " and found dtype %s." % (name, str(dtype_str),
                                                   str(found_type_str)))
      return found_var

    # The code below handles only the case of creating a new variable.
    if should_check and reuse:
      raise ValueError("Under-sharing: Variable %s does not exist, disallowed."
                       " Did you mean to set reuse=None in VarScope?" % name)
    # A brand-new variable needs a concrete shape to allocate storage.
    if not shape.is_fully_defined():
      raise ValueError("Shape of a new variable (%s) must be fully defined, "
                       "but instead was %s." % (name, shape))
    if initializer is None:
      initializer = init_ops.uniform_unit_scaling_initializer()
    # Group the initializer ops under "<name>/Initializer/" in the graph.
    with ops.name_scope(name + "/Initializer/"):
      init_val = initializer(shape.as_list(), dtype=dtype)
    v = variables.Variable(init_val, name=name, trainable=trainable,
                           collections=collections)
    self._vars[name] = v
    logging.info("Created variable %s with shape %s and init %s", v.name,
                 format(shape), str(initializer))
    return v
class _VariableScope(object):
  """Carries the defaults that get_variable consults: a name prefix, a
  reuse flag, and a default initializer.

  Instances are stashed in a graph collection; the variable_scope context
  manager swaps them in and out around user code.

  Attributes:
    name: name of the current scope, used as prefix in get_variable.
    initializer: default initializer passed to get_variable.
    reuse: Boolean or None, setting the reuse in get_variable.
  """

  def __init__(self, reuse, name="", initializer=None):
    self._reuse = reuse
    self._name = name
    self._initializer = initializer

  @property
  def name(self):
    return self._name

  @property
  def reuse(self):
    return self._reuse

  @property
  def initializer(self):
    return self._initializer

  def reuse_variables(self):
    """Switch this scope to reuse mode for later get_variable calls."""
    self._reuse = True

  def set_initializer(self, initializer):
    """Replace the default initializer used by this scope."""
    self._initializer = initializer

  def get_variable(self, var_store, name, shape=None, dtype=types.float32,
                   initializer=None, trainable=True, collections=None):
    """Gets an existing variable with this name or create a new one."""
    chosen_initializer = initializer
    if chosen_initializer is None and self._initializer:
      chosen_initializer = self._initializer
    full_name = "%s/%s" % (self.name, name) if self.name else name
    # Variable names track variable_scope (full_name) only, never the
    # surrounding name_scope, so clear the name scope during creation.
    with ops.name_scope(None):
      return var_store.get_variable(full_name, shape, dtype,
                                    chosen_initializer, self.reuse,
                                    trainable, collections)
# Graph-collection keys under which the store/scope singletons live.
# Tuples (not plain strings) so they cannot clash with user-chosen keys.
_VARSTORE_KEY = ("__variable_store",)
_VARSCOPE_KEY = ("__varscope",)
def get_variable_scope():
  """Returns the current variable scope, creating a default one if needed."""
  collected = ops.get_collection(_VARSCOPE_KEY)
  if collected:
    # The collection holds at most one element: the current default scope.
    return collected[0]
  default_scope = _VariableScope(False)
  ops.add_to_collection(_VARSCOPE_KEY, default_scope)
  return default_scope
def _get_default_variable_store():
  """Returns the graph-wide _VariableStore, creating it on first use."""
  collected = ops.get_collection(_VARSTORE_KEY)
  if collected:
    return collected[0]
  new_store = _VariableStore()
  ops.add_to_collection(_VARSTORE_KEY, new_store)
  return new_store
def get_variable(name, shape=None, dtype=types.float32, initializer=None,
                 trainable=True, collections=None):
  """Gets or creates a variable under the current variable scope.

  The variable name is prefixed with the current scope's name, and the
  scope's reuse setting controls whether an existing variable may be
  returned. See the
  [Variable Scope How To](../../how_tos/variable_scope/index.md)
  for an extensive description of how reusing works. A basic example:

  ```python
  with tf.variable_scope("foo"):
      v = tf.get_variable("v", [1])  # v.name == "foo/v:0"
      w = tf.get_variable("w", [1])  # w.name == "foo/w:0"
  with tf.variable_scope("foo", reuse=True)
      v1 = tf.get_variable("v")  # The same as v above.
  ```

  If initializer is `None` (the default), the default initializer passed in
  the constructor is used. If that one is `None` too, a
  `UniformUnitScalingInitializer` will be used.

  Args:
    name: the name of the new or existing variable.
    shape: shape of the new or existing variable.
    dtype: type of the new or existing variable (defaults to `DT_FLOAT`).
    initializer: initializer for the variable if one is created.
    trainable: If `True` also add the variable to the graph collection
      `GraphKeys.TRAINABLE_VARIABLES` (see variables.Variable).
    collections: List of graph collections keys to add the Variable to.
      Defaults to `[GraphKeys.VARIABLES]` (see variables.Variable).

  Returns:
    The created or existing variable.

  Raises:
    ValueError: when creating a new variable and shape is not declared,
      or when violating reuse during variable creation. Reuse is set inside
      `variable_scope`.
  """
  scope = get_variable_scope()
  store = _get_default_variable_store()
  return scope.get_variable(store, name, shape, dtype, initializer,
                            trainable, collections)
@contextlib.contextmanager
def variable_scope(name_or_scope, reuse=None, initializer=None):
  """Returns a context for variable scope.

  Variable scope allows to create new variables and to share already created
  ones while providing checks to not create or share by accident. For details,
  see the [Variable Scope How To](../../how_tos/variable_scope/index.md),
  here we present only a few basic examples.

  Simple example of how to create a new variable:

  ```python
  with tf.variable_scope("foo"):
    with tf.variable_scope("bar"):
      v = tf.get_variable("v", [1])
      assert v.name == "foo/bar/v:0"
  ```

  Basic example of sharing a variable:

  ```python
  with tf.variable_scope("foo"):
    v = tf.get_variable("v", [1])
  with tf.variable_scope("foo", reuse=True):
    v1 = tf.get_variable("v", [1])
  assert v1 == v
  ```

  Sharing a variable by capturing a scope and setting reuse:

  ```python
  with tf.variable_scope("foo") as scope:
    v = tf.get_variable("v", [1])
    scope.reuse_variables()
    v1 = tf.get_variable("v", [1])
  assert v1 == v
  ```

  To prevent accidental sharing of variables, we raise an exception when
  getting an existing variable in a non-reusing scope.

  ```python
  with tf.variable_scope("foo") as scope:
    v = tf.get_variable("v", [1])
    v1 = tf.get_variable("v", [1])
    #  Raises ValueError("... v already exists ...").
  ```

  Similarly, we raise an exception when trying to get a variable that
  does not exist in reuse mode.

  ```python
  with tf.variable_scope("foo", reuse=True):
    v = tf.get_variable("v", [1])
    #  Raises ValueError("... v does not exist ...").
  ```

  Note that the `reuse` flag is inherited: if we open a reusing scope,
  then all its sub-scopes become reusing as well.

  Args:
    name_or_scope: `string` or `VariableScope`: the scope to open.
    reuse: `True` or `None`; if `True`, we go into reuse mode for this scope as
      well as all sub-scopes; if `None`, we just inherit the parent scope reuse.
    initializer: default initializer for variables within this scope.

  Yields:
    A scope that can be captured and reused.

  Raises:
    ValueError: when trying to reuse within a create scope, or create within
      a reuse scope, or if reuse is not `None` or `True`.
    TypeError: when the types of some arguments are not appropriate.
  """
  if not isinstance(name_or_scope, (_VariableScope, basestring)):
    raise TypeError("VariableScope: name_scope must be a string or "
                    "VariableScope.")
  if reuse not in [None, True]:
    raise ValueError("VariableScope reuse parameter must be True or None.")
  # The next two cases are legal but unusual; log so the user can notice.
  if not reuse and isinstance(name_or_scope, (_VariableScope)):
    logging.info("Passing VariableScope to a non-reusing scope, intended?")
  if reuse and isinstance(name_or_scope, (basestring)):
    logging.info("Re-using string-named scope, consider capturing as object.")
  get_variable_scope()  # Ensure that a default exists, then get a pointer.
  # Get the reference to the collection as we want to modify it in place.
  default_varscope = ops.get_collection(_VARSCOPE_KEY)
  try:
    old = default_varscope[0]
    reuse = reuse or old.reuse  # Re-using is inherited by sub-scopes.
    if isinstance(name_or_scope, _VariableScope):
      # Handler for the case when we jump to a shared scope.
      #   In this case, we leave the current name_scope unchanged.
      #   We create a new VariableScope (default_varscope[0]) that contains
      #   a copy of the provided shared scope, possibly with changed reuse
      #   and initializer, if the user requested this.
      default_varscope[0] = _VariableScope(reuse, name_or_scope.name,
                                           name_or_scope.initializer)
      if initializer:
        default_varscope[0].set_initializer(initializer)
      yield default_varscope[0]
    else:
      # Handler for the case when we just prolong current variable scope.
      #   In this case we prolong the current name_scope and create a new
      #   VariableScope with name extended by the provided one, and inherited
      #   reuse and initializer (except if the user provided values to set).
      with ops.name_scope(name_or_scope):
        new_name = old.name + "/" + name_or_scope if old.name else name_or_scope
        default_varscope[0] = _VariableScope(reuse, name=new_name,
                                             initializer=old.initializer)
        if initializer:
          default_varscope[0].set_initializer(initializer)
        yield default_varscope[0]
  finally:
    # Restore the parent scope even if the body raised into the generator.
    default_varscope[0] = old
| {
"repo_name": "brendandburns/tensorflow",
"path": "tensorflow/python/ops/variable_scope.py",
"copies": "3",
"size": "13307",
"license": "apache-2.0",
"hash": -1038153803816615200,
"line_mean": 38.960960961,
"line_max": 80,
"alpha_frac": 0.6651386488,
"autogenerated": false,
"ratio": 4.161038148843027,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.005780305758013123,
"num_lines": 333
} |
"""A class to store tables.
Sample Usage:
table = SgTable()
table.Append([1, 2, 3])
table.Append([2, 4, 6])
table.Append([3, 6, 9])
for row in table:
print(row)
print(table[1])
table[1] = [2, 2, 2]
print(table[1])
table.SetFields(["a", "b", "c"])
print(table.GetVals("a"))
print(table.GetVals("b"))
print(table.GetVals("c"))
print(table[1:])
print(table[:2])
print(table[0:2:2])
"""
import itertools
class EscapeHtml:
    """Escapes characters that are unsafe to embed verbatim in HTML."""

    # Map of unsafe characters to their HTML entity (or markup) replacements.
    # An identity mapping here would make escaping a no-op, so each dangerous
    # character must map to its entity form.
    MAPPING = {u"&": u"&amp;",
               u"<": u"&lt;",
               u">": u"&gt;",
               u"\"": u"&quot;",
               u"\'": u"&#39;",
               u"\n": u"<br>\n"}

    @classmethod
    def Escape(cls, ch):
        """Returns the HTML-safe replacement for a single character.

        Characters without a mapping are returned unchanged.
        """
        # dict.has_key() was removed in Python 3; dict.get works in both 2 and 3.
        return cls.MAPPING.get(ch, ch)

    @classmethod
    def EscapeUnicodeStr(cls, unicode_str):
        """Escapes every character of unicode_str for safe HTML embedding."""
        # join() avoids the quadratic cost of repeated string concatenation.
        return u"".join(cls.Escape(ch) for ch in unicode_str)
class SgTable:
    """A class to store tables: a list of field names plus a list of rows."""

    def __init__(self):
        self._fields = []  # column names
        self._table = []   # list of rows; each row is a list of values

    def __len__(self):
        return len(self._table)

    def __iter__(self):
        return iter(self._table)

    def _IsValidIndex(self, key):
        """Returns True if key is an in-range, non-negative integer row index."""
        # `long` only exists on Python 2; short-circuiting keeps int keys safe
        # on Python 3 as well.
        return (type(key) == int or type(key) == long) and 0 <= key < len(self._table)

    def __getitem__(self, key):
        """Returns a row by integer index, or a list of rows for a slice.

        Raises ValueError for a non-integer or out-of-range index.
        """
        if isinstance(key, slice):
            # Slicing delegates directly to the underlying list.
            return self._table[key]
        if not self._IsValidIndex(key):
            raise ValueError("Index illegal")
        return self._table[key]

    def __setitem__(self, key, value):
        """Replaces the row at an integer index; raises ValueError otherwise."""
        if not self._IsValidIndex(key):
            raise ValueError("Index illegal")
        self._table[key] = value

    def __str__(self):
        # join() avoids quadratic repeated concatenation.
        return "\n".join([str(self._fields)] + [str(row) for row in self._table])

    def __HasCommaOutOfString(self, val):
        """Returns True if val contains a comma outside any quoted substring."""
        in_string = False
        is_escaping = False
        for ch in val:
            if in_string:
                if is_escaping:
                    is_escaping = False
                elif ch == u"\\":
                    is_escaping = True
                elif ch in (u"\"", u"\'"):
                    in_string = False
            else:
                if ch == u",":
                    return True
                elif ch in (u"\"", u"\'"):
                    in_string = True
        return False

    def _GetCsvRepr(self, val):
        """Returns the CSV representation of a value (or list of values)."""
        if isinstance(val, list):
            return u",".join(itertools.imap(self._GetCsvRepr, val))
        if isinstance(val, unicode):
            # Quote values whose commas or newlines would break the CSV layout.
            if self.__HasCommaOutOfString(val) or u"\n" in val:
                return u"\"" + val + u"\""
            return val
        return unicode(str(val), "utf-8")

    def InCsv(self):
        """Returns the table serialized as CSV text (header row first)."""
        rows = [self._GetCsvRepr(self._fields)]
        for row in self._table:
            rows.append(self._GetCsvRepr(row))
        return u"\n".join(rows)

    def InHtml(self):
        """Returns the table rendered as a simple, HTML-escaped page."""
        parts = [u"<html>\n<head><meta charset=\"utf-8\">\n<title>SQLGitHub Result</title>\n</head>\n<body>\n",
                 u"<table border=1>",
                 u"<tr>"]
        for field in self._fields:
            parts.append(u"<td>" + EscapeHtml.EscapeUnicodeStr(field) + u"</td>")
        parts.append(u"</tr>\n")
        for row in self._table:
            parts.append(u"<tr>")
            for val in row:
                unicode_str = val if isinstance(val, unicode) else unicode(str(val), "utf-8")
                parts.append(u"<td>" + EscapeHtml.EscapeUnicodeStr(unicode_str) + u"</td>")
            parts.append(u"</tr>\n")
        parts.append(u"</table>\n</html>")
        return u"".join(parts)

    def GetVals(self, field):
        """Returns the column of values for the named field.

        Raises IndexError if the field does not exist (kept from the original
        behavior so existing callers see the same exception type).
        """
        idx = [i for i, f in enumerate(self._fields) if f == field][0]
        return [row[idx] for row in self._table]

    def Copy(self, table):
        """Copies fields and rows (by reference) from another SgTable."""
        self.SetFields(table.GetFields())
        self.SetTable(table.GetTable())

    def Append(self, row):
        """Appends a row (list of values) to the table."""
        self._table.append(row)

    def GetTable(self):
        return self._table

    def SetTable(self, table):
        self._table = table

    def GetFields(self):
        return self._fields

    def SetFields(self, fields):
        self._fields = fields

    def SliceCol(self, start, end):
        """Returns a new SgTable containing only columns [start, end)."""
        table = SgTable()
        table.SetFields(self._fields[start:end])
        for row in self._table:
            table.Append(row[start:end])
        return table

    def Chain(self, table):
        """Returns a new SgTable whose rows are this table's rows extended
        with the other table's rows, truncated to the shorter table."""
        res_table = SgTable()
        res_table.SetFields(self._fields + table.GetFields())
        for i in range(min(len(self._table), len(table))):
            res_table.Append(self._table[i] + table[i])
        return res_table
| {
"repo_name": "lnishan/SQLGitHub",
"path": "components/table.py",
"copies": "1",
"size": "4814",
"license": "mit",
"hash": -5761484324779521000,
"line_mean": 27.4852071006,
"line_max": 107,
"alpha_frac": 0.4914831741,
"autogenerated": false,
"ratio": 3.669207317073171,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9646879195866462,
"avg_score": 0.002762259061341706,
"num_lines": 169
} |
"""A class used for isotherm interpolation."""
from scipy.interpolate import interp1d
class IsothermInterpolator():
    """
    Class used to interpolate between isotherm points.
    Call directly to use.
    It is mainly a wrapper around scipy.interpolate.interp1d.

    Parameters
    ----------
    known_data : array-like
        The values corresponding to the input variable.
    interp_data : array-like
        The values corresponding to the variable to be interpolated.
    interp_branch : str, optional
        Stores which isotherm branch the interpolator is based on.
    interp_kind : str, optional
        Determine which kind of interpolation is done between the
        datapoints.
    interp_fill : optional
        The parameter passed to the scipy.interpolate.interp1d function
        to determine what to do outside data bounds.
    """

    def __init__(
        self,
        known_data,
        interp_data,
        interp_branch='ads',
        interp_kind='linear',
        interp_fill=None,
    ):
        """Instantiate."""
        # The branch the internal interpolator is on.
        self.interp_branch = interp_branch
        # The kind of interpolator in the internal interpolator.
        self.interp_kind = interp_kind
        # Fill value to assume beyond the bounds of the known data.
        self.interp_fill = interp_fill

        # Without data there is nothing to interpolate: the interp_fun
        # attribute is simply never created.
        if known_data is None:
            return

        # Only pass fill behaviour to interp1d when explicitly requested;
        # otherwise keep interp1d's default (raise outside the data bounds).
        if interp_fill is None:
            extra_args = {}
        else:
            extra_args = {'fill_value': interp_fill, 'bounds_error': False}
        self.interp_fun = interp1d(
            known_data, interp_data, kind=interp_kind, **extra_args
        )

    def __call__(self, data):
        """Interpolate the given data points."""
        return self.interp_fun(data)
| {
"repo_name": "pauliacomi/pyGAPS",
"path": "src/pygaps/utilities/isotherm_interpolator.py",
"copies": "1",
"size": "2112",
"license": "mit",
"hash": 5408119305508866000,
"line_mean": 29.6086956522,
"line_max": 73,
"alpha_frac": 0.6051136364,
"autogenerated": false,
"ratio": 4.641758241758242,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5746871878158242,
"avg_score": null,
"num_lines": null
} |
""" A class which handles communication with the Imgur API
"""
import json
import requests
class ImgurAPI:
    """Handles communication with the Imgur API."""

    def __init__(self, config):
        # config must provide config['imgur']['client_id'] for authentication.
        self.config = config
        # Accepted MIME types; values are the extensions used for thumbnails.
        self.file_extensions = {
            'image/jpeg': 'jpg',
            'image/jpg': 'jpg',
            'image/png': 'png'
        }

    def _convert_raw_image(self, raw_image):
        """Converts a raw API image dict into our internal representation.

        Returns None (after logging) if any expected key is missing.
        """
        image = {}
        try:
            image['page_url'] = "https://imgur.com/gallery/%s" % raw_image['id']
            image['image_url'] = raw_image['link']
            # The 'm' suffix selects imgur's medium-sized thumbnail.
            image['thumbnail_url'] = "https://i.imgur.com/%sm.%s" % (
                raw_image['id'], self.file_extensions[raw_image['type']])
            image['title'] = raw_image['title']
        except KeyError:
            print("Malformed raw image, unable to convert")
            return None
        return image

    def _transform_result(self, result):
        """Filters an API result and converts each entry for internal use.

        Albums, NSFW entries, animated images, and unknown MIME types are
        skipped. Returns a (possibly empty) list of image dicts.
        """
        converted = []
        # Each raw entry in result['data'] is a dict like:
        #   {'type': 'image/jpeg', 'title': '...', 'id': 'Q5SVNpZ',
        #    'link': 'http://i.imgur.com/Q5SVNpZ.jpg', 'nsfw': False,
        #    'is_album': False, 'animated': False, ...}
        if 'data' not in result:
            print("No 'data' section in result")
            return []
        for raw_image in result['data']:
            # Do some filtering of which images we want to take in, and
            # transform the data into something more usable elsewhere.
            if raw_image['is_album'] is not False:
                # TODO: We can/should probably just pull out the 'images' section and turn those into entries, too.
                continue
            if raw_image['nsfw'] is not False and raw_image['nsfw'] is not None:
                continue
            if raw_image['animated'] is not False:
                continue
            if raw_image['type'] not in self.file_extensions:
                continue
            image = self._convert_raw_image(raw_image)
            if image is not None:
                converted.append(image)
        return converted

    def fetch_viral_images(self, api_endpoint):
        """Fetches the gallery at api_endpoint and returns converted images.

        Returns [] on any network, HTTP, or JSON parsing failure.
        """
        headers = {
            "Authorization": "Client-ID %s" % self.config['imgur']['client_id']
        }
        try:
            # The original call had no timeout and could hang indefinitely.
            # RequestException is the base of ConnectionError and Timeout,
            # so the "return [] on network failure" contract is preserved.
            r = requests.get(api_endpoint, headers=headers, timeout=30)
        except requests.exceptions.RequestException:
            print("Unable to connect to imgur")
            return []
        if r.status_code != 200:
            print("Bad status code from imgur:", r.status_code)
            return []
        try:
            result = json.loads(r.text)
        except ValueError:
            print("Malformed JSON response from imgur:", r.text)
            return []
        return self._transform_result(result)
| {
"repo_name": "xaroth8088/tournament-of-lulz",
"path": "image_populators/imgur/imgur_api.py",
"copies": "1",
"size": "3241",
"license": "mit",
"hash": -3371939071418490400,
"line_mean": 28.1981981982,
"line_max": 115,
"alpha_frac": 0.5057081148,
"autogenerated": false,
"ratio": 4.192755498059508,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5198463612859509,
"avg_score": null,
"num_lines": null
} |
# A class which presents the reverse of a sequence without duplicating it.
# From: "Steven D. Majewski" <sdm7g@elvis.med.virginia.edu>
# It works on mutable or inmutable sequences.
#
# >>> for c in Rev( 'Hello World!' ) : sys.stdout.write( c )
# ... else: sys.stdout.write( '\n' )
# ...
# !dlroW olleH
#
# The .forw is so you can use anonymous sequences in __init__, and still
# keep a reference the forward sequence. )
# If you give it a non-anonymous mutable sequence, the reverse sequence
# will track the updated values. ( but not reassignment! - another
# good reason to use anonymous values in creating the sequence to avoid
# confusion. Maybe it should be change to copy input sequence to break
# the connection completely ? )
#
# >>> nnn = range( 0, 3 )
# >>> rnn = Rev( nnn )
# >>> for n in rnn: print n
# ...
# 2
# 1
# 0
# >>> for n in range( 4, 6 ): nnn.append( n ) # update nnn
# ...
# >>> for n in rnn: print n # prints reversed updated values
# ...
# 5
# 4
# 2
# 1
# 0
# >>> nnn = nnn[1:-1]
# >>> nnn
# [1, 2, 4]
# >>> for n in rnn: print n # prints reversed values of old nnn
# ...
# 5
# 4
# 2
# 1
# 0
# >>>
#
# WH = Rev( 'Hello World!' )
# print WH.forw, WH.back
# nnn = Rev( range( 1, 10 ) )
# print nnn.forw
# print nnn
#
# produces output:
#
# Hello World! !dlroW olleH
# [1, 2, 3, 4, 5, 6, 7, 8, 9]
# [9, 8, 7, 6, 5, 4, 3, 2, 1]
#
# >>>rrr = Rev( nnn )
# >>>rrr
# <1, 2, 3, 4, 5, 6, 7, 8, 9>
from string import joinfields
class Rev:
    """Presents the reverse of a sequence without duplicating it.

    ``.forw`` keeps a reference to the forward sequence; indexing or
    iterating the instance itself (also reachable as ``.back``) yields the
    items in reverse order. Mutations of a mutable forward sequence are
    visible through the reversed view.
    """

    def __init__(self, seq):
        self.forw = seq
        self.back = self  # the reversed view is the object itself

    def __len__(self):
        return len(self.forw)

    def __getitem__(self, j):
        # Index from the end; IndexError past the end also terminates
        # old-style (__getitem__-protocol) iteration over this object.
        return self.forw[-(j + 1)]

    def __repr__(self):
        seq = self.forw
        if type(seq) == type([]):
            wrap = '[]'
            sep = ', '
        elif type(seq) == type(()):
            wrap = '()'
            sep = ', '
        elif type(seq) == type(''):
            wrap = ''
            sep = ''
        else:
            wrap = '<>'
            sep = ', '
        # str.join replaces string.joinfields, which was removed from the
        # standard library; the produced text is identical.
        outstrs = [str(item) for item in self.back]
        return wrap[:1] + sep.join(outstrs) + wrap[-1:]
| {
"repo_name": "OS2World/APP-INTERNET-torpak_2",
"path": "Demo/classes/Rev.py",
"copies": "1",
"size": "2196",
"license": "mit",
"hash": -5882985264392080000,
"line_mean": 23.6741573034,
"line_max": 74,
"alpha_frac": 0.5250455373,
"autogenerated": false,
"ratio": 2.931909212283044,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.8894026795811487,
"avg_score": 0.012585590754311305,
"num_lines": 89
} |
""" A clean, no_frills character-level generative language model.
Based on Andrej Karpathy's blog:
http://karpathy.github.io/2015/05/21/rnn-effectiveness/
"""
from __future__ import print_function
import os
import time
import tensorflow as tf
DATA_PATH = 'data/arvix_abstracts.txt'  # training corpus, one abstract per line
HIDDEN_SIZE = 200  # GRU hidden-state size
BATCH_SIZE = 64  # sequences per training batch
NUM_STEPS = 50  # RNN unroll length (characters per chunk)
SKIP_STEP = 40  # iterations between progress reports / checkpoints
TEMPRATURE = 0.7  # sampling temperature (sic: misspelling kept, referenced elsewhere)
LR = 0.003  # Adam learning rate
LEN_GENERATED = 300  # characters generated per inference run
def vocab_encode(text, vocab):
    """Encode text as 1-based vocab indices, silently dropping unknown chars."""
    encoded = []
    for ch in text:
        if ch in vocab:
            encoded.append(vocab.index(ch) + 1)
    return encoded
def vocab_decode(array, vocab):
return ''.join([vocab[x - 1] for x in array])
def read_data(filename, vocab, window=NUM_STEPS, overlap=NUM_STEPS/2):
    """Yield fixed-length, zero-padded chunks of vocab-encoded text lines."""
    for line in open(filename):
        encoded = vocab_encode(line, vocab)
        # overlap may arrive as a float (NUM_STEPS / 2); range needs an int.
        step = int(overlap)
        for begin in range(0, len(encoded) - window, step):
            piece = encoded[begin: begin + window]
            piece += [0] * (window - len(piece))
            yield piece
def read_batch(stream, batch_size=None):
    """Group elements of `stream` into lists of `batch_size` elements.

    Fixes two issues in the original:
    * the trailing partial batch is yielded only when non-empty (the old
      code yielded an empty list whenever the stream length was an exact
      multiple of the batch size);
    * the module-level BATCH_SIZE default is resolved at call time instead
      of function-definition time, so read_batch(stream) behaves as before
      while the function is importable on its own.
    """
    if batch_size is None:
        batch_size = BATCH_SIZE
    batch = []
    for element in stream:
        batch.append(element)
        if len(batch) == batch_size:
            yield batch
            batch = []
    if batch:
        yield batch
def create_rnn(seq, hidden_size=HIDDEN_SIZE):
    """Build a GRU layer over `seq`; return (output, in_state, out_state).

    `in_state` is a placeholder that defaults to the zero state, so callers
    may feed a previous state (for stateful generation) or omit it entirely.
    """
    cell = tf.nn.rnn_cell.GRUCell(hidden_size)
    # Zero state sized to the runtime batch, used when no state is fed.
    in_state = tf.placeholder_with_default(
        cell.zero_state(tf.shape(seq)[0], tf.float32), [None, hidden_size])
    # this line to calculate the real length of seq
    # all seq are padded to be of the same length which is NUM_STEPS
    length = tf.reduce_sum(tf.reduce_max(tf.sign(seq), 2), 1)
    output, out_state = tf.nn.dynamic_rnn(cell, seq, length, in_state)
    return output, in_state, out_state
def create_model(seq, temp, vocab, hidden=HIDDEN_SIZE):
    """Build the full graph: loss, a sampling op, and the RNN in/out states."""
    seq = tf.one_hot(seq, len(vocab))
    output, in_state, out_state = create_rnn(seq, hidden)
    # fully_connected is syntactic sugar for tf.matmul(w, output) + b
    # it will create w and b for us
    logits = tf.contrib.layers.fully_connected(output, len(vocab), None)
    # Predict the character at step t+1 from the logits at step t.
    loss = tf.reduce_sum(tf.nn.softmax_cross_entropy_with_logits(logits=logits[:, :-1], labels=seq[:, 1:]))
    # sample the next character from Maxwell-Boltzmann Distribution with temperature temp
    # it works equally well without tf.exp
    sample = tf.multinomial(tf.exp(logits[:, -1] / temp), 1)[:, 0]
    return loss, sample, in_state, out_state
def training(vocab, seq, loss, optimizer, global_step, temp, sample, in_state, out_state):
    """Run the training loop, periodically sampling text and checkpointing.

    Restores from the latest checkpoint under checkpoints/arvix when one
    exists, so training resumes where it left off.
    """
    saver = tf.train.Saver()
    start = time.time()
    with tf.Session() as sess:
        writer = tf.summary.FileWriter('graphs/gist', sess.graph)
        sess.run(tf.global_variables_initializer())
        ckpt = tf.train.get_checkpoint_state(os.path.dirname('checkpoints/arvix/checkpoint'))
        if ckpt and ckpt.model_checkpoint_path:
            saver.restore(sess, ckpt.model_checkpoint_path)
        # global_step was restored with the checkpoint (or is 0 when fresh).
        iteration = global_step.eval()
        for batch in read_batch(read_data(DATA_PATH, vocab)):
            batch_loss, _ = sess.run([loss, optimizer], {seq: batch})
            if (iteration + 1) % SKIP_STEP == 0:
                print('Iter {}. \n Loss {}. Time {}'.format(iteration, batch_loss, time.time() - start))
                # Print a generated sample so training quality is visible.
                online_inference(sess, vocab, seq, sample, temp, in_state, out_state)
                start = time.time()
                saver.save(sess, 'checkpoints/arvix/char-rnn', iteration)
            iteration += 1
def online_inference(sess, vocab, seq, sample, temp, in_state, out_state, seed='T'):
    """ Generate sequence one character at a time, based on the previous character
    """
    sentence = seed
    state = None
    for _ in range(LEN_GENERATED):
        # Feed only the last generated character; the RNN state carries history.
        batch = [vocab_encode(sentence[-1], vocab)]
        feed = {seq: batch, temp: TEMPRATURE}
        # for the first decoder step, the state is None
        if state is not None:
            feed.update({in_state: state})
        index, state = sess.run([sample, out_state], feed)
        sentence += vocab_decode(index, vocab)
    print(sentence)
def main():
    """Assemble the char-RNN graph and start training."""
    # Every character the model can emit; index 0 is reserved for padding.
    vocab = (
            " $%'()+,-./0123456789:;=?ABCDEFGHIJKLMNOPQRSTUVWXYZ"
            "\\^_abcdefghijklmnopqrstuvwxyz{|}")
    seq = tf.placeholder(tf.int32, [None, None])
    temp = tf.placeholder(tf.float32)
    loss, sample, in_state, out_state = create_model(seq, temp, vocab)
    global_step = tf.Variable(0, dtype=tf.int32, trainable=False, name='global_step')
    optimizer = tf.train.AdamOptimizer(LR).minimize(loss, global_step=global_step)
    training(vocab, seq, loss, optimizer, global_step, temp, sample, in_state, out_state)
if __name__ == '__main__':
main()
| {
"repo_name": "infilect/ml-course1",
"path": "week3/rnn-lstm/scripts/simple-lstm-on-ptb/char_rnn_gist.py",
"copies": "2",
"size": "4623",
"license": "mit",
"hash": -6780708157374466000,
"line_mean": 39.5526315789,
"line_max": 107,
"alpha_frac": 0.6355180619,
"autogenerated": false,
"ratio": 3.4168514412416853,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5052369503141685,
"avg_score": null,
"num_lines": null
} |
""" A clean, no_frills character-level generative language model.
Created by Danijar Hafner (danijar.com), edited by Chip Huyen
for the class CS 20SI: "TensorFlow for Deep Learning Research"
Based on Andrej Karpathy's blog:
http://karpathy.github.io/2015/05/21/rnn-effectiveness/
"""
import os
os.environ['TF_CPP_MIN_LOG_LEVEL']='2'
import sys
sys.path.append('..')
import time
import tensorflow as tf
import utils
DATA_PATH = 'data/arvix_abstracts.txt'  # training corpus, one abstract per line
HIDDEN_SIZE = 200  # GRU hidden-state size
BATCH_SIZE = 64  # sequences per training batch
NUM_STEPS = 50  # RNN unroll length (characters per chunk)
SKIP_STEP = 40  # iterations between progress reports / checkpoints
TEMPRATURE = 0.7  # sampling temperature (sic: misspelling kept, referenced elsewhere)
LR = 0.003  # Adam learning rate
LEN_GENERATED = 300  # characters generated per inference run
def vocab_encode(text, vocab):
    """Encode text as 1-based vocab indices, silently dropping unknown chars."""
    encoded = []
    for ch in text:
        if ch in vocab:
            encoded.append(vocab.index(ch) + 1)
    return encoded
def vocab_decode(array, vocab):
    """Decode 1-based vocab indices back into a string."""
    chars = [vocab[i - 1] for i in array]
    return ''.join(chars)
def read_data(filename, vocab, window=NUM_STEPS, overlap=NUM_STEPS//2):
    """Yield fixed-length, zero-padded chunks of vocab-encoded text lines."""
    for line in open(filename):
        encoded = vocab_encode(line, vocab)
        for begin in range(0, len(encoded) - window, overlap):
            piece = encoded[begin: begin + window]
            piece += [0] * (window - len(piece))
            yield piece
def read_batch(stream, batch_size=None):
    """Group elements of `stream` into lists of `batch_size` elements.

    Fixes two issues in the original:
    * the trailing partial batch is yielded only when non-empty (the old
      code yielded an empty list whenever the stream length was an exact
      multiple of the batch size);
    * the module-level BATCH_SIZE default is resolved at call time instead
      of function-definition time, so read_batch(stream) behaves as before
      while the function is importable on its own.
    """
    if batch_size is None:
        batch_size = BATCH_SIZE
    batch = []
    for element in stream:
        batch.append(element)
        if len(batch) == batch_size:
            yield batch
            batch = []
    if batch:
        yield batch
def create_rnn(seq, hidden_size=HIDDEN_SIZE):
    """Build a GRU layer over `seq`; return (output, in_state, out_state).

    `in_state` is a placeholder that defaults to the zero state, so callers
    may feed a previous state (for stateful generation) or omit it entirely.
    """
    cell = tf.contrib.rnn.GRUCell(hidden_size)
    # Zero state sized to the runtime batch, used when no state is fed.
    in_state = tf.placeholder_with_default(
        cell.zero_state(tf.shape(seq)[0], tf.float32), [None, hidden_size])
    # this line to calculate the real length of seq
    # all seq are padded to be of the same length which is NUM_STEPS
    length = tf.reduce_sum(tf.reduce_max(tf.sign(seq), 2), 1)
    output, out_state = tf.nn.dynamic_rnn(cell, seq, length, in_state)
    return output, in_state, out_state
def create_model(seq, temp, vocab, hidden=HIDDEN_SIZE):
    """Build the full graph: loss, a sampling op, and the RNN in/out states."""
    seq = tf.one_hot(seq, len(vocab))
    output, in_state, out_state = create_rnn(seq, hidden)
    # fully_connected is syntactic sugar for tf.matmul(w, output) + b
    # it will create w and b for us
    logits = tf.contrib.layers.fully_connected(output, len(vocab), None)
    # Predict the character at step t+1 from the logits at step t.
    loss = tf.reduce_sum(tf.nn.softmax_cross_entropy_with_logits(logits=logits[:, :-1], labels=seq[:, 1:]))
    # sample the next character from Maxwell-Boltzmann Distribution with temperature temp
    # it works equally well without tf.exp
    sample = tf.multinomial(tf.exp(logits[:, -1] / temp), 1)[:, 0]
    return loss, sample, in_state, out_state
def training(vocab, seq, loss, optimizer, global_step, temp, sample, in_state, out_state):
    """Run the training loop, periodically sampling text and checkpointing.

    Restores from the latest checkpoint under checkpoints/arvix when one
    exists, so training resumes where it left off.
    """
    saver = tf.train.Saver()
    start = time.time()
    with tf.Session() as sess:
        writer = tf.summary.FileWriter('graphs/gist', sess.graph)
        sess.run(tf.global_variables_initializer())
        ckpt = tf.train.get_checkpoint_state(os.path.dirname('checkpoints/arvix/checkpoint'))
        if ckpt and ckpt.model_checkpoint_path:
            saver.restore(sess, ckpt.model_checkpoint_path)
        # global_step was restored with the checkpoint (or is 0 when fresh).
        iteration = global_step.eval()
        for batch in read_batch(read_data(DATA_PATH, vocab)):
            batch_loss, _ = sess.run([loss, optimizer], {seq: batch})
            if (iteration + 1) % SKIP_STEP == 0:
                print('Iter {}. \n Loss {}. Time {}'.format(iteration, batch_loss, time.time() - start))
                # Print a generated sample so training quality is visible.
                online_inference(sess, vocab, seq, sample, temp, in_state, out_state)
                start = time.time()
                saver.save(sess, 'checkpoints/arvix/char-rnn', iteration)
            iteration += 1
def online_inference(sess, vocab, seq, sample, temp, in_state, out_state, seed='T'):
    """ Generate sequence one character at a time, based on the previous character
    """
    sentence = seed
    state = None
    for _ in range(LEN_GENERATED):
        # Feed only the last generated character; the RNN state carries history.
        batch = [vocab_encode(sentence[-1], vocab)]
        feed = {seq: batch, temp: TEMPRATURE}
        # for the first decoder step, the state is None
        if state is not None:
            feed.update({in_state: state})
        index, state = sess.run([sample, out_state], feed)
        sentence += vocab_decode(index, vocab)
    print(sentence)
def main():
    """Assemble the char-RNN graph, ensure checkpoint dirs exist, and train."""
    # Every character the model can emit; index 0 is reserved for padding.
    vocab = (
            " $%'()+,-./0123456789:;=?ABCDEFGHIJKLMNOPQRSTUVWXYZ"
            "\\^_abcdefghijklmnopqrstuvwxyz{|}")
    seq = tf.placeholder(tf.int32, [None, None])
    temp = tf.placeholder(tf.float32)
    loss, sample, in_state, out_state = create_model(seq, temp, vocab)
    global_step = tf.Variable(0, dtype=tf.int32, trainable=False, name='global_step')
    optimizer = tf.train.AdamOptimizer(LR).minimize(loss, global_step=global_step)
    utils.make_dir('checkpoints')
    utils.make_dir('checkpoints/arvix')
    training(vocab, seq, loss, optimizer, global_step, temp, sample, in_state, out_state)
if __name__ == '__main__':
main() | {
"repo_name": "YeEmrick/learning",
"path": "stanford-tensorflow/2017/examples/11_char_rnn_gist.py",
"copies": "1",
"size": "4833",
"license": "apache-2.0",
"hash": -3619107956642548700,
"line_mean": 38.3008130081,
"line_max": 107,
"alpha_frac": 0.6405959032,
"autogenerated": false,
"ratio": 3.391578947368421,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4532174850568421,
"avg_score": null,
"num_lines": null
} |
""" A clean, no_frills character-level generative language model.
Created by Danijar Hafner, edited by Chip Huyen
for the class CS 20SI: "TensorFlow for Deep Learning Research"
Based on Andrej Karpathy's blog:
http://karpathy.github.io/2015/05/21/rnn-effectiveness/
"""
from __future__ import print_function
import os
import time
import tensorflow as tf
DATA_PATH = '../data/arvix_abstracts.txt'  # training corpus, one abstract per line
HIDDEN_SIZE = 200  # GRU hidden-state size
BATCH_SIZE = 64  # sequences per training batch
NUM_STEPS = 50  # RNN unroll length (characters per chunk)
SKIP_STEP = 40  # iterations between progress reports / checkpoints
TEMPRATURE = 0.7  # sampling temperature (sic: misspelling kept, referenced elsewhere)
LR = 0.003  # Adam learning rate
LEN_GENERATED = 300  # characters generated per inference run
def vocab_encode(text, vocab):
    """Encode text as 1-based vocab indices, silently dropping unknown chars."""
    encoded = []
    for ch in text:
        if ch in vocab:
            encoded.append(vocab.index(ch) + 1)
    return encoded
def vocab_decode(array, vocab):
    """Decode 1-based vocab indices back into a string."""
    chars = [vocab[i - 1] for i in array]
    return ''.join(chars)
def read_data(filename, vocab, window=NUM_STEPS, overlap=NUM_STEPS//2):
    """Yield fixed-length, zero-padded chunks of vocab-encoded text lines.

    Fix: the default overlap now uses floor division. The file imports
    print_function to target Python 2/3, and under Python 3 the original
    NUM_STEPS / 2 produced a float, making range() raise TypeError. The
    sibling copy of this script already uses NUM_STEPS // 2.
    """
    for text in open(filename):
        text = vocab_encode(text, vocab)
        # int() keeps explicitly-passed float overlaps working as well.
        for start in range(0, len(text) - window, int(overlap)):
            chunk = text[start: start + window]
            chunk += [0] * (window - len(chunk))
            yield chunk
def read_batch(stream, batch_size=None):
    """Group elements of `stream` into lists of `batch_size` elements.

    Fixes two issues in the original:
    * the trailing partial batch is yielded only when non-empty (the old
      code yielded an empty list whenever the stream length was an exact
      multiple of the batch size);
    * the module-level BATCH_SIZE default is resolved at call time instead
      of function-definition time, so read_batch(stream) behaves as before
      while the function is importable on its own.
    """
    if batch_size is None:
        batch_size = BATCH_SIZE
    batch = []
    for element in stream:
        batch.append(element)
        if len(batch) == batch_size:
            yield batch
            batch = []
    if batch:
        yield batch
def create_rnn(seq, hidden_size=HIDDEN_SIZE):
    """Build a GRU layer over `seq`; return (output, in_state, out_state).

    `in_state` is a placeholder that defaults to the zero state, so callers
    may feed a previous state (for stateful generation) or omit it entirely.
    """
    cell = tf.nn.rnn_cell.GRUCell(hidden_size)
    # Zero state sized to the runtime batch, used when no state is fed.
    in_state = tf.placeholder_with_default(
        cell.zero_state(tf.shape(seq)[0], tf.float32), [None, hidden_size])
    # this line to calculate the real length of seq
    # all seq are padded to be of the same length which is NUM_STEPS
    length = tf.reduce_sum(tf.reduce_max(tf.sign(seq), 2), 1)
    output, out_state = tf.nn.dynamic_rnn(cell, seq, length, in_state)
    return output, in_state, out_state
def create_model(seq, temp, vocab, hidden=HIDDEN_SIZE):
    """Build the full graph: loss, a sampling op, and the RNN in/out states."""
    seq = tf.one_hot(seq, len(vocab))
    output, in_state, out_state = create_rnn(seq, hidden)
    # fully_connected is syntactic sugar for tf.matmul(w, output) + b
    # it will create w and b for us
    logits = tf.contrib.layers.fully_connected(output, len(vocab), None)
    # Predict the character at step t+1 from the logits at step t.
    # TensorFlow >= 1.0 requires the logits=/labels= keyword arguments here;
    # the earlier positional call is rejected by the 1.x API. This also makes
    # the call consistent with the sibling copies of this script.
    loss = tf.reduce_sum(tf.nn.softmax_cross_entropy_with_logits(
        logits=logits[:, :-1], labels=seq[:, 1:]))
    # sample the next character from Maxwell-Boltzmann Distribution with temperature temp
    # it works equally well without tf.exp
    sample = tf.multinomial(tf.exp(logits[:, -1] / temp), 1)[:, 0]
    return loss, sample, in_state, out_state
def training(vocab, seq, loss, optimizer, global_step, temp, sample, in_state, out_state):
    """Run the training loop, periodically sampling text and checkpointing.

    Restores from the latest checkpoint under checkpoints/arvix when one
    exists, so training resumes where it left off.
    """
    saver = tf.train.Saver()
    start = time.time()
    with tf.Session() as sess:
        writer = tf.summary.FileWriter('graphs/gist', sess.graph)
        sess.run(tf.global_variables_initializer())
        ckpt = tf.train.get_checkpoint_state(os.path.dirname('checkpoints/arvix/checkpoint'))
        if ckpt and ckpt.model_checkpoint_path:
            saver.restore(sess, ckpt.model_checkpoint_path)
        # global_step was restored with the checkpoint (or is 0 when fresh).
        iteration = global_step.eval()
        for batch in read_batch(read_data(DATA_PATH, vocab)):
            batch_loss, _ = sess.run([loss, optimizer], {seq: batch})
            if (iteration + 1) % SKIP_STEP == 0:
                print('Iter {}. \n Loss {}. Time {}'.format(iteration, batch_loss, time.time() - start))
                # Print a generated sample so training quality is visible.
                online_inference(sess, vocab, seq, sample, temp, in_state, out_state)
                start = time.time()
                saver.save(sess, 'checkpoints/arvix/char-rnn', iteration)
            iteration += 1
def online_inference(sess, vocab, seq, sample, temp, in_state, out_state, seed='T'):
    """ Generate sequence one character at a time, based on the previous character
    """
    sentence = seed
    state = None
    for _ in range(LEN_GENERATED):
        # Feed only the last generated character; the RNN state carries history.
        batch = [vocab_encode(sentence[-1], vocab)]
        feed = {seq: batch, temp: TEMPRATURE}
        # for the first decoder step, the state is None
        if state is not None:
            feed.update({in_state: state})
        index, state = sess.run([sample, out_state], feed)
        sentence += vocab_decode(index, vocab)
    print(sentence)
def main():
    """Assemble the char-RNN graph and start training."""
    # Every character the model can emit; index 0 is reserved for padding.
    vocab = (
            " $%'()+,-./0123456789:;=?ABCDEFGHIJKLMNOPQRSTUVWXYZ"
            "\\^_abcdefghijklmnopqrstuvwxyz{|}")
    seq = tf.placeholder(tf.int32, [None, None])
    temp = tf.placeholder(tf.float32)
    loss, sample, in_state, out_state = create_model(seq, temp, vocab)
    global_step = tf.Variable(0, dtype=tf.int32, trainable=False, name='global_step')
    optimizer = tf.train.AdamOptimizer(LR).minimize(loss, global_step=global_step)
    training(vocab, seq, loss, optimizer, global_step, temp, sample, in_state, out_state)
if __name__ == '__main__':
main() | {
"repo_name": "kabrapratik28/Stanford_courses",
"path": "cs20si/tf-stanford-tutorials/examples/11_char_rnn_gist.py",
"copies": "2",
"size": "4684",
"license": "apache-2.0",
"hash": -3947805301999071000,
"line_mean": 39.3879310345,
"line_max": 107,
"alpha_frac": 0.6387702818,
"autogenerated": false,
"ratio": 3.4164843180160465,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5055254599816046,
"avg_score": null,
"num_lines": null
} |
""" A clean, no_frills character-level generative language model.
CS 20: "TensorFlow for Deep Learning Research"
cs20.stanford.edu
Danijar Hafner (mail@danijar.com)
& Chip Huyen (chiphuyen@cs.stanford.edu)
Lecture 11
"""
import os
os.environ['TF_CPP_MIN_LOG_LEVEL']='2'
import random
import sys
sys.path.append('..')
import time
import tensorflow as tf
import utils
def vocab_encode(text, vocab):
    """Encode text as 1-based vocab indices, silently dropping unknown chars."""
    encoded = []
    for ch in text:
        if ch in vocab:
            encoded.append(vocab.index(ch) + 1)
    return encoded
def vocab_decode(array, vocab):
    """Decode 1-based vocab indices back into a string."""
    chars = [vocab[i - 1] for i in array]
    return ''.join(chars)
def read_data(filename, vocab, window, overlap):
    """Endlessly yield padded, encoded chunks, reshuffling lines each pass."""
    lines = [line.strip() for line in open(filename, 'r').readlines()]
    while True:
        # A fresh shuffle per epoch varies the batch composition.
        random.shuffle(lines)
        for line in lines:
            encoded = vocab_encode(line, vocab)
            for begin in range(0, len(encoded) - window, overlap):
                piece = encoded[begin: begin + window]
                piece += [0] * (window - len(piece))
                yield piece
def read_batch(stream, batch_size):
    """Group elements of `stream` into lists of `batch_size` elements.

    Fix: the trailing partial batch is yielded only when non-empty. The
    original yielded an empty list when the stream length was an exact
    multiple of batch_size (only reachable here with a finite stream,
    but wrong for any caller that relies on non-empty batches).
    """
    batch = []
    for element in stream:
        batch.append(element)
        if len(batch) == batch_size:
            yield batch
            batch = []
    if batch:
        yield batch
class CharRNN(object):
    """Character-level generative language model: stacked GRUs trained to
    predict the next character, with temperature-based sampling."""

    def __init__(self, model):
        """Store hyperparameters and graph inputs for the named corpus.

        `model` selects both the data file (data/<model>.txt) and the
        checkpoint directory.
        """
        self.model = model
        self.path = 'data/' + model + '.txt'
        # Twitter corpora need extra symbols (handles, hashtags, emoji).
        if 'trump' in model:
            self.vocab = ("$%'()+,-./0123456789:;=?ABCDEFGHIJKLMNOPQRSTUVWXYZ"
                          " '\"_abcdefghijklmnopqrstuvwxyz{|}@#➡📈")
        else:
            self.vocab = (" $%'()+,-./0123456789:;=?ABCDEFGHIJKLMNOPQRSTUVWXYZ"
                          "\\^_abcdefghijklmnopqrstuvwxyz{|}")
        # Batch of integer-encoded character sequences.
        self.seq = tf.placeholder(tf.int32, [None, None])
        self.temp = tf.constant(1.5)  # sampling temperature
        self.hidden_sizes = [128, 256]  # one GRU layer per entry
        self.batch_size = 64
        self.lr = 0.0003
        self.skip_step = 1  # iterations between reports/samples/saves
        self.num_steps = 50 # for RNN unrolled
        self.len_generated = 200  # characters generated per seed
        self.gstep = tf.Variable(0, dtype=tf.int32, trainable=False, name='global_step')

    def create_rnn(self, seq):
        """Build the stacked GRU over `seq`; sets in_state/output/out_state."""
        layers = [tf.nn.rnn_cell.GRUCell(size) for size in self.hidden_sizes]
        cells = tf.nn.rnn_cell.MultiRNNCell(layers)
        batch = tf.shape(seq)[0]
        zero_states = cells.zero_state(batch, dtype=tf.float32)
        # Each layer's input state defaults to zeros unless explicitly fed.
        self.in_state = tuple([tf.placeholder_with_default(state, [None, state.shape[1]])
                               for state in zero_states])
        # this line to calculate the real length of seq
        # all seq are padded to be of the same length, which is num_steps
        length = tf.reduce_sum(tf.reduce_max(tf.sign(seq), 2), 1)
        self.output, self.out_state = tf.nn.dynamic_rnn(cells, seq, length, self.in_state)

    def create_model(self):
        """Define logits, loss, the sampling op, and the Adam training op."""
        seq = tf.one_hot(self.seq, len(self.vocab))
        self.create_rnn(seq)
        self.logits = tf.layers.dense(self.output, len(self.vocab), None)
        # Predict the character at step t+1 from the logits at step t.
        loss = tf.nn.softmax_cross_entropy_with_logits(logits=self.logits[:, :-1],
                                                        labels=seq[:, 1:])
        self.loss = tf.reduce_sum(loss)
        # sample the next character from Maxwell-Boltzmann Distribution
        # with temperature temp. It works equally well without tf.exp
        self.sample = tf.multinomial(tf.exp(self.logits[:, -1] / self.temp), 1)[:, 0]
        self.opt = tf.train.AdamOptimizer(self.lr).minimize(self.loss, global_step=self.gstep)

    def train(self):
        """Training loop: resumes from a checkpoint and saves periodically."""
        saver = tf.train.Saver()
        start = time.time()
        min_loss = None
        with tf.Session() as sess:
            writer = tf.summary.FileWriter('graphs/gist', sess.graph)
            sess.run(tf.global_variables_initializer())
            ckpt = tf.train.get_checkpoint_state(os.path.dirname('checkpoints/' + self.model + '/checkpoint'))
            if ckpt and ckpt.model_checkpoint_path:
                saver.restore(sess, ckpt.model_checkpoint_path)
            # gstep was restored with the checkpoint (or is 0 when fresh).
            iteration = self.gstep.eval()
            stream = read_data(self.path, self.vocab, self.num_steps, overlap=self.num_steps//2)
            data = read_batch(stream, self.batch_size)
            while True:
                batch = next(data)
                # for batch in read_batch(read_data(DATA_PATH, vocab)):
                batch_loss, _ = sess.run([self.loss, self.opt], {self.seq: batch})
                if (iteration + 1) % self.skip_step == 0:
                    print('Iter {}. \n Loss {}. Time {}'.format(iteration, batch_loss, time.time() - start))
                    self.online_infer(sess)
                    start = time.time()
                    checkpoint_name = 'checkpoints/' + self.model + '/char-rnn'
                    # NOTE(review): min_loss is never assigned in the first
                    # branch, so it stays None and every report saves; the
                    # elif path looks unreachable -- confirm intent.
                    if min_loss is None:
                        saver.save(sess, checkpoint_name, iteration)
                    elif batch_loss < min_loss:
                        saver.save(sess, checkpoint_name, iteration)
                        min_loss = batch_loss
                iteration += 1

    def online_infer(self, sess):
        """ Generate sequence one character at a time, based on the previous character
        """
        for seed in ['Hillary', 'I', 'R', 'T', '@', 'N', 'M', '.', 'G', 'A', 'W']:
            sentence = seed
            state = None
            for _ in range(self.len_generated):
                # Feed only the last character; the RNN state carries history.
                batch = [vocab_encode(sentence[-1], self.vocab)]
                feed = {self.seq: batch}
                if state is not None: # for the first decoder step, the state is None
                    for i in range(len(state)):
                        feed.update({self.in_state[i]: state[i]})
                index, state = sess.run([self.sample, self.out_state], feed)
                sentence += vocab_decode(index, self.vocab)
            print('\t' + sentence)
def main():
    """Train the character RNN on the Trump-tweet corpus."""
    model = 'trump_tweets'
    # Checkpoint directories must exist before tf.train.Saver writes to them.
    utils.safe_mkdir('checkpoints')
    utils.safe_mkdir('checkpoints/' + model)
    lm = CharRNN(model)
    lm.create_model()
    lm.train()
if __name__ == '__main__':
main() | {
"repo_name": "YeEmrick/learning",
"path": "stanford-tensorflow/examples/11_char_rnn.py",
"copies": "1",
"size": "6017",
"license": "apache-2.0",
"hash": 3652099170834995000,
"line_mean": 39.0866666667,
"line_max": 111,
"alpha_frac": 0.5595475715,
"autogenerated": false,
"ratio": 3.724907063197026,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9737559907905751,
"avg_score": 0.009378945358255205,
"num_lines": 150
} |
"""A cleanup tool for HTML.
Removes unwanted tags and content. See the `Cleaner` class for
details.
"""
import re
import copy
try:
from urlparse import urlsplit
except ImportError:
# Python 3
from urllib.parse import urlsplit
from lxml import etree
from lxml.html import defs
from lxml.html import fromstring, tostring, XHTML_NAMESPACE
from lxml.html import _nons, _transform_result
try:
set
except NameError:
# Python 3
from sets import Set as set
try:
unichr = __builtins__['unichr']
except (NameError, KeyError):
# Python 3
unichr = chr
try:
unicode = __builtins__['unicode']
except (NameError, KeyError):
# Python 3
unicode = str
try:
bytes = __builtins__['bytes']
except (NameError, KeyError):
# Python < 2.6
bytes = str
try:
basestring = __builtins__['basestring']
except (NameError, KeyError):
basestring = (str, bytes)
__all__ = ['clean_html', 'clean', 'Cleaner', 'autolink', 'autolink_html',
'word_break', 'word_break_html']
# Look at http://code.sixapart.com/trac/livejournal/browser/trunk/cgi-bin/cleanhtml.pl
# Particularly the CSS cleaning; most of the tag cleaning is integrated now
# I have multiple kinds of schemes searched; but should schemes be
# whitelisted instead?
# max height?
# remove images? Also in CSS? background attribute?
# Some way to whitelist object, iframe, etc (e.g., if you want to
# allow *just* embedded YouTube movies)
# Log what was deleted and why?
# style="behavior: ..." might be bad in IE?
# Should we have something for just <meta http-equiv>? That's the worst of the
# metas.
# UTF-7 detections? Example:
# <HEAD><META HTTP-EQUIV="CONTENT-TYPE" CONTENT="text/html; charset=UTF-7"> </HEAD>+ADw-SCRIPT+AD4-alert('XSS');+ADw-/SCRIPT+AD4-
# you don't always have to have the charset set, if the page has no charset
# and there's UTF7-like code in it.
# Look at these tests: http://htmlpurifier.org/live/smoketests/xssAttacks.php
# This is an IE-specific construct you can have in a stylesheet to
# run some Javascript:
# Matches the IE-only CSS expression(...) construct, which can run script.
_css_javascript_re = re.compile(
    r'expression\s*\(.*?\)', re.S|re.I)
# Do I have to worry about @\nimport?
# Matches CSS @import rules, which can pull in external stylesheets.
_css_import_re = re.compile(
    r'@\s*import', re.I)
# All kinds of schemes besides just javascript: that can cause
# execution:
_javascript_scheme_re = re.compile(
    r'\s*(?:javascript|jscript|livescript|vbscript|about|mocha):', re.I)
# Bound .sub for runs of whitespace; the replacement is chosen at call sites.
_substitute_whitespace = re.compile(r'\s+').sub
# FIXME: should data: be blocked?
# FIXME: check against: http://msdn2.microsoft.com/en-us/library/ms537512.aspx
# Matches the opening of IE conditional comments, e.g. "[if IE]>".
_conditional_comment_re = re.compile(
    r'\[if[\s\n\r]+.*?][\s\n\r]*>', re.I|re.S)
# XPath: all elements (including the context node) with a style attribute.
_find_styled_elements = etree.XPath(
    "descendant-or-self::*[@style]")
# XPath: all <a> elements (HTML or XHTML namespace) whose href is non-empty
# and not a same-document fragment reference.
_find_external_links = etree.XPath(
    ("descendant-or-self::a [normalize-space(@href) and substring(normalize-space(@href),1,1) != '#'] |"
     "descendant-or-self::x:a[normalize-space(@href) and substring(normalize-space(@href),1,1) != '#']"),
    namespaces={'x':XHTML_NAMESPACE})
class Cleaner(object):
    """
    Instances clean the document of each of the possible offending
    elements.  The cleaning is controlled by attributes; you can
    override attributes in a subclass, or set them in the constructor.

    ``scripts``:
        Removes any ``<script>`` tags.
    ``javascript``:
        Removes any Javascript, like an ``onclick`` attribute.
    ``comments``:
        Removes any comments.
    ``style``:
        Removes any style tags or attributes.
    ``links``:
        Removes any ``<link>`` tags
    ``meta``:
        Removes any ``<meta>`` tags
    ``page_structure``:
        Structural parts of a page: ``<head>``, ``<html>``, ``<title>``.
    ``processing_instructions``:
        Removes any processing instructions.
    ``embedded``:
        Removes any embedded objects (flash, iframes)
    ``frames``:
        Removes any frame-related tags
    ``forms``:
        Removes any form tags
    ``annoying_tags``:
        Tags that aren't *wrong*, but are annoying.  ``<blink>`` and ``<marquee>``
    ``remove_tags``:
        A list of tags to remove.
    ``allow_tags``:
        A list of tags to include (default include all).
    ``remove_unknown_tags``:
        Remove any tags that aren't standard parts of HTML.
    ``safe_attrs_only``:
        If true, only include 'safe' attributes (specifically the list
        from `feedparser
        <http://feedparser.org/docs/html-sanitization.html>`_).
    ``add_nofollow``:
        If true, then any <a> tags will have ``rel="nofollow"`` added to them.
    ``host_whitelist``:
        A list or set of hosts that you can use for embedded content
        (for content like ``<object>``, ``<link rel="stylesheet">``, etc).
        You can also implement/override the method
        ``allow_embedded_url(el, url)`` or ``allow_element(el)`` to
        implement more complex rules for what can be embedded.
        Anything that passes this test will be shown, regardless of
        the value of (for instance) ``embedded``.
        Note that this parameter might not work as intended if you do not
        make the links absolute before doing the cleaning.
    ``whitelist_tags``:
        A set of tags that can be included with ``host_whitelist``.
        The default is ``iframe`` and ``embed``; you may wish to
        include other tags like ``script``, or you may want to
        implement ``allow_embedded_url`` for more control.  Set to None to
        include all tags.

    This modifies the document *in place*.
    """
    scripts = True
    javascript = True
    comments = True
    style = False
    links = True
    meta = True
    page_structure = True
    processing_instructions = True
    embedded = True
    frames = True
    forms = True
    annoying_tags = True
    remove_tags = None
    allow_tags = None
    remove_unknown_tags = True
    safe_attrs_only = True
    add_nofollow = False
    host_whitelist = ()
    whitelist_tags = set(['iframe', 'embed'])

    def __init__(self, **kw):
        # Reject unknown options early instead of silently ignoring typos.
        for name, value in kw.items():
            if not hasattr(self, name):
                raise TypeError(
                    "Unknown parameter: %s=%r" % (name, value))
            setattr(self, name, value)

    # Used to lookup the primary URL for a given tag that is up for
    # removal:
    _tag_link_attrs = dict(
        script='src',
        link='href',
        # From: http://java.sun.com/j2se/1.4.2/docs/guide/misc/applet.html
        # From what I can tell, both attributes can contain a link:
        applet=['code', 'object'],
        iframe='src',
        embed='src',
        layer='src',
        # FIXME: there doesn't really seem like a general way to figure out what
        # links an <object> tag uses; links often go in <param> tags with values
        # that we don't really know.  You'd have to have knowledge about specific
        # kinds of plugins (probably keyed off classid), and match against those.
        ##object=?,
        # FIXME: not looking at the action currently, because it is more complex
        # than than -- if you keep the form, you should keep the form controls.
        ##form='action',
        a='href',
        )

    def __call__(self, doc):
        """
        Cleans the document.
        """
        if hasattr(doc, 'getroot'):
            # ElementTree instance, instead of an element
            doc = doc.getroot()
        # convert XHTML to HTML
        for el in doc.iter():
            tag = el.tag
            if isinstance(tag, basestring):
                el.tag = _nons(tag)
        # Normalize a case that IE treats <image> like <img>, and that
        # can confuse either this step or later steps.
        for el in doc.iter('image'):
            el.tag = 'img'
        if not self.comments:
            # Of course, if we were going to kill comments anyway, we don't
            # need to worry about this
            self.kill_conditional_comments(doc)
        kill_tags = set()
        remove_tags = set(self.remove_tags or ())
        # NOTE: allow_tags is handled at the end of this method (the early
        # computation that used to sit here was dead code -- it was always
        # overwritten before first use).
        if self.scripts:
            kill_tags.add('script')
        if self.safe_attrs_only:
            safe_attrs = set(defs.safe_attrs)
            for el in doc.iter():
                attrib = el.attrib
                for aname in attrib.keys():
                    if aname not in safe_attrs:
                        del attrib[aname]
        if self.javascript:
            if not self.safe_attrs_only:
                # safe_attrs handles events attributes itself
                for el in doc.iter():
                    attrib = el.attrib
                    for aname in attrib.keys():
                        if aname.startswith('on'):
                            del attrib[aname]
            doc.rewrite_links(self._remove_javascript_link,
                              resolve_base_href=False)
            if not self.style:
                # If we're deleting style then we don't have to remove JS links
                # from styles, otherwise...
                for el in _find_styled_elements(doc):
                    old = el.get('style')
                    new = _css_javascript_re.sub('', old)
                    # BUG FIX: chain the substitutions -- this second sub used
                    # to run on ``old``, silently discarding the expression()
                    # removal above.
                    new = _css_import_re.sub('', new)
                    if self._has_sneaky_javascript(new):
                        # Something tricky is going on...
                        del el.attrib['style']
                    elif new != old:
                        el.set('style', new)
                for el in list(doc.iter('style')):
                    if el.get('type', '').lower().strip() == 'text/javascript':
                        el.drop_tree()
                        continue
                    old = el.text or ''
                    new = _css_javascript_re.sub('', old)
                    # The imported CSS can do anything; we just can't allow it.
                    # BUG FIX: chain here as well (was also re-running on ``old``).
                    new = _css_import_re.sub('', new)
                    if self._has_sneaky_javascript(new):
                        # Something tricky is going on...
                        el.text = '/* deleted */'
                    elif new != old:
                        el.text = new
        if self.comments or self.processing_instructions:
            # FIXME: why either?  I feel like there's some obscure reason
            # because you can put PIs in comments...?  But I've already
            # forgotten it
            kill_tags.add(etree.Comment)
        if self.processing_instructions:
            kill_tags.add(etree.ProcessingInstruction)
        if self.style:
            kill_tags.add('style')
            etree.strip_attributes(doc, 'style')
        if self.links:
            kill_tags.add('link')
        elif self.style or self.javascript:
            # We must get rid of included stylesheets if Javascript is not
            # allowed, as you can put Javascript in them
            for el in list(doc.iter('link')):
                if 'stylesheet' in el.get('rel', '').lower():
                    # Note this kills alternate stylesheets as well
                    el.drop_tree()
        if self.meta:
            kill_tags.add('meta')
        if self.page_structure:
            remove_tags.update(('head', 'html', 'title'))
        if self.embedded:
            # FIXME: is <layer> really embedded?
            # We should get rid of any <param> tags not inside <applet>;
            # These are not really valid anyway.
            for el in list(doc.iter('param')):
                parent = el.getparent()
                while parent is not None and parent.tag not in ('applet', 'object'):
                    parent = parent.getparent()
                if parent is None:
                    el.drop_tree()
            kill_tags.update(('applet',))
            # The alternate contents that are in an iframe are a good fallback:
            remove_tags.update(('iframe', 'embed', 'layer', 'object', 'param'))
        if self.frames:
            # FIXME: ideally we should look at the frame links, but
            # generally frames don't mix properly with an HTML
            # fragment anyway.
            kill_tags.update(defs.frame_tags)
        if self.forms:
            remove_tags.add('form')
            kill_tags.update(('button', 'input', 'select', 'textarea'))
        if self.annoying_tags:
            # BUG FIX: the element name is <marquee>, not <marque>.
            remove_tags.update(('blink', 'marquee'))
        _remove = []
        _kill = []
        for el in doc.iter():
            if el.tag in kill_tags:
                if self.allow_element(el):
                    continue
                _kill.append(el)
            elif el.tag in remove_tags:
                if self.allow_element(el):
                    continue
                _remove.append(el)
        if _remove and _remove[0] == doc:
            # We have to drop the parent-most tag, which we can't
            # do.  Instead we'll rewrite it:
            el = _remove.pop(0)
            el.tag = 'div'
            el.attrib.clear()
        elif _kill and _kill[0] == doc:
            # We have to drop the parent-most element, which we can't
            # do.  Instead we'll clear it:
            el = _kill.pop(0)
            if el.tag != 'html':
                el.tag = 'div'
            el.clear()
        for el in _kill:
            el.drop_tree()
        for el in _remove:
            el.drop_tag()
        allow_tags = self.allow_tags
        if self.remove_unknown_tags:
            if allow_tags:
                raise ValueError(
                    "It does not make sense to pass in both allow_tags and remove_unknown_tags")
            allow_tags = set(defs.tags)
        if allow_tags:
            bad = []
            for el in doc.iter():
                if el.tag not in allow_tags:
                    bad.append(el)
            for el in bad:
                el.drop_tag()
        if self.add_nofollow:
            for el in _find_external_links(doc):
                if not self.allow_follow(el):
                    # NOTE(review): this overwrites any existing rel value;
                    # later lxml versions merge "nofollow" into it instead.
                    el.set('rel', 'nofollow')

    def allow_follow(self, anchor):
        """
        Override to suppress rel="nofollow" on some anchors.
        """
        return False

    def allow_element(self, el):
        """Return True if ``el`` is whitelisted via its primary link URL."""
        if el.tag not in self._tag_link_attrs:
            return False
        attr = self._tag_link_attrs[el.tag]
        if isinstance(attr, (list, tuple)):
            # Every candidate attribute must be present and allowed.
            for one_attr in attr:
                url = el.get(one_attr)
                if not url:
                    return False
                if not self.allow_embedded_url(el, url):
                    return False
            return True
        else:
            url = el.get(attr)
            if not url:
                return False
            return self.allow_embedded_url(el, url)

    def allow_embedded_url(self, el, url):
        """Return True if ``url`` points at a whitelisted http(s) host."""
        if (self.whitelist_tags is not None
            and el.tag not in self.whitelist_tags):
            return False
        scheme, netloc, path, query, fragment = urlsplit(url)
        # Strip any port before comparing against the whitelist.
        netloc = netloc.lower().split(':', 1)[0]
        if scheme not in ('http', 'https'):
            return False
        if netloc in self.host_whitelist:
            return True
        return False

    def kill_conditional_comments(self, doc):
        """
        IE conditional comments basically embed HTML that the parser
        doesn't normally see.  We can't allow anything like that, so
        we'll kill any comments that could be conditional.
        """
        self._kill_elements(
            doc, lambda el: _conditional_comment_re.search(el.text),
            etree.Comment)

    def _kill_elements(self, doc, condition, iterate=None):
        # Collect first, then drop, so we never mutate while iterating.
        bad = []
        for el in doc.iter(iterate):
            if condition(el):
                bad.append(el)
        for el in bad:
            el.drop_tree()

    def _remove_javascript_link(self, link):
        # links like "j a v a s c r i p t:" might be interpreted in IE
        new = _substitute_whitespace('', link)
        if _javascript_scheme_re.search(new):
            # FIXME: should this be None to delete?
            return ''
        return link

    _substitute_comments = re.compile(r'/\*.*?\*/', re.S).sub

    def _has_sneaky_javascript(self, style):
        """
        Depending on the browser, stuff like ``e x p r e s s i o n(...)``
        can get interpreted, or ``expre/* stuff */ssion(...)``.  This
        checks for attempt to do stuff like this.
        Typically the response will be to kill the entire style; if you
        have just a bit of Javascript in the style another rule will catch
        that and remove only the Javascript from the style; this catches
        more sneaky attempts.
        """
        style = self._substitute_comments('', style)
        style = style.replace('\\', '')
        style = _substitute_whitespace('', style)
        style = style.lower()
        if 'javascript:' in style:
            return True
        if 'expression(' in style:
            return True
        return False

    def clean_html(self, html):
        """Clean ``html`` (markup string or element) and return the result
        in the same type as the input.  The input is never mutated.
        """
        result_type = type(html)
        if isinstance(html, basestring):
            doc = fromstring(html)
        else:
            doc = copy.deepcopy(html)
        self(doc)
        return _transform_result(result_type, doc)
# Module-level convenience instance with all default options, plus a
# shortcut function bound to it.
clean = Cleaner()
clean_html = clean.clean_html
############################################################
## Autolinking
############################################################
_link_regexes = [
re.compile(r'(?P<body>https?://(?P<host>[a-z0-9._-]+)(?:/[/\-_.,a-z0-9%&?;=~]*)?(?:\([/\-_.,a-z0-9%&?;=~]*\))?)', re.I),
# This is conservative, but autolinking can be a bit conservative:
re.compile(r'mailto:(?P<body>[a-z0-9._-]+@(?P<host>[a-z0-9_._]+[a-z]))', re.I),
]
_avoid_elements = ['textarea', 'pre', 'code', 'head', 'select', 'a']
_avoid_hosts = [
re.compile(r'^localhost', re.I),
re.compile(r'\bexample\.(?:com|org|net)$', re.I),
re.compile(r'^127\.0\.0\.1$'),
]
_avoid_classes = ['nolink']
def autolink(el, link_regexes=_link_regexes,
             avoid_elements=_avoid_elements,
             avoid_hosts=_avoid_hosts,
             avoid_classes=_avoid_classes):
    """
    Turn any URLs into links.
    It will search for links identified by the given regular
    expressions (by default mailto and http(s) links).
    It won't link text in an element in avoid_elements, or an element
    with a class in avoid_classes.  It won't link to anything with a
    host that matches one of the regular expressions in avoid_hosts
    (default localhost and 127.0.0.1).
    If you pass in an element, the element's tail will not be
    substituted, only the contents of the element.
    """
    # Skip elements that must never contain generated links.
    if el.tag in avoid_elements:
        return
    classes = (el.get('class') or '').split()
    if classes and any(cls in classes for cls in avoid_classes):
        return
    # Recurse into children first; list() because we splice into el below.
    for child in list(el):
        autolink(child, link_regexes=link_regexes,
                 avoid_elements=avoid_elements,
                 avoid_hosts=avoid_hosts,
                 avoid_classes=avoid_classes)
        if not child.tail:
            continue
        remainder, new_links = _link_text(
            child.tail, link_regexes, avoid_hosts, factory=el.makeelement)
        if new_links:
            # Keep the unlinked prefix as the tail and insert the new
            # <a> elements immediately after this child.
            child.tail = remainder
            pos = el.index(child)
            el[pos + 1:pos + 1] = new_links
    # Finally handle the element's own leading text.
    if el.text:
        remainder, new_links = _link_text(
            el.text, link_regexes, avoid_hosts, factory=el.makeelement)
        if new_links:
            el.text = remainder
            el[:0] = new_links
def _link_text(text, link_regexes, avoid_hosts, factory):
    """Split ``text`` into (leading_text, [anchor elements]).

    Each anchor's ``tail`` holds the plain text that follows it, so the
    caller can splice the anchors back into the tree losslessly.
    ``factory`` is an element constructor (normally ``el.makeelement``).
    """
    leading_text = ''
    links = []
    last_pos = 0
    while 1:
        # Find the earliest match among all regexes, skipping matches
        # whose host is in the avoid list.
        best_match, best_pos = None, None
        for regex in link_regexes:
            regex_pos = last_pos
            while 1:
                match = regex.search(text, pos=regex_pos)
                if match is None:
                    break
                host = match.group('host')
                for host_regex in avoid_hosts:
                    if host_regex.search(host):
                        # Avoided host: resume searching after this match.
                        regex_pos = match.end()
                        break
                else:
                    # No avoid-rule hit; accept this match.
                    break
            if match is None:
                continue
            if best_pos is None or match.start() < best_pos:
                best_match = match
                best_pos = match.start()
        if best_match is None:
            # No more matches
            if links:
                assert not links[-1].tail
                links[-1].tail = text
            else:
                assert not leading_text
                leading_text = text
            break
        link = best_match.group(0)
        end = best_match.end()
        if link.endswith('.') or link.endswith(','):
            # These punctuation marks shouldn't end a link
            end -= 1
            link = link[:-1]
        # Text before the match belongs to the previous anchor's tail
        # (or to the leading text if this is the first match).
        prev_text = text[:best_match.start()]
        if links:
            assert not links[-1].tail
            links[-1].tail = prev_text
        else:
            assert not leading_text
            leading_text = prev_text
        anchor = factory('a')
        anchor.set('href', link)
        body = best_match.group('body')
        if not body:
            body = link
        if body.endswith('.') or body.endswith(','):
            body = body[:-1]
        anchor.text = body
        links.append(anchor)
        # Continue scanning the remainder of the text.
        text = text[end:]
    return leading_text, links
def autolink_html(html, *args, **kw):
    # Accept either markup text or a parsed tree; never mutate the input.
    result_type = type(html)
    doc = fromstring(html) if isinstance(html, basestring) else copy.deepcopy(html)
    autolink(doc, *args, **kw)
    # Hand back the same kind of object we were given.
    return _transform_result(result_type, doc)

# Share the documentation with the in-place function.
autolink_html.__doc__ = autolink.__doc__
############################################################
## Word wrapping
############################################################
# Elements whose contents must keep their exact characters.
_avoid_word_break_elements = ['pre', 'textarea', 'code']
# class="nobreak" opts an element (and its subtree) out of word breaking.
_avoid_word_break_classes = ['nobreak']

def word_break(el, max_width=40,
               avoid_elements=_avoid_word_break_elements,
               avoid_classes=_avoid_word_break_classes,
               break_character=u'\u200b'):
    """
    Breaks any long words found in the body of the text (not attributes).
    Doesn't affect any of the tags in avoid_elements, by default
    ``<textarea>``, ``<pre>`` and ``<code>``.
    Breaks words by inserting &#8203;, which is a unicode character
    for Zero Width Space character.  This generally takes up no space
    in rendering, but does copy as a space, and in monospace contexts
    usually takes up space.
    See http://www.cs.tut.fi/~jkorpela/html/nobr.html for a discussion
    """
    # Character suggestion of &#8203; comes from:
    # http://www.cs.tut.fi/~jkorpela/html/nobr.html
    # (the default above is that same character, written as a literal
    # instead of unichr(0x200b) -- identical value).
    # BUG FIX: honour the ``avoid_elements`` argument; this used to test
    # the module-level default, silently ignoring caller overrides.
    if el.tag in avoid_elements:
        return
    class_name = el.get('class')
    if class_name:
        class_name = class_name.split()
        for avoid in avoid_classes:
            if avoid in class_name:
                return
    if el.text:
        el.text = _break_text(el.text, max_width, break_character)
    for child in el:
        word_break(child, max_width=max_width,
                   avoid_elements=avoid_elements,
                   avoid_classes=avoid_classes,
                   break_character=break_character)
        if child.tail:
            child.tail = _break_text(child.tail, max_width, break_character)
def word_break_html(html, *args, **kw):
    """Parse ``html``, apply :func:`word_break` to it, and return the
    result in the same type as the input markup.
    """
    doc = fromstring(html)
    word_break(doc, *args, **kw)
    return _transform_result(type(html), doc)
def _break_text(text, max_width, break_character):
    """Return ``text`` with every word longer than ``max_width`` split up
    by ``break_character`` (all occurrences of such a word are replaced).
    """
    for word in text.split():
        if len(word) > max_width:
            text = text.replace(
                word, _insert_break(word, max_width, break_character))
    return text
_break_prefer_re = re.compile(r'[^a-z]', re.I)
def _insert_break(word, width, break_character):
orig_word = word
result = ''
while len(word) > width:
start = word[:width]
breaks = list(_break_prefer_re.finditer(start))
if breaks:
last_break = breaks[-1]
# Only walk back up to 10 characters to find a nice break:
if last_break.end() > width-10:
# FIXME: should the break character be at the end of the
# chunk, or the beginning of the next chunk?
start = word[:last_break.end()]
result += start + break_character
word = word[len(start):]
result += word
return result
| {
"repo_name": "azureplus/hue",
"path": "desktop/core/ext-py/lxml/src/lxml/html/clean.py",
"copies": "36",
"size": "25017",
"license": "apache-2.0",
"hash": 6664657198664457000,
"line_mean": 34.2849083216,
"line_max": 133,
"alpha_frac": 0.5513850582,
"autogenerated": false,
"ratio": 4.041518578352181,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": null,
"num_lines": null
} |
"""A cleanup tool for HTML.
Removes unwanted tags and content. See the `Cleaner` class for
details.
"""
import re
import copy
try:
from urlparse import urlsplit
from urllib import unquote_plus
except ImportError:
# Python 3
from urllib.parse import urlsplit, unquote_plus
from lxml import etree
from lxml.html import defs
from lxml.html import fromstring, XHTML_NAMESPACE
from lxml.html import xhtml_to_html, _transform_result
try:
unichr
except NameError:
# Python 3
unichr = chr
try:
unicode
except NameError:
# Python 3
unicode = str
try:
bytes
except NameError:
# Python < 2.6
bytes = str
try:
basestring
except NameError:
basestring = (str, bytes)
__all__ = ['clean_html', 'clean', 'Cleaner', 'autolink', 'autolink_html',
           'word_break', 'word_break_html']
# Look at http://code.sixapart.com/trac/livejournal/browser/trunk/cgi-bin/cleanhtml.pl
# Particularly the CSS cleaning; most of the tag cleaning is integrated now
# I have multiple kinds of schemes searched; but should schemes be
# whitelisted instead?
# max height?
# remove images?  Also in CSS?  background attribute?
# Some way to whitelist object, iframe, etc (e.g., if you want to
# allow *just* embedded YouTube movies)
# Log what was deleted and why?
# style="behavior: ..." might be bad in IE?
# Should we have something for just <meta http-equiv>?  That's the worst of the
# metas.
# UTF-7 detections?  Example:
#     <HEAD><META HTTP-EQUIV="CONTENT-TYPE" CONTENT="text/html; charset=UTF-7"> </HEAD>+ADw-SCRIPT+AD4-alert('XSS');+ADw-/SCRIPT+AD4-
#  you don't always have to have the charset set, if the page has no charset
#  and there's UTF7-like code in it.
# Look at these tests: http://htmlpurifier.org/live/smoketests/xssAttacks.php

# This is an IE-specific construct you can have in a stylesheet to
# run some Javascript:
_css_javascript_re = re.compile(
    r'expression\s*\(.*?\)', re.S|re.I)

# Do I have to worry about @\nimport?
_css_import_re = re.compile(
    r'@\s*import', re.I)

# All kinds of schemes besides just javascript: that can cause
# execution:
_is_image_dataurl = re.compile(
r'^data:image/.+;base64', re.I).search
_is_possibly_malicious_scheme = re.compile(
r'(?:javascript|jscript|livescript|vbscript|data|about|mocha):',
re.I).search
def _is_javascript_scheme(s):
if _is_image_dataurl(s):
return None
return _is_possibly_malicious_scheme(s)
_substitute_whitespace = re.compile(r'[\s\x00-\x08\x0B\x0C\x0E-\x19]+').sub
# FIXME: should data: be blocked?
# FIXME: check against: http://msdn2.microsoft.com/en-us/library/ms537512.aspx
_conditional_comment_re = re.compile(
r'\[if[\s\n\r]+.*?][\s\n\r]*>', re.I|re.S)
_find_styled_elements = etree.XPath(
"descendant-or-self::*[@style]")
_find_external_links = etree.XPath(
("descendant-or-self::a [normalize-space(@href) and substring(normalize-space(@href),1,1) != '#'] |"
"descendant-or-self::x:a[normalize-space(@href) and substring(normalize-space(@href),1,1) != '#']"),
namespaces={'x':XHTML_NAMESPACE})
class Cleaner(object):
    """
    Instances clean the document of each of the possible offending
    elements.  The cleaning is controlled by attributes; you can
    override attributes in a subclass, or set them in the constructor.

    ``scripts``:
        Removes any ``<script>`` tags.
    ``javascript``:
        Removes any Javascript, like an ``onclick`` attribute. Also removes stylesheets
        as they could contain Javascript.
    ``comments``:
        Removes any comments.
    ``style``:
        Removes any style tags.
    ``inline_style``
        Removes any style attributes.  Defaults to the value of the ``style`` option.
    ``links``:
        Removes any ``<link>`` tags
    ``meta``:
        Removes any ``<meta>`` tags
    ``page_structure``:
        Structural parts of a page: ``<head>``, ``<html>``, ``<title>``.
    ``processing_instructions``:
        Removes any processing instructions.
    ``embedded``:
        Removes any embedded objects (flash, iframes)
    ``frames``:
        Removes any frame-related tags
    ``forms``:
        Removes any form tags
    ``annoying_tags``:
        Tags that aren't *wrong*, but are annoying.  ``<blink>`` and ``<marquee>``
    ``remove_tags``:
        A list of tags to remove.  Only the tags will be removed,
        their content will get pulled up into the parent tag.
    ``kill_tags``:
        A list of tags to kill.  Killing also removes the tag's content,
        i.e. the whole subtree, not just the tag itself.
    ``allow_tags``:
        A list of tags to include (default include all).
    ``remove_unknown_tags``:
        Remove any tags that aren't standard parts of HTML.
    ``safe_attrs_only``:
        If true, only include 'safe' attributes (specifically the list
        from the feedparser HTML sanitisation web site).
    ``safe_attrs``:
        A set of attribute names to override the default list of attributes
        considered 'safe' (when safe_attrs_only=True).
    ``add_nofollow``:
        If true, then any <a> tags will have ``rel="nofollow"`` added to them.
    ``host_whitelist``:
        A list or set of hosts that you can use for embedded content
        (for content like ``<object>``, ``<link rel="stylesheet">``, etc).
        You can also implement/override the method
        ``allow_embedded_url(el, url)`` or ``allow_element(el)`` to
        implement more complex rules for what can be embedded.
        Anything that passes this test will be shown, regardless of
        the value of (for instance) ``embedded``.
        Note that this parameter might not work as intended if you do not
        make the links absolute before doing the cleaning.
        Note that you may also need to set ``whitelist_tags``.
    ``whitelist_tags``:
        A set of tags that can be included with ``host_whitelist``.
        The default is ``iframe`` and ``embed``; you may wish to
        include other tags like ``script``, or you may want to
        implement ``allow_embedded_url`` for more control.  Set to None to
        include all tags.

    This modifies the document *in place*.
    """
    scripts = True
    javascript = True
    comments = True
    style = False
    inline_style = None
    links = True
    meta = True
    page_structure = True
    processing_instructions = True
    embedded = True
    frames = True
    forms = True
    annoying_tags = True
    remove_tags = None
    allow_tags = None
    kill_tags = None
    remove_unknown_tags = True
    safe_attrs_only = True
    safe_attrs = defs.safe_attrs
    add_nofollow = False
    host_whitelist = ()
    whitelist_tags = set(['iframe', 'embed'])

    def __init__(self, **kw):
        # Reject unknown options early instead of silently ignoring typos.
        for name, value in kw.items():
            if not hasattr(self, name):
                raise TypeError(
                    "Unknown parameter: %s=%r" % (name, value))
            setattr(self, name, value)
        # inline_style follows style unless the caller set it explicitly.
        if self.inline_style is None and 'inline_style' not in kw:
            self.inline_style = self.style

    # Used to lookup the primary URL for a given tag that is up for
    # removal:
    _tag_link_attrs = dict(
        script='src',
        link='href',
        # From: http://java.sun.com/j2se/1.4.2/docs/guide/misc/applet.html
        # From what I can tell, both attributes can contain a link:
        applet=['code', 'object'],
        iframe='src',
        embed='src',
        layer='src',
        # FIXME: there doesn't really seem like a general way to figure out what
        # links an <object> tag uses; links often go in <param> tags with values
        # that we don't really know.  You'd have to have knowledge about specific
        # kinds of plugins (probably keyed off classid), and match against those.
        ##object=?,
        # FIXME: not looking at the action currently, because it is more complex
        # than than -- if you keep the form, you should keep the form controls.
        ##form='action',
        a='href',
        )

    def __call__(self, doc):
        """
        Cleans the document.
        """
        if hasattr(doc, 'getroot'):
            # ElementTree instance, instead of an element
            doc = doc.getroot()
        # convert XHTML to HTML
        xhtml_to_html(doc)
        # Normalize a case that IE treats <image> like <img>, and that
        # can confuse either this step or later steps.
        for el in doc.iter('image'):
            el.tag = 'img'
        if not self.comments:
            # Of course, if we were going to kill comments anyway, we don't
            # need to worry about this
            self.kill_conditional_comments(doc)
        kill_tags = set(self.kill_tags or ())
        remove_tags = set(self.remove_tags or ())
        allow_tags = set(self.allow_tags or ())
        if self.scripts:
            kill_tags.add('script')
        if self.safe_attrs_only:
            safe_attrs = set(self.safe_attrs)
            for el in doc.iter(etree.Element):
                attrib = el.attrib
                for aname in attrib.keys():
                    if aname not in safe_attrs:
                        del attrib[aname]
        if self.javascript:
            if not (self.safe_attrs_only and
                    self.safe_attrs == defs.safe_attrs):
                # safe_attrs handles events attributes itself
                for el in doc.iter(etree.Element):
                    attrib = el.attrib
                    for aname in attrib.keys():
                        if aname.startswith('on'):
                            del attrib[aname]
            doc.rewrite_links(self._remove_javascript_link,
                              resolve_base_href=False)
            # If we're deleting style then we don't have to remove JS links
            # from styles, otherwise...
            if not self.inline_style:
                for el in _find_styled_elements(doc):
                    old = el.get('style')
                    new = _css_javascript_re.sub('', old)
                    new = _css_import_re.sub('', new)
                    if self._has_sneaky_javascript(new):
                        # Something tricky is going on...
                        del el.attrib['style']
                    elif new != old:
                        el.set('style', new)
            if not self.style:
                for el in list(doc.iter('style')):
                    if el.get('type', '').lower().strip() == 'text/javascript':
                        el.drop_tree()
                        continue
                    old = el.text or ''
                    new = _css_javascript_re.sub('', old)
                    # The imported CSS can do anything; we just can't allow:
                    # BUG FIX: chain onto ``new`` -- this used to re-run on
                    # ``old``, discarding the expression() removal above,
                    # unlike the inline-style branch which chains correctly.
                    new = _css_import_re.sub('', new)
                    if self._has_sneaky_javascript(new):
                        # Something tricky is going on...
                        el.text = '/* deleted */'
                    elif new != old:
                        el.text = new
        if self.comments or self.processing_instructions:
            # FIXME: why either?  I feel like there's some obscure reason
            # because you can put PIs in comments...?  But I've already
            # forgotten it
            kill_tags.add(etree.Comment)
        if self.processing_instructions:
            kill_tags.add(etree.ProcessingInstruction)
        if self.style:
            kill_tags.add('style')
        if self.inline_style:
            etree.strip_attributes(doc, 'style')
        if self.links:
            kill_tags.add('link')
        elif self.style or self.javascript:
            # We must get rid of included stylesheets if Javascript is not
            # allowed, as you can put Javascript in them
            for el in list(doc.iter('link')):
                if 'stylesheet' in el.get('rel', '').lower():
                    # Note this kills alternate stylesheets as well
                    if not self.allow_element(el):
                        el.drop_tree()
        if self.meta:
            kill_tags.add('meta')
        if self.page_structure:
            remove_tags.update(('head', 'html', 'title'))
        if self.embedded:
            # FIXME: is <layer> really embedded?
            # We should get rid of any <param> tags not inside <applet>;
            # These are not really valid anyway.
            for el in list(doc.iter('param')):
                parent = el.getparent()
                while parent is not None and parent.tag not in ('applet', 'object'):
                    parent = parent.getparent()
                if parent is None:
                    el.drop_tree()
            kill_tags.update(('applet',))
            # The alternate contents that are in an iframe are a good fallback:
            remove_tags.update(('iframe', 'embed', 'layer', 'object', 'param'))
        if self.frames:
            # FIXME: ideally we should look at the frame links, but
            # generally frames don't mix properly with an HTML
            # fragment anyway.
            kill_tags.update(defs.frame_tags)
        if self.forms:
            remove_tags.add('form')
            kill_tags.update(('button', 'input', 'select', 'textarea'))
        if self.annoying_tags:
            remove_tags.update(('blink', 'marquee'))
        _remove = []
        _kill = []
        for el in doc.iter():
            if el.tag in kill_tags:
                if self.allow_element(el):
                    continue
                _kill.append(el)
            elif el.tag in remove_tags:
                if self.allow_element(el):
                    continue
                _remove.append(el)
        if _remove and _remove[0] == doc:
            # We have to drop the parent-most tag, which we can't
            # do.  Instead we'll rewrite it:
            el = _remove.pop(0)
            el.tag = 'div'
            el.attrib.clear()
        elif _kill and _kill[0] == doc:
            # We have to drop the parent-most element, which we can't
            # do.  Instead we'll clear it:
            el = _kill.pop(0)
            if el.tag != 'html':
                el.tag = 'div'
            el.clear()
        _kill.reverse() # start with innermost tags
        for el in _kill:
            el.drop_tree()
        for el in _remove:
            el.drop_tag()
        if self.remove_unknown_tags:
            if allow_tags:
                raise ValueError(
                    "It does not make sense to pass in both allow_tags and remove_unknown_tags")
            allow_tags = set(defs.tags)
        if allow_tags:
            bad = []
            for el in doc.iter():
                if el.tag not in allow_tags:
                    bad.append(el)
            if bad:
                if bad[0] is doc:
                    # The root itself is disallowed: rewrite it in place.
                    el = bad.pop(0)
                    el.tag = 'div'
                    el.attrib.clear()
                for el in bad:
                    el.drop_tag()
        if self.add_nofollow:
            for el in _find_external_links(doc):
                if not self.allow_follow(el):
                    rel = el.get('rel')
                    if rel:
                        # Merge into an existing rel value, skipping anchors
                        # that already carry the nofollow token.
                        if ('nofollow' in rel
                                and ' nofollow ' in (' %s ' % rel)):
                            continue
                        rel = '%s nofollow' % rel
                    else:
                        rel = 'nofollow'
                    el.set('rel', rel)

    def allow_follow(self, anchor):
        """
        Override to suppress rel="nofollow" on some anchors.
        """
        return False

    def allow_element(self, el):
        """Return True if ``el`` is whitelisted via its primary link URL."""
        if el.tag not in self._tag_link_attrs:
            return False
        attr = self._tag_link_attrs[el.tag]
        if isinstance(attr, (list, tuple)):
            # Every candidate attribute must be present and allowed.
            for one_attr in attr:
                url = el.get(one_attr)
                if not url:
                    return False
                if not self.allow_embedded_url(el, url):
                    return False
            return True
        else:
            url = el.get(attr)
            if not url:
                return False
            return self.allow_embedded_url(el, url)

    def allow_embedded_url(self, el, url):
        """Return True if ``url`` points at a whitelisted http(s) host."""
        if (self.whitelist_tags is not None
                and el.tag not in self.whitelist_tags):
            return False
        scheme, netloc, path, query, fragment = urlsplit(url)
        # Strip any port before comparing against the whitelist.
        netloc = netloc.lower().split(':', 1)[0]
        if scheme not in ('http', 'https'):
            return False
        if netloc in self.host_whitelist:
            return True
        return False

    def kill_conditional_comments(self, doc):
        """
        IE conditional comments basically embed HTML that the parser
        doesn't normally see.  We can't allow anything like that, so
        we'll kill any comments that could be conditional.
        """
        self._kill_elements(
            doc, lambda el: _conditional_comment_re.search(el.text),
            etree.Comment)

    def _kill_elements(self, doc, condition, iterate=None):
        # Collect first, then drop, so we never mutate while iterating.
        bad = []
        for el in doc.iter(iterate):
            if condition(el):
                bad.append(el)
        for el in bad:
            el.drop_tree()

    def _remove_javascript_link(self, link):
        # links like "j a v a s c r i p t:" might be interpreted in IE
        new = _substitute_whitespace('', unquote_plus(link))
        if _is_javascript_scheme(new):
            # FIXME: should this be None to delete?
            return ''
        return link

    _substitute_comments = re.compile(r'/\*.*?\*/', re.S).sub

    def _has_sneaky_javascript(self, style):
        """
        Depending on the browser, stuff like ``e x p r e s s i o n(...)``
        can get interpreted, or ``expre/* stuff */ssion(...)``.  This
        checks for attempt to do stuff like this.
        Typically the response will be to kill the entire style; if you
        have just a bit of Javascript in the style another rule will catch
        that and remove only the Javascript from the style; this catches
        more sneaky attempts.
        """
        style = self._substitute_comments('', style)
        style = style.replace('\\', '')
        style = _substitute_whitespace('', style)
        style = style.lower()
        if 'javascript:' in style:
            return True
        if 'expression(' in style:
            return True
        return False

    def clean_html(self, html):
        """Clean ``html`` (markup string or element) and return the result
        in the same type as the input.  The input is never mutated.
        """
        result_type = type(html)
        if isinstance(html, basestring):
            doc = fromstring(html)
        else:
            doc = copy.deepcopy(html)
        self(doc)
        return _transform_result(result_type, doc)
# Module-level convenience instance with all default options, plus a
# shortcut function bound to it.
clean = Cleaner()
clean_html = clean.clean_html
############################################################
## Autolinking
############################################################

# Patterns that identify linkable URLs in plain text; each must expose a
# ``body`` group (the link text) and a ``host`` group (for avoid_hosts).
_link_regexes = [
    re.compile(r'(?P<body>https?://(?P<host>[a-z0-9._-]+)(?:/[/\-_.,a-z0-9%&?;=~]*)?(?:\([/\-_.,a-z0-9%&?;=~]*\))?)', re.I),
    # This is conservative, but autolinking can be a bit conservative:
    re.compile(r'mailto:(?P<body>[a-z0-9._-]+@(?P<host>[a-z0-9_.-]+[a-z]))', re.I),
    ]

# Elements whose text must never be rewritten into links.
_avoid_elements = ['textarea', 'pre', 'code', 'head', 'select', 'a']

# Hosts we refuse to link to (local and documentation/test addresses).
_avoid_hosts = [
    re.compile(r'^localhost', re.I),
    re.compile(r'\bexample\.(?:com|org|net)$', re.I),
    re.compile(r'^127\.0\.0\.1$'),
    ]

# class="nolink" opts an element (and its subtree) out of autolinking.
_avoid_classes = ['nolink']
def autolink(el, link_regexes=_link_regexes,
             avoid_elements=_avoid_elements,
             avoid_hosts=_avoid_hosts,
             avoid_classes=_avoid_classes):
    """
    Turn any URLs into links.

    It will search for links identified by the given regular
    expressions (by default mailto and http(s) links).

    It won't link text in an element in avoid_elements, or an element
    with a class in avoid_classes.  It won't link to anything with a
    host that matches one of the regular expressions in avoid_hosts
    (default localhost and 127.0.0.1).

    If you pass in an element, the element's tail will not be
    substituted, only the contents of the element.
    """
    if el.tag in avoid_elements:
        return
    class_name = el.get('class')
    if class_name:
        class_name = class_name.split()
        for match_class in avoid_classes:
            if match_class in class_name:
                return
    # Recurse into children first; iterate over a copy because new <a>
    # siblings may be spliced in below.
    for child in list(el):
        autolink(child, link_regexes=link_regexes,
                 avoid_elements=avoid_elements,
                 avoid_hosts=avoid_hosts,
                 avoid_classes=avoid_classes)
        if child.tail:
            text, tail_children = _link_text(
                child.tail, link_regexes, avoid_hosts, factory=el.makeelement)
            if tail_children:
                # Replace the tail with its unlinked prefix and insert
                # the generated anchors right after this child.
                child.tail = text
                index = el.index(child)
                el[index+1:index+1] = tail_children
    if el.text:
        text, pre_children = _link_text(
            el.text, link_regexes, avoid_hosts, factory=el.makeelement)
        if pre_children:
            # Same treatment for the element's own leading text.
            el.text = text
            el[:0] = pre_children
def _link_text(text, link_regexes, avoid_hosts, factory):
    """Split *text* into ``(leading_text, [anchor elements])``.

    Each regex match becomes an ``<a>`` element built with *factory*;
    the text between matches is attached as the previous anchor's tail.
    Matches whose ``host`` group matches one of *avoid_hosts* are
    skipped.
    """
    leading_text = ''
    links = []
    last_pos = 0
    while 1:
        best_match, best_pos = None, None
        # Find the earliest acceptable match across all regexes.
        for regex in link_regexes:
            regex_pos = last_pos
            while 1:
                match = regex.search(text, pos=regex_pos)
                if match is None:
                    break
                host = match.group('host')
                for host_regex in avoid_hosts:
                    if host_regex.search(host):
                        # Blacklisted host; keep searching past it.
                        regex_pos = match.end()
                        break
                else:
                    # No host regex objected: accept this match.
                    break
            if match is None:
                continue
            if best_pos is None or match.start() < best_pos:
                best_match = match
                best_pos = match.start()
        if best_match is None:
            # No more matches
            if links:
                assert not links[-1].tail
                links[-1].tail = text
            else:
                assert not leading_text
                leading_text = text
            break
        link = best_match.group(0)
        end = best_match.end()
        if link.endswith('.') or link.endswith(','):
            # These punctuation marks shouldn't end a link
            end -= 1
            link = link[:-1]
        prev_text = text[:best_match.start()]
        if links:
            assert not links[-1].tail
            links[-1].tail = prev_text
        else:
            assert not leading_text
            leading_text = prev_text
        anchor = factory('a')
        anchor.set('href', link)
        body = best_match.group('body')
        if not body:
            body = link
        if body.endswith('.') or body.endswith(','):
            body = body[:-1]
        anchor.text = body
        links.append(anchor)
        # Continue scanning from the remainder of the text.
        text = text[end:]
    return leading_text, links
def autolink_html(html, *args, **kw):
    # String/document wrapper around autolink(); returns the same type
    # as the input.  (The docstring is copied from autolink below.)
    result_type = type(html)
    if isinstance(html, basestring):
        root = fromstring(html)
    else:
        root = copy.deepcopy(html)
    autolink(root, *args, **kw)
    return _transform_result(result_type, root)
autolink_html.__doc__ = autolink.__doc__
############################################################
## Word wrapping
############################################################

# Tags whose content must keep its exact whitespace/word layout.
_avoid_word_break_elements = ['pre', 'textarea', 'code']
_avoid_word_break_classes = ['nobreak']

def word_break(el, max_width=40,
               avoid_elements=_avoid_word_break_elements,
               avoid_classes=_avoid_word_break_classes,
               break_character=u'\u200b'):
    """
    Breaks any long words found in the body of the text (not attributes).

    Doesn't effect any of the tags in avoid_elements, by default
    ``<textarea>`` and ``<pre>``

    Breaks words by inserting &#8203;, which is a unicode character
    for Zero Width Space character.  This generally takes up no space
    in rendering, but does copy as a space, and in monospace contexts
    usually takes up space.

    See http://www.cs.tut.fi/~jkorpela/html/nobr.html for a discussion
    """
    # Character suggestion of &#8203; comes from:
    # http://www.cs.tut.fi/~jkorpela/html/nobr.html
    # (the default is the same codepoint unichr(0x200b) produced)
    # BUG FIX: honor the avoid_elements argument; previously the module
    # default list was consulted, silently ignoring the parameter.
    if el.tag in avoid_elements:
        return
    class_name = el.get('class')
    if class_name:
        dont_break = False
        class_name = class_name.split()
        for avoid in avoid_classes:
            if avoid in class_name:
                dont_break = True
                break
        if dont_break:
            return
    if el.text:
        el.text = _break_text(el.text, max_width, break_character)
    for child in el:
        word_break(child, max_width=max_width,
                   avoid_elements=avoid_elements,
                   avoid_classes=avoid_classes,
                   break_character=break_character)
        if child.tail:
            child.tail = _break_text(child.tail, max_width, break_character)
def word_break_html(html, *args, **kw):
    """Parse *html*, apply word_break() to it, and return the result in
    the same type as the input."""
    result_type = type(html)
    root = fromstring(html)
    word_break(root, *args, **kw)
    return _transform_result(result_type, root)
def _break_text(text, max_width, break_character):
words = text.split()
for word in words:
if len(word) > max_width:
replacement = _insert_break(word, max_width, break_character)
text = text.replace(word, replacement)
return text
_break_prefer_re = re.compile(r'[^a-z]', re.I)
def _insert_break(word, width, break_character):
orig_word = word
result = ''
while len(word) > width:
start = word[:width]
breaks = list(_break_prefer_re.finditer(start))
if breaks:
last_break = breaks[-1]
# Only walk back up to 10 characters to find a nice break:
if last_break.end() > width-10:
# FIXME: should the break character be at the end of the
# chunk, or the beginning of the next chunk?
start = word[:last_break.end()]
result += start + break_character
word = word[len(start):]
result += word
return result
| {
"repo_name": "bjornlevi/5thpower",
"path": "nefndaralit/env/lib/python3.6/site-packages/lxml/html/clean.py",
"copies": "3",
"size": "26427",
"license": "mit",
"hash": -4239597225224439000,
"line_mean": 34.615902965,
"line_max": 133,
"alpha_frac": 0.5509516782,
"autogenerated": false,
"ratio": 4.051356737697379,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.001473997494967496,
"num_lines": 742
} |
"""A cleanup tool for HTML.
Removes unwanted tags and content. See the `HtmlCleaner` class for
details.
"""
import re
import copy
import threading
try:
from urlparse import urlsplit
except ImportError:
# Python 3
from urllib.parse import urlsplit
from lxml import etree
from lxml.html import defs
from lxml.html import fromstring, tostring, XHTML_NAMESPACE
from lxml.html import xhtml_to_html, _transform_result
try:
unichr
except NameError:
# Python 3
unichr = chr
try:
unicode
except NameError:
# Python 3
unicode = str
try:
bytes
except NameError:
# Python < 2.6
bytes = str
try:
basestring
except NameError:
basestring = (str, bytes)
# Look at http://code.sixapart.com/trac/livejournal/browser/trunk/cgi-bin/cleanhtml.pl
# Particularly the CSS cleaning; most of the tag cleaning is integrated now
# I have multiple kinds of schemes searched; but should schemes be
# whitelisted instead?
# max height?
# remove images? Also in CSS? background attribute?
# Some way to whitelist object, iframe, etc (e.g., if you want to
# allow *just* embedded YouTube movies)
# Log what was deleted and why?
# style="behavior: ..." might be bad in IE?
# Should we have something for just <meta http-equiv>? That's the worst of the
# metas.
# UTF-7 detections? Example:
# <HEAD><META HTTP-EQUIV="CONTENT-TYPE" CONTENT="text/html; charset=UTF-7"> </HEAD>+ADw-SCRIPT+AD4-alert('XSS');+ADw-/SCRIPT+AD4-
# you don't always have to have the charset set, if the page has no charset
# and there's UTF7-like code in it.
# Look at these tests: http://htmlpurifier.org/live/smoketests/xssAttacks.php
# This is an IE-specific construct you can have in a stylesheet to
# run some Javascript:
# IE allows running Javascript from a stylesheet via expression(...):
_css_javascript_re = re.compile(
    r'expression\s*\(.*?\)', re.S|re.I)
# Do I have to worry about @\nimport?  Strip any form of @import, since
# imported CSS is outside our control.
_css_import_re = re.compile(
    r'@\s*import', re.I)
# All kinds of schemes besides just javascript: that can cause
# execution:
_is_image_dataurl = re.compile(
r'^data:image/.+;base64', re.I).search
_is_possibly_malicious_scheme = re.compile(
r'(?:javascript|jscript|livescript|vbscript|data|about|mocha):',
re.I).search
def _is_javascript_scheme(s):
if _is_image_dataurl(s):
return None
return _is_possibly_malicious_scheme(s)
# Collapses runs of whitespace and ASCII control characters; used to
# defeat "j a v a s c r i p t:" style obfuscation.
_substitute_whitespace = re.compile(r'[\s\x00-\x08\x0B\x0C\x0E-\x19]+').sub
# FIXME: should data: be blocked?

# FIXME: check against: http://msdn2.microsoft.com/en-us/library/ms537512.aspx
# Matches the opening of an IE conditional comment, e.g. "[if IE 6]>".
_conditional_comment_re = re.compile(
    r'\[if[\s\n\r]+.*?][\s\n\r]*>', re.I|re.S)

# XPath helpers: every element carrying a style attribute, and every
# anchor whose href is non-empty and not a pure fragment (used when
# adding rel="nofollow").
_find_styled_elements = etree.XPath(
    "descendant-or-self::*[@style]")
_find_external_links = etree.XPath(
    ("descendant-or-self::a [normalize-space(@href) and substring(normalize-space(@href),1,1) != '#'] |"
     "descendant-or-self::x:a[normalize-space(@href) and substring(normalize-space(@href),1,1) != '#']"),
    namespaces={'x':XHTML_NAMESPACE})
class HtmlCleaner(object):
    """
    Instances cleans the document of each of the possible offending
    elements.  The cleaning is controlled by attributes; you can
    override attributes in a subclass, or set them in the constructor.

    ``scripts``:
        Removes any ``<script>`` tags.

    ``javascript``:
        Removes any Javascript, like an ``onclick`` attribute. Also
        removes stylesheets as they could contain Javascript.

    ``comments``:
        Removes any comments.

    ``style``:
        Removes any style tags or attributes.

    ``links``:
        Removes any ``<link>`` tags

    ``meta``:
        Removes any ``<meta>`` tags

    ``page_structure``:
        Structural parts of a page: ``<head>``, ``<html>``, ``<title>``.

    ``processing_instructions``:
        Removes any processing instructions.

    ``embedded``:
        Removes any embedded objects (flash, iframes)

    ``frames``:
        Removes any frame-related tags

    ``forms``:
        Removes any form tags

    ``annoying_tags``:
        Tags that aren't *wrong*, but are annoying.  ``<blink>`` and
        ``<marquee>``

    ``remove_tags``:
        A list of tags to remove.  Only the tags will be removed, their
        content will get pulled up into the parent tag.

    ``kill_tags``:
        A list of tags to kill.  Killing also removes the tag's content,
        i.e. the whole subtree, not just the tag itself.

    ``allow_tags``:
        A list of tags to include (default include all).

    ``remove_unknown_tags``:
        Remove any tags that aren't standard parts of HTML.

    ``safe_attrs_only``:
        If true, only include 'safe' attributes (specifically the list
        from the feedparser HTML sanitisation web site).

    ``safe_attrs``:
        A set of attribute names to override the default list of
        attributes considered 'safe' (when safe_attrs_only=True).

    ``add_nofollow``:
        If true, then any <a> tags will have ``rel="nofollow"`` added
        to them.

    ``host_whitelist``:
        A list or set of hosts that you can use for embedded content
        (for content like ``<object>``, ``<link rel="stylesheet">``,
        etc).

        You can also implement/override the method
        ``allow_embedded_url(el, url)`` or ``allow_element(el)`` to
        implement more complex rules for what can be embedded.
        Anything that passes this test will be shown, regardless of
        the value of (for instance) ``embedded``.

        Note that this parameter might not work as intended if you do
        not make the links absolute before doing the cleaning.

        Note that you may also need to set ``whitelist_tags``.

    ``whitelist_tags``:
        A set of tags that can be included with ``host_whitelist``.
        The default is ``iframe`` and ``embed``; you may wish to
        include other tags like ``script``, or you may want to
        implement ``allow_embedded_url`` for more control.  Set to None
        to include all tags.

    This modifies the document *in place*.
    """
# Default cleaning options; __init__ overrides these per keyword.
# NOTE(review): these attributes live on a threading.local instance but
# are assigned at class-definition time, so they only exist in the
# thread that imported this module; access to self.cfg.* from another
# thread would raise AttributeError -- confirm whether multi-threaded
# use is intended.
cfg = threading.local()
cfg.scripts = True
cfg.javascript = True
cfg.comments = True
cfg.style = False
cfg.links = True
cfg.meta = True
cfg.page_structure = True
cfg.processing_instructions = True
cfg.embedded = True
cfg.frames = True
cfg.forms = True
cfg.annoying_tags = True
cfg.remove_tags = None
cfg.allow_tags = None
cfg.kill_tags = None
cfg.remove_unknown_tags = True
cfg.safe_attrs_only = True
cfg.safe_attrs = defs.safe_attrs
cfg.add_nofollow = False
cfg.host_whitelist = ()
cfg.whitelist_tags = set(['iframe', 'embed'])
def __init__(self, **kw):
    """Override default ``cfg`` options via keyword arguments.

    Raises TypeError for any keyword that is not a known option.
    """
    for attr, val in kw.items():
        if not hasattr(self.cfg, attr):
            raise TypeError(
                "Unknown parameter: %s=%r" % (attr, val))
        setattr(self.cfg, attr, val)
# Used to lookup the primary URL for a given tag that is up for
# removal:
_tag_link_attrs = dict(
    script='src',
    link='href',
    # From: http://java.sun.com/j2se/1.4.2/docs/guide/misc/applet.html
    # From what I can tell, both attributes can contain a link:
    applet=['code', 'object'],
    iframe='src',
    embed='src',
    layer='src',
    # FIXME: there doesn't really seem like a general way to figure out what
    # links an <object> tag uses; links often go in <param> tags with values
    # that we don't really know.  You'd have to have knowledge about specific
    # kinds of plugins (probably keyed off classid), and match against those.
    ##object=?,
    # FIXME: not looking at the action currently, because it is more complex
    # than than -- if you keep the form, you should keep the form controls.
    ##form='action',
    a='href',
)
def __call__(self, doc):
    """
    Cleans the document in place according to the ``cfg`` options.
    """
    if hasattr(doc, 'getroot'):
        # ElementTree instance, instead of an element
        doc = doc.getroot()
    # convert XHTML to HTML
    xhtml_to_html(doc)
    # Normalize a case that IE treats <image> like <img>, and that
    # can confuse either this step or later steps.
    for el in doc.iter('image'):
        el.tag = 'img'
    if not self.cfg.comments:
        # Of course, if we were going to kill comments anyway, we don't
        # need to worry about this
        self.kill_conditional_comments(doc)
    kill_tags = set(self.cfg.kill_tags or ())
    remove_tags = set(self.cfg.remove_tags or ())
    allow_tags = set(self.cfg.allow_tags or ())
    if self.cfg.scripts:
        kill_tags.add('script')
    if self.cfg.safe_attrs_only:
        safe_attrs = set(self.cfg.safe_attrs)
        for el in doc.iter(etree.Element):
            attrib = el.attrib
            for aname in attrib.keys():
                if aname not in safe_attrs:
                    del attrib[aname]
    if self.cfg.javascript:
        if not (self.cfg.safe_attrs_only and
                self.cfg.safe_attrs == defs.safe_attrs):
            # safe_attrs handles events attributes itself
            for el in doc.iter(etree.Element):
                attrib = el.attrib
                for aname in attrib.keys():
                    if aname.startswith('on'):
                        del attrib[aname]
        doc.rewrite_links(self._remove_javascript_link,
                          resolve_base_href=False)
        if not self.cfg.style:
            # If we're deleting style then we don't have to remove JS links
            # from styles, otherwise...
            for el in _find_styled_elements(doc):
                old = el.get('style')
                new = _css_javascript_re.sub('', old)
                new = _css_import_re.sub('', new)
                if self._has_sneaky_javascript(new):
                    # Something tricky is going on...
                    del el.attrib['style']
                elif new != old:
                    el.set('style', new)
            for el in list(doc.iter('style')):
                if el.get('type', '').lower().strip() == 'text/javascript':
                    el.drop_tree()
                    continue
                old = el.text or ''
                new = _css_javascript_re.sub('', old)
                # The imported CSS can do anything; we just can't allow:
                # BUG FIX: this previously substituted on ``old`` again,
                # discarding the expression() removal just above.
                new = _css_import_re.sub('', new)
                if self._has_sneaky_javascript(new):
                    # Something tricky is going on...
                    el.text = '/* deleted */'
                elif new != old:
                    el.text = new
    if self.cfg.comments or self.cfg.processing_instructions:
        # FIXME: why either?  I feel like there's some obscure reason
        # because you can put PIs in comments...?  But I've already
        # forgotten it
        kill_tags.add(etree.Comment)
    if self.cfg.processing_instructions:
        kill_tags.add(etree.ProcessingInstruction)
    if self.cfg.style:
        kill_tags.add('style')
        etree.strip_attributes(doc, 'style')
    if self.cfg.links:
        kill_tags.add('link')
    elif self.cfg.style or self.cfg.javascript:
        # We must get rid of included stylesheets if Javascript is not
        # allowed, as you can put Javascript in them
        for el in list(doc.iter('link')):
            if 'stylesheet' in el.get('rel', '').lower():
                # Note this kills alternate stylesheets as well
                if not self.allow_element(el):
                    el.drop_tree()
    if self.cfg.meta:
        kill_tags.add('meta')
    if self.cfg.page_structure:
        remove_tags.update(('head', 'html', 'title'))
    if self.cfg.embedded:
        # FIXME: is <layer> really embedded?
        # We should get rid of any <param> tags not inside <applet>;
        # These are not really valid anyway.
        for el in list(doc.iter('param')):
            # (FIX: removed the unused local ``found_parent``.)
            parent = el.getparent()
            while parent is not None and parent.tag not in ('applet', 'object'):
                parent = parent.getparent()
            if parent is None:
                el.drop_tree()
        kill_tags.update(('applet',))
        # The alternate contents that are in an iframe are a good fallback:
        remove_tags.update(('iframe', 'embed', 'layer', 'object', 'param'))
    if self.cfg.frames:
        # FIXME: ideally we should look at the frame links, but
        # generally frames don't mix properly with an HTML
        # fragment anyway.
        kill_tags.update(defs.frame_tags)
    if self.cfg.forms:
        remove_tags.add('form')
        kill_tags.update(('button', 'input', 'select', 'textarea'))
    if self.cfg.annoying_tags:
        remove_tags.update(('blink', 'marquee'))

    # Collect doomed elements first so the tree is not mutated while
    # it is iterated.
    _remove = []
    _kill = []
    for el in doc.iter():
        if el.tag in kill_tags:
            if self.allow_element(el):
                continue
            _kill.append(el)
        elif el.tag in remove_tags:
            if self.allow_element(el):
                continue
            _remove.append(el)

    if _remove and _remove[0] == doc:
        # We have to drop the parent-most tag, which we can't
        # do.  Instead we'll rewrite it:
        el = _remove.pop(0)
        el.tag = 'div'
        el.attrib.clear()
    elif _kill and _kill[0] == doc:
        # We have to drop the parent-most element, which we can't
        # do.  Instead we'll clear it:
        el = _kill.pop(0)
        if el.tag != 'html':
            el.tag = 'div'
        el.clear()

    _kill.reverse()  # start with innermost tags
    for el in _kill:
        el.drop_tree()
    for el in _remove:
        el.drop_tag()

    if self.cfg.remove_unknown_tags:
        if allow_tags:
            raise ValueError(
                "It does not make sense to pass in both allow_tags and remove_unknown_tags")
        allow_tags = set(defs.tags)
    if allow_tags:
        bad = []
        for el in doc.iter():
            if el.tag not in allow_tags:
                bad.append(el)
        if bad:
            if bad[0] is doc:
                el = bad.pop(0)
                el.tag = 'div'
                el.attrib.clear()
            for el in bad:
                el.drop_tag()
    if self.cfg.add_nofollow:
        for el in _find_external_links(doc):
            if not self.allow_follow(el):
                rel = el.get('rel')
                if rel:
                    if ('nofollow' in rel
                            and ' nofollow ' in (' %s ' % rel)):
                        continue
                    rel = '%s nofollow' % rel
                else:
                    rel = 'nofollow'
                el.set('rel', rel)
def allow_follow(self, anchor):
    """
    Override to suppress rel="nofollow" on some anchors.
    """
    # Default: no anchor is exempt, so every external link gets
    # rel="nofollow" added (when cfg.add_nofollow is enabled).
    return False
def allow_element(self, el):
    """Return True if *el* may be kept despite being a kill/remove tag.

    Only tags listed in ``_tag_link_attrs`` can ever be allowed; every
    link attribute they carry must pass ``allow_embedded_url``.
    """
    if el.tag not in self._tag_link_attrs:
        return False
    attr = self._tag_link_attrs[el.tag]
    if not isinstance(attr, (list, tuple)):
        # Single link attribute: must be present and allowed.
        url = el.get(attr)
        return bool(url) and self.allow_embedded_url(el, url)
    # Several candidate attributes: all must be present and allowed.
    for one_attr in attr:
        url = el.get(one_attr)
        if not url or not self.allow_embedded_url(el, url):
            return False
    return True
def allow_embedded_url(self, el, url):
    """Return True if *url* may stay embedded in *el*.

    Requires the tag to be in cfg.whitelist_tags (when that is set),
    an http(s) scheme, and a host on cfg.host_whitelist.
    """
    whitelist = self.cfg.whitelist_tags
    if whitelist is not None and el.tag not in whitelist:
        return False
    scheme, netloc = urlsplit(url)[:2]
    if scheme not in ('http', 'https'):
        return False
    # Strip any port before comparing hosts.
    host = netloc.lower().split(':', 1)[0]
    return host in self.cfg.host_whitelist
def kill_conditional_comments(self, doc):
    """
    IE conditional comments basically embed HTML that the parser
    doesn't normally see.  We can't allow anything like that, so
    we'll kill any comments that could be conditional.

    (FIX: removed the unused local ``bad = []``; the collection is
    done inside _kill_elements.)
    """
    self._kill_elements(
        doc, lambda el: _conditional_comment_re.search(el.text),
        etree.Comment)
def _kill_elements(self, doc, condition, iterate=None):
    """Collect every element matching *condition*, then drop each
    subtree (two phases, so iteration never sees a mutated tree)."""
    doomed = [el for el in doc.iter(iterate) if condition(el)]
    for el in doomed:
        el.drop_tree()
def _remove_javascript_link(self, link):
    """Return '' for links whose scheme could execute script, else
    return *link* unchanged."""
    # links like "j a v a s c r i p t:" might be interpreted in IE
    collapsed = _substitute_whitespace('', link)
    if not _is_javascript_scheme(collapsed):
        return link
    # FIXME: should this be None to delete?
    return ''
_substitute_comments = re.compile(r'/\*.*?\*/', re.S).sub

def _has_sneaky_javascript(self, style):
    """
    Depending on the browser, stuff like ``e x p r e s s i o n(...)``
    can get interpreted, or ``expre/* stuff */ssion(...)``.  This
    checks for attempts to do stuff like this.

    Typically the response will be to kill the entire style; if you
    have just a bit of Javascript in the style another rule will catch
    that and remove only the Javascript from the style; this catches
    more sneaky attempts.
    """
    s = self._substitute_comments('', style)
    s = s.replace('\\', '')
    s = _substitute_whitespace('', s)
    s = s.lower()
    if 'javascript:' in s:
        return True
    return 'expression(' in s
def clean_html(self, html):
    """Clean *html* and return it converted back to the input's type."""
    result_type = type(html)
    if isinstance(html, basestring):
        doc = fromstring(html)
    else:
        # Work on a copy so the caller's document is untouched.
        doc = copy.deepcopy(html)
    self(doc)
    return _transform_result(result_type, doc)
| {
"repo_name": "listen-lavender/webcrawl",
"path": "webcrawl/lxmlclean.py",
"copies": "1",
"size": "18630",
"license": "mit",
"hash": -8119910387736401000,
"line_mean": 35.5294117647,
"line_max": 133,
"alpha_frac": 0.5575952764,
"autogenerated": false,
"ratio": 4.0972069496371235,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0028454947738099454,
"num_lines": 510
} |
"""ACL evaluation result and ACL abstractions."""
from weakref import proxy
from itertools import chain
log = __import__('logging').getLogger(__name__)
class ACLResult(object):
    """The outcome of evaluating an ACL.

    Truthiness mirrors ``result``; ``predicate``, ``path`` and
    ``source`` record which rule produced the decision and where it
    came from.
    """

    __slots__ = ('result', 'predicate', 'path', 'source')

    def __init__(self, result, predicate, path=None, source=None):
        self.predicate = predicate
        self.result = result
        self.source = source
        self.path = path

    def __bool__(self):
        return bool(self.result)

    __nonzero__ = __bool__
class ACL(list):
    """An ordered list of access-control rules plus an optional
    fallback policy.

    Entries are ``(path, predicate, source)`` triples; rules passed to
    the constructor get no path/source.  Iteration yields the explicit
    rules followed by the policy rules.
    """

    def __init__(self, *rules, **kw):  # Python 3: , context=None, policy=None):
        super().__init__((None, rule, None) for rule in rules)
        ctx = kw.pop('context', None)
        pol = kw.pop('policy', None)
        if __debug__:
            if kw:  # This is the only keyword argument we accept.
                raise TypeError(f"Unknown keyword arguments: {', '.join(sorted(kw))}")
        self.context = proxy(ctx) if ctx else None
        self.policy = pol or ()

    @property
    def is_authorized(self):
        """Evaluate predicates in order; the first non-None vote wins."""
        for path, predicate, source in self:
            if self.context is None:
                result = predicate()
            else:
                result = predicate(self.context)
            if __debug__:
                log.debug(repr(predicate) + " (from " + repr(source) + ") voted " + repr(result))
            if result is not None:
                return ACLResult(result, predicate, path, source)
        # No rule voted: an abstaining (falsy, result=None) outcome.
        return ACLResult(None, None, None, None)

    def __bool__(self):
        # True when there is at least one rule or policy entry.
        return bool(len(self) or len(self.policy))

    __nonzero__ = __bool__

    def __iter__(self):
        return chain(super().__iter__(), ((None, rule, None) for rule in self.policy))

    def __repr__(self):
        return '[' + ', '.join(repr(entry) for entry in self) + ']'
| {
"repo_name": "marrow/web.security",
"path": "web/security/acl.py",
"copies": "1",
"size": "1609",
"license": "mit",
"hash": 8469196829159036000,
"line_mean": 24.5396825397,
"line_max": 85,
"alpha_frac": 0.6308266004,
"autogenerated": false,
"ratio": 3.3106995884773665,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.44415261888773666,
"avg_score": null,
"num_lines": null
} |
"""A client for accessing reader output from the DART system."""
import os
import tqdm
import json
import logging
import requests
import itertools
from datetime import datetime
from collections import defaultdict
from indra.config import get_config
logger = logging.getLogger(__name__)

# DART credentials, read once at import time from the INDRA config.
dart_uname = get_config('DART_WM_USERNAME')
dart_pwd = get_config('DART_WM_PASSWORD')

# The URL is configurable since it is subject to change per use case
dart_base_url = get_config('DART_WM_URL')
if dart_base_url is None:
    dart_base_url = ('https://wm-ingest-pipeline-rest-1.prod.dart'
                     '.worldmodelers.com/dart/api/v1/readers')

# REST endpoints: metadata queries and raw output download by key.
meta_endpoint = dart_base_url + '/query'
downl_endpoint = dart_base_url + '/download/'
def get_content_by_storage_key(storage_key):
    """Return content from DART based on its storage key.

    Parameters
    ----------
    storage_key : str
        A DART storage key.

    Returns
    -------
    str
        The content corresponding to the storage key.

    Raises
    ------
    requests.HTTPError
        If the download endpoint responds with an error status.
    """
    url = downl_endpoint + storage_key
    res = requests.get(url=url, auth=(dart_uname, dart_pwd))
    res.raise_for_status()
    return res.text
def get_reader_outputs(readers=None, versions=None, document_ids=None,
                       timestamp=None, local_storage=None):
    """Return reader outputs by querying the DART API.

    Parameters
    ----------
    readers : list
        A list of reader names.
    versions : list
        A list of versions to match with the reader name(s).
    document_ids : list
        A list of document identifiers.
    timestamp : dict("on"|"before"|"after",str)
        The timestamp string must be of format "yyyy-mm-dd" or
        "yyyy-mm-dd hh:mm:ss" (only for "before" and "after").
    local_storage : Optional[str]
        Path to a local folder in which downloaded outputs are stored;
        if not given, outputs are only returned, not stored.

    Returns
    -------
    dict(str, dict)
        A two-level dict of reader output keyed by reader and then
        document id.
    """
    records = get_reader_output_records(readers=readers, versions=versions,
                                        document_ids=document_ids,
                                        timestamp=timestamp)
    logger.info('Got %d document storage keys. Fetching output...'
                % len(records))
    return download_records(records, local_storage)
def download_records(records, local_storage=None):
    """Return reader outputs corresponding to a list of records.

    Parameters
    ----------
    records : list of dict
        A list of records returned from the reader output query.
    local_storage : Optional[str]
        The path to a local folder in which the downloaded reader
        outputs should be stored. If not given, the outputs are
        just returned, not stored.

    Returns
    -------
    dict(str, dict)
        A two-level dict of reader output keyed by reader and then
        document id.
    """
    # Loop document keys and get documents
    reader_outputs = defaultdict(dict)
    for record in tqdm.tqdm(records):
        storage_key = record['storage_key']
        try:
            output = None
            # Prefer a previously downloaded local copy if available.
            if local_storage:
                fname = get_local_storage_path(local_storage, record)
                if os.path.exists(fname):
                    with open(fname, 'r') as fh:
                        output = fh.read()
            if output is None:
                output = get_content_by_storage_key(storage_key)
                if local_storage:
                    store_reader_output(local_storage, record, output)
            reader_outputs[record['identity']][record['document_id']] = output
        except Exception as e:
            # FIX: include the exception detail, which was previously
            # captured as ``e`` but never logged.
            logger.warning('Error downloading %s: %s' % (storage_key, e))
    return dict(reader_outputs)
def store_reader_output(path, record, output):
    """Save a reader output in a standardized form locally."""
    target = get_local_storage_path(path, record)
    with open(target, 'w') as fh:
        fh.write(output)
def get_local_storage_path(path, record):
    """Return the local file path for *record* under *path*, creating
    the reader/version folder if it does not yet exist."""
    folder = os.path.join(path, record['identity'], record['version'])
    if not os.path.exists(folder):
        os.makedirs(folder)
    return os.path.join(folder, record['document_id'])
def prioritize_records(records, priorities=None):
    """Return unique records per reader and document, picked by version.

    Parameters
    ----------
    records : list of dict
        A list of records returned from the reader output query.
    priorities : dict of list
        A dict keyed by reader names (e.g., cwms, eidos) with values
        representing reader versions in decreasing order of priority.

    Returns
    -------
    list of dict
        Records unique per (reader, document), chosen by version
        priority when multiple records exist for the same pair.
    """
    if not priorities:
        priorities = {}
    chosen = []
    group_key = lambda rec: (rec['identity'], rec['document_id'])
    ordered = sorted(records, key=group_key)
    for (reader, doc_id), group in itertools.groupby(ordered, key=group_key):
        candidates = list(group)
        if len(candidates) == 1:
            chosen.append(candidates[0])
            continue
        version_order = priorities.get(reader)
        if version_order:
            # Stable sort: the highest-priority version comes first.
            candidates.sort(
                key=lambda rec: version_order.index(rec['version']))
            chosen.append(candidates[0])
        else:
            logger.warning('Could not prioritize between records: %s' %
                           str(candidates))
            chosen.append(candidates[0])
    return chosen
def get_reader_output_records(readers=None, versions=None, document_ids=None,
                              timestamp=None):
    """Return reader output metadata records by querying the DART API.

    Query json structure:
        {"readers": [...], "versions": [...], "document_ids": [...],
         "timestamp": {"before"|"after": "yyyy-mm-dd[ hh:mm:ss]",
                       "on": "yyyy-mm-dd"}}

    Parameters
    ----------
    readers : list
        A list of reader names.
    versions : list
        A list of versions to match with the reader name(s).
    document_ids : list
        A list of document identifiers.
    timestamp : dict("on"|"before"|"after",str)
        The timestamp string must be of format "yyyy-mm-dd" or
        "yyyy-mm-dd hh:mm:ss" (only for "before" and "after").

    Returns
    -------
    list
        The matching records from the DART API (empty when the query
        yields nothing).
    """
    if not dart_uname:
        raise ValueError('DART_WM_USERNAME is not configured.')
    if not dart_pwd:
        raise ValueError('DART_WM_PASSWORD is not configured.')
    query_data = _jsonify_query_data(readers, versions, document_ids,
                                     timestamp)
    if not query_data:
        return {}
    res = requests.post(meta_endpoint,
                        data={'metadata': query_data},
                        auth=(dart_uname, dart_pwd))
    res.raise_for_status()
    payload = res.json()
    # This handles both empty list and dict
    if not payload or 'records' not in payload:
        return []
    return payload['records']
def get_reader_versions(reader):
    """Return the set of available versions for a given reader."""
    return {rec['version'] for rec in get_reader_output_records([reader])}
def _check_lists(lst):
if not isinstance(lst, (list, tuple)):
return False
elif any(not isinstance(s, str) for s in lst):
logger.warning('At least one object in list is not a string')
return False
return True
def _check_timestamp_dict(ts_dict):
    """Validate a timestamp constraint dict and return its usable subset.

    Parameters
    ----------
    ts_dict : dict
        Timestamp should be of format "yyyy-mm-dd". "yyyy-mm-dd hh:mm:ss"
        is allowed as well for the keys "before" and "after".

    Returns
    -------
    dict
        A dict containing only the valid entries among "on", "before"
        and "after".
    """
    def _is_valid_ts(k, tstr):
        """Return True if *tstr* parses for key *k*; raise ValueError
        on a malformed string.

        strptime directives used below:
        %Y - Year as Zero padded decimal
        %m - month as zero padded number
        %d - day as zero padded number
        %H - 24h hour as zero padded number
        %M - minute as zero padded number
        %S - second as zero padded number
        """
        ts_fmt = '%Y-%m-%d'
        ts_long_fmt = '%Y-%m-%d %H:%M:%S'
        if k == 'on':
            # "on" only accepts a plain date, never a full datetime.
            dt = datetime.strptime(tstr, ts_fmt)
        else:
            try:
                dt = datetime.strptime(tstr, ts_long_fmt)
            except ValueError:
                try:
                    dt = datetime.strptime(tstr, ts_fmt)
                except ValueError as err:
                    raise ValueError(
                        f'Timestamp "{tstr}" is not in a valid format. '
                        f'Format must be "%Y-%m-%d" or "%Y-%m-%d %H:%M:%S" '
                        f'(for "before" and "after" only)') from err
        try:
            # Reject nonsensical dates far in the past.
            if dt < datetime(1900, 1, 1):
                logger.warning('Timestamp is before 1900-JAN-01, ignoring')
                return False
        except (ValueError, OverflowError):
            logger.warning('Could not parse timestamp, ignoring')
            return False
        return True
    ek = {'on', 'before', 'after'}  # the allowed constraint keys
    if sum(k in ek for k in ts_dict) > 0:
        # NOTE(review): when a valid "on" is combined with other allowed
        # keys, "on" wins and the rest are ignored with a warning.
        if 'on' in ts_dict and \
           sum(k in ek for k in ts_dict) > 1 and \
           _is_valid_ts('on', ts_dict['on']):
            logger.warning('Ignoring any other keys than "on"')
            ts = {'on': ts_dict['on']}
        else:
            ts = {k: v for k, v in ts_dict.items() if k in ek and
                  _is_valid_ts(k, v)}
    else:
        raise ValueError(f'None of the allowed keys '
                         f'{", ".join(list(ek))} were provided')
    return ts
def _jsonify_query_data(readers=None, versions=None, document_ids=None,
                        timestamp=None):
    """Validate the query arguments and json.dumps them for the API.

    Parameters
    ----------
    readers : list
        The list of reading systems.
    versions : list
        Versions of reading systems.
    document_ids : list
        Document IDs.
    timestamp : dict("on"|"before"|"after",str)
        Reader output time stamp constraint.

    Returns
    -------
    str
        The json.dumps representation of the query metadata, or the
        empty string when no parameter was provided.
    """
    if all(arg is None for arg in (readers, versions, document_ids,
                                   timestamp)):
        logger.warning('No query parameters were filled out')
        return ''
    payload = {}
    if readers and _check_lists(readers):
        payload['readers'] = readers
    if versions and _check_lists(versions):
        payload['versions'] = versions
    if document_ids and _check_lists(document_ids):
        payload['document_ids'] = document_ids
    if isinstance(timestamp, dict):
        payload['timestamp'] = _check_timestamp_dict(timestamp)
    elif timestamp is not None:
        raise ValueError('Argument "timestamp" must be of type dict')
    return json.dumps(payload)
| {
"repo_name": "johnbachman/belpy",
"path": "indra/literature/dart_client.py",
"copies": "1",
"size": "11428",
"license": "mit",
"hash": 2017978166958637000,
"line_mean": 33.0119047619,
"line_max": 80,
"alpha_frac": 0.5878543927,
"autogenerated": false,
"ratio": 4.062566654816921,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.00009185773074661963,
"num_lines": 336
} |
"""A client for a CoreNLP Server."""
import json
import os
import requests
from server import CoreNLPServer
class CoreNLPClient(object):
    """A client that interacts with the CoreNLPServer.

    Responses can be cached in a JSON file, keyed by the request text plus
    the stringified properties dict, so repeated queries skip the server.
    """
    def __init__(self, hostname='http://localhost', port=7000,
                 start_server=False, server_flags=None, server_log=None,
                 cache_file=None,):
        """Create the client.

        Args:
            hostname: hostname of server.
            port: port of server.
            start_server: start the server on first cache miss.
            server_flags: passed to CoreNLPServer.__init__()
            server_log: passed to CoreNLPServer.__init__()
            cache_file: load and save cache to this file.
        """
        self.hostname = hostname
        self.port = port
        self.start_server = start_server
        self.server_flags = server_flags
        self.server_log = server_log
        # Lazily created in query() on the first cache miss when
        # start_server is True.
        self.server = None
        self.cache_file = cache_file
        # True once the in-memory cache has entries not yet flushed to disk.
        self.has_cache_misses = False
        if cache_file:
            if os.path.exists(cache_file):
                with open(cache_file) as f:
                    self.cache = json.load(f)
            else:
                self.cache = {}
        else:
            # No cache_file: caching disabled entirely.
            self.cache = None
    def save_cache(self):
        """Write the cache to disk if there are unsaved entries."""
        if self.cache_file and self.has_cache_misses:
            with open(self.cache_file, 'w') as f:
                json.dump(self.cache, f)
            self.has_cache_misses = False
    def query(self, sents, properties):
        """Most general way to query the server.

        Args:
            sents: Either a string or a list of strings.
            properties: CoreNLP properties to send as part of the request.
        """
        url = '%s:%d' % (self.hostname, self.port)
        params = {'properties': str(properties)}
        # A list of sentences is sent as one newline-separated payload.
        if isinstance(sents, list):
            data = '\n'.join(sents)
        else:
            data = sents
        # Cache key combines the payload and the properties dict's repr.
        # NOTE(review): str(properties) depends on dict iteration order,
        # so logically-equal dicts may yield different keys on older
        # Pythons -- confirm if exact cache reuse matters.
        key = '%s\t%s' % (data, str(properties))
        if self.cache and key in self.cache:
            return self.cache[key]
        self.has_cache_misses = True
        # Start the managed server on the first cache miss, if requested.
        if self.start_server and not self.server:
            self.server = CoreNLPServer(port=self.port, flags=self.server_flags,
                                        logfile=self.server_log)
            self.server.start()
        r = requests.post(url, params=params, data=data.encode('utf-8'))
        r.encoding = 'utf-8'
        # strict=False tolerates control characters inside the JSON body.
        json_response = json.loads(r.text, strict=False)
        if self.cache is not None:
            self.cache[key] = json_response
        return json_response
    def __enter__(self):
        return self
    def __exit__(self, type, value, traceback):
        # Stop any server we started and persist the cache on exit.
        if self.server:
            self.server.stop()
        if self.cache_file:
            self.save_cache()
    def query_pos(self, sents):
        """Standard query for getting POS tags."""
        properties = {
            'ssplit.newlineIsSentenceBreak': 'always',
            'annotators': 'tokenize,ssplit,pos',
            'outputFormat':'json'
        }
        return self.query(sents, properties)
    def query_ner(self, paragraphs):
        """Standard query for getting NERs on raw paragraphs."""
        annotators = 'tokenize,ssplit,pos,ner,entitymentions'
        properties = {
            'ssplit.newlineIsSentenceBreak': 'always',
            'annotators': annotators,
            'outputFormat':'json'
        }
        return self.query(paragraphs, properties)
    def query_depparse_ptb(self, sents, use_sd=False):
        """Standard query for getting dependency parses on PTB-tokenized input."""
        annotators = 'tokenize,ssplit,pos,depparse'
        properties = {
            'tokenize.whitespace': True,
            'ssplit.eolonly': True,
            'ssplit.newlineIsSentenceBreak': 'always',
            'annotators': annotators,
            'outputFormat':'json'
        }
        if use_sd:
            # Use Stanford Dependencies trained on PTB
            # Default is Universal Dependencies
            properties['depparse.model'] = 'edu/stanford/nlp/models/parser/nndep/english_SD.gz'
        return self.query(sents, properties)
    def query_depparse(self, sents, use_sd=False, add_ner=False):
        """Standard query for getting dependency parses on raw sentences."""
        annotators = 'tokenize,ssplit,pos,depparse'
        if add_ner:
            annotators += ',ner'
        properties = {
            'ssplit.eolonly': True,
            'ssplit.newlineIsSentenceBreak': 'always',
            'annotators': annotators,
            'outputFormat':'json'
        }
        if use_sd:
            # Use Stanford Dependencies trained on PTB
            # Default is Universal Dependencies
            properties['depparse.model'] = 'edu/stanford/nlp/models/parser/nndep/english_SD.gz'
        return self.query(sents, properties)
    def query_const_parse(self, sents, add_ner=False):
        """Standard query for getting constituency parses on raw sentences."""
        annotators = 'tokenize,ssplit,pos,parse'
        if add_ner:
            annotators += ',ner'
        properties = {
            'ssplit.eolonly': True,
            'ssplit.newlineIsSentenceBreak': 'always',
            'annotators': annotators,
            'outputFormat':'json'
        }
        return self.query(sents, properties)
| {
"repo_name": "robinjia/nectar",
"path": "nectar/corenlp/client.py",
"copies": "1",
"size": "4810",
"license": "mit",
"hash": 8843661790949195000,
"line_mean": 31.9452054795,
"line_max": 89,
"alpha_frac": 0.6367983368,
"autogenerated": false,
"ratio": 3.621987951807229,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4758786288607229,
"avg_score": null,
"num_lines": null
} |
"""A client for Bazaar."""
from __future__ import unicode_literals
import logging
import os
import re
from rbtools.clients import SCMClient, RepositoryInfo
from rbtools.clients.errors import TooManyRevisionsError
from rbtools.utils.checks import check_install
from rbtools.utils.diffs import filter_diff, normalize_patterns
from rbtools.utils.process import execute
USING_PARENT_PREFIX = 'Using parent branch '
class BazaarClient(SCMClient):
    """A client for Bazaar.

    This is a wrapper that fetches repository information and generates
    compatible diffs.
    """
    name = 'Bazaar'
    server_tool_names = 'Bazaar'
    supports_diff_exclude_patterns = True
    supports_parent_diffs = True
    can_branch = True
    # Matches the "=== modified file 'path'" header lines in bzr diffs;
    # used by filter_diff() to identify per-file sections.
    INDEX_FILE_RE = re.compile(b"===.+'(.+?)'\n")
    # Regular expression that matches the path to the current branch.
    #
    # For branches with shared repositories, Bazaar reports
    # "repository branch: /foo", but for standalone branches it reports
    # "branch root: /foo".
    BRANCH_REGEX = (
        r'\w*(repository branch|branch root|checkout root|checkout of branch):'
        r' (?P<branch_path>.+)$')
    # Revision separator (two ..s without escaping, and not followed by a /).
    # This is the same regex used in bzrlib/option.py:_parse_revision_spec.
    REVISION_SEPARATOR_REGEX = re.compile(r'\.\.(?![\\/])')
    def get_local_path(self):
        """Return the local path to the working tree.

        Returns:
            unicode:
            The filesystem path of the repository on the client system.
        """
        if not check_install(['bzr', 'help']):
            logging.debug('Unable to execute "bzr help": skipping Bazaar')
            return None
        bzr_info = execute(['bzr', 'info'], ignore_errors=True)
        if 'ERROR: Not a branch:' in bzr_info:
            return None
        # This is a branch, let's get its attributes:
        branch_match = re.search(self.BRANCH_REGEX, bzr_info, re.MULTILINE)
        path = branch_match.group('branch_path')
        if path == '.':
            path = os.getcwd()
        return path
    def get_repository_info(self):
        """Return repository information for the current working tree.

        Returns:
            rbtools.clients.RepositoryInfo:
            The repository info structure.
        """
        path = self.get_local_path()
        if not path:
            return None
        return RepositoryInfo(
            path=path,
            base_path='/',    # Diffs are always relative to the root.
            local_path=path)
    def parse_revision_spec(self, revisions=[]):
        """Parse the given revision spec.

        Args:
            revisions (list of unicode, optional):
                A list of revisions as specified by the user. Items in the
                list do not necessarily represent a single revision, since the
                user can use SCM-native syntaxes such as ``r1..r2`` or
                ``r1:r2``. SCMTool-specific overrides of this method are
                expected to deal with such syntaxes.

        Raises:
            rbtools.clients.errors.InvalidRevisionSpecError:
                The given revisions could not be parsed.

            rbtools.clients.errors.TooManyRevisionsError:
                The specified revisions list contained too many revisions.

        Returns:
            dict:
            A dictionary with the following keys:

            ``base`` (:py:class:`unicode`):
                A revision to use as the base of the resulting diff.

            ``tip`` (:py:class:`unicode`):
                A revision to use as the tip of the resulting diff.

            ``parent_base`` (:py:class:`unicode`, optional):
                The revision to use as the base of a parent diff.

            These will be used to generate the diffs to upload to Review Board
            (or print). The diff for review will include the changes in (base,
            tip], and the parent diff (if necessary) will include (parent,
            base].

            If a single revision is passed in, this will return the parent of
            that revision for "base" and the passed-in revision for "tip".

            If zero revisions are passed in, this will return the current HEAD
            as 'tip', and the upstream branch as 'base', taking into account
            parent branches explicitly specified via --parent.
        """
        n_revs = len(revisions)
        result = {}
        if n_revs == 0:
            # No revisions were passed in--start with HEAD, and find the
            # submit branch automatically.
            result['tip'] = self._get_revno()
            result['base'] = self._get_revno('ancestor:')
        elif n_revs == 1 or n_revs == 2:
            # If there's a single argument, try splitting it on '..'
            if n_revs == 1:
                revisions = self.REVISION_SEPARATOR_REGEX.split(revisions[0])
                n_revs = len(revisions)
            if n_revs == 1:
                # Single revision. Extract the parent of that revision to use
                # as the base.
                result['base'] = self._get_revno('before:' + revisions[0])
                result['tip'] = self._get_revno(revisions[0])
            elif n_revs == 2:
                # Two revisions.
                result['base'] = self._get_revno(revisions[0])
                result['tip'] = self._get_revno(revisions[1])
            else:
                raise TooManyRevisionsError
            # XXX: I tried to automatically find the parent diff revision
            # here, but I really don't understand the difference between
            # submit branch, parent branch, bound branches, etc. If there's
            # some way to know what to diff against, we could use
            #     'bzr missing --mine-only --my-revision=(base) --line'
            # to see if we need a parent diff.
        else:
            raise TooManyRevisionsError
        if self.options.parent_branch:
            result['parent_base'] = result['base']
            result['base'] = self._get_revno(
                'ancestor:%s' % self.options.parent_branch)
        return result
    def _get_revno(self, revision_spec=None):
        """Convert a revision spec to a revision number.

        Args:
            revision_spec (unicode, optional):
                The revision spec to convert.

        Returns:
            unicode:
            A new revision spec that contains a revision number instead of a
            symbolic revision.
        """
        command = ['bzr', 'revno']
        if revision_spec:
            command += ['-r', revision_spec]
        result = execute(command).strip().split('\n')
        if len(result) == 1:
            return 'revno:' + result[0]
        elif len(result) == 2 and result[0].startswith(USING_PARENT_PREFIX):
            branch = result[0][len(USING_PARENT_PREFIX):]
            return 'revno:%s:%s' % (result[1], branch)
        # NOTE(review): any other output shape falls through and returns
        # None implicitly -- confirm callers tolerate a None revision.
    def diff(self, revisions, include_files=[], exclude_patterns=[],
             extra_args=[], **kwargs):
        """Perform a diff using the given revisions.

        If the revision spec is empty, this returns the diff of the current
        branch with respect to its parent. If a single revision is passed in,
        this returns the diff of the change introduced in that revision. If
        two revisions are passed in, this will do a diff between those two
        revisions.

        Args:
            revisions (dict):
                A dictionary of revisions, as returned by
                :py:meth:`parse_revision_spec`.

            include_files (list of unicode, optional):
                A list of files to whitelist during the diff generation.

            exclude_patterns (list of unicode, optional):
                A list of shell-style glob patterns to blacklist during diff
                generation.

            extra_args (list, unused):
                Additional arguments to be passed to the diff generation.
                Unused for Bazaar.

            **kwargs (dict, unused):
                Unused keyword arguments.

        Returns:
            dict:
            A dictionary containing the following keys:

            ``diff`` (:py:class:`bytes`):
                The contents of the diff to upload.

            ``parent_diff`` (:py:class:`bytes`, optional):
                The contents of the parent diff, if available.
        """
        exclude_patterns = normalize_patterns(exclude_patterns,
                                              self.get_repository_info().path)
        diff = self._get_range_diff(revisions['base'], revisions['tip'],
                                    include_files, exclude_patterns)
        if 'parent_base' in revisions:
            parent_diff = self._get_range_diff(
                revisions['parent_base'], revisions['base'], include_files,
                exclude_patterns)
        else:
            parent_diff = None
        return {
            'diff': diff,
            'parent_diff': parent_diff,
        }
    def _get_range_diff(self, base, tip, include_files, exclude_patterns=[]):
        """Return the diff between 'base' and 'tip'.

        Args:
            base (unicode):
                The name of the base revision.

            tip (unicode):
                The name of the tip revision.

            include_files (list of unicode):
                A list of files to whitelist during the diff generation.

            exclude_patterns (list of unicode, optional):
                A list of shell-style glob patterns to blacklist during diff
                generation.

        Returns:
            bytes:
            The generated diff contents, or ``None`` if bzr produced no
            output.
        """
        diff_cmd = ['bzr', 'diff', '-q', '-r',
                    '%s..%s' % (base, tip)] + include_files
        diff = execute(diff_cmd, ignore_errors=True, log_output_on_error=False,
                       split_lines=True, results_unicode=False)
        if diff:
            if exclude_patterns:
                diff = filter_diff(diff, self.INDEX_FILE_RE, exclude_patterns,
                                   base_dir=self.get_repository_info().path)
            return b''.join(diff)
        else:
            return None
    def get_raw_commit_message(self, revisions):
        """Extract the commit message based on the provided revision range.

        Args:
            revisions (dict):
                A dictionary containing ``base`` and ``tip`` keys.

        Returns:
            unicode:
            The commit messages of all commits between (base, tip].
        """
        # The result is content in the form of:
        #
        # 2014-01-02  First Name  <email@address>
        #
        # <tab>line 1
        # <tab>line 2
        # <tab>...
        #
        # 2014-01-02  First Name  <email@address>
        #
        # ...
        log_cmd = ['bzr', 'log', '-r',
                   '%s..%s' % (revisions['base'], revisions['tip'])]
        # Find out how many commits there are, then log limiting to one fewer.
        # This is because diff treats the range as (r1, r2] while log treats
        # the lange as [r1, r2].
        lines = execute(log_cmd + ['--line'],
                        ignore_errors=True, split_lines=True)
        n_revs = len(lines) - 1
        lines = execute(log_cmd + ['--gnu-changelog', '-l', str(n_revs)],
                        ignore_errors=True, split_lines=True)
        message = []
        for line in lines:
            # We only care about lines that start with a tab (commit message
            # lines) or blank lines.
            if line.startswith('\t'):
                message.append(line[1:])
            elif not line.strip():
                message.append(line)
        return ''.join(message).strip()
    def get_current_branch(self):
        """Return the name of the current branch.

        Returns:
            unicode:
            A string with the name of the current branch.
        """
        return execute(['bzr', 'nick'], ignore_errors=True).strip()
| {
"repo_name": "reviewboard/rbtools",
"path": "rbtools/clients/bazaar.py",
"copies": "1",
"size": "12107",
"license": "mit",
"hash": -7357028615447200000,
"line_mean": 34.0927536232,
"line_max": 79,
"alpha_frac": 0.5591806393,
"autogenerated": false,
"ratio": 4.489061920652577,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5548242559952576,
"avg_score": null,
"num_lines": null
} |
"""A client for CVS."""
from __future__ import unicode_literals
import logging
import os
import re
import socket
from rbtools.clients import SCMClient, RepositoryInfo
from rbtools.clients.errors import (InvalidRevisionSpecError,
TooManyRevisionsError)
from rbtools.utils.checks import check_install
from rbtools.utils.diffs import filter_diff, normalize_patterns
from rbtools.utils.process import execute
class CVSClient(SCMClient):
    """A client for CVS.

    This is a wrapper around the cvs executable that fetches repository
    information and generates compatible diffs.
    """
    name = 'CVS'
    server_tool_names = 'CVS'
    supports_diff_exclude_patterns = True
    supports_patch_revert = True
    INDEX_FILE_RE = re.compile(b'^Index: (.+)\n$')
    # Sentinel revision meaning "the uncommitted working copy state".
    REVISION_WORKING_COPY = '--rbtools-working-copy'
    def get_local_path(self):
        """Return the local path to the working tree.

        Returns:
            unicode:
            The filesystem path of the repository on the client system.
        """
        if not check_install(['cvs']):
            logging.debug('Unable to execute "cvs": skipping CVS')
            return None
        cvsroot_path = os.path.join('CVS', 'Root')
        if not os.path.exists(cvsroot_path):
            return None
        with open(cvsroot_path, 'r') as fp:
            repository_path = fp.read().strip()
        # Strip any "user@" prefix from the CVSROOT.
        i = repository_path.find('@')
        if i != -1:
            repository_path = repository_path[i + 1:]
        # Canonicalize the hostname portion (before the last ':') so the
        # path matches what the server registered.
        i = repository_path.rfind(':')
        if i != -1:
            host = repository_path[:i]
            try:
                canon = socket.getfqdn(host)
                repository_path = repository_path.replace('%s:' % host,
                                                          '%s:' % canon)
            except socket.error as msg:
                logging.error('failed to get fqdn for %s, msg=%s',
                              host, msg)
        return repository_path
    def get_repository_info(self):
        """Return repository information for the current working tree.

        Returns:
            rbtools.clients.RepositoryInfo:
            The repository info structure.
        """
        if not check_install(['cvs']):
            logging.debug('Unable to execute "cvs": skipping CVS')
            return None
        repository_path = self.get_local_path()
        return RepositoryInfo(path=repository_path,
                              local_path=repository_path)
    def parse_revision_spec(self, revisions=[]):
        """Parse the given revision spec.

        Args:
            revisions (list of unicode, optional):
                A list of revisions as specified by the user. Items in the list
                do not necessarily represent a single revision, since the user
                can use SCM-native syntaxes such as ``r1..r2`` or ``r1:r2``.
                SCMTool-specific overrides of this method are expected to deal
                with such syntaxes.

        Raises:
            rbtools.clients.errors.InvalidRevisionSpecError:
                The given revisions could not be parsed.

            rbtools.clients.errors.TooManyRevisionsError:
                The specified revisions list contained too many revisions.

        Returns:
            dict:
            A dictionary with the following keys:

            ``base`` (:py:class:`unicode`):
                A revision to use as the base of the resulting diff.

            ``tip`` (:py:class:`unicode`):
                A revision to use as the tip of the resulting diff.

            These will be used to generate the diffs to upload to Review Board
            (or print). The diff for review will include the changes in (base,
            tip].

            If a single revision is passed in, this will raise an exception,
            because CVS doesn't have a repository-wide concept of "revision",
            so selecting an individual "revision" doesn't make sense.

            With two revisions, this will treat those revisions as tags and do
            a diff between those tags.

            If zero revisions are passed in, this will return revisions
            relevant for the "current change". The exact definition of what
            "current" means is specific to each SCMTool backend, and documented
            in the implementation classes.

            The CVS SCMClient never fills in the 'parent_base' key. Users who
            are using other patch-stack tools who want to use parent diffs with
            CVS will have to generate their diffs by hand.

            Because :command:`cvs diff` uses multiple arguments to define
            multiple tags, there's no single-argument/multiple-revision syntax
            available.
        """
        n_revs = len(revisions)
        if n_revs == 0:
            return {
                'base': 'BASE',
                'tip': self.REVISION_WORKING_COPY,
            }
        elif n_revs == 1:
            raise InvalidRevisionSpecError(
                'CVS does not support passing in a single revision.')
        elif n_revs == 2:
            return {
                'base': revisions[0],
                'tip': revisions[1],
            }
        else:
            raise TooManyRevisionsError
        # NOTE: every branch above returns or raises, so there is nothing
        # left to do here. (An unreachable fallback return of
        # {'base': None, 'tip': None} was removed.)
    def diff(self, revisions, include_files=[], exclude_patterns=[],
             extra_args=[], **kwargs):
        """Perform a diff using the given revisions.

        If no revisions are specified, this will return the diff for the
        modified files in the working directory. If it's not empty and contains
        two revisions, this will do a diff between those revisions.

        Args:
            revisions (dict):
                A dictionary of revisions, as returned by
                :py:meth:`parse_revision_spec`.

            include_files (list of unicode, optional):
                A list of files to whitelist during the diff generation.

            exclude_patterns (list of unicode, optional):
                A list of shell-style glob patterns to blacklist during diff
                generation.

            extra_args (list, unused):
                Additional arguments to be passed to the diff generation.
                Unused for CVS.

            **kwargs (dict, unused):
                Unused keyword arguments.

        Returns:
            dict:
            A dictionary containing the following keys:

            ``diff`` (:py:class:`bytes`):
                The contents of the diff to upload.
        """
        # CVS paths are always relative to the current working directory.
        cwd = os.getcwd()
        exclude_patterns = normalize_patterns(exclude_patterns, cwd, cwd)
        include_files = include_files or []
        # Diff returns "1" if differences were found.
        diff_cmd = ['cvs', 'diff', '-uN']
        base = revisions['base']
        tip = revisions['tip']
        if not (base == 'BASE' and
                tip == self.REVISION_WORKING_COPY):
            diff_cmd.extend(['-r', base, '-r', tip])
        diff = execute(diff_cmd + include_files,
                       extra_ignore_errors=(1,),
                       log_output_on_error=False,
                       split_lines=True,
                       results_unicode=False)
        if exclude_patterns:
            # CVS diffs are relative to the current working directory, so the
            # base_dir parameter to filter_diff is unnecessary.
            diff = filter_diff(diff, self.INDEX_FILE_RE, exclude_patterns,
                               base_dir=cwd)
        return {
            'diff': b''.join(diff)
        }
| {
"repo_name": "reviewboard/rbtools",
"path": "rbtools/clients/cvs.py",
"copies": "1",
"size": "7763",
"license": "mit",
"hash": -4038863485027534300,
"line_mean": 33.65625,
"line_max": 79,
"alpha_frac": 0.5661471081,
"autogenerated": false,
"ratio": 4.753827311696265,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5819974419796266,
"avg_score": null,
"num_lines": null
} |
"""A client for Git."""
from __future__ import unicode_literals
import logging
import os
import re
import sys
import six
from six.moves import zip
from rbtools.clients import PatchResult, SCMClient, RepositoryInfo
from rbtools.clients.errors import (AmendError,
CreateCommitError,
MergeError,
PushError,
InvalidRevisionSpecError,
TooManyRevisionsError,
SCMError)
from rbtools.clients.perforce import PerforceClient
from rbtools.clients.svn import SVNClient, SVNRepositoryInfo
from rbtools.utils.checks import check_install, is_valid_version
from rbtools.utils.console import edit_text
from rbtools.utils.diffs import (normalize_patterns,
remove_filenames_matching_patterns)
from rbtools.utils.errors import EditorError
from rbtools.utils.process import execute
class GitClient(SCMClient):
"""A client for Git.
This is a wrapper around the git executable that fetches repository
information and generates compatible diffs. This will attempt to generate a
diff suitable for the remote repository, whether git, SVN or Perforce.
"""
name = 'Git'
server_tool_names = 'Git,Perforce,Subversion'
supports_commit_history = True
supports_diff_exclude_patterns = True
supports_no_renames = True
supports_parent_diffs = True
supports_patch_revert = True
can_amend_commit = True
can_merge = True
can_push_upstream = True
can_delete_branch = True
can_branch = True
can_squash_merges = True
TYPE_GIT = 0
TYPE_GIT_SVN = 1
TYPE_GIT_P4 = 2
_NUL = '\x00'
_FIELD_SEP = '\x1f'
def __init__(self, **kwargs):
"""Initialize the client.
Args:
**kwargs (dict):
Keyword arguments to pass through to the superclass.
"""
super(GitClient, self).__init__(**kwargs)
# Store the 'correct' way to invoke git, just plain old 'git' by
# default.
self.git = 'git'
self._git_toplevel = None
self._type = None
def _supports_git_config_flag(self):
"""Return whether the installed version of git supports the -c flag.
This will execute ``git --version`` on the first call and cache the
result.
Returns:
bool:
``True`` if the user's installed git supports ``-c``.
"""
if not hasattr(self, '_git_version_at_least_180'):
self._git_version_least_180 = False
version_str = execute([self.git, 'version'], ignore_errors=True,
none_on_ignored_error=True)
if version_str:
m = re.search('(\d+)\.(\d+)\.(\d+)', version_str)
if m:
git_version = (int(m.group(1)),
int(m.group(2)),
int(m.group(3)))
self._git_version_at_least_180 = git_version >= (1, 8, 0)
return self._git_version_at_least_180
    def parse_revision_spec(self, revisions=[]):
        """Parse the given revision spec.

        Args:
            revisions (list of unicode, optional):
                A list of revisions as specified by the user. Items in the list
                do not necessarily represent a single revision, since the user
                can use SCM-native syntaxes such as ``r1..r2`` or ``r1:r2``.
                SCMTool-specific overrides of this method are expected to deal
                with such syntaxes.

        Raises:
            rbtools.clients.errors.InvalidRevisionSpecError:
                The given revisions could not be parsed.

            rbtools.clients.errors.TooManyRevisionsError:
                The specified revisions list contained too many revisions.

        Returns:
            dict:
            A dictionary with the following keys:

            ``base`` (:py:class:`unicode`):
                A revision to use as the base of the resulting diff.

            ``tip`` (:py:class:`unicode`):
                A revision to use as the tip of the resulting diff.

            ``parent_base`` (:py:class:`unicode`, optional):
                The revision to use as the base of a parent diff.

            ``commit_id`` (:py:class:`unicode`, optional):
                The ID of the single commit being posted, if not using a range.

            These will be used to generate the diffs to upload to Review Board
            (or print). The diff for review will include the changes in (base,
            tip], and the parent diff (if necessary) will include (parent_base,
            base].

            If a single revision is passed in, this will return the parent of
            that revision for "base" and the passed-in revision for "tip".

            If zero revisions are passed in, this will return the current HEAD
            as "tip", and the upstream branch as "base", taking into account
            parent branches explicitly specified via --parent.
        """
        n_revs = len(revisions)
        result = {}
        if n_revs == 0:
            # No revisions were passed in. Start with HEAD, and find the
            # tracking branch automatically.
            head_ref = self._rev_parse(self.get_head_ref())[0]
            parent_branch = self._get_parent_branch()
            remote = self._find_remote(parent_branch)
            parent_ref = self._rev_parse(parent_branch)[0]
            merge_base = self._rev_list_youngest_remote_ancestor(
                parent_ref, remote)
            result = {
                'base': parent_ref,
                'tip': head_ref,
                'commit_id': head_ref,
            }
            # Only request a parent diff when local commits exist on the
            # parent branch that the remote does not have.
            if parent_ref != merge_base:
                result['parent_base'] = merge_base
            # Since the user asked us to operate on HEAD, warn them about a
            # dirty working directory.
            if (self.has_pending_changes() and
                not self.config.get('SUPPRESS_CLIENT_WARNINGS', False)):
                logging.warning('Your working directory is not clean. Any '
                                'changes which have not been committed '
                                'to a branch will not be included in your '
                                'review request.')
        elif n_revs == 1 or n_revs == 2:
            # Let `git rev-parse` sort things out.
            parsed = self._rev_parse(revisions)
            n_parsed_revs = len(parsed)
            assert n_parsed_revs <= 3
            if n_parsed_revs == 1:
                # Single revision. Extract the parent of that revision to use
                # as the base.
                parent = self._rev_parse('%s^' % parsed[0])[0]
                result = {
                    'base': parent,
                    'tip': parsed[0],
                    'commit_id': parsed[0],
                }
            elif n_parsed_revs == 2:
                if parsed[1].startswith('^'):
                    # Passed in revisions were probably formatted as
                    # "base..tip". The rev-parse output includes all ancestors
                    # of the first part, and none of the ancestors of the
                    # second. Basically, the second part is the base (after
                    # stripping the ^ prefix) and the first is the tip.
                    result = {
                        'base': parsed[1][1:],
                        'tip': parsed[0],
                    }
                else:
                    # First revision is base, second is tip
                    result = {
                        'base': parsed[0],
                        'tip': parsed[1],
                    }
            elif n_parsed_revs == 3 and parsed[2].startswith('^'):
                # Revision spec is diff-since-merge. Find the merge-base of the
                # two revs to use as base.
                merge_base = self._execute([self.git, 'merge-base', parsed[0],
                                            parsed[1]]).strip()
                result = {
                    'base': merge_base,
                    'tip': parsed[0],
                }
            else:
                raise InvalidRevisionSpecError(
                    'Unexpected result while parsing revision spec')
            parent_branch = self._get_parent_branch()
            remote = self._find_remote(parent_branch)
            parent_base = self._rev_list_youngest_remote_ancestor(
                result['base'], remote)
            if parent_base != result['base']:
                result['parent_base'] = parent_base
        else:
            raise TooManyRevisionsError
        return result
    def get_local_path(self):
        """Return the local path to the working tree.

        Returns:
            unicode:
            The filesystem path of the repository on the client system.
        """
        # Temporarily reset the toplevel. This is necessary for making things
        # work correctly in unit tests where we may be moving the cwd around a
        # lot.
        self._git_toplevel = None
        if not check_install(['git', '--help']):
            # CreateProcess (launched via subprocess, used by check_install)
            # does not automatically append .cmd for things it finds in PATH.
            # If we're on Windows, and this works, save it for further use.
            if (sys.platform.startswith('win') and
                check_install(['git.cmd', '--help'])):
                self.git = 'git.cmd'
            else:
                logging.debug('Unable to execute "git --help" or "git.cmd '
                              '--help": skipping Git')
                return None
        self._git_dir = self._get_git_dir()
        if self._git_dir is None:
            return None
        # Sometimes core.bare is not set, and generates an error, so ignore
        # errors. Valid values are 'true' or '1'.
        bare = execute([self.git, 'config', 'core.bare'],
                       ignore_errors=True).strip()
        self.bare = bare in ('true', '1')
        # Running in directories other than the top level
        # of a work-tree would result in broken diffs on the server.
        if not self.bare:
            git_top = execute([self.git, 'rev-parse', '--show-toplevel'],
                              ignore_errors=True).rstrip('\n')
            # Top level might not work on old git versions, so we use git dir
            # to find it.
            if (git_top.startswith(('fatal:', 'cygdrive')) or
                not os.path.isdir(self._git_dir)):
                git_top = self._git_dir
            self._git_toplevel = os.path.abspath(git_top)
        # NOTE: for bare repositories this remains None from the reset above.
        return self._git_toplevel
    def get_repository_info(self):
        """Return repository information for the current working tree.

        Detects whether the checkout is backed by git-svn, git-p4, or plain
        git, and returns the matching repository info structure.

        Returns:
            rbtools.clients.RepositoryInfo:
            The repository info structure.
        """
        local_path = self.get_local_path()
        if not local_path:
            return None
        self._head_ref = self._execute(
            [self.git, 'symbolic-ref', '-q', 'HEAD'],
            ignore_errors=True).strip()
        # We know we have something we can work with. Let's find out
        # what it is. We'll try SVN first, but only if there's a .git/svn
        # directory. Otherwise, it may attempt to create one and scan
        # revisions, which can be slow. Also skip SVN detection if the git
        # repository was specified on command line.
        git_svn_dir = os.path.join(self._git_dir, 'svn')
        if (not getattr(self.options, 'repository_url', None) and
            os.path.isdir(git_svn_dir) and
            len(os.listdir(git_svn_dir)) > 0):
            data = self._execute([self.git, 'svn', 'info'], ignore_errors=True)
            m = re.search(r'^Repository Root: (.+)$', data, re.M)
            if m:
                path = m.group(1)
                m = re.search(r'^URL: (.+)$', data, re.M)
                if m:
                    base_path = m.group(1)[len(path):] or '/'
                    m = re.search(r'^Repository UUID: (.+)$', data, re.M)
                    if m:
                        uuid = m.group(1)
                        self._type = self.TYPE_GIT_SVN
                        m = re.search(r'Working Copy Root Path: (.+)$', data,
                                      re.M)
                        if m:
                            local_path = m.group(1)
                        else:
                            local_path = self._git_toplevel
                        return SVNRepositoryInfo(path=path,
                                                 base_path=base_path,
                                                 local_path=local_path,
                                                 uuid=uuid)
            else:
                # Versions of git-svn before 1.5.4 don't (appear to) support
                # 'git svn info'. If we fail because of an older git install,
                # here, figure out what version of git is installed and give
                # the user a hint about what to do next.
                version = self._execute([self.git, 'svn', '--version'],
                                        ignore_errors=True)
                version_parts = re.search('version (\d+)\.(\d+)\.(\d+)',
                                          version)
                svn_remote = self._execute(
                    [self.git, 'config', '--get', 'svn-remote.svn.url'],
                    ignore_errors=True)
                if (version_parts and svn_remote and
                    not is_valid_version((int(version_parts.group(1)),
                                          int(version_parts.group(2)),
                                          int(version_parts.group(3))),
                                         (1, 5, 4))):
                    raise SCMError('Your installation of git-svn must be '
                                   'upgraded to version 1.5.4 or later.')
        # Okay, maybe Perforce (git-p4).
        git_p4_ref = os.path.join(self._git_dir, 'refs', 'remotes', 'p4',
                                  'master')
        if os.path.exists(git_p4_ref):
            data = self._execute([self.git, 'config', '--get', 'git-p4.port'],
                                 ignore_errors=True)
            m = re.search(r'(.+)', data)
            if m:
                port = m.group(1)
            else:
                # Fall back on the standard Perforce environment variable.
                port = os.getenv('P4PORT')
            if port:
                self._type = self.TYPE_GIT_P4
                return RepositoryInfo(path=port,
                                      base_path='',
                                      local_path=self._git_toplevel)
        # Nope, it's git then.
        # Check for a tracking branch and determine merge-base
        self._type = self.TYPE_GIT
        url = None
        if getattr(self.options, 'repository_url', None):
            url = self.options.repository_url
        else:
            upstream_branch = self._get_parent_branch()
            url = self._get_origin(upstream_branch).rstrip('/')
            if url.startswith('fatal:'):
                raise SCMError('Could not determine remote URL for upstream '
                               'branch %s' % upstream_branch)
            # Central bare repositories don't have origin URLs.
            # We return git_dir instead and hope for the best.
            if not url:
                url = os.path.abspath(self._git_dir)
        if url:
            return RepositoryInfo(path=url,
                                  base_path='',
                                  local_path=self._git_toplevel)
        return None
def _get_git_dir(self):
"""Return the current git directory.
This will return the :file:`.git` directory corresponding to the full
checkout, traversing up in the case of worktrees.
Returns:
unicode:
The path to the :file:`.git` directory for the repository.
"""
git_dir = self._execute([self.git, 'rev-parse', '--git-dir'],
ignore_errors=True).rstrip('\n')
if git_dir.startswith('fatal:') or not os.path.isdir(git_dir):
return None
try:
# In the case of a worktree, find the common gitdir.
with open(os.path.join(git_dir, 'commondir')) as f:
common_dir = f.read().strip()
git_dir = os.path.abspath(os.path.join(git_dir, common_dir))
except IOError:
pass
return git_dir
def _strip_heads_prefix(self, ref):
"""Strip the heads prefix off of a reference name.
Args:
ref (unicode):
The full name of a branch.
Returns:
unicode:
The bare name of the branch without the ``refs/heads/`` prefix.
"""
return re.sub(r'^refs/heads/', '', ref)
    def _get_origin(self, upstream_branch):
        """Return the remote URL for the given upstream branch.

        The remote name is taken from the text before the first ``/`` in
        ``upstream_branch`` (e.g. ``origin`` for ``origin/master``).

        Args:
            upstream_branch (unicode):
                The name of the upstream branch.

        Returns:
            unicode:
            The configured URL of the branch's remote, with any trailing
            newline stripped. May be an empty string, or a ``fatal:``
            message from git, when no URL is configured.
        """
        upstream_remote = upstream_branch.split('/')[0]
        return self._execute(
            [self.git, 'config', '--get', 'remote.%s.url' % upstream_remote],
            ignore_errors=True).rstrip('\n')
def scan_for_server(self, repository_info):
"""Find the Review Board server matching this repository.
Args:
repository_info (rbtools.clients.RepositoryInfo):
The repository information structure.
Returns:
unicode:
The Review Board server URL, if available.
"""
if self._type == self.TYPE_GIT:
# TODO: Maybe support a server per remote later? Is that useful?
server_url = self._execute(
[self.git, 'config', '--get', 'reviewboard.url'],
ignore_errors=True).strip()
return server_url or None
elif self._type == self.TYPE_GIT_SVN:
# Try using the reviewboard:url property on the SVN repo, if it
# exists.
return SVNClient().scan_for_server_property(repository_info)
elif self._type == self.TYPE_GIT_P4:
return PerforceClient().scan_for_server(repository_info)
else:
return None
def get_raw_commit_message(self, revisions):
"""Extract the commit message based on the provided revision range.
Args:
revisions (dict):
A dictionary containing ``base`` and ``tip`` keys.
Returns:
unicode:
The commit messages of all commits between (base, tip].
"""
return self._execute(
[self.git, 'log', '--reverse', '--pretty=format:%s%n%n%b',
'^%s' % revisions['base'], revisions['tip']],
ignore_errors=True).strip()
def _get_parent_branch(self):
"""Return the parent branch.
Returns:
unicode:
The name of the current parent branch.
"""
# If the user has manually specified the parent, return that.
parent_branch = (getattr(self.options, 'parent_branch', None) or
getattr(self.options, 'tracking', None))
if parent_branch:
return parent_branch
if self._type == self.TYPE_GIT_SVN:
data = self._execute(
[self.git, 'svn', 'rebase', '-n'],
ignore_errors=True)
m = re.search(r'^Remote Branch:\s*(.+)$', data, re.M)
if m:
return m.group(1)
else:
logging.warning('Failed to determine SVN tracking branch. '
'Defaulting to "master"\n')
return 'master'
elif self._type == self.TYPE_GIT_P4:
return 'p4/master'
elif self._type == self.TYPE_GIT:
if self._head_ref:
short_head = self._strip_heads_prefix(self._head_ref)
merge = self._strip_heads_prefix(self._execute(
[self.git, 'config', '--get',
'branch.%s.merge' % short_head],
ignore_errors=True).strip())
remote = self._get_remote(short_head)
if remote and remote != '.' and merge:
return '%s/%s' % (remote, merge)
# As of Git 2.28, users can configure a default main branch name.
# In most cases, this will be handled by the _get_remote call
# above. This here is a fallback to a fallback, and assumes that if
# they're operating with a bare checkout with a non-standard main
# branch name, they're configured correctly.
defaultBranch = self._execute([self.git, 'config', '--global',
'--get', 'init.defaultBranch'],
ignore_errors=True).strip()
if (defaultBranch and
os.path.exists(os.path.join(self._git_dir, 'refs',
'remotes', 'origin',
defaultBranch))):
return 'origin/%s' % defaultBranch
# Finally, just fall back to the old standard.
return 'origin/master'
else:
raise ValueError('Unknown git client type %s' % self._type)
def get_head_ref(self):
"""Return the HEAD reference.
Returns:
unicode:
The name of the HEAD reference.
"""
return self._head_ref or 'HEAD'
def _rev_parse(self, revisions):
"""Parse a git symbolic reference.
Args:
revisions (unicode or list):
A set of revisions passed in by the user. This can either be a
single revision name or a range.
Returns:
list of unicode:
A list of the parsed revision data. This can be either 1, 2, or 3
elements long, depending on the exact string provided.
"""
if not isinstance(revisions, list):
revisions = [revisions]
revisions = self._execute([self.git, 'rev-parse'] + revisions)
return revisions.strip().split('\n')
def _rev_list_youngest_remote_ancestor(self, local_branch, remote):
"""Return the youngest ancestor of ``local_branch`` on ``remote``.
Args:
local_branch (unicode):
The commit whose ancestor we are trying to find.
remote (unicode):
This is most commonly ``origin``, but can be changed via
configuration or command line options. This represents the
remote which is configured in Review Board.
Returns:
unicode:
The youngest ancestor of local_branch that is also contained in
the remote repository (where youngest means the commit that can
be reached from local_branch by following the least number of
parent links).
"""
local_commits = self._execute(
[self.git, 'rev-list', local_branch, '--not',
'--remotes=%s' % remote])
local_commits = local_commits.split()
if local_commits == []:
# We are currently at a commit also available to the remote.
return local_branch
local_commit = local_commits[-1]
youngest_remote_commit = self._rev_parse('%s^' % local_commit)[0]
logging.debug('Found youngest remote git commit %s',
youngest_remote_commit)
return youngest_remote_commit
def diff(self, revisions, include_files=[], exclude_patterns=[],
no_renames=False, extra_args=[], with_parent_diff=True,
git_find_renames_threshold=None, **kwargs):
"""Perform a diff using the given revisions.
If no revisions are specified, this will do a diff of the contents of
the current branch since the tracking branch (which defaults to
'master'). If one revision is specified, this will get the diff of that
specific change. If two revisions are specified, this will do a diff
between those two revisions.
If a parent branch is specified via the command line options, or would
make sense given the requested revisions and the tracking branch, this
will also return a parent diff.
Args:
revisions (dict):
A dictionary of revisions, as returned by
:py:meth:`parse_revision_spec`.
include_files (list of unicode, optional):
A list of files to whitelist during the diff generation.
exclude_patterns (list of unicode, optional):
A list of shell-style glob patterns to blacklist during diff
generation.
no_renames (bool, optional):
Whether to avoid rename detection.
extra_args (list, unused):
Additional arguments to be passed to the diff generation.
Unused for git.
with_parent_diff (bool, optional):
Whether or not to compute a parent diff.
git_find_renames_threshold (unicode, optional):
The threshold to pass to ``--find-renames``, if any.
**kwargs (dict, unused):
Unused keyword arguments.
Returns:
dict:
A dictionary containing the following keys:
``diff`` (:py:class:`bytes`):
The contents of the diff to upload.
``parent_diff`` (:py:class:`bytes`, optional):
The contents of the parent diff, if available.
``commit_id`` (:py:class:`unicode`, optional):
The commit ID to include when posting, if available.
``base_commit_id` (:py:class:`unicode`, optional):
The ID of the commit that the change is based on, if available.
This is necessary for some hosting services that don't provide
individual file access.
"""
exclude_patterns = normalize_patterns(exclude_patterns,
self._git_toplevel,
cwd=os.getcwd())
try:
merge_base = revisions['parent_base']
except KeyError:
merge_base = revisions['base']
diff_lines = self.make_diff(
merge_base,
revisions['base'],
revisions['tip'],
include_files,
exclude_patterns,
no_renames,
find_renames_threshold=git_find_renames_threshold)
if 'parent_base' in revisions and with_parent_diff:
parent_diff_lines = self.make_diff(
merge_base,
revisions['parent_base'],
revisions['base'],
include_files,
exclude_patterns,
no_renames,
find_renames_threshold=git_find_renames_threshold)
base_commit_id = revisions['parent_base']
else:
parent_diff_lines = None
base_commit_id = revisions['base']
return {
'diff': diff_lines,
'parent_diff': parent_diff_lines,
'commit_id': revisions.get('commit_id'),
'base_commit_id': base_commit_id,
}
    def make_diff(self, merge_base, base, tip, include_files,
                  exclude_patterns, no_renames, find_renames_threshold):
        """Perform a diff on a particular branch range.

        Args:
            merge_base (unicode):
                The ID of the merge base commit. This is only used when
                creating diffs with git-svn or git-p4 clones.

            base (unicode):
                The ID of the base commit for the diff.

            tip (unicode):
                The ID of the tip commit for the diff.

            include_files (list of unicode):
                A list of files to whitelist during the diff generation.

            exclude_patterns (list of unicode):
                A list of shell-style glob patterns to blacklist during diff
                generation.

            no_renames (bool, optional):
                Whether to skip rename detection.

            find_renames_threshold (unicode, optional):
                The threshhold to pass to ``--find-renames``, if any.

        Returns:
            bytes:
            The diff between (base, tip].
        """
        rev_range = '%s..%s' % (base, tip)
        if include_files:
            # '--' separates pathspecs from revisions on git's command line.
            include_files = ['--'] + include_files
        git_cmd = [self.git]
        if self._supports_git_config_flag():
            # Keep non-ASCII file names unescaped in the diff output.
            git_cmd.extend(['-c', 'core.quotepath=false'])
        if self._type in (self.TYPE_GIT_SVN, self.TYPE_GIT_P4):
            # git-svn and git-p4 clones need plain, prefix-less unified
            # diffs, which are reformatted further below.
            diff_cmd_params = ['--no-color', '--no-prefix', '-r', '-u']
        elif self._type == self.TYPE_GIT:
            diff_cmd_params = ['--no-color', '--full-index',
                               '--ignore-submodules']
            if self._supports_git_config_flag():
                git_cmd.extend(['-c', 'diff.noprefix=false'])
            # Only enable rename detection when the server advertises
            # support for moved-files diffs.
            if (not no_renames and
                self.capabilities is not None and
                self.capabilities.has_capability('diffs', 'moved_files')):
                if find_renames_threshold is not None:
                    diff_cmd_params.append('--find-renames=%s'
                                           % find_renames_threshold)
                else:
                    diff_cmd_params.append('--find-renames')
            else:
                diff_cmd_params.append('--no-renames')
        else:
            raise ValueError('Unknown git client type %s' % self._type)
        # By default, don't allow using external diff commands. This prevents
        # things from breaking horribly if someone configures a graphical diff
        # viewer like p4merge or kaleidoscope. This can be overridden by
        # setting GIT_USE_EXT_DIFF = True in ~/.reviewboardrc
        if not self.config.get('GIT_USE_EXT_DIFF', False):
            diff_cmd_params.append('--no-ext-diff')
        diff_cmd = git_cmd + ['diff'] + diff_cmd_params
        if exclude_patterns:
            # If we have specified files to exclude, we will get a list of all
            # changed files and run `git diff` on each un-excluded file
            # individually.
            changed_files_cmd = git_cmd + ['diff-tree'] + diff_cmd_params
            if self._type in (self.TYPE_GIT_SVN, self.TYPE_GIT_P4):
                # We don't want to send -u along to git diff-tree because it
                # will generate diff information along with the list of
                # changed files.
                changed_files_cmd.remove('-u')
            elif self._type == self.TYPE_GIT:
                changed_files_cmd.append('-r')
            changed_files = self._execute(
                changed_files_cmd + [rev_range] + include_files,
                split_lines=True,
                with_errors=False,
                ignore_errors=True,
                none_on_ignored_error=True,
                log_output_on_error=False)
            # The output of git diff-tree will be a list of entries that have
            # changed between the two revisions that we give it. The last part
            # of the line is the name of the file that has changed.
            changed_files = remove_filenames_matching_patterns(
                (filename.split()[-1] for filename in changed_files),
                exclude_patterns, base_dir=self._git_toplevel)
            diff_lines = []
            for filename in changed_files:
                # Diff each surviving file separately so that excluded
                # files never contribute hunks to the output.
                lines = self._execute(diff_cmd + [rev_range, '--', filename],
                                      split_lines=True,
                                      with_errors=False,
                                      ignore_errors=True,
                                      none_on_ignored_error=True,
                                      log_output_on_error=False,
                                      results_unicode=False)
                if lines is None:
                    logging.error(
                        'Could not get diff for all files (git-diff failed '
                        'for "%s"). Refusing to return a partial diff.',
                        filename)
                    diff_lines = None
                    break
                diff_lines += lines
        else:
            diff_lines = self._execute(diff_cmd + [rev_range] + include_files,
                                       split_lines=True,
                                       with_errors=False,
                                       ignore_errors=True,
                                       none_on_ignored_error=True,
                                       log_output_on_error=False,
                                       results_unicode=False)
        if self._type == self.TYPE_GIT_SVN:
            # Reformat the diff to look like `svn diff` output.
            return self.make_svn_diff(merge_base, diff_lines)
        elif self._type == self.TYPE_GIT_P4:
            # Reformat the diff to look like Perforce diff output.
            return self.make_perforce_diff(merge_base, diff_lines)
        else:
            return b''.join(diff_lines)
def make_svn_diff(self, merge_base, diff_lines):
"""Format a git-svn diff to apply correctly against an SVN repository.
This reformats the diff from a git-svn clone to look like it came from
:command:`svn diff`. This is needed so that the SVNTool in Review Board
can properly parse the diff.
Args:
merge_base (unicode):
The ID of the merge base commit. This is only used when
creating diffs with :command:`git svn` or :command:`git p4`
clones.
diff_lines (list of bytes):
The lines of the diff.
Returns:
bytes:
The reformatted diff contents.
"""
rev = self._execute([self.git, 'svn', 'find-rev', merge_base]).strip()
if not rev:
return None
diff_data = b''
original_file = b''
filename = b''
newfile = False
for i, line in enumerate(diff_lines):
if line.startswith(b'diff '):
# Grab the filename and then filter this out.
# This will be in the format of:
#
# diff --git a/path/to/file b/path/to/file
info = line.split(b' ')
diff_data += b'Index: %s\n' % info[2]
diff_data += b'=' * 67
diff_data += b'\n'
elif line.startswith(b'index '):
# Filter this out.
pass
elif line.strip() == b'--- /dev/null':
# New file
newfile = True
elif (line.startswith(b'--- ') and i + 1 < len(diff_lines) and
diff_lines[i + 1].startswith(b'+++ ')):
newfile = False
original_file = line[4:].strip()
diff_data += b'--- %s\t(revision %s)\n' % (original_file, rev)
elif line.startswith(b'+++ '):
filename = line[4:].strip()
if newfile:
diff_data += b'--- %s\t(revision 0)\n' % filename
diff_data += b'+++ %s\t(revision 0)\n' % filename
else:
# We already printed the "--- " line.
diff_data += b'+++ %s\t(working copy)\n' % original_file
elif (line.startswith(b'new file mode') or
line.startswith(b'deleted file mode')):
# Filter this out.
pass
elif line.startswith(b'Binary files '):
# Add the following so that we know binary files were
# added/changed.
diff_data += b'Cannot display: file marked as a binary type.\n'
diff_data += b'svn:mime-type = application/octet-stream\n'
else:
diff_data += line
return diff_data
def make_perforce_diff(self, merge_base, diff_lines):
"""Format a git-p4 diff to apply correctly against a P4 repository.
This reformats the diff from a :command:`git p4` clone to look like it
came from a Perforce repository. This is needed so that the
PerforceTool in Review Board can properly parse the diff.
Args:
merge_base (unicode):
The ID of the merge base commit. This is only used when
creating diffs with :command:`git svn` or
:command:`git p4` clones.
diff_lines (list of bytes):
The lines of the diff.
Returns:
bytes:
The reformatted diff contents.
"""
diff_data = b''
filename = b''
p4rev = b''
# Find which depot changelist we're based on
log = self._execute([self.git, 'log', merge_base], ignore_errors=True)
for line in log:
m = re.search(br'[rd]epo.-paths = "(.+)": change = (\d+).*\]',
log, re.M)
if m:
base_path = m.group(1).strip()
p4rev = m.group(2).strip()
break
else:
# We should really raise an error here, base_path is required
pass
for i, line in enumerate(diff_lines):
if line.startswith(b'diff '):
# Grab the filename and then filter this out.
# This will be in the format of:
# diff --git a/path/to/file b/path/to/file
filename = line.split(b' ')[2].strip()
elif (line.startswith(b'index ') or
line.startswith(b'new file mode ')):
# Filter this out
pass
elif (line.startswith(b'--- ') and i + 1 < len(diff_lines) and
diff_lines[i + 1].startswith(b'+++ ')):
data = self._execute(
['p4', 'files', base_path + filename + '@' + p4rev],
ignore_errors=True, results_unicode=False)
m = re.search(br'^%s%s#(\d+).*$' % (re.escape(base_path),
re.escape(filename)),
data, re.M)
if m:
file_version = m.group(1).strip()
else:
file_version = 1
diff_data += b'--- %s%s\t%s%s#%s\n' % (base_path, filename,
base_path, filename,
file_version)
elif line.startswith(b'+++ '):
# TODO: add a real timestamp
diff_data += b'+++ %s%s\t%s\n' % (base_path, filename,
b'TIMESTAMP')
else:
diff_data += line
return diff_data
def has_pending_changes(self):
"""Check if there are changes waiting to be committed.
Returns:
bool:
``True`` if the working directory has been modified or if changes
have been staged in the index.
"""
status = self._execute(['git', 'status', '--porcelain',
'--untracked-files=no',
'--ignore-submodules=dirty'])
return status != ''
def amend_commit_description(self, message, revisions):
"""Update a commit message to the given string.
Args:
message (unicode):
The commit message to use when amending the commit.
revisions (dict):
A dictionary of revisions, as returned by
:py:meth:`parse_revision_spec`.
Raises:
rbtools.clients.errors.AmendError:
The requested revision tip was not the most recent commit.
Unless rewriting the entire series of commits, git can only
amend the latest commit on the branch.
"""
if revisions and revisions['tip']:
commit_ids = self._execute(
[self.git, 'rev-parse', 'HEAD', revisions['tip']],
split_lines=True)
head_id = commit_ids[0].strip()
revision_id = commit_ids[1].strip()
if head_id != revision_id:
raise AmendError('Commit "%s" is not the latest commit, '
'and thus cannot be modified' % revision_id)
self._execute([self.git, 'commit', '--amend', '-m', message])
def apply_patch(self, patch_file, base_path=None, base_dir=None, p=None,
revert=False):
"""Apply the given patch to index.
This will take the given patch file and apply it to the index,
scheduling all changes for commit.
Args:
patch_file (unicode):
The name of the patch file to apply.
base_path (unicode, unused):
The base path that the diff was generated in. All git diffs are
absolute to the repository root, so this is unused.
base_dir (unicode, unused):
The path of the current working directory relative to the root
of the repository. All git diffs are absolute to the repository
root, so this is unused.
p (unicode, optional):
The prefix level of the diff.
revert (bool, optional):
Whether the patch should be reverted rather than applied.
Returns:
rbtools.clients.PatchResult:
The result of the patch operation.
"""
cmd = ['git', 'apply', '-3']
if revert:
cmd.append('-R')
if p:
cmd += ['-p', p]
cmd.append(patch_file)
rc, data = self._execute(cmd,
ignore_errors=True,
with_errors=True,
return_error_code=True,
results_unicode=False)
if rc == 0:
return PatchResult(applied=True, patch_output=data)
elif b'with conflicts' in data:
return PatchResult(
applied=True,
has_conflicts=True,
conflicting_files=[
line.split(b' ', 1)[1]
for line in data.splitlines()
if line.startswith(b'U')
],
patch_output=data)
else:
return PatchResult(applied=False, patch_output=data)
def create_commit(self, message, author, run_editor,
files=[], all_files=False):
"""Commit the given modified files.
This is expected to be called after applying a patch. This commits the
patch using information from the review request, opening the commit
message in :envvar:`$EDITOR` to allow the user to update it.
Args:
message (unicode):
The commit message to use.
author (object):
The author of the commit. This is expected to have ``fullname``
and ``email`` attributes.
run_editor (bool):
Whether to run the user's editor on the commmit message before
committing.
files (list of unicode, optional):
The list of filenames to commit.
all_files (bool, optional):
Whether to commit all changed files, ignoring the ``files``
argument.
Raises:
rbtools.clients.errors.CreateCommitError:
The commit message could not be created. It may have been
aborted by the user.
"""
try:
if all_files:
self._execute(['git', 'add', '--all', ':/'])
elif files:
self._execute(['git', 'add'] + files)
except Exception as e:
raise CreateCommitError(six.text_type(e))
if run_editor:
try:
modified_message = edit_text(message,
filename='COMMIT_EDITMSG')
except EditorError as e:
raise CreateCommitError(six.text_type(e))
else:
modified_message = message
if not modified_message.strip():
raise CreateCommitError(
"A commit message wasn't provided. The patched files are in "
"your tree and are staged for commit, but haven't been "
"committed. Run `git commit` to commit them.")
cmd = ['git', 'commit', '-m', modified_message]
try:
cmd += ['--author', '%s <%s>' % (author.fullname, author.email)]
except AttributeError:
# Users who have marked their profile as private won't include the
# fullname or email fields in the API payload. Just commit as the
# user running RBTools.
logging.warning('The author has marked their Review Board profile '
'information as private. Committing without '
'author attribution.')
try:
self._execute(cmd)
except Exception as e:
raise CreateCommitError(six.text_type(e))
def delete_branch(self, branch_name, merged_only=True):
"""Delete the specified branch.
Args:
branch_name (unicode):
The name of the branch to delete.
merged_only (bool, optional):
Whether to limit branch deletion to only those branches which
have been merged into the current HEAD.
"""
if merged_only:
delete_flag = '-d'
else:
delete_flag = '-D'
self._execute(['git', 'branch', delete_flag, branch_name])
def merge(self, target, destination, message, author, squash=False,
run_editor=False, close_branch=False, **kwargs):
"""Merge the target branch with destination branch.
Args:
target (unicode):
The name of the branch to merge.
destination (unicode):
The name of the branch to merge into.
message (unicode):
The commit message to use.
author (object):
The author of the commit. This is expected to have ``fullname``
and ``email`` attributes.
squash (bool, optional):
Whether to squash the commits or do a plain merge.
run_editor (bool, optional):
Whether to run the user's editor on the commmit message before
committing.
close_branch (bool, optional):
Whether to delete the branch after merging.
**kwargs (dict, unused):
Additional keyword arguments passed, for future expansion.
Raises:
rbtools.clients.errors.MergeError:
An error occurred while merging the branch.
"""
rc, output = self._execute(
['git', 'checkout', destination],
ignore_errors=True,
return_error_code=True)
if rc:
raise MergeError('Could not checkout to branch "%s".\n\n%s' %
(destination, output))
if squash:
method = '--squash'
else:
method = '--no-ff'
rc, output = self._execute(
['git', 'merge', target, method, '--no-commit'],
ignore_errors=True,
return_error_code=True)
if rc:
raise MergeError('Could not merge branch "%s" into "%s".\n\n%s' %
(target, destination, output))
self.create_commit(message, author, run_editor)
if close_branch:
self.delete_branch(target, merged_only=False)
def push_upstream(self, local_branch):
"""Push the current branch to upstream.
Args:
local_branch (unicode):
The name of the branch to push.
Raises:
rbtools.client.errors.PushError:
The branch was unable to be pushed.
"""
remote = self._get_remote(local_branch)
if remote is None:
raise PushError('Could not determine remote for branch "%s".'
% local_branch)
rc, output = self._execute(
['git', 'pull', '--rebase', remote, local_branch],
ignore_errors=True,
return_error_code=True)
if rc:
raise PushError('Could not pull changes from upstream.')
rc, output = self._execute(
['git', 'push', remote, local_branch],
ignore_errors=True,
return_error_code=True)
if rc:
raise PushError('Could not push branch "%s" to upstream.' %
local_branch)
def get_current_branch(self):
"""Return the name of the current branch.
Returns:
unicode:
The name of the directory corresponding to the root of the current
working directory (whether a plain checkout or a git worktree). If
no repository can be found, this will return None.
"""
return self._execute([self.git, 'rev-parse', '--abbrev-ref', 'HEAD'],
ignore_errors=True).strip()
    def _execute(self, cmdline, *args, **kwargs):
        """Execute a git command within the correct cwd.

        This is a thin wrapper around
        :py:func:`rbtools.utils.process.execute` that pins the working
        directory to the top of the git checkout.

        Args:
            cmdline (list of unicode):
                A command-line to execute.

            *args (list):
                Positional arguments to pass through to
                :py:func:`rbtools.utils.process.execute`.

            **kwargs (dict):
                Keyword arguments to pass through to
                :py:func:`rbtools.utils.process.execute`.

        Returns:
            object:
            The result from the execute call. The exact type (a string, a
            list of lines, or a tuple including a return code) depends on
            the keyword arguments passed through.
        """
        return execute(cmdline, cwd=self._git_toplevel, *args, **kwargs)
    def get_commit_history(self, revisions):
        """Return the commit history specified by the revisions.

        Args:
            revisions (dict):
                A dictionary of revisions to generate history for, as
                returned by :py:meth:`parse_revision_spec`.

        Returns:
            list of dict:
            The list of history entries, in order. The dictionaries have the
            following keys:

            ``commit_id``:
                The unique identifier of the commit.

            ``parent_id``:
                The unique identifier of the parent commit.

            ``author_name``:
                The name of the commit's author.

            ``author_email``:
                The e-mail address of the commit's author.

            ``author_date``:
                The date the commit was authored.

            ``committer_name``:
                The committer's name.

            ``committer_email``:
                The e-mail address of the committer.

            ``committer_date``:
                The date the commit was committed.

            ``commit_message``:
                The commit's message.

        Raises:
            rbtools.clients.errors.SCMError:
                The history is non-linear or there is a commit with no
                parents.
        """
        # Maps result keys to `git log` format placeholders. The iteration
        # order of this dict is relied upon below: the same order is used
        # both to build the format string and to unpack each entry.
        log_fields = {
            'commit_id': b'%H',
            'parent_id': b'%P',
            'author_name': b'%an',
            'author_email': b'%ae',
            'author_date': b'%ad',
            'committer_name': b'%cn',
            'committer_email': b'%ce',
            'committer_date': b'%cd',
            'commit_message': b'%B',
        }
        # 0x1f is the ASCII field separator. It is a non-printable character
        # that should not appear in any field in `git log`.
        log_format = b'%x1f'.join(six.itervalues(log_fields))
        log_entries = execute(
            [
                self.git,
                b'log',
                b'-z',
                b'--reverse',
                b'--pretty=format:%s' % log_format,
                b'--date=iso8601-strict',
                b'%s..%s' % (revisions['base'].encode('utf-8'),
                             revisions['tip'].encode('utf-8')),
            ],
            ignore_errors=True,
            none_on_ignored_error=True,
            results_unicode=True)
        if not log_entries:
            return None
        history = []
        field_names = six.viewkeys(log_fields)
        # NOTE(review): self._NUL and self._FIELD_SEP are assumed to be
        # class-level constants matching the `-z` entry terminator ('\0')
        # and the 0x1f field separator above; they are defined elsewhere in
        # this class -- confirm against the class definition.
        for log_entry in log_entries.split(self._NUL):
            fields = log_entry.split(self._FIELD_SEP)
            entry = dict(zip(field_names, fields))
            parents = entry['parent_id'].split()
            if len(parents) > 1:
                # Merge commit: cannot be represented as a linear history.
                raise SCMError(
                    'The Git SCMClient only supports posting commit histories '
                    'that are entirely linear.')
            elif len(parents) == 0:
                # Root commit: there is no parent to base the change on.
                raise SCMError(
                    'The Git SCMClient only supports posting commits that '
                    'have exactly one parent.')
            history.append(entry)
        return history
def _get_remote(self, local_branch):
"""Return the remote for a given branch.
Args:
local_branch (unicode):
The name of the local branch.
Returns:
unicode:
The name of the remote corresponding to the local branch. May
return None if there is no remote.
"""
rc, output = self._execute(
['git', 'config', '--get', 'branch.%s.remote' % local_branch],
ignore_errors=True,
return_error_code=True)
if rc == 0:
return output.strip()
else:
return None
def _find_remote(self, local_or_remote_branch):
"""Find the remote given a branch name.
This takes in a branch name which can be either a local or remote
branch, and attempts to determine the name of the remote.
Args:
local_or_remote_branch (unicode):
The name of a branch to find the remote for.
Returns:
unicode:
The name of the remote for the given branch. Returns ``origin`` if
no associated remote can be found.
Raises:
rbtools.clients.errors.SCMError:
The current repository did not have any remotes configured.
"""
all_remote_branches = [
branch.strip()
for branch in self._execute(['git', 'branch', '--remotes'],
split_lines=True)
]
if local_or_remote_branch in all_remote_branches:
return local_or_remote_branch.split('/', 1)[0]
remote = self._get_remote(local_or_remote_branch)
if remote:
return remote
all_remotes = [
_remote.strip()
for _remote in self._execute(['git', 'remote'],
split_lines=True)
]
if len(all_remotes) >= 1:
# We prefer "origin" if it's present, otherwise just choose at
# random.
if 'origin' in all_remotes:
return 'origin'
else:
logging.warning('Could not determine specific upstream remote '
'to use for diffs. We recommend setting '
'TRACKING_BRANCH in reviewboardrc to your '
'nearest upstream remote branch.')
return all_remotes[0]
else:
raise SCMError('This clone has no configured remotes.')
| {
"repo_name": "reviewboard/rbtools",
"path": "rbtools/clients/git.py",
"copies": "1",
"size": "56637",
"license": "mit",
"hash": 6173783049855218000,
"line_mean": 36.3348714568,
"line_max": 79,
"alpha_frac": 0.5115560499,
"autogenerated": false,
"ratio": 4.650763672195763,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5662319722095763,
"avg_score": null,
"num_lines": null
} |
"""A client for in-process kernels."""
#-----------------------------------------------------------------------------
# Copyright (C) 2012 The IPython Development Team
#
# Distributed under the terms of the BSD License. The full license is in
# the file COPYING, distributed as part of this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
# IPython imports
from ipykernel.inprocess.socket import DummySocket
from traitlets import Type, Instance, default
from jupyter_client.clientabc import KernelClientABC
from jupyter_client.client import KernelClient
# Local imports
from .channels import (
InProcessChannel,
InProcessHBChannel,
)
#-----------------------------------------------------------------------------
# Main kernel Client class
#-----------------------------------------------------------------------------
class InProcessKernelClient(KernelClient):
"""A client for an in-process kernel.
This class implements the interface of
`jupyter_client.clientabc.KernelClientABC` and allows
(asynchronous) frontends to be used seamlessly with an in-process kernel.
See `jupyter_client.client.KernelClient` for docstrings.
"""
# The classes to use for the various channels.
shell_channel_class = Type(InProcessChannel)
iopub_channel_class = Type(InProcessChannel)
stdin_channel_class = Type(InProcessChannel)
control_channel_class = Type(InProcessChannel)
hb_channel_class = Type(InProcessHBChannel)
kernel = Instance('ipykernel.inprocess.ipkernel.InProcessKernel',
allow_none=True)
#--------------------------------------------------------------------------
# Channel management methods
#--------------------------------------------------------------------------
@default('blocking_class')
def _default_blocking_class(self):
from .blocking import BlockingInProcessKernelClient
return BlockingInProcessKernelClient
def get_connection_info(self):
d = super(InProcessKernelClient, self).get_connection_info()
d['kernel'] = self.kernel
return d
def start_channels(self, *args, **kwargs):
super(InProcessKernelClient, self).start_channels()
self.kernel.frontends.append(self)
@property
def shell_channel(self):
if self._shell_channel is None:
self._shell_channel = self.shell_channel_class(self)
return self._shell_channel
@property
def iopub_channel(self):
if self._iopub_channel is None:
self._iopub_channel = self.iopub_channel_class(self)
return self._iopub_channel
@property
def stdin_channel(self):
if self._stdin_channel is None:
self._stdin_channel = self.stdin_channel_class(self)
return self._stdin_channel
@property
def control_channel(self):
if self._control_channel is None:
self._control_channel = self.control_channel_class(self)
return self._control_channel
@property
def hb_channel(self):
if self._hb_channel is None:
self._hb_channel = self.hb_channel_class(self)
return self._hb_channel
# Methods for sending specific messages
# -------------------------------------
def execute(self, code, silent=False, store_history=True,
user_expressions={}, allow_stdin=None):
if allow_stdin is None:
allow_stdin = self.allow_stdin
content = dict(code=code, silent=silent, store_history=store_history,
user_expressions=user_expressions,
allow_stdin=allow_stdin)
msg = self.session.msg('execute_request', content)
self._dispatch_to_kernel(msg)
return msg['header']['msg_id']
def complete(self, code, cursor_pos=None):
if cursor_pos is None:
cursor_pos = len(code)
content = dict(code=code, cursor_pos=cursor_pos)
msg = self.session.msg('complete_request', content)
self._dispatch_to_kernel(msg)
return msg['header']['msg_id']
def inspect(self, code, cursor_pos=None, detail_level=0):
    """Send an inspect_request for *code*; return the request message ID."""
    if cursor_pos is None:
        cursor_pos = len(code)
    request = self.session.msg('inspect_request',
                               {'code': code,
                                'cursor_pos': cursor_pos,
                                'detail_level': detail_level})
    self._dispatch_to_kernel(request)
    return request['header']['msg_id']
def history(self, raw=True, output=False, hist_access_type='range', **kwds):
    """Send a history_request; return the request message ID."""
    content = dict(raw=raw, output=output,
                   hist_access_type=hist_access_type, **kwds)
    request = self.session.msg('history_request', content)
    self._dispatch_to_kernel(request)
    return request['header']['msg_id']
def shutdown(self, restart=False):
    """Request a kernel shutdown (unsupported for in-process kernels).

    Raises:
        NotImplementedError: Always; an in-process kernel cannot be shut
        down independently of its host process.
    """
    # FIXME: What to do here?
    raise NotImplementedError('Cannot shutdown in-process kernel')
def kernel_info(self):
    """Request kernel info; return the request message ID."""
    request = self.session.msg('kernel_info_request')
    self._dispatch_to_kernel(request)
    return request['header']['msg_id']
def comm_info(self, target_name=None):
    """Request a dictionary of valid comms and their targets."""
    content = {} if target_name is None else {'target_name': target_name}
    request = self.session.msg('comm_info_request', content)
    self._dispatch_to_kernel(request)
    return request['header']['msg_id']
def input(self, string):
    """Deliver a raw-input reply string to the in-process kernel."""
    kernel = self.kernel
    if kernel is None:
        raise RuntimeError('Cannot send input reply. No kernel exists.')
    kernel.raw_input_str = string
def is_complete(self, code):
    """Ask the kernel whether *code* is complete; return the message ID."""
    request = self.session.msg('is_complete_request', {'code': code})
    self._dispatch_to_kernel(request)
    return request['header']['msg_id']
def _dispatch_to_kernel(self, msg):
    """ Send a message to the kernel and handle a reply.
    """
    kernel = self.kernel
    if kernel is None:
        raise RuntimeError('Cannot send request. No kernel exists.')

    # DummySocket (defined elsewhere in this module) presumably stands in
    # for a real ZMQ socket so the session can serialize through it
    # in-process — TODO confirm.
    stream = DummySocket()
    self.session.send(stream, msg)
    msg_parts = stream.recv_multipart()

    # Hand the serialized parts straight to the kernel's shell dispatcher,
    # then read the reply it wrote back on the same stream.
    kernel.dispatch_shell(stream, msg_parts)
    idents, reply_msg = self.session.recv(stream, copy=False)
    self.shell_channel.call_handlers_later(reply_msg)
#-----------------------------------------------------------------------------
# ABC Registration
#-----------------------------------------------------------------------------

# Register as a virtual subclass so isinstance()/issubclass() checks against
# the KernelClientABC interface accept InProcessKernelClient.
KernelClientABC.register(InProcessKernelClient)
| {
"repo_name": "sserrot/champion_relationships",
"path": "venv/Lib/site-packages/ipykernel/inprocess/client.py",
"copies": "1",
"size": "6831",
"license": "mit",
"hash": -3935698435814211000,
"line_mean": 35.5294117647,
"line_max": 80,
"alpha_frac": 0.5656565657,
"autogenerated": false,
"ratio": 4.418499353169469,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0015374777614527797,
"num_lines": 187
} |
"""A client for Mercurial."""
from __future__ import unicode_literals
import logging
import os
import re
import uuid
import six
from six.moves.urllib.parse import urlsplit, urlunparse
from rbtools.clients import PatchResult, SCMClient, RepositoryInfo
from rbtools.clients.errors import (CreateCommitError,
InvalidRevisionSpecError,
MergeError,
SCMError,
TooManyRevisionsError)
from rbtools.clients.svn import SVNClient
from rbtools.utils.checks import check_install
from rbtools.utils.console import edit_file
from rbtools.utils.errors import EditorError
from rbtools.utils.filesystem import make_empty_files, make_tempfile
from rbtools.utils.process import execute
class MercurialRefType(object):
    """The kinds of references that can exist in a Mercurial repository."""

    #: Revision hashes.
    REVISION = 'revision'

    #: Branch names.
    BRANCH = 'branch'

    #: Bookmark names.
    BOOKMARK = 'bookmark'

    #: Tag names.
    TAG = 'tag'

    #: References that could not be classified.
    UNKNOWN = 'unknown'
class MercurialClient(SCMClient):
"""A client for Mercurial.
This is a wrapper around the hg executable that fetches repository
information and generates compatible diffs.
"""
name = 'Mercurial'
server_tool_names = 'Mercurial,Subversion'
supports_commit_history = True
supports_diff_exclude_patterns = True
supports_parent_diffs = True

can_bookmark = True
can_branch = True
can_merge = True

# Placeholder values used in generated diffs for newly-created files.
PRE_CREATION = '/dev/null'
PRE_CREATION_DATE = 'Thu Jan 01 00:00:00 1970 +0000'

# The all-zero node ID hg reports for a missing parent (see
# get_commit_history, which compares {p1node}/{p2node} against this).
NO_PARENT = '0' * 40

# The ASCII field separator.
_FIELD_SEP = '\x1f'

# The ASCII field separator as an escape sequence.
#
# This is passed to Mercurial, where it is interpreted and transformed into
# the actual character.
_FIELD_SEP_ESC = r'\x1f'

# The ASCII record separator.
_RECORD_SEP = '\x1e'

# The ASCII record separator as an escape sequence.
#
# This is passed to Mercurial, where it is interpreted and transformed into
# the actual character.
_RECORD_SEP_ESC = r'\x1e'
def __init__(self, executable='hg', **kwargs):
    """Initialize the client.

    Args:
        executable (unicode, optional):
            The name or path of the :command:`hg` executable to invoke.

        **kwargs (dict):
            Keyword arguments to pass through to the superclass.
    """
    super(MercurialClient, self).__init__(**kwargs)

    self.hgrc = {}
    self._exe = executable
    self._type = 'hg'
    self._remote_path = ()
    self._initted = False

    # HGPLAIN=1 asks hg for plain, script-friendly output (no user
    # customization affecting formatting).
    self._hg_env = {
        'HGPLAIN': '1',
    }

    # Path to the bundled hg extension (../helpers/hgext.py relative to
    # this module), loaded by _execute().
    self._hgext_path = os.path.normpath(os.path.join(
        os.path.dirname(__file__),
        '..', 'helpers', 'hgext.py'))

    # `self._remote_path_candidates` is an ordered set of hgrc
    # paths that are checked if `tracking` option is not given
    # explicitly. The first candidate found to exist will be used,
    # falling back to `default` (the last member.)
    self._remote_path_candidates = ['reviewboard', 'origin', 'parent',
                                    'default']
@property
def hidden_changesets_supported(self):
    """Whether the repository supports hidden changesets.

    Mercurial 1.9 and above support hidden changesets: changesets hidden
    from the regular repository view but still accessible with the
    ``--hidden`` argument. The probe result is cached on first access.
    """
    try:
        return self._hidden_changesets_supported
    except AttributeError:
        pass

    # The choice of command is arbitrary; `parents` for the initial
    # revision should be fast, and will fail if --hidden is unsupported.
    probe = execute([self._exe, 'parents', '--hidden', '-r', '0'],
                    ignore_errors=True,
                    with_errors=False,
                    none_on_ignored_error=True)
    self._hidden_changesets_supported = probe is not None

    return self._hidden_changesets_supported
@property
def hg_root(self):
    """The root of the working directory, or None outside a repository.

    The value is read from ``bundle.mainreporoot`` in the hg config and
    cached after the first access.
    """
    if not hasattr(self, '_hg_root'):
        self._load_hgrc()
        self._hg_root = self.hgrc.get('bundle.mainreporoot')

    return self._hg_root
def _init(self):
    """Detect the repository flavor (hg vs. hgsubversion) and remote path.

    Runs at most once; subsequent calls (or calls outside a repository)
    are no-ops.
    """
    if self._initted or not self.hg_root:
        return

    svn_info = None
    if 'extensions.hgsubversion' in self.hgrc:
        svn_info = execute([self._exe, 'svn', 'info'], ignore_errors=True)

    is_hgsubversion = (
        svn_info and
        not svn_info.startswith('abort:') and
        not svn_info.startswith('hg: unknown command') and
        not svn_info.lower().startswith('not a child of'))

    if is_hgsubversion:
        self._type = 'svn'
        self._svn_info = svn_info
    else:
        self._type = 'hg'

        # Pick the first configured remote path from the ordered
        # candidate list.
        for candidate in self._remote_path_candidates:
            rc_key = 'paths.%s' % candidate

            if rc_key in self.hgrc:
                self._remote_path = (candidate, self.hgrc[rc_key])
                logging.debug('Using candidate path %r: %r',
                              self._remote_path[0], self._remote_path[1])
                break

    self._initted = True
def get_commit_history(self, revisions):
    """Return the commit history specified by the revisions.

    Args:
        revisions (dict):
            A dictionary of revisions to generate history for, as returned
            by :py:meth:`parse_revision_spec`.

    Returns:
        list of dict:
        This list of history entries, in order.

    Raises:
        rbtools.clients.errors.SCMError:
            The history is non-linear or there is a commit with no parents.
    """
    # Each entry field maps to an hg template keyword.
    log_fields = {
        'commit_id': '{node}',
        'parent_id': '{p1node}',
        'author_name': '{author|person}',
        'author_email': '{author|email}',
        'author_date': '{date|rfc3339date}',
        'parent2': '{p2node}',
        'commit_message': '{desc}',
    }

    # The separators are passed as escape sequences and expanded by hg
    # itself (see _FIELD_SEP_ESC / _RECORD_SEP_ESC).
    log_format = self._FIELD_SEP_ESC.join(six.itervalues(log_fields))

    log_entries = execute(
        [
            self._exe,
            'log',
            '--template',
            '%s%s' % (log_format, self._RECORD_SEP_ESC),
            '-r',
            '%(base)s::%(tip)s and not %(base)s' % revisions,
        ],
        ignore_errors=True,
        none_on_ignored_error=True,
        results_unicode=True)

    if not log_entries:
        return None

    history = []

    # NOTE: this relies on viewkeys() iterating in the same order as the
    # itervalues() call used to build log_format above.
    field_names = six.viewkeys(log_fields)

    # The ASCII record separator will be appended to every record, so if we
    # attempt to split the entire output by the record separator, we will
    # end up with an empty ``log_entry`` at the end, which will cause
    # errors.
    for log_entry in log_entries[:-1].split(self._RECORD_SEP):
        fields = log_entry.split(self._FIELD_SEP)
        entry = dict(zip(field_names, fields))

        # We do not want `parent2` to be included in the entry because
        # the entry's items are used as the keyword arguments to the
        # method that uploads a commit and it would be unexpected.
        if entry.pop('parent2') != self.NO_PARENT:
            raise SCMError(
                'The Mercurial SCMClient only supports posting commit '
                'histories that are entirely linear.'
            )
        elif entry['parent_id'] == self.NO_PARENT:
            raise SCMError(
                'The Mercurial SCMClient only supports posting commits '
                'that have exactly one parent.'
            )

        history.append(entry)

    return history
def get_local_path(self):
    """Return the local path to the working tree.

    Returns:
        unicode:
        The filesystem path of the repository on the client system, or
        None when hg is not installed.
    """
    if check_install([self._exe, '--help']):
        return self.hg_root

    logging.debug('Unable to execute "hg --help": skipping Mercurial')
    return None
def get_repository_info(self):
    """Return repository information for the current working tree.

    Returns:
        rbtools.clients.RepositoryInfo:
        The repository info structure, or None when hg is unavailable or
        the current directory is not inside a repository.
    """
    if not check_install([self._exe, '--help']):
        logging.debug('Unable to execute "hg --help": skipping Mercurial')
        return None

    self._init()

    if not self.hg_root:
        # hg aborted => no mercurial repository here.
        return None

    if self._type == 'svn':
        return self._calculate_hgsubversion_repository_info(self._svn_info)

    path = self.hg_root
    base_path = '/'

    # Prefer the configured remote path, when one was found.
    if self._remote_path:
        path = self._remote_path[1]
        base_path = ''

    return RepositoryInfo(path=path,
                          base_path=base_path,
                          local_path=self.hg_root)
def parse_revision_spec(self, revisions=[]):
    """Parse the given revision spec.

    Args:
        revisions (list of unicode, optional):
            A list of revisions as specified by the user. Items in the list
            do not necessarily represent a single revision, since the user
            can use SCM-native syntaxes such as ``r1..r2`` or ``r1:r2``.
            SCMTool-specific overrides of this method are expected to deal
            with such syntaxes.

    Raises:
        rbtools.clients.errors.InvalidRevisionSpecError:
            The given revisions could not be parsed.

        rbtools.clients.errors.TooManyRevisionsError:
            The specified revisions list contained too many revisions.

    Returns:
        dict:
        A dictionary with the following keys:

        ``base`` (:py:class:`unicode`):
            A revision to use as the base of the resulting diff.

        ``tip`` (:py:class:`unicode`):
            A revision to use as the tip of the resulting diff.

        ``parent_base`` (:py:class:`unicode`, optional):
            The revision to use as the base of a parent diff.

        ``commit_id`` (:py:class:`unicode`, optional):
            The ID of the single commit being posted, if not using a range.

        These will be used to generate the diffs to upload to Review Board (or
        print). The diff for review will include the changes in (base, tip],
        and the parent diff (if necessary) will include (parent, base].

        If zero revisions are passed in, this will return the outgoing changes
        from the parent of the working directory.

        If a single revision is passed in, this will return the parent of that
        revision for "base" and the passed-in revision for "tip". This will
        result in generating a diff for the changeset specified.

        If two revisions are passed in, they will be used for the "base"
        and "tip" revisions, respectively.

        In all cases, a parent base will be calculated automatically from
        changesets not present on the remote.
    """
    # NOTE(review): ``revisions=[]`` is a mutable default, but it is only
    # rebound (never mutated) below, so it is harmless as written.
    self._init()

    n_revisions = len(revisions)

    if n_revisions == 1:
        # If there's a single revision, try splitting it based on hg's
        # revision range syntax (either :: or ..). If this splits, then
        # it's handled as two revisions below.
        revisions = re.split(r'\.\.|::', revisions[0])
        n_revisions = len(revisions)

    result = {}

    if n_revisions == 0:
        # No revisions: Find the outgoing changes. Only consider the
        # working copy revision and ancestors because that makes sense.
        # If a user wishes to include other changesets, they can run
        # `hg up` or specify explicit revisions as command arguments.
        if self._type == 'svn':
            result['base'] = self._get_parent_for_hgsubversion()
            result['tip'] = '.'
        else:
            # Ideally, generating a diff for outgoing changes would be as
            # simple as just running `hg outgoing --patch <remote>`, but
            # there are a couple problems with this. For one, the
            # server-side diff parser isn't equipped to filter out diff
            # headers such as "comparing with..." and
            # "changeset: <rev>:<hash>". Another problem is that the output
            # of `hg outgoing` potentially includes changesets across
            # multiple branches.
            #
            # In order to provide the most accurate comparison between
            # one's local clone and a given remote (something akin to git's
            # diff command syntax `git diff <treeish>..<treeish>`), we have
            # to do the following:
            #
            # - Get the name of the current branch
            # - Get a list of outgoing changesets, specifying a custom
            #   format
            # - Filter outgoing changesets by the current branch name
            # - Get the "top" and "bottom" outgoing changesets
            #
            # These changesets are then used as arguments to
            # `hg diff -r <rev> -r <rev>`.
            #
            # Future modifications may need to be made to account for odd
            # cases like having multiple diverged branches which share
            # partial history--or we can just punish developers for doing
            # such nonsense :)
            outgoing = \
                self._get_bottom_and_top_outgoing_revs_for_remote(rev='.')

            if outgoing[0] is None or outgoing[1] is None:
                raise InvalidRevisionSpecError(
                    'There are no outgoing changes')

            result['base'] = self._identify_revision(outgoing[0])
            result['tip'] = self._identify_revision(outgoing[1])
            result['commit_id'] = result['tip']

            # Since the user asked us to operate on tip, warn them about a
            # dirty working directory.
            if (self.has_pending_changes() and
                not self.config.get('SUPPRESS_CLIENT_WARNINGS', False)):
                logging.warning('Your working directory is not clean. Any '
                                'changes which have not been committed '
                                'to a branch will not be included in your '
                                'review request.')

        if self.options.parent_branch:
            result['parent_base'] = result['base']
            result['base'] = self._identify_revision(
                self.options.parent_branch)
    elif n_revisions == 1:
        # One revision: Use the given revision for tip, and find its parent
        # for base.
        result['tip'] = self._identify_revision(revisions[0])
        result['commit_id'] = result['tip']
        result['base'] = self._execute(
            [self._exe, 'parents', '--hidden', '-r', result['tip'],
             '--template', '{node|short}']).split()[0]

        # {node|short} always yields a 12-character hash; anything else
        # means the parent lookup failed.
        if len(result['base']) != 12:
            raise InvalidRevisionSpecError(
                "Can't determine parent revision"
            )
    elif n_revisions == 2:
        # Two revisions: Just use the given revisions
        result['base'] = self._identify_revision(revisions[0])
        result['tip'] = self._identify_revision(revisions[1])
    else:
        raise TooManyRevisionsError

    if 'base' not in result or 'tip' not in result:
        raise InvalidRevisionSpecError(
            '"%s" does not appear to be a valid revision spec' % revisions)

    if self._type == 'hg' and 'parent_base' not in result:
        # If there are missing changesets between base and the remote, we
        # need to generate a parent diff.
        outgoing = self._get_outgoing_changesets(self._get_remote_branch(),
                                                 rev=result['base'])

        logging.debug('%d outgoing changesets between remote and base.',
                      len(outgoing))

        if not outgoing:
            return result

        parent_base = self._execute(
            [self._exe, 'parents', '--hidden', '-r', outgoing[0][1],
             '--template', '{node|short}']).split()

        if len(parent_base) == 0:
            raise Exception(
                'Could not find parent base revision. Ensure upstream '
                'repository is not empty.')

        result['parent_base'] = parent_base[0]

        logging.debug('Identified %s as parent base',
                      result['parent_base'])

    return result
def _identify_revision(self, revision):
    """Resolve *revision* to a global changeset ID.

    Args:
        revision (unicode):
            The revision.

    Raises:
        rbtools.clients.errors.InvalidRevisionSpecError:
            The specified revision could not be identified.

    Returns:
        unicode:
        The global revision ID of the commit.
    """
    output = self._execute(
        [self._exe, 'identify', '-i', '--hidden', '-r', str(revision)],
        ignore_errors=True, none_on_ignored_error=True)

    if output is None:
        raise InvalidRevisionSpecError(
            '"%s" does not appear to be a valid revision' % revision)

    return output.split()[0]
def _calculate_hgsubversion_repository_info(self, svn_info):
    """Return repository info for an hgsubversion checkout.

    Args:
        svn_info (unicode):
            The SVN info output.

    Returns:
        rbtools.clients.RepositoryInfo:
        The repository info structure, if available.
    """
    def _extract_url(pattern):
        match = re.search(pattern, svn_info, re.M)
        return urlsplit(match.group(1)) if match else None

    self._type = 'svn'

    root = _extract_url(r'^Repository Root: (.+)$')
    url = _extract_url(r'^URL: (.+)$')

    if not (root and url):
        return None

    scheme, netloc, path, _, _ = root

    # Rebuild the root URL, dropping any credentials from the netloc.
    root = urlunparse([scheme, root.netloc.split('@')[-1], path,
                       '', '', ''])
    base_path = url.path[len(path):]

    return RepositoryInfo(path=root,
                          base_path=base_path,
                          local_path=self.hg_root)
def _load_hgrc(self):
    """Populate ``self.hgrc`` from ``hg showconfig`` output."""
    for line in execute([self._exe, 'showconfig'],
                        env=self._hg_env, split_lines=True):
        # Lines look like "section.key=value"; a missing '=' yields an
        # empty value.
        key, sep, value = line.partition('=')
        self.hgrc[key] = value.strip()
def get_hg_ref_type(self, ref):
    """Return the type of a reference in Mercurial.

    This can be used to determine if something is a bookmark, branch,
    tag, or revision.

    Args:
        ref (unicode):
            The reference to return the type for.

    Returns:
        unicode:
        The reference type. This will be a value in
        :py:class:`MercurialRefType`.
    """
    def _revset_matches(revset):
        rc, output = self._execute([self._exe, 'log', '-ql1', '-r',
                                    revset],
                                   ignore_errors=True,
                                   return_error_code=True)
        return rc == 0

    # Check for any bookmarks matching ref.
    if _revset_matches('bookmark(%s)' % ref):
        return MercurialRefType.BOOKMARK

    # Check for any branches matching ref.
    #
    # Ideally, we'd use the same sort of log call we'd use for bookmarks
    # and tags, but it works differently for branches, and will
    # incorrectly match tags.
    branches = self._execute([self._exe, 'branches', '-q']).split()

    if ref in branches:
        return MercurialRefType.BRANCH

    # Check for any tags matching ref.
    if _revset_matches('tag(%s)' % ref):
        return MercurialRefType.TAG

    # Now just check that it exists at all. We'll assume it's a revision.
    rc, output = self._execute([self._exe, 'identify', '-r', ref],
                               ignore_errors=True,
                               return_error_code=True)

    if rc == 0:
        return MercurialRefType.REVISION

    return MercurialRefType.UNKNOWN
def get_raw_commit_message(self, revisions):
    """Return the raw commit message.

    This extracts all descriptions in the given revision range and
    concatenates them, most recent ones going first.

    Args:
        revisions (dict):
            A dictionary containing ``base`` and ``tip`` keys.

    Returns:
        unicode:
        The commit messages of all commits between (base, tip].
    """
    # Use a UUID as a delimiter that cannot appear in a real description.
    delim = str(uuid.uuid1())
    log_output = self._execute(
        [self._exe, 'log', '--hidden', '-r',
         '%s::%s' % (revisions['base'], revisions['tip']),
         '--template', '{desc}%s' % delim],
        env=self._hg_env)

    # Drop the first element (the base changeset, which we don't care
    # about) and the last element (always empty, since the output ends
    # with the delimiter).
    descs = log_output.split(delim)[1:-1]

    return '\n\n'.join(desc.strip() for desc in descs)
def diff(self, revisions, include_files=None, exclude_patterns=None,
         extra_args=None, with_parent_diff=True, **kwargs):
    """Perform a diff using the given revisions.

    This will generate a Git-style diff and parent diff (if needed) for
    the provided revisions. The diff will contain additional metadata
    headers used by Review Board to locate the appropriate revisions from
    the repository.

    Fixes in this revision:

    * The node-ID validation regex used the character class
      ``[a-z|A-Z|0-9]``, which wrongly treated ``|`` as a valid node
      character; it is now ``[A-Za-z0-9]``.
    * The mutable default arguments (``[]``) were replaced with ``None``
      (backward-compatible; the lists were never mutated).

    Args:
        revisions (dict):
            A dictionary of revisions, as returned by
            :py:meth:`parse_revision_spec`.

        include_files (list of unicode, optional):
            A list of files to whitelist during the diff generation.

        exclude_patterns (list of unicode, optional):
            A list of shell-style glob patterns to blacklist during diff
            generation.

        extra_args (list, unused):
            Additional arguments to be passed to the diff generation.
            Unused for mercurial.

        with_parent_diff (bool, optional):
            Whether or not to include the parent diff in the result.

        **kwargs (dict, unused):
            Unused keyword arguments.

    Returns:
        dict:
        A dictionary containing ``diff`` (bytes), ``parent_diff`` (bytes,
        optional), ``commit_id`` (unicode, optional), and
        ``base_commit_id`` (unicode, optional) keys.
    """
    self._init()

    diff_args = ['--hidden', '--nodates', '-g']

    if self._type == 'svn':
        diff_args.append('--svn')

    diff_args += include_files or []

    for pattern in exclude_patterns or []:
        diff_args += ['-X', pattern]

    node_base_id = revisions['base']

    diff = self._run_diff(diff_args,
                          parent_id=node_base_id,
                          node_id=revisions['tip'])

    if with_parent_diff and 'parent_base' in revisions:
        base_commit_id = revisions['parent_base']
        parent_diff = self._run_diff(diff_args,
                                     parent_id=base_commit_id,
                                     node_id=node_base_id)
    else:
        base_commit_id = node_base_id
        parent_diff = None

    # If reviewboard requests a relative revision via hgweb it will fail
    # since hgweb does not support the relative revision syntax (^1, -1).
    # Rewrite this relative node id to an absolute node id.
    if not re.match(r'^[A-Za-z0-9]*$', base_commit_id):
        base_commit_id = self._execute(
            [self._exe, 'log', '-r', base_commit_id,
             '--template', '{node}'],
            env=self._hg_env, results_unicode=False)

    return {
        'diff': diff,
        'parent_diff': parent_diff,
        'commit_id': revisions.get('commit_id'),
        'base_commit_id': base_commit_id,
    }
def _run_diff(self, diff_args, parent_id, node_id):
    """Run ``hg diff`` over a revision range and normalize the result.

    Args:
        diff_args (list of unicode):
            The arguments to pass to :command:`hg diff` (except for any
            revision ranges).

        parent_id (unicode):
            The ID of the parent commit for the range.

        node_id (unicode):
            The ID of the latest commit for the range.

    Returns:
        bytes:
        The normalized diff content.
    """
    command = [self._exe, 'diff'] + diff_args
    command += ['-r', parent_id, '-r', node_id]

    raw_diff = self._execute(command,
                             env=self._hg_env,
                             log_output_on_error=False,
                             results_unicode=False)

    return self._normalize_diff(raw_diff,
                                node_id=node_id,
                                parent_id=parent_id)
def _normalize_diff(self, diff, node_id, parent_id):
    """Normalize a diff, adding any headers that may be needed.

    For Git-style diffs, this ensures the diff starts with the
    ``hg export``-style headers Review Board uses to identify the commit
    and its parent.

    Args:
        diff (bytes):
            The generated diff content to prepend to.

        node_id (unicode):
            The revision of this change.

        parent_id (unicode):
            The revision of the parent change.

    Returns:
        bytes:
        The normalized diff content.
    """
    assert isinstance(diff, bytes)

    if not diff.lstrip().startswith(b'diff --git'):
        return diff

    header = (
        b'# HG changeset patch\n'
        b'# Node ID %(node_id)s\n'
        b'# Parent %(parent_id)s\n'
        % {
            b'node_id': node_id.encode('utf-8'),
            b'parent_id': parent_id.encode('utf-8'),
        }
    )

    return header + diff
def _get_files_in_changeset(self, rev):
    """Return a set of all files in the specified changeset.

    Args:
        rev (unicode):
            A changeset identifier.

    Returns:
        set:
        A set of filenames in the changeset.
    """
    listing = execute([self._exe, 'locate', '-r', rev],
                      env=self._hg_env, ignore_errors=True,
                      none_on_ignored_error=True)

    if not listing:
        return set()

    # Normalize path separators (workaround for issue 3894).
    return set(listing.replace('\\', '/').splitlines())
def _get_parent_for_hgsubversion(self):
    """Return the parent Subversion branch.

    Prefers the ``tracking`` command-line option when set; otherwise asks
    hgsubversion for the parent of the current repository.

    Returns:
        unicode:
        The parent branch for the hgsubversion checkout.
    """
    tracking = getattr(self.options, 'tracking', None)

    if tracking:
        return tracking

    return execute([self._exe, 'parent', '--svn', '--template',
                    '{node}\n']).strip()
def _get_remote_branch(self):
    """Return the remote branch associated with this repository.

    If no tracking branch was given on the command line, the first
    configured remote path candidate is used instead.

    Raises:
        rbtools.clients.errors.SCMError:
            No remote branch could be determined.

    Returns:
        unicode:
        The name of the tracking branch.
    """
    remote = getattr(self.options, 'tracking', None)

    if not remote and self._remote_path:
        remote = self._remote_path[0]

    if not remote:
        raise SCMError('Could not determine remote branch to use for '
                       'diff creation. Specify --tracking-branch to '
                       'continue.')

    return remote
def create_commit(self, message, author, run_editor,
                  files=[], all_files=False):
    """Commit the given modified files.

    This is expected to be called after applying a patch. This commits the
    patch using information from the review request, opening the commit
    message in $EDITOR to allow the user to update it.

    Args:
        message (unicode):
            The commit message to use.

        author (object):
            The author of the commit. This is expected to have ``fullname``
            and ``email`` attributes.

        run_editor (bool):
            Whether to run the user's editor on the commit message before
            committing.

        files (list of unicode, optional):
            The list of filenames to commit.

        all_files (bool, optional):
            Whether to commit all changed files, ignoring the ``files``
            argument.

    Raises:
        rbtools.clients.errors.CreateCommitError:
            The commit message could not be created. It may have been
            aborted by the user.
    """
    if run_editor:
        # Hand the message to the user's editor via a temp file, and make
        # sure the file is removed no matter how editing ends.
        filename = make_tempfile(message.encode('utf-8'),
                                 prefix='hg-editor-',
                                 suffix='.txt')

        try:
            modified_message = edit_file(filename)
        except EditorError as e:
            raise CreateCommitError(six.text_type(e))
        finally:
            try:
                os.unlink(filename)
            except OSError:
                pass
    else:
        modified_message = message

    if not modified_message.strip():
        raise CreateCommitError(
            "A commit message wasn't provided. The patched files are in "
            "your tree but haven't been committed.")

    hg_command = [self._exe, 'commit', '-m', modified_message]

    try:
        hg_command += ['-u', '%s <%s>' % (author.fullname, author.email)]
    except AttributeError:
        # Users who have marked their profile as private won't include the
        # fullname or email fields in the API payload. Just commit as the
        # user running RBTools.
        logging.warning('The author has marked their Review Board profile '
                        'information as private. Committing without '
                        'author attribution.')

    if all_files:
        hg_command.append('-A')
    else:
        hg_command += files

    try:
        self._execute(hg_command)
    except Exception as e:
        raise CreateCommitError(six.text_type(e))
def merge(self, target, destination, message, author, squash=False,
          run_editor=False, close_branch=False, **kwargs):
    """Merge the target branch with destination branch.

    Args:
        target (unicode):
            The name of the branch to merge.

        destination (unicode):
            The name of the branch to merge into.

        message (unicode):
            The commit message to use.

        author (object):
            The author of the commit. This is expected to have ``fullname``
            and ``email`` attributes.

        squash (bool, optional):
            Whether to squash the commits or do a plain merge. This is not
            used for Mercurial.

        run_editor (bool, optional):
            Whether to run the user's editor on the commit message before
            committing.

        close_branch (bool, optional):
            Whether to delete the branch after merging.

        **kwargs (dict, unused):
            Additional keyword arguments passed, for future expansion.

    Raises:
        rbtools.clients.errors.MergeError:
            An error occurred while merging the branch.
    """
    ref_type = self.get_hg_ref_type(target)

    if ref_type == MercurialRefType.UNKNOWN:
        raise MergeError('Could not find a valid branch, tag, bookmark, '
                         'or revision called "%s".'
                         % target)

    # Branches are closed before the merge: update to the branch and
    # commit with --close-branch.
    if close_branch and ref_type == MercurialRefType.BRANCH:
        try:
            self._execute([self._exe, 'update', target])
        except Exception as e:
            raise MergeError('Could not switch to branch "%s".\n\n%s'
                             % (target, e))

        try:
            self._execute([self._exe, 'commit', '-m', message,
                           '--close-branch'])
        except Exception as e:
            raise MergeError('Could not close branch "%s".\n\n%s'
                             % (target, e))

    try:
        self._execute([self._exe, 'update', destination])
    except Exception as e:
        raise MergeError('Could not switch to branch "%s".\n\n%s'
                         % (destination, e))

    try:
        self._execute([self._exe, 'merge', target])
    except Exception as e:
        raise MergeError('Could not merge %s "%s" into "%s".\n\n%s'
                         % (ref_type, target, destination, e))

    self.create_commit(message=message,
                       author=author,
                       run_editor=run_editor)

    # Bookmarks, unlike branches, are deleted after the merge commit.
    if close_branch and ref_type == MercurialRefType.BOOKMARK:
        try:
            self._execute([self._exe, 'bookmark', '-d', target])
        except Exception as e:
            raise MergeError('Could not delete bookmark "%s".\n\n%s'
                             % (target, e))
def _get_current_branch(self):
    """Return the name of the currently checked-out branch."""
    branch_output = execute([self._exe, 'branch'], env=self._hg_env)
    return branch_output.strip()
def _get_bottom_and_top_outgoing_revs_for_remote(self, rev=None):
    """Return the bottom and top outgoing revisions.

    Only outgoing changesets on the current branch are considered.

    Args:
        rev (unicode, optional):
            An optional revision to limit the results. If specified, only
            outgoing changesets which are ancestors of this revision will
            be included.

    Returns:
        tuple:
        A 2-tuple of (bottom, top) outgoing revisions between the current
        branch and the remote branch, or (None, None) when there is
        nothing outgoing.
    """
    remote = self._get_remote_branch()
    current_branch = self._get_current_branch()

    outgoing = [
        changeset
        for changeset in self._get_outgoing_changesets(remote, rev=rev)
        if changeset[2] == current_branch
    ]

    if not outgoing:
        return None, None

    top_rev, bottom_rev = self._get_top_and_bottom_outgoing_revs(outgoing)

    return bottom_rev, top_rev
def _get_outgoing_changesets(self, remote, rev=None):
    """Return the outgoing changesets between us and a remote.

    Args:
        remote (unicode):
            The name of the remote.

        rev (unicode, optional):
            An optional revision to limit the results. If specified, only
            outgoing changesets which are ancestors of this revision will
            be included.

    Returns:
        list:
        A list of ``(rev, node, branch)`` tuples, one per outgoing
        changeset, sorted in revision order.
    """
    args = [self._exe, '-q', 'outgoing', '--template',
            '{rev}\\t{node|short}\\t{branch}\\n',
            remote]

    if rev:
        args.extend(['-r', rev])

    # We must handle the special case where there are no outgoing commits
    # as mercurial has a non-zero return value in this case.
    raw_outgoing = execute(args,
                           env=self._hg_env,
                           extra_ignore_errors=(1,))

    changesets = []

    for line in raw_outgoing.splitlines():
        # Skip blank lines and any warnings hg may interleave, such as
        # "warning: certificate for foo can't be verified (Python too old)"
        if not line or line.startswith('warning: '):
            continue

        rev, node, branch = [field.strip() for field in line.split('\t')]

        if not rev.isdigit():
            raise Exception('Unexpected output from hg: %s' % line)

        logging.debug('Found outgoing changeset %s:%s', rev, node)

        changesets.append((int(rev), node, branch or 'default'))

    return changesets
def _get_top_and_bottom_outgoing_revs(self, outgoing_changesets):
    """Return top and bottom outgoing revisions for the given changesets.

    Fixes in this revision:

    * The parent filter previously tested ``p not in
      outgoing_changesets``, comparing an integer revision against a list
      of ``(rev, node, branch)`` tuples — that test was always true, so
      no parent was ever excluded. It now compares against the set of
      outgoing revision numbers, matching the surrounding intent (the
      bottom of the range must not itself be outgoing).
    * The regex is now a raw string, avoiding the invalid ``'\\s'``
      string-escape warning in modern Python.

    Args:
        outgoing_changesets (list):
            A list of ``(rev, node, branch)`` tuples for the outgoing
            changesets.

    Returns:
        tuple:
        A 2-tuple containing the top and bottom revisions for the given
        outgoing changesets.
    """
    revs = set(t[0] for t in outgoing_changesets)

    top_rev = max(revs)
    bottom_rev = min(revs)

    for rev, node, branch in reversed(outgoing_changesets):
        parents = execute(
            [self._exe, 'log', '-r', str(rev), '--template', '{parents}'],
            env=self._hg_env)
        parents = re.split(r':[^\s]+\s*', parents)
        parents = [int(p) for p in parents if p != '']

        # Only keep parents that are not themselves outgoing; one of
        # those is a valid bottom for the range.
        parents = [p for p in parents if p not in revs]

        if len(parents) > 0:
            bottom_rev = parents[0]
            break
        else:
            bottom_rev = rev - 1

    bottom_rev = max(0, bottom_rev)

    return top_rev, bottom_rev
def scan_for_server(self, repository_info):
    """Find the Review Board server matching this repository.

    The ``reviewboard.url`` hgrc setting takes precedence; for
    hgsubversion checkouts, the ``reviewboard:url`` property on the
    backing SVN repository is consulted as a fallback.

    Args:
        repository_info (rbtools.clients.RepositoryInfo):
            The repository information structure.

    Returns:
        unicode:
        The Review Board server URL, if available.
    """
    configured_url = self.hgrc.get('reviewboard.url', '').strip()

    if configured_url:
        return configured_url

    if self._type == 'svn':
        # Try using the reviewboard:url property on the SVN repo, if it
        # exists.
        return SVNClient().scan_for_server_property(repository_info)

    return None
def _execute(self, cmd, *args, **kwargs):
    """Execute an hg command.

    Args:
        cmd (list of unicode):
            A command line to execute.

        *args (list):
            Additional arguments to pass to
            :py:func:`rbtools.utils.process.execute`.

        **kwargs (dict):
            Additional keyword arguments to pass to
            :py:func:`rbtools.utils.process.execute`.

    Returns:
        tuple:
        The result of the execute call.
    """
    # Work on a copy so callers (and tests) never see their argument
    # list mutated.
    cmd = list(cmd)

    # Strip --hidden when the installed hg cannot handle it.
    if not self.hidden_changesets_supported and '--hidden' in cmd:
        cmd = [arg for arg in cmd if arg != '--hidden']

    # Load our extension that normalizes diff-related settings, rather
    # than chasing every individual config option.
    cmd.extend([
        '--config',
        'extensions.rbtoolsnormalize=%s' % self._hgext_path,
    ])

    return execute(cmd, *args, **kwargs)
def has_pending_changes(self):
    """Check if there are changes waiting to be committed.

    Returns:
        bool:
        ``True`` if the working directory has been modified, otherwise
        returns ``False``.
    """
    output = execute([
        self._exe, 'status', '--modified', '--added', '--removed',
        '--deleted',
    ])

    return output != ''
def apply_patch(self, patch_file, base_path=None, base_dir=None, p=None,
                revert=False):
    """Apply the given patch.

    This will take the given patch file and apply it to the working
    directory.

    Args:
        patch_file (unicode):
            The name of the patch file to apply.

        base_path (unicode, unused):
            The base path that the diff was generated in. All hg diffs
            are absolute to the repository root, so this is unused.

        base_dir (unicode, unused):
            The path of the current working directory relative to the
            root of the repository. All hg diffs are absolute to the
            repository root, so this is unused.

        p (unicode, optional):
            The prefix level of the diff.

        revert (bool, optional):
            Whether the patch should be reverted rather than applied.

    Returns:
        rbtools.clients.PatchResult:
        The result of the patch operation.
    """
    command = [self._exe, 'patch', '--no-commit']

    if p:
        command += ['-p', p]

    command.append(patch_file)

    rc, output = self._execute(command, with_errors=True,
                               return_error_code=True)

    return PatchResult(applied=(rc == 0), patch_output=output)
def apply_patch_for_empty_files(self, patch, p_num, revert=False):
    """Return whether any empty files in the patch are applied.

    Empty files carry no hunks, so ``hg patch`` ignores them; this scans
    the raw patch text for their headers and runs ``hg add``/``hg remove``
    on the affected paths directly.

    Args:
        patch (bytes):
            The contents of the patch.

        p_num (unicode):
            The prefix level of the diff.

        revert (bool, optional):
            Whether the patch should be reverted rather than applied.

    Returns:
        ``True`` if there are empty files in the patch. ``False`` if there
        were no empty files, or if an error occurred while applying the
        patch.
    """
    patched_empty_files = False

    # A newly created empty file shows up as a "--- <PRE_CREATION>"
    # header followed by a "+++ b/<path>" header with no "@@" hunk
    # after it; deleted empty files are the mirror image.
    added_files = re.findall(r'--- %s\t%s\n'
                             r'\+\+\+ b/(\S+)\t[^\r\n\t\f]+\n'
                             r'(?:[^@]|$)'
                             % (self.PRE_CREATION,
                                re.escape(self.PRE_CREATION_DATE)), patch)
    deleted_files = re.findall(r'--- a/(\S+)\t[^\r\n\t\f]+\n'
                               r'\+\+\+ %s\t%s\n'
                               r'(?:[^@]|$)'
                               % (self.PRE_CREATION,
                                  re.escape(self.PRE_CREATION_DATE)),
                               patch)

    if added_files:
        added_files = self._strip_p_num_slashes(added_files, int(p_num))
        make_empty_files(added_files)

        # `hg add` so the new empty files show up as tracked changes.
        result = execute([self._exe, 'add'] + added_files,
                         ignore_errors=True, none_on_ignored_error=True)

        if result is None:
            logging.error('Unable to execute "hg add" on: %s',
                          ', '.join(added_files))
        else:
            patched_empty_files = True

    if deleted_files:
        deleted_files = self._strip_p_num_slashes(deleted_files,
                                                  int(p_num))

        # `hg remove` deletes the files and records the removal.
        result = execute([self._exe, 'remove'] + deleted_files,
                         ignore_errors=True, none_on_ignored_error=True)

        if result is None:
            logging.error('Unable to execute "hg remove" on: %s',
                          ', '.join(deleted_files))
        else:
            patched_empty_files = True

    return patched_empty_files
def supports_empty_files(self):
    """Return whether the RB server supports added/deleted empty files.

    Returns:
        bool:
        ``True`` if the Review Board server supports showing empty files.
    """
    caps = self.capabilities

    return (caps and
            caps.has_capability('scmtools', 'mercurial', 'empty_files'))
def get_current_bookmark(self):
    """Return the name of the current bookmark.

    Returns:
        unicode:
        A string with the name of the current bookmark.
    """
    raw_output = execute([self._exe, 'id', '-B'], ignore_errors=True)

    return raw_output.strip()
| {
"repo_name": "reviewboard/rbtools",
"path": "rbtools/clients/mercurial.py",
"copies": "1",
"size": "47524",
"license": "mit",
"hash": 4241743377227547000,
"line_mean": 34.8131122833,
"line_max": 82,
"alpha_frac": 0.5303636058,
"autogenerated": false,
"ratio": 4.698368759268413,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5728732365068414,
"avg_score": null,
"num_lines": null
} |
"""A client for Modoboa's public API."""
from __future__ import unicode_literals
import os
import pkg_resources
import requests
from requests.exceptions import RequestException
class ModoAPIClient(object):
    """A simple client for the public API."""

    def __init__(self, api_url=None):
        """Remember the API endpoint; the local version is resolved lazily."""
        if api_url is not None:
            self._api_url = api_url
        else:
            # Fall back to the project-wide setting when no explicit
            # endpoint is given.
            from django.conf import settings
            self._api_url = settings.MODOBOA_API_URL
        self._local_core_version = None

    def __send_request(self, url, params=None):
        """Send a GET request to the API; return decoded JSON or None."""
        if params is None:
            params = {}
        try:
            response = requests.get(url, params=params)
        except RequestException:
            return None
        # Anything but a 200 is treated as a failure.
        return response.json() if response.status_code == 200 else None

    @property
    def local_core_version(self):
        """Return the version installed locally."""
        if self._local_core_version is None:
            dist = pkg_resources.get_distribution("modoboa")
            self._local_core_version = dist.version
        return self._local_core_version

    def list_extensions(self):
        """List all official extensions."""
        return self.__send_request(os.path.join(self._api_url, "extensions/"))

    def register_instance(self, hostname):
        """Register this instance."""
        search_url = "{}instances/search/?hostname={}".format(
            self._api_url, hostname)
        instance = self.__send_request(search_url)
        if instance is None:
            # Unknown instance: create it.
            create_url = "{}instances/".format(self._api_url)
            payload = {
                "hostname": hostname, "known_version": self.local_core_version}
            response = requests.post(create_url, data=payload)
            if response.status_code != 201:
                return None
            instance = response.json()
        return int(instance["pk"])

    def update_instance(self, pk, data):
        """Update instance and send stats."""
        url = "{}instances/{}/".format(self._api_url, pk)
        response = requests.put(url, data=data)
        return response.status_code == 200

    def versions(self):
        """Fetch core and extension versions."""
        return self.__send_request("{}versions/".format(self._api_url))
| {
"repo_name": "bearstech/modoboa",
"path": "modoboa/lib/api_client.py",
"copies": "1",
"size": "2401",
"license": "isc",
"hash": -7250981990038853000,
"line_mean": 30.5921052632,
"line_max": 79,
"alpha_frac": 0.5751770096,
"autogenerated": false,
"ratio": 4.264653641207815,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5339830650807815,
"avg_score": null,
"num_lines": null
} |
"""A client for Modoboa's public API."""
import logging
import os
import pkg_resources
import requests
from requests.exceptions import RequestException
from django.utils.translation import ugettext as _
logger = logging.getLogger("modoboa.admin")
class ModoAPIClient(object):
    """A simple client for the public API."""

    def __init__(self, api_url=None):
        """Constructor.

        :param api_url: base URL of the public API; when ``None``, the
            ``MODOBOA_API_URL`` Django setting is used instead.
        """
        if api_url is None:
            from django.conf import settings
            self._api_url = settings.MODOBOA_API_URL
        else:
            self._api_url = api_url
        self._local_core_version = None

    def __send_request(self, url, params=None):
        """Send a request to the API.

        Returns the decoded JSON payload, or ``None`` on a network error
        or a non-200 status.
        """
        if params is None:
            params = {}
        try:
            resp = requests.get(url, params=params)
        except RequestException as err:
            logger.critical(
                _("Failed to communicate with public API: %s"), str(err)
            )
            return None
        if resp.status_code != 200:
            return None
        return resp.json()

    @property
    def local_core_version(self):
        """Return the version installed locally.

        Falls back to ``"unknown"`` when the distribution metadata is
        not available (e.g. non-packaged installs).
        """
        if self._local_core_version is None:
            try:
                self._local_core_version = pkg_resources.get_distribution(
                    "modoboa").version
            except pkg_resources.DistributionNotFound:
                self._local_core_version = "unknown"
        return self._local_core_version

    def list_extensions(self):
        """List all official extensions."""
        url = os.path.join(self._api_url, "extensions/")
        return self.__send_request(url)

    def register_instance(self, hostname):
        """Register this instance.

        Returns the instance primary key, or ``None`` on failure.
        """
        url = "{}instances/search/?hostname={}".format(
            self._api_url, hostname)
        instance = self.__send_request(url)
        if instance is None:
            # Not yet known to the API: create it.
            url = "{}instances/".format(self._api_url)
            data = {
                "hostname": hostname, "known_version": self.local_core_version}
            try:
                response = requests.post(url, data=data)
            except RequestException as err:
                logger.critical(
                    _("Failed to communicate with public API: %s"), str(err)
                )
                return None
            if response.status_code != 201:
                return None
            instance = response.json()
        return int(instance["pk"])

    def update_instance(self, pk, data):
        """Update instance and send stats.

        Failures are logged; nothing is returned.
        """
        url = "{}instances/{}/".format(self._api_url, pk)
        try:
            response = requests.put(url, data=data)
        except RequestException as err:
            logger.critical(
                _("Failed to communicate with public API: %s"), str(err)
            )
            # Bug fix: without this return, `response` is unbound below
            # and the status check raises NameError.
            return None
        if response.status_code != 200:
            logger.critical(
                _("Failed to communicate with public API: %s"), response.text
            )

    def versions(self):
        """Fetch core and extension versions."""
        url = "{}versions/".format(self._api_url)
        return self.__send_request(url)
| {
"repo_name": "modoboa/modoboa",
"path": "modoboa/lib/api_client.py",
"copies": "1",
"size": "3198",
"license": "isc",
"hash": 91308827253780300,
"line_mean": 31.9690721649,
"line_max": 79,
"alpha_frac": 0.5562851782,
"autogenerated": false,
"ratio": 4.460251046025105,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5516536224225105,
"avg_score": null,
"num_lines": null
} |
"""A client for Modoboa's public API."""
import os
import pkg_resources
import requests
from requests.exceptions import RequestException
class ModoAPIClient(object):
    """A simple client for the public API."""

    def __init__(self, api_url=None):
        """Constructor.

        :param api_url: base URL of the public API; when ``None``, the
            ``MODOBOA_API_URL`` Django setting is used instead.
        """
        if api_url is None:
            from django.conf import settings
            self._api_url = settings.MODOBOA_API_URL
        else:
            self._api_url = api_url
        self._local_core_version = None

    def __send_request(self, url, params=None):
        """Send a request to the API.

        Returns the decoded JSON payload, or ``None`` on a network error
        or a non-200 status.
        """
        if params is None:
            params = {}
        try:
            resp = requests.get(url, params=params)
        except RequestException:
            return None
        if resp.status_code != 200:
            return None
        return resp.json()

    @property
    def local_core_version(self):
        """Return the version installed locally."""
        if self._local_core_version is None:
            self._local_core_version = pkg_resources.get_distribution(
                "modoboa").version
        return self._local_core_version

    def list_extensions(self):
        """List all official extensions."""
        url = os.path.join(self._api_url, "extensions/")
        return self.__send_request(url)

    def register_instance(self, hostname):
        """Register this instance.

        Returns the instance primary key, or ``None`` on failure.
        """
        url = "{}instances/search/?hostname={}".format(
            self._api_url, hostname)
        instance = self.__send_request(url)
        if instance is None:
            url = "{}instances/".format(self._api_url)
            data = {
                "hostname": hostname, "known_version": self.local_core_version}
            try:
                response = requests.post(url, data=data)
            except RequestException:
                # Robustness: a network failure is treated like any other
                # registration failure instead of propagating.
                return None
            if response.status_code != 201:
                return None
            instance = response.json()
        return int(instance["pk"])

    def update_instance(self, pk, data):
        """Update instance and send stats.

        Returns ``True`` on success, ``False`` otherwise.
        """
        url = "{}instances/{}/".format(self._api_url, pk)
        try:
            response = requests.put(url, data=data)
        except RequestException:
            # Robustness: report failure instead of raising on network
            # errors, matching __send_request's behavior.
            return False
        if response.status_code != 200:
            return False
        return True

    def versions(self):
        """Fetch core and extension versions."""
        url = "{}versions/".format(self._api_url)
        return self.__send_request(url)
| {
"repo_name": "carragom/modoboa",
"path": "modoboa/lib/api_client.py",
"copies": "1",
"size": "2360",
"license": "isc",
"hash": -2110103579833443300,
"line_mean": 30.8918918919,
"line_max": 79,
"alpha_frac": 0.5720338983,
"autogenerated": false,
"ratio": 4.267631103074141,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0,
"num_lines": 74
} |
"""A client for OBO-sourced identifier mappings."""
import json
import logging
import os
import pathlib
import pickle
import re
from collections import Counter, defaultdict
from typing import List, Mapping, Optional
import obonet
from indra.resources import get_resource_path, load_resource_json
__all__ = [
'OntologyClient',
'OboClient',
]
HERE = pathlib.Path(__file__).parent.resolve()
logger = logging.getLogger(__name__)
class OntologyClient:
    """A base client class for OBO and OWL ontologies."""

    def __init__(self, prefix: str):
        """Read the OBO file export at the given path."""
        self.prefix = prefix.lower()
        # Index the JSON resource entries by their ID.
        self.entries = {
            entry['id']: entry
            for entry in load_resource_json(f'{prefix}.json')
        }

        self.alt_to_id = {}
        self.name_to_id = {}
        self.synonym_to_id = {}

        ambiguous = set()
        for db_id, entry in self.entries.items():
            # Regroup the flat xref list into {namespace: [ids]}.
            grouped_xrefs = defaultdict(list)
            for xref in entry.get('xrefs', []):
                grouped_xrefs[xref['namespace']].append(xref['id'])
            entry['xrefs'] = dict(grouped_xrefs)

            self.name_to_id[entry['name']] = db_id

            for synonym in entry.get('synonyms', []):
                # Record synonyms that map to more than one ID (e.g.,
                # "multiciliation") so they can be dropped afterwards.
                if synonym in self.synonym_to_id:
                    ambiguous.add(synonym)
                self.synonym_to_id[synonym] = db_id

            for db_alt_id in entry.get('alt_ids', []):
                # An alt ID colliding with a primary ID indicates a
                # corrupt resource file.
                if db_alt_id in self.entries:
                    raise ValueError(
                        'Problem with integrity of {}:{}'.format(
                            self.prefix, db_alt_id
                        )
                    )
                self.alt_to_id[db_alt_id] = db_id

        # Drop every ambiguous synonym so lookups stay many-to-one.
        self.synonym_to_id = {
            synonym: db_id
            for synonym, db_id in self.synonym_to_id.items()
            if synonym not in ambiguous
        }

    def get_name_from_id(self, db_id: str) -> Optional[str]:
        """Return the database name corresponding to the given database ID.

        Parameters
        ----------
        db_id :
            The ID to be converted.

        Returns
        -------
        :
            The name corresponding to the given ID.
        """
        entry = self.entries.get(db_id, {})
        return entry.get('name')

    def get_id_from_name(self, db_name: str) -> Optional[str]:
        """Return the database identifier corresponding to the given name.

        Parameters
        ----------
        db_name :
            The name to be converted.

        Returns
        -------
        :
            The ID corresponding to the given name.
        """
        return self.name_to_id.get(db_name)

    def get_id_from_name_or_synonym(self, txt: str) -> Optional[str]:
        """Return the database id corresponding to the given name or synonym.

        Note that the way the client is constructed, ambiguous synonyms are
        filtered out. Further, this function prioritizes names over synonyms
        (i.e., it first looks up the ID by name, and only if that fails,
        it attempts a synonym-based lookup). Overall, these mappings are
        guaranteed to be many-to-one.

        Parameters
        ----------
        txt :
            The name or synonym to be converted.

        Returns
        -------
        :
            The ID corresponding to the given name or synonym.
        """
        by_name = self.get_id_from_name(txt)
        if by_name:
            return by_name
        return self.synonym_to_id.get(txt)

    def get_id_from_alt_id(self, db_alt_id: str) -> Optional[str]:
        """Return the canonical database id corresponding to the alt id.

        Parameters
        ----------
        db_alt_id :
            The alt id to be converted.

        Returns
        -------
        :
            The ID corresponding to the given alt id.
        """
        return self.alt_to_id.get(db_alt_id)

    def get_relations(self, db_id: str) -> Mapping[str, List[str]]:
        """Return the relationships corresponding to a given ID.

        Parameters
        ----------
        db_id :
            The ID whose relationships should be returned.

        Returns
        -------
        :
            A dict keyed by relation type with each entry a list of IDs of
            the terms that are in the given relation with the given ID.
        """
        return self.entries.get(db_id, {})

    def get_relation(self, db_id: str, rel_type: str) -> List[str]:
        """Return the relationships of a given type for a given ID.

        Parameters
        ----------
        db_id :
            The ID whose relationships should be returned.
        rel_type :
            The type of relationships to get, e.g., is_a, part_of.

        Returns
        -------
        :
            The IDs of the terms that are in the given relation with the
            given ID.
        """
        entry = self.entries.get(db_id, {})
        return entry.get(rel_type, [])
class OboClient(OntologyClient):
    """A base client for data that's been grabbed via OBO"""

    @staticmethod
    def entries_from_graph(obo_graph, prefix, remove_prefix=False,
                           allowed_synonyms=None, allowed_external_ns=None):
        """Return processed entries from an OBO graph.

        Parameters
        ----------
        obo_graph :
            A graph of the ontology as loaded by :mod:`obonet`.
        prefix : str
            The ontology's own prefix, e.g., "go".
        remove_prefix : bool
            If True, node IDs are stored without the "<PREFIX>:" part.
        allowed_synonyms : set of str, optional
            Synonym scopes to keep; defaults to {'EXACT', 'RELATED'}.
        allowed_external_ns : collection of str, optional
            Namespaces other than the ontology's own whose nodes and
            relations should also be kept.
        """
        allowed_synonyms = allowed_synonyms if allowed_synonyms is not None \
            else {'EXACT', 'RELATED'}
        prefix_upper = prefix.upper()
        entries = []
        for node, data in obo_graph.nodes(data=True):
            # Skip nodes without a name.
            if 'name' not in data:
                continue

            # There are entries in some OBOs that are actually from other
            # ontologies. We either skip these entirely or if allowed
            # external name spaces are provided, we allow nodes that are
            # in one of those namespaces
            external_node = False
            if not node.startswith(prefix_upper):
                if allowed_external_ns and \
                        node.split(':')[0] in allowed_external_ns:
                    external_node = True
                else:
                    continue

            # Only the ontology's own IDs get their prefix stripped.
            if not external_node and remove_prefix:
                node = node[len(prefix) + 1:]

            xrefs = []
            for xref in data.get('xref', []):
                try:
                    db, db_id = xref.split(':', maxsplit=1)
                # This is typically the case when the xref doesn't have
                # a separate name space in which case we skip it
                except ValueError:
                    continue
                # Example: for EFO, we have xrefs like
                # PERSON: James Malone
                db_id = db_id.lstrip()
                # Example: for HP, we have xrefs like
                # MEDDRA:10050185 "Palmoplantar pustulosis"
                if ' ' in db_id:
                    db_id = db_id.split()[0]
                    logging.debug(
                        'Likely labeled %s:%s xref: %s. Recovered %s:%s',
                        prefix, node, xref, db, db_id,
                    )
                xrefs.append(dict(namespace=db, id=db_id))

            # For simplicity, here we only take rels from the same ontology
            # but in principle, we could consider ones across ontologies
            rels_dict = defaultdict(list)
            if 'is_a' in data:
                rels_dict['is_a'] = data.get('is_a')
            for rel in data.get('relationship', []):
                rel_type, target = rel.split(' ', maxsplit=1)
                rels_dict[rel_type].append(target)
            for rel_type, rels in rels_dict.items():
                # Keep targets from this ontology or an allowed external
                # namespace, then strip prefixes from own-ontology targets
                # when requested.
                rel_own = [entry for entry in
                           sorted(set(rels)) if entry.startswith(prefix_upper)
                           or (allowed_external_ns and
                               entry.split(':')[0] in allowed_external_ns)]
                rel_own = [(entry if ((not remove_prefix)
                                      or (allowed_external_ns
                                          and entry.split(':')[0] in
                                          allowed_external_ns))
                            else entry.split(':', maxsplit=1)[1])
                           for entry in rel_own]
                rels_dict[rel_type] = rel_own
            rels_dict = dict(rels_dict)

            synonyms = []
            for synonym in data.get('synonym', []):
                # OBO synonyms look like: "name" EXACT [source]; a bare
                # "[]" scope is treated as EXACT.
                match = re.match(r'^\"(.+)\" (EXACT|RELATED|NARROW|BROAD|\[\])',
                                 synonym)
                syn, status = match.groups()
                if status == '[]':
                    status = 'EXACT'
                if status in allowed_synonyms:
                    synonyms.append(syn)

            namespace = data.get('namespace', prefix)

            entries.append({
                'namespace': namespace,
                'id': node,
                'name': data['name'],
                'synonyms': synonyms,
                'xrefs': xrefs,
                'alt_ids': data.get('alt_id', []),
                'relations': rels_dict,
            })
        return entries

    @classmethod
    def update_resource(cls, directory, url, prefix, *args, remove_prefix=False,
                        allowed_synonyms=None, allowed_external_ns=None):
        """Write the OBO information to files in the given directory.

        Parameters
        ----------
        directory : str
            Directory used to cache the downloaded OBO graph pickle.
        url : str
            The URL to download the OBO file from (only used when there
            is no cached pickle).
        prefix : str
            The ontology prefix; determines the resource and cache paths.
        """
        resource_path = get_resource_path(f'{prefix}.json')
        obo_path = os.path.join(directory, '%s.obo.pkl' % prefix)
        # Reuse a locally pickled graph when available; otherwise
        # download the OBO and cache the parsed graph.
        if os.path.exists(obo_path):
            with open(obo_path, 'rb') as file:
                g = pickle.load(file)
        else:
            g = obonet.read_obo(url)
            with open(obo_path, 'wb') as file:
                pickle.dump(g, file)
        entries = \
            OboClient.entries_from_graph(
                g, prefix=prefix,
                remove_prefix=remove_prefix,
                allowed_synonyms=allowed_synonyms,
                allowed_external_ns=allowed_external_ns)
        entries = prune_empty_entries(entries,
                                      {'synonyms', 'xrefs',
                                       'alt_ids', 'relations'})

        def sort_key(x):
            # Sort numerically by the local part of the ID when possible,
            # otherwise fall back to string ordering.
            val = x['id']
            if not remove_prefix:
                val = val.split(':')[1]
            try:
                val = int(val)
            except ValueError:
                pass
            return val

        entries = sorted(entries, key=sort_key)

        with open(resource_path, 'w') as file:
            json.dump(entries, file, indent=1, sort_keys=True)
def prune_empty_entries(entries, keys):
    """Drop the given keys from each entry when their value is empty.

    Entries are modified in place; the same list is returned for
    convenience.
    """
    for record in entries:
        for field in keys:
            if field in record and not record[field]:
                del record[field]
    return entries
| {
"repo_name": "sorgerlab/indra",
"path": "indra/databases/obo_client.py",
"copies": "1",
"size": "10958",
"license": "bsd-2-clause",
"hash": -7906763245769618000,
"line_mean": 34.0095846645,
"line_max": 80,
"alpha_frac": 0.5075743749,
"autogenerated": false,
"ratio": 4.234157650695518,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5241732025595518,
"avg_score": null,
"num_lines": null
} |
"""A client for OBO-sourced identifier mappings."""
import json
import logging
import os
import pickle
import re
from collections import Counter, defaultdict
import obonet
__all__ = [
'OboClient',
'RESOURCES',
]
HERE = os.path.dirname(os.path.abspath(__file__))
RESOURCES = os.path.join(HERE, os.pardir, 'resources')
logger = logging.getLogger(__name__)
def _make_resource_path(directory, prefix):
return os.path.join(directory, '{prefix}.json'.format(prefix=prefix))
class OboClient:
    """A base client for data that's been grabbed via OBO"""

    def __init__(self, prefix, *, directory=RESOURCES):
        """Read the OBO file export at the given path.

        Parameters
        ----------
        prefix : str
            The ontology prefix, e.g., "go"; determines which JSON
            resource file is loaded.
        directory : str
            The directory containing the JSON resource files.
        """
        self.prefix = prefix
        self.directory = directory
        self.mapping_path = _make_resource_path(self.directory, self.prefix)
        self.entries = {}
        self.alt_to_id = {}
        self.name_to_id = {}
        self.synonym_to_id = {}

        with open(self.mapping_path) as file:
            entries = json.load(file)
        # Index the entries by their ID.
        self.entries = {entry['id']: entry for entry in entries}

        ambig_synonyms = set()
        for db_id, entry in self.entries.items():
            # Regroup the flat xref list into {namespace: [ids]}.
            xrs = defaultdict(list)
            for xref in entry.get('xrefs', []):
                xrs[xref['namespace']].append(xref['id'])
            entry['xrefs'] = dict(xrs)

            self.name_to_id[entry['name']] = db_id
            for synonym in entry.get('synonyms', []):
                # Make a note of this is an ambiguous synonym so that we can
                # get rid of it after the loop, e.g., "multiciliation"
                if synonym in self.synonym_to_id:
                    ambig_synonyms.add(synonym)
                self.synonym_to_id[synonym] = db_id

            for db_alt_id in entry.get('alt_ids', []):
                # An alt ID that collides with a primary ID indicates a
                # corrupt resource file.
                if db_alt_id in self.entries:
                    raise ValueError(
                        'Problem with integrity of {}:{}'.format(
                            self.prefix, db_alt_id
                        )
                    )
                self.alt_to_id[db_alt_id] = db_id
        # Remove all ambiguous synonyms
        self.synonym_to_id = {k: v for k, v in self.synonym_to_id.items()
                              if k not in ambig_synonyms}

    @staticmethod
    def entries_from_graph(obo_graph, prefix, remove_prefix=False,
                           allowed_synonyms=None):
        """Return processed entries from an OBO graph.

        Parameters
        ----------
        obo_graph :
            A graph of the ontology as loaded by :mod:`obonet`.
        prefix : str
            The ontology's own prefix, e.g., "go".
        remove_prefix : bool
            If True, node IDs are stored without the "<PREFIX>:" part.
        allowed_synonyms : set of str, optional
            Synonym scopes to keep; defaults to {'EXACT', 'RELATED'}.
        """
        allowed_synonyms = allowed_synonyms if allowed_synonyms is not None \
            else {'EXACT', 'RELATED'}
        prefix_upper = prefix.upper()
        entries = []
        for node, data in obo_graph.nodes(data=True):
            # Skip nodes without a name.
            if 'name' not in data:
                continue
            # There are entries in some OBOs that are actually from other
            # ontologies
            if not node.startswith(prefix_upper):
                continue
            if remove_prefix:
                node = node[len(prefix) + 1:]

            xrefs = []
            for xref in data.get('xref', []):
                try:
                    db, db_id = xref.split(':', maxsplit=1)
                # This is typically the case when the xref doesn't have
                # a separate name space in which case we skip it
                except ValueError:
                    continue
                # Example: for EFO, we have xrefs like
                # PERSON: James Malone
                db_id = db_id.lstrip()
                # Example: for HP, we have xrefs like
                # MEDDRA:10050185 "Palmoplantar pustulosis"
                if ' ' in db_id:
                    db_id = db_id.split()[0]
                    logging.debug(
                        'Likely labeled %s:%s xref: %s. Recovered %s:%s',
                        prefix, node, xref, db, db_id,
                    )
                xrefs.append(dict(namespace=db, id=db_id))

            # For simplicity, here we only take rels from the same ontology
            # but in principle, we could consider ones across ontologies
            rels_dict = defaultdict(list)
            if 'is_a' in data:
                rels_dict['is_a'] = data.get('is_a')
            for rel in data.get('relationship', []):
                rel_type, target = rel.split(' ', maxsplit=1)
                rels_dict[rel_type].append(target)
            for rel_type, rels in rels_dict.items():
                # Keep only same-ontology targets, stripping their prefix
                # when requested.
                rel_own = [entry for entry in
                           sorted(set(rels)) if entry.startswith(prefix_upper)]
                rel_own = [(entry if not remove_prefix
                            else entry.split(':', maxsplit=1)[1])
                           for entry in rel_own]
                rels_dict[rel_type] = rel_own
            rels_dict = dict(rels_dict)

            synonyms = []
            for synonym in data.get('synonym', []):
                # OBO synonyms look like: "name" EXACT [source]; a bare
                # "[]" scope is treated as EXACT.
                match = re.match(r'^\"(.+)\" (EXACT|RELATED|NARROW|BROAD|\[\])',
                                 synonym)
                syn, status = match.groups()
                if status == '[]':
                    status = 'EXACT'
                if status in allowed_synonyms:
                    synonyms.append(syn)

            namespace = data.get('namespace', prefix)

            entries.append({
                'namespace': namespace,
                'id': node,
                'name': data['name'],
                'synonyms': synonyms,
                'xrefs': xrefs,
                'alt_ids': data.get('alt_id', []),
                'relations': rels_dict,
            })
        return entries

    @staticmethod
    def update_resource(directory, url, prefix, *args, remove_prefix=False,
                        allowed_synonyms=None):
        """Write the OBO information to files in the given directory."""
        resource_path = _make_resource_path(directory, prefix)
        obo_path = os.path.join(directory, '%s.obo.pkl' % prefix)
        # Reuse a locally pickled graph when available; otherwise
        # download the OBO and cache the parsed graph.
        if os.path.exists(obo_path):
            with open(obo_path, 'rb') as file:
                g = pickle.load(file)
        else:
            g = obonet.read_obo(url)
            with open(obo_path, 'wb') as file:
                pickle.dump(g, file)
        entries = \
            OboClient.entries_from_graph(g, prefix=prefix,
                                         remove_prefix=remove_prefix,
                                         allowed_synonyms=allowed_synonyms)
        entries = prune_empty_entries(entries,
                                      {'synonyms', 'xrefs',
                                       'alt_ids', 'relations'})
        with open(resource_path, 'w') as file:
            json.dump(entries, file, indent=1, sort_keys=True)

    def count_xrefs(self):
        """Count how many xrefs there are to each database."""
        # NOTE(review): ``self.id_to_xrefs`` is never assigned anywhere
        # in this class (``__init__`` only builds ``self.entries`` and
        # the lookup dicts), so calling this method raises
        # AttributeError. It could presumably be derived from
        # entry['xrefs'] in ``self.entries`` -- confirm intent.
        return Counter(
            xref_db
            for db_id, xref_map in self.id_to_xrefs.items()
            for xref_db, xref_db_ids in xref_map.items()
            for _ in xref_db_ids
        )

    def get_name_from_id(self, db_id):
        """Return the database name corresponding to the given database ID.

        Parameters
        ----------
        db_id : str
            The ID to be converted.

        Returns
        -------
        db_name : str or None
            The name corresponding to the given ID.
        """
        return self.entries.get(db_id, {}).get('name')

    def get_id_from_name(self, db_name):
        """Return the database identifier corresponding to the given name.

        Parameters
        ----------
        db_name : str
            The name to be converted.

        Returns
        -------
        db_id : str
            The ID corresponding to the given name.
        """
        return self.name_to_id.get(db_name)

    def get_id_from_name_or_synonym(self, txt):
        """Return the database id corresponding to the given name or synonym.

        Note that the way the OboClient is constructed, ambiguous synonyms are
        filtered out. Further, this function prioritizes names over synonyms
        (i.e., it first looks up the ID by name, and only if that fails,
        it attempts a synonym-based lookup). Overall, these mappings are
        guaranteed to be many-to-one.

        Parameters
        ----------
        txt : str
            The name or synonym to be converted.

        Returns
        -------
        db_id : str
            The ID corresponding to the given name or synonym.
        """
        name_id = self.get_id_from_name(txt)
        if name_id:
            return name_id
        return self.synonym_to_id.get(txt)

    def get_id_from_alt_id(self, db_alt_id):
        """Return the canonical database id corresponding to the alt id.

        Parameters
        ----------
        db_alt_id : str
            The alt id to be converted.

        Returns
        -------
        db_id : str or None
            The ID corresponding to the given alt id.
        """
        return self.alt_to_id.get(db_alt_id)

    def get_relations(self, db_id):
        """Return the relationships corresponding to a given ID.

        Parameters
        ----------
        db_id : str
            The ID whose relationships should be returned.

        Returns
        -------
        dict
            A dict keyed by relation type with each entry a list of IDs of the
            terms that are in the given relation with the given ID.
        """
        return self.entries.get(db_id, {})

    def get_relation(self, db_id, rel_type):
        """Return the relationships of a given type for a given ID.

        Parameters
        ----------
        db_id : str
            The ID whose relationships should be returned.
        rel_type : str
            The type of relationships to get, e.g., is_a, part_of.

        Returns
        -------
        list of str
            The IDs of the terms that are in the given relation with the given
            ID.
        """
        return self.entries.get(db_id, {}).get(rel_type, [])
def prune_empty_entries(entries, keys):
    """Drop the given keys from each entry when their value is empty.

    Entries are modified in place; the same list is returned for
    convenience.
    """
    for record in entries:
        for field in keys:
            if field in record and not record[field]:
                del record[field]
    return entries
"repo_name": "johnbachman/belpy",
"path": "indra/databases/obo_client.py",
"copies": "1",
"size": "10232",
"license": "mit",
"hash": 4897701888870146000,
"line_mean": 33.5709459459,
"line_max": 80,
"alpha_frac": 0.5153440188,
"autogenerated": false,
"ratio": 4.127470754336426,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.00022939606272939604,
"num_lines": 296
} |
"""A client for OWL-sourced identifier mappings."""
import json
import os
import pickle
from collections import defaultdict
from operator import itemgetter
from typing import Any, Collection, Mapping, TYPE_CHECKING
from tqdm import tqdm
from indra.databases.obo_client import OntologyClient, prune_empty_entries
from indra.resources import get_resource_path
if TYPE_CHECKING:
import pronto
class OwlClient(OntologyClient):
    """A base client for data that's been grabbed via OWL."""

    @staticmethod
    def entry_from_term(
        term: "pronto.Term",
        prefix: str,
        remove_prefix: bool = False,
        allowed_external_ns: Collection = None,
    ) -> Mapping[str, Any]:
        """Create a data dictionary from a Pronto term.

        Parameters
        ----------
        term :
            The Pronto term to convert.
        prefix :
            The ontology's own prefix, e.g., "ido".
        remove_prefix :
            If True, own-ontology parent IDs are stored without the
            "<PREFIX>:" part.
        allowed_external_ns :
            Namespaces other than the ontology's own whose parents
            should also be kept.
        """
        rels_dict = defaultdict(list)
        xrefs = []
        for xref in term.xrefs:
            try:
                xref_db, xref_id = xref.id.split(":", maxsplit=1)
            # Skip xrefs without a separate namespace part.
            except ValueError:
                continue
            else:
                xrefs.append(dict(namespace=xref_db, id=xref_id))
        # Direct superclasses become is_a relations.
        for parent in term.superclasses(distance=1, with_self=False):
            parent_db, parent_id = parent.id.split(':', maxsplit=1)
            # If the parent here is not from this namespace and not one of the
            # allowed external namespaces then we skip the parent
            if parent_db.lower() != prefix.lower() and \
                    (not allowed_external_ns or
                     parent_db not in allowed_external_ns):
                continue
            # Only own-ontology parents get their prefix stripped.
            if remove_prefix and parent_db.lower() == prefix.lower():
                rels_dict["is_a"].append(parent_id)
            else:
                rels_dict["is_a"].append(parent.id)
        term_ns, term_id = term.id.split(':', maxsplit=1)
        term_ns = term_ns.lower()
        return {
            "namespace": term_ns,
            "id": term_id,
            "name": term.name,
            "synonyms": [s.description for s in term.synonyms],
            "xrefs": xrefs,
            "alt_ids": sorted(term.alternate_ids),
            "relations": dict(rels_dict),
        }

    @classmethod
    def entries_from_ontology(
        cls,
        prefix: str,
        ontology: "pronto.Ontology",
        *,
        skip_obsolete: bool = True,
        remove_prefix: bool = False,
        allowed_external_ns: Collection = None,
    ):
        """Return processed entries for the terms of a Pronto ontology.

        Terms outside the ontology's own prefix and (optionally) obsolete
        terms are skipped.
        """
        prefix = prefix.upper()
        rv = []
        for term in tqdm(ontology.terms(), desc=f"[{prefix}]"):
            if term.obsolete and skip_obsolete:
                continue
            if not term.id.startswith(prefix):
                continue
            # NOTE(review): allowed_external_ns is accepted here but not
            # forwarded to entry_from_term -- confirm whether that is
            # intentional.
            rv.append(cls.entry_from_term(term, prefix,
                                          remove_prefix=remove_prefix))
        return rv

    @classmethod
    def update_resource(
        cls,
        prefix: str,
        ontology: "pronto.Ontology",
        skip_obsolete: bool = True,
        remove_prefix: bool = False,
    ):
        """Convert an ontology to entries and write the JSON resource file."""
        prefix = prefix.lower()
        entries = cls.entries_from_ontology(
            prefix=prefix, ontology=ontology, skip_obsolete=skip_obsolete,
            remove_prefix=remove_prefix
        )
        entries = prune_empty_entries(
            entries,
            {"synonyms", "xrefs", "alt_ids", "relations"},
        )
        # With the prefix removed the ID is the sort key itself; with the
        # prefix kept, sort by the numeric local part.
        entries = sorted(
            entries,
            key=itemgetter("id") if remove_prefix else _id_key,
        )
        resource_path = get_resource_path(f"{prefix}.json")
        with open(resource_path, "w") as file:
            json.dump(entries, file, indent=1, sort_keys=True)

    @classmethod
    def update_from_obo_library(
        cls,
        prefix: str,
        extension: str = "owl",
        **kwargs,
    ):
        """Update the resource from an OBO-library-hosted ontology file.

        The parsed ontology is cached as a pickle next to the JSON
        resource to avoid repeated downloads.
        """
        prefix = prefix.lower()
        cache_path = get_resource_path(f"{prefix}.{extension}.pkl")
        if os.path.exists(cache_path):
            with open(cache_path, "rb") as file:
                ontology = pickle.load(file)
        else:
            # Pronto is an optional dependency; only required when the
            # ontology actually has to be parsed.
            try:
                import pronto
            except ImportError:
                raise ImportError(
                    "To use the INDRA OWL Client, you must first"
                    "install Pronto with `pip install pronto`."
                )
            ontology = pronto.Ontology.from_obo_library(
                f"{prefix.upper()}.{extension}")
            with open(cache_path, "wb") as file:
                pickle.dump(ontology, file, protocol=pickle.HIGHEST_PROTOCOL)

        cls.update_resource(prefix=prefix, ontology=ontology, **kwargs)

    @classmethod
    def update_from_file(
        cls,
        prefix: str,
        file,
        **kwargs,
    ):
        """Update the resource from a local ontology file or handle."""
        try:
            import pronto
        except ImportError:
            raise ImportError(
                "To use the INDRA OWL Client, you must first"
                "install Pronto with `pip install pronto`."
            )
        ontology = pronto.Ontology(file)
        cls.update_resource(prefix=prefix, ontology=ontology, **kwargs)
def _id_key(x):
return int(x["id"].split(':')[1])
if __name__ == "__main__":
    # Refresh the bundled IDO ontology resource by fetching the latest OWL
    # release from the OBO library.
    OwlClient.update_from_obo_library("ido")
| {
"repo_name": "sorgerlab/indra",
"path": "indra/databases/owl_client.py",
"copies": "1",
"size": "5166",
"license": "bsd-2-clause",
"hash": -4605458429948141600,
"line_mean": 30.8888888889,
"line_max": 78,
"alpha_frac": 0.5481997677,
"autogenerated": false,
"ratio": 3.964696853415196,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5012896621115196,
"avg_score": null,
"num_lines": null
} |
'''A client for talking to NSQ'''
from . import connection
from . import logger
from . import exceptions
from .constants import HEARTBEAT
from .response import Response, Error
from .http import nsqlookupd, ClientException
from .checker import ConnectionChecker
from contextlib import contextmanager
import random
import select
import socket
import time
import threading
import math
class Client(object):
    '''A client for talking to NSQ over a connection'''
    def __init__(self,
        lookupd_http_addresses=None, nsqd_tcp_addresses=None, topic=None,
        timeout=0.1, reconnection_backoff=None, auth_secret=None, connect_timeout=None, **identify):
        '''Create a client.

        :param lookupd_http_addresses: nsqlookupd hosts used for discovery
            (a ``topic`` is then required)
        :param nsqd_tcp_addresses: explicit ``host:port`` nsqd instances
        :param topic: topic used when discovering producers via lookupd
        :param timeout: select timeout (seconds) used by :meth:`read`
        :param reconnection_backoff: backoff policy handed to connections
        :param auth_secret: secret for HTTP (``access_token``) and TCP auth
        :param connect_timeout: timeout passed to the `Connection` class
        :param identify: extra options sent with the IDENTIFY command
        '''
        # If lookupd_http_addresses are provided, so must a topic be.
        if lookupd_http_addresses:
            assert topic
        # Create clients for each of lookupd instances
        lookupd_http_addresses = lookupd_http_addresses or []
        params = {}
        if auth_secret:
            params['access_token'] = auth_secret
        self._lookupd = [
            nsqlookupd.Client(host, **params) for host in lookupd_http_addresses]
        self._topic = topic
        # The select timeout
        self._timeout = timeout
        # Our reconnection backoff policy
        self._reconnection_backoff = reconnection_backoff
        # The connection timeout to pass to the `Connection` class
        self._connect_timeout = connect_timeout
        # The options to send along with identify when establishing connections
        self._identify_options = identify
        self._auth_secret = auth_secret
        # A mapping of (host, port) to our nsqd connection objects
        self._connections = {}
        self._nsqd_tcp_addresses = nsqd_tcp_addresses or []
        # Heartbeat interval in milliseconds (nsqd's unit) and the last time
        # we received anything, used to detect stale connections.
        self.heartbeat_interval = 30 * 1000
        self.last_recv_timestamp = time.time()
        # A lock for manipulating our connections
        self._lock = threading.RLock()
        # And lastly, instantiate our connections
        self.check_connections()

    def discover(self, topic):
        '''Run the discovery mechanism'''
        logger.info('Discovering on topic %s', topic)
        producers = []
        for lookupd in self._lookupd:
            logger.info('Discovering on %s', lookupd)
            try:
                # Find all the current producers on this instance
                for producer in lookupd.lookup(topic)['producers']:
                    logger.info('Found producer %s on %s', producer, lookupd)
                    producers.append(
                        (producer['broadcast_address'], producer['tcp_port']))
            except ClientException:
                logger.exception('Failed to query %s', lookupd)

        new = []
        for host, port in producers:
            conn = self._connections.get((host, port))
            if not conn:
                logger.info('Discovered %s:%s', host, port)
                new.append(self.connect(host, port))
            elif not conn.alive():
                logger.info('Reconnecting to %s:%s', host, port)
                if conn.connect():
                    conn.setblocking(0)
                    self.reconnected(conn)
            else:
                logger.debug('Connection to %s:%s still alive', host, port)
        # And return all the new connections
        return [conn for conn in new if conn]

    def check_connections(self):
        '''Connect to all the appropriate instances'''
        logger.info('Checking connections')
        if self._lookupd:
            self.discover(self._topic)
        # Make sure we're connected to all the prescribed hosts
        for hostspec in self._nsqd_tcp_addresses:
            logger.debug('Checking nsqd instance %s', hostspec)
            host, port = hostspec.split(':')
            port = int(port)
            conn = self._connections.get((host, port), None)
            # If there is no connection to it, we have to try to connect
            if not conn:
                logger.info('Connecting to %s:%s', host, port)
                self.connect(host, port)
            elif not conn.alive():
                # If we've connected to it before, but it's no longer alive,
                # we'll have to make a decision about when to try to reconnect
                # to it, if we need to reconnect to it at all
                if conn.ready_to_reconnect():
                    logger.info('Reconnecting to %s:%s', host, port)
                    if conn.connect():
                        conn.setblocking(0)
                        self.reconnected(conn)
            else:
                # The connection looks alive — treat missing heartbeats for
                # more than two intervals as a dead connection and reconnect.
                logger.debug('Checking freshness')
                now = time.time()
                time_check = math.ceil(now - self.last_recv_timestamp)
                if time_check >= ((self.heartbeat_interval * 2) / 1000.0):
                    if conn.ready_to_reconnect():
                        logger.info('Reconnecting to %s:%s', host, port)
                        if conn.connect():
                            conn.setblocking(0)
                            self.reconnected(conn)

    @contextmanager
    def connection_checker(self):
        '''Run periodic reconnection checks'''
        thread = ConnectionChecker(self)
        logger.info('Starting connection-checker thread')
        thread.start()
        try:
            yield thread
        finally:
            logger.info('Stopping connection-checker')
            thread.stop()
            logger.info('Joining connection-checker')
            thread.join()

    def connect(self, host, port):
        '''Connect to the provided host, port'''
        conn = connection.Connection(host, port,
            reconnection_backoff=self._reconnection_backoff,
            auth_secret=self._auth_secret,
            timeout=self._connect_timeout,
            **self._identify_options)
        if conn.alive():
            conn.setblocking(0)
            self.add(conn)
            return conn

    def reconnected(self, conn):
        '''Hook into when a connection has been reestablished'''

    def connections(self):
        '''Safely return a list of all our connections'''
        with self._lock:
            return list(self._connections.values())

    def added(self, conn):
        '''Hook into when a connection has been added'''

    def add(self, connection):
        '''Add a connection'''
        key = (connection.host, connection.port)
        with self._lock:
            if key not in self._connections:
                self._connections[key] = connection
                self.added(connection)
                return connection
            else:
                return None

    def remove(self, connection):
        '''Remove a connection'''
        key = (connection.host, connection.port)
        with self._lock:
            found = self._connections.pop(key, None)
            try:
                self.close_connection(found)
            except Exception as exc:
                logger.warning('Failed to close %s: %s', connection, exc)
            return found

    def close_connection(self, connection):
        '''A hook for subclasses when connections are closed'''
        connection.close()

    def close(self):
        '''Close this client down'''
        # BUG FIX: this used ``map(self.remove, self.connections())``, which
        # is lazy on Python 3 and therefore never removed or closed anything.
        # Iterate eagerly instead; connections() returns a copy, so removing
        # while iterating is safe.
        for conn in self.connections():
            self.remove(conn)

    def read(self):
        '''Read from any of the connections that need it'''
        # We'll check all living connections
        connections = [c for c in self.connections() if c.alive()]
        if not connections:
            # If there are no connections, obviously we return no messages, but
            # we should wait the duration of the timeout
            time.sleep(self._timeout)
            return []

        # Not all connections need to be written to, so we'll only concern
        # ourselves with those that require writes
        writes = [c for c in connections if c.pending()]
        try:
            readable, writable, exceptable = select.select(
                connections, writes, connections, self._timeout)
        except exceptions.ConnectionClosedException:
            logger.exception('Tried selecting on closed client')
            return []
        except select.error:
            logger.exception('Error running select')
            return []

        # If we returned because the timeout interval passed, log it and return
        if not (readable or writable or exceptable):
            logger.debug('Timed out...')
            return []

        responses = []
        # For each readable socket, we'll try to read some responses
        for conn in readable:
            try:
                for res in conn.read():
                    # We'll capture heartbeats and respond to them automatically
                    if (isinstance(res, Response) and res.data == HEARTBEAT):
                        logger.info('Sending heartbeat to %s', conn)
                        conn.nop()
                        logger.debug('Setting last_recv_timestamp')
                        self.last_recv_timestamp = time.time()
                        continue
                    elif isinstance(res, Error):
                        nonfatal = (
                            exceptions.FinFailedException,
                            exceptions.ReqFailedException,
                            exceptions.TouchFailedException
                        )
                        if not isinstance(res.exception(), nonfatal):
                            # If it's not any of the non-fatal exceptions, then
                            # we have to close this connection
                            logger.error(
                                'Closing %s: %s', conn, res.exception())
                            self.close_connection(conn)
                    responses.append(res)
                    logger.debug('Setting last_recv_timestamp')
                    self.last_recv_timestamp = time.time()
            except exceptions.NSQException:
                logger.exception('Failed to read from %s', conn)
                self.close_connection(conn)
            except socket.error:
                logger.exception('Failed to read from %s', conn)
                self.close_connection(conn)

        # For each writable socket, flush some data out
        for conn in writable:
            try:
                conn.flush()
            except socket.error:
                logger.exception('Failed to flush %s', conn)
                self.close_connection(conn)

        # For each connection with an exception, try to close it and remove it
        # from our connections
        for conn in exceptable:
            self.close_connection(conn)

        return responses

    @contextmanager
    def random_connection(self):
        '''Pick a random living connection'''
        # While at the moment there's no need for this to be a context manager
        # per se, I would like to use that interface since I anticipate
        # adding some wrapping around it at some point.
        yield random.choice(
            [conn for conn in self.connections() if conn.alive()])

    def wait_response(self):
        '''Wait for a response'''
        responses = self.read()
        while not responses:
            responses = self.read()
        return responses

    def wait_write(self, client):
        '''Wait until the specific client has written the message'''
        while client.pending():
            self.read()

    def pub(self, topic, message):
        '''Publish the provided message to the provided topic'''
        with self.random_connection() as client:
            client.pub(topic, message)
            return self.wait_response()

    def mpub(self, topic, *messages):
        '''Publish messages to a topic'''
        with self.random_connection() as client:
            client.mpub(topic, *messages)
            return self.wait_response()
| {
"repo_name": "dlecocq/nsq-py",
"path": "nsq/client.py",
"copies": "1",
"size": "11834",
"license": "mit",
"hash": 6286099481519049000,
"line_mean": 38.0561056106,
"line_max": 100,
"alpha_frac": 0.5652357614,
"autogenerated": false,
"ratio": 4.7891541885876165,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5854389949987617,
"avg_score": null,
"num_lines": null
} |
"""A client for the ClassyFire API which enables efficient querying with
chemical database files"""
import csv
import io
import json
import os
import time
import requests
# Base URL of the public ClassyFire web service.
url = "http://classyfire.wishartlab.com"
# Number of compounds submitted per bulk query.
chunk_size = 1000
# Seconds to wait between polls while a query is still being processed.
sleep_interval = 60
def structure_query(compound, label='pyclassyfire'):
    """Submit compound structures to the ClassyFire service for evaluation
    and receive an id which can later be used to collect the results.

    :param compound: The compound structures as line delimited inchikey or
        smiles. Optionally a tab-separated id may be prepended for each
        structure.
    :type compound: str
    :param label: A label for the query
    :type label: str
    :return: A query ID number
    :rtype: int

    >>> structure_query('CCC', 'smiles_test')
    >>> structure_query('InChI=1S/C3H4O3/c1-2(4)3(5)6/h1H3,(H,5,6)')
    """
    # NOTE(review): the JSON body is assembled with %-formatting, so the
    # caller is responsible for JSON-escaping `compound` and `label`
    # (callers in this module join structures with a literal '\\n' escape
    # sequence for this reason). Quotes or backslashes in the input would
    # produce invalid JSON — confirm before passing arbitrary strings.
    r = requests.post(url + '/queries.json', data='{"label": "%s", '
                      '"query_input": "%s", "query_type": "STRUCTURE"}'
                      % (label, compound),
                      headers={"Content-Type": "application/json"})
    r.raise_for_status()
    return r.json()['id']
def iupac_query(compound, label='pyclassyfire'):
    """Submit IUPAC compound names to the ClassyFire service for evaluation
    and receive an id which can later be used to collect the results.

    :param compound: The line delimited compound names. Optionally a
        tab-separated id may be prepended for each compound.
    :type compound: str
    :param label: A label for the query
    :type label: str
    :return: A query ID number
    :rtype: int

    >>> iupac_query('ethane', 'iupac_test')
    >>> iupac_query('C001\\tethane\\nC002\\tethanol', 'iupac_test')
    """
    # NOTE(review): same escaping caveat as structure_query — the JSON body
    # is built by %-formatting, so special characters in the inputs are the
    # caller's responsibility to escape.
    r = requests.post(url + '/queries.json', data='{"label": "%s", '
                      '"query_input": "%s", "query_type": "IUPAC_NAME"}'
                      % (label, compound),
                      headers={"Content-Type": "application/json"})
    r.raise_for_status()
    return r.json()['id']
def get_results(query_id, return_format="json"):
    """Fetch the classification results for a previously submitted query.

    :param query_id: A numeric query id returned at time of query submission
    :type query_id: str
    :param return_format: desired return format. valid types are json, csv or sdf
    :type return_format: str
    :return: query information
    :rtype: str

    >>> get_results('595535', 'csv')
    >>> get_results('595535', 'json')
    >>> get_results('595535', 'sdf')
    """
    endpoint = '%s/queries/%s.%s' % (url, query_id, return_format)
    content_type = {"Content-Type": "application/%s" % return_format}
    response = requests.get(endpoint, headers=content_type)
    response.raise_for_status()
    return response.text
def get_entity(inchikey, return_format="json"):
    """Fetch the classification results for a previously queried structure,
    identified by its InChIKey.

    :param inchikey: An InChIKey for a previously calculated chemical structure
    :type inchikey: str
    :param return_format: desired return format. valid types are json, csv or sdf
    :type return_format: str
    :return: query information
    :rtype: str

    >>> get_entity("ATUOYWHBWRKTHZ-UHFFFAOYSA-N", 'csv')
    >>> get_entity("ATUOYWHBWRKTHZ-UHFFFAOYSA-N", 'json')
    >>> get_entity("ATUOYWHBWRKTHZ-UHFFFAOYSA-N", 'sdf')
    """
    # Accept keys with or without the conventional "InChIKey=" prefix.
    bare_key = inchikey.replace('InChIKey=', '')
    endpoint = '%s/entities/%s.%s' % (url, bare_key, return_format)
    content_type = {"Content-Type": "application/%s" % return_format}
    response = requests.get(endpoint, headers=content_type)
    response.raise_for_status()
    return response.text
def get_chemont_node(chemontid):
    """Return data for the TaxNode with ID chemontid.

    :param chemontid: the ChemOnt ID of the entity.
    :type chemontid: str
    :return: The classification results for the entity as json.
    :rtype: str

    >>> get_chemont_node('CHEMONTID:0004253')
    """
    # The web service addresses tax nodes as e.g. "C0004253".
    node_id = chemontid.replace("CHEMONTID:", "C")
    response = requests.get('%s/tax_nodes/%s.json' % (url, node_id),
                            headers={"Content-Type": "application/json"})
    response.raise_for_status()
    return response.text
def tabular_query(inpath, structure_key, dialect='excel', outpath=None,
                  outfields=('taxonomy', 'description', 'substituents')):
    """Given a path to a compound set in tabular form (comma or tab delimited)
    annotate all compounds and write results to an expanded table.

    :param inpath: path to compound file to be annotated
    :type inpath: str
    :param structure_key: column heading which contains the compounds InChIKey
        or SMILES
    :type structure_key: str
    :param dialect: dialect for parsing table (generally 'excel' for csv,
        'excel-tab' for tsv)
    :type dialect: str
    :param outpath: Path to desired output location
    :type outpath: str
    :param outfields: Fields to append to table from ClassyFire output
    :type outfields: tuple(string)

    >>> tabular_query('/tabulated_data.tsv', 'structure', 'excel-tab')
    """
    tax_fields = ('kingdom', 'superclass', 'class', 'subclass')
    query_ids = []
    if not outpath:
        outpath = _prevent_overwrite(inpath)
    # BUG FIX: 'rU' (universal-newline) mode was removed in Python 3.11;
    # plain text mode has the same newline handling on Python 3. A context
    # manager also guarantees the file is closed on error.
    with open(inpath, 'r') as infile:
        # Submit the structures in chunks of `chunk_size` compounds.
        comps = []
        for line in csv.DictReader(infile, dialect=dialect):
            comps.append(line[structure_key])
            if not len(comps) % chunk_size:
                query_ids.append(structure_query('\\n'.join(comps)))
                comps = []
        if comps:
            query_ids.append(structure_query('\\n'.join(comps)))
        print('%s queries submitted to ClassyFire API' % len(query_ids))
        i = 0
        # Re-read the input from the top so rows can be matched to results.
        infile.seek(0)
        with open(outpath, 'w') as outfile:
            reader = csv.DictReader(infile, dialect=dialect)
            writer = csv.DictWriter(outfile, reader.fieldnames+list(outfields),
                                    dialect=dialect)
            writer.writeheader()
            # Poll each query until done, copying rows and appending results.
            while i < len(query_ids):
                result = json.loads(get_results(query_ids[i]))
                if result["classification_status"] == "Done":
                    for j, line in enumerate(reader):
                        if result['entities'] and str(j+1) == result['entities'][0]['identifier'].split('-')[1]:
                            hit = result['entities'].pop(0)
                            if 'taxonomy' in outfields:
                                hit['taxonomy'] = ";".join(
                                    ['%s:%s' % (hit[x]['name'], hit[x]['chemont_id'])
                                     for x in tax_fields if hit[x]])
                            for field in outfields:
                                if isinstance(hit[field], list):
                                    line[field] = ';'.join(hit[field])
                                else:
                                    line[field] = hit[field]
                        writer.writerow(line)
                    i += 1
                else:
                    print("%s percent complete" % round(i/len(query_ids)*100))
                    time.sleep(sleep_interval)
def sdf_query(inpath, outpath=None):
    """Given a path to a compound set in a sdf file, annotate all compounds
    and write results as attributes in a sdf file.

    :param inpath: path to compound file to be annotated
    :type inpath: str
    :param outpath: Path to desired output location
    :type outpath: str

    >>> sdf_query('/sdf_data.sdf')
    """
    from rdkit.Chem import AllChem
    query_ids = []
    if not outpath:
        outpath = _prevent_overwrite(inpath)
    # Submit the structures in chunks of `chunk_size` compounds.
    comps = []
    for mol in AllChem.SDMolSupplier(inpath):
        if mol:
            comps.append(AllChem.MolToSmiles(mol))
            # BUG FIX: the chunk was previously joined with '/n' — a typo
            # for the '\\n' escape used everywhere else in this module,
            # which silently fused compounds into one invalid structure.
            if not len(comps) % chunk_size:
                query_ids.append(structure_query('\\n'.join(comps)))
                comps = []
    if comps:
        query_ids.append(structure_query('\\n'.join(comps)))
    print('%s queries submitted to ClassyFire API' % len(query_ids))
    i = 0
    with io.open(outpath, 'w', encoding="utf-8") as outfile:
        # Poll each query until done, appending its sdf output to the file.
        while i < len(query_ids):
            result = json.loads(get_results(query_ids[i]))
            if result["classification_status"] == "Done":
                outfile.write(get_results(query_ids[i], return_format='sdf'))
                i += 1
            else:
                print("%s percent complete" % round(i / len(query_ids) * 100))
                time.sleep(sleep_interval)
def _prevent_overwrite(write_path, suffix='_annotated'):
"""Prevents overwrite of existing output files by appending a suffix when
needed
:param write_path: potential write path
:type write_path: string
:return:
:rtype:
"""
while os.path.exists(write_path):
sp = write_path.split('.')
if len(sp) > 1:
sp[-2] += suffix
write_path = '.'.join(sp)
else:
write_path += suffix
return write_path
| {
"repo_name": "JamesJeffryes/pyclassyfire",
"path": "pyclassyfire/client.py",
"copies": "1",
"size": "8963",
"license": "mit",
"hash": -5847511594425249000,
"line_mean": 35.1411290323,
"line_max": 108,
"alpha_frac": 0.5838446949,
"autogenerated": false,
"ratio": 3.780261493040911,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4864106187940911,
"avg_score": null,
"num_lines": null
} |
"""A client for the HackerSchool OAuth API.
The script aims to do stuff without having a web-server as the "middle man"
"""
from __future__ import absolute_import, print_function
import getpass
import re
from requests import get, post, Session
# OAuth client credentials issued when registering this client with the
# Hacker School (Recurse Center) site; fill these in before use.
HS_ID = ''
HS_SECRET = ''
# The redirect url should match exactly with the url provided when
# registering the client, if one was provided.
# This URI is a special one, which prevents any redirects and puts the
# authorization code directly in the URI. This is only being used
# for demonstration purposes.
REDIRECT_URI = 'urn:ietf:wg:oauth:2.0:oob'
#### Auth server end-points
HS_BASE_URL = 'https://www.recurse.com'
HS_AUTHORIZE_URL= HS_BASE_URL + '/oauth/authorize'
HS_ACCESS_TOKEN_URL= HS_BASE_URL + '/oauth/token'
def get_hs_credentials():
    """ Get the credentials of the user.

    Returns a ``(username, password)`` pair read interactively.
    """
    # BUG FIX: ``raw_input`` does not exist on Python 3 (renamed ``input``);
    # pick whichever is available so the module works on both.
    try:
        prompt = raw_input  # Python 2
    except NameError:
        prompt = input  # Python 3
    username = prompt('HS Email: ').strip()
    password = getpass.getpass('HS Password: ').strip()
    return username, password
def get_authenticated_session(username=None, password=None):
    """ Return a session authenticated with the HS site.

    Prompts interactively for any credential that was not supplied.
    """
    if not (username and password):
        username, password = get_hs_credentials()
    return _authenticate(Session(), username, password)
def get_access_token(session=None, username=None, password=None):
    """ Get the access token from the server.

    Return a (access_token, refresh_token) pair.
    """
    if session is None:
        session = get_authenticated_session(username, password)
    # Ask the authorization server for a grant, pull the code out of the
    # resulting page, and exchange it for tokens.
    grant_response = _request_authorization_grant(session)
    auth_code = _get_code(grant_response.content)
    return _request_access_token(auth_code)
# Get all batches
def get_batches(access_token, reverse=False):
    """Return every batch known to the HS API, sorted by batch id."""
    # FIXME: move this code to the hs_oauth module, methods on the class that we
    # wish we had!
    raw_batches = request(access_token, HS_BASE_URL + '/api/v1/batches')
    return sorted(raw_batches, key=lambda batch: batch['id'], reverse=reverse)
# Get list of all users, given batch
def get_people_in_a_batch(batch_id, access_token):
    """Return the list of people in the batch with the given numeric id."""
    endpoint = HS_BASE_URL + '/api/v1/batches/%d/people' % batch_id
    return request(access_token, endpoint)
def request(access_token, resource):
    """ Client requests a protected resource.

    Performs a bearer-token authenticated GET and decodes the JSON body.
    """
    auth_header = {'Authorization': 'Bearer %s' % access_token}
    return get(resource, headers=auth_header).json()
#### Private protocol #########################################################
def _authenticate(session, username, password):
    """ Resource owner authenticates.

    Posts the login form (including the CSRF token scraped from the login
    page) and returns the now-authenticated session.
    """
    data = {
        'email': username,
        'password': password,
        'commit': 'Log+in',
        'authenticity_token': _get_authenticity_token(session),
    }
    response = session.post('https://www.recurse.com/sessions', data=data)
    # BUG FIX: use ``response.text`` rather than ``response.content`` — on
    # Python 3 ``.content`` is ``bytes`` and a ``str in bytes`` test raises
    # TypeError instead of detecting the failed login.
    assert 'Your email or password is incorrect' not in response.text, 'Login failed'
    return session
def _authorize_client(session):
    """ Emulates the resource owner authorizing the client.

    This function is essentially clicking on the "authorize" button
    shown on the familiar "do you want to allow bogus-application to
    use your hackerschool data?" page.
    """
    if not (HS_ID and HS_SECRET):
        raise ValueError('Need a valid HS client ID and secret.')
    payload = {
        'client_id': HS_ID,
        'response_type': 'code',
        'redirect_uri': REDIRECT_URI,
    }
    return session.post(HS_AUTHORIZE_URL, data=payload)
def _get_authenticity_token(session):
    """ Parse the login page to get the CSRF authenticity token. """
    response = session.get('https://www.recurse.com/login')
    meta_tags = re.findall('<meta\s*?name="(.*?)"\s*content="(.*)".*/>', response.text)
    tokens = [content for name, content in meta_tags if name == 'csrf-token']
    if tokens:
        return tokens[0]
    raise ValueError('Could not find authenticity token')
def _get_code(html):
""" A client-internal method to get the authentication code.
The client code is shown in the html. Parse it out of the html.
"""
code, = re.findall('<code.*id="authorization_code">(.*)</code>', html)
return code
def _request_access_token(code):
    """ Client requests an access token using an authorization code.

    'grant_type', 'code', 'redirect_uri', 'client_id' and
    'client_secret' (if one was provided during registration) are all
    required parameters.
    """
    payload = {
        'grant_type': 'authorization_code',
        'code': code,
        'redirect_uri': REDIRECT_URI
    }
    token_data = post(
        HS_ACCESS_TOKEN_URL, data=payload, auth=(HS_ID, HS_SECRET)
    ).json()
    return token_data['access_token'], token_data['refresh_token']
def _request_authorization_grant(session):
    """ Client requests for authorization.

    NOTE: The 'client_id' and 'response_type' are required arguments,
    and the 'redirect_uri' is required, if it was specified when the
    client registered with the server. Also, to use this workflow the
    'response_type' MUST be 'code'.
    """
    payload = {
        'client_id': HS_ID,
        'response_type': 'code',
        'redirect_uri': REDIRECT_URI
    }
    # NOTE(review): the parameters are sent as the request body (``data``)
    # of a GET; presumably they should be query parameters (``params``) —
    # confirm against the server before changing.
    return session.get(HS_AUTHORIZE_URL, data=payload)
if __name__ == '__main__':
    # Manual smoke test: prompt for credentials, run the OAuth flow, and
    # fetch the caller's own profile.
    my_url = HS_BASE_URL + '/api/v1/people/me'
    # BUG FIX: ``raw_input`` does not exist on Python 3 (renamed ``input``);
    # pick whichever is available so the script runs on both.
    try:
        _input = raw_input  # Python 2
    except NameError:
        _input = input  # Python 3
    username = _input('HS Username: ').strip()
    password = getpass.getpass('HS Password: ')
    access_token, _ = get_access_token(username=username, password=password)
    print(request(access_token, my_url))
| {
"repo_name": "punchagan/hs-twitter-lists",
"path": "hs_oauth.py",
"copies": "1",
"size": "5631",
"license": "unlicense",
"hash": -2975960183592166400,
"line_mean": 27.155,
"line_max": 88,
"alpha_frac": 0.6515716569,
"autogenerated": false,
"ratio": 3.754,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9898553976578992,
"avg_score": 0.0014035360642014637,
"num_lines": 200
} |
"""A client for the REST API of cdstar instances."""
import logging
import json
import requests
from pycdstar import resource
from pycdstar.config import Config
from pycdstar.exception import CdstarError
log = logging.getLogger(__name__)
class Cdstar(object):
    """The API client.

    >>> api = Cdstar(service_url='http://example.org', user='user', password='pwd')
    >>> obj = api.get_object()
    """
    def __init__(self, cfg=None, service_url=None, user=None, password=None):
        """
        Initialize a new client object.

        :param cfg: A `pycdstar.config.Config` object or `None`.
        :param service_url: The base URL of the cdstar service.
        :param user: user name for HTTP basic auth.
        :param password: password for HTTP basic auth.
        :return:
        """
        self.cfg = cfg or Config()
        # Explicit arguments win; the config supplies fallbacks.
        self.service_url = service_url or self.cfg.get('service', 'url')
        user = user or self.cfg.get('service', 'user', default=None)
        password = password or self.cfg.get('service', 'password', default=None)
        self.session = requests.Session()
        if user and password:
            self.session.auth = (user, password)

    def url(self, obj):
        """Join the service base URL with a resource's path (or a raw path)."""
        res = self.service_url
        if res.endswith('/'):
            res = res[:-1]
        return res + getattr(obj, 'path', obj)

    def _req(self, path, method='get', json=True, assert_status=200, **kw):
        """Make a request to the API of an cdstar instance.

        :param path: HTTP path.
        :param method: HTTP method.
        :param json: Flag signalling whether the response should be treated as JSON.
        :param assert_status: Expected HTTP response status of a successful request.
        :param kw: Additional keyword parameters will be handed through to the \
        appropriate function of the requests library.
        :return: The return value of the function of the requests library or a decoded \
        JSON object/array.
        """
        method = getattr(self.session, method.lower())
        res = method(self.url(path), **kw)
        status_code = res.status_code
        if json:
            try:
                res = res.json()
            except ValueError:
                log.error(res.text[:1000])
                raise
        if assert_status:
            if not isinstance(assert_status, (list, tuple)):
                assert_status = [assert_status]
            if status_code not in assert_status:
                log.error(
                    'got HTTP %s, expected HTTP %s' % (status_code, assert_status))
                log.error(res.text[:1000] if hasattr(res, 'text') else res)
                raise CdstarError('Unexpected HTTP status code', res, status_code)
        return res

    def get_object(self, uid=None):
        """
        Retrieve an existing or newly created object.

        :param uid: UID of an existing object or `None` to create a new object.
        :return: `pycdstar.resource.Object` instance.
        """
        return resource.Object(self, uid)

    # def get_collection(self, uid=None):
    #    return resource.Object(self, uid, type='collection')

    def search(self, query, limit=15, offset=0, index=None):
        """
        Query the search service.

        :param query: The query.
        :param limit: The maximal number of results to return (at most 500).
        :param offset: Use to page through big search result sets.
        :param index: Name of the index to search in (metadata|fulltext) or `None`.
        :raises ValueError: if *index* is not one of the known index names.
        :raises TypeError: if *query* is neither a string nor a dict.
        :return:
        """
        params = dict(limit=limit, offset=offset)
        if index:
            # ROBUSTNESS FIX: validate with a real exception — the previous
            # ``assert`` would be silently stripped under ``python -O``.
            if index not in ('metadata', 'fulltext'):
                raise ValueError('index must be "metadata" or "fulltext"')
            params['indexselection'] = index
        if isinstance(query, str):
            query = {"query_string": {"query": query}}
        # elif isinstance(query, ElasticQuery):
        #     query = query.dict()
        if not isinstance(query, dict):
            raise TypeError('query must be a str or a dict')
        return resource.SearchResults(self, self._req(
            '/search/',
            method='post',
            params=params,
            headers={'content-type': 'application/json'},
            data=json.dumps(query)))

    # def landing(self):
    #    pass

    # def accesscontrol(self):
    #    pass

    # def dariah(self):
    #    pass
| {
"repo_name": "clld/pycdstar",
"path": "src/pycdstar/api.py",
"copies": "1",
"size": "4290",
"license": "apache-2.0",
"hash": 2074182113276864500,
"line_mean": 34.1639344262,
"line_max": 88,
"alpha_frac": 0.5878787879,
"autogenerated": false,
"ratio": 4.128970163618864,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5216848951518864,
"avg_score": null,
"num_lines": null
} |
"""A client for the REST API of imeji instances."""
import logging
from collections import OrderedDict
import requests
from six import string_types
from pyimeji import resource
from pyimeji.config import Config
log = logging.getLogger(__name__)
class ImejiError(Exception):
    """Raised for unexpected responses from an imeji server."""

    def __init__(self, message, error):
        super(ImejiError, self).__init__(message)
        # ``error`` may be the decoded JSON response (a dict carrying an
        # 'error' key) or any other diagnostic object; keep the payload.
        if isinstance(error, dict):
            self.error = error.get('error')
        else:
            self.error = error
class GET(object):
    """Handle GET requests.

    This includes requests

    - to retrieve single objects,
    - to fetch lists of object references (which are returned as `OrderedDict` \
      mapping object `id` to additional metadata present in the response).
    """

    def __init__(self, api, name):
        """Initialize a handler.

        :param api: An Imeji API instance.
        :param name: Name specifying the kind of object(s) to retrieve. We check \
        whether this name has a plural "s" to determine if a list is to be retrieved.
        """
        self._list = name.endswith('s')
        singular = name[:-1] if self._list else name
        self.rsc = getattr(resource, singular.capitalize())
        self.api = api
        self.name = name
        # The REST path is always the plural form.
        self.path = name if self._list else name + 's'

    def __call__(self, id='', **kw):
        """Calling the handler initiates an HTTP request to the imeji server.

        :param id: If a single object is to be retrieved it must be specified by id.
        :return: An OrderedDict mapping id to additional metadata for lists, a \
        :py:class:`pyimeji.resource.Resource` instance for single objects.
        """
        if not id:
            if not self._list:
                raise ValueError('no id given')
            suffix = ''
        else:
            suffix = '/' + id
        res = self.api._req('/%s%s' % (self.path, suffix), params=kw)
        if self._list:
            return OrderedDict((d['id'], d) for d in res)
        return self.rsc(res, self.api)
class Imeji(object):
    """The client.

    >>> api = Imeji(service_url='http://demo.imeji.org/imeji/')
    >>> collection_id = list(api.collections().keys())[0]
    >>> collection = api.collection(collection_id)
    >>> collection = api.create('collection', title='the new collection')
    >>> item = collection.add_item(fetchUrl='http://example.org')
    >>> item.delete()
    """

    def __init__(self, cfg=None, service_url=None):
        """Set up configuration and an (optionally authenticated) session."""
        self.cfg = cfg or Config()
        self.service_url = service_url or self.cfg.get('service', 'url')
        credentials = (
            self.cfg.get('service', 'user', default=None),
            self.cfg.get('service', 'password', default=None),
        )
        self.session = requests.Session()
        if all(credentials):
            self.session.auth = credentials

    def _req(self, path, method='get', json=True, assert_status=200, **kw):
        """Make a request to the API of an imeji instance.

        :param path: HTTP path.
        :param method: HTTP method.
        :param json: Flag signalling whether the response should be treated as JSON.
        :param assert_status: Expected HTTP response status of a successful request.
        :param kw: Additional keyword parameters will be handed through to the \
        appropriate function of the requests library.
        :return: The return value of the function of the requests library or a \
        decoded JSON object/array.
        """
        requester = getattr(self.session, method.lower())
        res = requester(self.service_url + '/rest' + path, **kw)
        status_code = res.status_code
        if json:
            try:
                res = res.json()
            except ValueError:  # pragma: no cover
                log.error(res.text[:1000])
                raise
        if assert_status and status_code != assert_status:
            log.error(
                'got HTTP %s, expected HTTP %s' % (status_code, assert_status))
            log.error(res.text[:1000] if hasattr(res, 'text') else res)
            raise ImejiError('Unexpected HTTP status code', res)
        return res

    def __getattr__(self, name):
        """Names of resource classes are accepted and resolved as dynamic attribute names.

        This allows convenient retrieval of resources as api.<resource-class>(id=<id>),
        or api.<resource-class>s(q='x').
        """
        return GET(self, name)

    def create(self, rsc, **kw):
        """Create *rsc* on the server; *rsc* may be a resource instance or the
        name of a resource class (keyword args then initialize the instance)."""
        if isinstance(rsc, string_types):
            factory = getattr(resource, rsc.capitalize())
            rsc = factory(kw, self)
        return rsc.save()

    def delete(self, rsc):
        """Delete the given resource on the server."""
        return rsc.delete()

    def update(self, rsc, **kw):
        """Set the given attributes on *rsc* and persist it."""
        for key, value in kw.items():
            setattr(rsc, key, value)
        return rsc.save()
| {
"repo_name": "xrotwang/pyimeji",
"path": "pyimeji/api.py",
"copies": "1",
"size": "4739",
"license": "apache-2.0",
"hash": 1911871962174483200,
"line_mean": 35.4538461538,
"line_max": 90,
"alpha_frac": 0.5986495041,
"autogenerated": false,
"ratio": 4.016101694915254,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5114751199015254,
"avg_score": null,
"num_lines": null
} |
"""A client implementation for the Spotnet slave server."""
import asyncio
import datetime
import uuid
from ..utils import WebSocketWrapper
DELAY = 0.1
class SpotnetSlaveClient(WebSocketWrapper):
    """A client for interacting with a Spotnet slave server.

    Attributes:
        name (str): The user friendly name of the slave node; ``None`` until
            credentials are sent via :meth:`send_credentials`.
        uuid (str): The unique identifier for the slave node.
        is_connected (bool): Boolean indicating whether or not this instance
            has been connected with Spotify credentials.
        is_paused (bool): Boolean indicating whether or not this slave
            instance is paused; instances are initialized with audio paused.
        track_queue (List[dict]): JSON-like dicts (with 'id' and 'uri' keys,
            per add_track/get_state) for each track queued on this slave
            node; index 0 is the track currently loaded on the slave.
        counted_votes_for_skip (int): The current number of votes towards
            skipping the currently playing song.
        first_connected_at (str): An isoformat string indicating the time
            that the slave first made connection with the master server.
    """

    def __init__(self, ws):
        super(SpotnetSlaveClient, self).__init__(ws)
        self.name = None
        # uuid1 is time/host based; only uniqueness matters here.
        self.uuid = str(uuid.uuid1())
        self.is_connected = False
        # Slaves always start life paused; playback begins only on request.
        self.is_paused = True
        self.track_queue = []
        self.counted_votes_for_skip = 0
        self.first_connected_at = datetime.datetime.now().isoformat()

    @asyncio.coroutine
    def send_credentials(self, name, username, password):
        """Coroutine to send credentials and node name to connect the slave.

        Args:
            name (str): The name to assign the slave node.
            username (str): The Spotify username to use in authentication.
            password (str): The Spotify password to use in authentication.
        """
        yield from self.send_json({
            'status': 'send-credentials',
            'sender': 'master',
            'data': {
                'username': username,
                'password': password
            }})
        # Only record the name once the credentials message has been sent.
        self.name = name

    @asyncio.coroutine
    def send_pause(self):
        """Coroutine to tell the slave server to pause audio playback."""
        yield from self.send_json({
            'status': 'pause-audio',
            'sender': 'master'
        })

    @asyncio.coroutine
    def send_play(self):
        """Coroutine to tell the slave server to resume audio playback."""
        yield from self.send_json({
            'status': 'play-audio',
            'sender': 'master'
        })

    @asyncio.coroutine
    def add_track(self, track, position):
        """Coroutine to add a track to the mopidy tracklist.

        Args:
            track (dict): JSON-like dict with 'id' and 'uri' keys.
            position (str): Either 'current' or 'next'.
        """
        uri = track['uri']
        if position == 'current':
            if not self.track_queue:
                # Empty queue: the new track simply becomes the head.
                self.track_queue.append(track)
                yield from self._send_add_track(uri)
            elif self.is_paused:
                # Paused: prepend the new track (old head becomes "next") and
                # reload the slave's tracklist; playback stays paused.
                # NOTE(review): this prepends while the playing branch below
                # replaces the head -- confirm the asymmetry is intentional.
                self.track_queue = [track] + self.track_queue
                yield from self._send_clear_tracks()
                yield from self._send_add_track(uri)
            else:
                # Playing: replace the head and restart playback with the new
                # track. Ordering matters: pause -> clear -> add -> play.
                self.track_queue[0] = track
                yield from self._send_pause_audio()
                yield from self._send_clear_tracks()
                yield from self._send_add_track(uri)
                yield from self._send_play_audio()
        elif position == 'next':
            if not self.track_queue:
                # Nothing queued, so "next" degenerates to "current".
                self.track_queue.append(track)
                yield from self._send_add_track(uri)
            else:
                # Insert directly behind the currently loaded track.
                self.track_queue = (self.track_queue[:1] + [track] +
                                    self.track_queue[1:])

    @asyncio.coroutine
    def remove_track(self, position, from_transition=False):
        """Coroutine to remove a track from the mopidy tracklist.

        Args:
            position (int): The position in the queue to remove.
            from_transition (bool): Indicating if this call is resultant from
                the end of a track into another.
        """
        self.track_queue.pop(position)
        if position == 0:
            # The loaded (head) track was removed, so the slave's mopidy
            # tracklist must be rebuilt.
            if self.is_paused:
                yield from self._send_clear_tracks()
                if self.track_queue:
                    # send the next uri to be played (but we are still paused)
                    new_uri = self.track_queue[0]['uri']
                    yield from self._send_add_track(new_uri)
            else:
                # was playing a track
                if not self.track_queue:
                    # Queue drained: stop and remember that we are paused.
                    yield from self._send_pause_audio()
                    self.is_paused = True
                    yield from self._send_clear_tracks()
                else:
                    if from_transition:
                        # Track ended naturally; no need to pause first.
                        yield from self._send_stop_playback()
                    else:
                        yield from self._send_pause_audio()
                    yield from self._send_clear_tracks()
                    new_uri = self.track_queue[0]['uri']
                    yield from self._send_add_track(new_uri)
                    yield from self._send_play_audio()

    @asyncio.coroutine
    def _send_play_audio(self):
        """Coroutine to tell slave to play audio."""
        yield from asyncio.sleep(DELAY)
        yield from self.send_json({
            'status': 'play-audio',
            'sender': 'master'
        })

    @asyncio.coroutine
    def _send_pause_audio(self):
        """Coroutine to tell slave to pause audio."""
        yield from asyncio.sleep(DELAY)
        yield from self.send_json({
            'status': 'pause-audio',
            'sender': 'master'
        })

    @asyncio.coroutine
    def _send_stop_playback(self):
        """Coroutine to tell slave to stop playback."""
        yield from asyncio.sleep(DELAY)
        yield from self.send_json({
            'status': 'stop-playback',
            'sender': 'master'
        })

    @asyncio.coroutine
    def _send_clear_tracks(self):
        """Coroutine to tell slave to clear tracks."""
        yield from asyncio.sleep(DELAY)
        yield from self.send_json({
            'status': 'clear-tracks',
            'sender': 'master'
        })

    @asyncio.coroutine
    def _send_add_track(self, uri):
        """Coroutine to tell slave to add a track by uri."""
        yield from asyncio.sleep(DELAY)
        yield from self.send_json({
            'status': 'add-track',
            'sender': 'master',
            'data': {
                'uri': uri
            }
        })

    def get_state(self):
        """Return the state of this slave as a JSON-like dict.

        Returned dicts will have the form::

            {
                'uuid': str,
                'name': str,
                'is-connected': bool,
                'is-paused': bool,
                'counted-votes-for-skip': int,
                'first-connected-at': string,
                'track-queue': [
                    {
                        'id': str,
                        'uri': str
                    },
                    ...
                ]
            }
        """
        return {
            'uuid': self.uuid,
            'name': self.name,
            'is-connected': self.is_connected,
            'is-paused': self.is_paused,
            'counted-votes-for-skip': self.counted_votes_for_skip,
            'first-connected-at': self.first_connected_at,
            'track-queue': self.track_queue
        }
| {
"repo_name": "welchbj/spotnet",
"path": "backend/master/slave_client.py",
"copies": "1",
"size": "7663",
"license": "mit",
"hash": 8670089353673154000,
"line_mean": 32.7577092511,
"line_max": 78,
"alpha_frac": 0.5289051285,
"autogenerated": false,
"ratio": 4.424364896073903,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5453270024573903,
"avg_score": null,
"num_lines": null
} |
"""A client library for interacting with Apache Knox for Hadoop REST services
See:
https://github.com/alexmilowski/python-hadoop-rest-api
"""
# Always prefer setuptools over distutils
from setuptools import setup, find_packages
long_description = """
A client library for interacting with Apache Knox for Hadoop REST services
See:
https://github.com/alexmilowski/python-hadoop-rest-api
"""
import re

# Single-source the version: parse __version__ out of pyox/__init__.py
# instead of importing the package (which could pull in dependencies).
vdir = __file__[0:__file__.rfind('/')]+'/' if __file__.rfind('/')>=0 else ''
version_file = vdir + 'pyox/__init__.py'
with open(version_file, 'rt') as vfile:
    verstrline = vfile.read()
VSRE = r"^__version__ = ['\"]([^'\"]*)['\"]"
mo = re.search(VSRE, verstrline, re.M)
if mo:
    version_info = mo.group(1)
else:
    # BUG FIX: the original referenced the undefined name VERSIONFILE here,
    # which raised NameError instead of the intended RuntimeError.
    raise RuntimeError("Unable to find version string in %s." % (version_file,))
# Package definition for pyox. Field semantics are documented at
# https://packaging.python.org/ ; all values are single-sourced above.
setup(
    name='pyox',
    # Version complies with PEP 440 and is parsed from pyox/__init__.py.
    version=version_info,
    description='A client library for Apache Knox',
    long_description=long_description,
    url='https://github.com/alexmilowski/python-hadoop-rest-api',
    author='Alex Miłowski',
    author_email='alex@milowski.com',
    license='Apache 2.0',
    # Full list: https://pypi.python.org/pypi?%3Aaction=list_classifiers
    classifiers=[
        'Development Status :: 4 - Beta',
        'Intended Audience :: Developers',
        'Topic :: Software Development :: Libraries',
        'License :: OSI Approved :: Apache Software License',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.3',
        'Programming Language :: Python :: 3.4',
        'Programming Language :: Python :: 3.5',
    ],
    keywords='',
    packages=['pyox', 'pyox.apps.monitor', 'pyox.apps.tracker'],
    # Run-time dependencies installed by pip alongside the package.
    install_requires=['requests', 'flask', 'redis', 'cryptography'],
    extras_require={
    },
    include_package_data=True,
    package_data={
    },
    data_files=[],
    entry_points={
    },
)
| {
"repo_name": "alexmilowski/python-hadoop-rest-api",
"path": "setup.py",
"copies": "1",
"size": "4118",
"license": "apache-2.0",
"hash": 7308491691477549000,
"line_mean": 33.8898305085,
"line_max": 94,
"alpha_frac": 0.663832888,
"autogenerated": false,
"ratio": 3.9172216936251187,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5081054581625118,
"avg_score": null,
"num_lines": null
} |
# A client that abstracts shard placement.
import logging
import os
import time

from net import gorpc
from vtdb import cursor
from vtdb import dbapi
from vtdb import dbexceptions
from vtdb import keyspace
from vtdb import tablet3
from zk import zkns_query
from zk import zkocc
# zkocc_addrs - bootstrap addresses for resolving other endpoints
# local_cell - name of the local cell
# keyspace_name - as in /zk/local/vt/ns/<keyspace_name>
# db_type - master/replica/rdonly
def connect(zkocc_addrs, local_cell, keyspace_name, db_type, use_streaming, timeout, user, password, dbname):
  """Create a ShardedClient backed by a freshly dialed zkocc connection."""
  resolver = zkocc.ZkOccConnection(zkocc_addrs, local_cell, timeout)
  return ShardedClient(resolver, keyspace_name, db_type, use_streaming, timeout, user, password, dbname)
# Track the connections and statement issues with a transaction context.
# This is fundamentally a misnomer since we do not support transactions between shards.
#
class Txn(object):
  """Tracks the connections touched and statements issued in one transaction.

  This is fundamentally a misnomer since transactions between shards are not
  supported; it simply records per-shard work for commit/rollback.
  """

  def __init__(self):
    # BUG FIX: these were previously class attributes, so every Txn instance
    # (and therefore every transaction) shared the same two mutable lists.
    self.conns = []
    self.stmts = []
class ShardedClient(object):
  """Routes queries for one keyspace to per-shard tablet connections.

  Shard endpoints are resolved through zkns and dialed lazily; a connection
  that hits an OperationalError is torn down so the next call re-dials.
  Transactions are tracked per shard via Txn and are NOT atomic across
  shards.
  """

  zkocc_client = None
  keyspace_name = ''
  keyspace = None
  db_type = ''
  db_name = ''
  use_streaming = False
  user = ''
  password = ''
  timeout = 0
  # Number of dial+execute attempts before declaring a shard unreachable.
  max_attempts = 2
  # Seconds slept before retrying after a RetryError.
  reconnect_delay = 0.005

  cursorclass = cursor.ShardedCursor

  def __init__(self, zkocc_client, keyspace_name, db_type, use_streaming, timeout, user, password, dbname):
    self.zkocc_client = zkocc_client
    self.keyspace_name = keyspace_name
    self.db_type = db_type
    self.use_streaming = use_streaming
    self.timeout = timeout
    self.user = user
    self.password = password
    self.dbname = dbname
    self.txn = None
    self.keyspace = keyspace.read_keyspace(self.zkocc_client, keyspace_name)
    # One lazily-dialed connection slot per shard.
    self.conns = [None] * self.keyspace.shard_count
    self._streaming_shard_list = []  # shards with active streams

  def _dial_shard(self, shard_idx):
    """Dial any available tablet for shard_idx and cache the connection.

    Raises:
      dbexceptions.OperationalError: if no tablet for the shard can be
        dialed.
    """
    shard_name = self.keyspace.shard_names[shard_idx]
    name_path = os.path.join(keyspace.ZK_KEYSPACE_PATH, self.keyspace_name, shard_name, self.db_type)
    # BUG FIX: previously referenced the undefined global name 'zkocc_client'
    # instead of the instance attribute.
    addrs = zkns_query.lookup_name(self.zkocc_client, name_path)
    for addr in addrs:
      tablet_conn = tablet3.TabletConnection(addr, self.keyspace_name, shard_name, self.timeout, self.user, self.password)
      try:
        tablet_conn.dial()
        self.conns[shard_idx] = tablet_conn
        return tablet_conn
      except dbexceptions.OperationalError:
        # FIXME(msolomon) Implement retry deadline.
        pass
    raise dbexceptions.OperationalError('no tablet available for shard', name_path)

  def begin(self):
    """Open a transaction context; the per-shard begins are deferred."""
    if self.txn is not None:
      raise dbexceptions.ProgrammingError('nested transaction are not supported')
    self.txn = Txn()

  # Fast-fail on the first commit, but otherwise raise a PartialCommitError.
  # NOTE: Multi-db commits should be rare, so no need to do this in parallel yet.
  def commit(self):
    try:
      if self.txn:
        err_conns = []
        for i, conn in enumerate(self.txn.conns):
          if conn.is_closed():
            err_conns.append(conn)
        if err_conns:
          # BUG FIX: this previously referenced the misspelled name
          # 'err_conn', which raised NameError instead.
          raise dbexceptions.OperationalError('tablets offline', [str(x) for x in err_conns])
        for i, conn in enumerate(self.txn.conns):
          try:
            conn.commit()
          except dbexceptions.DatabaseError as e:
            err_conns.append(conn)
            # If our first commit fails, just raise the error after rolling back
            # everything else.
            if i == 0:
              try:
                self.rollback()
              except dbexceptions.DatabaseError:
                pass
              raise e
        if err_conns and len(err_conns) != len(self.txn.conns):
          raise dbexceptions.PartialCommitError(err_conns)
    except dbexceptions.DatabaseError:
      # If a DatabaseError occurred, scan for dead connections and remove
      # them so they will be recreated.
      for i, conn in enumerate(self.conns):
        # BUG FIX: never-dialed slots are None; calling is_closed() on them
        # raised AttributeError during error cleanup.
        if conn is not None and conn.is_closed():
          self.conns[i] = None
      # BUG FIX: the error was previously swallowed here, silently turning a
      # failed commit into a no-op; re-raise so callers see the failure.
      raise
    finally:
      self.txn = None

  # NOTE: Multi-db rollbacks should be rare, so no need to do this in parallel yet.
  def rollback(self):
    try:
      if self.txn:
        for conn in self.txn.conns:
          try:
            conn.rollback()
          except dbexceptions.DatabaseError:
            # Best-effort: keep rolling back the remaining shards.
            logging.warning('rollback failed: %s', conn)
    finally:
      self.txn = None

  def _execute_on_shards(self, query, bind_vars, shard_idx_list):
    """Execute one query on every shard in shard_idx_list (unimplemented)."""
    # FIXME(msolomon) This needs to be parallel, without threads?
    raise NotImplementedError

  def _stream_execute_on_shards(self, query, bind_vars, shard_idx_list):
    """Start a streaming query on the given shards (unimplemented)."""
    self._streaming_shard_list = shard_idx_list
    # FIXME(msolomon) This needs to be parallel, without threads?
    raise NotImplementedError

  def _stream_next_on_shards(self, shard_idx_list=None):
    """Fetch the next streamed rows from the given shards (unimplemented)."""
    if shard_idx_list is None:
      # BUG FIX: previously read the nonexistent attribute
      # 'streaming_shard_list' (missing the leading underscore).
      shard_idx_list = self._streaming_shard_list
    # FIXME(msolomon) This needs to be parallel, without threads?
    raise NotImplementedError

  def _execute_for_keyspace_ids(self, query, bind_vars, keyspace_id_list):
    """Execute on the unique set of shards covering keyspace_id_list."""
    shard_idx_list = list(set([self.keyspace.keyspace_id_to_shard_index(k)
                               for k in keyspace_id_list]))
    return self._execute_on_shards(query, bind_vars, shard_idx_list)

  def _stream_execute_for_keyspace_ids(self, query, bind_vars, keyspace_id_list):
    """Streaming variant of _execute_for_keyspace_ids."""
    shard_idx_list = list(set([self.keyspace.keyspace_id_to_shard_index(k)
                               for k in keyspace_id_list]))
    return self._stream_execute_on_shards(query, bind_vars, shard_idx_list)

  def _begin(self, shard_idx):
    """Begin a transaction on one shard, redialing on transient failure."""
    for x in xrange(self.max_attempts):
      try:
        conn = self.conns[shard_idx]
        if conn is None:
          conn = self._dial_shard(shard_idx)
        return conn.begin()
      except dbexceptions.OperationalError as e:
        # Tear down regardless of the precise failure.
        self.conns[shard_idx] = None
        if isinstance(e, tablet3.TimeoutError):
          # On any timeout let the error bubble up and just redial next time.
          raise e
        if isinstance(e, tablet3.RetryError):
          # Give the tablet a moment to restart itself. This isn't
          # strictly necessary since there is a significant chance you
          # will end up talking to another host.
          time.sleep(self.reconnect_delay)
    raise dbexceptions.OperationalError('tablets unreachable', self.keyspace_name, shard_idx, self.db_type)

  def _execute_on_shard(self, query, bind_vars, shard_idx):
    """Execute one query on one shard, joining any open transaction."""
    query, bind_vars = dbapi.prepare_query_bind_vars(query, bind_vars)
    for x in xrange(self.max_attempts):
      try:
        conn = self.conns[shard_idx]
        if conn is None:
          conn = self._dial_shard(shard_idx)
        if self.txn:
          self.txn.stmts.append(query)
          # BUG FIX: the two statements below previously used the misspelled
          # attribute 'self.txt', which raised AttributeError inside any
          # transaction.
          if conn not in self.txn.conns:
            # Defer the begin until we actually issue a statement.
            conn.begin()
            self.txn.conns.append(conn)
        return conn._execute(query, bind_vars)
      except dbexceptions.OperationalError as e:
        # Tear down regardless of the precise failure.
        self.conns[shard_idx] = None
        if isinstance(e, tablet3.TimeoutError):
          # On any timeout let the error bubble up and just redial next time.
          raise e
        if isinstance(e, tablet3.RetryError):
          # Give the tablet a moment to restart itself. This isn't
          # strictly necessary since there is a significant chance you
          # will end up talking to another host.
          time.sleep(self.reconnect_delay)
    raise dbexceptions.OperationalError('tablets unreachable', self.keyspace_name, shard_idx, self.db_type)

  def _execute_batch(self, query_list, bind_vars_list, shard_idx):
    """Execute a list of queries on one shard with the same retry policy."""
    new_query_list = []
    new_bind_vars_list = []
    for query, bind_vars in zip(query_list, bind_vars_list):
      query, bind_vars = dbapi.prepare_query_bind_vars(query, bind_vars)
      new_query_list.append(query)
      new_bind_vars_list.append(bind_vars)
    query_list = new_query_list
    bind_vars_list = new_bind_vars_list
    for x in xrange(self.max_attempts):
      try:
        conn = self.conns[shard_idx]
        if conn is None:
          conn = self._dial_shard(shard_idx)
        return conn._execute_batch(query_list, bind_vars_list)
      except dbexceptions.OperationalError as e:
        # Tear down regardless of the precise failure.
        self.conns[shard_idx] = None
        if isinstance(e, tablet3.TimeoutError):
          # On any timeout let the error bubble up and just redial next time.
          raise e
        if isinstance(e, tablet3.RetryError):
          # Give the tablet a moment to restart itself. This isn't
          # strictly necessary since there is a significant chance you
          # will end up talking to another host.
          time.sleep(self.reconnect_delay)
    raise dbexceptions.OperationalError('tablets unreachable', self.keyspace_name, shard_idx, self.db_type)

  def _stream_execute_on_shard(self, query, bind_vars, shard_idx):
    """Start a streaming query on one shard with the same retry policy."""
    query, bind_vars = dbapi.prepare_query_bind_vars(query, bind_vars)
    for x in xrange(self.max_attempts):
      try:
        conn = self.conns[shard_idx]
        if conn is None:
          conn = self._dial_shard(shard_idx)
        # BUG FIX: previously passed self as a spurious extra positional
        # argument to the connection's _stream_execute.
        return conn._stream_execute(query, bind_vars)
      except dbexceptions.OperationalError as e:
        # Tear down regardless of the precise failure.
        self.conns[shard_idx] = None
        if isinstance(e, tablet3.TimeoutError):
          # On any timeout let the error bubble up and just redial next time.
          raise e
        if isinstance(e, tablet3.RetryError):
          # Give the tablet a moment to restart itself. This isn't
          # strictly necessary since there is a significant chance you
          # will end up talking to another host.
          time.sleep(self.reconnect_delay)
    raise dbexceptions.OperationalError('tablets unreachable', self.keyspace_name, shard_idx, self.db_type)

  def _stream_next_on_shard(self, shard_idx):
    """Fetch the next streamed row from one shard."""
    # NOTE(msolomon) This action cannot be retried.
    try:
      conn = self.conns[shard_idx]
      if conn is None:
        conn = self._dial_shard(shard_idx)
      return conn._stream_next()
    except dbexceptions.OperationalError:
      # Tear down regardless of the precise failure.
      self.conns[shard_idx] = None
      raise

  def cursor(self, cursorclass=None):
    """Return a new cursor bound to this client."""
    return (cursorclass or self.cursorclass)(self)

  def __enter__(self):
    return self.cursor()

  def __exit__(self, exc, value, tb):
    # Roll back on exception, otherwise commit.
    if exc:
      self.rollback()
    else:
      self.commit()
| {
"repo_name": "CERN-Stage-3/vitess",
"path": "py/vtdb/sharded.py",
"copies": "4",
"size": "10824",
"license": "bsd-3-clause",
"hash": -7660292144189499000,
"line_mean": 36.7142857143,
"line_max": 122,
"alpha_frac": 0.6571507761,
"autogenerated": false,
"ratio": 3.7375690607734806,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.639471983687348,
"avg_score": null,
"num_lines": null
} |
"""A client that uses the local file system pretending to be Swift.
"""
"""
Copyright 2014 Gregory Holt
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from contextlib import contextmanager
from errno import EAGAIN
from fcntl import flock, LOCK_EX, LOCK_NB
from json import dumps, loads
from os import close as os_close, listdir, mkdir, open as os_open, O_CREAT, \
O_WRONLY, rename, rmdir, unlink
from os.path import exists, getsize, isdir, isfile, join as path_join, \
sep as path_sep
from sqlite3 import connect, Row
from StringIO import StringIO
from time import time
from uuid import uuid4
from swiftly.client.client import Client
from swiftly.client.utils import quote
try:
from eventlet import sleep
except ImportError:
from time import sleep
# Filesystem-unsafe (or reserved) characters paired with their two-character
# escape sequences. '_' is the escape lead-in, so it is listed first: encode
# must escape it before any sequence that *produces* an underscore.
SUBS = [
    ('_', '__'),
    ('.', '_.'),
    ('/', '_s'),
    ('\\', '_b'),
    (':', '_c'),
    ('*', '_S'),
    ("'", '_q'),
    ('"', '_d'),
    ('?', '_Q'),
    ('<', '_l'),
    ('>', '_g'),
    ('|', '_p')]
"""The list of strings in names to substitute for."""
# Note that _- is reserved for use as the start of internal data file names.
def _encode_name(name):
    """Return *name* with every character listed in SUBS escaped so the
    result is safe to use as a local file name."""
    for raw, escaped in SUBS:
        name = name.replace(raw, escaped)
    return name
def _decode_name(name):
    """Invert _encode_name, mapping escape sequences back to raw characters.

    BUG FIX: the original chained str.replace calls in SUBS order, so text
    produced by an earlier substitution could be re-matched by a later one
    (e.g. _encode_name('a_b') == 'a__b', which the old code decoded to
    'a\\b' instead of 'a_b'). A single left-to-right scan decodes each
    two-character escape exactly once.
    """
    # Map the second character of each escape sequence back to the raw char.
    reverse = dict((escaped[1], raw) for raw, escaped in SUBS)
    chars = []
    i = 0
    while i < len(name):
        if name[i] == '_' and i + 1 < len(name) and name[i + 1] in reverse:
            chars.append(reverse[name[i + 1]])
            i += 2
        else:
            # Not an escape sequence; pass the character through unchanged.
            chars.append(name[i])
            i += 1
    return ''.join(chars)
@contextmanager
def lock_dir(path):
    """Hold an exclusive flock on the directory's '_-lock' file.

    Retries the non-blocking lock 100 times, 0.1s apart (roughly a 10s
    budget), then gives up. The lock file descriptor is always closed on
    exit, which releases the lock.
    """
    lock_path = path_join(path, '_-lock')
    fd = os_open(lock_path, O_WRONLY | O_CREAT, 0o0600)
    try:
        for _attempt in range(100):
            try:
                flock(fd, LOCK_EX | LOCK_NB)
                break
            except IOError as err:
                # EAGAIN means "still held by someone else"; anything else
                # is a real failure.
                if err.errno != EAGAIN:
                    raise
                sleep(0.1)
        else:
            raise Exception('Timeout 10s trying to get lock on %r' % lock_path)
        yield True
    finally:
        os_close(fd)
class LocalClient(Client):
"""A client that uses the local file system pretending to be Swift.
.. note::
This is a really early implementation and no-ops a lot of stuff.
With time it will become a more complete representation.
:param local_path: This is where the fake Swift will store its data.
Default: Current working directory.
:param chunk_size: Maximum size to read or write at one time.
:param verbose: Set to a ``func(msg, *args)`` that will be called
with debug messages. Constructing a string for output can be
done with msg % args.
:param verbose_id: Set to a string you wish verbose messages to
be prepended with; can help in identifying output when
multiple Clients are in use.
"""
def __init__(self, local_path=None, chunk_size=65536, verbose=None,
verbose_id=''):
super(LocalClient, self).__init__()
self.local_path = local_path.rstrip(path_sep) if local_path else '.'
self.chunk_size = chunk_size
if verbose:
self.verbose = lambda m, *a, **k: verbose(
self._verbose_id + m, *a, **k)
else:
self.verbose = lambda *a, **k: None
self.verbose_id = verbose_id
self._verbose_id = self.verbose_id
if self._verbose_id:
self._verbose_id += ' '
def request(self, method, path, contents, headers, decode_json=False,
stream=False, query=None, cdn=False):
"""
See :py:func:`swiftly.client.client.Client.request`
"""
if cdn:
raise Exception('CDN not yet supported with LocalClient')
if isinstance(contents, basestring):
contents = StringIO(contents)
if not headers:
headers = {}
if not query:
query = {}
rpath = path.lstrip('/')
if '/' in rpath:
container_name, object_name = rpath.split('/', 1)
else:
container_name = rpath
object_name = ''
if not container_name:
status, reason, hdrs, body = self._account(
method, contents, headers, stream, query, cdn)
elif not object_name:
status, reason, hdrs, body = self._container(
method, container_name, contents, headers, stream, query, cdn)
else:
status, reason, hdrs, body = self._object(
method, container_name, object_name, contents, headers, stream,
query, cdn)
if status and status // 100 != 5:
if not stream and decode_json and status // 100 == 2:
if body:
body = loads(body)
else:
body = None
return (status, reason, hdrs, body)
raise Exception('%s %s failed: %s %s' % (method, path, status, reason))
def _connect(self, db_path):
db = connect(db_path)
db.row_factory = Row
db.text_factory = str
db.executescript('''
PRAGMA synchronous = NORMAL;
PRAGMA count_changes = OFF;
PRAGMA temp_store = MEMORY;
PRAGMA journal_mode = DELETE;
''')
return db
    def _get_db(self):
        """Return a connection to the account db, creating it on first use.

        Creation is guarded by double-checked locking around lock_dir: the
        schema is built under a temp file name and rename()d into place so
        no reader can ever observe a half-initialized database. The
        triggers keep the account_entry / container_entry counters in sync
        with inserts, updates and deletes on their child tables.
        """
        db_path = path_join(self.local_path, '_-account.db')
        # Fast path: database already exists, no locking needed.
        if isfile(db_path):
            return self._connect(db_path)
        with lock_dir(self.local_path):
            # Re-check under the lock; another process may have won the race.
            if isfile(db_path):
                return self._connect(db_path)
            temp_path = path_join(self.local_path, '_-temp-account.db')
            db = self._connect(temp_path)
            db.executescript('''
                CREATE TABLE account_entry (
                    container_count INTEGER,
                    object_count INTEGER,
                    byte_count INTEGER);
                INSERT INTO account_entry (
                    container_count, object_count, byte_count)
                VALUES (0, 0, 0);
                CREATE TABLE container_entry (
                    container_name TEXT PRIMARY KEY,
                    object_count INTEGER,
                    byte_count INTEGER);
                CREATE TRIGGER container_insert
                    AFTER INSERT
                    ON container_entry
                BEGIN
                    UPDATE account_entry
                    SET container_count = container_count + 1,
                        object_count = object_count + new.object_count,
                        byte_count = byte_count + new.byte_count;
                END;
                CREATE TRIGGER container_update
                    AFTER UPDATE
                    ON container_entry
                BEGIN
                    UPDATE account_entry
                    SET object_count = object_count + (
                            new.object_count - old.object_count),
                        byte_count = byte_count + (
                            new.byte_count - old.byte_count);
                END;
                CREATE TRIGGER container_delete
                    AFTER DELETE
                    ON container_entry
                BEGIN
                    UPDATE account_entry
                    SET container_count = container_count - 1,
                        object_count = object_count - old.object_count,
                        byte_count = byte_count - old.byte_count;
                END;
                CREATE TABLE object_entry (
                    container_name TEXT,
                    object_name TEXT,
                    put_timestamp TEXT,
                    byte_count INTEGER);
                CREATE UNIQUE INDEX object_entry_primary_key
                    ON object_entry (container_name, object_name);
                CREATE TRIGGER object_insert
                    AFTER INSERT
                    ON object_entry
                BEGIN
                    UPDATE container_entry
                    SET object_count = object_count + 1,
                        byte_count = byte_count + new.byte_count
                    WHERE container_name = new.container_name;
                END;
                CREATE TRIGGER object_update
                    AFTER UPDATE
                    ON object_entry
                BEGIN
                    UPDATE container_entry
                    SET byte_count = byte_count + (
                            new.byte_count - old.byte_count)
                    WHERE container_name = new.container_name;
                END;
                CREATE TRIGGER object_delete
                    AFTER DELETE
                    ON object_entry
                BEGIN
                    UPDATE container_entry
                    SET object_count = object_count - 1,
                        byte_count = byte_count - old.byte_count
                    WHERE container_name = old.container_name;
                END;
            ''')
            db.commit()
            db.close()
            # Defensive re-check before publishing the new database file.
            if isfile(db_path):
                unlink(temp_path)
            else:
                rename(temp_path, db_path)
            return self._connect(db_path)
    def _account(self, method, contents, headers, stream, query, cdn):
        """Handle an account-level request (GET/HEAD container listing).

        Supports the Swift listing parameters prefix, delimiter, marker,
        end_marker and limit; other methods fall through with 503.
        Returns a (status, reason, hdrs, body) tuple.
        """
        if cdn:
            raise Exception('CDN not yet supported with LocalClient')
        # Defaults if no branch below produces a response.
        status = 503
        reason = 'Internal Server Error'
        hdrs = {}
        body = ''
        if method in ('GET', 'HEAD'):
            db = self._get_db()
            prefix = query.get('prefix')
            delimiter = query.get('delimiter')
            marker = query.get('marker')
            end_marker = query.get('end_marker')
            limit = int(query.get('limit') or 10000)
            if delimiter and not prefix:
                prefix = ''
            orig_marker = marker
            body = []
            done = False
            # Page through container_entry; each pass advances `marker`
            # (possibly skipping a whole delimiter-collapsed "directory").
            while len(body) < limit and not done:
                # NOTE: this local rebinds the `query` parameter to the SQL
                # text for the remainder of the loop.
                query = '''
                    SELECT container_name AS name, object_count AS count,
                           byte_count AS bytes
                    FROM container_entry
                '''
                query_args = []
                where = []
                if end_marker:
                    where.append('name < ?')
                    query_args.append(end_marker)
                if marker and marker >= prefix:
                    where.append('name > ?')
                    query_args.append(marker)
                elif prefix:
                    where.append('name >= ?')
                    query_args.append(prefix)
                if where:
                    query += ' WHERE ' + ' AND '.join(where)
                query += ' ORDER BY name LIMIT ? '
                query_args.append(limit - len(body))
                curs = db.execute(query, query_args)
                if prefix is None:
                    # No prefix/delimiter at all: one pass is enough.
                    body = [dict(r) for r in curs]
                    break
                if not delimiter:
                    if not prefix:
                        body = [dict(r) for r in curs]
                    else:
                        body = [
                            dict(r) for r in curs if r[0].startswith(prefix)]
                    break
                rowcount = 0
                for row in curs:
                    rowcount += 1
                    marker = name = row[0]
                    if len(body) >= limit or not name.startswith(prefix):
                        curs.close()
                        done = True
                        break
                    end = name.find(delimiter, len(prefix))
                    if end > 0:
                        # Collapse everything under this "directory": jump
                        # the marker past all names sharing the dir prefix.
                        marker = name[:end] + chr(ord(delimiter) + 1)
                        dir_name = name[:end + 1]
                        if dir_name != orig_marker:
                            body.append({'subdir': dir_name})
                        curs.close()
                        break
                    body.append(dict(row))
                if not rowcount:
                    # Query returned nothing more; listing is complete.
                    break
            status = 200
            reason = 'OK'
            body = dumps(body)
            hdrs['content-length'] = str(len(body))
            if method == 'HEAD':
                body = ''
            row = db.execute('''
                SELECT container_count, object_count, byte_count
                FROM account_entry
            ''').fetchone()
            # NOTE(review): these header values are set as ints, while
            # _container str()s its counterparts -- confirm intended.
            hdrs['x-account-container-count'] = row['container_count']
            hdrs['x-account-object-count'] = row['object_count']
            hdrs['x-account-bytes-used'] = row['byte_count']
            db.close()
        if stream:
            return status, reason, hdrs, StringIO(body)
        else:
            return status, reason, hdrs, body
    def _container(self, method, container_name, contents, headers, stream,
                   query, cdn):
        """Handle a container-level request (GET/HEAD/PUT/POST/DELETE).

        Containers are directories under local_path; object listings are
        built by scanning the directory, and the account db's
        container_entry rows are kept in step on PUT/DELETE.
        Returns a (status, reason, hdrs, body) tuple.
        """
        if cdn:
            raise Exception('CDN not yet supported with LocalClient')
        fs_container = _encode_name(container_name)
        # Defaults if no branch below produces a response.
        status = 503
        reason = 'Internal Server Error'
        hdrs = {}
        body = ''
        if method in ('GET', 'HEAD'):
            local_path = path_join(self.local_path, fs_container)
            if not isdir(local_path):
                status = 404
                reason = 'Not Found'
                body = ''
                hdrs['content-length'] = str(len(body))
            else:
                object_count = 0
                bytes_used = 0
                prefix = query.get('prefix')
                delimiter = query.get('delimiter')
                marker = query.get('marker')
                end_marker = query.get('end_marker')
                limit = query.get('limit')
                objects = {}
                # Scan the directory; counts/bytes cover every object, while
                # the listing itself honors prefix/delimiter.
                for item in listdir(local_path):
                    item_local_path = path_join(local_path, item)
                    if isfile(item_local_path):
                        object_name = _decode_name(item)
                        object_count += 1
                        object_size = getsize(item_local_path)
                        bytes_used += object_size
                        if prefix and not object_name.startswith(prefix):
                            continue
                        if delimiter:
                            # Collapse names past the delimiter into one
                            # pseudo-directory entry (dict key dedupes).
                            index = object_name.find(
                                delimiter, len(prefix) + 1 if prefix else 0)
                            if index >= 0:
                                object_name = object_name[:index + 1]
                        objects[object_name] = {
                            'name': object_name, 'bytes': object_size}
                objects = sorted(objects.itervalues(), key=lambda x: x['name'])
                if marker:
                    objects = [o for o in objects if o['name'] > marker]
                if end_marker:
                    objects = [o for o in objects if o['name'] < end_marker]
                if limit:
                    objects = objects[:int(limit)]
                status = 200
                reason = 'OK'
                # NOTE(review): {'subdir': o} wraps the whole entry dict;
                # Swift listings use {'subdir': <name string>} -- confirm
                # whether this should be {'subdir': o['name']}.
                body = dumps([
                    ({'subdir': o} if o['name'][-1] == delimiter else o)
                    for o in objects])
                hdrs['content-length'] = str(len(body))
                hdrs['x-container-object-count'] = str(object_count)
                hdrs['x-container-bytes-used'] = str(bytes_used)
                if method == 'HEAD':
                    body = ''
        elif method == 'PUT':
            fs_container_path = path_join(self.local_path, fs_container)
            if isdir(fs_container_path):
                status = 202
                reason = 'Accepted'
            else:
                db = self._get_db()
                with lock_dir(self.local_path):
                    if isdir(fs_container_path):
                        status = 202
                        reason = 'Accepted'
                    else:
                        mkdir(fs_container_path)
                        db.execute('''
                            INSERT INTO container_entry (
                                container_name, object_count, byte_count)
                            VALUES (?, 0, 0)
                        ''', (container_name,))
                        db.commit()
                db.close()
                # NOTE(review): this unconditionally overwrites the 202 set
                # inside the lock when another process created the dir first
                # -- confirm whether 201 is intended in that race.
                status = 201
                reason = 'Created'
            body = ''
            hdrs['content-length'] = str(len(body))
        elif method == 'POST':
            # Container metadata updates are a no-op in this fake backend.
            status = 204
            reason = 'No Content'
            body = ''
            hdrs['content-length'] = str(len(body))
        elif method == 'DELETE':
            fs_container_path = path_join(self.local_path, fs_container)
            if not isdir(fs_container_path):
                status = 404
                reason = 'Not Found'
            else:
                db = self._get_db()
                with lock_dir(self.local_path):
                    if not isdir(fs_container_path):
                        status = 404
                        reason = 'Not Found'
                    else:
                        rmdir(fs_container_path)
                        db.execute('''
                            DELETE FROM container_entry
                            WHERE container_name = ?
                        ''', (container_name,))
                        db.commit()
                        status = 204
                        reason = 'No Content'
                # NOTE(review): unlike the PUT branch, db is not closed
                # here -- confirm whether db.close() is missing.
            body = ''
            hdrs['content-length'] = str(len(body))
        if stream:
            return status, reason, hdrs, StringIO(body)
        else:
            return status, reason, hdrs, body
def _object(self, method, container_name, object_name, contents, headers,
            stream, query, cdn):
    """Handle an object request (GET/HEAD/PUT/DELETE) against local disk.

    Emulates the Swift object API on the local filesystem: the object's
    data lives in a file under ``self.local_path/<container>/<object>``
    and its metadata row lives in the SQLite database from ``_get_db``.

    Returns a ``(status, reason, headers_dict, body)`` tuple; ``body``
    is a file-like object when ``stream`` is true, otherwise a string.
    """
    if cdn:
        raise Exception('CDN not yet supported with LocalClient')
    fs_container = _encode_name(container_name)
    fs_object = _encode_name(object_name)
    # Default to an error so any unhandled method reports a failure.
    status = 503
    reason = 'Internal Server Error'
    hdrs = {}
    body = ''
    if method in ('GET', 'HEAD'):
        local_path = path_join(self.local_path, fs_container, fs_object)
        if not exists(local_path):
            status = 404
            reason = 'Not Found'
        else:
            content_length = getsize(local_path)
            hdrs['content-length'] = str(content_length)
            status = 200 if content_length else 204
            if method == 'HEAD':
                body = ''
            elif stream:
                # Streaming: the caller owns (and must close) the file.
                body = open(local_path, 'rb')
            else:
                # Non-streaming: read and close the file here so the
                # descriptor is not leaked (the original left it open).
                with open(local_path, 'rb') as fp:
                    body = fp.read()
    elif method == 'PUT':
        fs_object_path = path_join(
            self.local_path, fs_container, fs_object)
        # Write into a uniquely-named temp file first so a partial
        # upload never clobbers an existing object; rename on success.
        temp_path = path_join(
            self.local_path, fs_container, '_-temp' + uuid4().hex)
        content_length = headers.get('content-length')
        if content_length is not None:
            content_length = int(content_length)
        fp = open(temp_path, 'wb')
        try:
            left = content_length
            written = 0
            while left is None or left > 0:
                if left is not None:
                    # Read at most chunk_size bytes and never more than
                    # remain per Content-Length. The original used
                    # max(), which over-read when fewer than chunk_size
                    # bytes remained and issued one unbounded read for
                    # large uploads.
                    chunk = contents.read(min(left, self.chunk_size))
                    left -= len(chunk)
                else:
                    chunk = contents.read(self.chunk_size)
                if not chunk:
                    break
                fp.write(chunk)
                written += len(chunk)
            fp.flush()
        finally:
            # Always release the temp file handle, even on read errors.
            fp.close()
        if content_length is not None and written != content_length:
            unlink(temp_path)
            status = 503
            reason = 'Internal Server Error'
            body = 'Wrote %d bytes when Content-Length was %d' % (
                written, content_length)
        else:
            db = self._get_db()
            with lock_dir(self.local_path):
                if isfile(fs_object_path):
                    # Overwrite: keep the existing row, refresh stats.
                    rename(temp_path, fs_object_path)
                    db.execute('''
                        UPDATE object_entry
                        SET put_timestamp = ?, byte_count = ?
                        WHERE container_name = ? AND object_name = ?
                    ''', (time(), written, container_name, object_name))
                else:
                    rename(temp_path, fs_object_path)
                    db.execute('''
                        INSERT INTO object_entry (
                            container_name, object_name, put_timestamp,
                            byte_count)
                        VALUES (?, ?, ?, ?)
                    ''', (container_name, object_name, time(), written))
                db.commit()
            status = 201
            reason = 'Created'
            body = ''
            hdrs['content-length'] = str(len(body))
    elif method == 'DELETE':
        fs_object_path = path_join(
            self.local_path, fs_container, fs_object)
        if not isfile(fs_object_path):
            status = 404
            reason = 'Not Found'
        else:
            db = self._get_db()
            with lock_dir(self.local_path):
                # Re-check under the lock; another process may have
                # deleted the object between the checks.
                if not isfile(fs_object_path):
                    status = 404
                    reason = 'Not Found'
                else:
                    unlink(fs_object_path)
                    db.execute('''
                        DELETE FROM object_entry
                        WHERE container_name = ? AND object_name = ?
                    ''', (container_name, object_name))
                    db.commit()
                    status = 204
                    reason = 'No Content'
                    body = ''
                    hdrs['content-length'] = str(len(body))
    # Normalize the body to the representation the caller asked for.
    if stream and not hasattr(body, 'read'):
        body = StringIO(body)
    elif not stream and hasattr(body, 'read'):
        body = body.read()
    return status, reason, hdrs, body
def get_account_hash(self):
    """
    See :py:func:`swiftly.client.client.Client.get_account_hash`
    """
    # For the local client the "account" is the root directory, so its
    # fully URL-quoted path doubles as the account hash.
    account_path = self.local_path
    return quote(account_path, safe='')
| {
"repo_name": "rackerlabs/swiftly",
"path": "swiftly/client/localclient.py",
"copies": "1",
"size": "22573",
"license": "apache-2.0",
"hash": -5081114475536469000,
"line_mean": 37.3894557823,
"line_max": 79,
"alpha_frac": 0.4662650069,
"autogenerated": false,
"ratio": 4.729310706054892,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0005996228385679922,
"num_lines": 588
} |
"""A client to miRBase."""
import os
__all__ = [
'get_mirbase_id_from_mirbase_name',
'get_mirbase_name_from_mirbase_id',
'get_hgnc_id_from_mirbase_id',
'get_mirbase_id_from_hgnc_id',
'get_mirbase_id_from_hgnc_symbol',
]
HERE = os.path.dirname(os.path.abspath(__file__))
MIRBASE_FILE = os.path.join(HERE, os.pardir, 'resources', 'mirbase.tsv')
def get_mirbase_name_from_mirbase_id(mirbase_id):
    """Look up the miRBase name for the given miRBase identifier.

    Parameters
    ----------
    mirbase_id : str
        The miRBase ID to be converted. Example: "MI0000060"

    Returns
    -------
    mirbase_name : str
        The miRBase name corresponding to the given miRBase ID, or None
        if the ID is not known.
    """
    name = _mirbase_id_to_name.get(mirbase_id)
    return name
def get_mirbase_id_from_mirbase_name(mirbase_name):
    """Look up the miRBase identifier for the given miRBase name.

    Parameters
    ----------
    mirbase_name : str
        The miRBase name to be converted. Example: "hsa-mir-19b-2"

    Returns
    -------
    mirbase_id : str
        The miRBase ID corresponding to the given miRBase name, or None
        if the name is not known.
    """
    identifier = _mirbase_name_to_id.get(mirbase_name)
    return identifier
def get_hgnc_id_from_mirbase_id(mirbase_id):
    """Look up the HGNC ID for the given miRBase identifier.

    Parameters
    ----------
    mirbase_id : str
        The miRBase ID to be converted. Example: "MI0000060"

    Returns
    -------
    hgnc_id : str
        The HGNC ID corresponding to the given miRBase ID, or None if
        there is no HGNC mapping for it.
    """
    hgnc_id = _mirbase_id_to_hgnc_id.get(mirbase_id)
    return hgnc_id
def get_mirbase_id_from_hgnc_id(hgnc_id):
    """Return the miRBase ID corresponding to the given HGNC ID.

    Parameters
    ----------
    hgnc_id : str
        An HGNC identifier to convert to miRBase, if it is indeed
        an miRNA. Example: "31476"

    Returns
    -------
    mirbase_id : str
        The miRBase ID corresponding to the given HGNC ID.
    """
    return _hgnc_id_to_mirbase_id.get(hgnc_id)
def get_mirbase_id_from_hgnc_symbol(hgnc_symbol):
    """Return the miRBase ID corresponding to the given HGNC gene symbol.

    Parameters
    ----------
    hgnc_symbol : str
        An HGNC gene symbol to convert to miRBase, if it is indeed
        an miRNA. Example: "MIR19B2"

    Returns
    -------
    mirbase_id : str
        The miRBase ID corresponding to the given HGNC gene symbol.
    """
    return _hgnc_symbol_to_mirbase_id.get(hgnc_symbol)
def _read():
    """Read the miRBase data into some lookup dictionaries.

    Parses the bundled tab-separated file at MIRBASE_FILE, whose rows
    are (mirbase_id, mirbase_name, db, identifier, name), and returns a
    6-tuple of dicts in the order:
    (id->name, name->id, hgnc_id->id, id->hgnc_id,
     hgnc_symbol->id, id->hgnc_symbol).
    """
    mirbase_id_to_name = {}
    mirbase_name_to_id = {}
    hgnc_id_to_mirbase_id = {}
    mirbase_id_to_hgnc_id = {}
    hgnc_symbol_to_mirbase_id = {}
    mirbase_id_to_hgnc_symbol = {}
    with open(MIRBASE_FILE) as file:
        # Skip the header row.
        next(file)
        for line in file:
            try:
                mirbase_id, mirbase_name, db, identifier, name = \
                    line.strip().split('\t')
            except ValueError:  # fails on WORMBASE since no names
                continue
            mirbase_id_to_name[mirbase_id] = mirbase_name
            mirbase_name_to_id[mirbase_name] = mirbase_id
            # Only HGNC rows contribute to the HGNC cross-reference maps.
            if db == 'HGNC':
                hgnc_id_to_mirbase_id[identifier] = mirbase_id
                mirbase_id_to_hgnc_id[mirbase_id] = identifier
                hgnc_symbol_to_mirbase_id[name] = mirbase_id
                mirbase_id_to_hgnc_symbol[mirbase_id] = name
    return (
        mirbase_id_to_name,
        mirbase_name_to_id,
        hgnc_id_to_mirbase_id,
        mirbase_id_to_hgnc_id,
        hgnc_symbol_to_mirbase_id,
        mirbase_id_to_hgnc_symbol,
    )
# Module-level lookup tables, built once from the bundled TSV at import
# time and shared by all the public getter functions above.
(
    _mirbase_id_to_name,
    _mirbase_name_to_id,
    _hgnc_id_to_mirbase_id,
    _mirbase_id_to_hgnc_id,
    _hgnc_symbol_to_mirbase_id,
    _mirbase_id_to_hgnc_symbol,
) = _read()
| {
"repo_name": "sorgerlab/indra",
"path": "indra/databases/mirbase_client.py",
"copies": "4",
"size": "3837",
"license": "bsd-2-clause",
"hash": -509254575096445500,
"line_mean": 25.8321678322,
"line_max": 77,
"alpha_frac": 0.5962991921,
"autogenerated": false,
"ratio": 2.9024205748865355,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5498719766986536,
"avg_score": null,
"num_lines": null
} |
"""A client to obtain metadata and text content from bioRxiv
(and to some extent medRxiv) preprints."""
import re
import logging
import requests
import datetime
logger = logging.getLogger(__name__)
# Browser link at https://connect.biorxiv.org/relate/content/181
collection_url = 'https://connect.biorxiv.org/relate/collection_json.php?grp='
covid19_collection_id = '181'
bio_content_url = 'https://www.biorxiv.org/content/'
med_content_url = 'https://www.medrxiv.org/content/'
def get_collection_pubs(collection_id, min_date=None):
    """Get the list of publication entries of a biorxiv/medrxiv collection.

    Parameters
    ----------
    collection_id : str
        The identifier of the collection to fetch.
    min_date : Optional[datetime.datetime]
        A datetime object representing a cutoff. If given, only
        publications that were released on or after the given date
        are returned. By default, no date constraint is applied.

    Returns
    -------
    list of dict
        A list of the publication entries which include the abstract and other
        metadata.
    """
    res = requests.get(collection_url + collection_id)
    res.raise_for_status()
    pubs = res.json()['rels']
    if min_date:
        new_rels = []
        for pub in pubs:
            try:
                date = datetime.datetime.strptime(pub.get('rel_date'),
                                                  '%Y-%m-%d')
            except Exception:
                # Entries with a missing or unparseable rel_date are
                # excluded when filtering by date.
                continue
            if date >= min_date:
                new_rels.append(pub)
        return new_rels
    return pubs
def get_collection_dois(collection_id, min_date=None):
    """Get list of DOIs from a biorxiv/medrxiv collection.

    Parameters
    ----------
    collection_id : str
        The identifier of the collection to fetch.
    min_date : Optional[datetime.datetime]
        A datetime object representing a cutoff. If given, only
        publications that were released on or after the given date
        are considered. By default, no date constraint is applied.

    Returns
    -------
    list of dict
        The list of DOIs in the collection.
    """
    # Keep only entries that actually carry a DOI.
    return [
        pub.get('rel_doi')
        for pub in get_collection_pubs(collection_id, min_date=min_date)
        if pub.get('rel_doi')
    ]
def get_pdf_xml_url_base(content):
    """Return base URL to PDF/XML based on the content of the landing page.

    Parameters
    ----------
    content : str
        The content of the landing page for an rxiv paper.

    Returns
    -------
    str or None
        The base URL if available, otherwise None.
    """
    # The greedy (?:.*) prefix with re.S anchors on the last occurrence
    # of the citation_pdf_url meta tag in the page.
    pattern = '(?:.*)"citation_pdf_url" content="([^"]+).full.pdf"'
    found = re.match(pattern, content, re.S)
    if not found:
        return None
    return found.group(1)
def get_text_url_base(content):
    """Return base URL to full text based on the content of the landing page.

    Parameters
    ----------
    content : str
        The content of the landing page for an rxiv paper.

    Returns
    -------
    str or None
        The base URL if available, otherwise None.
    """
    # Mirrors get_pdf_xml_url_base but keys off the citation_html_url
    # meta tag instead.
    pattern = '(?:.*)"citation_html_url" content="([^"]+).full"'
    found = re.match(pattern, content, re.S)
    if not found:
        return None
    return found.group(1)
def get_formats(pub):
    """Return formats available for a publication JSON.

    Parameters
    ----------
    pub : dict
        The JSON dict description a publication.

    Returns
    -------
    dict
        A dict with available formats as its keys (abstract, pdf, xml, txt)
        and either the content (in case of abstract) or the URL
        (in case of pdf, xml, txt) as the value.
    """
    available = {}
    if 'rel_abs' in pub:
        available['abstract'] = pub['rel_abs']
    # The publication JSON alone does not identify the URLs for the
    # various renditions, so fetch the article's landing page and parse
    # the URLs out of its content.
    landing_page = requests.get(pub['rel_link'])
    # The full PDF/XML URL often differs in format from the rel_site
    # URL. The XML URL never appears verbatim in the page, but it
    # shares its base with citation_pdf_url, so derive both from that.
    pdf_xml_base = get_pdf_xml_url_base(landing_page.text)
    if pdf_xml_base:
        available['pdf'] = pdf_xml_base + '.full.pdf'
        available['xml'] = pdf_xml_base + '.source.xml'
    text_base = get_text_url_base(landing_page.text)
    if text_base:
        available['txt'] = text_base + 'txt'
    return available
def get_content_from_pub_json(pub, format):
    """Get text content based on a given format from a publication JSON.

    In the case of abstract, the content is returned
    from the JSON directly. For pdf, the content is returned as bytes
    that can be dumped into a file. For txt and xml, the text is processed
    out of either the raw XML or text content that rxiv provides.

    Parameters
    ----------
    pub : dict
        The JSON dict description a publication.
    format : str
        The format, if available, via which the content should be
        obtained.

    Returns
    -------
    str or bytes or None
        The content in the requested format, or None if unavailable.
    """
    # The abstract is available directly in the pub JSON under the
    # 'rel_abs' key (the same key get_formats reads), so return it
    # without fetching the landing page. The original looked up the
    # nonexistent 'rel_abstract' key and therefore always returned None
    # for abstracts; the duplicate abstract branch below the formats
    # lookup was unreachable and has been removed.
    if format == 'abstract':
        return pub.get('rel_abs')
    formats = get_formats(pub)
    if format not in formats:
        logger.warning('Content not available in format %s' % format)
        return None
    # For PDFs we return the result in bytes that can then be dumped
    # into a file.
    if format == 'pdf':
        return requests.get(formats[format]).content
    # For xml and text, we return the result as str
    elif format == 'xml':
        return get_text_from_rxiv_xml(requests.get(formats[format]).text)
    elif format == 'txt':
        return get_text_from_rxiv_text(requests.get(formats[format]).text)
def get_text_from_rxiv_xml(rxiv_xml):
    """Return clean text from the raw rxiv xml content.

    Parameters
    ----------
    rxiv_xml : str
        The content of the rxiv full xml as obtained from the web.

    Returns
    -------
    str
        The text content stripped out from the raw full xml.
    """
    # FIXME: this is a very naive initial solution, we should instead
    # traverse the XML structure properly to get the content.
    return re.sub('<.*?>', '', rxiv_xml)
def get_text_from_rxiv_text(rxiv_text):
    """Return clean text from the raw rxiv text content.

    This function parses out the title, headings and subheadings, and
    the content of sections under headings/subheadings.
    It filters out some irrelevant content e.g., references and
    footnotes.

    Parameters
    ----------
    rxiv_text : str
        The content of the rxiv full text as obtained from the web.

    Returns
    -------
    str
        The text content stripped out from the raw full text.
    """
    # Drop blank lines and surrounding whitespace up front.
    lines = [line.strip() for line in rxiv_text.split('\n') if line.strip()]
    current_section = 'title'
    # The first non-empty line is taken to be the title.
    text = lines[0] + '\n'
    line_idx = 1
    # Headings whose section content should be excluded from the output.
    skip_section = {'References', 'Footnotes', 'Acknowledgements',
                    'Supplementary Figures', 'Declaration of Interests',
                    'Author Contributions', 'Code and data availability'}
    # Advance to the first '## ' heading; anything between the title and
    # that heading is discarded.
    for line in lines[line_idx:]:
        line_idx += 1
        match = re.match('## (.+)', line)
        if match:
            current_section = match.groups()[0]
            break
    # Walk the remaining lines, tracking the current heading. Note that
    # '## (.+)' requires a space after exactly two '#', so '### ' lines
    # only match the subheading pattern below.
    while line_idx < len(lines):
        for line in lines[line_idx:]:
            line_idx += 1
            match_heading = re.match('## (.+)', line)
            match_subheading = re.match('### (.+)', line)
            if match_heading:
                # New section: restart the inner loop from here so the
                # skip decision is re-evaluated per section.
                current_section = match_heading.groups()[0]
                break
            elif current_section in skip_section:
                # Inside a skipped section: drop both body lines and
                # subheadings.
                continue
            elif match_subheading:
                # Keep the subheading title itself in the output.
                text += (match_subheading.groups()[0] + '\n')
            else:
                text += (line + '\n')
    return text
if __name__ == '__main__':
    # Ad hoc driver: dump the text content of the COVID-19 collection.
    import os
    import json
    # Cache the collection listing locally so reruns skip the network
    # call for the publication list.
    fname = 'covid19_pubs.json'
    if os.path.exists(fname):
        with open(fname, 'r') as fh:
            covid19_pubs = json.load(fh)
    else:
        covid19_pubs = get_collection_pubs(covid19_collection_id)
        with open(fname, 'w') as fh:
            json.dump(covid19_pubs, fh)
    contents = {}
    for pub in covid19_pubs:
        doi = pub['rel_doi']
        formats = get_formats(pub)
        # Prefer full text, then XML, and fall back to the abstract.
        if 'txt' in formats:
            print('Getting text for %s' % doi)
            txt = get_content_from_pub_json(pub, 'txt')
        elif 'xml' in formats:
            print('Getting xml for %s' % doi)
            txt = get_content_from_pub_json(pub, 'xml')
        else:
            print('Getting abstract for %s' % doi)
            txt = get_content_from_pub_json(pub, 'abstract')
        contents[doi] = txt
    with open('covid19_contents', 'w') as fh:
        json.dump(contents, fh)
| {
"repo_name": "johnbachman/indra",
"path": "indra/literature/biorxiv_client.py",
"copies": "4",
"size": "9287",
"license": "bsd-2-clause",
"hash": 6989662450983254000,
"line_mean": 31.2465277778,
"line_max": 78,
"alpha_frac": 0.6043932379,
"autogenerated": false,
"ratio": 3.945199660152931,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.003472222222222222,
"num_lines": 288
} |
"""A client to the Disease Ontology."""
from indra.databases.obo_client import OboClient
_client = OboClient(prefix='doid')
def get_doid_name_from_doid_id(doid_id):
    """Look up the name for the given Disease Ontology ID.

    Parameters
    ----------
    doid_id : str
        The Disease Ontology identifier to be converted.
        Example: "DOID:0000017"

    Returns
    -------
    doid_name : str
        The DOID name corresponding to the given DOID identifier.
    """
    doid_name = _client.get_name_from_id(doid_id)
    return doid_name
def get_doid_id_from_doid_name(doid_name):
    """Look up the identifier for the given Disease Ontology name.

    Parameters
    ----------
    doid_name : str
        The Disease Ontology name to be converted. Example: "Nocturia"

    Returns
    -------
    doid_id : str
        The Disease Ontology identifier corresponding to the given name.
    """
    doid_id = _client.get_id_from_name(doid_name)
    return doid_id
def get_doid_id_from_doid_alt_id(doid_alt_id):
    """Look up the primary identifier for a Disease Ontology alt id.

    Parameters
    ----------
    doid_alt_id : str
        The Disease Ontology alt id to be converted. Example: "DOID:267"

    Returns
    -------
    doid_id : str
        The Disease Ontology identifier corresponding to the given alt id.
    """
    doid_id = _client.get_id_from_alt_id(doid_alt_id)
    return doid_id
if __name__ == '__main__':
    # Print xref namespace usage counts, most common first, one per line.
    print(*_client.count_xrefs().most_common(), sep='\n')
| {
"repo_name": "johnbachman/belpy",
"path": "indra/databases/doid_client.py",
"copies": "3",
"size": "1479",
"license": "mit",
"hash": 4226422043502236000,
"line_mean": 24.5,
"line_max": 80,
"alpha_frac": 0.6355645707,
"autogenerated": false,
"ratio": 3.3922018348623855,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.00021285653469561513,
"num_lines": 58
} |
"""A client to the Gene Ontology."""
import re
import logging
from indra.databases.obo_client import OboClient
logger = logging.getLogger(__name__)
_client = OboClient(prefix='go')
def get_go_label(go_id):
    """Get label corresponding to a given GO identifier.

    Parameters
    ----------
    go_id : str
        The GO identifier. Should include the `GO:` prefix, e.g., `GO:1903793`
        (positive regulation of anion transport).

    Returns
    -------
    str
        Label corresponding to the GO ID.
    """
    label = _client.get_name_from_id(go_id)
    return label
def get_go_id_from_label(label):
    """Get ID corresponding to a given GO label.

    Parameters
    ----------
    label : str
        The GO label to get the ID for.

    Returns
    -------
    str
        Identifier corresponding to the GO label, starts with GO:.
    """
    go_id = _client.get_id_from_name(label)
    return go_id
def get_go_id_from_label_or_synonym(label):
    """Get ID corresponding to a given GO label or synonym.

    Parameters
    ----------
    label : str
        The GO label or synonym to get the ID for.

    Returns
    -------
    str
        Identifier corresponding to the GO label or synonym, starts with GO:.
    """
    go_id = _client.get_id_from_name_or_synonym(label)
    return go_id
def get_primary_id(go_id):
    """Get primary ID corresponding to an alternative/deprecated GO ID.

    Parameters
    ----------
    go_id : str
        The GO ID to get the primary ID for.

    Returns
    -------
    str
        Primary identifier corresponding to the given ID.
    """
    primary = _client.get_id_from_alt_id(go_id)
    return primary
def get_valid_location(loc):
    """Return a valid GO label based on an ID, label or synonym.

    The rationale behind this function is that many sources produce
    cellular locations that are arbitrarily either GO IDs (sometimes
    without the prefix and sometimes outdated) or labels or synonyms.
    This function handles all these cases and returns a valid GO label
    in case one is available, otherwise None.

    Parameters
    ----------
    loc : str
        The location that needs to be canonicalized.

    Returns
    -------
    str or None
        The valid location string if available, otherwise None.
    """
    if not loc:
        return None
    # If it's actually a GO ID, we do some validation and use it. If it is
    # a text label then we look up the GO ID for it
    if re.match(r'^(GO:)?\d+$', loc):
        if not loc.startswith('GO:'):
            loc = 'GO:' + loc
        go_id = loc
        # Resolve deprecated/alternative IDs to their primary ID.
        prim_id = get_primary_id(go_id)
        if prim_id:
            go_id = prim_id
    else:
        go_id = get_go_id_from_label_or_synonym(loc)
        if not go_id:
            return None
    # If we managed to get a GO ID either way, we get its label and return it
    # with some extra caution to not return a None name under any
    # circumstances
    if go_id:
        loc = get_go_label(go_id)
        if loc:
            return loc
    return None
| {
"repo_name": "bgyori/indra",
"path": "indra/databases/go_client.py",
"copies": "4",
"size": "2989",
"license": "bsd-2-clause",
"hash": -5191195348355407000,
"line_mean": 24.547008547,
"line_max": 78,
"alpha_frac": 0.6078956173,
"autogenerated": false,
"ratio": 3.7787610619469025,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.6386656679246903,
"avg_score": null,
"num_lines": null
} |
"""A cli for interacting with the models.
The CLI can be used to publish issues to perform inference on to pubsub
to be picked up by the backends.
"""
import logging
import json
import fire
from code_intelligence import graphql
from code_intelligence import github_util
from code_intelligence import util
from google.cloud import pubsub
import subprocess
DEFAULT_TOPIC = "projects/issue-label-bot-dev/topics/TEST_event_queue"
class Cli:
    """Command line entry points for interacting with the label models."""

    @staticmethod
    def get_issue(url):
        """Get the data for a specific issue.

        Args:
          url: URL of the issue
        """
        gh_client = graphql.GraphQLClient()
        result = github_util.get_issue(url, gh_client)
        print(json.dumps(result, indent=4, sort_keys=True))

    @staticmethod
    def label_issue(issue, pubsub_topic=DEFAULT_TOPIC):
        """Label a specific issue.

        Args:
          issue: The issue in the form {owner}/{repo}#{issue}
          pubsub_topic: (Optional) the pubsub topic to publish to. This should
            be in the form projects/{project}/topics/{topic_name}
        """
        publisher = pubsub.PublisherClient()
        repo_owner, repo_name, issue_num = util.parse_issue_spec(issue)
        if not repo_owner:
            raise ValueError(f"issue={issue} didn't match regex "
                             f"{util.ISSUE_RE.pattern}")
        # all attributes being published to pubsub must be sent as text strings
        publisher.publish(pubsub_topic,
                          b'New issue.',
                          # TODO(jlewi): Does the backend depend on the client
                          # providing the installation id
                          installation_id="",
                          repo_owner=repo_owner,
                          repo_name=repo_name,
                          issue_num=str(issue_num))

    @staticmethod
    def pod_logs(pod):
        """Pretty print pod logs

        Args:
          pod: Name of the pod
        """
        output = subprocess.check_output(["kubectl", "logs", pod])
        for l in output.splitlines():
            try:
                entry = json.loads(l)
                filename = entry.get("filename")
                line = entry.get("line")
                message = entry.get("message")
                # Print the source location from the log entry; the
                # original parsed `filename` but never used it and
                # printed a placeholder instead.
                print(f"{filename}:{line}: {message}")
            except json.JSONDecodeError:
                # Not structured JSON; echo the raw line verbatim.
                print(l)
                continue
if __name__ == "__main__":
logging.basicConfig(level=logging.INFO,
format=('%(levelname)s|%(asctime)s'
'|%(message)s|%(pathname)s|%(lineno)d|'),
datefmt='%Y-%m-%dT%H:%M:%S',
)
fire.Fire(Cli)
| {
"repo_name": "kubeflow/code-intelligence",
"path": "py/label_microservice/cli.py",
"copies": "1",
"size": "2503",
"license": "mit",
"hash": 3982820385656636000,
"line_mean": 30.2875,
"line_max": 75,
"alpha_frac": 0.599280863,
"autogenerated": false,
"ratio": 3.992025518341308,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5091306381341307,
"avg_score": null,
"num_lines": null
} |
"""A clone of threading module (version 2.7.2) that always
targets real OS threads. (Unlike 'threading' which flips between
green and OS threads based on whether the monkey patching is in effect
or not).
This module is missing 'Thread' class, but includes 'Queue'.
"""
from __future__ import absolute_import
try:
from Queue import Full, Empty
except ImportError:
from queue import Full, Empty # pylint:disable=import-error
from collections import deque
import heapq
from time import time as _time, sleep as _sleep
from gevent import monkey
from gevent._compat import PY3
__all__ = ['Condition',
'Event',
'Lock',
'RLock',
'Semaphore',
'BoundedSemaphore',
'Queue',
'local',
'stack_size']
thread_name = '_thread' if PY3 else 'thread'
start_new_thread, Lock, get_ident, local, stack_size = monkey.get_original(thread_name, [
'start_new_thread', 'allocate_lock', 'get_ident', '_local', 'stack_size'])
class RLock(object):
    """A reentrant lock built on a plain OS-level Lock.

    The owning thread may acquire() repeatedly; the underlying lock is
    only released when the matching number of release() calls is made.
    """

    def __init__(self):
        # The real (non-reentrant) OS lock that gates ownership.
        self.__block = Lock()
        # Thread ident of the current owner, or None when unowned.
        self.__owner = None
        # How many times the owner has acquired without releasing.
        self.__count = 0

    def __repr__(self):
        owner = self.__owner
        return "<%s owner=%r count=%d>" % (
            self.__class__.__name__, owner, self.__count)

    def acquire(self, blocking=1):
        """Acquire the lock, recursively if already owned by this thread.

        Returns 1/True on success; False when non-blocking and the lock
        is held by another thread.
        """
        me = get_ident()
        if self.__owner == me:
            # Reentrant case: just bump the recursion count.
            self.__count = self.__count + 1
            return 1
        rc = self.__block.acquire(blocking)
        if rc:
            self.__owner = me
            self.__count = 1
        return rc

    __enter__ = acquire

    def release(self):
        """Release one level of the lock; only the owner may call this."""
        if self.__owner != get_ident():
            raise RuntimeError("cannot release un-acquired lock")
        self.__count = count = self.__count - 1
        if not count:
            # Outermost release: give up ownership and the real lock.
            self.__owner = None
            self.__block.release()

    def __exit__(self, t, v, tb):
        self.release()

    # Internal methods used by condition variables

    def _acquire_restore(self, count_owner):
        # Reacquire the lock and restore the saved (count, owner) state.
        count, owner = count_owner
        self.__block.acquire()
        self.__count = count
        self.__owner = owner

    def _release_save(self):
        # Fully release the lock (regardless of recursion depth) and
        # return the state needed to restore it later.
        count = self.__count
        self.__count = 0
        owner = self.__owner
        self.__owner = None
        self.__block.release()
        return (count, owner)

    def _is_owned(self):
        # True if the calling thread currently owns the lock.
        return self.__owner == get_ident()
class Condition(object):
    """A condition variable associated with a lock (RLock by default).

    Waiters block on per-waiter one-shot Locks; notify() releases them.
    """
    # pylint:disable=method-hidden

    def __init__(self, lock=None):
        if lock is None:
            lock = RLock()
        self.__lock = lock
        # Export the lock's acquire() and release() methods
        self.acquire = lock.acquire
        self.release = lock.release
        # If the lock defines _release_save() and/or _acquire_restore(),
        # these override the default implementations (which just call
        # release() and acquire() on the lock). Ditto for _is_owned().
        try:
            self._release_save = lock._release_save
        except AttributeError:
            pass
        try:
            self._acquire_restore = lock._acquire_restore
        except AttributeError:
            pass
        try:
            self._is_owned = lock._is_owned
        except AttributeError:
            pass
        # One single-use Lock per waiting thread.
        self.__waiters = []

    def __enter__(self):
        return self.__lock.__enter__()

    def __exit__(self, *args):
        return self.__lock.__exit__(*args)

    def __repr__(self):
        return "<Condition(%s, %d)>" % (self.__lock, len(self.__waiters))

    def _release_save(self):
        self.__lock.release()  # No state to save

    def _acquire_restore(self, x):  # pylint:disable=unused-argument
        self.__lock.acquire()  # Ignore saved state

    def _is_owned(self):
        # Return True if lock is owned by current_thread.
        # This method is called only if __lock doesn't have _is_owned().
        if self.__lock.acquire(0):
            self.__lock.release()
            return False
        return True

    def wait(self, timeout=None):
        """Release the lock and block until notified or timed out.

        Must be called with the associated lock held; the lock is
        reacquired before returning.
        """
        if not self._is_owned():
            raise RuntimeError("cannot wait on un-acquired lock")
        # A pre-acquired one-shot lock: the second acquire() below
        # blocks until notify() releases it.
        waiter = Lock()
        waiter.acquire()
        self.__waiters.append(waiter)
        saved_state = self._release_save()
        try:  # restore state no matter what (e.g., KeyboardInterrupt)
            if timeout is None:
                waiter.acquire()
            else:
                # Balancing act: We can't afford a pure busy loop, so we
                # have to sleep; but if we sleep the whole timeout time,
                # we'll be unresponsive. The scheme here sleeps very
                # little at first, longer as time goes on, but never longer
                # than 20 times per second (or the timeout time remaining).
                endtime = _time() + timeout
                delay = 0.0005  # 500 us -> initial delay of 1 ms
                while True:
                    gotit = waiter.acquire(0)
                    if gotit:
                        break
                    remaining = endtime - _time()
                    if remaining <= 0:
                        break
                    delay = min(delay * 2, remaining, .05)
                    _sleep(delay)
                if not gotit:
                    # Timed out: remove ourselves from the waiter list
                    # (notify() may have raced and removed us already).
                    try:
                        self.__waiters.remove(waiter)
                    except ValueError:
                        pass
        finally:
            self._acquire_restore(saved_state)

    def notify(self, n=1):
        """Wake up to n waiting threads. Must hold the lock."""
        if not self._is_owned():
            raise RuntimeError("cannot notify on un-acquired lock")
        __waiters = self.__waiters
        waiters = __waiters[:n]
        if not waiters:
            return
        for waiter in waiters:
            # Releasing the waiter's lock unblocks its wait().
            waiter.release()
            try:
                __waiters.remove(waiter)
            except ValueError:
                pass

    def notify_all(self):
        """Wake all currently waiting threads."""
        self.notify(len(self.__waiters))
class Semaphore(object):
    """A counting semaphore with no upper bound on release()."""
    # After Tim Peters' semaphore class, but not quite the same (no maximum)

    def __init__(self, value=1):
        if value < 0:
            raise ValueError("semaphore initial value must be >= 0")
        self.__cond = Condition(Lock())
        # Number of available "slots"; acquire blocks while it is 0.
        self.__value = value

    def acquire(self, blocking=1):
        """Decrement the counter, blocking while it is zero.

        Returns True on success, False when non-blocking and the
        counter is zero.
        """
        rc = False
        self.__cond.acquire()
        while self.__value == 0:
            if not blocking:
                break
            self.__cond.wait()
        else:
            # while/else: only runs when the loop exits without break,
            # i.e. a slot is available.
            self.__value = self.__value - 1
            rc = True
        self.__cond.release()
        return rc

    __enter__ = acquire

    def release(self):
        """Increment the counter and wake one blocked acquirer."""
        self.__cond.acquire()
        self.__value = self.__value + 1
        self.__cond.notify()
        self.__cond.release()

    def __exit__(self, t, v, tb):
        self.release()
class BoundedSemaphore(Semaphore):
    """Semaphore that checks that # releases is <= # acquires"""

    def __init__(self, value=1):
        Semaphore.__init__(self, value)
        # Remember the starting value so release() can detect overflow.
        self._initial_value = value

    def release(self):
        """Release the semaphore, raising ValueError if it would push
        the counter above its initial value (more releases than
        acquires)."""
        # The base class's private __value mangles to _Semaphore__value;
        # the original spelled it Semaphore__value (missing the leading
        # underscore), which raised AttributeError on every release.
        if self._Semaphore__value >= self._initial_value:  # pylint:disable=no-member
            raise ValueError("Semaphore released too many times")
        return Semaphore.release(self)
class Event(object):
    """A flag that threads can wait on until it is set."""
    # After Tim Peters' event class (without is_posted())

    def __init__(self):
        self.__cond = Condition(Lock())
        # True once set() has been called and clear() has not.
        self.__flag = False

    def _reset_internal_locks(self):
        # private! called by Thread._reset_internal_locks by _after_fork()
        self.__cond.__init__()

    def is_set(self):
        """Return True if the internal flag is set."""
        return self.__flag

    def set(self):
        """Set the flag and wake all threads waiting on it."""
        self.__cond.acquire()
        try:
            self.__flag = True
            self.__cond.notify_all()
        finally:
            self.__cond.release()

    def clear(self):
        """Reset the flag so subsequent wait() calls block again."""
        self.__cond.acquire()
        try:
            self.__flag = False
        finally:
            self.__cond.release()

    def wait(self, timeout=None):
        """Block until the flag is set or the timeout elapses.

        Returns the flag's value, which may still be False after a
        timeout.
        """
        self.__cond.acquire()
        try:
            if not self.__flag:
                self.__cond.wait(timeout)
            return self.__flag
        finally:
            self.__cond.release()
class Queue: # pylint:disable=old-style-class
"""Create a queue object with a given maximum size.
If maxsize is <= 0, the queue size is infinite.
"""
def __init__(self, maxsize=0):
self.maxsize = maxsize
self._init(maxsize)
# mutex must be held whenever the queue is mutating. All methods
# that acquire mutex must release it before returning. mutex
# is shared between the three conditions, so acquiring and
# releasing the conditions also acquires and releases mutex.
self.mutex = Lock()
# Notify not_empty whenever an item is added to the queue; a
# thread waiting to get is notified then.
self.not_empty = Condition(self.mutex)
# Notify not_full whenever an item is removed from the queue;
# a thread waiting to put is notified then.
self.not_full = Condition(self.mutex)
# Notify all_tasks_done whenever the number of unfinished tasks
# drops to zero; thread waiting to join() is notified to resume
self.all_tasks_done = Condition(self.mutex)
self.unfinished_tasks = 0
def task_done(self):
"""Indicate that a formerly enqueued task is complete.
Used by Queue consumer threads. For each get() used to fetch a task,
a subsequent call to task_done() tells the queue that the processing
on the task is complete.
If a join() is currently blocking, it will resume when all items
have been processed (meaning that a task_done() call was received
for every item that had been put() into the queue).
Raises a ValueError if called more times than there were items
placed in the queue.
"""
self.all_tasks_done.acquire()
try:
unfinished = self.unfinished_tasks - 1
if unfinished <= 0:
if unfinished < 0:
raise ValueError('task_done() called too many times')
self.all_tasks_done.notify_all()
self.unfinished_tasks = unfinished
finally:
self.all_tasks_done.release()
def join(self):
"""Blocks until all items in the Queue have been gotten and processed.
The count of unfinished tasks goes up whenever an item is added to the
queue. The count goes down whenever a consumer thread calls task_done()
to indicate the item was retrieved and all work on it is complete.
When the count of unfinished tasks drops to zero, join() unblocks.
"""
self.all_tasks_done.acquire()
try:
while self.unfinished_tasks:
self.all_tasks_done.wait()
finally:
self.all_tasks_done.release()
def qsize(self):
"""Return the approximate size of the queue (not reliable!)."""
self.mutex.acquire()
try:
return self._qsize()
finally:
self.mutex.release()
def empty(self):
"""Return True if the queue is empty, False otherwise (not reliable!)."""
self.mutex.acquire()
try:
return not self._qsize()
finally:
self.mutex.release()
def full(self):
"""Return True if the queue is full, False otherwise (not reliable!)."""
self.mutex.acquire()
try:
if self.maxsize <= 0:
return False
if self.maxsize >= self._qsize():
return True
finally:
self.mutex.release()
def put(self, item, block=True, timeout=None):
"""Put an item into the queue.
If optional args 'block' is true and 'timeout' is None (the default),
block if necessary until a free slot is available. If 'timeout' is
a positive number, it blocks at most 'timeout' seconds and raises
the Full exception if no free slot was available within that time.
Otherwise ('block' is false), put an item on the queue if a free slot
is immediately available, else raise the Full exception ('timeout'
is ignored in that case).
"""
self.not_full.acquire()
try:
if self.maxsize > 0:
if not block:
if self._qsize() >= self.maxsize:
raise Full
elif timeout is None:
while self._qsize() >= self.maxsize:
self.not_full.wait()
elif timeout < 0:
raise ValueError("'timeout' must be a positive number")
else:
endtime = _time() + timeout
while self._qsize() >= self.maxsize:
remaining = endtime - _time()
if remaining <= 0.0:
raise Full
self.not_full.wait(remaining)
self._put(item)
self.unfinished_tasks += 1
self.not_empty.notify()
finally:
self.not_full.release()
def put_nowait(self, item):
"""Put an item into the queue without blocking.
Only enqueue the item if a free slot is immediately available.
Otherwise raise the Full exception.
"""
return self.put(item, False)
    def get(self, block=True, timeout=None):
        """Remove and return an item from the queue.

        If optional args 'block' is true and 'timeout' is None (the default),
        block if necessary until an item is available. If 'timeout' is
        a positive number, it blocks at most 'timeout' seconds and raises
        the Empty exception if no item was available within that time.
        Otherwise ('block' is false), return an item if one is immediately
        available, else raise the Empty exception ('timeout' is ignored
        in that case).
        """
        self.not_empty.acquire()
        try:
            if not block:
                if not self._qsize():
                    raise Empty
            elif timeout is None:
                # Wait (possibly forever) for a producer to add an item.
                while not self._qsize():
                    self.not_empty.wait()
            elif timeout < 0:
                raise ValueError("'timeout' must be a positive number")
            else:
                # Timed wait: recompute the remaining budget on every wakeup,
                # since a condition wait may return before the predicate
                # actually holds.
                endtime = _time() + timeout
                while not self._qsize():
                    remaining = endtime - _time()
                    if remaining <= 0.0:
                        raise Empty
                    self.not_empty.wait(remaining)
            item = self._get()
            # A slot was freed; wake one blocked producer, if any.
            self.not_full.notify()
            return item
        finally:
            self.not_empty.release()
def get_nowait(self):
"""Remove and return an item from the queue without blocking.
Only get an item if one is immediately available. Otherwise
raise the Empty exception.
"""
return self.get(False)
# Override these methods to implement other queue organizations
# (e.g. stack or priority queue).
# These will only be called with appropriate locks held
# Initialize the queue representation
    def _init(self, maxsize):
        # Base FIFO store is a deque: O(1) append at the tail, O(1) popleft
        # at the head.  'maxsize' is enforced by put()/full(), not here.
        # pylint:disable=unused-argument
        self.queue = deque()
    def _qsize(self, len=len):
        # 'len=len' binds the builtin as a local for a slightly faster lookup.
        return len(self.queue)
    # Put a new item in the queue
    def _put(self, item):
        # Tail append pairs with _get's popleft to give FIFO order.
        self.queue.append(item)
    # Get an item from the queue
    def _get(self):
        # Head popleft pairs with _put's append to give FIFO order.
        return self.queue.popleft()
class PriorityQueue(Queue):
    '''Variant of Queue that retrieves open entries in priority order (lowest first).

    Entries are typically tuples of the form: (priority number, data), so a
    lower priority number is handed out before a higher one.
    '''
    def _init(self, maxsize):
        # The heap lives in a plain list; heapq maintains the invariant.
        self.queue = []
    def _put(self, item, heappush=heapq.heappush):
        # pylint:disable=arguments-differ
        heappush(self.queue, item)
    def _get(self, heappop=heapq.heappop):
        # pylint:disable=arguments-differ
        return heappop(self.queue)
    def _qsize(self, len=len):
        return len(self.queue)
class LifoQueue(Queue):
    '''Variant of Queue that retrieves most recently added entries first.'''
    def _init(self, maxsize):
        # A plain list used as a stack: both append and pop work on the tail.
        self.queue = []
    def _put(self, item):
        self.queue.append(item)
    def _get(self):
        return self.queue.pop()
    def _qsize(self, len=len):
        return len(self.queue)
| {
"repo_name": "burzillibus/RobHome",
"path": "venv/lib/python2.7/site-packages/gevent/_threading.py",
"copies": "1",
"size": "16584",
"license": "mit",
"hash": 9124254306715468000,
"line_mean": 31.2019417476,
"line_max": 89,
"alpha_frac": 0.5552339604,
"autogenerated": false,
"ratio": 4.410638297872341,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0002391443799003264,
"num_lines": 515
} |
"""ACL operations and objects."""
import copy
import json
import logging
import requests
from objectrocket import bases
from objectrocket import util
from objectrocket import errors
logger = logging.getLogger(__name__)
class Acls(bases.BaseOperationsLayer):
    """ACL operations.

    :param objectrocket.client.Client base_client: An instance of objectrocket.client.Client.
    """
    def __init__(self, base_client):
        super(Acls, self).__init__(base_client=base_client)
    #####################
    # Public interface. #
    #####################
    @util.token_auto_auth
    def all(self, instance):
        """Get all ACLs associated with the instance specified by name.

        :param str instance: The name of the instance from which to fetch ACLs.
        :returns: A list of :py:class:`Acl` objects associated with the specified instance.
        :rtype: list
        """
        url = self._url.format(instance=instance)
        response = requests.get(url, **self._default_request_kwargs)
        # _get_response_data comes from the base operations layer; presumably
        # it unwraps the API response envelope -- confirm in the bases module.
        data = self._get_response_data(response)
        return self._concrete_acl_list(data)
    @util.token_auto_auth
    def create(self, instance, cidr_mask, description, **kwargs):
        """Create an ACL entry for the specified instance.

        :param str instance: The name of the instance to associate the new ACL entry with.
        :param str cidr_mask: The IPv4 CIDR mask for the new ACL entry.
        :param str description: A short description for the new ACL entry.
        :param dict kwargs: (optional) Additional key=value pairs to be supplied to the
            creation payload. **Caution:** fields unrecognized by the API will cause this request
            to fail with a 400 from the API.
        :returns: An :py:class:`Acl` for the newly created entry, or None if the response
            document could not be concretized.
        :rtype: :py:class:`Acl`
        """
        # Build up request data.
        url = self._url.format(instance=instance)
        request_data = {
            'cidr_mask': cidr_mask,
            'description': description
        }
        request_data.update(kwargs)
        # Call to create an instance.
        response = requests.post(
            url,
            data=json.dumps(request_data),
            **self._default_request_kwargs
        )
        # Log outcome of instance creation request.
        # NOTE: a non-200 response is only logged; the (error) payload is
        # still handed to _concrete_acl below, which yields None on failure.
        if response.status_code == 200:
            logger.info('Successfully created a new ACL for instance {} with: {}.'
                        .format(instance, request_data))
        else:
            logger.info('Failed to create a new ACL for instance {} with: {}.'
                        .format(instance, request_data))
        data = self._get_response_data(response)
        return self._concrete_acl(data)
    @util.token_auto_auth
    def get(self, instance, acl):
        """Get an ACL by ID belonging to the instance specified by name.

        :param str instance: The name of the instance from which to fetch the ACL.
        :param str acl: The ID of the ACL to fetch.
        :returns: An :py:class:`Acl` object, or None if ACL does not exist.
        :rtype: :py:class:`Acl`
        """
        base_url = self._url.format(instance=instance)
        url = '{base}{aclid}/'.format(base=base_url, aclid=acl)
        response = requests.get(url, **self._default_request_kwargs)
        data = self._get_response_data(response)
        return self._concrete_acl(data)
    @util.token_auto_auth
    def delete(self, instance, acl):
        """Delete an ACL by ID belonging to the instance specified by name.

        :param str instance: The name of the instance on which the ACL exists.
        :param str acl: The ID of the ACL to delete.
        :raises errors.ObjectRocketException: If the API does not answer with a 200.
        """
        base_url = self._url.format(instance=instance)
        url = '{base}{aclid}/'.format(base=base_url, aclid=acl)
        response = requests.delete(url, **self._default_request_kwargs)
        if response.status_code == 200:
            logger.info('Successfully deleted ACL {}'.format(acl))
        else:
            logger.info('Failed to delete ACL {}'.format(acl))
            logger.info('Response: [{0}] {1}'.format(response.status_code, response.content))
            raise errors.ObjectRocketException('Failed to delete ACL.')
    ######################
    # Private interface. #
    ######################
    def _concrete_acl(self, acl_doc):
        """Concretize an ACL document.

        :param dict acl_doc: A document describing an ACL entry. Should come from the API.
        :returns: An :py:class:`Acl`, or None.
        :rtype: :py:class:`Acl`
        """
        if not isinstance(acl_doc, dict):
            return None
        # Attempt to instantiate an Acl object with the given dict.
        try:
            return Acl(document=acl_doc, acls=self)
        # If construction fails, log the exception and return None.
        except Exception as ex:
            logger.exception(ex)
            logger.error('Could not instantiate ACL document. You probably need to upgrade to a '
                         'recent version of the client. Document which caused this error: {}'
                         .format(acl_doc))
            return None
    def _concrete_acl_list(self, acl_docs):
        """Concretize a list of ACL documents.

        :param list acl_docs: A list of ACL documents. Should come from the API.
        :returns: A list of :py:class:`Acl` objects; documents which fail to
            concretize (None entries) are dropped.
        :rtype: list
        """
        if not acl_docs:
            return []
        return list(filter(None, [self._concrete_acl(acl_doc=doc) for doc in acl_docs]))
    @property
    def _default_request_kwargs(self):
        """The default request keyword arguments to be passed to the requests library."""
        defaults = copy.deepcopy(super(Acls, self)._default_request_kwargs)
        # Every ACL request is authenticated via the X-Auth-Token header.
        defaults.setdefault('headers', {}).update({
            'X-Auth-Token': self._client.auth._token
        })
        return defaults
    @property
    def _url(self):
        """The base URL for ACL operations; '{instance}' is filled in per call."""
        base_url = self._client._url.rstrip('/')
        return '{}/instances/{{instance}}/acls/'.format(base_url)
class Acl(object):
    """An Access Control List entry object.

    :param dict document: The API document which backs this object.
    :param Acls acls: The Acls operations layer which produced this object.
    """
    def __init__(self, document, acls):
        self.__client = acls._client
        self.__acls = acls
        self.__document = document
        # Required fields: a KeyError here means the API response no longer
        # matches what this client version expects.
        self._cidr_mask = document['cidr_mask']
        self._description = document['description']
        self._id = document['_id']
        self._instance_name = document['instance']
        self._login = document['login']
        self._port = document['port']
        # Optional fields: silently default when absent from the response.
        self._date_created = document.get('date_created', None)
        self._instance = document.get('instance_id', None)
        self._instance_type = document.get('instance_type', None)
        self._metadata = document.get('metadata', {})
        self._service_type = document.get('service_type', None)
    def __repr__(self):
        """Represent this object as a string."""
        template = '<{!s} cidr={!s} port={!s} instance={!s} id={!s} at {!s}>'
        return template.format(
            type(self).__name__, self.cidr_mask, self.port,
            self.instance_name, self.id, hex(id(self)))
    @property
    def cidr_mask(self):
        """The IPv4 CIDR mask of this ACL entry."""
        return self._cidr_mask
    @property
    def date_created(self):
        """The creation date of this ACL entry (None if not reported)."""
        return self._date_created
    @property
    def description(self):
        """The human-readable description of this ACL entry."""
        return self._description
    @property
    def _document(self):
        """The raw API document backing this ACL entry."""
        return self.__document
    @property
    def id(self):
        """The unique ID of this ACL entry."""
        return self._id
    @property
    def instance(self):
        """The ID of the instance this ACL entry is associated with."""
        return self._instance
    @property
    def instance_name(self):
        """The name of the instance this ACL entry is associated with."""
        return self._instance_name
    @property
    def instance_type(self):
        """The type of the instance this ACL entry is associated with."""
        return self._instance_type
    @property
    def login(self):
        """The login of the user owning this ACL entry."""
        return self._login
    @property
    def metadata(self):
        """Arbitrary metadata attached to this ACL entry."""
        return self._metadata
    @property
    def port(self):
        """The port number of this ACL entry."""
        return self._port
    @property
    def service_type(self):
        """The service of the instance this ACL entry is associated with."""
        return self._service_type
    def to_dict(self):
        """Render this object as a dictionary."""
        return self._document
    ######################
    # Private interface. #
    ######################
    @property
    def _client(self):
        """The bound objectrocket.client.Client instance."""
        return self.__client
    @property
    def _url(self):
        """The URL of this ACL object."""
        base_url = self._client._url.rstrip('/')
        return '{}/instances/{}/acls/{}/'.format(base_url, self.instance_name, self.id)
| {
"repo_name": "objectrocket/python-client",
"path": "objectrocket/acls.py",
"copies": "1",
"size": "9531",
"license": "mit",
"hash": 8794813385424433000,
"line_mean": 33.5326086957,
"line_max": 97,
"alpha_frac": 0.5943762459,
"autogenerated": false,
"ratio": 4.297114517583409,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5391490763483409,
"avg_score": null,
"num_lines": null
} |
""" ACL parsing stuff """
from kazoo.security import (
ACL,
Id,
make_acl,
make_digest_acl,
Permissions
)
class ACLReader(object):
    """ Helper class to parse/unparse ACLs """
    class BadACL(Exception):
        """ Couldn't parse the ACL """
        pass
    valid_schemes = [
        "world",
        "auth",
        "digest",
        "host",
        "ip",
        "sasl",
        "username_password",  # internal-only: gen digest from user:password
    ]
    @classmethod
    def extract(cls, acls):
        """ parse a str that represents a list of ACLs """
        return [cls.extract_acl(acl) for acl in acls]
    @classmethod
    def extract_acl(cls, acl):
        """ parse an individual ACL (i.e.: world:anyone:cdrwa)

        :param str acl: ACL string of the form scheme:id:perms
        :returns: a kazoo ACL object
        :raises BadACL: on a malformed string or unknown scheme
        """
        try:
            scheme, rest = acl.split(":", 1)
            # the id itself may contain ':', so everything up to the last
            # ':' is the credential and the tail is the perms string
            credential = ":".join(rest.split(":")[0:-1])
            cdrwa = rest.split(":")[-1]
        except ValueError:
            raise cls.BadACL("Bad ACL: %s. Format is scheme:id:perms" % (acl))
        if scheme not in cls.valid_schemes:
            raise cls.BadACL("Invalid scheme: %s" % (acl))
        # 'X in cdrwa' is already a bool; the old 'True if ... else False'
        # round-trips were redundant.
        create = "c" in cdrwa
        read = "r" in cdrwa
        write = "w" in cdrwa
        delete = "d" in cdrwa
        admin = "a" in cdrwa
        if scheme == "username_password":
            # synthesize a digest ACL from a user:password credential
            try:
                username, password = credential.split(":", 1)
            except ValueError:
                raise cls.BadACL("Bad ACL: %s. Format is scheme:id:perms" % (acl))
            return make_digest_acl(username,
                                   password,
                                   read,
                                   write,
                                   create,
                                   delete,
                                   admin)
        else:
            return make_acl(scheme,
                            credential,
                            read,
                            write,
                            create,
                            delete,
                            admin)
    @classmethod
    def to_dict(cls, acl):
        """ transform an ACL to a dict """
        return {
            "perms": acl.perms,
            "id": {
                "scheme": acl.id.scheme,
                "id": acl.id.id
            }
        }
    @classmethod
    def from_dict(cls, acl_dict):
        """ ACL from dict; missing fields default to world:anyone with ALL perms """
        perms = acl_dict.get("perms", Permissions.ALL)
        id_dict = acl_dict.get("id", {})
        id_scheme = id_dict.get("scheme", "world")
        id_id = id_dict.get("id", "anyone")
        return ACL(perms, Id(id_scheme, id_id))
| {
"repo_name": "rgs1/zk_shell",
"path": "zk_shell/acl.py",
"copies": "1",
"size": "2744",
"license": "apache-2.0",
"hash": -5711195425886257000,
"line_mean": 29.1538461538,
"line_max": 82,
"alpha_frac": 0.4548104956,
"autogenerated": false,
"ratio": 4.274143302180685,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.00013239772275916854,
"num_lines": 91
} |
# acl.py: An acl daemon
import time
from hooks import Hook
# Nicks treated as bots in #programming: fixacl() voices these and
# devoices everyone else.
botlist = ["artemis", "botss", "ca", "ChanStat", "doge",
           "falco", "gnat", "Infobot", "Kuko", "NightBot",
           "peer", "Sharpie", "shoko", "simbot", "tacobot",
           "|"]
# Timestamp of the last ACL pass; fixacl() runs at most once per 10 seconds.
throttle = 0
@Hook('PRIVMSG')
def acl_priv(bot, ev):
    """Re-run the voice ACL when a known bot speaks in #programming.

    Only the "subluminal" bot instance acts; other instances pass through.
    """
    if bot.name != "subluminal":
        return ev
    # ev.msg[:1] instead of ev.msg[0]: an empty message no longer raises
    # IndexError (for a non-empty string the two are equivalent).
    if ev.dest == "#programming" or ev.msg[:1] == '[' or ev.user.nick == "***":
        if ev.user.nick in botlist:
            print("PRIVMSG.")
            fixacl(bot)
    return ev
@Hook('JOIN')
def acl_join(bot, ev):
    """Re-run the voice ACL when a known bot (or we ourselves) joins #programming."""
    if bot.name != "subluminal":
        return ev
    joined_channels = ev.params.split(':', 1)[-1].split()
    # short-circuit keeps bot.getnick() from being called unless the
    # channel actually matched, same as the original nested ifs
    if "#programming" in joined_channels and ev.user.nick in botlist + [bot.getnick()]:
        print("JOIN.")
        fixacl(bot)
    return ev
@Hook('NOTICE')
def acl_notc(bot, ev):
    """Re-run the voice ACL when a bot notice (or our own) hits #programming."""
    if bot.name != "subluminal":
        return ev
    # ev.msg[:1] instead of ev.msg[0]: an empty notice no longer raises
    # IndexError (for a non-empty string the two are equivalent).
    if ev.dest == "#programming" or ev.msg[:1] == '[' or ev.user.nick == "***":
        if ev.user.nick in botlist+[bot.getnick()]:
            print("NOTICE.")
            fixacl(bot)
    return ev
@Hook('NICK')
def acl_nick(bot, ev):
    """Re-run the voice ACL when a nick change involves a known bot.

    Both the old nick (ev.user.nick) and the new one (ev.params) are checked.
    """
    if bot.name != "subluminal":
        return ev
    involves_bot = ev.user.nick in botlist or ev.params in botlist
    if involves_bot:
        print("NICK.")
        fixacl(bot)
    return ev
@Hook('MODE')
def acl_mode(bot, ev):
    """Re-run the voice ACL after a known bot changes user privileges."""
    if bot.name != "subluminal":
        return ev
    if ev.dest == "#programming":
        # q/a/o/h/v are the privilege mode letters; pair them positionally
        # with the mode parameters, exactly like the original zip.
        touches_privs = any(flag in param
                            for flag, param in zip("qaohv", ev.params))
        if touches_privs and ev.user.nick in botlist:
            print("MODE.")
            fixacl(bot)
    return ev
def fixacl(bot):
    """Voice every known bot in #programming and devoice everyone else.

    Rate-limited through the module-level ``throttle`` timestamp: at most
    one pass every 10 seconds.
    """
    global throttle
    now = time.time()
    if now - throttle < 10:
        return
    throttle = now
    try:
        members = bot.chans["#programming"].users()
        voiced = [member.nick for member in members if member.nick in botlist]
        devoiced = [member.nick for member in members if member.nick not in botlist]
    except KeyError:
        # Not (yet) in the channel: nothing to voice either way.
        voiced = devoiced = []
    mode_args = []
    if voiced:
        mode_args.append(['+v'] + voiced)
    if devoiced:
        mode_args.append(['-v'] + devoiced)
    bot.mode("#programming", *mode_args)
| {
"repo_name": "tonyolag/sadaharu",
"path": "plugins/acl.py",
"copies": "1",
"size": "2128",
"license": "mpl-2.0",
"hash": -1879810512932046800,
"line_mean": 25.6,
"line_max": 95,
"alpha_frac": 0.5385338346,
"autogenerated": false,
"ratio": 3.1020408163265305,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.41405746509265307,
"avg_score": null,
"num_lines": null
} |
""" a clumsy attempt at a macro language to let the programmer execute code on the server (ex: determine 64bit)"""
from . import is64bit as is64bit
def macro_call(macro_name, args, kwargs):
    """ allow the programmer to perform limited processing on the server by passing macro names and args

    :param macro_name: name of the macro (the part of the key after "macro_")
    :param args: the macro's value -- args[0] is the new key name the macro
        will create, args[1:] are macro-specific arguments.  A bare string is
        treated as a one-element sequence.
    :param kwargs: the connection keyword dictionary. ??key has been removed
    :returns: (new_key, value) -- the pair to store back into kwargs
    :raises ValueError: for an unknown macro or any error during processing
    """
    # Fix: was isinstance(args, (str, str)) -- a duplicated type left over
    # from a 2to3 conversion of (str, unicode).
    if isinstance(args, str):
        args = [args]  # the user forgot to pass a sequence, so make a string into args[0]
    new_key = args[0]
    try:
        if macro_name == "is64bit":
            if is64bit.Python():  # if on 64 bit Python
                return new_key, args[1]  # return first argument
            try:
                return new_key, args[2]  # else return second argument (if defined)
            except IndexError:
                return new_key, ''  # else return blank
        elif macro_name == "getuser":  # get the name of the user the server is logged in under
            if new_key not in kwargs:
                import getpass
                return new_key, getpass.getuser()
        elif macro_name == "getnode":  # get the name of the computer running the server
            import platform
            try:
                return new_key, args[1] % platform.node()
            except IndexError:
                return new_key, platform.node()
        elif macro_name == "getenv":  # expand the server's environment variable args[1]
            # Fix: 'os' was never imported on this code path, so every
            # "getenv" call died with a NameError (masked by the broad
            # except below as a generic ValueError).
            import os
            try:
                dflt = args[2]  # if not found, default from args[2]
            except IndexError:  # or blank
                dflt = ''
            return new_key, os.environ.get(args[1], dflt)
        elif macro_name == "auto_security":
            if not 'user' in kwargs or not kwargs['user']:  # missing, blank, or Null username
                return new_key, 'Integrated Security=SSPI'
            return new_key, 'User ID=%(user)s; Password=%(password)s' % kwargs
        elif macro_name == "find_temp_test_path":  # helper function for testing ado operation -- undocumented
            import tempfile, os
            return new_key, os.path.join(tempfile.gettempdir(), 'adodbapi_test', args[1])
        raise ValueError('Unknown connect string macro=%s' % macro_name)
    except Exception:
        # Fix: was a bare 'except:', which also swallowed SystemExit and
        # KeyboardInterrupt; any processing error still surfaces as ValueError.
        raise ValueError('Error in macro processing %s %s' % (macro_name, repr(args)))
def process(args, kwargs, expand_macros=False):  # --> connection string with keyword arguments processed.
    """ attempts to inject arguments into a connection string using Python "%" operator for strings

    :param args: positional parameters from the .connect() call
    :param kwargs: keyword arguments from the .connect() call
    :param expand_macros: when True, keys starting with 'macro_' are run
        through macro_call() and replaced by their results
    :returns: the keyword dictionary, guaranteed to hold 'connection_string'
    :raises TypeError: when no connection string can be derived
    """
    dsn = args[0] if args else None
    # as a convenience the first argument may be a django-style settings dict
    if isinstance(dsn, dict):
        kwargs.update(dsn)
    elif dsn:
        # the connection string is passed to the connection as part of the
        # keyword dictionary
        kwargs['connection_string'] = dsn
    second = args[1] if len(args) > 1 else None
    if isinstance(second, int):
        # historically, the second positional argument might be a timeout value
        kwargs['timeout'] = second
    elif isinstance(second, str):
        # if the second positional argument is a string, then it is user
        kwargs['user'] = second
    elif isinstance(second, dict):
        # if it is a dictionary, use it as keyword arguments, too
        kwargs.update(second)
    # positional password / host / database: stop at the first one missing,
    # mirroring the original try/IndexError ladder
    for position, name in ((2, 'password'), (3, 'host'), (4, 'database')):
        if position >= len(args):
            break
        kwargs[name] = args[position]
    # make sure connection string is defined somehow
    if 'connection_string' not in kwargs:
        if 'dsn' in kwargs:  # perhaps 'dsn' was defined
            kwargs['connection_string'] = kwargs['dsn']
        elif 'host' in kwargs:  # as a last effort, use the "host" keyword
            kwargs['connection_string'] = kwargs['host']
        else:
            raise TypeError("Must define 'connection_string' for ado connections")
    if expand_macros:
        for keyword in list(kwargs.keys()):
            if keyword.startswith('macro_'):  # If a key defines a macro
                macro_name = keyword[6:]  # name without the "macro_"
                macro_code = kwargs.pop(keyword)  # remove the macro_key and get the code to execute
                new_key, value = macro_call(macro_name, macro_code, kwargs)
                kwargs[new_key] = value  # put the result back in the keywords dict
    # special processing for PyRO IPv6 host address
    if 'proxy_host' in kwargs:
        address = kwargs['proxy_host']
        if ':' in address and address[0] != '[':
            # an IPv6 address not yet surrounded by brackets
            kwargs['proxy_host'] = '[%s]' % address
    return kwargs
| {
"repo_name": "sserrot/champion_relationships",
"path": "venv/Lib/site-packages/adodbapi/process_connect_string.py",
"copies": "1",
"size": "5376",
"license": "mit",
"hash": -8932927102207937000,
"line_mean": 44.5593220339,
"line_max": 114,
"alpha_frac": 0.6047247024,
"autogenerated": false,
"ratio": 4.239747634069401,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.53444723364694,
"avg_score": null,
"num_lines": null
} |
"""ACME AuthHandler."""
import itertools
import logging
import time
from acme import challenges
from acme import messages2
from letsencrypt import achallenges
from letsencrypt import constants
from letsencrypt import errors
class AuthHandler(object):
    """ACME Authorization Handler for a client.

    :ivar dv_auth: Authenticator capable of solving
        :class:`~acme.challenges.DVChallenge` types
    :type dv_auth: :class:`letsencrypt.interfaces.IAuthenticator`
    :ivar cont_auth: Authenticator capable of solving
        :class:`~acme.challenges.ContinuityChallenge` types
    :type cont_auth: :class:`letsencrypt.interfaces.IAuthenticator`
    :ivar network: Network object for sending and receiving authorization
        messages
    :type network: :class:`letsencrypt.network2.Network`
    :ivar account: Client's Account
    :type account: :class:`letsencrypt.account.Account`
    :ivar dict authzr: ACME Authorization Resource dict where keys are domains
        and values are :class:`acme.messages2.AuthorizationResource`
    :ivar list dv_c: DV challenges in the form of
        :class:`letsencrypt.achallenges.AnnotatedChallenge`
    :ivar list cont_c: Continuity challenges in the
        form of :class:`letsencrypt.achallenges.AnnotatedChallenge`
    """
    def __init__(self, dv_auth, cont_auth, network, account):
        self.dv_auth = dv_auth
        self.cont_auth = cont_auth
        self.network = network
        self.account = account
        self.authzr = dict()
        # List must be used to keep responses straight.
        self.dv_c = []
        self.cont_c = []
    def get_authorizations(self, domains, best_effort=False):
        """Retrieve all authorizations for challenges.

        :param set domains: Domains for authorization
        :param bool best_effort: Whether or not all authorizations are required
            (this is useful in renewal)

        :returns: authorization resources whose final status is valid.
            (NOTE(review): the docstring previously claimed a
            (completed, failed) tuple, which did not match the code.)
        :rtype: list of :class:`acme.messages2.AuthorizationResource`

        :raises AuthorizationError: If unable to retrieve all authorizations
        """
        # Request a fresh authorization resource (with its challenges) for
        # every domain up front.
        for domain in domains:
            self.authzr[domain] = self.network.request_domain_challenges(
                domain, self.account.new_authzr_uri)
        self._choose_challenges(domains)
        # While there are still challenges remaining...
        while self.dv_c or self.cont_c:
            cont_resp, dv_resp = self._solve_challenges()
            logging.info("Waiting for verification...")
            # Send all Responses - this modifies dv_c and cont_c
            self._respond(cont_resp, dv_resp, best_effort)
        # Just make sure all decisions are complete.
        self.verify_authzr_complete()
        # Only return valid authorizations
        return [authzr for authzr in self.authzr.values()
                if authzr.body.status == messages2.STATUS_VALID]
    def _choose_challenges(self, domains):
        """Retrieve necessary challenges to satisfy server."""
        logging.info("Performing the following challenges:")
        for dom in domains:
            # Ask gen_challenge_path for the set of challenge indices that
            # satisfies the CA for this domain at the lowest preference cost.
            path = gen_challenge_path(
                self.authzr[dom].body.challenges,
                self._get_chall_pref(dom),
                self.authzr[dom].body.combinations)
            dom_cont_c, dom_dv_c = self._challenge_factory(
                dom, path)
            self.dv_c.extend(dom_dv_c)
            self.cont_c.extend(dom_cont_c)
    def _solve_challenges(self):
        """Get Responses for challenges from authenticators."""
        cont_resp = []
        dv_resp = []
        try:
            if self.cont_c:
                cont_resp = self.cont_auth.perform(self.cont_c)
            if self.dv_c:
                dv_resp = self.dv_auth.perform(self.dv_c)
        # This will catch both specific types of errors.
        except errors.AuthorizationError:
            logging.critical("Failure in setting up challenges.")
            logging.info("Attempting to clean up outstanding challenges...")
            self._cleanup_challenges()
            raise
        # Authenticators must return exactly one response per challenge,
        # in order (None/False meaning "not performed").
        assert len(cont_resp) == len(self.cont_c)
        assert len(dv_resp) == len(self.dv_c)
        return cont_resp, dv_resp
    def _respond(self, cont_resp, dv_resp, best_effort):
        """Send/Receive confirmation of all challenges.

        .. note:: This method also cleans up the auth_handler state.
        """
        # TODO: chall_update is a dirty hack to get around acme-spec #105
        chall_update = dict()
        active_achalls = []
        active_achalls.extend(
            self._send_responses(self.dv_c, dv_resp, chall_update))
        active_achalls.extend(
            self._send_responses(self.cont_c, cont_resp, chall_update))
        # Check for updated status...
        self._poll_challenges(chall_update, best_effort)
        # This removes challenges from self.dv_c and self.cont_c
        self._cleanup_challenges(active_achalls)
    def _send_responses(self, achalls, resps, chall_update):
        """Send responses and make sure errors are handled.

        :param dict chall_update: parameter that is updated to hold
            domain -> list of outstanding solved annotated challenges

        :returns: list of annotated challenges whose responses were sent
        """
        active_achalls = []
        # NOTE: itertools.izip is Python 2 only; this module targets py2.
        for achall, resp in itertools.izip(achalls, resps):
            # Don't send challenges for None and False authenticator responses
            if resp:
                self.network.answer_challenge(achall.challb, resp)
                active_achalls.append(achall)
                if achall.domain in chall_update:
                    chall_update[achall.domain].append(achall)
                else:
                    chall_update[achall.domain] = [achall]
        return active_achalls
    def _poll_challenges(
            self, chall_update, best_effort, min_sleep=3, max_rounds=15):
        """Wait for all challenge results to be determined."""
        dom_to_check = set(chall_update.keys())
        comp_domains = set()
        rounds = 0
        while dom_to_check and rounds < max_rounds:
            # TODO: Use retry-after...
            time.sleep(min_sleep)
            for domain in dom_to_check:
                comp_challs, failed_challs = self._handle_check(
                    domain, chall_update[domain])
                if len(comp_challs) == len(chall_update[domain]):
                    # Every outstanding challenge for this domain completed.
                    comp_domains.add(domain)
                elif not failed_challs:
                    # Partial progress: drop completed ones, keep polling.
                    for chall in comp_challs:
                        chall_update[domain].remove(chall)
                # We failed some challenges... damage control
                else:
                    # Right now... just assume a loss and carry on...
                    if best_effort:
                        comp_domains.add(domain)
                    else:
                        raise errors.AuthorizationError(
                            "Failed Authorization procedure for %s" % domain)
            dom_to_check -= comp_domains
            comp_domains.clear()
            rounds += 1
    def _handle_check(self, domain, achalls):
        """Returns tuple of ('completed', 'failed')."""
        completed = []
        failed = []
        # Refresh the authorization resource from the server.
        self.authzr[domain], _ = self.network.poll(self.authzr[domain])
        if self.authzr[domain].body.status == messages2.STATUS_VALID:
            # A valid authorization implies every challenge succeeded.
            return achalls, []
        # Note: if the whole authorization is invalid, the individual failed
        # challenges will be determined here...
        for achall in achalls:
            status = self._get_chall_status(self.authzr[domain], achall)
            # This does nothing for challenges that have yet to be decided yet.
            if status == messages2.STATUS_VALID:
                completed.append(achall)
            elif status == messages2.STATUS_INVALID:
                failed.append(achall)
        return completed, failed
    def _get_chall_status(self, authzr, achall):  # pylint: disable=no-self-use
        """Get the status of the challenge.

        .. warning:: This assumes only one instance of type of challenge in
            each challenge resource.

        :param authzr: Authorization Resource
        :type authzr: :class:`acme.messages2.AuthorizationResource`
        :param achall: Annotated challenge for which to get status
        :type achall: :class:`letsencrypt.achallenges.AnnotatedChallenge`
        """
        # Match by challenge class (see warning above about uniqueness).
        for authzr_challb in authzr.body.challenges:
            if type(authzr_challb.chall) is type(achall.challb.chall):
                return authzr_challb.status
        raise errors.AuthorizationError(
            "Target challenge not found in authorization resource")
    def _get_chall_pref(self, domain):
        """Return list of challenge preferences.

        Continuity preferences come first, then DV preferences.

        :param str domain: domain for which you are requesting preferences
        """
        # Make sure to make a copy...
        chall_prefs = []
        chall_prefs.extend(self.cont_auth.get_chall_pref(domain))
        chall_prefs.extend(self.dv_auth.get_chall_pref(domain))
        return chall_prefs
    def _cleanup_challenges(self, achall_list=None):
        """Cleanup challenges.

        If achall_list is not provided, cleanup all achallenges.
        """
        logging.info("Cleaning up challenges")
        if achall_list is None:
            dv_c = self.dv_c
            cont_c = self.cont_c
        else:
            dv_c = [achall for achall in achall_list
                    if isinstance(achall.chall, challenges.DVChallenge)]
            cont_c = [achall for achall in achall_list if isinstance(
                achall.chall, challenges.ContinuityChallenge)]
        if dv_c:
            self.dv_auth.cleanup(dv_c)
            for achall in dv_c:
                self.dv_c.remove(achall)
        if cont_c:
            self.cont_auth.cleanup(cont_c)
            for achall in cont_c:
                self.cont_c.remove(achall)
    def verify_authzr_complete(self):
        """Verifies that all authorizations have been decided.

        :raises errors.AuthorizationError: If any authorization resource is
            still pending (neither valid nor invalid).
            (NOTE(review): the docstring previously claimed a bool return,
            but the method returns nothing and raises instead.)
        """
        for authzr in self.authzr.values():
            if (authzr.body.status != messages2.STATUS_VALID and
                    authzr.body.status != messages2.STATUS_INVALID):
                raise errors.AuthorizationError("Incomplete authorizations")
    def _challenge_factory(self, domain, path):
        """Construct Namedtuple Challenges

        :param str domain: domain of the enrollee
        :param list path: List of indices from `challenges`.

        :returns: dv_chall, list of DVChallenge type
            :class:`letsencrypt.achallenges.Indexed`
            cont_chall, list of ContinuityChallenge type
            :class:`letsencrypt.achallenges.Indexed`
        :rtype: tuple

        :raises errors.LetsEncryptClientError: If Challenge type is not
            recognized
        """
        dv_chall = []
        cont_chall = []
        for index in path:
            challb = self.authzr[domain].body.challenges[index]
            chall = challb.chall
            achall = challb_to_achall(challb, self.account.key, domain)
            if isinstance(chall, challenges.ContinuityChallenge):
                cont_chall.append(achall)
            elif isinstance(chall, challenges.DVChallenge):
                dv_chall.append(achall)
        return cont_chall, dv_chall
def challb_to_achall(challb, key, domain):
    """Converts a ChallengeBody object to an AnnotatedChallenge.

    :param challb: ChallengeBody
    :type challb: :class:`acme.messages2.ChallengeBody`
    :param key: Key
    :type key: :class:`letsencrypt.le_util.Key`
    :param str domain: Domain of the challb

    :returns: Appropriate AnnotatedChallenge
    :rtype: :class:`letsencrypt.achallenges.AnnotatedChallenge`
    :raises errors.LetsEncryptClientError: If the challenge type is unsupported.
    """
    chall = challb.chall
    if isinstance(chall, challenges.DVSNI):
        logging.info(" DVSNI challenge for %s.", domain)
        return achallenges.DVSNI(
            challb=challb, domain=domain, key=key)
    elif isinstance(chall, challenges.SimpleHTTPS):
        logging.info(" SimpleHTTPS challenge for %s.", domain)
        return achallenges.SimpleHTTPS(
            challb=challb, domain=domain, key=key)
    elif isinstance(chall, challenges.DNS):
        logging.info(" DNS challenge for %s.", domain)
        return achallenges.DNS(challb=challb, domain=domain)
    elif isinstance(chall, challenges.RecoveryToken):
        logging.info(" Recovery Token Challenge for %s.", domain)
        return achallenges.RecoveryToken(challb=challb, domain=domain)
    elif isinstance(chall, challenges.RecoveryContact):
        logging.info(" Recovery Contact Challenge for %s.", domain)
        return achallenges.RecoveryContact(
            challb=challb, domain=domain)
    elif isinstance(chall, challenges.ProofOfPossession):
        logging.info(" Proof-of-Possession Challenge for %s", domain)
        return achallenges.ProofOfPossession(
            challb=challb, domain=domain)
    else:
        # Bug fix: the message and its argument were previously passed as two
        # separate constructor arguments, so "%s" was never interpolated into
        # the exception text.
        raise errors.LetsEncryptClientError(
            "Received unsupported challenge of type: %s" % chall.typ)
def gen_challenge_path(challbs, preferences, combinations):
    """Generate a plan to get authority over the identity.

    .. todo:: This can be possibly be rewritten to use resolved_combinations.

    :param tuple challbs: A tuple of challenges
        (:class:`acme.messages2.Challenge`) from
        :class:`acme.messages2.AuthorizationResource` to be
        fulfilled by the client in order to prove possession of the
        identifier.
    :param list preferences: List of challenge preferences for domain
        (:class:`acme.challenges.Challenge` subclasses)
    :param tuple combinations: A collection of sets of challenges from
        :class:`acme.messages.Challenge`, each of which would
        be sufficient to prove possession of the identifier.

    :returns: tuple of indices from ``challenges``.
    :rtype: tuple

    :raises letsencrypt.errors.AuthorizationError: If a
        path cannot be created that satisfies the CA given the preferences and
        combinations.
    """
    # With server-provided combinations we can pick the cheapest one;
    # otherwise fall back to a preference-ordered greedy scan.
    if combinations:
        return _find_smart_path(challbs, preferences, combinations)
    return _find_dumb_path(challbs, preferences)
def _find_smart_path(challbs, preferences, combinations):
"""Find challenge path with server hints.
Can be called if combinations is included. Function uses a simple
ranking system to choose the combo with the lowest cost.
"""
chall_cost = {}
max_cost = 1
for i, chall_cls in enumerate(preferences):
chall_cost[chall_cls] = i
max_cost += i
# max_cost is now equal to sum(indices) + 1
best_combo = []
# Set above completing all of the available challenges
best_combo_cost = max_cost
combo_total = 0
for combo in combinations:
for challenge_index in combo:
combo_total += chall_cost.get(challbs[
challenge_index].chall.__class__, max_cost)
if combo_total < best_combo_cost:
best_combo = combo
best_combo_cost = combo_total
combo_total = 0
if not best_combo:
msg = ("Client does not support any combination of challenges that "
"will satisfy the CA.")
logging.fatal(msg)
raise errors.AuthorizationError(msg)
return best_combo
def _find_dumb_path(challbs, preferences):
    """Find a challenge path without server hints.

    Used when the server supplies no ``combinations`` hint: greedily take
    every offered challenge of each preferred type that is not mutually
    exclusive with a challenge already chosen.
    """
    # Duplicate preferences would let the same challenge be added twice.
    assert len(preferences) == len(set(preferences))

    path = []
    satisfied = set()
    for preferred_cls in preferences:
        for index, candidate in enumerate(challbs):
            if not isinstance(candidate.chall, preferred_cls):
                continue
            if is_preferred(candidate, satisfied):
                satisfied.add(candidate)
                path.append(index)
    return path
def mutually_exclusive(obj1, obj2, groups, different=False):
    """Are two objects mutually exclusive?"""
    for group in groups:
        in_group_1 = any(isinstance(obj1, cls) for cls in group)
        in_group_2 = any(isinstance(obj2, cls) for cls in group)
        if not (in_group_1 and in_group_2):
            continue
        # With different=True, two instances of the same concrete class
        # are not treated as exclusive even within the same group.
        if different and isinstance(obj1, obj2.__class__):
            continue
        return False
    return True
def is_preferred(offered_challb, satisfied,
                 exclusive_groups=constants.EXCLUSIVE_CHALLENGES):
    """Return whether or not the challenge is preferred in path."""
    # Acceptable only if it conflicts with none of the challenges that
    # have already been selected for the path.
    return all(
        mutually_exclusive(
            offered_challb.chall, chosen.chall, exclusive_groups,
            different=True)
        for chosen in satisfied)
| {
"repo_name": "felixrieseberg/lets-encrypt-preview",
"path": "letsencrypt/auth_handler.py",
"copies": "1",
"size": "17081",
"license": "apache-2.0",
"hash": 2228964295367667200,
"line_mean": 34.4377593361,
"line_max": 79,
"alpha_frac": 0.6211580118,
"autogenerated": false,
"ratio": 4.080506450071668,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5201664461871668,
"avg_score": null,
"num_lines": null
} |
"""ACME AuthHandler."""
import itertools
import logging
import time
from acme import challenges
from acme import messages
from letsencrypt import achallenges
from letsencrypt import constants
from letsencrypt import errors
class AuthHandler(object):
    """ACME Authorization Handler for a client.

    Drives the full challenge lifecycle: request authorizations, choose
    and perform challenges, poll for results, and clean up.

    :ivar dv_auth: Authenticator capable of solving
        :class:`~acme.challenges.DVChallenge` types
    :type dv_auth: :class:`letsencrypt.interfaces.IAuthenticator`

    :ivar cont_auth: Authenticator capable of solving
        :class:`~acme.challenges.ContinuityChallenge` types
    :type cont_auth: :class:`letsencrypt.interfaces.IAuthenticator`

    :ivar network: Network object for sending and receiving authorization
        messages
    :type network: :class:`letsencrypt.network.Network`

    :ivar account: Client's Account
    :type account: :class:`letsencrypt.account.Account`

    :ivar dict authzr: ACME Authorization Resource dict where keys are domains
        and values are :class:`acme.messages.AuthorizationResource`
    :ivar list dv_c: DV challenges in the form of
        :class:`letsencrypt.achallenges.AnnotatedChallenge`
    :ivar list cont_c: Continuity challenges in the
        form of :class:`letsencrypt.achallenges.AnnotatedChallenge`

    """
    def __init__(self, dv_auth, cont_auth, network, account):
        self.dv_auth = dv_auth
        self.cont_auth = cont_auth
        self.network = network
        self.account = account

        self.authzr = dict()

        # List must be used to keep responses straight.
        self.dv_c = []
        self.cont_c = []

    def get_authorizations(self, domains, best_effort=False):
        """Retrieve all authorizations for challenges.

        :param set domains: Domains for authorization
        :param bool best_effort: Whether or not all authorizations are
            required (this is useful in renewal)

        :returns: tuple of lists of authorization resources. Takes the
            form of (`completed`, `failed`)
        :rtype: tuple

        :raises AuthorizationError: If unable to retrieve all
            authorizations

        """
        for domain in domains:
            self.authzr[domain] = self.network.request_domain_challenges(
                domain, self.account.new_authzr_uri)

        self._choose_challenges(domains)

        # While there are still challenges remaining...
        # _respond removes finished challenges from dv_c/cont_c, so this
        # loop terminates once every chosen challenge has been decided.
        while self.dv_c or self.cont_c:
            cont_resp, dv_resp = self._solve_challenges()
            logging.info("Waiting for verification...")

            # Send all Responses - this modifies dv_c and cont_c
            self._respond(cont_resp, dv_resp, best_effort)

        # Just make sure all decisions are complete.
        self.verify_authzr_complete()

        # Only return valid authorizations
        return [authzr for authzr in self.authzr.values()
                if authzr.body.status == messages.STATUS_VALID]

    def _choose_challenges(self, domains):
        """Retrieve necessary challenges to satisfy server."""
        logging.info("Performing the following challenges:")
        for dom in domains:
            path = gen_challenge_path(
                self.authzr[dom].body.challenges,
                self._get_chall_pref(dom),
                self.authzr[dom].body.combinations)

            dom_cont_c, dom_dv_c = self._challenge_factory(
                dom, path)
            self.dv_c.extend(dom_dv_c)
            self.cont_c.extend(dom_cont_c)

    def _solve_challenges(self):
        """Get Responses for challenges from authenticators.

        Returns (cont_resp, dv_resp) lists aligned index-for-index with
        cont_c and dv_c respectively.
        """
        cont_resp = []
        dv_resp = []
        try:
            if self.cont_c:
                cont_resp = self.cont_auth.perform(self.cont_c)
            if self.dv_c:
                dv_resp = self.dv_auth.perform(self.dv_c)
        # This will catch both specific types of errors.
        except errors.AuthorizationError:
            logging.critical("Failure in setting up challenges.")
            logging.info("Attempting to clean up outstanding challenges...")
            self._cleanup_challenges()
            raise

        assert len(cont_resp) == len(self.cont_c)
        assert len(dv_resp) == len(self.dv_c)

        return cont_resp, dv_resp

    def _respond(self, cont_resp, dv_resp, best_effort):
        """Send/Receive confirmation of all challenges.

        .. note:: This method also cleans up the auth_handler state.

        """
        # TODO: chall_update is a dirty hack to get around acme-spec #105
        chall_update = dict()
        active_achalls = []
        active_achalls.extend(
            self._send_responses(self.dv_c, dv_resp, chall_update))
        active_achalls.extend(
            self._send_responses(self.cont_c, cont_resp, chall_update))

        # Check for updated status...
        try:
            self._poll_challenges(chall_update, best_effort)
        finally:
            # This removes challenges from self.dv_c and self.cont_c
            self._cleanup_challenges(active_achalls)

    def _send_responses(self, achalls, resps, chall_update):
        """Send responses and make sure errors are handled.

        :param dict chall_update: parameter that is updated to hold
            authzr -> list of outstanding solved annotated challenges

        """
        active_achalls = []
        for achall, resp in itertools.izip(achalls, resps):
            # Don't send challenges for None and False authenticator responses
            if resp:
                self.network.answer_challenge(achall.challb, resp)
                active_achalls.append(achall)
                if achall.domain in chall_update:
                    chall_update[achall.domain].append(achall)
                else:
                    chall_update[achall.domain] = [achall]

        return active_achalls

    def _poll_challenges(
            self, chall_update, best_effort, min_sleep=3, max_rounds=15):
        """Wait for all challenge results to be determined.

        Polls each domain until all of its outstanding challenges are
        decided, up to max_rounds polling rounds.
        """
        dom_to_check = set(chall_update.keys())
        comp_domains = set()
        rounds = 0

        while dom_to_check and rounds < max_rounds:
            # TODO: Use retry-after...
            time.sleep(min_sleep)
            for domain in dom_to_check:
                comp_challs, failed_challs = self._handle_check(
                    domain, chall_update[domain])

                if len(comp_challs) == len(chall_update[domain]):
                    comp_domains.add(domain)
                elif not failed_challs:
                    for chall in comp_challs:
                        chall_update[domain].remove(chall)
                # We failed some challenges... damage control
                else:
                    # Right now... just assume a loss and carry on...
                    if best_effort:
                        comp_domains.add(domain)
                    else:
                        raise errors.AuthorizationError(
                            "Failed Authorization procedure for %s" % domain)

            dom_to_check -= comp_domains
            comp_domains.clear()
            rounds += 1

    def _handle_check(self, domain, achalls):
        """Returns tuple of ('completed', 'failed')."""
        completed = []
        failed = []

        self.authzr[domain], _ = self.network.poll(self.authzr[domain])
        if self.authzr[domain].body.status == messages.STATUS_VALID:
            return achalls, []

        # Note: if the whole authorization is invalid, the individual failed
        # challenges will be determined here...
        for achall in achalls:
            status = self._get_chall_status(self.authzr[domain], achall)

            # This does nothing for challenges that have yet to be decided yet.
            if status == messages.STATUS_VALID:
                completed.append(achall)
            elif status == messages.STATUS_INVALID:
                failed.append(achall)

        return completed, failed

    def _get_chall_status(self, authzr, achall):  # pylint: disable=no-self-use
        """Get the status of the challenge.

        .. warning:: This assumes only one instance of type of challenge in
            each challenge resource.

        :param authzr: Authorization Resource
        :type authzr: :class:`acme.messages.AuthorizationResource`

        :param achall: Annotated challenge for which to get status
        :type achall: :class:`letsencrypt.achallenges.AnnotatedChallenge`

        """
        for authzr_challb in authzr.body.challenges:
            if type(authzr_challb.chall) is type(achall.challb.chall):
                return authzr_challb.status
        raise errors.AuthorizationError(
            "Target challenge not found in authorization resource")

    def _get_chall_pref(self, domain):
        """Return list of challenge preferences.

        Continuity challenges come first, then DV challenges.

        :param str domain: domain for which you are requesting preferences

        """
        # Make sure to make a copy...
        chall_prefs = []
        chall_prefs.extend(self.cont_auth.get_chall_pref(domain))
        chall_prefs.extend(self.dv_auth.get_chall_pref(domain))
        return chall_prefs

    def _cleanup_challenges(self, achall_list=None):
        """Cleanup challenges.

        If achall_list is not provided, cleanup all achallenges.

        """
        logging.info("Cleaning up challenges")

        if achall_list is None:
            dv_c = self.dv_c
            cont_c = self.cont_c
        else:
            dv_c = [achall for achall in achall_list
                    if isinstance(achall.chall, challenges.DVChallenge)]
            cont_c = [achall for achall in achall_list if isinstance(
                achall.chall, challenges.ContinuityChallenge)]

        if dv_c:
            self.dv_auth.cleanup(dv_c)
            for achall in dv_c:
                self.dv_c.remove(achall)
        if cont_c:
            self.cont_auth.cleanup(cont_c)
            for achall in cont_c:
                self.cont_c.remove(achall)

    def verify_authzr_complete(self):
        """Verifies that all authorizations have been decided.

        :returns: Whether all authzr are complete
        :rtype: bool

        """
        for authzr in self.authzr.values():
            if (authzr.body.status != messages.STATUS_VALID and
                    authzr.body.status != messages.STATUS_INVALID):
                raise errors.AuthorizationError("Incomplete authorizations")

    def _challenge_factory(self, domain, path):
        """Construct Namedtuple Challenges

        :param str domain: domain of the enrollee

        :param list path: List of indices from `challenges`.

        :returns: dv_chall, list of DVChallenge type
            :class:`letsencrypt.achallenges.Indexed`
            cont_chall, list of ContinuityChallenge type
            :class:`letsencrypt.achallenges.Indexed`
        :rtype: tuple

        :raises errors.LetsEncryptClientError: If Challenge type is not
            recognized

        """
        dv_chall = []
        cont_chall = []

        for index in path:
            challb = self.authzr[domain].body.challenges[index]
            chall = challb.chall

            achall = challb_to_achall(challb, self.account.key, domain)

            if isinstance(chall, challenges.ContinuityChallenge):
                cont_chall.append(achall)
            elif isinstance(chall, challenges.DVChallenge):
                dv_chall.append(achall)

        return cont_chall, dv_chall
def challb_to_achall(challb, key, domain):
    """Converts a ChallengeBody object to an AnnotatedChallenge.

    :param challb: ChallengeBody
    :type challb: :class:`acme.messages.ChallengeBody`

    :param key: Key
    :type key: :class:`letsencrypt.le_util.Key`

    :param str domain: Domain of the challb

    :returns: Appropriate AnnotatedChallenge
    :rtype: :class:`letsencrypt.achallenges.AnnotatedChallenge`

    :raises errors.LetsEncryptClientError: If the challenge type is not
        recognized.

    """
    chall = challb.chall
    logging.info("%s challenge for %s", chall.typ, domain)

    if isinstance(chall, challenges.DVSNI):
        return achallenges.DVSNI(
            challb=challb, domain=domain, key=key)
    elif isinstance(chall, challenges.SimpleHTTP):
        return achallenges.SimpleHTTP(
            challb=challb, domain=domain, key=key)
    elif isinstance(chall, challenges.DNS):
        return achallenges.DNS(challb=challb, domain=domain)
    elif isinstance(chall, challenges.RecoveryToken):
        return achallenges.RecoveryToken(challb=challb, domain=domain)
    elif isinstance(chall, challenges.RecoveryContact):
        return achallenges.RecoveryContact(
            challb=challb, domain=domain)
    elif isinstance(chall, challenges.ProofOfPossession):
        return achallenges.ProofOfPossession(
            challb=challb, domain=domain)
    else:
        # Interpolate eagerly: exception constructors, unlike the logging
        # functions, do not apply %-formatting to extra arguments, so the
        # original two-argument form never produced a formatted message.
        raise errors.LetsEncryptClientError(
            "Received unsupported challenge of type: %s" % chall.typ)
def gen_challenge_path(challbs, preferences, combinations):
    """Generate a plan to get authority over the identity.

    :param tuple challbs: Challenge bodies offered by the server for the
        identifier.
    :param list preferences: Challenge classes the client supports, in
        order of preference.
    :param tuple combinations: Optional server hint: sets of indices into
        ``challbs``, each of which would suffice to prove possession.

    :returns: tuple of indices from ``challbs`` to fulfill.
    :rtype: tuple

    :raises errors.AuthorizationError: If no satisfying path can be built
        from the client's preferences and the server's combinations.

    """
    # With server-provided combination hints we can pick the cheapest
    # combo; without them, fall back to a preference-ordered greedy scan.
    if not combinations:
        return _find_dumb_path(challbs, preferences)
    return _find_smart_path(challbs, preferences, combinations)
def _find_smart_path(challbs, preferences, combinations):
"""Find challenge path with server hints.
Can be called if combinations is included. Function uses a simple
ranking system to choose the combo with the lowest cost.
"""
chall_cost = {}
max_cost = 1
for i, chall_cls in enumerate(preferences):
chall_cost[chall_cls] = i
max_cost += i
# max_cost is now equal to sum(indices) + 1
best_combo = []
# Set above completing all of the available challenges
best_combo_cost = max_cost
combo_total = 0
for combo in combinations:
for challenge_index in combo:
combo_total += chall_cost.get(challbs[
challenge_index].chall.__class__, max_cost)
if combo_total < best_combo_cost:
best_combo = combo
best_combo_cost = combo_total
combo_total = 0
if not best_combo:
msg = ("Client does not support any combination of challenges that "
"will satisfy the CA.")
logging.fatal(msg)
raise errors.AuthorizationError(msg)
return best_combo
def _find_dumb_path(challbs, preferences):
    """Find a challenge path without server hints.

    Used when the server supplies no ``combinations`` hint: greedily take
    every offered challenge of each preferred type that is not mutually
    exclusive with a challenge already chosen.
    """
    # Duplicate preferences would let the same challenge be added twice.
    assert len(preferences) == len(set(preferences))

    path = []
    satisfied = set()
    for preferred_cls in preferences:
        for index, candidate in enumerate(challbs):
            if not isinstance(candidate.chall, preferred_cls):
                continue
            if is_preferred(candidate, satisfied):
                satisfied.add(candidate)
                path.append(index)
    return path
def mutually_exclusive(obj1, obj2, groups, different=False):
    """Are two objects mutually exclusive?"""
    for group in groups:
        in_group_1 = any(isinstance(obj1, cls) for cls in group)
        in_group_2 = any(isinstance(obj2, cls) for cls in group)
        if not (in_group_1 and in_group_2):
            continue
        # With different=True, two instances of the same concrete class
        # are not treated as exclusive even within the same group.
        if different and isinstance(obj1, obj2.__class__):
            continue
        return False
    return True
def is_preferred(offered_challb, satisfied,
                 exclusive_groups=constants.EXCLUSIVE_CHALLENGES):
    """Return whether or not the challenge is preferred in path."""
    # Acceptable only if it conflicts with none of the challenges that
    # have already been selected for the path.
    return all(
        mutually_exclusive(
            offered_challb.chall, chosen.chall, exclusive_groups,
            different=True)
        for chosen in satisfied)
| {
"repo_name": "digideskio/lets-encrypt-preview",
"path": "letsencrypt/auth_handler.py",
"copies": "1",
"size": "16783",
"license": "apache-2.0",
"hash": 1634707812265737200,
"line_mean": 34.1108786611,
"line_max": 79,
"alpha_frac": 0.6196746708,
"autogenerated": false,
"ratio": 4.096412008786917,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5216086679586918,
"avg_score": null,
"num_lines": null
} |
"""ACME AuthHandler."""
import itertools
import logging
import time
from letsencrypt.acme import challenges
from letsencrypt.acme import messages2
from letsencrypt.client import achallenges
from letsencrypt.client import constants
from letsencrypt.client import errors
class AuthHandler(object):
    """ACME Authorization Handler for a client.

    Drives the full challenge lifecycle: request authorizations, choose
    and perform challenges, poll for results, and clean up.

    :ivar dv_auth: Authenticator capable of solving
        :class:`~letsencrypt.acme.challenges.DVChallenge` types
    :type dv_auth: :class:`letsencrypt.client.interfaces.IAuthenticator`

    :ivar cont_auth: Authenticator capable of solving
        :class:`~letsencrypt.acme.challenges.ContinuityChallenge` types
    :type cont_auth: :class:`letsencrypt.client.interfaces.IAuthenticator`

    :ivar network: Network object for sending and receiving authorization
        messages
    :type network: :class:`letsencrypt.client.network2.Network`

    :ivar account: Client's Account
    :type account: :class:`letsencrypt.client.account.Account`

    :ivar dict authzr: ACME Authorization Resource dict where keys are domains
        and values are :class:`letsencrypt.acme.messages2.AuthorizationResource`
    :ivar list dv_c: DV challenges in the form of
        :class:`letsencrypt.client.achallenges.AnnotatedChallenge`
    :ivar list cont_c: Continuity challenges in the
        form of :class:`letsencrypt.client.achallenges.AnnotatedChallenge`

    """
    def __init__(self, dv_auth, cont_auth, network, account):
        self.dv_auth = dv_auth
        self.cont_auth = cont_auth
        self.network = network
        self.account = account

        self.authzr = dict()

        # List must be used to keep responses straight.
        self.dv_c = []
        self.cont_c = []

    def get_authorizations(self, domains, best_effort=False):
        """Retrieve all authorizations for challenges.

        :param set domains: Domains for authorization
        :param bool best_effort: Whether or not all authorizations are required
            (this is useful in renewal)

        :returns: tuple of lists of authorization resources. Takes the form of
            (`completed`, `failed`)
        :rtype: tuple

        :raises AuthorizationError: If unable to retrieve all
            authorizations

        """
        for domain in domains:
            self.authzr[domain] = self.network.request_domain_challenges(
                domain, self.account.new_authzr_uri)

        self._choose_challenges(domains)

        # While there are still challenges remaining...
        # _respond removes finished challenges from dv_c/cont_c, so this
        # loop terminates once every chosen challenge has been decided.
        while self.dv_c or self.cont_c:
            cont_resp, dv_resp = self._solve_challenges()
            logging.info("Waiting for verification...")

            # Send all Responses - this modifies dv_c and cont_c
            self._respond(cont_resp, dv_resp, best_effort)

        # Just make sure all decisions are complete.
        self.verify_authzr_complete()

        # Only return valid authorizations
        return [authzr for authzr in self.authzr.values()
                if authzr.body.status == messages2.STATUS_VALID]

    def _choose_challenges(self, domains):
        """Retrieve necessary challenges to satisfy server."""
        logging.info("Performing the following challenges:")
        for dom in domains:
            path = gen_challenge_path(
                self.authzr[dom].body.challenges,
                self._get_chall_pref(dom),
                self.authzr[dom].body.combinations)

            dom_cont_c, dom_dv_c = self._challenge_factory(
                dom, path)
            self.dv_c.extend(dom_dv_c)
            self.cont_c.extend(dom_cont_c)

    def _solve_challenges(self):
        """Get Responses for challenges from authenticators.

        Returns (cont_resp, dv_resp) lists aligned index-for-index with
        cont_c and dv_c respectively.
        """
        cont_resp = []
        dv_resp = []
        try:
            if self.cont_c:
                cont_resp = self.cont_auth.perform(self.cont_c)
            if self.dv_c:
                dv_resp = self.dv_auth.perform(self.dv_c)
        # This will catch both specific types of errors.
        except errors.AuthorizationError:
            logging.critical("Failure in setting up challenges.")
            logging.info("Attempting to clean up outstanding challenges...")
            self._cleanup_challenges()
            raise

        assert len(cont_resp) == len(self.cont_c)
        assert len(dv_resp) == len(self.dv_c)

        return cont_resp, dv_resp

    def _respond(self, cont_resp, dv_resp, best_effort):
        """Send/Receive confirmation of all challenges.

        .. note:: This method also cleans up the auth_handler state.

        """
        # TODO: chall_update is a dirty hack to get around acme-spec #105
        chall_update = dict()
        active_achalls = []
        active_achalls.extend(
            self._send_responses(self.dv_c, dv_resp, chall_update))
        active_achalls.extend(
            self._send_responses(self.cont_c, cont_resp, chall_update))

        # Check for updated status...
        # try/finally guarantees authenticator cleanup runs even when
        # polling raises (e.g. AuthorizationError on a failed challenge);
        # otherwise the answered challenges would never be cleaned up.
        try:
            self._poll_challenges(chall_update, best_effort)
        finally:
            # This removes challenges from self.dv_c and self.cont_c
            self._cleanup_challenges(active_achalls)

    def _send_responses(self, achalls, resps, chall_update):
        """Send responses and make sure errors are handled.

        :param dict chall_update: parameter that is updated to hold
            authzr -> list of outstanding solved annotated challenges

        """
        active_achalls = []
        for achall, resp in itertools.izip(achalls, resps):
            # Don't send challenges for None and False authenticator responses
            if resp:
                self.network.answer_challenge(achall.challb, resp)
                active_achalls.append(achall)
                if achall.domain in chall_update:
                    chall_update[achall.domain].append(achall)
                else:
                    chall_update[achall.domain] = [achall]

        return active_achalls

    def _poll_challenges(
            self, chall_update, best_effort, min_sleep=3, max_rounds=15):
        """Wait for all challenge results to be determined.

        Polls each domain until all of its outstanding challenges are
        decided, up to max_rounds polling rounds.
        """
        dom_to_check = set(chall_update.keys())
        comp_domains = set()
        rounds = 0

        while dom_to_check and rounds < max_rounds:
            # TODO: Use retry-after...
            time.sleep(min_sleep)
            for domain in dom_to_check:
                comp_challs, failed_challs = self._handle_check(
                    domain, chall_update[domain])

                if len(comp_challs) == len(chall_update[domain]):
                    comp_domains.add(domain)
                elif not failed_challs:
                    for chall in comp_challs:
                        chall_update[domain].remove(chall)
                # We failed some challenges... damage control
                else:
                    # Right now... just assume a loss and carry on...
                    if best_effort:
                        comp_domains.add(domain)
                    else:
                        raise errors.AuthorizationError(
                            "Failed Authorization procedure for %s" % domain)

            dom_to_check -= comp_domains
            comp_domains.clear()
            rounds += 1

    def _handle_check(self, domain, achalls):
        """Returns tuple of ('completed', 'failed')."""
        completed = []
        failed = []

        self.authzr[domain], _ = self.network.poll(self.authzr[domain])
        if self.authzr[domain].body.status == messages2.STATUS_VALID:
            return achalls, []

        # Note: if the whole authorization is invalid, the individual failed
        # challenges will be determined here...
        for achall in achalls:
            status = self._get_chall_status(self.authzr[domain], achall)

            # This does nothing for challenges that have yet to be decided yet.
            if status == messages2.STATUS_VALID:
                completed.append(achall)
            elif status == messages2.STATUS_INVALID:
                failed.append(achall)

        return completed, failed

    def _get_chall_status(self, authzr, achall):  # pylint: disable=no-self-use
        """Get the status of the challenge.

        .. warning:: This assumes only one instance of type of challenge in
            each challenge resource.

        :param authzr: Authorization Resource
        :type authzr: :class:`letsencrypt.acme.messages2.AuthorizationResource`

        :param achall: Annotated challenge for which to get status
        :type achall: :class:`letsencrypt.client.achallenges.AnnotatedChallenge`

        """
        for authzr_challb in authzr.body.challenges:
            if type(authzr_challb.chall) is type(achall.challb.chall):
                return authzr_challb.status
        raise errors.AuthorizationError(
            "Target challenge not found in authorization resource")

    def _get_chall_pref(self, domain):
        """Return list of challenge preferences.

        Continuity challenges come first, then DV challenges.

        :param str domain: domain for which you are requesting preferences

        """
        # Make sure to make a copy...
        chall_prefs = []
        chall_prefs.extend(self.cont_auth.get_chall_pref(domain))
        chall_prefs.extend(self.dv_auth.get_chall_pref(domain))
        return chall_prefs

    def _cleanup_challenges(self, achall_list=None):
        """Cleanup challenges.

        If achall_list is not provided, cleanup all achallenges.

        """
        logging.info("Cleaning up challenges")

        if achall_list is None:
            dv_c = self.dv_c
            cont_c = self.cont_c
        else:
            dv_c = [achall for achall in achall_list
                    if isinstance(achall.chall, challenges.DVChallenge)]
            cont_c = [achall for achall in achall_list if isinstance(
                achall.chall, challenges.ContinuityChallenge)]

        if dv_c:
            self.dv_auth.cleanup(dv_c)
            for achall in dv_c:
                self.dv_c.remove(achall)
        if cont_c:
            self.cont_auth.cleanup(cont_c)
            for achall in cont_c:
                self.cont_c.remove(achall)

    def verify_authzr_complete(self):
        """Verifies that all authorizations have been decided.

        :returns: Whether all authzr are complete
        :rtype: bool

        """
        for authzr in self.authzr.values():
            if (authzr.body.status != messages2.STATUS_VALID and
                    authzr.body.status != messages2.STATUS_INVALID):
                raise errors.AuthorizationError("Incomplete authorizations")

    def _challenge_factory(self, domain, path):
        """Construct Namedtuple Challenges

        :param str domain: domain of the enrollee

        :param list path: List of indices from `challenges`.

        :returns: dv_chall, list of DVChallenge type
            :class:`letsencrypt.client.achallenges.Indexed`
            cont_chall, list of ContinuityChallenge type
            :class:`letsencrypt.client.achallenges.Indexed`
        :rtype: tuple

        :raises errors.LetsEncryptClientError: If Challenge type is not
            recognized

        """
        dv_chall = []
        cont_chall = []

        for index in path:
            challb = self.authzr[domain].body.challenges[index]
            chall = challb.chall

            achall = challb_to_achall(challb, self.account.key, domain)

            if isinstance(chall, challenges.ContinuityChallenge):
                cont_chall.append(achall)
            elif isinstance(chall, challenges.DVChallenge):
                dv_chall.append(achall)

        return cont_chall, dv_chall
def challb_to_achall(challb, key, domain):
    """Converts a ChallengeBody object to an AnnotatedChallenge.

    :param challb: ChallengeBody
    :type challb: :class:`letsencrypt.acme.messages2.ChallengeBody`

    :param key: Key
    :type key: :class:`letsencrypt.client.le_util.Key`

    :param str domain: Domain of the challb

    :returns: Appropriate AnnotatedChallenge
    :rtype: :class:`letsencrypt.client.achallenges.AnnotatedChallenge`

    :raises errors.LetsEncryptClientError: If the challenge type is not
        recognized.

    """
    chall = challb.chall
    if isinstance(chall, challenges.DVSNI):
        logging.info(" DVSNI challenge for %s.", domain)
        return achallenges.DVSNI(
            challb=challb, domain=domain, key=key)
    elif isinstance(chall, challenges.SimpleHTTPS):
        logging.info(" SimpleHTTPS challenge for %s.", domain)
        return achallenges.SimpleHTTPS(
            challb=challb, domain=domain, key=key)
    elif isinstance(chall, challenges.DNS):
        logging.info(" DNS challenge for %s.", domain)
        return achallenges.DNS(challb=challb, domain=domain)
    elif isinstance(chall, challenges.RecoveryToken):
        logging.info(" Recovery Token Challenge for %s.", domain)
        return achallenges.RecoveryToken(challb=challb, domain=domain)
    elif isinstance(chall, challenges.RecoveryContact):
        logging.info(" Recovery Contact Challenge for %s.", domain)
        return achallenges.RecoveryContact(
            challb=challb, domain=domain)
    elif isinstance(chall, challenges.ProofOfPossession):
        logging.info(" Proof-of-Possession Challenge for %s", domain)
        return achallenges.ProofOfPossession(
            challb=challb, domain=domain)
    else:
        # Interpolate eagerly: exception constructors, unlike the logging
        # functions, do not apply %-formatting to extra arguments, so the
        # original two-argument form never produced a formatted message.
        raise errors.LetsEncryptClientError(
            "Received unsupported challenge of type: %s" % chall.typ)
def gen_challenge_path(challbs, preferences, combinations):
    """Generate a plan to get authority over the identity.

    :param tuple challbs: Challenge bodies offered by the server for the
        identifier.
    :param list preferences: Challenge classes the client supports, in
        order of preference.
    :param tuple combinations: Optional server hint: sets of indices into
        ``challbs``, each of which would suffice to prove possession.

    :returns: tuple of indices from ``challbs`` to fulfill.
    :rtype: tuple

    :raises errors.AuthorizationError: If no satisfying path can be built
        from the client's preferences and the server's combinations.

    """
    # With server-provided combination hints we can pick the cheapest
    # combo; without them, fall back to a preference-ordered greedy scan.
    if not combinations:
        return _find_dumb_path(challbs, preferences)
    return _find_smart_path(challbs, preferences, combinations)
def _find_smart_path(challbs, preferences, combinations):
"""Find challenge path with server hints.
Can be called if combinations is included. Function uses a simple
ranking system to choose the combo with the lowest cost.
"""
chall_cost = {}
max_cost = 1
for i, chall_cls in enumerate(preferences):
chall_cost[chall_cls] = i
max_cost += i
# max_cost is now equal to sum(indices) + 1
best_combo = []
# Set above completing all of the available challenges
best_combo_cost = max_cost
combo_total = 0
for combo in combinations:
for challenge_index in combo:
combo_total += chall_cost.get(challbs[
challenge_index].chall.__class__, max_cost)
if combo_total < best_combo_cost:
best_combo = combo
best_combo_cost = combo_total
combo_total = 0
if not best_combo:
msg = ("Client does not support any combination of challenges that "
"will satisfy the CA.")
logging.fatal(msg)
raise errors.AuthorizationError(msg)
return best_combo
def _find_dumb_path(challbs, preferences):
    """Find a challenge path without server hints.

    Used when the server supplies no ``combinations`` hint: greedily take
    every offered challenge of each preferred type that is not mutually
    exclusive with a challenge already chosen.
    """
    # Duplicate preferences would let the same challenge be added twice.
    assert len(preferences) == len(set(preferences))

    path = []
    satisfied = set()
    for preferred_cls in preferences:
        for index, candidate in enumerate(challbs):
            if not isinstance(candidate.chall, preferred_cls):
                continue
            if is_preferred(candidate, satisfied):
                satisfied.add(candidate)
                path.append(index)
    return path
def mutually_exclusive(obj1, obj2, groups, different=False):
    """Are two objects mutually exclusive?"""
    for group in groups:
        in_group_1 = any(isinstance(obj1, cls) for cls in group)
        in_group_2 = any(isinstance(obj2, cls) for cls in group)
        if not (in_group_1 and in_group_2):
            continue
        # With different=True, two instances of the same concrete class
        # are not treated as exclusive even within the same group.
        if different and isinstance(obj1, obj2.__class__):
            continue
        return False
    return True
def is_preferred(offered_challb, satisfied,
                 exclusive_groups=constants.EXCLUSIVE_CHALLENGES):
    """Return whether or not the challenge is preferred in path."""
    # Acceptable only if it conflicts with none of the challenges that
    # have already been selected for the path.
    return all(
        mutually_exclusive(
            offered_challb.chall, chosen.chall, exclusive_groups,
            different=True)
        for chosen in satisfied)
| {
"repo_name": "diracdeltas/lets-encrypt-preview",
"path": "letsencrypt/client/auth_handler.py",
"copies": "1",
"size": "17318",
"license": "apache-2.0",
"hash": -7784753445113174000,
"line_mean": 34.9294605809,
"line_max": 80,
"alpha_frac": 0.6248412057,
"autogenerated": false,
"ratio": 4.075782537067545,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5200623742767545,
"avg_score": null,
"num_lines": null
} |
"""ACME AuthHandler."""
import itertools
import logging
import time
import zope.component
from acme import challenges
from acme import messages
from certbot import achallenges
from certbot import errors
from certbot import error_handler
from certbot import interfaces
logger = logging.getLogger(__name__)
class AuthHandler(object):
    """ACME Authorization Handler for a client.

    Drives the full challenge lifecycle: request authorizations, choose
    and perform challenges, submit responses, poll for results, and clean
    up afterwards.

    :ivar auth: Authenticator capable of solving
        :class:`~acme.challenges.Challenge` types
    :type auth: :class:`certbot.interfaces.IAuthenticator`
    :ivar acme.client.Client acme: ACME client API.
    :ivar account: Client's Account
    :type account: :class:`certbot.account.Account`
    :ivar dict authzr: ACME Authorization Resource dict where keys are domains
        and values are :class:`acme.messages.AuthorizationResource`
    :ivar list achalls: DV challenges in the form of
        :class:`certbot.achallenges.AnnotatedChallenge`
    """
    def __init__(self, auth, acme, account):
        self.auth = auth
        self.acme = acme
        self.account = account
        self.authzr = dict()
        # List must be used to keep responses straight.
        self.achalls = []
    def get_authorizations(self, domains, best_effort=False):
        """Retrieve all authorizations for challenges.

        :param list domains: Domains for authorization
        :param bool best_effort: Whether or not all authorizations are
            required (this is useful in renewal)

        :returns: List of authorization resources
        :rtype: list

        :raises .AuthorizationError: If unable to retrieve all
            authorizations
        """
        # Ask the server for a fresh authorization (with its challenge
        # set) for every requested domain.
        for domain in domains:
            self.authzr[domain] = self.acme.request_domain_challenges(
                domain, self.account.regr.new_authzr_uri)
        self._choose_challenges(domains)
        # While there are still challenges remaining...
        while self.achalls:
            resp = self._solve_challenges()
            logger.info("Waiting for verification...")
            # Send all Responses - this modifies achalls
            self._respond(resp, best_effort)
        # Just make sure all decisions are complete.
        self.verify_authzr_complete()
        # Only return valid authorizations
        retVal = [authzr for authzr in self.authzr.values()
                  if authzr.body.status == messages.STATUS_VALID]
        if not retVal:
            raise errors.AuthorizationError(
                "Challenges failed for all domains")
        return retVal
    def _choose_challenges(self, domains):
        """Retrieve necessary challenges to satisfy server."""
        logger.info("Performing the following challenges:")
        for dom in domains:
            # Pick a set of challenge indices that satisfies the CA given
            # the authenticator's preferences.
            path = gen_challenge_path(
                self.authzr[dom].body.challenges,
                self._get_chall_pref(dom),
                self.authzr[dom].body.combinations)
            dom_achalls = self._challenge_factory(
                dom, path)
            self.achalls.extend(dom_achalls)
    def _solve_challenges(self):
        """Get Responses for challenges from authenticators."""
        resp = []
        # Ensure set-up challenges are cleaned up if perform() raises.
        with error_handler.ErrorHandler(self._cleanup_challenges):
            try:
                if self.achalls:
                    resp = self.auth.perform(self.achalls)
            except errors.AuthorizationError:
                logger.critical("Failure in setting up challenges.")
                logger.info("Attempting to clean up outstanding challenges...")
                raise
        # One response is expected per challenge, in the same order.
        assert len(resp) == len(self.achalls)
        return resp
    def _respond(self, resp, best_effort):
        """Send/Receive confirmation of all challenges.

        .. note:: This method also cleans up the auth_handler state.

        """
        # TODO: chall_update is a dirty hack to get around acme-spec #105
        chall_update = dict()
        active_achalls = self._send_responses(self.achalls,
                                              resp, chall_update)
        # Check for updated status...
        try:
            self._poll_challenges(chall_update, best_effort)
        finally:
            # This removes challenges from self.achalls
            self._cleanup_challenges(active_achalls)
    def _send_responses(self, achalls, resps, chall_update):
        """Send responses and make sure errors are handled.

        :param dict chall_update: parameter that is updated to hold
            authzr -> list of outstanding solved annotated challenges

        :returns: list of annotated challenges that were attempted
        """
        active_achalls = []
        for achall, resp in itertools.izip(achalls, resps):
            # This line needs to be outside of the if block below to
            # ensure failed challenges are cleaned up correctly
            active_achalls.append(achall)
            # Don't send challenges for None and False authenticator responses
            if resp is not None and resp:
                self.acme.answer_challenge(achall.challb, resp)
                # TODO: answer_challenge returns challr, with URI,
                # that can be used in _find_updated_challr
                # comparisons...
                if achall.domain in chall_update:
                    chall_update[achall.domain].append(achall)
                else:
                    chall_update[achall.domain] = [achall]
        return active_achalls
    def _poll_challenges(
            self, chall_update, best_effort, min_sleep=3, max_rounds=15):
        """Wait for all challenge results to be determined."""
        dom_to_check = set(chall_update.keys())
        comp_domains = set()
        rounds = 0
        while dom_to_check and rounds < max_rounds:
            # TODO: Use retry-after...
            time.sleep(min_sleep)
            all_failed_achalls = set()
            for domain in dom_to_check:
                comp_achalls, failed_achalls = self._handle_check(
                    domain, chall_update[domain])
                if len(comp_achalls) == len(chall_update[domain]):
                    # Every challenge for this domain completed.
                    comp_domains.add(domain)
                elif not failed_achalls:
                    # Partial progress: drop completed challenges and keep
                    # polling the rest.
                    for achall, _ in comp_achalls:
                        chall_update[domain].remove(achall)
                # We failed some challenges... damage control
                else:
                    if best_effort:
                        comp_domains.add(domain)
                        logger.warning(
                            "Challenge failed for domain %s",
                            domain)
                    else:
                        all_failed_achalls.update(
                            updated for _, updated in failed_achalls)
            if all_failed_achalls:
                _report_failed_challs(all_failed_achalls)
                raise errors.FailedChallenges(all_failed_achalls)
            dom_to_check -= comp_domains
            comp_domains.clear()
            rounds += 1
    def _handle_check(self, domain, achalls):
        """Returns tuple of ('completed', 'failed')."""
        completed = []
        failed = []
        self.authzr[domain], _ = self.acme.poll(self.authzr[domain])
        if self.authzr[domain].body.status == messages.STATUS_VALID:
            return achalls, []
        # Note: if the whole authorization is invalid, the individual failed
        # challenges will be determined here...
        for achall in achalls:
            updated_achall = achall.update(challb=self._find_updated_challb(
                self.authzr[domain], achall))
            # This does nothing for challenges that have yet to be decided yet.
            if updated_achall.status == messages.STATUS_VALID:
                completed.append((achall, updated_achall))
            elif updated_achall.status == messages.STATUS_INVALID:
                failed.append((achall, updated_achall))
        return completed, failed
    def _find_updated_challb(self, authzr, achall):  # pylint: disable=no-self-use
        """Find updated challenge body within Authorization Resource.

        .. warning:: This assumes only one instance of type of challenge in
            each challenge resource.

        :param .AuthorizationResource authzr: Authorization Resource
        :param .AnnotatedChallenge achall: Annotated challenge for which
            to get status

        """
        for authzr_challb in authzr.body.challenges:
            if type(authzr_challb.chall) is type(achall.challb.chall):  # noqa
                return authzr_challb
        raise errors.AuthorizationError(
            "Target challenge not found in authorization resource")
    def _get_chall_pref(self, domain):
        """Return list of challenge preferences.

        :param str domain: domain for which you are requesting preferences
        """
        # Make sure to make a copy...
        chall_prefs = []
        chall_prefs.extend(self.auth.get_chall_pref(domain))
        return chall_prefs
    def _cleanup_challenges(self, achall_list=None):
        """Cleanup challenges.

        If achall_list is not provided, cleanup all achallenges.
        """
        logger.info("Cleaning up challenges")
        if achall_list is None:
            achalls = self.achalls
        else:
            achalls = achall_list
        if achalls:
            self.auth.cleanup(achalls)
            for achall in achalls:
                self.achalls.remove(achall)
    def verify_authzr_complete(self):
        """Verifies that all authorizations have been decided.

        :returns: Whether all authzr are complete
        :rtype: bool

        :raises .AuthorizationError: if any authorization is still pending
        """
        for authzr in self.authzr.values():
            if (authzr.body.status != messages.STATUS_VALID and
                    authzr.body.status != messages.STATUS_INVALID):
                raise errors.AuthorizationError("Incomplete authorizations")
    def _challenge_factory(self, domain, path):
        """Construct Namedtuple Challenges

        :param str domain: domain of the enrollee
        :param list path: List of indices from `challenges`.

        :returns: achalls, list of challenge type
            :class:`certbot.achallenges.Indexed`
        :rtype: list

        :raises .errors.Error: if challenge type is not recognized

        """
        achalls = []
        for index in path:
            challb = self.authzr[domain].body.challenges[index]
            achalls.append(challb_to_achall(challb, self.account.key, domain))
        return achalls
def challb_to_achall(challb, account_key, domain):
    """Wrap a ChallengeBody in the matching AnnotatedChallenge type.

    :param .ChallengeBody challb: ChallengeBody
    :param .JWK account_key: Authorized Account Key
    :param str domain: Domain of the challb

    :returns: Appropriate AnnotatedChallenge
    :rtype: :class:`certbot.achallenges.AnnotatedChallenge`

    :raises .errors.Error: if the challenge type is not supported

    """
    chall = challb.chall
    logger.info("%s challenge for %s", chall.typ, domain)
    # Guard-style dispatch on the concrete challenge type.
    if isinstance(chall, challenges.KeyAuthorizationChallenge):
        return achallenges.KeyAuthorizationAnnotatedChallenge(
            challb=challb, domain=domain, account_key=account_key)
    if isinstance(chall, challenges.DNS):
        return achallenges.DNS(challb=challb, domain=domain)
    raise errors.Error(
        "Received unsupported challenge of type: %s", chall.typ)
def gen_challenge_path(challbs, preferences, combinations):
    """Generate a plan to get authority over the identity.

    :param tuple challbs: challenges
        (:class:`acme.messages.Challenge`) offered by the server for the
        identifier, which the client must fulfill to prove possession.
    :param list preferences: List of challenge preferences for domain
        (:class:`acme.challenges.Challenge` subclasses)
    :param tuple combinations: collections of challenge indices, each of
        which would be sufficient to prove possession of the identifier.

    :returns: tuple of indices from ``challenges``.
    :rtype: tuple

    :raises certbot.errors.AuthorizationError: If a
        path cannot be created that satisfies the CA given the preferences
        and combinations.

    """
    # Server combination hints let us rank combos by cost; without hints,
    # every offered challenge must be attempted.
    if not combinations:
        return _find_dumb_path(challbs, preferences)
    return _find_smart_path(challbs, preferences, combinations)
def _find_smart_path(challbs, preferences, combinations):
"""Find challenge path with server hints.
Can be called if combinations is included. Function uses a simple
ranking system to choose the combo with the lowest cost.
"""
chall_cost = {}
max_cost = 1
for i, chall_cls in enumerate(preferences):
chall_cost[chall_cls] = i
max_cost += i
# max_cost is now equal to sum(indices) + 1
best_combo = []
# Set above completing all of the available challenges
best_combo_cost = max_cost
combo_total = 0
for combo in combinations:
for challenge_index in combo:
combo_total += chall_cost.get(challbs[
challenge_index].chall.__class__, max_cost)
if combo_total < best_combo_cost:
best_combo = combo
best_combo_cost = combo_total
combo_total = 0
if not best_combo:
_report_no_chall_path()
return best_combo
def _find_dumb_path(challbs, preferences):
"""Find challenge path without server hints.
Should be called if the combinations hint is not included by the
server. This function either returns a path containing all
challenges provided by the CA or raises an exception.
"""
path = []
for i, challb in enumerate(challbs):
# supported is set to True if the challenge type is supported
supported = next((True for pref_c in preferences
if isinstance(challb.chall, pref_c)), False)
if supported:
path.append(i)
else:
_report_no_chall_path()
return path
def _report_no_chall_path():
    """Logs and raises an error that no satisfiable chall path exists."""
    msg = ("Client with the currently selected authenticator does not support "
           "any combination of challenges that will satisfy the CA.")
    # ``Logger.fatal`` is the obsolete alias; the logging docs say to use
    # ``critical`` instead. Same severity, same output.
    logger.critical(msg)
    raise errors.AuthorizationError(msg)
# Namespace prefix the ACME server attaches to error types; stripped by
# _generate_failed_chall_msg before indexing into _ERROR_HELP.
_ACME_PREFIX = "urn:acme:error:"
# Advice appended to several error categories below.
_ERROR_HELP_COMMON = (
    "To fix these errors, please make sure that your domain name was entered "
    "correctly and the DNS A record(s) for that domain contain(s) the "
    "right IP address.")
# Maps a short ACME error type (prefix removed) to remediation advice
# appended to the user-facing failure report.
_ERROR_HELP = {
    "connection":
        _ERROR_HELP_COMMON + " Additionally, please check that your computer "
        "has a publicly routable IP address and that no firewalls are preventing "
        "the server from communicating with the client. If you're using the "
        "webroot plugin, you should also verify that you are serving files "
        "from the webroot path you provided.",
    "dnssec":
        _ERROR_HELP_COMMON + " Additionally, if you have DNSSEC enabled for "
        "your domain, please ensure that the signature is valid.",
    "malformed":
        "To fix these errors, please make sure that you did not provide any "
        "invalid information to the client, and try running Certbot "
        "again.",
    "serverInternal":
        "Unfortunately, an error on the ACME server prevented you from completing "
        "authorization. Please try again later.",
    "tls":
        _ERROR_HELP_COMMON + " Additionally, please check that you have an "
        "up-to-date TLS configuration that allows the server to communicate "
        "with the Certbot client.",
    "unauthorized": _ERROR_HELP_COMMON,
    "unknownHost": _ERROR_HELP_COMMON,
}
def _report_failed_challs(failed_achalls):
    """Notifies the user about failed challenges.

    :param set failed_achalls: A set of failed
        :class:`certbot.achallenges.AnnotatedChallenge`.

    """
    # Bucket the failures by ACME error type so each report covers one
    # kind of problem.
    problems = dict()
    for achall in failed_achalls:
        if achall.error:
            problems.setdefault(achall.error.typ, []).append(achall)
    reporter = zope.component.getUtility(interfaces.IReporter)
    for achall_group in problems.itervalues():
        reporter.add_message(
            _generate_failed_chall_msg(achall_group),
            reporter.MEDIUM_PRIORITY)
def _generate_failed_chall_msg(failed_achalls):
    """Creates a user friendly error message about failed challenges.

    :param list failed_achalls: A list of failed
        :class:`certbot.achallenges.AnnotatedChallenge` with the same error
        type.

    :returns: A formatted error message for the client.
    :rtype: str

    """
    typ = failed_achalls[0].error.typ
    # Drop the ACME namespace so the short type can index _ERROR_HELP.
    if typ.startswith(_ACME_PREFIX):
        typ = typ[len(_ACME_PREFIX):]
    parts = ["The following errors were reported by the server:"]
    parts.extend(
        "\n\nDomain: %s\nType: %s\nDetail: %s" % (
            achall.domain, typ, achall.error.detail)
        for achall in failed_achalls)
    if typ in _ERROR_HELP:
        parts.append("\n\n")
        parts.append(_ERROR_HELP[typ])
    return "".join(parts)
| {
"repo_name": "DavidGarciaCat/letsencrypt",
"path": "certbot/auth_handler.py",
"copies": "4",
"size": "17250",
"license": "apache-2.0",
"hash": -1599481313973257500,
"line_mean": 33.2261904762,
"line_max": 83,
"alpha_frac": 0.6227246377,
"autogenerated": false,
"ratio": 4.212454212454213,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.6835178850154212,
"avg_score": null,
"num_lines": null
} |
"""ACME AuthHandler."""
import itertools
import logging
import time
import zope.component
from acme import challenges
from acme import messages
from letsencrypt import achallenges
from letsencrypt import constants
from letsencrypt import errors
from letsencrypt import error_handler
from letsencrypt import interfaces
logger = logging.getLogger(__name__)
class AuthHandler(object):
    """ACME Authorization Handler for a client.

    Drives the full challenge lifecycle with two authenticators: one for
    DV challenges and one for continuity challenges.

    :ivar dv_auth: Authenticator capable of solving
        :class:`~acme.challenges.DVChallenge` types
    :type dv_auth: :class:`letsencrypt.interfaces.IAuthenticator`
    :ivar cont_auth: Authenticator capable of solving
        :class:`~acme.challenges.ContinuityChallenge` types
    :type cont_auth: :class:`letsencrypt.interfaces.IAuthenticator`
    :ivar acme.client.Client acme: ACME client API.
    :ivar account: Client's Account
    :type account: :class:`letsencrypt.account.Account`
    :ivar dict authzr: ACME Authorization Resource dict where keys are domains
        and values are :class:`acme.messages.AuthorizationResource`
    :ivar list dv_c: DV challenges in the form of
        :class:`letsencrypt.achallenges.AnnotatedChallenge`
    :ivar list cont_c: Continuity challenges in the
        form of :class:`letsencrypt.achallenges.AnnotatedChallenge`
    """
    def __init__(self, dv_auth, cont_auth, acme, account):
        self.dv_auth = dv_auth
        self.cont_auth = cont_auth
        self.acme = acme
        self.account = account
        self.authzr = dict()
        # List must be used to keep responses straight.
        self.dv_c = []
        self.cont_c = []
    def get_authorizations(self, domains, best_effort=False):
        """Retrieve all authorizations for challenges.

        :param list domains: Domains for authorization
        :param bool best_effort: Whether or not all authorizations are
            required (this is useful in renewal)

        :returns: tuple of lists of authorization resources. Takes the
            form of (`completed`, `failed`)
        :rtype: tuple

        :raises .AuthorizationError: If unable to retrieve all
            authorizations
        """
        # Request a fresh authorization (with its challenge set) for each
        # domain up front.
        for domain in domains:
            self.authzr[domain] = self.acme.request_domain_challenges(
                domain, self.account.regr.new_authzr_uri)
        self._choose_challenges(domains)
        # While there are still challenges remaining...
        while self.dv_c or self.cont_c:
            cont_resp, dv_resp = self._solve_challenges()
            logger.info("Waiting for verification...")
            # Send all Responses - this modifies dv_c and cont_c
            self._respond(cont_resp, dv_resp, best_effort)
        # Just make sure all decisions are complete.
        self.verify_authzr_complete()
        # Only return valid authorizations
        return [authzr for authzr in self.authzr.values()
                if authzr.body.status == messages.STATUS_VALID]
    def _choose_challenges(self, domains):
        """Retrieve necessary challenges to satisfy server."""
        logger.info("Performing the following challenges:")
        for dom in domains:
            # Pick a set of challenge indices that satisfies the CA given
            # the authenticators' preferences.
            path = gen_challenge_path(
                self.authzr[dom].body.challenges,
                self._get_chall_pref(dom),
                self.authzr[dom].body.combinations)
            dom_cont_c, dom_dv_c = self._challenge_factory(
                dom, path)
            self.dv_c.extend(dom_dv_c)
            self.cont_c.extend(dom_cont_c)
    def _solve_challenges(self):
        """Get Responses for challenges from authenticators."""
        cont_resp = []
        dv_resp = []
        # Ensure set-up challenges are cleaned up if either perform() raises.
        with error_handler.ErrorHandler(self._cleanup_challenges):
            try:
                if self.cont_c:
                    cont_resp = self.cont_auth.perform(self.cont_c)
                if self.dv_c:
                    dv_resp = self.dv_auth.perform(self.dv_c)
            except errors.AuthorizationError:
                logger.critical("Failure in setting up challenges.")
                logger.info("Attempting to clean up outstanding challenges...")
                raise
        # One response per challenge, in order, from each authenticator.
        assert len(cont_resp) == len(self.cont_c)
        assert len(dv_resp) == len(self.dv_c)
        return cont_resp, dv_resp
    def _respond(self, cont_resp, dv_resp, best_effort):
        """Send/Receive confirmation of all challenges.

        .. note:: This method also cleans up the auth_handler state.

        """
        # TODO: chall_update is a dirty hack to get around acme-spec #105
        chall_update = dict()
        active_achalls = []
        active_achalls.extend(
            self._send_responses(self.dv_c, dv_resp, chall_update))
        active_achalls.extend(
            self._send_responses(self.cont_c, cont_resp, chall_update))
        # Check for updated status...
        try:
            self._poll_challenges(chall_update, best_effort)
        finally:
            # This removes challenges from self.dv_c and self.cont_c
            self._cleanup_challenges(active_achalls)
    def _send_responses(self, achalls, resps, chall_update):
        """Send responses and make sure errors are handled.

        :param dict chall_update: parameter that is updated to hold
            authzr -> list of outstanding solved annotated challenges

        :returns: list of annotated challenges that were attempted
        """
        active_achalls = []
        for achall, resp in itertools.izip(achalls, resps):
            # This line needs to be outside of the if block below to
            # ensure failed challenges are cleaned up correctly
            active_achalls.append(achall)
            # Don't send challenges for None and False authenticator responses
            if resp is not None and resp:
                self.acme.answer_challenge(achall.challb, resp)
                # TODO: answer_challenge returns challr, with URI,
                # that can be used in _find_updated_challr
                # comparisons...
                if achall.domain in chall_update:
                    chall_update[achall.domain].append(achall)
                else:
                    chall_update[achall.domain] = [achall]
        return active_achalls
    def _poll_challenges(
            self, chall_update, best_effort, min_sleep=3, max_rounds=15):
        """Wait for all challenge results to be determined."""
        dom_to_check = set(chall_update.keys())
        comp_domains = set()
        rounds = 0
        while dom_to_check and rounds < max_rounds:
            # TODO: Use retry-after...
            time.sleep(min_sleep)
            all_failed_achalls = set()
            for domain in dom_to_check:
                comp_achalls, failed_achalls = self._handle_check(
                    domain, chall_update[domain])
                if len(comp_achalls) == len(chall_update[domain]):
                    # Every challenge for this domain completed.
                    comp_domains.add(domain)
                elif not failed_achalls:
                    # Partial progress: drop completed challenges and keep
                    # polling the rest.
                    for achall, _ in comp_achalls:
                        chall_update[domain].remove(achall)
                # We failed some challenges... damage control
                else:
                    # Right now... just assume a loss and carry on...
                    if best_effort:
                        comp_domains.add(domain)
                    else:
                        all_failed_achalls.update(
                            updated for _, updated in failed_achalls)
            if all_failed_achalls:
                _report_failed_challs(all_failed_achalls)
                raise errors.FailedChallenges(all_failed_achalls)
            dom_to_check -= comp_domains
            comp_domains.clear()
            rounds += 1
    def _handle_check(self, domain, achalls):
        """Returns tuple of ('completed', 'failed')."""
        completed = []
        failed = []
        self.authzr[domain], _ = self.acme.poll(self.authzr[domain])
        if self.authzr[domain].body.status == messages.STATUS_VALID:
            return achalls, []
        # Note: if the whole authorization is invalid, the individual failed
        # challenges will be determined here...
        for achall in achalls:
            updated_achall = achall.update(challb=self._find_updated_challb(
                self.authzr[domain], achall))
            # This does nothing for challenges that have yet to be decided yet.
            if updated_achall.status == messages.STATUS_VALID:
                completed.append((achall, updated_achall))
            elif updated_achall.status == messages.STATUS_INVALID:
                failed.append((achall, updated_achall))
        return completed, failed
    def _find_updated_challb(self, authzr, achall):  # pylint: disable=no-self-use
        """Find updated challenge body within Authorization Resource.

        .. warning:: This assumes only one instance of type of challenge in
            each challenge resource.

        :param .AuthorizationResource authzr: Authorization Resource
        :param .AnnotatedChallenge achall: Annotated challenge for which
            to get status

        """
        for authzr_challb in authzr.body.challenges:
            if type(authzr_challb.chall) is type(achall.challb.chall):  # noqa
                return authzr_challb
        raise errors.AuthorizationError(
            "Target challenge not found in authorization resource")
    def _get_chall_pref(self, domain):
        """Return list of challenge preferences.

        :param str domain: domain for which you are requesting preferences
        """
        # Make sure to make a copy...
        chall_prefs = []
        # Continuity preferences are listed before DV preferences.
        chall_prefs.extend(self.cont_auth.get_chall_pref(domain))
        chall_prefs.extend(self.dv_auth.get_chall_pref(domain))
        return chall_prefs
    def _cleanup_challenges(self, achall_list=None):
        """Cleanup challenges.

        If achall_list is not provided, cleanup all achallenges.
        """
        logger.info("Cleaning up challenges")
        if achall_list is None:
            dv_c = self.dv_c
            cont_c = self.cont_c
        else:
            # Split the given challenges back into DV / continuity lists so
            # each authenticator only cleans up its own.
            dv_c = [achall for achall in achall_list
                    if isinstance(achall.chall, challenges.DVChallenge)]
            cont_c = [achall for achall in achall_list if isinstance(
                achall.chall, challenges.ContinuityChallenge)]
        if dv_c:
            self.dv_auth.cleanup(dv_c)
            for achall in dv_c:
                self.dv_c.remove(achall)
        if cont_c:
            self.cont_auth.cleanup(cont_c)
            for achall in cont_c:
                self.cont_c.remove(achall)
    def verify_authzr_complete(self):
        """Verifies that all authorizations have been decided.

        :returns: Whether all authzr are complete
        :rtype: bool

        :raises .AuthorizationError: if any authorization is still pending
        """
        for authzr in self.authzr.values():
            if (authzr.body.status != messages.STATUS_VALID and
                    authzr.body.status != messages.STATUS_INVALID):
                raise errors.AuthorizationError("Incomplete authorizations")
    def _challenge_factory(self, domain, path):
        """Construct Namedtuple Challenges

        :param str domain: domain of the enrollee
        :param list path: List of indices from `challenges`.

        :returns: dv_chall, list of DVChallenge type
            :class:`letsencrypt.achallenges.Indexed`
            cont_chall, list of ContinuityChallenge type
            :class:`letsencrypt.achallenges.Indexed`
        :rtype: tuple

        :raises .errors.Error: if challenge type is not recognized

        """
        dv_chall = []
        cont_chall = []
        for index in path:
            challb = self.authzr[domain].body.challenges[index]
            chall = challb.chall
            achall = challb_to_achall(challb, self.account.key, domain)
            if isinstance(chall, challenges.ContinuityChallenge):
                cont_chall.append(achall)
            elif isinstance(chall, challenges.DVChallenge):
                dv_chall.append(achall)
        return cont_chall, dv_chall
def challb_to_achall(challb, account_key, domain):
    """Wrap a ChallengeBody in the matching AnnotatedChallenge type.

    :param .ChallengeBody challb: ChallengeBody
    :param .JWK account_key: Authorized Account Key
    :param str domain: Domain of the challb

    :returns: Appropriate AnnotatedChallenge
    :rtype: :class:`letsencrypt.achallenges.AnnotatedChallenge`

    :raises .errors.Error: if the challenge type is not supported

    """
    chall = challb.chall
    logger.info("%s challenge for %s", chall.typ, domain)
    # Guard-style dispatch on the concrete challenge type.
    if isinstance(chall, challenges.KeyAuthorizationChallenge):
        return achallenges.KeyAuthorizationAnnotatedChallenge(
            challb=challb, domain=domain, account_key=account_key)
    if isinstance(chall, challenges.DNS):
        return achallenges.DNS(challb=challb, domain=domain)
    if isinstance(chall, challenges.RecoveryContact):
        return achallenges.RecoveryContact(
            challb=challb, domain=domain)
    if isinstance(chall, challenges.ProofOfPossession):
        return achallenges.ProofOfPossession(
            challb=challb, domain=domain)
    raise errors.Error(
        "Received unsupported challenge of type: %s", chall.typ)
def gen_challenge_path(challbs, preferences, combinations):
    """Generate a plan to get authority over the identity.

    :param tuple challbs: challenges
        (:class:`acme.messages.Challenge`) offered by the server for the
        identifier, which the client must fulfill to prove possession.
    :param list preferences: List of challenge preferences for domain
        (:class:`acme.challenges.Challenge` subclasses)
    :param tuple combinations: collections of challenge indices, each of
        which would be sufficient to prove possession of the identifier.

    :returns: tuple of indices from ``challenges``.
    :rtype: tuple

    :raises letsencrypt.errors.AuthorizationError: If a
        path cannot be created that satisfies the CA given the preferences
        and combinations.

    """
    # Server combination hints let us rank combos by cost; without hints,
    # every offered challenge must be attempted.
    if not combinations:
        return _find_dumb_path(challbs, preferences)
    return _find_smart_path(challbs, preferences, combinations)
def _find_smart_path(challbs, preferences, combinations):
"""Find challenge path with server hints.
Can be called if combinations is included. Function uses a simple
ranking system to choose the combo with the lowest cost.
"""
chall_cost = {}
max_cost = 1
for i, chall_cls in enumerate(preferences):
chall_cost[chall_cls] = i
max_cost += i
# max_cost is now equal to sum(indices) + 1
best_combo = []
# Set above completing all of the available challenges
best_combo_cost = max_cost
combo_total = 0
for combo in combinations:
for challenge_index in combo:
combo_total += chall_cost.get(challbs[
challenge_index].chall.__class__, max_cost)
if combo_total < best_combo_cost:
best_combo = combo
best_combo_cost = combo_total
combo_total = 0
if not best_combo:
msg = ("Client does not support any combination of challenges that "
"will satisfy the CA.")
logger.fatal(msg)
raise errors.AuthorizationError(msg)
return best_combo
def _find_dumb_path(challbs, preferences):
    """Find challenge path without server hints.

    Should be called if the combinations hint is not included by the
    server. Walks the preference list and greedily takes every offered
    challenge of that type that does not conflict with one already chosen.
    """
    # Duplicate preferences would let the same challenge be added twice.
    assert len(preferences) == len(set(preferences))
    path = []
    satisfied = set()
    for preferred_cls in preferences:
        for index, offered in enumerate(challbs):
            if (isinstance(offered.chall, preferred_cls)
                    and is_preferred(offered, satisfied)):
                path.append(index)
                satisfied.add(offered)
    return path
def mutually_exclusive(obj1, obj2, groups, different=False):
    """Are two objects mutually exclusive?

    ``groups`` is an iterable of class groups; the objects conflict
    (result ``False``) when some group contains a class of each. With
    ``different=True`` two objects of the same class never conflict.
    """
    # Same-class pairs are exempt from exclusivity when ``different`` is
    # set, regardless of group membership.
    if different and isinstance(obj1, obj2.__class__):
        return True
    for group in groups:
        first_in_group = any(isinstance(obj1, cls) for cls in group)
        second_in_group = any(isinstance(obj2, cls) for cls in group)
        if first_in_group and second_in_group:
            return False
    return True
def is_preferred(offered_challb, satisfied,
                 exclusive_groups=constants.EXCLUSIVE_CHALLENGES):
    """Return whether or not the challenge is preferred in path."""
    # Acceptable only if it conflicts with none of the challenges already
    # chosen for the path.
    return all(
        mutually_exclusive(offered_challb.chall, prior.chall,
                           exclusive_groups, different=True)
        for prior in satisfied)
# Namespace prefix the ACME server attaches to error types; stripped by
# _generate_failed_chall_msg before indexing into _ERROR_HELP.
_ACME_PREFIX = "urn:acme:error:"
# Advice appended to several error categories below.
_ERROR_HELP_COMMON = (
    "To fix these errors, please make sure that your domain name was entered "
    "correctly and the DNS A record(s) for that domain contain(s) the "
    "right IP address.")
# Maps a short ACME error type (prefix removed) to remediation advice
# appended to the user-facing failure report.
_ERROR_HELP = {
    "connection":
        _ERROR_HELP_COMMON + " Additionally, please check that your computer "
        "has a publicly routable IP address and that no firewalls are preventing "
        "the server from communicating with the client. If you're using the "
        "webroot plugin, you should also verify that you are serving files "
        "from the webroot path you provided.",
    "dnssec":
        _ERROR_HELP_COMMON + " Additionally, if you have DNSSEC enabled for "
        "your domain, please ensure that the signature is valid.",
    "malformed":
        "To fix these errors, please make sure that you did not provide any "
        "invalid information to the client, and try running Let's Encrypt "
        "again.",
    "serverInternal":
        "Unfortunately, an error on the ACME server prevented you from completing "
        "authorization. Please try again later.",
    "tls":
        _ERROR_HELP_COMMON + " Additionally, please check that you have an "
        "up-to-date TLS configuration that allows the server to communicate "
        "with the Let's Encrypt client.",
    "unauthorized": _ERROR_HELP_COMMON,
    "unknownHost": _ERROR_HELP_COMMON,
}
def _report_failed_challs(failed_achalls):
    """Notifies the user about failed challenges.

    :param set failed_achalls: A set of failed
        :class:`letsencrypt.achallenges.AnnotatedChallenge`.

    """
    # Bucket the failures by ACME error type so each report covers one
    # kind of problem.
    problems = dict()
    for achall in failed_achalls:
        if achall.error:
            problems.setdefault(achall.error.typ, []).append(achall)
    reporter = zope.component.getUtility(interfaces.IReporter)
    for achall_group in problems.itervalues():
        reporter.add_message(
            _generate_failed_chall_msg(achall_group),
            reporter.MEDIUM_PRIORITY)
def _generate_failed_chall_msg(failed_achalls):
    """Creates a user friendly error message about failed challenges.

    :param list failed_achalls: A list of failed
        :class:`letsencrypt.achallenges.AnnotatedChallenge` with the same
        error type.

    :returns: A formatted error message for the client.
    :rtype: str

    """
    typ = failed_achalls[0].error.typ
    # Drop the ACME namespace so the short type can index _ERROR_HELP.
    if typ.startswith(_ACME_PREFIX):
        typ = typ[len(_ACME_PREFIX):]
    parts = ["The following errors were reported by the server:"]
    parts.extend(
        "\n\nDomain: %s\nType: %s\nDetail: %s" % (
            achall.domain, typ, achall.error.detail)
        for achall in failed_achalls)
    if typ in _ERROR_HELP:
        parts.append("\n\n")
        parts.append(_ERROR_HELP[typ])
    return "".join(parts)
| {
"repo_name": "thanatos/lets-encrypt-preview",
"path": "letsencrypt/auth_handler.py",
"copies": "2",
"size": "19863",
"license": "apache-2.0",
"hash": 5705601742751394000,
"line_mean": 34.4696428571,
"line_max": 83,
"alpha_frac": 0.6238735337,
"autogenerated": false,
"ratio": 4.081158824738032,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.00006428776329809032,
"num_lines": 560
} |
"""ACME AuthHandler."""
import logging
import time
import datetime
import zope.component
from acme import challenges
from acme import messages
# pylint: disable=unused-import, no-name-in-module
from acme.magic_typing import Dict, List
# pylint: enable=unused-import, no-name-in-module
from certbot import achallenges
from certbot import errors
from certbot import error_handler
from certbot import interfaces
logger = logging.getLogger(__name__)
class AuthHandler(object):
"""ACME Authorization Handler for a client.
:ivar auth: Authenticator capable of solving
:class:`~acme.challenges.Challenge` types
:type auth: :class:`certbot.interfaces.IAuthenticator`
:ivar acme.client.BackwardsCompatibleClientV2 acme_client: ACME client API.
:ivar account: Client's Account
:type account: :class:`certbot.account.Account`
:ivar list pref_challs: sorted user specified preferred challenges
type strings with the most preferred challenge listed first
"""
    def __init__(self, auth, acme_client, account, pref_challs):
        """Store the authenticator, ACME client, account and preferences."""
        self.auth = auth
        # Stored as ``self.acme`` even though the parameter is named
        # ``acme_client``; the rest of the class uses ``self.acme``.
        self.acme = acme_client
        self.account = account
        # Sorted user-specified preferred challenge type strings, most
        # preferred first.
        self.pref_challs = pref_challs
def handle_authorizations(self, orderr, best_effort=False, max_retries=30):
"""
Retrieve all authorizations, perform all challenges required to validate
these authorizations, then poll and wait for the authorization to be checked.
:param acme.messages.OrderResource orderr: must have authorizations filled in
:param bool best_effort: if True, not all authorizations need to be validated (eg. renew)
:param int max_retries: maximum number of retries to poll authorizations
:returns: list of all validated authorizations
:rtype: List
:raises .AuthorizationError: If unable to retrieve all authorizations
"""
authzrs = orderr.authorizations[:]
if not authzrs:
raise errors.AuthorizationError('No authorization to handle.')
# Retrieve challenges that need to be performed to validate authorizations.
achalls = self._choose_challenges(authzrs)
if not achalls:
return authzrs
# Starting now, challenges will be cleaned at the end no matter what.
with error_handler.ExitHandler(self._cleanup_challenges, achalls):
# To begin, let's ask the authenticator plugin to perform all challenges.
try:
resps = self.auth.perform(achalls)
# If debug is on, wait for user input before starting the verification process.
logger.info('Waiting for verification...')
config = zope.component.getUtility(interfaces.IConfig)
if config.debug_challenges:
notify = zope.component.getUtility(interfaces.IDisplay).notification
notify('Challenges loaded. Press continue to submit to CA. '
'Pass "-v" for more info about challenges.', pause=True)
except errors.AuthorizationError as error:
logger.critical('Failure in setting up challenges.')
logger.info('Attempting to clean up outstanding challenges...')
raise error
# All challenges should have been processed by the authenticator.
assert len(resps) == len(achalls), 'Some challenges have not been performed.'
# Inform the ACME CA server that challenges are available for validation.
for achall, resp in zip(achalls, resps):
self.acme.answer_challenge(achall.challb, resp)
# Wait for authorizations to be checked.
self._poll_authorizations(authzrs, max_retries, best_effort)
# Keep validated authorizations only. If there is none, no certificate can be issued.
authzrs_validated = [authzr for authzr in authzrs
if authzr.body.status == messages.STATUS_VALID]
if not authzrs_validated:
raise errors.AuthorizationError('All challenges have failed.')
return authzrs_validated
def _poll_authorizations(self, authzrs, max_retries, best_effort):
"""
Poll the ACME CA server, to wait for confirmation that authorizations have their challenges
all verified. The poll may occur several times, until all authorizations are checked
(valid or invalid), or after a maximum of retries.
"""
authzrs_to_check = {index: (authzr, None)
for index, authzr in enumerate(authzrs)}
authzrs_failed_to_report = []
# Give an initial second to the ACME CA server to check the authorizations
sleep_seconds = 1
for _ in range(max_retries):
# Wait for appropriate time (from Retry-After, initial wait, or no wait)
if sleep_seconds > 0:
time.sleep(sleep_seconds)
# Poll all updated authorizations.
authzrs_to_check = {index: self.acme.poll(authzr) for index, (authzr, _)
in authzrs_to_check.items()}
# Update the original list of authzr with the updated authzrs from server.
for index, (authzr, _) in authzrs_to_check.items():
authzrs[index] = authzr
# Gather failed authorizations
authzrs_failed = [authzr for authzr, _ in authzrs_to_check.values()
if authzr.body.status == messages.STATUS_INVALID]
for authzr_failed in authzrs_failed:
logger.warning('Challenge failed for domain %s',
authzr_failed.body.identifier.value)
# Accumulating all failed authzrs to build a consolidated report
# on them at the end of the polling.
authzrs_failed_to_report.extend(authzrs_failed)
# Extract out the authorization already checked for next poll iteration.
# Poll may stop here because there is no pending authorizations anymore.
authzrs_to_check = {index: (authzr, resp) for index, (authzr, resp)
in authzrs_to_check.items()
if authzr.body.status == messages.STATUS_PENDING}
if not authzrs_to_check:
# Polling process is finished, we can leave the loop
break
# Be merciful with the ACME server CA, check the Retry-After header returned,
# and wait this time before polling again in next loop iteration.
# From all the pending authorizations, we take the greatest Retry-After value
# to avoid polling an authorization before its relevant Retry-After value.
retry_after = max(self.acme.retry_after(resp, 3)
for _, resp in authzrs_to_check.values())
sleep_seconds = (retry_after - datetime.datetime.now()).total_seconds()
# In case of failed authzrs, create a report to the user.
if authzrs_failed_to_report:
_report_failed_authzrs(authzrs_failed_to_report, self.account.key)
if not best_effort:
# Without best effort, having failed authzrs is critical and fail the process.
raise errors.AuthorizationError('Some challenges have failed.')
if authzrs_to_check:
# Here authzrs_to_check is still not empty, meaning we exceeded the max polling attempt.
raise errors.AuthorizationError('All authorizations were not finalized by the CA.')
def _choose_challenges(self, authzrs):
"""
Retrieve necessary and pending challenges to satisfy server.
NB: Necessary and already validated challenges are not retrieved,
as they can be reused for a certificate issuance.
"""
pending_authzrs = [authzr for authzr in authzrs
if authzr.body.status != messages.STATUS_VALID]
achalls = [] # type: List[achallenges.AnnotatedChallenge]
if pending_authzrs:
logger.info("Performing the following challenges:")
for authzr in pending_authzrs:
authzr_challenges = authzr.body.challenges
if self.acme.acme_version == 1:
combinations = authzr.body.combinations
else:
combinations = tuple((i,) for i in range(len(authzr_challenges)))
path = gen_challenge_path(
authzr_challenges,
self._get_chall_pref(authzr.body.identifier.value),
combinations)
achalls.extend(self._challenge_factory(authzr, path))
if any(isinstance(achall.chall, challenges.TLSSNI01) for achall in achalls):
logger.warning("TLS-SNI-01 is deprecated, and will stop working soon.")
return achalls
def _get_chall_pref(self, domain):
"""Return list of challenge preferences.
:param str domain: domain for which you are requesting preferences
"""
chall_prefs = []
# Make sure to make a copy...
plugin_pref = self.auth.get_chall_pref(domain)
if self.pref_challs:
plugin_pref_types = set(chall.typ for chall in plugin_pref)
for typ in self.pref_challs:
if typ in plugin_pref_types:
chall_prefs.append(challenges.Challenge.TYPES[typ])
if chall_prefs:
return chall_prefs
raise errors.AuthorizationError(
"None of the preferred challenges "
"are supported by the selected plugin")
chall_prefs.extend(plugin_pref)
return chall_prefs
def _cleanup_challenges(self, achalls):
"""Cleanup challenges.
:param achalls: annotated challenges to cleanup
:type achalls: `list` of :class:`certbot.achallenges.AnnotatedChallenge`
"""
logger.info("Cleaning up challenges")
self.auth.cleanup(achalls)
def _challenge_factory(self, authzr, path):
"""Construct Namedtuple Challenges
:param messages.AuthorizationResource authzr: authorization
:param list path: List of indices from `challenges`.
:returns: achalls, list of challenge type
:class:`certbot.achallenges.Indexed`
:rtype: list
:raises .errors.Error: if challenge type is not recognized
"""
achalls = []
for index in path:
challb = authzr.body.challenges[index]
achalls.append(challb_to_achall(
challb, self.account.key, authzr.body.identifier.value))
return achalls
def challb_to_achall(challb, account_key, domain):
    """Converts a ChallengeBody object to an AnnotatedChallenge.

    :param .ChallengeBody challb: ChallengeBody
    :param .JWK account_key: Authorized Account Key
    :param str domain: Domain of the challb

    :returns: Appropriate AnnotatedChallenge
    :rtype: :class:`certbot.achallenges.AnnotatedChallenge`

    :raises .errors.Error: if the challenge type is unsupported
    """
    chall = challb.chall
    logger.info("%s challenge for %s", chall.typ, domain)
    if isinstance(chall, challenges.KeyAuthorizationChallenge):
        return achallenges.KeyAuthorizationAnnotatedChallenge(
            challb=challb, domain=domain, account_key=account_key)
    elif isinstance(chall, challenges.DNS):
        return achallenges.DNS(challb=challb, domain=domain)
    else:
        # Bug fix: the message was previously passed logger-style
        # (msg, arg), which does not interpolate the placeholder in the
        # exception text; format explicitly instead.
        raise errors.Error(
            "Received unsupported challenge of type: %s" % chall.typ)
def gen_challenge_path(challbs, preferences, combinations):
    """Generate a plan to get authority over the identity.

    .. todo:: This can be possibly be rewritten to use resolved_combinations.

    :param tuple challbs: A tuple of challenges
        (:class:`acme.messages.Challenge`) from
        :class:`acme.messages.AuthorizationResource` to be
        fulfilled by the client in order to prove possession of the
        identifier.

    :param list preferences: List of challenge preferences for domain
        (:class:`acme.challenges.Challenge` subclasses)

    :param tuple combinations: A collection of sets of challenges from
        :class:`acme.messages.Challenge`, each of which would
        be sufficient to prove possession of the identifier.

    :returns: tuple of indices from ``challenges``.
    :rtype: tuple

    :raises certbot.errors.AuthorizationError: If a
        path cannot be created that satisfies the CA given the preferences and
        combinations.
    """
    # With server-provided combinations we can rank them; without, every
    # challenge must be satisfiable.
    if not combinations:
        return _find_dumb_path(challbs, preferences)
    return _find_smart_path(challbs, preferences, combinations)
def _find_smart_path(challbs, preferences, combinations):
"""Find challenge path with server hints.
Can be called if combinations is included. Function uses a simple
ranking system to choose the combo with the lowest cost.
"""
chall_cost = {}
max_cost = 1
for i, chall_cls in enumerate(preferences):
chall_cost[chall_cls] = i
max_cost += i
# max_cost is now equal to sum(indices) + 1
best_combo = None
# Set above completing all of the available challenges
best_combo_cost = max_cost
combo_total = 0
for combo in combinations:
for challenge_index in combo:
combo_total += chall_cost.get(challbs[
challenge_index].chall.__class__, max_cost)
if combo_total < best_combo_cost:
best_combo = combo
best_combo_cost = combo_total
combo_total = 0
if not best_combo:
_report_no_chall_path(challbs)
return best_combo
def _find_dumb_path(challbs, preferences):
"""Find challenge path without server hints.
Should be called if the combinations hint is not included by the
server. This function either returns a path containing all
challenges provided by the CA or raises an exception.
"""
path = []
for i, challb in enumerate(challbs):
# supported is set to True if the challenge type is supported
supported = next((True for pref_c in preferences
if isinstance(challb.chall, pref_c)), False)
if supported:
path.append(i)
else:
_report_no_chall_path(challbs)
return path
def _report_no_chall_path(challbs):
    """Logs and raises an error that no satisfiable chall path exists.

    :param challbs: challenges from the authorization that can't be satisfied
    """
    msg = ("Client with the currently selected authenticator does not support "
           "any combination of challenges that will satisfy the CA.")
    # Special-case a lone DNS-01 challenge: point the user at DNS-capable
    # authenticator plugins.
    only_dns01 = len(challbs) == 1 and isinstance(challbs[0].chall, challenges.DNS01)
    if only_dns01:
        msg += (
            " You may need to use an authenticator "
            "plugin that can do challenges over DNS.")
    logger.critical(msg)
    raise errors.AuthorizationError(msg)
# Shared first-line advice for errors that usually stem from incorrect DNS
# records for the requested domain.
_ERROR_HELP_COMMON = (
    "To fix these errors, please make sure that your domain name was entered "
    "correctly and the DNS A/AAAA record(s) for that domain contain(s) the "
    "right IP address.")

# Maps an ACME error type (prefix stripped) to remediation advice appended to
# the failed-challenge report shown to the user in _generate_failed_chall_msg.
_ERROR_HELP = {
    "connection":
        _ERROR_HELP_COMMON + " Additionally, please check that your computer "
        "has a publicly routable IP address and that no firewalls are preventing "
        "the server from communicating with the client. If you're using the "
        "webroot plugin, you should also verify that you are serving files "
        "from the webroot path you provided.",
    "dnssec":
        _ERROR_HELP_COMMON + " Additionally, if you have DNSSEC enabled for "
        "your domain, please ensure that the signature is valid.",
    "malformed":
        "To fix these errors, please make sure that you did not provide any "
        "invalid information to the client, and try running Certbot "
        "again.",
    "serverInternal":
        "Unfortunately, an error on the ACME server prevented you from completing "
        "authorization. Please try again later.",
    "tls":
        _ERROR_HELP_COMMON + " Additionally, please check that you have an "
        "up-to-date TLS configuration that allows the server to communicate "
        "with the Certbot client.",
    "unauthorized": _ERROR_HELP_COMMON,
    "unknownHost": _ERROR_HELP_COMMON,
}
def _report_failed_authzrs(failed_authzrs, account_key):
    """Notifies the user about failed authorizations."""
    # Group every errored challenge by its error type, so one consolidated
    # message is sent to the reporter per error type.
    problems = {}  # type: Dict[str, List[achallenges.AnnotatedChallenge]]
    for authzr in failed_authzrs:
        for challb in authzr.body.challenges:
            if challb.error:
                achall = challb_to_achall(
                    challb, account_key, authzr.body.identifier.value)
                problems.setdefault(achall.error.typ, []).append(achall)
    reporter = zope.component.getUtility(interfaces.IReporter)
    for grouped_achalls in problems.values():
        reporter.add_message(
            _generate_failed_chall_msg(grouped_achalls), reporter.MEDIUM_PRIORITY)
def _generate_failed_chall_msg(failed_achalls):
    """Creates a user friendly error message about failed challenges.

    :param list failed_achalls: A list of failed
        :class:`certbot.achallenges.AnnotatedChallenge` with the same error
        type.

    :returns: A formatted error message for the client.
    :rtype: str
    """
    error = failed_achalls[0].error
    # ACME errors carry a structured code; prefer it over the raw type.
    typ = error.code if messages.is_acme_error(error) else error.typ
    parts = ["The following errors were reported by the server:"]
    for achall in failed_achalls:
        parts.append("\n\nDomain: %s\nType: %s\nDetail: %s" % (
            achall.domain, typ, achall.error.detail))
    if typ in _ERROR_HELP:
        parts.append("\n\n")
        parts.append(_ERROR_HELP[typ])
    return "".join(parts)
| {
"repo_name": "letsencrypt/letsencrypt",
"path": "certbot/auth_handler.py",
"copies": "1",
"size": "17915",
"license": "apache-2.0",
"hash": 1835269979975303400,
"line_mean": 38.9888392857,
"line_max": 100,
"alpha_frac": 0.6444320402,
"autogenerated": false,
"ratio": 4.207374354156881,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5351806394356882,
"avg_score": null,
"num_lines": null
} |
"""ACME AuthHandler."""
import logging
import time
import six
import zope.component
from acme import challenges
from acme import messages
from certbot import achallenges
from certbot import errors
from certbot import error_handler
from certbot import interfaces
logger = logging.getLogger(__name__)
class AuthHandler(object):
    """ACME Authorization Handler for a client.

    :ivar auth: Authenticator capable of solving
        :class:`~acme.challenges.Challenge` types
    :type auth: :class:`certbot.interfaces.IAuthenticator`

    :ivar acme.client.Client acme: ACME client API.

    :ivar account: Client's Account
    :type account: :class:`certbot.account.Account`

    :ivar dict authzr: ACME Authorization Resource dict where keys are domains
        and values are :class:`acme.messages.AuthorizationResource`
    :ivar list achalls: DV challenges in the form of
        :class:`certbot.achallenges.AnnotatedChallenge`
    :ivar list pref_challs: sorted user specified preferred challenges
        in the form of subclasses of :class:`acme.challenges.Challenge`
        with the most preferred challenge listed first
    """
    def __init__(self, auth, acme, account, pref_challs):
        self.auth = auth
        self.acme = acme
        self.account = account
        self.authzr = dict()
        self.pref_challs = pref_challs
        # List must be used to keep responses straight.
        self.achalls = []

    def get_authorizations(self, domains, best_effort=False):
        """Retrieve all authorizations for challenges.

        :param list domains: Domains for authorization
        :param bool best_effort: Whether or not all authorizations are
            required (this is useful in renewal)

        :returns: List of authorization resources
        :rtype: list

        :raises .AuthorizationError: If unable to retrieve all
            authorizations
        """
        for domain in domains:
            self.authzr[domain] = self.acme.request_domain_challenges(
                domain, self.account.regr.new_authzr_uri)
        self._choose_challenges(domains)
        # While there are still challenges remaining...
        while self.achalls:
            resp = self._solve_challenges()
            logger.info("Waiting for verification...")
            # Send all Responses - this modifies achalls
            self._respond(resp, best_effort)
        # Just make sure all decisions are complete.
        self.verify_authzr_complete()
        # Only return valid authorizations
        retVal = [authzr for authzr in self.authzr.values()
                  if authzr.body.status == messages.STATUS_VALID]
        if not retVal:
            raise errors.AuthorizationError(
                "Challenges failed for all domains")
        return retVal

    def _choose_challenges(self, domains):
        """Retrieve necessary challenges to satisfy server."""
        logger.info("Performing the following challenges:")
        for dom in domains:
            path = gen_challenge_path(
                self.authzr[dom].body.challenges,
                self._get_chall_pref(dom),
                self.authzr[dom].body.combinations)
            dom_achalls = self._challenge_factory(
                dom, path)
            self.achalls.extend(dom_achalls)

    def _solve_challenges(self):
        """Get Responses for challenges from authenticators."""
        resp = []
        with error_handler.ErrorHandler(self._cleanup_challenges):
            try:
                if self.achalls:
                    resp = self.auth.perform(self.achalls)
            except errors.AuthorizationError:
                logger.critical("Failure in setting up challenges.")
                logger.info("Attempting to clean up outstanding challenges...")
                raise
        # One response is expected per annotated challenge, in order.
        assert len(resp) == len(self.achalls)
        return resp

    def _respond(self, resp, best_effort):
        """Send/Receive confirmation of all challenges.

        .. note:: This method also cleans up the auth_handler state.
        """
        # TODO: chall_update is a dirty hack to get around acme-spec #105
        chall_update = dict()
        active_achalls = self._send_responses(self.achalls,
                                              resp, chall_update)
        # Check for updated status...
        try:
            self._poll_challenges(chall_update, best_effort)
        finally:
            # This removes challenges from self.achalls
            self._cleanup_challenges(active_achalls)

    def _send_responses(self, achalls, resps, chall_update):
        """Send responses and make sure errors are handled.

        :param dict chall_update: parameter that is updated to hold
            authzr -> list of outstanding solved annotated challenges
        """
        active_achalls = []
        for achall, resp in six.moves.zip(achalls, resps):
            # This line needs to be outside of the if block below to
            # ensure failed challenges are cleaned up correctly
            active_achalls.append(achall)
            # Don't send challenges for None and False authenticator responses
            if resp is not None and resp:
                self.acme.answer_challenge(achall.challb, resp)
                # TODO: answer_challenge returns challr, with URI,
                # that can be used in _find_updated_challr
                # comparisons...
                if achall.domain in chall_update:
                    chall_update[achall.domain].append(achall)
                else:
                    chall_update[achall.domain] = [achall]
        return active_achalls

    def _poll_challenges(
            self, chall_update, best_effort, min_sleep=3, max_rounds=15):
        """Wait for all challenge results to be determined."""
        dom_to_check = set(chall_update.keys())
        comp_domains = set()
        rounds = 0
        while dom_to_check and rounds < max_rounds:
            # TODO: Use retry-after...
            time.sleep(min_sleep)
            all_failed_achalls = set()
            for domain in dom_to_check:
                comp_achalls, failed_achalls = self._handle_check(
                    domain, chall_update[domain])
                if len(comp_achalls) == len(chall_update[domain]):
                    # Every challenge for this domain is complete.
                    comp_domains.add(domain)
                elif not failed_achalls:
                    # Partial progress: drop completed challenges and keep
                    # polling the rest next round.
                    for achall, _ in comp_achalls:
                        chall_update[domain].remove(achall)
                # We failed some challenges... damage control
                else:
                    if best_effort:
                        comp_domains.add(domain)
                        logger.warning(
                            "Challenge failed for domain %s",
                            domain)
                    else:
                        all_failed_achalls.update(
                            updated for _, updated in failed_achalls)
            if all_failed_achalls:
                _report_failed_challs(all_failed_achalls)
                raise errors.FailedChallenges(all_failed_achalls)
            dom_to_check -= comp_domains
            comp_domains.clear()
            rounds += 1

    def _handle_check(self, domain, achalls):
        """Returns tuple of ('completed', 'failed')."""
        completed = []
        failed = []
        self.authzr[domain], _ = self.acme.poll(self.authzr[domain])
        if self.authzr[domain].body.status == messages.STATUS_VALID:
            return achalls, []
        # Note: if the whole authorization is invalid, the individual failed
        # challenges will be determined here...
        for achall in achalls:
            updated_achall = achall.update(challb=self._find_updated_challb(
                self.authzr[domain], achall))
            # This does nothing for challenges that have yet to be decided yet.
            if updated_achall.status == messages.STATUS_VALID:
                completed.append((achall, updated_achall))
            elif updated_achall.status == messages.STATUS_INVALID:
                failed.append((achall, updated_achall))
        return completed, failed

    def _find_updated_challb(self, authzr, achall):  # pylint: disable=no-self-use
        """Find updated challenge body within Authorization Resource.

        .. warning:: This assumes only one instance of type of challenge in
            each challenge resource.

        :param .AuthorizationResource authzr: Authorization Resource
        :param .AnnotatedChallenge achall: Annotated challenge for which
            to get status
        """
        for authzr_challb in authzr.body.challenges:
            if type(authzr_challb.chall) is type(achall.challb.chall):  # noqa
                return authzr_challb
        raise errors.AuthorizationError(
            "Target challenge not found in authorization resource")

    def _get_chall_pref(self, domain):
        """Return list of challenge preferences.

        :param str domain: domain for which you are requesting preferences
        """
        chall_prefs = []
        # Make sure to make a copy...
        plugin_pref = self.auth.get_chall_pref(domain)
        if self.pref_challs:
            # Keep only user-preferred challenges the plugin supports,
            # in the user's order of preference.
            chall_prefs.extend(pref for pref in self.pref_challs
                               if pref in plugin_pref)
            if chall_prefs:
                return chall_prefs
            raise errors.AuthorizationError(
                "None of the preferred challenges "
                "are supported by the selected plugin")
        chall_prefs.extend(plugin_pref)
        return chall_prefs

    def _cleanup_challenges(self, achall_list=None):
        """Cleanup challenges.

        If achall_list is not provided, cleanup all achallenges.
        """
        logger.info("Cleaning up challenges")
        if achall_list is None:
            achalls = self.achalls
        else:
            achalls = achall_list
        if achalls:
            self.auth.cleanup(achalls)
            for achall in achalls:
                self.achalls.remove(achall)

    def verify_authzr_complete(self):
        """Verifies that all authorizations have been decided.

        :returns: Whether all authzr are complete
        :rtype: bool
        """
        for authzr in self.authzr.values():
            if (authzr.body.status != messages.STATUS_VALID and
                    authzr.body.status != messages.STATUS_INVALID):
                raise errors.AuthorizationError("Incomplete authorizations")

    def _challenge_factory(self, domain, path):
        """Construct Namedtuple Challenges

        :param str domain: domain of the enrollee

        :param list path: List of indices from `challenges`.

        :returns: achalls, list of challenge type
            :class:`certbot.achallenges.Indexed`
        :rtype: list

        :raises .errors.Error: if challenge type is not recognized
        """
        achalls = []
        for index in path:
            challb = self.authzr[domain].body.challenges[index]
            achalls.append(challb_to_achall(challb, self.account.key, domain))
        return achalls
def challb_to_achall(challb, account_key, domain):
    """Converts a ChallengeBody object to an AnnotatedChallenge.

    :param .ChallengeBody challb: ChallengeBody
    :param .JWK account_key: Authorized Account Key
    :param str domain: Domain of the challb

    :returns: Appropriate AnnotatedChallenge
    :rtype: :class:`certbot.achallenges.AnnotatedChallenge`

    :raises .errors.Error: if the challenge type is unsupported
    """
    chall = challb.chall
    logger.info("%s challenge for %s", chall.typ, domain)
    if isinstance(chall, challenges.KeyAuthorizationChallenge):
        return achallenges.KeyAuthorizationAnnotatedChallenge(
            challb=challb, domain=domain, account_key=account_key)
    elif isinstance(chall, challenges.DNS):
        return achallenges.DNS(challb=challb, domain=domain)
    else:
        # Bug fix: the message was previously passed logger-style
        # (msg, arg), which does not interpolate the placeholder in the
        # exception text; format explicitly instead.
        raise errors.Error(
            "Received unsupported challenge of type: %s" % chall.typ)
def gen_challenge_path(challbs, preferences, combinations):
    """Generate a plan to get authority over the identity.

    .. todo:: This can be possibly be rewritten to use resolved_combinations.

    :param tuple challbs: A tuple of challenges
        (:class:`acme.messages.Challenge`) from
        :class:`acme.messages.AuthorizationResource` to be
        fulfilled by the client in order to prove possession of the
        identifier.

    :param list preferences: List of challenge preferences for domain
        (:class:`acme.challenges.Challenge` subclasses)

    :param tuple combinations: A collection of sets of challenges from
        :class:`acme.messages.Challenge`, each of which would
        be sufficient to prove possession of the identifier.

    :returns: tuple of indices from ``challenges``.
    :rtype: tuple

    :raises certbot.errors.AuthorizationError: If a
        path cannot be created that satisfies the CA given the preferences and
        combinations.
    """
    # With server-provided combinations we can rank them; without, every
    # challenge must be satisfiable.
    if not combinations:
        return _find_dumb_path(challbs, preferences)
    return _find_smart_path(challbs, preferences, combinations)
def _find_smart_path(challbs, preferences, combinations):
"""Find challenge path with server hints.
Can be called if combinations is included. Function uses a simple
ranking system to choose the combo with the lowest cost.
"""
chall_cost = {}
max_cost = 1
for i, chall_cls in enumerate(preferences):
chall_cost[chall_cls] = i
max_cost += i
# max_cost is now equal to sum(indices) + 1
best_combo = []
# Set above completing all of the available challenges
best_combo_cost = max_cost
combo_total = 0
for combo in combinations:
for challenge_index in combo:
combo_total += chall_cost.get(challbs[
challenge_index].chall.__class__, max_cost)
if combo_total < best_combo_cost:
best_combo = combo
best_combo_cost = combo_total
combo_total = 0
if not best_combo:
_report_no_chall_path()
return best_combo
def _find_dumb_path(challbs, preferences):
"""Find challenge path without server hints.
Should be called if the combinations hint is not included by the
server. This function either returns a path containing all
challenges provided by the CA or raises an exception.
"""
path = []
for i, challb in enumerate(challbs):
# supported is set to True if the challenge type is supported
supported = next((True for pref_c in preferences
if isinstance(challb.chall, pref_c)), False)
if supported:
path.append(i)
else:
_report_no_chall_path()
return path
def _report_no_chall_path():
    """Logs and raises an error that no satisfiable chall path exists."""
    msg = ("Client with the currently selected authenticator does not support "
           "any combination of challenges that will satisfy the CA.")
    # Logger.fatal is an obsolete alias of critical(); use the canonical
    # name (the newer version of this module already does).
    logger.critical(msg)
    raise errors.AuthorizationError(msg)
# Shared first-line advice for errors that usually stem from incorrect DNS
# records for the requested domain.
_ERROR_HELP_COMMON = (
    "To fix these errors, please make sure that your domain name was entered "
    "correctly and the DNS A record(s) for that domain contain(s) the "
    "right IP address.")

# Maps an ACME error type to remediation advice appended to the
# failed-challenge report shown to the user in _generate_failed_chall_msg.
_ERROR_HELP = {
    "connection":
        _ERROR_HELP_COMMON + " Additionally, please check that your computer "
        "has a publicly routable IP address and that no firewalls are preventing "
        "the server from communicating with the client. If you're using the "
        "webroot plugin, you should also verify that you are serving files "
        "from the webroot path you provided.",
    "dnssec":
        _ERROR_HELP_COMMON + " Additionally, if you have DNSSEC enabled for "
        "your domain, please ensure that the signature is valid.",
    "malformed":
        "To fix these errors, please make sure that you did not provide any "
        "invalid information to the client, and try running Certbot "
        "again.",
    "serverInternal":
        "Unfortunately, an error on the ACME server prevented you from completing "
        "authorization. Please try again later.",
    "tls":
        _ERROR_HELP_COMMON + " Additionally, please check that you have an "
        "up-to-date TLS configuration that allows the server to communicate "
        "with the Certbot client.",
    "unauthorized": _ERROR_HELP_COMMON,
    "unknownHost": _ERROR_HELP_COMMON,
}
def _report_failed_challs(failed_achalls):
    """Notifies the user about failed challenges.

    :param set failed_achalls: A set of failed
        :class:`certbot.achallenges.AnnotatedChallenge`.
    """
    # Group errored challenges by error type so one consolidated message is
    # sent to the reporter per type.
    grouped = dict()
    for achall in failed_achalls:
        if not achall.error:
            continue
        grouped.setdefault(achall.error.typ, []).append(achall)
    reporter = zope.component.getUtility(interfaces.IReporter)
    for achall_group in six.itervalues(grouped):
        reporter.add_message(
            _generate_failed_chall_msg(achall_group), reporter.MEDIUM_PRIORITY)
def _generate_failed_chall_msg(failed_achalls):
    """Creates a user friendly error message about failed challenges.

    :param list failed_achalls: A list of failed
        :class:`certbot.achallenges.AnnotatedChallenge` with the same error
        type.

    :returns: A formatted error message for the client.
    :rtype: str
    """
    error = failed_achalls[0].error
    # ACME errors carry a structured code; prefer it over the raw type.
    typ = error.code if messages.is_acme_error(error) else error.typ
    parts = ["The following errors were reported by the server:"]
    for achall in failed_achalls:
        parts.append("\n\nDomain: %s\nType: %s\nDetail: %s" % (
            achall.domain, typ, achall.error.detail))
    if typ in _ERROR_HELP:
        parts.append("\n\n")
        parts.append(_ERROR_HELP[typ])
    return "".join(parts)
| {
"repo_name": "bsmr-misc-forks/letsencrypt",
"path": "certbot/auth_handler.py",
"copies": "3",
"size": "17866",
"license": "apache-2.0",
"hash": -5732272474727994000,
"line_mean": 33.6912621359,
"line_max": 83,
"alpha_frac": 0.6220194783,
"autogenerated": false,
"ratio": 4.232646292347785,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.6354665770647785,
"avg_score": null,
"num_lines": null
} |
"""ACME client API."""
import base64
import collections
import datetime
from email.utils import parsedate_tz
import heapq
import logging
import time
import six
from six.moves import http_client # pylint: disable=import-error
import josepy as jose
import OpenSSL
import re
from requests_toolbelt.adapters.source import SourceAddressAdapter
import requests
from requests.adapters import HTTPAdapter
import sys
from acme import challenges
from acme import crypto_util
from acme import errors
from acme import jws
from acme import messages
# pylint: disable=unused-import, no-name-in-module
from acme.magic_typing import Dict, List, Set, Text
logger = logging.getLogger(__name__)
# Prior to Python 2.7.9 the stdlib SSL module did not allow a user to configure
# many important security related options. On these platforms we use PyOpenSSL
# for SSL, which does allow these options to be configured.
# https://urllib3.readthedocs.org/en/latest/security.html#insecureplatformwarning
if sys.version_info < (2, 7, 9):  # pragma: no cover
    try:
        # pylint: disable=no-member
        requests.packages.urllib3.contrib.pyopenssl.inject_into_urllib3()  # type: ignore
    except AttributeError:
        # NOTE(review): presumably this handles requests builds that do not
        # vendor urllib3, falling back to the standalone package — confirm.
        import urllib3.contrib.pyopenssl  # pylint: disable=import-error
        urllib3.contrib.pyopenssl.inject_into_urllib3()

# Default network timeout (seconds) for ACME HTTP requests.
DEFAULT_NETWORK_TIMEOUT = 45

# MIME type identifying DER-encoded certificates.
DER_CONTENT_TYPE = 'application/pkix-cert'
class ClientBase(object): # pylint: disable=too-many-instance-attributes
"""ACME client base object.
:ivar messages.Directory directory:
:ivar .ClientNetwork net: Client network.
:ivar int acme_version: ACME protocol version. 1 or 2.
"""
    def __init__(self, directory, net, acme_version):
        """Initialize.

        :param .messages.Directory directory: Directory Resource
        :param .ClientNetwork net: Client network.
        :param int acme_version: ACME protocol version. 1 or 2.
        """
        self.directory = directory
        self.net = net
        self.acme_version = acme_version
@classmethod
def _regr_from_response(cls, response, uri=None, terms_of_service=None):
if 'terms-of-service' in response.links:
terms_of_service = response.links['terms-of-service']['url']
return messages.RegistrationResource(
body=messages.Registration.from_json(response.json()),
uri=response.headers.get('Location', uri),
terms_of_service=terms_of_service)
    def _send_recv_regr(self, regr, body):
        # POST the given registration message to the resource's URI and
        # rebuild the resource from the reply, preserving the URI and
        # terms-of-service already known locally.
        response = self._post(regr.uri, body)
        # TODO: Boulder returns httplib.ACCEPTED
        #assert response.status_code == httplib.OK
        # TODO: Boulder does not set Location or Link on update
        # (c.f. acme-spec #94)
        return self._regr_from_response(
            response, uri=regr.uri,
            terms_of_service=regr.terms_of_service)
def _post(self, *args, **kwargs):
"""Wrapper around self.net.post that adds the acme_version.
"""
kwargs.setdefault('acme_version', self.acme_version)
if hasattr(self.directory, 'newNonce'):
kwargs.setdefault('new_nonce_url', getattr(self.directory, 'newNonce'))
return self.net.post(*args, **kwargs)
def update_registration(self, regr, update=None):
"""Update registration.
:param messages.RegistrationResource regr: Registration Resource.
:param messages.Registration update: Updated body of the
resource. If not provided, body will be taken from `regr`.
:returns: Updated Registration Resource.
:rtype: `.RegistrationResource`
"""
update = regr.body if update is None else update
body = messages.UpdateRegistration(**dict(update))
updated_regr = self._send_recv_regr(regr, body=body)
self.net.account = updated_regr
return updated_regr
def deactivate_registration(self, regr):
"""Deactivate registration.
:param messages.RegistrationResource regr: The Registration Resource
to be deactivated.
:returns: The Registration resource that was deactivated.
:rtype: `.RegistrationResource`
"""
return self.update_registration(regr, update={'status': 'deactivated'})
def query_registration(self, regr):
"""Query server about registration.
:param messages.RegistrationResource: Existing Registration
Resource.
"""
return self._send_recv_regr(regr, messages.UpdateRegistration())
def _authzr_from_response(self, response, identifier=None, uri=None):
authzr = messages.AuthorizationResource(
body=messages.Authorization.from_json(response.json()),
uri=response.headers.get('Location', uri))
if identifier is not None and authzr.body.identifier != identifier:
raise errors.UnexpectedUpdate(authzr)
return authzr
def answer_challenge(self, challb, response):
"""Answer challenge.
:param challb: Challenge Resource body.
:type challb: `.ChallengeBody`
:param response: Corresponding Challenge response
:type response: `.challenges.ChallengeResponse`
:returns: Challenge Resource with updated body.
:rtype: `.ChallengeResource`
:raises .UnexpectedUpdate:
"""
# Because sending keyAuthorization in a response challenge has been removed from the ACME
# spec, it is not included in the KeyAuthorizationResponseChallenge JSON by default.
# However as a migration path, we temporarily expect a malformed error from the server,
# and fallback by resending the challenge response with the keyAuthorization field.
# TODO: Remove this fallback for Certbot 0.34.0
try:
response = self._post(challb.uri, response)
except messages.Error as error:
if (error.code == 'malformed'
and isinstance(response, challenges.KeyAuthorizationChallengeResponse)):
logger.debug('Error while responding to a challenge without keyAuthorization '
'in the JWS, your ACME CA server may not support it:\n%s', error)
logger.debug('Retrying request with keyAuthorization set.')
response._dump_authorization_key(True) # pylint: disable=protected-access
response = self._post(challb.uri, response)
else:
raise
try:
authzr_uri = response.links['up']['url']
except KeyError:
raise errors.ClientError('"up" Link header missing')
challr = messages.ChallengeResource(
authzr_uri=authzr_uri,
body=messages.ChallengeBody.from_json(response.json()))
# TODO: check that challr.uri == response.headers['Location']?
if challr.uri != challb.uri:
raise errors.UnexpectedUpdate(challr.uri)
return challr
@classmethod
def retry_after(cls, response, default):
"""Compute next `poll` time based on response ``Retry-After`` header.
Handles integers and various datestring formats per
https://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.37
:param requests.Response response: Response from `poll`.
:param int default: Default value (in seconds), used when
``Retry-After`` header is not present or invalid.
:returns: Time point when next `poll` should be performed.
:rtype: `datetime.datetime`
"""
retry_after = response.headers.get('Retry-After', str(default))
try:
seconds = int(retry_after)
except ValueError:
# The RFC 2822 parser handles all of RFC 2616's cases in modern
# environments (primarily HTTP 1.1+ but also py27+)
when = parsedate_tz(retry_after)
if when is not None:
try:
tz_secs = datetime.timedelta(when[-1] if when[-1] else 0)
return datetime.datetime(*when[:7]) - tz_secs
except (ValueError, OverflowError):
pass
seconds = default
return datetime.datetime.now() + datetime.timedelta(seconds=seconds)
def _revoke(self, cert, rsn, url):
"""Revoke certificate.
:param .ComparableX509 cert: `OpenSSL.crypto.X509` wrapped in
`.ComparableX509`
:param int rsn: Reason code for certificate revocation.
:param str url: ACME URL to post to
:raises .ClientError: If revocation is unsuccessful.
"""
response = self._post(url,
messages.Revocation(
certificate=cert,
reason=rsn))
if response.status_code != http_client.OK:
raise errors.ClientError(
'Successful revocation must return HTTP OK status')
class Client(ClientBase):
    """ACME client for a v1 API.

    .. todo::
       Clean up raised error types hierarchy, document, and handle (wrap)
       instances of `.DeserializationError` raised in `from_json()`.

    :ivar messages.Directory directory:
    :ivar key: `josepy.JWK` (private)
    :ivar alg: `josepy.JWASignature`
    :ivar bool verify_ssl: Verify SSL certificates?
    :ivar .ClientNetwork net: Client network. Useful for testing. If not
        supplied, it will be initialized using `key`, `alg` and
        `verify_ssl`.
    """

    def __init__(self, directory, key, alg=jose.RS256, verify_ssl=True,
                 net=None):
        """Initialize.

        :param directory: Directory Resource (`.messages.Directory`) or
            URI from which the resource will be downloaded.
        """
        # pylint: disable=too-many-arguments
        self.key = key
        if net is None:
            net = ClientNetwork(key, alg=alg, verify_ssl=verify_ssl)
        # Accept either a ready Directory resource or a URL to fetch it from.
        if isinstance(directory, six.string_types):
            directory = messages.Directory.from_json(
                net.get(directory).json())
        super(Client, self).__init__(directory=directory,
                                     net=net, acme_version=1)

    def register(self, new_reg=None):
        """Register.

        :param .NewRegistration new_reg:

        :returns: Registration Resource.
        :rtype: `.RegistrationResource`
        """
        new_reg = messages.NewRegistration() if new_reg is None else new_reg
        response = self._post(self.directory[new_reg], new_reg)
        # TODO: handle errors
        # NOTE(review): assert is stripped under -O; consider raising
        # errors.ClientError instead.
        assert response.status_code == http_client.CREATED

        # "Instance of 'Field' has no key/contact member" bug:
        # pylint: disable=no-member
        return self._regr_from_response(response)

    def agree_to_tos(self, regr):
        """Agree to the terms-of-service.

        Agree to the terms-of-service in a Registration Resource.

        :param regr: Registration Resource.
        :type regr: `.RegistrationResource`

        :returns: Updated Registration Resource.
        :rtype: `.RegistrationResource`
        """
        return self.update_registration(
            regr.update(body=regr.body.update(agreement=regr.terms_of_service)))

    def request_challenges(self, identifier, new_authzr_uri=None):
        """Request challenges.

        :param .messages.Identifier identifier: Identifier to be challenged.
        :param str new_authzr_uri: Deprecated. Do not use.

        :returns: Authorization Resource.
        :rtype: `.AuthorizationResource`

        :raises errors.WildcardUnsupportedError: if a wildcard is requested
        """
        if new_authzr_uri is not None:
            logger.debug("request_challenges with new_authzr_uri deprecated.")

        if identifier.value.startswith("*"):
            raise errors.WildcardUnsupportedError(
                "Requesting an authorization for a wildcard name is"
                " forbidden by this version of the ACME protocol.")

        new_authz = messages.NewAuthorization(identifier=identifier)
        response = self._post(self.directory.new_authz, new_authz)
        # TODO: handle errors
        assert response.status_code == http_client.CREATED

        return self._authzr_from_response(response, identifier)

    def request_domain_challenges(self, domain, new_authzr_uri=None):
        """Request challenges for domain names.

        This is simply a convenience function that wraps around
        `request_challenges`, but works with domain names instead of
        generic identifiers. See ``request_challenges`` for more
        documentation.

        :param str domain: Domain name to be challenged.
        :param str new_authzr_uri: Deprecated. Do not use.

        :returns: Authorization Resource.
        :rtype: `.AuthorizationResource`

        :raises errors.WildcardUnsupportedError: if a wildcard is requested
        """
        return self.request_challenges(messages.Identifier(
            typ=messages.IDENTIFIER_FQDN, value=domain), new_authzr_uri)

    def request_issuance(self, csr, authzrs):
        """Request issuance.

        :param csr: CSR
        :type csr: `OpenSSL.crypto.X509Req` wrapped in `.ComparableX509`

        :param authzrs: `list` of `.AuthorizationResource`

        :returns: Issued certificate
        :rtype: `.messages.CertificateResource`
        """
        assert authzrs, "Authorizations list is empty"
        logger.debug("Requesting issuance...")

        # TODO: assert len(authzrs) == number of SANs
        req = messages.CertificateRequest(csr=csr)

        content_type = DER_CONTENT_TYPE  # TODO: add 'cert_type' argument
        response = self._post(
            self.directory.new_cert,
            req,
            content_type=content_type,
            headers={'Accept': content_type})

        cert_chain_uri = response.links.get('up', {}).get('url')

        try:
            uri = response.headers['Location']
        except KeyError:
            raise errors.ClientError('"Location" Header missing')

        return messages.CertificateResource(
            uri=uri, authzrs=authzrs, cert_chain_uri=cert_chain_uri,
            body=jose.ComparableX509(OpenSSL.crypto.load_certificate(
                OpenSSL.crypto.FILETYPE_ASN1, response.content)))

    def poll(self, authzr):
        """Poll Authorization Resource for status.

        :param authzr: Authorization Resource
        :type authzr: `.AuthorizationResource`

        :returns: Updated Authorization Resource and HTTP response.
        :rtype: (`.AuthorizationResource`, `requests.Response`)
        """
        response = self.net.get(authzr.uri)
        updated_authzr = self._authzr_from_response(
            response, authzr.body.identifier, authzr.uri)
        return updated_authzr, response

    def poll_and_request_issuance(
            self, csr, authzrs, mintime=5, max_attempts=10):
        """Poll and request issuance.

        This function polls all provided Authorization Resource URIs
        until all challenges are valid, respecting ``Retry-After`` HTTP
        headers, and then calls `request_issuance`.

        :param .ComparableX509 csr: CSR (`OpenSSL.crypto.X509Req`
            wrapped in `.ComparableX509`)
        :param authzrs: `list` of `.AuthorizationResource`
        :param int mintime: Minimum time before next attempt, used if
            ``Retry-After`` is not present in the response.
        :param int max_attempts: Maximum number of attempts (per
            authorization) before `PollError` with non-empty ``waiting``
            is raised.

        :returns: ``(cert, updated_authzrs)`` `tuple` where ``cert`` is
            the issued certificate (`.messages.CertificateResource`),
            and ``updated_authzrs`` is a `tuple` consisting of updated
            Authorization Resources (`.AuthorizationResource`) as
            present in the responses from server, and in the same order
            as the input ``authzrs``.
        :rtype: `tuple`

        :raises PollError: in case of timeout or if some authorization
            was marked by the CA as invalid
        """
        # pylint: disable=too-many-locals
        assert max_attempts > 0
        attempts = collections.defaultdict(int)  # type: Dict[messages.AuthorizationResource, int]
        exhausted = set()

        # priority queue with datetime.datetime (based on Retry-After) as key,
        # and original Authorization Resource as value
        waiting = [
            (datetime.datetime.now(), index, authzr)
            for index, authzr in enumerate(authzrs)
        ]
        heapq.heapify(waiting)
        # mapping between original Authorization Resource and the most
        # recently updated one
        updated = dict((authzr, authzr) for authzr in authzrs)

        while waiting:
            # find the smallest Retry-After, and sleep if necessary
            when, index, authzr = heapq.heappop(waiting)
            now = datetime.datetime.now()
            if when > now:
                # BUGFIX: use total_seconds() -- timedelta.seconds discards
                # the days component, which silently truncates waits longer
                # than 24h (e.g. a large server-supplied Retry-After).
                seconds = (when - now).total_seconds()
                logger.debug('Sleeping for %d seconds', seconds)
                time.sleep(seconds)

            # Note that we poll with the latest updated Authorization
            # URI, which might have a different URI than initial one
            updated_authzr, response = self.poll(updated[authzr])
            updated[authzr] = updated_authzr

            attempts[authzr] += 1
            # pylint: disable=no-member
            if updated_authzr.body.status not in (
                    messages.STATUS_VALID, messages.STATUS_INVALID):
                if attempts[authzr] < max_attempts:
                    # push back to the priority queue, with updated retry_after
                    heapq.heappush(waiting, (self.retry_after(
                        response, default=mintime), index, authzr))
                else:
                    exhausted.add(authzr)

        if exhausted or any(authzr.body.status == messages.STATUS_INVALID
                            for authzr in six.itervalues(updated)):
            raise errors.PollError(exhausted, updated)

        updated_authzrs = tuple(updated[authzr] for authzr in authzrs)
        return self.request_issuance(csr, updated_authzrs), updated_authzrs

    def _get_cert(self, uri):
        """Returns certificate from URI.

        :param str uri: URI of certificate

        :returns: tuple of the form
            (response, :class:`josepy.util.ComparableX509`)
        :rtype: tuple
        """
        content_type = DER_CONTENT_TYPE  # TODO: make it a param
        response = self.net.get(uri, headers={'Accept': content_type},
                                content_type=content_type)
        return response, jose.ComparableX509(OpenSSL.crypto.load_certificate(
            OpenSSL.crypto.FILETYPE_ASN1, response.content))

    def check_cert(self, certr):
        """Check for new cert.

        :param certr: Certificate Resource
        :type certr: `.CertificateResource`

        :returns: Updated Certificate Resource.
        :rtype: `.CertificateResource`
        """
        # TODO: acme-spec 5.1 table action should be renamed to
        # "refresh cert", and this method integrated with self.refresh
        response, cert = self._get_cert(certr.uri)
        if 'Location' not in response.headers:
            raise errors.ClientError('Location header missing')
        if response.headers['Location'] != certr.uri:
            raise errors.UnexpectedUpdate(response.text)
        return certr.update(body=cert)

    def refresh(self, certr):
        """Refresh certificate.

        :param certr: Certificate Resource
        :type certr: `.CertificateResource`

        :returns: Updated Certificate Resource.
        :rtype: `.CertificateResource`
        """
        # TODO: If a client sends a refresh request and the server is
        # not willing to refresh the certificate, the server MUST
        # respond with status code 403 (Forbidden)
        return self.check_cert(certr)

    def fetch_chain(self, certr, max_length=10):
        """Fetch chain for certificate.

        :param .CertificateResource certr: Certificate Resource
        :param int max_length: Maximum allowed length of the chain.
            Note that each element in the certificate requires new
            ``HTTP GET`` request, and the length of the chain is
            controlled by the ACME CA.

        :raises errors.Error: if recursion exceeds `max_length`

        :returns: Certificate chain for the Certificate Resource. It is
            a list ordered so that the first element is a signer of the
            certificate from Certificate Resource. Will be empty if
            ``cert_chain_uri`` is ``None``.
        :rtype: `list` of `OpenSSL.crypto.X509` wrapped in `.ComparableX509`
        """
        chain = []  # type: List[jose.ComparableX509]
        uri = certr.cert_chain_uri
        # Follow 'up' links until the chain is complete or too long.
        while uri is not None and len(chain) < max_length:
            response, cert = self._get_cert(uri)
            uri = response.links.get('up', {}).get('url')
            chain.append(cert)
        if uri is not None:
            raise errors.Error(
                "Recursion limit reached. Didn't get {0}".format(uri))
        return chain

    def revoke(self, cert, rsn):
        """Revoke certificate.

        :param .ComparableX509 cert: `OpenSSL.crypto.X509` wrapped in
            `.ComparableX509`

        :param int rsn: Reason code for certificate revocation.

        :raises .ClientError: If revocation is unsuccessful.
        """
        return self._revoke(cert, rsn, self.directory[messages.Revocation])
class ClientV2(ClientBase):
    """ACME client for a v2 API.

    :ivar messages.Directory directory:
    :ivar .ClientNetwork net: Client network.
    """

    def __init__(self, directory, net):
        """Initialize.

        :param .messages.Directory directory: Directory Resource
        :param .ClientNetwork net: Client network.
        """
        super(ClientV2, self).__init__(directory=directory,
                                       net=net, acme_version=2)

    def new_account(self, new_account):
        """Register.

        :param .NewRegistration new_account:

        :raises .ConflictError: in case the account already exists

        :returns: Registration Resource.
        :rtype: `.RegistrationResource`
        """
        response = self._post(self.directory['newAccount'], new_account)
        # if account already exists
        if response.status_code == 200 and 'Location' in response.headers:
            raise errors.ConflictError(response.headers.get('Location'))
        # "Instance of 'Field' has no key/contact member" bug:
        # pylint: disable=no-member
        regr = self._regr_from_response(response)
        self.net.account = regr
        return regr

    def query_registration(self, regr):
        """Query server about registration.

        :param messages.RegistrationResource: Existing Registration
            Resource.
        """
        # The account must be set on the network layer so the request is
        # signed with the right kid.
        self.net.account = regr
        updated_regr = super(ClientV2, self).query_registration(regr)
        self.net.account = updated_regr
        return updated_regr

    def update_registration(self, regr, update=None):
        """Update registration.

        :param messages.RegistrationResource regr: Registration Resource.
        :param messages.Registration update: Updated body of the
            resource. If not provided, body will be taken from `regr`.

        :returns: Updated Registration Resource.
        :rtype: `.RegistrationResource`
        """
        # https://github.com/certbot/certbot/issues/6155
        new_regr = self._get_v2_account(regr)
        return super(ClientV2, self).update_registration(new_regr, update)

    def _get_v2_account(self, regr):
        """Re-fetch the account URI via newAccount(onlyReturnExisting)."""
        self.net.account = None
        only_existing_reg = regr.body.update(only_return_existing=True)
        response = self._post(self.directory['newAccount'], only_existing_reg)
        updated_uri = response.headers['Location']
        new_regr = regr.update(uri=updated_uri)
        self.net.account = new_regr
        return new_regr

    def new_order(self, csr_pem):
        """Request a new Order object from the server.

        :param str csr_pem: A CSR in PEM format.

        :returns: The newly created order.
        :rtype: OrderResource
        """
        csr = OpenSSL.crypto.load_certificate_request(OpenSSL.crypto.FILETYPE_PEM, csr_pem)
        # pylint: disable=protected-access
        dnsNames = crypto_util._pyopenssl_cert_or_req_all_names(csr)
        identifiers = []
        for name in dnsNames:
            identifiers.append(messages.Identifier(typ=messages.IDENTIFIER_FQDN,
                                                   value=name))
        order = messages.NewOrder(identifiers=identifiers)
        response = self._post(self.directory['newOrder'], order)
        body = messages.Order.from_json(response.json())
        authorizations = []
        for url in body.authorizations:
            authorizations.append(self._authzr_from_response(self._post_as_get(url), uri=url))
        return messages.OrderResource(
            body=body,
            uri=response.headers.get('Location'),
            authorizations=authorizations,
            csr_pem=csr_pem)

    def poll(self, authzr):
        """Poll Authorization Resource for status.

        :param authzr: Authorization Resource
        :type authzr: `.AuthorizationResource`

        :returns: Updated Authorization Resource and HTTP response.
        :rtype: (`.AuthorizationResource`, `requests.Response`)
        """
        response = self._post_as_get(authzr.uri)
        updated_authzr = self._authzr_from_response(
            response, authzr.body.identifier, authzr.uri)
        return updated_authzr, response

    def poll_and_finalize(self, orderr, deadline=None):
        """Poll authorizations and finalize the order.

        If no deadline is provided, this method will timeout after 90
        seconds.

        :param messages.OrderResource orderr: order to finalize
        :param datetime.datetime deadline: when to stop polling and timeout

        :returns: finalized order
        :rtype: messages.OrderResource
        """
        if deadline is None:
            deadline = datetime.datetime.now() + datetime.timedelta(seconds=90)
        orderr = self.poll_authorizations(orderr, deadline)
        return self.finalize_order(orderr, deadline)

    def poll_authorizations(self, orderr, deadline):
        """Poll Order Resource for status."""
        responses = []
        for url in orderr.body.authorizations:
            while datetime.datetime.now() < deadline:
                authzr = self._authzr_from_response(self._post_as_get(url), uri=url)
                if authzr.body.status != messages.STATUS_PENDING:
                    responses.append(authzr)
                    break
                time.sleep(1)
        # If we didn't get a response for every authorization, we fell through
        # the bottom of the loop due to hitting the deadline.
        if len(responses) < len(orderr.body.authorizations):
            raise errors.TimeoutError()
        failed = []
        for authzr in responses:
            if authzr.body.status != messages.STATUS_VALID:
                for chall in authzr.body.challenges:
                    # BUGFIX(idiom): compare against None with `is not`,
                    # never `!=` (PEP 8).
                    if chall.error is not None:
                        failed.append(authzr)
        if failed:
            raise errors.ValidationError(failed)
        return orderr.update(authorizations=responses)

    def finalize_order(self, orderr, deadline):
        """Finalize an order and obtain a certificate.

        :param messages.OrderResource orderr: order to finalize
        :param datetime.datetime deadline: when to stop polling and timeout

        :returns: finalized order
        :rtype: messages.OrderResource
        """
        csr = OpenSSL.crypto.load_certificate_request(
            OpenSSL.crypto.FILETYPE_PEM, orderr.csr_pem)
        wrapped_csr = messages.CertificateRequest(csr=jose.ComparableX509(csr))
        self._post(orderr.body.finalize, wrapped_csr)
        while datetime.datetime.now() < deadline:
            time.sleep(1)
            response = self._post_as_get(orderr.uri)
            body = messages.Order.from_json(response.json())
            if body.error is not None:
                raise errors.IssuanceError(body.error)
            if body.certificate is not None:
                certificate_response = self._post_as_get(body.certificate).text
                return orderr.update(body=body, fullchain_pem=certificate_response)
        raise errors.TimeoutError()

    def revoke(self, cert, rsn):
        """Revoke certificate.

        :param .ComparableX509 cert: `OpenSSL.crypto.X509` wrapped in
            `.ComparableX509`

        :param int rsn: Reason code for certificate revocation.

        :raises .ClientError: If revocation is unsuccessful.
        """
        return self._revoke(cert, rsn, self.directory['revokeCert'])

    def external_account_required(self):
        """Checks if ACME server requires External Account Binding authentication."""
        return (hasattr(self.directory, 'meta')
                and bool(self.directory.meta.external_account_required))

    def _post_as_get(self, *args, **kwargs):
        """
        Send GET request using the POST-as-GET protocol if needed.
        The request will be first issued using POST-as-GET for ACME v2. If the ACME CA servers do
        not support this yet and return an error, request will be retried using GET.
        For ACME v1, only GET request will be tried, as POST-as-GET is not supported.
        :param args:
        :param kwargs:
        :return:
        """
        if self.acme_version >= 2:
            # We add an empty payload for POST-as-GET requests
            new_args = args[:1] + (None,) + args[1:]
            try:
                return self._post(*new_args, **kwargs)  # pylint: disable=star-args
            except messages.Error as error:
                if error.code == 'malformed':
                    logger.debug('Error during a POST-as-GET request, '
                                 'your ACME CA server may not support it:\n%s', error)
                    logger.debug('Retrying request with GET.')
                else:  # pragma: no cover
                    raise
        # If POST-as-GET is not supported yet, we use a GET instead.
        return self.net.get(*args, **kwargs)
class BackwardsCompatibleClientV2(object):
    """ACME client wrapper that tends towards V2-style calls, but
    supports V1 servers.

    .. note:: While this class handles the majority of the differences
        between versions of the ACME protocol, if you need to support an
        ACME server based on version 3 or older of the IETF ACME draft
        that uses combinations in authorizations (or lack thereof) to
        signal that the client needs to complete something other than
        any single challenge in the authorization to make it valid, the
        user of this class needs to understand and handle these
        differences themselves.  This does not apply to either of Let's
        Encrypt's endpoints where successfully completing any challenge
        in an authorization will make it valid.

    :ivar int acme_version: 1 or 2, corresponding to the Let's Encrypt endpoint
    :ivar .ClientBase client: either Client or ClientV2
    """

    def __init__(self, net, key, server):
        # Fetch the directory once and pick the matching client flavor.
        directory = messages.Directory.from_json(net.get(server).json())
        self.acme_version = self._acme_version_from_directory(directory)
        self.client = (
            Client(directory, key=key, net=net) if self.acme_version == 1
            else ClientV2(directory, net=net))

    def __getattr__(self, name):
        # Anything not defined here is delegated to the wrapped client.
        return getattr(self.client, name)

    def new_account_and_tos(self, regr, check_tos_cb=None):
        """Combined register and agree_tos for V1, new_account for V2

        :param .NewRegistration regr:
        :param callable check_tos_cb: callback that raises an error if
            the check does not work
        """
        def _assess_tos(tos):
            if check_tos_cb is not None:
                check_tos_cb(tos)

        if self.acme_version != 1:
            if "terms_of_service" in self.client.directory.meta:
                _assess_tos(self.client.directory.meta.terms_of_service)
                regr = regr.update(terms_of_service_agreed=True)
            return self.client.new_account(regr)

        # V1 flow: register first, then (possibly) agree to the ToS.
        regr = self.client.register(regr)
        if regr.terms_of_service is not None:
            _assess_tos(regr.terms_of_service)
            return self.client.agree_to_tos(regr)
        return regr

    def new_order(self, csr_pem):
        """Request a new Order object from the server.

        If using ACMEv1, returns a dummy OrderResource with only
        the authorizations field filled in.

        :param str csr_pem: A CSR in PEM format.

        :returns: The newly created order.
        :rtype: OrderResource

        :raises errors.WildcardUnsupportedError: if a wildcard domain is
            requested but unsupported by the ACME version
        """
        if self.acme_version != 1:
            return self.client.new_order(csr_pem)

        # V1 has no orders: synthesize one from per-domain authorizations.
        parsed_csr = OpenSSL.crypto.load_certificate_request(
            OpenSSL.crypto.FILETYPE_PEM, csr_pem)
        # pylint: disable=protected-access
        names = crypto_util._pyopenssl_cert_or_req_all_names(parsed_csr)
        authzrs = [self.client.request_domain_challenges(name)
                   for name in names]
        return messages.OrderResource(authorizations=authzrs, csr_pem=csr_pem)

    def finalize_order(self, orderr, deadline):
        """Finalize an order and obtain a certificate.

        :param messages.OrderResource orderr: order to finalize
        :param datetime.datetime deadline: when to stop polling and timeout

        :returns: finalized order
        :rtype: messages.OrderResource
        """
        if self.acme_version != 1:
            return self.client.finalize_order(orderr, deadline)

        # V1 flow: request issuance, then poll for the chain until deadline.
        parsed_csr = OpenSSL.crypto.load_certificate_request(
            OpenSSL.crypto.FILETYPE_PEM, orderr.csr_pem)
        certr = self.client.request_issuance(
            jose.ComparableX509(parsed_csr), orderr.authorizations)

        chain = None
        while datetime.datetime.now() < deadline:
            try:
                chain = self.client.fetch_chain(certr)
                break
            except errors.Error:
                time.sleep(1)
        if chain is None:
            raise errors.TimeoutError(
                'Failed to fetch chain. You should not deploy the generated '
                'certificate, please rerun the command for a new one.')

        cert_pem = OpenSSL.crypto.dump_certificate(
            OpenSSL.crypto.FILETYPE_PEM, certr.body.wrapped).decode()
        chain_pem = crypto_util.dump_pyopenssl_chain(chain).decode()
        return orderr.update(fullchain_pem=(cert_pem + chain_pem))

    def revoke(self, cert, rsn):
        """Revoke certificate.

        :param .ComparableX509 cert: `OpenSSL.crypto.X509` wrapped in
            `.ComparableX509`

        :param int rsn: Reason code for certificate revocation.

        :raises .ClientError: If revocation is unsuccessful.
        """
        return self.client.revoke(cert, rsn)

    def _acme_version_from_directory(self, directory):
        # Only ACME v2 directories expose a newNonce endpoint.
        return 2 if hasattr(directory, 'newNonce') else 1

    def external_account_required(self):
        """Checks if the server requires an external account for ACMEv2 servers.

        Always return False for ACMEv1 servers, as it doesn't use External
        Account Binding."""
        if self.acme_version == 1:
            return False
        return self.client.external_account_required()
class ClientNetwork(object): # pylint: disable=too-many-instance-attributes
"""Wrapper around requests that signs POSTs for authentication.
Also adds user agent, and handles Content-Type.
"""
JSON_CONTENT_TYPE = 'application/json'
JOSE_CONTENT_TYPE = 'application/jose+json'
JSON_ERROR_CONTENT_TYPE = 'application/problem+json'
REPLAY_NONCE_HEADER = 'Replay-Nonce'
"""Initialize.
:param josepy.JWK key: Account private key
:param messages.RegistrationResource account: Account object. Required if you are
planning to use .post() with acme_version=2 for anything other than
creating a new account; may be set later after registering.
:param josepy.JWASignature alg: Algoritm to use in signing JWS.
:param bool verify_ssl: Whether to verify certificates on SSL connections.
:param str user_agent: String to send as User-Agent header.
:param float timeout: Timeout for requests.
:param source_address: Optional source address to bind to when making requests.
:type source_address: str or tuple(str, int)
"""
def __init__(self, key, account=None, alg=jose.RS256, verify_ssl=True,
             user_agent='acme-python', timeout=DEFAULT_NETWORK_TIMEOUT,
             source_address=None):
    """Initialize the network layer with an account key and HTTP session.

    Parameter semantics are documented in the class-level notes.
    """
    # pylint: disable=too-many-arguments
    self.key = key
    self.account = account
    self.alg = alg
    self.verify_ssl = verify_ssl
    # Pool of unused replay nonces harvested from earlier responses.
    self._nonces = set()  # type: Set[Text]
    self.user_agent = user_agent
    self.session = requests.Session()
    self._default_timeout = timeout
    # Optionally bind outgoing connections to a specific source address
    # by swapping in a SourceAddressAdapter for both schemes.
    adapter = HTTPAdapter()
    if source_address is not None:
        adapter = SourceAddressAdapter(source_address)
    self.session.mount("http://", adapter)
    self.session.mount("https://", adapter)
def __del__(self):
    """Best-effort cleanup of the underlying requests session."""
    # Try to close the session, but don't show exceptions to the
    # user if the call to close() fails. See #4840.
    try:
        self.session.close()
    except Exception:  # pylint: disable=broad-except
        pass
def _wrap_in_jws(self, obj, nonce, url, acme_version):
    """Wrap `JSONDeSerializable` object in JWS.

    .. todo:: Implement ``acmePath``.

    :param josepy.JSONDeSerializable obj:
    :param str url: The URL to which this object will be POSTed
    :param bytes nonce:
    :param int acme_version: ACME protocol version (1 or 2); v2 adds the
        ``url`` (and, when an account is known, ``kid``) protected headers.
    :rtype: `josepy.JWS`
    """
    # A falsy obj (POST-as-GET) yields an empty JWS payload.
    jobj = obj.json_dumps(indent=2).encode() if obj else b''
    logger.debug('JWS payload:\n%s', jobj)
    kwargs = {
        "alg": self.alg,
        "nonce": nonce
    }
    if acme_version == 2:
        kwargs["url"] = url
        # newAccount and revokeCert work without the kid
        # newAccount must not have kid
        if self.account is not None:
            kwargs["kid"] = self.account["uri"]
    kwargs["key"] = self.key
    # pylint: disable=star-args
    return jws.JWS.sign(jobj, **kwargs).json_dumps(indent=2)
@classmethod
def _check_response(cls, response, content_type=None):
    """Check response content and its type.

    .. note::
       Checking is not strict: wrong server response ``Content-Type``
       HTTP header is ignored if response is an expected JSON object
       (c.f. Boulder #56).

    :param str content_type: Expected Content-Type response header.
        If JSON is expected and not present in server response, this
        function will raise an error. Otherwise, wrong Content-Type
        is ignored, but logged.

    :raises .messages.Error: If server response body
        carries HTTP Problem (draft-ietf-appsawg-http-problem-00).
    :raises .ClientError: In case of other networking errors.
    """
    response_ct = response.headers.get('Content-Type')
    try:
        # TODO: response.json() is called twice, once here, and
        # once in _get and _post clients
        jobj = response.json()
    except ValueError:
        jobj = None

    # 409 Conflict is mapped to its own error type regardless of body.
    if response.status_code == 409:
        raise errors.ConflictError(response.headers.get('Location'))

    if not response.ok:
        if jobj is not None:
            if response_ct != cls.JSON_ERROR_CONTENT_TYPE:
                logger.debug(
                    'Ignoring wrong Content-Type (%r) for JSON Error',
                    response_ct)
            try:
                # Prefer a structured ACME problem document...
                raise messages.Error.from_json(jobj)
            except jose.DeserializationError as error:
                # Couldn't deserialize JSON object
                raise errors.ClientError((response, error))
        else:
            # response is not JSON object
            raise errors.ClientError(response)
    else:
        if jobj is not None and response_ct != cls.JSON_CONTENT_TYPE:
            logger.debug(
                'Ignoring wrong Content-Type (%r) for JSON decodable '
                'response', response_ct)

        if content_type == cls.JSON_CONTENT_TYPE and jobj is None:
            raise errors.ClientError(
                'Unexpected response Content-Type: {0}'.format(response_ct))

    return response
def _send_request(self, method, url, *args, **kwargs):
    # pylint: disable=too-many-locals
    """Send HTTP request.

    Makes sure that `verify_ssl` is respected. Logs request and
    response (with headers). For allowed parameters please see
    `requests.request`.

    :param str method: method for the new `requests.Request` object
    :param str url: URL for the new `requests.Request` object

    :raises requests.exceptions.RequestException: in case of any problems

    :returns: HTTP Response
    :rtype: `requests.Response`
    """
    if method == "POST":
        # NOTE(review): assumes every POST passes 'data' -- a POST
        # without it raises KeyError here; confirm callers.
        logger.debug('Sending POST request to %s:\n%s',
                     url, kwargs['data'])
    else:
        logger.debug('Sending %s request to %s.', method, url)
    kwargs['verify'] = self.verify_ssl
    kwargs.setdefault('headers', {})
    kwargs['headers'].setdefault('User-Agent', self.user_agent)
    kwargs.setdefault('timeout', self._default_timeout)
    try:
        response = self.session.request(method, url, *args, **kwargs)
    except requests.exceptions.RequestException as e:
        # pylint: disable=pointless-string-statement
        """Requests response parsing

        The requests library emits exceptions with a lot of extra text.
        We parse them with a regexp to raise a more readable exceptions.

        Example:
        HTTPSConnectionPool(host='acme-v01.api.letsencrypt.org',
        port=443): Max retries exceeded with url: /directory
        (Caused by NewConnectionError('
        <requests.packages.urllib3.connection.VerifiedHTTPSConnection
        object at 0x108356c50>: Failed to establish a new connection:
        [Errno 65] No route to host',))"""

        # pylint: disable=line-too-long
        err_regex = r".*host='(\S*)'.*Max retries exceeded with url\: (\/\w*).*(\[Errno \d+\])([A-Za-z ]*)"
        m = re.match(err_regex, str(e))
        if m is None:
            # Exception text didn't match the known shape; re-raise as-is.
            raise  # pragma: no cover
        else:
            host, path, _err_no, err_msg = m.groups()
            raise ValueError("Requesting {0}{1}:{2}".format(host, path, err_msg))

    # If content is DER, log the base64 of it instead of raw bytes, to keep
    # binary data out of the logs.
    if response.headers.get("Content-Type") == DER_CONTENT_TYPE:
        debug_content = base64.b64encode(response.content)
    else:
        # NOTE(review): assumes non-DER bodies are UTF-8 decodable; a
        # binary non-DER response would raise UnicodeDecodeError here.
        debug_content = response.content.decode("utf-8")
    logger.debug('Received response:\nHTTP %d\n%s\n\n%s',
                 response.status_code,
                 "\n".join(["{0}: {1}".format(k, v)
                            for k, v in response.headers.items()]),
                 debug_content)
    return response
def head(self, *args, **kwargs):
"""Send HEAD request without checking the response.
Note, that `_check_response` is not called, as it is expected
that status code other than successfully 2xx will be returned, or
messages2.Error will be raised by the server.
"""
return self._send_request('HEAD', *args, **kwargs)
def get(self, url, content_type=JSON_CONTENT_TYPE, **kwargs):
"""Send GET request and check response."""
return self._check_response(
self._send_request('GET', url, **kwargs), content_type=content_type)
def _add_nonce(self, response):
if self.REPLAY_NONCE_HEADER in response.headers:
nonce = response.headers[self.REPLAY_NONCE_HEADER]
try:
decoded_nonce = jws.Header._fields['nonce'].decode(nonce)
except jose.DeserializationError as error:
raise errors.BadNonce(nonce, error)
logger.debug('Storing nonce: %s', nonce)
self._nonces.add(decoded_nonce)
else:
raise errors.MissingNonce(response)
def _get_nonce(self, url, new_nonce_url):
if not self._nonces:
logger.debug('Requesting fresh nonce')
if new_nonce_url is None:
response = self.head(url)
else:
# request a new nonce from the acme newNonce endpoint
response = self._check_response(self.head(new_nonce_url), content_type=None)
self._add_nonce(response)
return self._nonces.pop()
def post(self, *args, **kwargs):
"""POST object wrapped in `.JWS` and check response.
If the server responded with a badNonce error, the request will
be retried once.
"""
try:
return self._post_once(*args, **kwargs)
except messages.Error as error:
if error.code == 'badNonce':
logger.debug('Retrying request after error:\n%s', error)
return self._post_once(*args, **kwargs)
else:
raise
    def _post_once(self, url, obj, content_type=JOSE_CONTENT_TYPE,
                   acme_version=1, **kwargs):
        """Single POST attempt: sign, send, validate, and bank the nonce.

        Retry-on-badNonce is handled by the public `post` wrapper.
        """
        new_nonce_url = kwargs.pop('new_nonce_url', None)
        # Sign the payload with a nonce obtained from the pool (or a
        # fresh one from the server).
        data = self._wrap_in_jws(obj, self._get_nonce(url, new_nonce_url), url, acme_version)
        kwargs.setdefault('headers', {'Content-Type': content_type})
        response = self._send_request('POST', url, data=data, **kwargs)
        response = self._check_response(response, content_type=content_type)
        # Store the Replay-Nonce from the reply for the next request.
        self._add_nonce(response)
        return response
| {
"repo_name": "letsencrypt/letsencrypt",
"path": "acme/acme/client.py",
"copies": "1",
"size": "47067",
"license": "apache-2.0",
"hash": -4193641931481446000,
"line_mean": 37.674609696,
"line_max": 111,
"alpha_frac": 0.6171415217,
"autogenerated": false,
"ratio": 4.288955713504647,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5406097235204647,
"avg_score": null,
"num_lines": null
} |
"""ACME client API."""
import collections
import datetime
from email.utils import parsedate_tz
import heapq
import logging
import time
import six
from six.moves import http_client # pylint: disable=import-error
import OpenSSL
import requests
import sys
from acme import errors
from acme import jose
from acme import jws
from acme import messages
logger = logging.getLogger(__name__)
# Prior to Python 2.7.9 the stdlib SSL module did not allow a user to configure
# many important security related options. On these platforms we use PyOpenSSL
# for SSL, which does allow these options to be configured.
# https://urllib3.readthedocs.org/en/latest/security.html#insecureplatformwarning
if sys.version_info < (2, 7, 9): # pragma: no cover
requests.packages.urllib3.contrib.pyopenssl.inject_into_urllib3()
class Client(object):  # pylint: disable=too-many-instance-attributes
    """ACME client.

    .. todo::
       Clean up raised error types hierarchy, document, and handle (wrap)
       instances of `.DeserializationError` raised in `from_json()`.

    :ivar messages.Directory directory:
    :ivar key: `.JWK` (private)
    :ivar alg: `.JWASignature`
    :ivar bool verify_ssl: Verify SSL certificates?
    :ivar .ClientNetwork net: Client network. Useful for testing. If not
        supplied, it will be initialized using `key`, `alg` and
        `verify_ssl`.
    """

    # Content-Type used when requesting/receiving DER-encoded certificates.
    DER_CONTENT_TYPE = 'application/pkix-cert'

    def __init__(self, directory, key, alg=jose.RS256, verify_ssl=True,
                 net=None):
        """Initialize.

        :param directory: Directory Resource (`.messages.Directory`) or
            URI from which the resource will be downloaded.
        """
        self.key = key
        self.net = ClientNetwork(key, alg, verify_ssl) if net is None else net
        # A string directory argument is treated as a URL: fetch and
        # deserialize the Directory resource from the server.
        if isinstance(directory, six.string_types):
            self.directory = messages.Directory.from_json(
                self.net.get(directory).json())
        else:
            self.directory = directory
    @classmethod
    def _regr_from_response(cls, response, uri=None, new_authzr_uri=None,
                            terms_of_service=None):
        """Build a `.RegistrationResource` from a server response.

        Link headers in the response override the supplied
        ``terms_of_service`` and ``new_authzr_uri`` defaults when present.

        :raises .ClientError: if no ``next`` link is available at all.
        """
        if 'terms-of-service' in response.links:
            terms_of_service = response.links['terms-of-service']['url']
        if 'next' in response.links:
            new_authzr_uri = response.links['next']['url']
        # The "next" link is required: without it the client cannot
        # request new authorizations later.
        if new_authzr_uri is None:
            raise errors.ClientError('"next" link missing')
        return messages.RegistrationResource(
            body=messages.Registration.from_json(response.json()),
            uri=response.headers.get('Location', uri),
            new_authzr_uri=new_authzr_uri,
            terms_of_service=terms_of_service)
    def register(self, new_reg=None):
        """Register.

        :param .NewRegistration new_reg: Registration message to send;
            an empty one is used when omitted.

        :returns: Registration Resource.
        :rtype: `.RegistrationResource`
        """
        new_reg = messages.NewRegistration() if new_reg is None else new_reg
        assert isinstance(new_reg, messages.NewRegistration)
        response = self.net.post(self.directory[new_reg], new_reg)
        # TODO: handle errors
        # New registrations are expected to be created (HTTP 201).
        assert response.status_code == http_client.CREATED
        # "Instance of 'Field' has no key/contact member" bug:
        # pylint: disable=no-member
        return self._regr_from_response(response)
    def _send_recv_regr(self, regr, body):
        """POST ``body`` to a registration URI and parse the reply.

        Carries ``uri``, ``new_authzr_uri`` and ``terms_of_service`` over
        from the existing resource because the server may not (re)send the
        corresponding headers on update (see TODOs below).
        """
        response = self.net.post(regr.uri, body)
        # TODO: Boulder returns httplib.ACCEPTED
        #assert response.status_code == httplib.OK
        # TODO: Boulder does not set Location or Link on update
        # (c.f. acme-spec #94)
        return self._regr_from_response(
            response, uri=regr.uri, new_authzr_uri=regr.new_authzr_uri,
            terms_of_service=regr.terms_of_service)
    def update_registration(self, regr, update=None):
        """Update registration.

        :param messages.RegistrationResource regr: Registration Resource.
        :param messages.Registration update: Updated body of the
            resource. If not provided, body will be taken from `regr`.

        :returns: Updated Registration Resource.
        :rtype: `.RegistrationResource`

        :raises .UnexpectedUpdate: if the resource returned by the server
            differs from the one sent.
        """
        update = regr.body if update is None else update
        updated_regr = self._send_recv_regr(
            regr, body=messages.UpdateRegistration(**dict(update)))
        if updated_regr != regr:
            raise errors.UnexpectedUpdate(regr)
        return updated_regr
def query_registration(self, regr):
"""Query server about registration.
:param messages.RegistrationResource: Existing Registration
Resource.
"""
return self._send_recv_regr(regr, messages.UpdateRegistration())
def agree_to_tos(self, regr):
"""Agree to the terms-of-service.
Agree to the terms-of-service in a Registration Resource.
:param regr: Registration Resource.
:type regr: `.RegistrationResource`
:returns: Updated Registration Resource.
:rtype: `.RegistrationResource`
"""
return self.update_registration(
regr.update(body=regr.body.update(agreement=regr.terms_of_service)))
    def _authzr_from_response(self, response, identifier,
                              uri=None, new_cert_uri=None):
        """Build an `.AuthorizationResource` from a server response.

        :raises .ClientError: if ``new_cert_uri`` is unset and the
            response lacks a ``next`` link.
        :raises .UnexpectedUpdate: if the returned authorization is for a
            different identifier than the one requested.
        """
        # pylint: disable=no-self-use
        if new_cert_uri is None:
            try:
                new_cert_uri = response.links['next']['url']
            except KeyError:
                raise errors.ClientError('"next" link missing')
        authzr = messages.AuthorizationResource(
            body=messages.Authorization.from_json(response.json()),
            uri=response.headers.get('Location', uri),
            new_cert_uri=new_cert_uri)
        # Sanity check: the server must echo back the identifier we
        # asked to be challenged for.
        if authzr.body.identifier != identifier:
            raise errors.UnexpectedUpdate(authzr)
        return authzr
    def request_challenges(self, identifier, new_authzr_uri=None):
        """Request challenges.

        :param .messages.Identifier identifier: Identifier to be challenged.
        :param str new_authzr_uri: ``new-authorization`` URI. If omitted,
            will default to value found in ``directory``.

        :returns: Authorization Resource.
        :rtype: `.AuthorizationResource`
        """
        new_authz = messages.NewAuthorization(identifier=identifier)
        response = self.net.post(self.directory.new_authz
                                 if new_authzr_uri is None else new_authzr_uri,
                                 new_authz)
        # TODO: handle errors
        # New authorizations are expected to be created (HTTP 201).
        assert response.status_code == http_client.CREATED
        return self._authzr_from_response(response, identifier)
def request_domain_challenges(self, domain, new_authzr_uri=None):
"""Request challenges for domain names.
This is simply a convenience function that wraps around
`request_challenges`, but works with domain names instead of
generic identifiers. See ``request_challenges`` for more
documentation.
:param str domain: Domain name to be challenged.
:returns: Authorization Resource.
:rtype: `.AuthorizationResource`
"""
return self.request_challenges(messages.Identifier(
typ=messages.IDENTIFIER_FQDN, value=domain), new_authzr_uri)
    def answer_challenge(self, challb, response):
        """Answer challenge.

        :param challb: Challenge Resource body.
        :type challb: `.ChallengeBody`

        :param response: Corresponding Challenge response
        :type response: `.challenges.ChallengeResponse`

        :returns: Challenge Resource with updated body.
        :rtype: `.ChallengeResource`

        :raises .UnexpectedUpdate: if the returned challenge URI differs
            from the one answered.
        """
        response = self.net.post(challb.uri, response)
        # The "up" Link header points back at the parent authorization.
        try:
            authzr_uri = response.links['up']['url']
        except KeyError:
            raise errors.ClientError('"up" Link header missing')
        challr = messages.ChallengeResource(
            authzr_uri=authzr_uri,
            body=messages.ChallengeBody.from_json(response.json()))
        # TODO: check that challr.uri == response.headers['Location']?
        if challr.uri != challb.uri:
            raise errors.UnexpectedUpdate(challr.uri)
        return challr
@classmethod
def retry_after(cls, response, default):
"""Compute next `poll` time based on response ``Retry-After`` header.
Handles integers and various datestring formats per
https://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.37
:param requests.Response response: Response from `poll`.
:param int default: Default value (in seconds), used when
``Retry-After`` header is not present or invalid.
:returns: Time point when next `poll` should be performed.
:rtype: `datetime.datetime`
"""
retry_after = response.headers.get('Retry-After', str(default))
try:
seconds = int(retry_after)
except ValueError:
# The RFC 2822 parser handles all of RFC 2616's cases in modern
# environments (primarily HTTP 1.1+ but also py27+)
when = parsedate_tz(retry_after)
if when is not None:
try:
tz_secs = datetime.timedelta(when[-1] if when[-1] else 0)
return datetime.datetime(*when[:7]) - tz_secs
except (ValueError, OverflowError):
pass
seconds = default
return datetime.datetime.now() + datetime.timedelta(seconds=seconds)
    def poll(self, authzr):
        """Poll Authorization Resource for status.

        :param authzr: Authorization Resource
        :type authzr: `.AuthorizationResource`

        :returns: Updated Authorization Resource and HTTP response.
        :rtype: (`.AuthorizationResource`, `requests.Response`)
        """
        response = self.net.get(authzr.uri)
        # Re-parse into a fresh resource; the identifier must match the
        # one on the existing resource.
        updated_authzr = self._authzr_from_response(
            response, authzr.body.identifier, authzr.uri, authzr.new_cert_uri)
        # TODO: check and raise UnexpectedUpdate
        return updated_authzr, response
    def request_issuance(self, csr, authzrs):
        """Request issuance.

        :param csr: CSR
        :type csr: `OpenSSL.crypto.X509Req` wrapped in `.ComparableX509`

        :param authzrs: `list` of `.AuthorizationResource`

        :returns: Issued certificate
        :rtype: `.messages.CertificateResource`

        :raises .ClientError: if the server reply lacks a Location header.
        """
        assert authzrs, "Authorizations list is empty"
        logger.debug("Requesting issuance...")
        # TODO: assert len(authzrs) == number of SANs
        req = messages.CertificateRequest(csr=csr)
        content_type = self.DER_CONTENT_TYPE  # TODO: add 'cert_type 'argument
        response = self.net.post(
            authzrs[0].new_cert_uri,  # TODO: acme-spec #90
            req,
            content_type=content_type,
            headers={'Accept': content_type})
        # The optional "up" link points at the issuer chain.
        cert_chain_uri = response.links.get('up', {}).get('url')
        try:
            uri = response.headers['Location']
        except KeyError:
            raise errors.ClientError('"Location" Header missing')
        return messages.CertificateResource(
            uri=uri, authzrs=authzrs, cert_chain_uri=cert_chain_uri,
            # The response body is the DER-encoded certificate itself.
            body=jose.ComparableX509(OpenSSL.crypto.load_certificate(
                OpenSSL.crypto.FILETYPE_ASN1, response.content)))
    def poll_and_request_issuance(
            self, csr, authzrs, mintime=5, max_attempts=10):
        """Poll and request issuance.

        This function polls all provided Authorization Resource URIs
        until all challenges are valid, respecting ``Retry-After`` HTTP
        headers, and then calls `request_issuance`.

        :param .ComparableX509 csr: CSR (`OpenSSL.crypto.X509Req`
            wrapped in `.ComparableX509`)
        :param authzrs: `list` of `.AuthorizationResource`
        :param int mintime: Minimum time before next attempt, used if
            ``Retry-After`` is not present in the response.
        :param int max_attempts: Maximum number of attempts (per
            authorization) before `PollError` with non-empty ``waiting``
            is raised.

        :returns: ``(cert, updated_authzrs)`` `tuple` where ``cert`` is
            the issued certificate (`.messages.CertificateResource`),
            and ``updated_authzrs`` is a `tuple` consisting of updated
            Authorization Resources (`.AuthorizationResource`) as
            present in the responses from server, and in the same order
            as the input ``authzrs``.
        :rtype: `tuple`

        :raises PollError: in case of timeout or if some authorization
            was marked by the CA as invalid
        """
        # pylint: disable=too-many-locals
        assert max_attempts > 0
        # Number of poll attempts made so far, per original authorization.
        attempts = collections.defaultdict(int)
        # Authorizations that hit max_attempts without reaching a final
        # status.
        exhausted = set()
        # priority queue with datetime.datetime (based on Retry-After) as key,
        # and original Authorization Resource as value
        waiting = [(datetime.datetime.now(), authzr) for authzr in authzrs]
        # mapping between original Authorization Resource and the most
        # recently updated one
        updated = dict((authzr, authzr) for authzr in authzrs)
        while waiting:
            # find the smallest Retry-After, and sleep if necessary
            when, authzr = heapq.heappop(waiting)
            now = datetime.datetime.now()
            if when > now:
                seconds = (when - now).seconds
                logger.debug('Sleeping for %d seconds', seconds)
                time.sleep(seconds)
            # Note that we poll with the latest updated Authorization
            # URI, which might have a different URI than initial one
            updated_authzr, response = self.poll(updated[authzr])
            updated[authzr] = updated_authzr
            attempts[authzr] += 1
            # pylint: disable=no-member
            if updated_authzr.body.status not in (
                    messages.STATUS_VALID, messages.STATUS_INVALID):
                if attempts[authzr] < max_attempts:
                    # push back to the priority queue, with updated retry_after
                    heapq.heappush(waiting, (self.retry_after(
                        response, default=mintime), authzr))
                else:
                    exhausted.add(authzr)
        # Fail if any authorization ran out of attempts or was rejected
        # by the CA.
        if exhausted or any(authzr.body.status == messages.STATUS_INVALID
                            for authzr in six.itervalues(updated)):
            raise errors.PollError(exhausted, updated)
        updated_authzrs = tuple(updated[authzr] for authzr in authzrs)
        return self.request_issuance(csr, updated_authzrs), updated_authzrs
    def _get_cert(self, uri):
        """Returns certificate from URI.

        :param str uri: URI of certificate

        :returns: tuple of the form
            (response, :class:`acme.jose.ComparableX509`)
        :rtype: tuple
        """
        content_type = self.DER_CONTENT_TYPE  # TODO: make it a param
        response = self.net.get(uri, headers={'Accept': content_type},
                                content_type=content_type)
        # The body is DER-encoded; parse it into a comparable wrapper.
        return response, jose.ComparableX509(OpenSSL.crypto.load_certificate(
            OpenSSL.crypto.FILETYPE_ASN1, response.content))
    def check_cert(self, certr):
        """Check for new cert.

        :param certr: Certificate Resource
        :type certr: `.CertificateResource`

        :returns: Updated Certificate Resource.
        :rtype: `.CertificateResource`

        :raises .ClientError: if the Location header is missing.
        :raises .UnexpectedUpdate: if the Location header disagrees with
            the resource URI.
        """
        # TODO: acme-spec 5.1 table action should be renamed to
        # "refresh cert", and this method integrated with self.refresh
        response, cert = self._get_cert(certr.uri)
        if 'Location' not in response.headers:
            raise errors.ClientError('Location header missing')
        if response.headers['Location'] != certr.uri:
            raise errors.UnexpectedUpdate(response.text)
        return certr.update(body=cert)
    def refresh(self, certr):
        """Refresh certificate.

        Currently an alias for `check_cert`.

        :param certr: Certificate Resource
        :type certr: `.CertificateResource`

        :returns: Updated Certificate Resource.
        :rtype: `.CertificateResource`
        """
        # TODO: If a client sends a refresh request and the server is
        # not willing to refresh the certificate, the server MUST
        # respond with status code 403 (Forbidden)
        return self.check_cert(certr)
def fetch_chain(self, certr, max_length=10):
"""Fetch chain for certificate.
:param .CertificateResource certr: Certificate Resource
:param int max_length: Maximum allowed length of the chain.
Note that each element in the certificate requires new
``HTTP GET`` request, and the length of the chain is
controlled by the ACME CA.
:raises errors.Error: if recursion exceeds `max_length`
:returns: Certificate chain for the Certificate Resource. It is
a list ordered so that the first element is a signer of the
certificate from Certificate Resource. Will be empty if
``cert_chain_uri`` is ``None``.
:rtype: `list` of `OpenSSL.crypto.X509` wrapped in `.ComparableX509`
"""
chain = []
uri = certr.cert_chain_uri
while uri is not None and len(chain) < max_length:
response, cert = self._get_cert(uri)
uri = response.links.get('up', {}).get('url')
chain.append(cert)
if uri is not None:
raise errors.Error(
"Recursion limit reached. Didn't get {0}".format(uri))
return chain
    def revoke(self, cert):
        """Revoke certificate.

        :param .ComparableX509 cert: `OpenSSL.crypto.X509` wrapped in
            `.ComparableX509`

        :raises .ClientError: If revocation is unsuccessful.
        """
        response = self.net.post(self.directory[messages.Revocation],
                                 messages.Revocation(certificate=cert),
                                 content_type=None)
        # Success is signalled by a plain 200 OK; anything else is
        # treated as failure.
        if response.status_code != http_client.OK:
            raise errors.ClientError(
                'Successful revocation must return HTTP OK status')
class ClientNetwork(object):  # pylint: disable=too-many-instance-attributes
    """Client network."""

    JSON_CONTENT_TYPE = 'application/json'
    JOSE_CONTENT_TYPE = 'application/jose+json'
    JSON_ERROR_CONTENT_TYPE = 'application/problem+json'
    # HTTP header carrying the anti-replay nonce.
    REPLAY_NONCE_HEADER = 'Replay-Nonce'

    def __init__(self, key, alg=jose.RS256, verify_ssl=True,
                 user_agent='acme-python'):
        # Account key and signature algorithm used to sign JWS payloads.
        self.key = key
        self.alg = alg
        self.verify_ssl = verify_ssl
        # Pool of unused nonces harvested from server responses.
        self._nonces = set()
        self.user_agent = user_agent
        # Persistent session enables connection re-use across requests.
        self.session = requests.Session()

    def __del__(self):
        # Best-effort release of pooled connections on garbage collection.
        self.session.close()
    def _wrap_in_jws(self, obj, nonce):
        """Wrap `JSONDeSerializable` object in JWS.

        .. todo:: Implement ``acmePath``.

        :param .JSONDeSerializable obj:
        :param bytes nonce:
        :rtype: `.JWS`
        """
        jobj = obj.json_dumps().encode()
        logger.debug('Serialized JSON: %s', jobj)
        # Sign the serialized payload with the account key; the nonce is
        # included for replay protection.
        return jws.JWS.sign(
            payload=jobj, key=self.key, alg=self.alg, nonce=nonce).json_dumps()
    @classmethod
    def _check_response(cls, response, content_type=None):
        """Check response content and its type.

        .. note::
           Checking is not strict: wrong server response ``Content-Type``
           HTTP header is ignored if response is an expected JSON object
           (c.f. Boulder #56).

        :param str content_type: Expected Content-Type response header.
            If JSON is expected and not present in server response, this
            function will raise an error. Otherwise, wrong Content-Type
            is ignored, but logged.

        :raises .messages.Error: If server response body
            carries HTTP Problem (draft-ietf-appsawg-http-problem-00).
        :raises .ClientError: In case of other networking errors.
        """
        logger.debug('Received response %s (headers: %s): %r',
                     response, response.headers, response.content)
        response_ct = response.headers.get('Content-Type')
        try:
            # TODO: response.json() is called twice, once here, and
            # once in _get and _post clients
            jobj = response.json()
        except ValueError:
            jobj = None
        # Error path: surface the server-provided problem document when
        # one can be parsed, otherwise fall back to a generic error.
        if not response.ok:
            if jobj is not None:
                if response_ct != cls.JSON_ERROR_CONTENT_TYPE:
                    logger.debug(
                        'Ignoring wrong Content-Type (%r) for JSON Error',
                        response_ct)
                try:
                    raise messages.Error.from_json(jobj)
                except jose.DeserializationError as error:
                    # Couldn't deserialize JSON object
                    raise errors.ClientError((response, error))
            else:
                # response is not JSON object
                raise errors.ClientError(response)
        else:
            if jobj is not None and response_ct != cls.JSON_CONTENT_TYPE:
                logger.debug(
                    'Ignoring wrong Content-Type (%r) for JSON decodable '
                    'response', response_ct)
            # JSON was expected but the body did not parse as JSON.
            if content_type == cls.JSON_CONTENT_TYPE and jobj is None:
                raise errors.ClientError(
                    'Unexpected response Content-Type: {0}'.format(response_ct))
        return response
def _send_request(self, method, url, *args, **kwargs):
"""Send HTTP request.
Makes sure that `verify_ssl` is respected. Logs request and
response (with headers). For allowed parameters please see
`requests.request`.
:param str method: method for the new `requests.Request` object
:param str url: URL for the new `requests.Request` object
:raises requests.exceptions.RequestException: in case of any problems
:returns: HTTP Response
:rtype: `requests.Response`
"""
logging.debug('Sending %s request to %s. args: %r, kwargs: %r',
method, url, args, kwargs)
kwargs['verify'] = self.verify_ssl
kwargs.setdefault('headers', {})
kwargs['headers'].setdefault('User-Agent', self.user_agent)
response = self.session.request(method, url, *args, **kwargs)
logging.debug('Received %s. Headers: %s. Content: %r',
response, response.headers, response.content)
return response
def head(self, *args, **kwargs):
"""Send HEAD request without checking the response.
Note, that `_check_response` is not called, as it is expected
that status code other than successfully 2xx will be returned, or
messages2.Error will be raised by the server.
"""
return self._send_request('HEAD', *args, **kwargs)
def get(self, url, content_type=JSON_CONTENT_TYPE, **kwargs):
"""Send GET request and check response."""
return self._check_response(
self._send_request('GET', url, **kwargs), content_type=content_type)
    def _add_nonce(self, response):
        """Store the Replay-Nonce header value from ``response``.

        :raises .BadNonce: if the nonce fails to deserialize.
        :raises .MissingNonce: if the header is absent.
        """
        if self.REPLAY_NONCE_HEADER in response.headers:
            nonce = response.headers[self.REPLAY_NONCE_HEADER]
            try:
                decoded_nonce = jws.Header._fields['nonce'].decode(nonce)
            except jose.DeserializationError as error:
                raise errors.BadNonce(nonce, error)
            logger.debug('Storing nonce: %r', decoded_nonce)
            self._nonces.add(decoded_nonce)
        else:
            raise errors.MissingNonce(response)
def _get_nonce(self, url):
if not self._nonces:
logging.debug('Requesting fresh nonce')
self._add_nonce(self.head(url))
return self._nonces.pop()
    def post(self, url, obj, content_type=JOSE_CONTENT_TYPE, **kwargs):
        """POST object wrapped in `.JWS` and check response.

        :param str url: Target URL.
        :param .JSONDeSerializable obj: Payload to sign and send.
        :param str content_type: Expected response Content-Type.
        """
        data = self._wrap_in_jws(obj, self._get_nonce(url))
        kwargs.setdefault('headers', {'Content-Type': content_type})
        response = self._send_request('POST', url, data=data, **kwargs)
        # The fresh nonce is stored before the response is validated.
        self._add_nonce(response)
        return self._check_response(response, content_type=content_type)
| {
"repo_name": "bsmr-misc-forks/letsencrypt",
"path": "acme/acme/client.py",
"copies": "2",
"size": "24586",
"license": "apache-2.0",
"hash": -8286883210775513000,
"line_mean": 36.7665130568,
"line_max": 81,
"alpha_frac": 0.6128284389,
"autogenerated": false,
"ratio": 4.276569838232736,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.00008701210472241465,
"num_lines": 651
} |
"""ACME client API."""
import collections
import datetime
import heapq
import logging
import time
import six
from six.moves import http_client # pylint: disable=import-error
import OpenSSL
import requests
import sys
import werkzeug
from acme import errors
from acme import jose
from acme import jws
from acme import messages
logger = logging.getLogger(__name__)
# Prior to Python 2.7.9 the stdlib SSL module did not allow a user to configure
# many important security related options. On these platforms we use PyOpenSSL
# for SSL, which does allow these options to be configured.
# https://urllib3.readthedocs.org/en/latest/security.html#insecureplatformwarning
if sys.version_info < (2, 7, 9): # pragma: no cover
requests.packages.urllib3.contrib.pyopenssl.inject_into_urllib3()
class Client(object): # pylint: disable=too-many-instance-attributes
"""ACME client.
.. todo::
Clean up raised error types hierarchy, document, and handle (wrap)
instances of `.DeserializationError` raised in `from_json()`.
:ivar messages.Directory directory:
:ivar key: `.JWK` (private)
:ivar alg: `.JWASignature`
:ivar bool verify_ssl: Verify SSL certificates?
:ivar .ClientNetwork net: Client network. Useful for testing. If not
supplied, it will be initialized using `key`, `alg` and
`verify_ssl`.
"""
DER_CONTENT_TYPE = 'application/pkix-cert'
def __init__(self, directory, key, alg=jose.RS256, verify_ssl=True,
net=None):
"""Initialize.
:param directory: Directory Resource (`.messages.Directory`) or
URI from which the resource will be downloaded.
"""
self.key = key
self.net = ClientNetwork(key, alg, verify_ssl) if net is None else net
if isinstance(directory, six.string_types):
self.directory = messages.Directory.from_json(
self.net.get(directory).json())
else:
self.directory = directory
@classmethod
def _regr_from_response(cls, response, uri=None, new_authzr_uri=None,
terms_of_service=None):
if 'terms-of-service' in response.links:
terms_of_service = response.links['terms-of-service']['url']
if 'next' in response.links:
new_authzr_uri = response.links['next']['url']
if new_authzr_uri is None:
raise errors.ClientError('"next" link missing')
return messages.RegistrationResource(
body=messages.Registration.from_json(response.json()),
uri=response.headers.get('Location', uri),
new_authzr_uri=new_authzr_uri,
terms_of_service=terms_of_service)
def register(self, new_reg=None):
"""Register.
:param .NewRegistration new_reg:
:returns: Registration Resource.
:rtype: `.RegistrationResource`
:raises .UnexpectedUpdate:
"""
new_reg = messages.NewRegistration() if new_reg is None else new_reg
assert isinstance(new_reg, messages.NewRegistration)
response = self.net.post(self.directory[new_reg], new_reg)
# TODO: handle errors
assert response.status_code == http_client.CREATED
# "Instance of 'Field' has no key/contact member" bug:
# pylint: disable=no-member
regr = self._regr_from_response(response)
if (regr.body.key != self.key.public_key() or
regr.body.contact != new_reg.contact):
raise errors.UnexpectedUpdate(regr)
return regr
def _send_recv_regr(self, regr, body):
response = self.net.post(regr.uri, body)
# TODO: Boulder returns httplib.ACCEPTED
#assert response.status_code == httplib.OK
# TODO: Boulder does not set Location or Link on update
# (c.f. acme-spec #94)
return self._regr_from_response(
response, uri=regr.uri, new_authzr_uri=regr.new_authzr_uri,
terms_of_service=regr.terms_of_service)
def update_registration(self, regr, update=None):
"""Update registration.
:param messages.RegistrationResource regr: Registration Resource.
:param messages.Registration update: Updated body of the
resource. If not provided, body will be taken from `regr`.
:returns: Updated Registration Resource.
:rtype: `.RegistrationResource`
"""
update = regr.body if update is None else update
updated_regr = self._send_recv_regr(
regr, body=messages.UpdateRegistration(**dict(update)))
if updated_regr != regr:
raise errors.UnexpectedUpdate(regr)
return updated_regr
def query_registration(self, regr):
"""Query server about registration.
:param messages.RegistrationResource: Existing Registration
Resource.
"""
return self._send_recv_regr(regr, messages.UpdateRegistration())
def agree_to_tos(self, regr):
"""Agree to the terms-of-service.
Agree to the terms-of-service in a Registration Resource.
:param regr: Registration Resource.
:type regr: `.RegistrationResource`
:returns: Updated Registration Resource.
:rtype: `.RegistrationResource`
"""
return self.update_registration(
regr.update(body=regr.body.update(agreement=regr.terms_of_service)))
def _authzr_from_response(self, response, identifier,
uri=None, new_cert_uri=None):
# pylint: disable=no-self-use
if new_cert_uri is None:
try:
new_cert_uri = response.links['next']['url']
except KeyError:
raise errors.ClientError('"next" link missing')
authzr = messages.AuthorizationResource(
body=messages.Authorization.from_json(response.json()),
uri=response.headers.get('Location', uri),
new_cert_uri=new_cert_uri)
if authzr.body.identifier != identifier:
raise errors.UnexpectedUpdate(authzr)
return authzr
def request_challenges(self, identifier, new_authzr_uri=None):
"""Request challenges.
:param .messages.Identifier identifier: Identifier to be challenged.
:param str new_authzr_uri: ``new-authorization`` URI. If omitted,
will default to value found in ``directory``.
:returns: Authorization Resource.
:rtype: `.AuthorizationResource`
"""
new_authz = messages.NewAuthorization(identifier=identifier)
response = self.net.post(self.directory.new_authz
if new_authzr_uri is None else new_authzr_uri,
new_authz)
# TODO: handle errors
assert response.status_code == http_client.CREATED
return self._authzr_from_response(response, identifier)
def request_domain_challenges(self, domain, new_authzr_uri=None):
"""Request challenges for domain names.
This is simply a convenience function that wraps around
`request_challenges`, but works with domain names instead of
generic identifiers. See ``request_challenges`` for more
documentation.
:param str domain: Domain name to be challenged.
:returns: Authorization Resource.
:rtype: `.AuthorizationResource`
"""
return self.request_challenges(messages.Identifier(
typ=messages.IDENTIFIER_FQDN, value=domain), new_authzr_uri)
def answer_challenge(self, challb, response):
"""Answer challenge.
:param challb: Challenge Resource body.
:type challb: `.ChallengeBody`
:param response: Corresponding Challenge response
:type response: `.challenges.ChallengeResponse`
:returns: Challenge Resource with updated body.
:rtype: `.ChallengeResource`
:raises .UnexpectedUpdate:
"""
response = self.net.post(challb.uri, response)
try:
authzr_uri = response.links['up']['url']
except KeyError:
raise errors.ClientError('"up" Link header missing')
challr = messages.ChallengeResource(
authzr_uri=authzr_uri,
body=messages.ChallengeBody.from_json(response.json()))
# TODO: check that challr.uri == response.headers['Location']?
if challr.uri != challb.uri:
raise errors.UnexpectedUpdate(challr.uri)
return challr
@classmethod
def retry_after(cls, response, default):
"""Compute next `poll` time based on response ``Retry-After`` header.
:param requests.Response response: Response from `poll`.
:param int default: Default value (in seconds), used when
``Retry-After`` header is not present or invalid.
:returns: Time point when next `poll` should be performed.
:rtype: `datetime.datetime`
"""
retry_after = response.headers.get('Retry-After', str(default))
try:
seconds = int(retry_after)
except ValueError:
# pylint: disable=no-member
decoded = werkzeug.parse_date(retry_after) # RFC1123
if decoded is None:
seconds = default
else:
return decoded
return datetime.datetime.now() + datetime.timedelta(seconds=seconds)
def poll(self, authzr):
"""Poll Authorization Resource for status.
:param authzr: Authorization Resource
:type authzr: `.AuthorizationResource`
:returns: Updated Authorization Resource and HTTP response.
:rtype: (`.AuthorizationResource`, `requests.Response`)
"""
response = self.net.get(authzr.uri)
updated_authzr = self._authzr_from_response(
response, authzr.body.identifier, authzr.uri, authzr.new_cert_uri)
# TODO: check and raise UnexpectedUpdate
return updated_authzr, response
def request_issuance(self, csr, authzrs):
"""Request issuance.
:param csr: CSR
:type csr: `OpenSSL.crypto.X509Req` wrapped in `.ComparableX509`
:param authzrs: `list` of `.AuthorizationResource`
:returns: Issued certificate
:rtype: `.messages.CertificateResource`
"""
assert authzrs, "Authorizations list is empty"
logger.debug("Requesting issuance...")
# TODO: assert len(authzrs) == number of SANs
req = messages.CertificateRequest(csr=csr)
content_type = self.DER_CONTENT_TYPE # TODO: add 'cert_type 'argument
response = self.net.post(
authzrs[0].new_cert_uri, # TODO: acme-spec #90
req,
content_type=content_type,
headers={'Accept': content_type})
cert_chain_uri = response.links.get('up', {}).get('url')
try:
uri = response.headers['Location']
except KeyError:
raise errors.ClientError('"Location" Header missing')
return messages.CertificateResource(
uri=uri, authzrs=authzrs, cert_chain_uri=cert_chain_uri,
body=jose.ComparableX509(OpenSSL.crypto.load_certificate(
OpenSSL.crypto.FILETYPE_ASN1, response.content)))
def poll_and_request_issuance(
        self, csr, authzrs, mintime=5, max_attempts=10):
    """Poll and request issuance.

    This function polls all provided Authorization Resource URIs
    until all challenges are valid, respecting ``Retry-After`` HTTP
    headers, and then calls `request_issuance`.

    :param .ComparableX509 csr: CSR (`OpenSSL.crypto.X509Req`
        wrapped in `.ComparableX509`)
    :param authzrs: `list` of `.AuthorizationResource`
    :param int mintime: Minimum time before next attempt, used if
        ``Retry-After`` is not present in the response.
    :param int max_attempts: Maximum number of attempts (per
        authorization) before `PollError` with non-empty ``waiting``
        is raised.

    :returns: ``(cert, updated_authzrs)`` `tuple` where ``cert`` is
        the issued certificate (`.messages.CertificateResource`),
        and ``updated_authzrs`` is a `tuple` consisting of updated
        Authorization Resources (`.AuthorizationResource`) as
        present in the responses from server, and in the same order
        as the input ``authzrs``.
    :rtype: `tuple`

    :raises PollError: in case of timeout or if some authorization
        was marked by the CA as invalid

    """
    # pylint: disable=too-many-locals
    assert max_attempts > 0
    # Number of poll attempts made per (original) Authorization Resource.
    attempts = collections.defaultdict(int)
    # Authorizations that hit max_attempts without reaching a terminal
    # (valid/invalid) status.
    exhausted = set()

    # priority queue with datetime (based on Retry-After) as key,
    # and original Authorization Resource as value
    waiting = [(datetime.datetime.now(), authzr) for authzr in authzrs]
    # mapping between original Authorization Resource and the most
    # recently updated one
    updated = dict((authzr, authzr) for authzr in authzrs)

    while waiting:
        # find the smallest Retry-After, and sleep if necessary
        when, authzr = heapq.heappop(waiting)
        now = datetime.datetime.now()
        if when > now:
            seconds = (when - now).seconds
            logger.debug('Sleeping for %d seconds', seconds)
            time.sleep(seconds)

        # Note that we poll with the latest updated Authorization
        # URI, which might have a different URI than initial one
        updated_authzr, response = self.poll(updated[authzr])
        updated[authzr] = updated_authzr

        attempts[authzr] += 1
        # pylint: disable=no-member
        if updated_authzr.body.status not in (
                messages.STATUS_VALID, messages.STATUS_INVALID):
            if attempts[authzr] < max_attempts:
                # push back to the priority queue, with updated retry_after
                heapq.heappush(waiting, (self.retry_after(
                    response, default=mintime), authzr))
            else:
                exhausted.add(authzr)

    # Fail if any authorization timed out or was rejected by the CA.
    if exhausted or any(authzr.body.status == messages.STATUS_INVALID
                        for authzr in six.itervalues(updated)):
        raise errors.PollError(exhausted, updated)

    updated_authzrs = tuple(updated[authzr] for authzr in authzrs)
    return self.request_issuance(csr, updated_authzrs), updated_authzrs
def _get_cert(self, uri):
    """Returns certificate from URI.

    :param str uri: URI of certificate

    :returns: tuple of the form
        (response, :class:`acme.jose.ComparableX509`)
    :rtype: tuple

    """
    accept = self.DER_CONTENT_TYPE  # TODO: make it a param
    response = self.net.get(
        uri, headers={'Accept': accept}, content_type=accept)
    cert = jose.ComparableX509(OpenSSL.crypto.load_certificate(
        OpenSSL.crypto.FILETYPE_ASN1, response.content))
    return response, cert
def check_cert(self, certr):
    """Check for new cert.

    :param certr: Certificate Resource
    :type certr: `.CertificateResource`

    :returns: Updated Certificate Resource.
    :rtype: `.CertificateResource`

    """
    # TODO: acme-spec 5.1 table action should be renamed to
    # "refresh cert", and this method integrated with self.refresh
    response, cert = self._get_cert(certr.uri)
    location = response.headers.get('Location')
    if location is None:
        raise errors.ClientError('Location header missing')
    if location != certr.uri:
        raise errors.UnexpectedUpdate(response.text)
    return certr.update(body=cert)
def refresh(self, certr):
    """Refresh certificate.

    Currently identical to `check_cert`: re-fetches the certificate
    and returns the updated resource.

    :param certr: Certificate Resource
    :type certr: `.CertificateResource`

    :returns: Updated Certificate Resource.
    :rtype: `.CertificateResource`

    """
    # TODO: If a client sends a refresh request and the server is
    # not willing to refresh the certificate, the server MUST
    # respond with status code 403 (Forbidden)
    return self.check_cert(certr)
def fetch_chain(self, certr, max_length=10):
    """Fetch chain for certificate.

    :param .CertificateResource certr: Certificate Resource
    :param int max_length: Maximum allowed length of the chain.
        Note that each element in the certificate requires new
        ``HTTP GET`` request, and the length of the chain is
        controlled by the ACME CA.

    :raises errors.Error: if recursion exceeds `max_length`

    :returns: Certificate chain for the Certificate Resource. It is
        a list ordered so that the first element is a signer of the
        certificate from Certificate Resource. Will be empty if
        ``cert_chain_uri`` is ``None``.
    :rtype: `list` of `OpenSSL.crypto.X509` wrapped in `.ComparableX509`

    """
    chain = []
    next_uri = certr.cert_chain_uri
    # Follow "up" links, one GET per chain element, at most max_length
    # times.
    for _ in range(max_length):
        if next_uri is None:
            break
        response, cert = self._get_cert(next_uri)
        chain.append(cert)
        next_uri = response.links.get('up', {}).get('url')
    if next_uri is not None:
        raise errors.Error(
            "Recursion limit reached. Didn't get {0}".format(next_uri))
    return chain
def revoke(self, cert):
    """Revoke certificate.

    :param .ComparableX509 cert: `OpenSSL.crypto.X509` wrapped in
        `.ComparableX509`

    :raises .ClientError: If revocation is unsuccessful.

    """
    revocation = messages.Revocation(certificate=cert)
    response = self.net.post(
        self.directory[messages.Revocation], revocation, content_type=None)
    if response.status_code != http_client.OK:
        raise errors.ClientError(
            'Successful revocation must return HTTP OK status')
class ClientNetwork(object):  # pylint: disable=too-many-instance-attributes
    """Client network.

    Signs outgoing payloads with JWS, keeps a pool of replay nonces
    harvested from server responses, and validates response content
    types.

    :ivar .JWK key: Account private key used for signing requests.
    :ivar .JWASignature alg: Signature algorithm.
    :ivar bool verify_ssl: Verify SSL certificates?
    :ivar str user_agent: ``User-Agent`` header value sent with requests.

    """
    JSON_CONTENT_TYPE = 'application/json'
    JSON_ERROR_CONTENT_TYPE = 'application/problem+json'
    REPLAY_NONCE_HEADER = 'Replay-Nonce'

    def __init__(self, key, alg=jose.RS256, verify_ssl=True,
                 user_agent='acme-python'):
        self.key = key
        self.alg = alg
        self.verify_ssl = verify_ssl
        # Pool of replay nonces collected from server responses.
        self._nonces = set()
        self.user_agent = user_agent

    def _wrap_in_jws(self, obj, nonce):
        """Wrap `JSONDeSerializable` object in JWS.

        .. todo:: Implement ``acmePath``.

        :param .JSONDeSerializable obj:
        :param bytes nonce:
        :rtype: `.JWS`

        """
        jobj = obj.json_dumps().encode()
        logger.debug('Serialized JSON: %s', jobj)
        return jws.JWS.sign(
            payload=jobj, key=self.key, alg=self.alg, nonce=nonce).json_dumps()

    @classmethod
    def _check_response(cls, response, content_type=None):
        """Check response content and its type.

        .. note::
           Checking is not strict: wrong server response ``Content-Type``
           HTTP header is ignored if response is an expected JSON object
           (c.f. Boulder #56).

        :param str content_type: Expected Content-Type response header.
            If JSON is expected and not present in server response, this
            function will raise an error. Otherwise, wrong Content-Type
            is ignored, but logged.

        :raises .messages.Error: If server response body
            carries HTTP Problem (draft-ietf-appsawg-http-problem-00).
        :raises .ClientError: In case of other networking errors.

        """
        logger.debug('Received response %s (headers: %s): %r',
                     response, response.headers, response.content)

        response_ct = response.headers.get('Content-Type')
        try:
            # TODO: response.json() is called twice, once here, and
            # once in _get and _post clients
            jobj = response.json()
        except ValueError:
            jobj = None

        if not response.ok:
            if jobj is not None:
                if response_ct != cls.JSON_ERROR_CONTENT_TYPE:
                    logger.debug(
                        'Ignoring wrong Content-Type (%r) for JSON Error',
                        response_ct)
                try:
                    raise messages.Error.from_json(jobj)
                except jose.DeserializationError as error:
                    # Couldn't deserialize JSON object
                    raise errors.ClientError((response, error))
            else:
                # response is not JSON object
                raise errors.ClientError(response)
        else:
            if jobj is not None and response_ct != cls.JSON_CONTENT_TYPE:
                logger.debug(
                    'Ignoring wrong Content-Type (%r) for JSON decodable '
                    'response', response_ct)

            if content_type == cls.JSON_CONTENT_TYPE and jobj is None:
                raise errors.ClientError(
                    'Unexpected response Content-Type: {0}'.format(response_ct))

        return response

    def _send_request(self, method, url, *args, **kwargs):
        """Send HTTP request.

        Makes sure that `verify_ssl` is respected. Logs request and
        response (with headers). For allowed parameters please see
        `requests.request`.

        :param str method: method for the new `requests.Request` object
        :param str url: URL for the new `requests.Request` object

        :raises requests.exceptions.RequestException: in case of any problems

        :returns: HTTP Response
        :rtype: `requests.Response`

        """
        # Use the module logger (not the root ``logging`` module) for
        # consistency with the rest of this class.
        logger.debug('Sending %s request to %s. args: %r, kwargs: %r',
                     method, url, args, kwargs)
        kwargs['verify'] = self.verify_ssl
        kwargs.setdefault('headers', {})
        kwargs['headers'].setdefault('User-Agent', self.user_agent)
        response = requests.request(method, url, *args, **kwargs)
        logger.debug('Received %s. Headers: %s. Content: %r',
                     response, response.headers, response.content)
        return response

    def head(self, *args, **kwargs):
        """Send HEAD request without checking the response.

        Note that `_check_response` is not called, as it is expected
        that a status code other than a successful 2xx will be
        returned, or a messages.Error will be raised by the server.

        """
        return self._send_request('HEAD', *args, **kwargs)

    def get(self, url, content_type=JSON_CONTENT_TYPE, **kwargs):
        """Send GET request and check response."""
        return self._check_response(
            self._send_request('GET', url, **kwargs), content_type=content_type)

    def _add_nonce(self, response):
        # Harvest the replay nonce from a response, validating it by
        # decoding through the JWS header field.
        if self.REPLAY_NONCE_HEADER in response.headers:
            nonce = response.headers[self.REPLAY_NONCE_HEADER]
            try:
                decoded_nonce = jws.Header._fields['nonce'].decode(nonce)
            except jose.DeserializationError as error:
                raise errors.BadNonce(nonce, error)
            logger.debug('Storing nonce: %r', decoded_nonce)
            self._nonces.add(decoded_nonce)
        else:
            raise errors.MissingNonce(response)

    def _get_nonce(self, url):
        # Replenish the pool with a HEAD request if it is empty.
        if not self._nonces:
            logger.debug('Requesting fresh nonce')
            self._add_nonce(self.head(url))
        return self._nonces.pop()

    def post(self, url, obj, content_type=JSON_CONTENT_TYPE, **kwargs):
        """POST object wrapped in `.JWS` and check response."""
        data = self._wrap_in_jws(obj, self._get_nonce(url))
        response = self._send_request('POST', url, data=data, **kwargs)
        self._add_nonce(response)
        return self._check_response(response, content_type=content_type)
| {
"repo_name": "TheBoegl/letsencrypt",
"path": "acme/acme/client.py",
"copies": "1",
"size": "24151",
"license": "apache-2.0",
"hash": 8504688011325509000,
"line_mean": 36.4434108527,
"line_max": 81,
"alpha_frac": 0.612355596,
"autogenerated": false,
"ratio": 4.282851569427203,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.00008782151965006501,
"num_lines": 645
} |
"""ACME client API."""
import datetime
import heapq
import httplib
import logging
import time
import M2Crypto
import requests
import werkzeug
from acme import errors
from acme import jose
from acme import jws
from acme import messages
# https://urllib3.readthedocs.org/en/latest/security.html#insecureplatformwarning
requests.packages.urllib3.contrib.pyopenssl.inject_into_urllib3()
class Client(object):  # pylint: disable=too-many-instance-attributes
    """ACME client.

    Legacy variant: performs HTTP itself via ``requests`` and parses
    certificates with ``M2Crypto``.

    .. todo::
       Clean up raised error types hierarchy, document, and handle (wrap)
       instances of `.DeserializationError` raised in `from_json()`.

    :ivar str new_reg_uri: Location of new-reg
    :ivar key: `.JWK` (private)
    :ivar alg: `.JWASignature`
    :ivar bool verify_ssl: Verify SSL certificates?

    """
    DER_CONTENT_TYPE = 'application/pkix-cert'
    JSON_CONTENT_TYPE = 'application/json'
    JSON_ERROR_CONTENT_TYPE = 'application/problem+json'
    REPLAY_NONCE_HEADER = 'Replay-Nonce'

    def __init__(self, new_reg_uri, key, alg=jose.RS256, verify_ssl=True):
        self.new_reg_uri = new_reg_uri
        self.key = key
        self.alg = alg
        self.verify_ssl = verify_ssl
        # Pool of replay nonces collected from server responses.
        self._nonces = set()

    def _wrap_in_jws(self, obj, nonce):
        """Wrap `JSONDeSerializable` object in JWS.

        .. todo:: Implement ``acmePath``.

        :param JSONDeSerializable obj:
        :rtype: `.JWS`

        """
        dumps = obj.json_dumps()
        logging.debug('Serialized JSON: %s', dumps)
        return jws.JWS.sign(
            payload=dumps, key=self.key, alg=self.alg, nonce=nonce).json_dumps()

    @classmethod
    def _check_response(cls, response, content_type=None):
        """Check response content and its type.

        .. note::
           Checking is not strict: wrong server response ``Content-Type``
           HTTP header is ignored if response is an expected JSON object
           (c.f. Boulder #56).

        :param str content_type: Expected Content-Type response header.
            If JSON is expected and not present in server response, this
            function will raise an error. Otherwise, wrong Content-Type
            is ignored, but logged.

        :raises .messages.Error: If server response body
            carries HTTP Problem (draft-ietf-appsawg-http-problem-00).
        :raises .ClientError: In case of other networking errors.

        """
        logging.debug('Received response %s (headers: %s): %r',
                      response, response.headers, response.content)

        response_ct = response.headers.get('Content-Type')
        try:
            # TODO: response.json() is called twice, once here, and
            # once in _get and _post clients
            jobj = response.json()
        except ValueError as error:
            # body was not JSON-decodable
            jobj = None

        if not response.ok:
            if jobj is not None:
                if response_ct != cls.JSON_ERROR_CONTENT_TYPE:
                    logging.debug(
                        'Ignoring wrong Content-Type (%r) for JSON Error',
                        response_ct)
                try:
                    raise messages.Error.from_json(jobj)
                except jose.DeserializationError as error:
                    # Couldn't deserialize JSON object
                    raise errors.ClientError((response, error))
            else:
                # response is not JSON object
                raise errors.ClientError(response)
        else:
            if jobj is not None and response_ct != cls.JSON_CONTENT_TYPE:
                logging.debug(
                    'Ignoring wrong Content-Type (%r) for JSON decodable '
                    'response', response_ct)

            if content_type == cls.JSON_CONTENT_TYPE and jobj is None:
                raise errors.ClientError(
                    'Unexpected response Content-Type: {0}'.format(response_ct))

    def _get(self, uri, content_type=JSON_CONTENT_TYPE, **kwargs):
        """Send GET request.

        :raises .ClientError:

        :returns: HTTP Response
        :rtype: `requests.Response`

        """
        logging.debug('Sending GET request to %s', uri)
        kwargs.setdefault('verify', self.verify_ssl)
        try:
            response = requests.get(uri, **kwargs)
        except requests.exceptions.RequestException as error:
            raise errors.ClientError(error)
        self._check_response(response, content_type=content_type)
        return response

    def _add_nonce(self, response):
        # Harvest the replay nonce from a response, validating it first.
        if self.REPLAY_NONCE_HEADER in response.headers:
            nonce = response.headers[self.REPLAY_NONCE_HEADER]
            error = jws.Header.validate_nonce(nonce)
            if error is None:
                logging.debug('Storing nonce: %r', nonce)
                self._nonces.add(nonce)
            else:
                raise errors.ClientError('Invalid nonce ({0}): {1}'.format(
                    nonce, error))
        else:
            raise errors.ClientError(
                'Server {0} response did not include a replay nonce'.format(
                    response.request.method))

    def _get_nonce(self, uri):
        # Replenish the pool with a HEAD request if it is empty.
        if not self._nonces:
            logging.debug('Requesting fresh nonce by sending HEAD to %s', uri)
            self._add_nonce(requests.head(uri))
        return self._nonces.pop()

    def _post(self, uri, obj, content_type=JSON_CONTENT_TYPE, **kwargs):
        """Send POST data.

        :param JSONDeSerializable obj: Will be wrapped in JWS.
        :param str content_type: Expected ``Content-Type``, fails if not set.

        :raises acme.messages.ClientError:

        :returns: HTTP Response
        :rtype: `requests.Response`

        """
        data = self._wrap_in_jws(obj, self._get_nonce(uri))
        logging.debug('Sending POST data to %s: %s', uri, data)
        kwargs.setdefault('verify', self.verify_ssl)
        try:
            response = requests.post(uri, data=data, **kwargs)
        except requests.exceptions.RequestException as error:
            raise errors.ClientError(error)
        self._add_nonce(response)
        self._check_response(response, content_type=content_type)
        return response

    @classmethod
    def _regr_from_response(cls, response, uri=None, new_authzr_uri=None,
                            terms_of_service=None):
        # Build a RegistrationResource from a server response, pulling
        # optional URIs out of the Link headers.
        terms_of_service = (
            response.links['terms-of-service']['url']
            if 'terms-of-service' in response.links else terms_of_service)

        if new_authzr_uri is None:
            try:
                new_authzr_uri = response.links['next']['url']
            except KeyError:
                raise errors.ClientError('"next" link missing')

        return messages.RegistrationResource(
            body=messages.Registration.from_json(response.json()),
            uri=response.headers.get('Location', uri),
            new_authzr_uri=new_authzr_uri,
            terms_of_service=terms_of_service)

    def register(self, contact=messages.Registration._fields[
            'contact'].default):
        """Register.

        :param contact: Contact list, as accepted by `.Registration`
        :type contact: `tuple`

        :returns: Registration Resource.
        :rtype: `.RegistrationResource`

        :raises .UnexpectedUpdate:

        """
        new_reg = messages.Registration(contact=contact)

        response = self._post(self.new_reg_uri, new_reg)
        assert response.status_code == httplib.CREATED  # TODO: handle errors

        regr = self._regr_from_response(response)
        # Server must echo back the key and contact we registered with.
        if regr.body.key != self.key.public() or regr.body.contact != contact:
            raise errors.UnexpectedUpdate(regr)

        return regr

    def update_registration(self, regr):
        """Update registration.

        :param regr: Registration Resource.
        :type regr: `.RegistrationResource`

        :returns: Updated Registration Resource.
        :rtype: `.RegistrationResource`

        """
        response = self._post(regr.uri, regr.body)
        # TODO: Boulder returns httplib.ACCEPTED
        #assert response.status_code == httplib.OK

        # TODO: Boulder does not set Location or Link on update
        # (c.f. acme-spec #94)

        updated_regr = self._regr_from_response(
            response, uri=regr.uri, new_authzr_uri=regr.new_authzr_uri,
            terms_of_service=regr.terms_of_service)
        if updated_regr != regr:
            raise errors.UnexpectedUpdate(regr)
        return updated_regr

    def agree_to_tos(self, regr):
        """Agree to the terms-of-service.

        Agree to the terms-of-service in a Registration Resource.

        :param regr: Registration Resource.
        :type regr: `.RegistrationResource`

        :returns: Updated Registration Resource.
        :rtype: `.RegistrationResource`

        """
        return self.update_registration(
            regr.update(body=regr.body.update(agreement=regr.terms_of_service)))

    def _authzr_from_response(self, response, identifier,
                              uri=None, new_cert_uri=None):
        # pylint: disable=no-self-use
        # Build an AuthorizationResource and verify the server did not
        # change the identifier.
        if new_cert_uri is None:
            try:
                new_cert_uri = response.links['next']['url']
            except KeyError:
                raise errors.ClientError('"next" link missing')

        authzr = messages.AuthorizationResource(
            body=messages.Authorization.from_json(response.json()),
            uri=response.headers.get('Location', uri),
            new_cert_uri=new_cert_uri)
        if authzr.body.identifier != identifier:
            raise errors.UnexpectedUpdate(authzr)
        return authzr

    def request_challenges(self, identifier, new_authzr_uri):
        """Request challenges.

        :param identifier: Identifier to be challenged.
        :type identifier: `.messages.Identifier`
        :param str new_authzr_uri: new-authorization URI

        :returns: Authorization Resource.
        :rtype: `.AuthorizationResource`

        """
        new_authz = messages.Authorization(identifier=identifier)
        response = self._post(new_authzr_uri, new_authz)
        assert response.status_code == httplib.CREATED  # TODO: handle errors
        return self._authzr_from_response(response, identifier)

    def request_domain_challenges(self, domain, new_authz_uri):
        """Request challenges for domain names.

        This is simply a convenience function that wraps around
        `request_challenges`, but works with domain names instead of
        generic identifiers.

        :param str domain: Domain name to be challenged.
        :param str new_authz_uri: new-authorization URI

        :returns: Authorization Resource.
        :rtype: `.AuthorizationResource`

        """
        return self.request_challenges(messages.Identifier(
            typ=messages.IDENTIFIER_FQDN, value=domain), new_authz_uri)

    def answer_challenge(self, challb, response):
        """Answer challenge.

        :param challb: Challenge Resource body.
        :type challb: `.ChallengeBody`
        :param response: Corresponding Challenge response
        :type response: `.challenges.ChallengeResponse`

        :returns: Challenge Resource with updated body.
        :rtype: `.ChallengeResource`

        :raises errors.UnexpectedUpdate:

        """
        response = self._post(challb.uri, response)
        try:
            authzr_uri = response.links['up']['url']
        except KeyError:
            raise errors.ClientError('"up" Link header missing')

        challr = messages.ChallengeResource(
            authzr_uri=authzr_uri,
            body=messages.ChallengeBody.from_json(response.json()))
        # TODO: check that challr.uri == response.headers['Location']?
        if challr.uri != challb.uri:
            raise errors.UnexpectedUpdate(challr.uri)
        return challr

    @classmethod
    def retry_after(cls, response, default):
        """Compute next `poll` time based on response ``Retry-After`` header.

        :param response: Response from `poll`.
        :type response: `requests.Response`
        :param int default: Default value (in seconds), used when
            ``Retry-After`` header is not present or invalid.

        :returns: Time point when next `poll` should be performed.
        :rtype: `datetime.datetime`

        """
        retry_after = response.headers.get('Retry-After', str(default))
        try:
            seconds = int(retry_after)
        except ValueError:
            # Header carries an absolute date rather than seconds.
            # pylint: disable=no-member
            decoded = werkzeug.parse_date(retry_after)  # RFC1123
            if decoded is None:
                seconds = default
            else:
                return decoded

        return datetime.datetime.now() + datetime.timedelta(seconds=seconds)

    def poll(self, authzr):
        """Poll Authorization Resource for status.

        :param authzr: Authorization Resource
        :type authzr: `.AuthorizationResource`

        :returns: Updated Authorization Resource and HTTP response.
        :rtype: (`.AuthorizationResource`, `requests.Response`)

        """
        response = self._get(authzr.uri)
        updated_authzr = self._authzr_from_response(
            response, authzr.body.identifier, authzr.uri, authzr.new_cert_uri)
        # TODO: check and raise UnexpectedUpdate
        return updated_authzr, response

    def request_issuance(self, csr, authzrs):
        """Request issuance.

        :param csr: CSR
        :type csr: `M2Crypto.X509.Request` wrapped in `.ComparableX509`

        :param authzrs: `list` of `.AuthorizationResource`

        :returns: Issued certificate
        :rtype: `.messages.CertificateResource`

        """
        assert authzrs, "Authorizations list is empty"
        logging.debug("Requesting issuance...")

        # TODO: assert len(authzrs) == number of SANs
        req = messages.CertificateRequest(
            csr=csr, authorizations=tuple(authzr.uri for authzr in authzrs))

        content_type = self.DER_CONTENT_TYPE  # TODO: add 'cert_type 'argument
        response = self._post(
            authzrs[0].new_cert_uri,  # TODO: acme-spec #90
            req,
            content_type=content_type,
            headers={'Accept': content_type})

        cert_chain_uri = response.links.get('up', {}).get('url')

        try:
            uri = response.headers['Location']
        except KeyError:
            raise errors.ClientError('"Location" Header missing')

        return messages.CertificateResource(
            uri=uri, authzrs=authzrs, cert_chain_uri=cert_chain_uri,
            body=jose.ComparableX509(
                M2Crypto.X509.load_cert_der_string(response.content)))

    def poll_and_request_issuance(self, csr, authzrs, mintime=5):
        """Poll and request issuance.

        This function polls all provided Authorization Resource URIs
        until all challenges are valid, respecting ``Retry-After`` HTTP
        headers, and then calls `request_issuance`.

        .. todo:: add `max_attempts` or `timeout`

        :param csr: CSR.
        :type csr: `M2Crypto.X509.Request` wrapped in `.ComparableX509`
        :param authzrs: `list` of `.AuthorizationResource`
        :param int mintime: Minimum time before next attempt, used if
            ``Retry-After`` is not present in the response.

        :returns: ``(cert, updated_authzrs)`` `tuple` where ``cert`` is
            the issued certificate (`.messages.CertificateResource`),
            and ``updated_authzrs`` is a `tuple` consisting of updated
            Authorization Resources (`.AuthorizationResource`) as
            present in the responses from server, and in the same order
            as the input ``authzrs``.
        :rtype: `tuple`

        """
        # priority queue with datetime (based on Retry-After) as key,
        # and original Authorization Resource as value
        waiting = [(datetime.datetime.now(), authzr) for authzr in authzrs]
        # mapping between original Authorization Resource and the most
        # recently updated one
        updated = dict((authzr, authzr) for authzr in authzrs)

        while waiting:
            # find the smallest Retry-After, and sleep if necessary
            when, authzr = heapq.heappop(waiting)
            now = datetime.datetime.now()
            if when > now:
                seconds = (when - now).seconds
                logging.debug('Sleeping for %d seconds', seconds)
                time.sleep(seconds)

            # Note that we poll with the latest updated Authorization
            # URI, which might have a different URI than initial one
            updated_authzr, response = self.poll(updated[authzr])
            updated[authzr] = updated_authzr

            # pylint: disable=no-member
            if updated_authzr.body.status != messages.STATUS_VALID:
                # push back to the priority queue, with updated retry_after
                heapq.heappush(waiting, (self.retry_after(
                    response, default=mintime), authzr))

        updated_authzrs = tuple(updated[authzr] for authzr in authzrs)
        return self.request_issuance(csr, updated_authzrs), updated_authzrs

    def _get_cert(self, uri):
        """Returns certificate from URI.

        :param str uri: URI of certificate

        :returns: tuple of the form
            (response, :class:`acme.jose.ComparableX509`)
        :rtype: tuple

        """
        content_type = self.DER_CONTENT_TYPE  # TODO: make it a param
        response = self._get(uri, headers={'Accept': content_type},
                             content_type=content_type)
        return response, jose.ComparableX509(
            M2Crypto.X509.load_cert_der_string(response.content))

    def check_cert(self, certr):
        """Check for new cert.

        :param certr: Certificate Resource
        :type certr: `.CertificateResource`

        :returns: Updated Certificate Resource.
        :rtype: `.CertificateResource`

        """
        # TODO: acme-spec 5.1 table action should be renamed to
        # "refresh cert", and this method integrated with self.refresh
        response, cert = self._get_cert(certr.uri)
        if 'Location' not in response.headers:
            raise errors.ClientError('Location header missing')
        if response.headers['Location'] != certr.uri:
            raise errors.UnexpectedUpdate(response.text)
        return certr.update(body=cert)

    def refresh(self, certr):
        """Refresh certificate.

        Currently identical to `check_cert`.

        :param certr: Certificate Resource
        :type certr: `.CertificateResource`

        :returns: Updated Certificate Resource.
        :rtype: `.CertificateResource`

        """
        # TODO: If a client sends a refresh request and the server is
        # not willing to refresh the certificate, the server MUST
        # respond with status code 403 (Forbidden)
        return self.check_cert(certr)

    def fetch_chain(self, certr):
        """Fetch chain for certificate.

        :param certr: Certificate Resource
        :type certr: `.CertificateResource`

        :returns: Certificate chain, or `None` if no "up" Link was provided.
        :rtype: `M2Crypto.X509.X509` wrapped in `.ComparableX509`

        """
        if certr.cert_chain_uri is not None:
            return self._get_cert(certr.cert_chain_uri)[1]
        else:
            return None

    def revoke(self, cert):
        """Revoke certificate.

        :param .ComparableX509 cert: `M2Crypto.X509.X509` wrapped in
            `.ComparableX509`

        :raises .ClientError: If revocation is unsuccessful.

        """
        response = self._post(messages.Revocation.url(self.new_reg_uri),
                              messages.Revocation(certificate=cert))
        if response.status_code != httplib.OK:
            raise errors.ClientError(
                'Successful revocation must return HTTP OK status')
| {
"repo_name": "digideskio/lets-encrypt-preview",
"path": "acme/client.py",
"copies": "1",
"size": "19923",
"license": "apache-2.0",
"hash": -3140902676937840600,
"line_mean": 34.8327338129,
"line_max": 81,
"alpha_frac": 0.6090950158,
"autogenerated": false,
"ratio": 4.2362321922177335,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.00010187928088901427,
"num_lines": 556
} |
"""ACME client API."""
import datetime
import heapq
import json
import logging
import time
from six.moves import http_client # pylint: disable=import-error
import OpenSSL
import requests
import six
import werkzeug
from acme import errors
from acme import jose
from acme import jws
from acme import messages
logger = logging.getLogger(__name__)

# https://urllib3.readthedocs.org/en/latest/security.html#insecureplatformwarning
# On Python 2, swap in pyOpenSSL-backed TLS for urllib3 to avoid the
# InsecurePlatformWarning described at the URL above.
if six.PY2:
    requests.packages.urllib3.contrib.pyopenssl.inject_into_urllib3()
class Client(object): # pylint: disable=too-many-instance-attributes
"""ACME client.
.. todo::
Clean up raised error types hierarchy, document, and handle (wrap)
instances of `.DeserializationError` raised in `from_json()`.
:ivar str new_reg_uri: Location of new-reg
:ivar key: `.JWK` (private)
:ivar alg: `.JWASignature`
:ivar bool verify_ssl: Verify SSL certificates?
:ivar .ClientNetwork net: Client network. Useful for testing. If not
supplied, it will be initialized using `key`, `alg` and
`verify_ssl`.
"""
DER_CONTENT_TYPE = 'application/pkix-cert'
def __init__(self, new_reg_uri, key, alg=jose.RS256,
             verify_ssl=True, net=None):
    """Initialize.

    :param str new_reg_uri: Location of new-reg
    :param .JWK key: Account private key
    :param .JWASignature alg: Signature algorithm
    :param bool verify_ssl: Verify SSL certificates?
    :param .ClientNetwork net: Pre-configured network (useful for
        testing); built from `key`, `alg` and `verify_ssl` when not
        supplied.

    """
    self.new_reg_uri = new_reg_uri
    self.key = key
    self.net = ClientNetwork(key, alg, verify_ssl) if net is None else net
@classmethod
def _regr_from_response(cls, response, uri=None, new_authzr_uri=None,
                        terms_of_service=None):
    """Build a `.RegistrationResource` from a server response.

    URIs missing from the keyword arguments are taken from the
    response's ``Link`` headers.

    """
    links = response.links
    if 'terms-of-service' in links:
        terms_of_service = links['terms-of-service']['url']

    if new_authzr_uri is None:
        if 'next' not in links:
            raise errors.ClientError('"next" link missing')
        new_authzr_uri = links['next']['url']

    body = messages.Registration.from_json(response.json())
    return messages.RegistrationResource(
        body=body,
        uri=response.headers.get('Location', uri),
        new_authzr_uri=new_authzr_uri,
        terms_of_service=terms_of_service)
def register(self, new_reg=None):
    """Register an account with the ACME CA.

    :param new_reg: Registration message to send; a default (empty)
        `.messages.Registration` is used when not supplied.
    :type new_reg: `.messages.Registration` or `None`

    :returns: Registration Resource.
    :rtype: `.RegistrationResource`

    :raises .UnexpectedUpdate: if the server's echoed key or contact
        does not match what was sent.

    """
    new_reg = messages.Registration() if new_reg is None else new_reg

    response = self.net.post(self.new_reg_uri, new_reg)
    # TODO: handle errors
    assert response.status_code == http_client.CREATED

    # "Instance of 'Field' has no key/contact member" bug:
    # pylint: disable=no-member
    regr = self._regr_from_response(response)
    if (regr.body.key != self.key.public_key() or
            regr.body.contact != new_reg.contact):
        raise errors.UnexpectedUpdate(regr)

    return regr
def update_registration(self, regr):
    """Update registration.

    :param regr: Registration Resource.
    :type regr: `.RegistrationResource`

    :returns: Updated Registration Resource.
    :rtype: `.RegistrationResource`

    :raises .UnexpectedUpdate: if the server returns different data
        than what was posted.

    """
    response = self.net.post(regr.uri, regr.body)
    # TODO: Boulder returns httplib.ACCEPTED
    #assert response.status_code == httplib.OK

    # TODO: Boulder does not set Location or Link on update
    # (c.f. acme-spec #94)

    updated_regr = self._regr_from_response(
        response, uri=regr.uri, new_authzr_uri=regr.new_authzr_uri,
        terms_of_service=regr.terms_of_service)
    if updated_regr != regr:
        raise errors.UnexpectedUpdate(regr)
    return updated_regr
def agree_to_tos(self, regr):
    """Agree to the terms-of-service.

    Agree to the terms-of-service in a Registration Resource.

    :param regr: Registration Resource.
    :type regr: `.RegistrationResource`

    :returns: Updated Registration Resource.
    :rtype: `.RegistrationResource`

    """
    # Copy the ToS URI into the registration body's agreement field
    # and push the result to the server.
    agreed_body = regr.body.update(agreement=regr.terms_of_service)
    return self.update_registration(regr.update(body=agreed_body))
def _authzr_from_response(self, response, identifier,
                          uri=None, new_cert_uri=None):
    # pylint: disable=no-self-use
    """Build an `.AuthorizationResource` from a server response,
    verifying that the server did not change the identifier."""
    if new_cert_uri is None:
        if 'next' not in response.links:
            raise errors.ClientError('"next" link missing')
        new_cert_uri = response.links['next']['url']

    authzr = messages.AuthorizationResource(
        body=messages.Authorization.from_json(response.json()),
        uri=response.headers.get('Location', uri),
        new_cert_uri=new_cert_uri)
    if authzr.body.identifier != identifier:
        raise errors.UnexpectedUpdate(authzr)
    return authzr
def request_challenges(self, identifier, new_authzr_uri):
    """Request challenges.

    :param identifier: Identifier to be challenged.
    :type identifier: `.messages.Identifier`
    :param str new_authzr_uri: new-authorization URI

    :returns: Authorization Resource.
    :rtype: `.AuthorizationResource`

    """
    authz_request = messages.Authorization(identifier=identifier)
    response = self.net.post(new_authzr_uri, authz_request)
    # TODO: handle errors
    assert response.status_code == http_client.CREATED
    return self._authzr_from_response(response, identifier)
def request_domain_challenges(self, domain, new_authz_uri):
    """Request challenges for domain names.

    This is simply a convenience function that wraps around
    `request_challenges`, but works with domain names instead of
    generic identifiers.

    :param str domain: Domain name to be challenged.
    :param str new_authz_uri: new-authorization URI

    :returns: Authorization Resource.
    :rtype: `.AuthorizationResource`

    """
    return self.request_challenges(messages.Identifier(
        typ=messages.IDENTIFIER_FQDN, value=domain), new_authz_uri)
def answer_challenge(self, challb, response):
    """Answer challenge.

    :param challb: Challenge Resource body.
    :type challb: `.ChallengeBody`

    :param response: Corresponding Challenge response
    :type response: `.challenges.ChallengeResponse`

    :returns: Challenge Resource with updated body.
    :rtype: `.ChallengeResource`

    :raises .UnexpectedUpdate:

    """
    post_response = self.net.post(challb.uri, response)
    try:
        authzr_uri = post_response.links['up']['url']
    except KeyError:
        raise errors.ClientError('"up" Link header missing')
    challr = messages.ChallengeResource(
        body=messages.ChallengeBody.from_json(post_response.json()),
        authzr_uri=authzr_uri)
    # TODO: check that challr.uri == response.headers['Location']?
    if challr.uri != challb.uri:
        raise errors.UnexpectedUpdate(challr.uri)
    return challr
@classmethod
def retry_after(cls, response, default):
    """Compute next `poll` time based on response ``Retry-After`` header.

    :param response: Response from `poll`.
    :type response: `requests.Response`

    :param int default: Default value (in seconds), used when
        ``Retry-After`` header is not present or invalid.

    :returns: Time point when next `poll` should be performed.
    :rtype: `datetime.datetime`

    """
    header = response.headers.get('Retry-After', str(default))
    try:
        # Simple case: an integer number of seconds.
        return datetime.datetime.now() + datetime.timedelta(
            seconds=int(header))
    except ValueError:
        pass
    # Otherwise the header may carry an HTTP-date (RFC1123).
    # pylint: disable=no-member
    decoded = werkzeug.parse_date(header)
    if decoded is not None:
        return decoded
    # Unparseable header: fall back to the caller-supplied default.
    return datetime.datetime.now() + datetime.timedelta(seconds=default)
def poll(self, authzr):
    """Poll Authorization Resource for status.

    :param authzr: Authorization Resource
    :type authzr: `.AuthorizationResource`

    :returns: Updated Authorization Resource and HTTP response.
    :rtype: (`.AuthorizationResource`, `requests.Response`)

    """
    response = self.net.get(authzr.uri)
    # Rebuild the resource, keeping the previous URIs as fallbacks in
    # case the server response omits headers/links.
    updated_authzr = self._authzr_from_response(
        response, authzr.body.identifier, authzr.uri, authzr.new_cert_uri)
    # TODO: check and raise UnexpectedUpdate
    return updated_authzr, response
def request_issuance(self, csr, authzrs):
    """Request issuance.

    :param csr: CSR
    :type csr: `OpenSSL.crypto.X509Req` wrapped in `.ComparableX509`

    :param authzrs: `list` of `.AuthorizationResource`

    :returns: Issued certificate
    :rtype: `.messages.CertificateResource`

    """
    assert authzrs, "Authorizations list is empty"
    logger.debug("Requesting issuance...")
    # TODO: assert len(authzrs) == number of SANs
    req = messages.CertificateRequest(
        csr=csr, authorizations=tuple(authzr.uri for authzr in authzrs))
    content_type = self.DER_CONTENT_TYPE  # TODO: add 'cert_type' argument
    response = self.net.post(
        authzrs[0].new_cert_uri,  # TODO: acme-spec #90
        req,
        content_type=content_type,
        headers={'Accept': content_type})
    # "up" link, if present, points at the issuer certificate chain.
    cert_chain_uri = response.links.get('up', {}).get('url')
    try:
        uri = response.headers['Location']
    except KeyError:
        raise errors.ClientError('"Location" Header missing')
    # Response body is the DER-encoded certificate itself.
    return messages.CertificateResource(
        uri=uri, authzrs=authzrs, cert_chain_uri=cert_chain_uri,
        body=jose.ComparableX509(OpenSSL.crypto.load_certificate(
            OpenSSL.crypto.FILETYPE_ASN1, response.content)))
def poll_and_request_issuance(self, csr, authzrs, mintime=5):
    """Poll and request issuance.

    This function polls all provided Authorization Resource URIs
    until all challenges are valid, respecting ``Retry-After`` HTTP
    headers, and then calls `request_issuance`.

    .. todo:: add `max_attempts` or `timeout`

    :param csr: CSR.
    :type csr: `OpenSSL.crypto.X509Req` wrapped in `.ComparableX509`

    :param authzrs: `list` of `.AuthorizationResource`

    :param int mintime: Minimum time before next attempt, used if
        ``Retry-After`` is not present in the response.

    :returns: ``(cert, updated_authzrs)`` `tuple` where ``cert`` is
        the issued certificate (`.messages.CertificateResource`),
        and ``updated_authzrs`` is a `tuple` consisting of updated
        Authorization Resources (`.AuthorizationResource`) as
        present in the responses from server, and in the same order
        as the input ``authzrs``.
    :rtype: `tuple`

    """
    # priority queue with datetime (based on Retry-After) as key,
    # and original Authorization Resource as value
    waiting = [(datetime.datetime.now(), authzr) for authzr in authzrs]
    # mapping between original Authorization Resource and the most
    # recently updated one
    updated = dict((authzr, authzr) for authzr in authzrs)
    while waiting:
        # find the smallest Retry-After, and sleep if necessary
        when, authzr = heapq.heappop(waiting)
        now = datetime.datetime.now()
        if when > now:
            seconds = (when - now).seconds
            logger.debug('Sleeping for %d seconds', seconds)
            time.sleep(seconds)
        # Note that we poll with the latest updated Authorization
        # URI, which might have a different URI than initial one
        updated_authzr, response = self.poll(updated[authzr])
        updated[authzr] = updated_authzr
        # pylint: disable=no-member
        if updated_authzr.body.status != messages.STATUS_VALID:
            # push back to the priority queue, with updated retry_after
            heapq.heappush(waiting, (self.retry_after(
                response, default=mintime), authzr))
    # Loop exits only when every authorization reached STATUS_VALID.
    updated_authzrs = tuple(updated[authzr] for authzr in authzrs)
    return self.request_issuance(csr, updated_authzrs), updated_authzrs
def _get_cert(self, uri):
    """Returns certificate from URI.

    :param str uri: URI of certificate

    :returns: tuple of the form
        (response, :class:`acme.jose.ComparableX509`)
    :rtype: tuple

    """
    content_type = self.DER_CONTENT_TYPE  # TODO: make it a param
    response = self.net.get(
        uri, headers={'Accept': content_type}, content_type=content_type)
    cert = OpenSSL.crypto.load_certificate(
        OpenSSL.crypto.FILETYPE_ASN1, response.content)
    return response, jose.ComparableX509(cert)
def check_cert(self, certr):
    """Check for new cert.

    :param certr: Certificate Resource
    :type certr: `.CertificateResource`

    :returns: Updated Certificate Resource.
    :rtype: `.CertificateResource`

    """
    # TODO: acme-spec 5.1 table action should be renamed to
    # "refresh cert", and this method integrated with self.refresh
    response, cert = self._get_cert(certr.uri)
    location = response.headers.get('Location')
    if location is None:
        raise errors.ClientError('Location header missing')
    if location != certr.uri:
        raise errors.UnexpectedUpdate(response.text)
    return certr.update(body=cert)
def refresh(self, certr):
    """Refresh certificate.

    :param certr: Certificate Resource
    :type certr: `.CertificateResource`

    :returns: Updated Certificate Resource.
    :rtype: `.CertificateResource`

    """
    # TODO: If a client sends a refresh request and the server is
    # not willing to refresh the certificate, the server MUST
    # respond with status code 403 (Forbidden)
    # Currently identical to check_cert; kept separate for the future
    # spec behavior described above.
    return self.check_cert(certr)
def fetch_chain(self, certr):
    """Fetch chain for certificate.

    :param certr: Certificate Resource
    :type certr: `.CertificateResource`

    :returns: Certificate chain, or `None` if no "up" Link was provided.
    :rtype: `OpenSSL.crypto.X509` wrapped in `.ComparableX509`

    """
    # No chain URI means the server provided no "up" link.
    if certr.cert_chain_uri is None:
        return None
    return self._get_cert(certr.cert_chain_uri)[1]
def revoke(self, cert):
    """Revoke certificate.

    :param .ComparableX509 cert: `OpenSSL.crypto.X509` wrapped in
        `.ComparableX509`

    :raises .ClientError: If revocation is unsuccessful.

    """
    url = messages.Revocation.url(self.new_reg_uri)
    response = self.net.post(url, messages.Revocation(certificate=cert))
    if response.status_code != http_client.OK:
        raise errors.ClientError(
            'Successful revocation must return HTTP OK status')
class ClientNetwork(object):
    """Client network.

    Signs outgoing POST bodies as JWS, tracks replay nonces handed out
    by the server, and validates responses before returning them.

    """

    JSON_CONTENT_TYPE = 'application/json'
    JSON_ERROR_CONTENT_TYPE = 'application/problem+json'
    REPLAY_NONCE_HEADER = 'Replay-Nonce'

    def __init__(self, key, alg=jose.RS256, verify_ssl=True):
        """Initialize.

        :param key: Account private key (`.JWK`).
        :param alg: Signature algorithm (`.JWASignature`).
        :param bool verify_ssl: Verify SSL certificates?

        """
        self.key = key
        self.alg = alg
        self.verify_ssl = verify_ssl
        # Nonces harvested from server responses, consumed by POSTs.
        self._nonces = set()

    def _wrap_in_jws(self, obj, nonce):
        """Wrap `JSONDeSerializable` object in JWS.

        .. todo:: Implement ``acmePath``.

        :param .ClientRequestableResource obj:
        :param bytes nonce:

        :rtype: `.JWS`

        """
        jobj = obj.to_json()
        # ACME requires the resource type to be echoed in the payload.
        jobj['resource'] = obj.resource_type
        dumps = json.dumps(jobj).encode()
        logger.debug('Serialized JSON: %s', dumps)
        return jws.JWS.sign(
            payload=dumps, key=self.key, alg=self.alg,
            nonce=nonce).json_dumps()

    @classmethod
    def _check_response(cls, response, content_type=None):
        """Check response content and its type.

        .. note::
           Checking is not strict: wrong server response ``Content-Type``
           HTTP header is ignored if response is an expected JSON object
           (c.f. Boulder #56).

        :param str content_type: Expected Content-Type response header.
            If JSON is expected and not present in server response, this
            function will raise an error. Otherwise, wrong Content-Type
            is ignored, but logged.

        :raises .messages.Error: If server response body
            carries HTTP Problem (draft-ietf-appsawg-http-problem-00).
        :raises .ClientError: In case of other networking errors.

        """
        logger.debug('Received response %s (headers: %s): %r',
                     response, response.headers, response.content)
        response_ct = response.headers.get('Content-Type')
        try:
            # TODO: response.json() is called twice, once here, and
            # once in _get and _post clients
            jobj = response.json()
        except ValueError:  # body is not JSON
            jobj = None
        if not response.ok:
            if jobj is not None:
                if response_ct != cls.JSON_ERROR_CONTENT_TYPE:
                    logger.debug(
                        'Ignoring wrong Content-Type (%r) for JSON Error',
                        response_ct)
                try:
                    raise messages.Error.from_json(jobj)
                except jose.DeserializationError as error:
                    # Couldn't deserialize JSON object
                    raise errors.ClientError((response, error))
            else:
                # response is not JSON object
                raise errors.ClientError(response)
        else:
            if jobj is not None and response_ct != cls.JSON_CONTENT_TYPE:
                logger.debug(
                    'Ignoring wrong Content-Type (%r) for JSON decodable '
                    'response', response_ct)
            if content_type == cls.JSON_CONTENT_TYPE and jobj is None:
                raise errors.ClientError(
                    'Unexpected response Content-Type: {0}'.format(response_ct))
        return response

    def _send_request(self, method, url, *args, **kwargs):
        """Send HTTP request.

        Makes sure that `verify_ssl` is respected. Logs request and
        response (with headers). For allowed parameters please see
        `requests.request`.

        :param str method: method for the new `requests.Request` object
        :param str url: URL for the new `requests.Request` object

        :raises requests.exceptions.RequestException: in case of any problems

        :returns: HTTP Response
        :rtype: `requests.Response`

        """
        # Use the module-level logger (rather than the root logger via
        # logging.debug) for consistency with the rest of this class.
        logger.debug('Sending %s request to %s', method, url)
        kwargs['verify'] = self.verify_ssl
        response = requests.request(method, url, *args, **kwargs)
        logger.debug('Received %s. Headers: %s. Content: %r',
                     response, response.headers, response.content)
        return response

    def head(self, *args, **kwargs):
        """Send HEAD request without checking the response.

        Note, that `_check_response` is not called, as it is expected
        that status code other than successfuly 2xx will be returned, or
        messages2.Error will be raised by the server.

        """
        return self._send_request('HEAD', *args, **kwargs)

    def get(self, url, content_type=JSON_CONTENT_TYPE, **kwargs):
        """Send GET request and check response."""
        return self._check_response(
            self._send_request('GET', url, **kwargs), content_type=content_type)

    def _add_nonce(self, response):
        # Harvest the anti-replay nonce the server attached to `response`.
        if self.REPLAY_NONCE_HEADER in response.headers:
            nonce = response.headers[self.REPLAY_NONCE_HEADER]
            try:
                decoded_nonce = jws.Header._fields['nonce'].decode(nonce)
            except jose.DeserializationError as error:
                raise errors.BadNonce(nonce, error)
            logger.debug('Storing nonce: %r', decoded_nonce)
            self._nonces.add(decoded_nonce)
        else:
            raise errors.MissingNonce(response)

    def _get_nonce(self, url):
        # Lazily fetch a nonce with a HEAD request when the pool is empty.
        if not self._nonces:
            logger.debug('Requesting fresh nonce')
            self._add_nonce(self.head(url))
        return self._nonces.pop()

    def post(self, url, obj, content_type=JSON_CONTENT_TYPE, **kwargs):
        """POST object wrapped in `.JWS` and check response."""
        data = self._wrap_in_jws(obj, self._get_nonce(url))
        response = self._send_request('POST', url, data=data, **kwargs)
        self._add_nonce(response)
        return self._check_response(response, content_type=content_type)
| {
"repo_name": "Jonadabe/letsencrypt",
"path": "acme/acme/client.py",
"copies": "1",
"size": "21072",
"license": "apache-2.0",
"hash": 4524162679454449000,
"line_mean": 35.0821917808,
"line_max": 81,
"alpha_frac": 0.6124715262,
"autogenerated": false,
"ratio": 4.1992825827022715,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.00011813451915520139,
"num_lines": 584
} |
"""ACME client API."""
import datetime
import heapq
import logging
import time
from six.moves import http_client # pylint: disable=import-error
import OpenSSL
import requests
import six
import werkzeug
from acme import errors
from acme import jose
from acme import jws
from acme import messages
logger = logging.getLogger(__name__)
# https://urllib3.readthedocs.org/en/latest/security.html#insecureplatformwarning
if six.PY2:
requests.packages.urllib3.contrib.pyopenssl.inject_into_urllib3()
class Client(object):  # pylint: disable=too-many-instance-attributes
    """ACME client.

    .. todo::
       Clean up raised error types hierarchy, document, and handle (wrap)
       instances of `.DeserializationError` raised in `from_json()`.

    :ivar str new_reg_uri: Location of new-reg
    :ivar key: `.JWK` (private)
    :ivar alg: `.JWASignature`
    :ivar bool verify_ssl: Verify SSL certificates?
    :ivar .ClientNetwork net: Client network. Useful for testing. If not
        supplied, it will be initialized using `key`, `alg` and
        `verify_ssl`.

    """

    # Certificates are exchanged DER-encoded (c.f. `request_issuance`).
    DER_CONTENT_TYPE = 'application/pkix-cert'

    def __init__(self, new_reg_uri, key, alg=jose.RS256,
                 verify_ssl=True, net=None):
        self.new_reg_uri = new_reg_uri
        self.key = key
        # `net` can be injected for testing; otherwise build one.
        self.net = ClientNetwork(key, alg, verify_ssl) if net is None else net

    @classmethod
    def _regr_from_response(cls, response, uri=None, new_authzr_uri=None,
                            terms_of_service=None):
        # Build a RegistrationResource from `response`, preferring
        # server-provided links/headers over the supplied fallbacks.
        terms_of_service = (
            response.links['terms-of-service']['url']
            if 'terms-of-service' in response.links else terms_of_service)
        if new_authzr_uri is None:
            try:
                new_authzr_uri = response.links['next']['url']
            except KeyError:
                raise errors.ClientError('"next" link missing')
        return messages.RegistrationResource(
            body=messages.Registration.from_json(response.json()),
            uri=response.headers.get('Location', uri),
            new_authzr_uri=new_authzr_uri,
            terms_of_service=terms_of_service)

    def register(self, new_reg=None):
        """Register.

        :param .NewRegistration new_reg:

        :returns: Registration Resource.
        :rtype: `.RegistrationResource`

        :raises .UnexpectedUpdate:

        """
        new_reg = messages.NewRegistration() if new_reg is None else new_reg
        assert isinstance(new_reg, messages.NewRegistration)
        response = self.net.post(self.new_reg_uri, new_reg)
        # TODO: handle errors
        assert response.status_code == http_client.CREATED
        # "Instance of 'Field' has no key/contact member" bug:
        # pylint: disable=no-member
        regr = self._regr_from_response(response)
        # The server must echo back the key and contact info it stored.
        if (regr.body.key != self.key.public_key() or
                regr.body.contact != new_reg.contact):
            raise errors.UnexpectedUpdate(regr)
        return regr

    def _send_recv_regr(self, regr, body):
        # POST `body` to the registration URI and rebuild the resource,
        # falling back to the known URIs when headers are absent.
        response = self.net.post(regr.uri, body)
        # TODO: Boulder returns httplib.ACCEPTED
        #assert response.status_code == httplib.OK
        # TODO: Boulder does not set Location or Link on update
        # (c.f. acme-spec #94)
        return self._regr_from_response(
            response, uri=regr.uri, new_authzr_uri=regr.new_authzr_uri,
            terms_of_service=regr.terms_of_service)

    def update_registration(self, regr, update=None):
        """Update registration.

        :param messages.RegistrationResource regr: Registration Resource.
        :param messages.Registration update: Updated body of the
            resource. If not provided, body will be taken from `regr`.

        :returns: Updated Registration Resource.
        :rtype: `.RegistrationResource`

        """
        update = regr.body if update is None else update
        updated_regr = self._send_recv_regr(
            regr, body=messages.UpdateRegistration(**dict(update)))
        if updated_regr != regr:
            raise errors.UnexpectedUpdate(regr)
        return updated_regr

    def query_registration(self, regr):
        """Query server about registration.

        :param messages.RegistrationResource regr: Existing Registration
            Resource.

        """
        # An empty update acts as a read of the current server state.
        return self._send_recv_regr(regr, messages.UpdateRegistration())

    def agree_to_tos(self, regr):
        """Agree to the terms-of-service.

        Agree to the terms-of-service in a Registration Resource.

        :param regr: Registration Resource.
        :type regr: `.RegistrationResource`

        :returns: Updated Registration Resource.
        :rtype: `.RegistrationResource`

        """
        return self.update_registration(
            regr.update(body=regr.body.update(agreement=regr.terms_of_service)))

    def _authzr_from_response(self, response, identifier,
                              uri=None, new_cert_uri=None):
        # Build an AuthorizationResource from `response`, verifying the
        # server did not swap the identifier being authorized.
        # pylint: disable=no-self-use
        if new_cert_uri is None:
            try:
                new_cert_uri = response.links['next']['url']
            except KeyError:
                raise errors.ClientError('"next" link missing')
        authzr = messages.AuthorizationResource(
            body=messages.Authorization.from_json(response.json()),
            uri=response.headers.get('Location', uri),
            new_cert_uri=new_cert_uri)
        if authzr.body.identifier != identifier:
            raise errors.UnexpectedUpdate(authzr)
        return authzr

    def request_challenges(self, identifier, new_authzr_uri):
        """Request challenges.

        :param identifier: Identifier to be challenged.
        :type identifier: `.messages.Identifier`
        :param str new_authzr_uri: new-authorization URI

        :returns: Authorization Resource.
        :rtype: `.AuthorizationResource`

        """
        new_authz = messages.NewAuthorization(identifier=identifier)
        response = self.net.post(new_authzr_uri, new_authz)
        # TODO: handle errors
        assert response.status_code == http_client.CREATED
        return self._authzr_from_response(response, identifier)

    def request_domain_challenges(self, domain, new_authz_uri):
        """Request challenges for domain names.

        This is simply a convenience function that wraps around
        `request_challenges`, but works with domain names instead of
        generic identifiers.

        :param str domain: Domain name to be challenged.
        :param str new_authz_uri: new-authorization URI

        :returns: Authorization Resource.
        :rtype: `.AuthorizationResource`

        """
        return self.request_challenges(messages.Identifier(
            typ=messages.IDENTIFIER_FQDN, value=domain), new_authz_uri)

    def answer_challenge(self, challb, response):
        """Answer challenge.

        :param challb: Challenge Resource body.
        :type challb: `.ChallengeBody`

        :param response: Corresponding Challenge response
        :type response: `.challenges.ChallengeResponse`

        :returns: Challenge Resource with updated body.
        :rtype: `.ChallengeResource`

        :raises .UnexpectedUpdate:

        """
        response = self.net.post(challb.uri, response)
        try:
            authzr_uri = response.links['up']['url']
        except KeyError:
            raise errors.ClientError('"up" Link header missing')
        challr = messages.ChallengeResource(
            authzr_uri=authzr_uri,
            body=messages.ChallengeBody.from_json(response.json()))
        # TODO: check that challr.uri == response.headers['Location']?
        if challr.uri != challb.uri:
            raise errors.UnexpectedUpdate(challr.uri)
        return challr

    @classmethod
    def retry_after(cls, response, default):
        """Compute next `poll` time based on response ``Retry-After`` header.

        :param response: Response from `poll`.
        :type response: `requests.Response`

        :param int default: Default value (in seconds), used when
            ``Retry-After`` header is not present or invalid.

        :returns: Time point when next `poll` should be performed.
        :rtype: `datetime.datetime`

        """
        retry_after = response.headers.get('Retry-After', str(default))
        try:
            seconds = int(retry_after)
        except ValueError:
            # Not an integer: the header may also carry an HTTP-date.
            # pylint: disable=no-member
            decoded = werkzeug.parse_date(retry_after)  # RFC1123
            if decoded is None:
                seconds = default
            else:
                return decoded
        return datetime.datetime.now() + datetime.timedelta(seconds=seconds)

    def poll(self, authzr):
        """Poll Authorization Resource for status.

        :param authzr: Authorization Resource
        :type authzr: `.AuthorizationResource`

        :returns: Updated Authorization Resource and HTTP response.
        :rtype: (`.AuthorizationResource`, `requests.Response`)

        """
        response = self.net.get(authzr.uri)
        updated_authzr = self._authzr_from_response(
            response, authzr.body.identifier, authzr.uri, authzr.new_cert_uri)
        # TODO: check and raise UnexpectedUpdate
        return updated_authzr, response

    def request_issuance(self, csr, authzrs):
        """Request issuance.

        :param csr: CSR
        :type csr: `OpenSSL.crypto.X509Req` wrapped in `.ComparableX509`

        :param authzrs: `list` of `.AuthorizationResource`

        :returns: Issued certificate
        :rtype: `.messages.CertificateResource`

        """
        assert authzrs, "Authorizations list is empty"
        logger.debug("Requesting issuance...")
        # TODO: assert len(authzrs) == number of SANs
        req = messages.CertificateRequest(csr=csr)
        content_type = self.DER_CONTENT_TYPE  # TODO: add 'cert_type' argument
        response = self.net.post(
            authzrs[0].new_cert_uri,  # TODO: acme-spec #90
            req,
            content_type=content_type,
            headers={'Accept': content_type})
        # "up" link, if present, points at the issuer certificate chain.
        cert_chain_uri = response.links.get('up', {}).get('url')
        try:
            uri = response.headers['Location']
        except KeyError:
            raise errors.ClientError('"Location" Header missing')
        # Response body is the DER-encoded certificate itself.
        return messages.CertificateResource(
            uri=uri, authzrs=authzrs, cert_chain_uri=cert_chain_uri,
            body=jose.ComparableX509(OpenSSL.crypto.load_certificate(
                OpenSSL.crypto.FILETYPE_ASN1, response.content)))

    def poll_and_request_issuance(self, csr, authzrs, mintime=5):
        """Poll and request issuance.

        This function polls all provided Authorization Resource URIs
        until all challenges are valid, respecting ``Retry-After`` HTTP
        headers, and then calls `request_issuance`.

        .. todo:: add `max_attempts` or `timeout`

        :param csr: CSR.
        :type csr: `OpenSSL.crypto.X509Req` wrapped in `.ComparableX509`

        :param authzrs: `list` of `.AuthorizationResource`

        :param int mintime: Minimum time before next attempt, used if
            ``Retry-After`` is not present in the response.

        :returns: ``(cert, updated_authzrs)`` `tuple` where ``cert`` is
            the issued certificate (`.messages.CertificateResource`),
            and ``updated_authzrs`` is a `tuple` consisting of updated
            Authorization Resources (`.AuthorizationResource`) as
            present in the responses from server, and in the same order
            as the input ``authzrs``.
        :rtype: `tuple`

        """
        # priority queue with datetime (based on Retry-After) as key,
        # and original Authorization Resource as value
        waiting = [(datetime.datetime.now(), authzr) for authzr in authzrs]
        # mapping between original Authorization Resource and the most
        # recently updated one
        updated = dict((authzr, authzr) for authzr in authzrs)
        while waiting:
            # find the smallest Retry-After, and sleep if necessary
            when, authzr = heapq.heappop(waiting)
            now = datetime.datetime.now()
            if when > now:
                seconds = (when - now).seconds
                logger.debug('Sleeping for %d seconds', seconds)
                time.sleep(seconds)
            # Note that we poll with the latest updated Authorization
            # URI, which might have a different URI than initial one
            updated_authzr, response = self.poll(updated[authzr])
            updated[authzr] = updated_authzr
            # pylint: disable=no-member
            if updated_authzr.body.status != messages.STATUS_VALID:
                # push back to the priority queue, with updated retry_after
                heapq.heappush(waiting, (self.retry_after(
                    response, default=mintime), authzr))
        # Loop exits only when every authorization reached STATUS_VALID.
        updated_authzrs = tuple(updated[authzr] for authzr in authzrs)
        return self.request_issuance(csr, updated_authzrs), updated_authzrs

    def _get_cert(self, uri):
        """Returns certificate from URI.

        :param str uri: URI of certificate

        :returns: tuple of the form
            (response, :class:`acme.jose.ComparableX509`)
        :rtype: tuple

        """
        content_type = self.DER_CONTENT_TYPE  # TODO: make it a param
        response = self.net.get(uri, headers={'Accept': content_type},
                                content_type=content_type)
        return response, jose.ComparableX509(OpenSSL.crypto.load_certificate(
            OpenSSL.crypto.FILETYPE_ASN1, response.content))

    def check_cert(self, certr):
        """Check for new cert.

        :param certr: Certificate Resource
        :type certr: `.CertificateResource`

        :returns: Updated Certificate Resource.
        :rtype: `.CertificateResource`

        """
        # TODO: acme-spec 5.1 table action should be renamed to
        # "refresh cert", and this method integrated with self.refresh
        response, cert = self._get_cert(certr.uri)
        if 'Location' not in response.headers:
            raise errors.ClientError('Location header missing')
        if response.headers['Location'] != certr.uri:
            raise errors.UnexpectedUpdate(response.text)
        return certr.update(body=cert)

    def refresh(self, certr):
        """Refresh certificate.

        :param certr: Certificate Resource
        :type certr: `.CertificateResource`

        :returns: Updated Certificate Resource.
        :rtype: `.CertificateResource`

        """
        # TODO: If a client sends a refresh request and the server is
        # not willing to refresh the certificate, the server MUST
        # respond with status code 403 (Forbidden)
        return self.check_cert(certr)

    def fetch_chain(self, certr):
        """Fetch chain for certificate.

        :param certr: Certificate Resource
        :type certr: `.CertificateResource`

        :returns: Certificate chain, or `None` if no "up" Link was provided.
        :rtype: `OpenSSL.crypto.X509` wrapped in `.ComparableX509`

        """
        if certr.cert_chain_uri is not None:
            return self._get_cert(certr.cert_chain_uri)[1]
        else:
            return None

    def revoke(self, cert):
        """Revoke certificate.

        :param .ComparableX509 cert: `OpenSSL.crypto.X509` wrapped in
            `.ComparableX509`

        :raises .ClientError: If revocation is unsuccessful.

        """
        response = self.net.post(messages.Revocation.url(self.new_reg_uri),
                                 messages.Revocation(certificate=cert))
        if response.status_code != http_client.OK:
            raise errors.ClientError(
                'Successful revocation must return HTTP OK status')
class ClientNetwork(object):
    """Client network.

    Signs outgoing POST bodies as JWS, tracks replay nonces handed out
    by the server, and validates responses before returning them.

    """

    JSON_CONTENT_TYPE = 'application/json'
    JSON_ERROR_CONTENT_TYPE = 'application/problem+json'
    REPLAY_NONCE_HEADER = 'Replay-Nonce'

    def __init__(self, key, alg=jose.RS256, verify_ssl=True):
        """Initialize.

        :param key: Account private key (`.JWK`).
        :param alg: Signature algorithm (`.JWASignature`).
        :param bool verify_ssl: Verify SSL certificates?

        """
        self.key = key
        self.alg = alg
        self.verify_ssl = verify_ssl
        # Nonces harvested from server responses, consumed by POSTs.
        self._nonces = set()

    def _wrap_in_jws(self, obj, nonce):
        """Wrap `JSONDeSerializable` object in JWS.

        .. todo:: Implement ``acmePath``.

        :param .JSONDeSerializable obj:
        :param bytes nonce:

        :rtype: `.JWS`

        """
        jobj = obj.json_dumps().encode()
        logger.debug('Serialized JSON: %s', jobj)
        return jws.JWS.sign(
            payload=jobj, key=self.key, alg=self.alg,
            nonce=nonce).json_dumps()

    @classmethod
    def _check_response(cls, response, content_type=None):
        """Check response content and its type.

        .. note::
           Checking is not strict: wrong server response ``Content-Type``
           HTTP header is ignored if response is an expected JSON object
           (c.f. Boulder #56).

        :param str content_type: Expected Content-Type response header.
            If JSON is expected and not present in server response, this
            function will raise an error. Otherwise, wrong Content-Type
            is ignored, but logged.

        :raises .messages.Error: If server response body
            carries HTTP Problem (draft-ietf-appsawg-http-problem-00).
        :raises .ClientError: In case of other networking errors.

        """
        logger.debug('Received response %s (headers: %s): %r',
                     response, response.headers, response.content)
        response_ct = response.headers.get('Content-Type')
        try:
            # TODO: response.json() is called twice, once here, and
            # once in _get and _post clients
            jobj = response.json()
        except ValueError:  # body is not JSON
            jobj = None
        if not response.ok:
            if jobj is not None:
                if response_ct != cls.JSON_ERROR_CONTENT_TYPE:
                    logger.debug(
                        'Ignoring wrong Content-Type (%r) for JSON Error',
                        response_ct)
                try:
                    raise messages.Error.from_json(jobj)
                except jose.DeserializationError as error:
                    # Couldn't deserialize JSON object
                    raise errors.ClientError((response, error))
            else:
                # response is not JSON object
                raise errors.ClientError(response)
        else:
            if jobj is not None and response_ct != cls.JSON_CONTENT_TYPE:
                logger.debug(
                    'Ignoring wrong Content-Type (%r) for JSON decodable '
                    'response', response_ct)
            if content_type == cls.JSON_CONTENT_TYPE and jobj is None:
                raise errors.ClientError(
                    'Unexpected response Content-Type: {0}'.format(response_ct))
        return response

    def _send_request(self, method, url, *args, **kwargs):
        """Send HTTP request.

        Makes sure that `verify_ssl` is respected. Logs request and
        response (with headers). For allowed parameters please see
        `requests.request`.

        :param str method: method for the new `requests.Request` object
        :param str url: URL for the new `requests.Request` object

        :raises requests.exceptions.RequestException: in case of any problems

        :returns: HTTP Response
        :rtype: `requests.Response`

        """
        # Use the module-level logger (rather than the root logger via
        # logging.debug) for consistency with the rest of this class.
        logger.debug('Sending %s request to %s. args: %r, kwargs: %r',
                     method, url, args, kwargs)
        kwargs['verify'] = self.verify_ssl
        response = requests.request(method, url, *args, **kwargs)
        logger.debug('Received %s. Headers: %s. Content: %r',
                     response, response.headers, response.content)
        return response

    def head(self, *args, **kwargs):
        """Send HEAD request without checking the response.

        Note, that `_check_response` is not called, as it is expected
        that status code other than successfuly 2xx will be returned, or
        messages2.Error will be raised by the server.

        """
        return self._send_request('HEAD', *args, **kwargs)

    def get(self, url, content_type=JSON_CONTENT_TYPE, **kwargs):
        """Send GET request and check response."""
        return self._check_response(
            self._send_request('GET', url, **kwargs), content_type=content_type)

    def _add_nonce(self, response):
        # Harvest the anti-replay nonce the server attached to `response`.
        if self.REPLAY_NONCE_HEADER in response.headers:
            nonce = response.headers[self.REPLAY_NONCE_HEADER]
            try:
                decoded_nonce = jws.Header._fields['nonce'].decode(nonce)
            except jose.DeserializationError as error:
                raise errors.BadNonce(nonce, error)
            logger.debug('Storing nonce: %r', decoded_nonce)
            self._nonces.add(decoded_nonce)
        else:
            raise errors.MissingNonce(response)

    def _get_nonce(self, url):
        # Lazily fetch a nonce with a HEAD request when the pool is empty.
        if not self._nonces:
            logger.debug('Requesting fresh nonce')
            self._add_nonce(self.head(url))
        return self._nonces.pop()

    def post(self, url, obj, content_type=JSON_CONTENT_TYPE, **kwargs):
        """POST object wrapped in `.JWS` and check response."""
        data = self._wrap_in_jws(obj, self._get_nonce(url))
        response = self._send_request('POST', url, data=data, **kwargs)
        self._add_nonce(response)
        return self._check_response(response, content_type=content_type)
| {
"repo_name": "rugk/letsencrypt",
"path": "acme/acme/client.py",
"copies": "1",
"size": "21572",
"license": "apache-2.0",
"hash": 3680922643416733700,
"line_mean": 35.1946308725,
"line_max": 81,
"alpha_frac": 0.6134340812,
"autogenerated": false,
"ratio": 4.207528769260776,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.00009504174525887909,
"num_lines": 596
} |
"""ACME client API."""
import datetime
import heapq
import logging
import time
import six
from six.moves import http_client # pylint: disable=import-error
import OpenSSL
import requests
import sys
import werkzeug
from acme import errors
from acme import jose
from acme import jws
from acme import messages
logger = logging.getLogger(__name__)
# Prior to Python 2.7.9 the stdlib SSL module did not allow a user to configure
# many important security related options. On these platforms we use PyOpenSSL
# for SSL, which does allow these options to be configured.
# https://urllib3.readthedocs.org/en/latest/security.html#insecureplatformwarning
if sys.version_info < (2, 7, 9): # pragma: no cover
requests.packages.urllib3.contrib.pyopenssl.inject_into_urllib3()
class Client(object): # pylint: disable=too-many-instance-attributes
"""ACME client.
.. todo::
Clean up raised error types hierarchy, document, and handle (wrap)
instances of `.DeserializationError` raised in `from_json()`.
:ivar messages.Directory directory:
:ivar key: `.JWK` (private)
:ivar alg: `.JWASignature`
:ivar bool verify_ssl: Verify SSL certificates?
:ivar .ClientNetwork net: Client network. Useful for testing. If not
supplied, it will be initialized using `key`, `alg` and
`verify_ssl`.
"""
DER_CONTENT_TYPE = 'application/pkix-cert'
def __init__(self, directory, key, alg=jose.RS256, verify_ssl=True,
             net=None):
    """Initialize.

    :param directory: Directory Resource (`.messages.Directory`) or
        URI from which the resource will be downloaded.

    """
    self.key = key
    # `net` can be injected for testing; otherwise build one.
    self.net = ClientNetwork(key, alg, verify_ssl) if net is None else net
    # Accept either a ready Directory resource or a URI to fetch it from.
    if isinstance(directory, six.string_types):
        self.directory = messages.Directory.from_json(
            self.net.get(directory).json())
    else:
        self.directory = directory
@classmethod
def _regr_from_response(cls, response, uri=None, new_authzr_uri=None,
                        terms_of_service=None):
    """Build a `.RegistrationResource` from a server response.

    Server-provided links and headers take precedence over the
    supplied fallback values.
    """
    links = response.links
    if 'terms-of-service' in links:
        terms_of_service = links['terms-of-service']['url']
    if new_authzr_uri is None:
        try:
            new_authzr_uri = links['next']['url']
        except KeyError:
            raise errors.ClientError('"next" link missing')
    return messages.RegistrationResource(
        body=messages.Registration.from_json(response.json()),
        uri=response.headers.get('Location', uri),
        new_authzr_uri=new_authzr_uri,
        terms_of_service=terms_of_service)
def register(self, new_reg=None):
"""Register.
:param .NewRegistration new_reg:
:returns: Registration Resource.
:rtype: `.RegistrationResource`
:raises .UnexpectedUpdate:
"""
new_reg = messages.NewRegistration() if new_reg is None else new_reg
assert isinstance(new_reg, messages.NewRegistration)
response = self.net.post(self.directory[new_reg], new_reg)
# TODO: handle errors
assert response.status_code == http_client.CREATED
# "Instance of 'Field' has no key/contact member" bug:
# pylint: disable=no-member
regr = self._regr_from_response(response)
if (regr.body.key != self.key.public_key() or
regr.body.contact != new_reg.contact):
raise errors.UnexpectedUpdate(regr)
return regr
def _send_recv_regr(self, regr, body):
response = self.net.post(regr.uri, body)
# TODO: Boulder returns httplib.ACCEPTED
#assert response.status_code == httplib.OK
# TODO: Boulder does not set Location or Link on update
# (c.f. acme-spec #94)
return self._regr_from_response(
response, uri=regr.uri, new_authzr_uri=regr.new_authzr_uri,
terms_of_service=regr.terms_of_service)
def update_registration(self, regr, update=None):
"""Update registration.
:param messages.RegistrationResource regr: Registration Resource.
:param messages.Registration update: Updated body of the
resource. If not provided, body will be taken from `regr`.
:returns: Updated Registration Resource.
:rtype: `.RegistrationResource`
"""
update = regr.body if update is None else update
updated_regr = self._send_recv_regr(
regr, body=messages.UpdateRegistration(**dict(update)))
if updated_regr != regr:
raise errors.UnexpectedUpdate(regr)
return updated_regr
def query_registration(self, regr):
"""Query server about registration.
:param messages.RegistrationResource: Existing Registration
Resource.
"""
return self._send_recv_regr(regr, messages.UpdateRegistration())
def agree_to_tos(self, regr):
"""Agree to the terms-of-service.
Agree to the terms-of-service in a Registration Resource.
:param regr: Registration Resource.
:type regr: `.RegistrationResource`
:returns: Updated Registration Resource.
:rtype: `.RegistrationResource`
"""
return self.update_registration(
regr.update(body=regr.body.update(agreement=regr.terms_of_service)))
def _authzr_from_response(self, response, identifier,
uri=None, new_cert_uri=None):
# pylint: disable=no-self-use
if new_cert_uri is None:
try:
new_cert_uri = response.links['next']['url']
except KeyError:
raise errors.ClientError('"next" link missing')
authzr = messages.AuthorizationResource(
body=messages.Authorization.from_json(response.json()),
uri=response.headers.get('Location', uri),
new_cert_uri=new_cert_uri)
if authzr.body.identifier != identifier:
raise errors.UnexpectedUpdate(authzr)
return authzr
def request_challenges(self, identifier, new_authzr_uri):
"""Request challenges.
:param identifier: Identifier to be challenged.
:type identifier: `.messages.Identifier`
:param str new_authzr_uri: new-authorization URI
:returns: Authorization Resource.
:rtype: `.AuthorizationResource`
"""
new_authz = messages.NewAuthorization(identifier=identifier)
response = self.net.post(new_authzr_uri, new_authz)
# TODO: handle errors
assert response.status_code == http_client.CREATED
return self._authzr_from_response(response, identifier)
def request_domain_challenges(self, domain, new_authz_uri):
"""Request challenges for domain names.
This is simply a convenience function that wraps around
`request_challenges`, but works with domain names instead of
generic identifiers.
:param str domain: Domain name to be challenged.
:param str new_authzr_uri: new-authorization URI
:returns: Authorization Resource.
:rtype: `.AuthorizationResource`
"""
return self.request_challenges(messages.Identifier(
typ=messages.IDENTIFIER_FQDN, value=domain), new_authz_uri)
def answer_challenge(self, challb, response):
"""Answer challenge.
:param challb: Challenge Resource body.
:type challb: `.ChallengeBody`
:param response: Corresponding Challenge response
:type response: `.challenges.ChallengeResponse`
:returns: Challenge Resource with updated body.
:rtype: `.ChallengeResource`
:raises .UnexpectedUpdate:
"""
response = self.net.post(challb.uri, response)
try:
authzr_uri = response.links['up']['url']
except KeyError:
raise errors.ClientError('"up" Link header missing')
challr = messages.ChallengeResource(
authzr_uri=authzr_uri,
body=messages.ChallengeBody.from_json(response.json()))
# TODO: check that challr.uri == response.headers['Location']?
if challr.uri != challb.uri:
raise errors.UnexpectedUpdate(challr.uri)
return challr
@classmethod
def retry_after(cls, response, default):
"""Compute next `poll` time based on response ``Retry-After`` header.
:param requests.Response response: Response from `poll`.
:param int default: Default value (in seconds), used when
``Retry-After`` header is not present or invalid.
:returns: Time point when next `poll` should be performed.
:rtype: `datetime.datetime`
"""
retry_after = response.headers.get('Retry-After', str(default))
try:
seconds = int(retry_after)
except ValueError:
# pylint: disable=no-member
decoded = werkzeug.parse_date(retry_after) # RFC1123
if decoded is None:
seconds = default
else:
return decoded
return datetime.datetime.now() + datetime.timedelta(seconds=seconds)
def poll(self, authzr):
"""Poll Authorization Resource for status.
:param authzr: Authorization Resource
:type authzr: `.AuthorizationResource`
:returns: Updated Authorization Resource and HTTP response.
:rtype: (`.AuthorizationResource`, `requests.Response`)
"""
response = self.net.get(authzr.uri)
updated_authzr = self._authzr_from_response(
response, authzr.body.identifier, authzr.uri, authzr.new_cert_uri)
# TODO: check and raise UnexpectedUpdate
return updated_authzr, response
def request_issuance(self, csr, authzrs):
"""Request issuance.
:param csr: CSR
:type csr: `OpenSSL.crypto.X509Req` wrapped in `.ComparableX509`
:param authzrs: `list` of `.AuthorizationResource`
:returns: Issued certificate
:rtype: `.messages.CertificateResource`
"""
assert authzrs, "Authorizations list is empty"
logger.debug("Requesting issuance...")
# TODO: assert len(authzrs) == number of SANs
req = messages.CertificateRequest(csr=csr)
content_type = self.DER_CONTENT_TYPE # TODO: add 'cert_type 'argument
response = self.net.post(
authzrs[0].new_cert_uri, # TODO: acme-spec #90
req,
content_type=content_type,
headers={'Accept': content_type})
cert_chain_uri = response.links.get('up', {}).get('url')
try:
uri = response.headers['Location']
except KeyError:
raise errors.ClientError('"Location" Header missing')
return messages.CertificateResource(
uri=uri, authzrs=authzrs, cert_chain_uri=cert_chain_uri,
body=jose.ComparableX509(OpenSSL.crypto.load_certificate(
OpenSSL.crypto.FILETYPE_ASN1, response.content)))
def poll_and_request_issuance(
self, csr, authzrs, mintime=5, max_attempts=10):
"""Poll and request issuance.
This function polls all provided Authorization Resource URIs
until all challenges are valid, respecting ``Retry-After`` HTTP
headers, and then calls `request_issuance`.
:param .ComparableX509 csr: CSR (`OpenSSL.crypto.X509Req`
wrapped in `.ComparableX509`)
:param authzrs: `list` of `.AuthorizationResource`
:param int mintime: Minimum time before next attempt, used if
``Retry-After`` is not present in the response.
:param int max_attempts: Maximum number of attempts before
`PollError` with non-empty ``waiting`` is raised.
:returns: ``(cert, updated_authzrs)`` `tuple` where ``cert`` is
the issued certificate (`.messages.CertificateResource`),
and ``updated_authzrs`` is a `tuple` consisting of updated
Authorization Resources (`.AuthorizationResource`) as
present in the responses from server, and in the same order
as the input ``authzrs``.
:rtype: `tuple`
:raises PollError: in case of timeout or if some authorization
was marked by the CA as invalid
"""
# priority queue with datetime (based on Retry-After) as key,
# and original Authorization Resource as value
waiting = [(datetime.datetime.now(), authzr) for authzr in authzrs]
# mapping between original Authorization Resource and the most
# recently updated one
updated = dict((authzr, authzr) for authzr in authzrs)
while waiting and max_attempts:
max_attempts -= 1
# find the smallest Retry-After, and sleep if necessary
when, authzr = heapq.heappop(waiting)
now = datetime.datetime.now()
if when > now:
seconds = (when - now).seconds
logger.debug('Sleeping for %d seconds', seconds)
time.sleep(seconds)
# Note that we poll with the latest updated Authorization
# URI, which might have a different URI than initial one
updated_authzr, response = self.poll(updated[authzr])
updated[authzr] = updated_authzr
# pylint: disable=no-member
if updated_authzr.body.status not in (
messages.STATUS_VALID, messages.STATUS_INVALID):
# push back to the priority queue, with updated retry_after
heapq.heappush(waiting, (self.retry_after(
response, default=mintime), authzr))
if not max_attempts or any(authzr.body.status == messages.STATUS_INVALID
for authzr in six.itervalues(updated)):
raise errors.PollError(waiting, updated)
updated_authzrs = tuple(updated[authzr] for authzr in authzrs)
return self.request_issuance(csr, updated_authzrs), updated_authzrs
def _get_cert(self, uri):
"""Returns certificate from URI.
:param str uri: URI of certificate
:returns: tuple of the form
(response, :class:`acme.jose.ComparableX509`)
:rtype: tuple
"""
content_type = self.DER_CONTENT_TYPE # TODO: make it a param
response = self.net.get(uri, headers={'Accept': content_type},
content_type=content_type)
return response, jose.ComparableX509(OpenSSL.crypto.load_certificate(
OpenSSL.crypto.FILETYPE_ASN1, response.content))
def check_cert(self, certr):
"""Check for new cert.
:param certr: Certificate Resource
:type certr: `.CertificateResource`
:returns: Updated Certificate Resource.
:rtype: `.CertificateResource`
"""
# TODO: acme-spec 5.1 table action should be renamed to
# "refresh cert", and this method integrated with self.refresh
response, cert = self._get_cert(certr.uri)
if 'Location' not in response.headers:
raise errors.ClientError('Location header missing')
if response.headers['Location'] != certr.uri:
raise errors.UnexpectedUpdate(response.text)
return certr.update(body=cert)
def refresh(self, certr):
"""Refresh certificate.
:param certr: Certificate Resource
:type certr: `.CertificateResource`
:returns: Updated Certificate Resource.
:rtype: `.CertificateResource`
"""
# TODO: If a client sends a refresh request and the server is
# not willing to refresh the certificate, the server MUST
# respond with status code 403 (Forbidden)
return self.check_cert(certr)
def fetch_chain(self, certr, max_length=10):
"""Fetch chain for certificate.
:param .CertificateResource certr: Certificate Resource
:param int max_length: Maximum allowed length of the chain.
Note that each element in the certificate requires new
``HTTP GET`` request, and the length of the chain is
controlled by the ACME CA.
:raises errors.Error: if recursion exceeds `max_length`
:returns: Certificate chain for the Certificate Resource. It is
a list ordered so that the first element is a signer of the
certificate from Certificate Resource. Will be empty if
``cert_chain_uri`` is ``None``.
:rtype: `list` of `OpenSSL.crypto.X509` wrapped in `.ComparableX509`
"""
chain = []
uri = certr.cert_chain_uri
while uri is not None and len(chain) < max_length:
response, cert = self._get_cert(uri)
uri = response.links.get('up', {}).get('url')
chain.append(cert)
if uri is not None:
raise errors.Error(
"Recursion limit reached. Didn't get {0}".format(uri))
return chain
def revoke(self, cert):
"""Revoke certificate.
:param .ComparableX509 cert: `OpenSSL.crypto.X509` wrapped in
`.ComparableX509`
:raises .ClientError: If revocation is unsuccessful.
"""
response = self.net.post(self.directory[messages.Revocation],
messages.Revocation(certificate=cert),
content_type=None)
if response.status_code != http_client.OK:
raise errors.ClientError(
'Successful revocation must return HTTP OK status')
class ClientNetwork(object):
    """Client network.

    Wraps HTTP transport for the ACME client: signs outgoing payloads as
    JWS, manages replay nonces, and validates server responses.

    :ivar key: Account private key (`.JWK`).
    :ivar alg: Signature algorithm (`.JWASignature`).
    :ivar bool verify_ssl: Verify SSL certificates?
    :ivar str user_agent: ``User-Agent`` header sent with every request.
    """
    JSON_CONTENT_TYPE = 'application/json'
    JSON_ERROR_CONTENT_TYPE = 'application/problem+json'
    REPLAY_NONCE_HEADER = 'Replay-Nonce'

    def __init__(self, key, alg=jose.RS256, verify_ssl=True,
                 user_agent='acme-python'):
        self.key = key
        self.alg = alg
        self.verify_ssl = verify_ssl
        # Pool of anti-replay nonces harvested from server responses;
        # one is consumed by every signed POST.
        self._nonces = set()
        self.user_agent = user_agent

    def _wrap_in_jws(self, obj, nonce):
        """Wrap `JSONDeSerializable` object in JWS.

        .. todo:: Implement ``acmePath``.

        :param .JSONDeSerializable obj:
        :param bytes nonce:
        :rtype: `.JWS`
        """
        jobj = obj.json_dumps().encode()
        logger.debug('Serialized JSON: %s', jobj)
        return jws.JWS.sign(
            payload=jobj, key=self.key, alg=self.alg, nonce=nonce).json_dumps()

    @classmethod
    def _check_response(cls, response, content_type=None):
        """Check response content and its type.

        .. note::
           Checking is not strict: wrong server response ``Content-Type``
           HTTP header is ignored if response is an expected JSON object
           (c.f. Boulder #56).

        :param str content_type: Expected Content-Type response header.
            If JSON is expected and not present in server response, this
            function will raise an error. Otherwise, wrong Content-Type
            is ignored, but logged.

        :raises .messages.Error: If server response body
            carries HTTP Problem (draft-ietf-appsawg-http-problem-00).
        :raises .ClientError: In case of other networking errors.
        """
        logger.debug('Received response %s (headers: %s): %r',
                     response, response.headers, response.content)
        response_ct = response.headers.get('Content-Type')
        try:
            # TODO: response.json() is called twice, once here, and
            # once in _get and _post clients
            jobj = response.json()
        except ValueError:
            # Body is not JSON; handled below depending on response.ok.
            jobj = None
        if not response.ok:
            if jobj is not None:
                if response_ct != cls.JSON_ERROR_CONTENT_TYPE:
                    logger.debug(
                        'Ignoring wrong Content-Type (%r) for JSON Error',
                        response_ct)
                try:
                    raise messages.Error.from_json(jobj)
                except jose.DeserializationError as error:
                    # Couldn't deserialize JSON object
                    raise errors.ClientError((response, error))
            else:
                # response is not JSON object
                raise errors.ClientError(response)
        else:
            if jobj is not None and response_ct != cls.JSON_CONTENT_TYPE:
                logger.debug(
                    'Ignoring wrong Content-Type (%r) for JSON decodable '
                    'response', response_ct)
            if content_type == cls.JSON_CONTENT_TYPE and jobj is None:
                raise errors.ClientError(
                    'Unexpected response Content-Type: {0}'.format(response_ct))
        return response

    def _send_request(self, method, url, *args, **kwargs):
        """Send HTTP request.

        Makes sure that `verify_ssl` is respected. Logs request and
        response (with headers). For allowed parameters please see
        `requests.request`.

        :param str method: method for the new `requests.Request` object
        :param str url: URL for the new `requests.Request` object

        :raises requests.exceptions.RequestException: in case of any problems

        :returns: HTTP Response
        :rtype: `requests.Response`
        """
        # Use the module-level ``logger`` (not the root logger via
        # ``logging.debug``) so messages honor this package's logging setup.
        logger.debug('Sending %s request to %s. args: %r, kwargs: %r',
                     method, url, args, kwargs)
        kwargs['verify'] = self.verify_ssl
        kwargs.setdefault('headers', {})
        kwargs['headers'].setdefault('User-Agent', self.user_agent)
        response = requests.request(method, url, *args, **kwargs)
        logger.debug('Received %s. Headers: %s. Content: %r',
                     response, response.headers, response.content)
        return response

    def head(self, *args, **kwargs):
        """Send HEAD request without checking the response.

        Note, that `_check_response` is not called, as it is expected
        that status code other than successfully 2xx will be returned, or
        messages2.Error will be raised by the server.
        """
        return self._send_request('HEAD', *args, **kwargs)

    def get(self, url, content_type=JSON_CONTENT_TYPE, **kwargs):
        """Send GET request and check response."""
        return self._check_response(
            self._send_request('GET', url, **kwargs), content_type=content_type)

    def _add_nonce(self, response):
        """Store the ``Replay-Nonce`` from ``response`` for a later POST.

        :raises .BadNonce: If the nonce fails to deserialize.
        :raises .MissingNonce: If the header is absent.
        """
        if self.REPLAY_NONCE_HEADER in response.headers:
            nonce = response.headers[self.REPLAY_NONCE_HEADER]
            try:
                decoded_nonce = jws.Header._fields['nonce'].decode(nonce)
            except jose.DeserializationError as error:
                raise errors.BadNonce(nonce, error)
            logger.debug('Storing nonce: %r', decoded_nonce)
            self._nonces.add(decoded_nonce)
        else:
            raise errors.MissingNonce(response)

    def _get_nonce(self, url):
        """Pop a stored nonce, fetching a fresh one via HEAD when empty."""
        if not self._nonces:
            logger.debug('Requesting fresh nonce')
            self._add_nonce(self.head(url))
        return self._nonces.pop()

    def post(self, url, obj, content_type=JSON_CONTENT_TYPE, **kwargs):
        """POST object wrapped in `.JWS` and check response."""
        data = self._wrap_in_jws(obj, self._get_nonce(url))
        response = self._send_request('POST', url, data=data, **kwargs)
        self._add_nonce(response)
        return self._check_response(response, content_type=content_type)
| {
"repo_name": "kuba/letsencrypt",
"path": "acme/acme/client.py",
"copies": "4",
"size": "23680",
"license": "apache-2.0",
"hash": -8956500801101237000,
"line_mean": 36.2327044025,
"line_max": 81,
"alpha_frac": 0.613597973,
"autogenerated": false,
"ratio": 4.272825694695056,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.00010847572199156858,
"num_lines": 636
} |
"""ACME errors."""
from acme.jose import errors as jose_errors
class Error(Exception):
    """Generic ACME error.

    Root of the exception hierarchy in this module; catch this to handle
    any ACME-related failure.
    """
class DependencyError(Error):
    """Dependency error."""
class SchemaValidationError(jose_errors.DeserializationError):
    """JSON schema ACME object validation error.

    Derives from the jose deserialization error so existing handlers of
    `.DeserializationError` also catch it.
    """
class ClientError(Error):
    """Network error.

    Raised for networking-level failures while talking to the ACME server.
    """
class UnexpectedUpdate(ClientError):
    """Unexpected update error.

    Raised when the server's copy of a resource differs from the one the
    client requested.
    """
class NonceError(ClientError):
    """Server response nonce error.

    Base class for problems with the ``Replay-Nonce`` mechanism.
    """
class BadNonce(NonceError):
    """Raised when a replay nonce fails to deserialize.

    :ivar nonce: The offending nonce value.
    :ivar error: The underlying deserialization error.
    """
    def __init__(self, nonce, error, *args, **kwargs):
        super(BadNonce, self).__init__(*args, **kwargs)
        self.error = error
        self.nonce = nonce

    def __str__(self):
        template = 'Invalid nonce ({0!r}): {1}'
        return template.format(self.nonce, self.error)
class MissingNonce(NonceError):
    """Raised when a POST response lacks the ``Replay-Nonce`` header.

    According to the specification an "ACME server MUST include an
    Replay-Nonce header field in each successful response to a POST it
    provides to a client (...)".

    :ivar requests.Response response: HTTP Response
    """
    def __init__(self, response, *args, **kwargs):
        super(MissingNonce, self).__init__(*args, **kwargs)
        self.response = response

    def __str__(self):
        template = ('Server {0} response did not include a replay '
                    'nonce, headers: {1} (This may be a service outage)')
        return template.format(
            self.response.request.method, self.response.headers)
class PollError(ClientError):
    """Generic error when polling for authorization fails.

    Raised either on timeout (``exhausted`` will be non-empty) or when
    some authorization was reported invalid by the server.

    :ivar exhausted: Set of `.AuthorizationResource` that didn't finish
        within max allowed attempts.
    :ivar updated: Mapping from original `.AuthorizationResource`
        to the most recently updated one
    """
    def __init__(self, exhausted, updated):
        super(PollError, self).__init__()
        self.exhausted = exhausted
        self.updated = updated

    @property
    def timeout(self):
        """Was the error caused by timeout?"""
        return bool(self.exhausted)

    def __repr__(self):
        template = '{0}(exhausted={1!r}, updated={2!r})'
        return template.format(
            type(self).__name__, self.exhausted, self.updated)
| {
"repo_name": "bsmr-misc-forks/letsencrypt",
"path": "acme/acme/errors.py",
"copies": "7",
"size": "2357",
"license": "apache-2.0",
"hash": -457312797930660100,
"line_mean": 27.0595238095,
"line_max": 76,
"alpha_frac": 0.6414934238,
"autogenerated": false,
"ratio": 4.208928571428571,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.8350421995228571,
"avg_score": null,
"num_lines": null
} |
"""ACME errors."""
from acme.jose import errors as jose_errors
class Error(Exception):
    """Generic ACME error.

    Root of the exception hierarchy in this module.
    """
class SchemaValidationError(jose_errors.DeserializationError):
    """JSON schema ACME object validation error.

    Derives from the jose deserialization error so existing handlers of
    `.DeserializationError` also catch it.
    """
class ClientError(Error):
    """Network error.

    Raised for networking-level failures while talking to the ACME server.
    """
class UnexpectedUpdate(ClientError):
    """Unexpected update error.

    Raised when the server's copy of a resource differs from the one the
    client requested.
    """
class NonceError(ClientError):
    """Server response nonce error.

    Base class for problems with the ``Replay-Nonce`` mechanism.
    """
class BadNonce(NonceError):
    """Raised when a replay nonce fails to deserialize.

    :ivar nonce: The offending nonce value.
    :ivar error: The underlying deserialization error.
    """
    def __init__(self, nonce, error, *args, **kwargs):
        super(BadNonce, self).__init__(*args, **kwargs)
        self.error = error
        self.nonce = nonce

    def __str__(self):
        template = 'Invalid nonce ({0!r}): {1}'
        return template.format(self.nonce, self.error)
class MissingNonce(NonceError):
    """Raised when a POST response lacks the ``Replay-Nonce`` header.

    According to the specification an "ACME server MUST include an
    Replay-Nonce header field in each successful response to a POST it
    provides to a client (...)".

    :ivar requests.Response response: HTTP Response
    """
    def __init__(self, response, *args, **kwargs):
        super(MissingNonce, self).__init__(*args, **kwargs)
        self.response = response

    def __str__(self):
        template = ('Server {0} response did not include a replay '
                    'nonce, headers: {1}')
        return template.format(
            self.response.request.method, self.response.headers)
class PollError(ClientError):
    """Generic error when polling for authorization fails.

    Raised either on timeout (``waiting`` will be non-empty) or when
    some authorization was reported invalid by the server.

    :ivar waiting: Priority queue with `datetime.datetime` (based on
        ``Retry-After``) as key, and original `.AuthorizationResource`
        as value.
    :ivar updated: Mapping from original `.AuthorizationResource`
        to the most recently updated one
    """
    def __init__(self, waiting, updated):
        super(PollError, self).__init__()
        self.waiting = waiting
        self.updated = updated

    @property
    def timeout(self):
        """Was the error caused by timeout?"""
        return bool(self.waiting)

    def __repr__(self):
        template = '{0}(waiting={1!r}, updated={2!r})'
        return template.format(
            type(self).__name__, self.waiting, self.updated)
| {
"repo_name": "goofwear/letsencrypt",
"path": "acme/acme/errors.py",
"copies": "10",
"size": "2302",
"license": "apache-2.0",
"hash": 4331217052093213000,
"line_mean": 27.4197530864,
"line_max": 74,
"alpha_frac": 0.6350999131,
"autogenerated": false,
"ratio": 4.239410681399631,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0,
"num_lines": 81
} |
"""ACME errors."""
from josepy import errors as jose_errors
class Error(Exception):
    """Generic ACME error.

    Root of the exception hierarchy in this module; catch this to handle
    any ACME-related failure.
    """
class DependencyError(Error):
    """Dependency error."""
class SchemaValidationError(jose_errors.DeserializationError):
    """JSON schema ACME object validation error.

    Derives from the josepy deserialization error so existing handlers of
    `.DeserializationError` also catch it.
    """
class ClientError(Error):
    """Network error.

    Raised for networking-level failures while talking to the ACME server.
    """
class UnexpectedUpdate(ClientError):
    """Unexpected update error.

    Raised when the server's copy of a resource differs from the one the
    client requested.
    """
class NonceError(ClientError):
    """Server response nonce error.

    Base class for problems with the ``Replay-Nonce`` mechanism.
    """
class BadNonce(NonceError):
    """Raised when a replay nonce fails to deserialize.

    :ivar nonce: The offending nonce value.
    :ivar error: The underlying deserialization error.
    """
    def __init__(self, nonce, error, *args, **kwargs):
        super(BadNonce, self).__init__(*args, **kwargs)
        self.error = error
        self.nonce = nonce

    def __str__(self):
        template = 'Invalid nonce ({0!r}): {1}'
        return template.format(self.nonce, self.error)
class MissingNonce(NonceError):
    """Raised when a POST response lacks the ``Replay-Nonce`` header.

    According to the specification an "ACME server MUST include an
    Replay-Nonce header field in each successful response to a POST it
    provides to a client (...)".

    :ivar requests.Response response: HTTP Response
    """
    def __init__(self, response, *args, **kwargs):
        super(MissingNonce, self).__init__(*args, **kwargs)
        self.response = response

    def __str__(self):
        template = ('Server {0} response did not include a replay '
                    'nonce, headers: {1} (This may be a service outage)')
        return template.format(
            self.response.request.method, self.response.headers)
class PollError(ClientError):
    """Generic error when polling for authorization fails.

    Raised either on timeout (``exhausted`` will be non-empty) or when
    some authorization was reported invalid by the server.

    :ivar exhausted: Set of `.AuthorizationResource` that didn't finish
        within max allowed attempts.
    :ivar updated: Mapping from original `.AuthorizationResource`
        to the most recently updated one
    """
    def __init__(self, exhausted, updated):
        super(PollError, self).__init__()
        self.exhausted = exhausted
        self.updated = updated

    @property
    def timeout(self):
        """Was the error caused by timeout?"""
        return bool(self.exhausted)

    def __repr__(self):
        template = '{0}(exhausted={1!r}, updated={2!r})'
        return template.format(
            type(self).__name__, self.exhausted, self.updated)
class ValidationError(Error):
    """Error for authorization failures.

    Carries the list of authorization resources, each of which is
    invalid and should have an error field.

    :ivar failed_authzrs: `list` of failed `.AuthorizationResource`.
    """
    def __init__(self, failed_authzrs):
        super(ValidationError, self).__init__()
        self.failed_authzrs = failed_authzrs
class TimeoutError(Error):
    """Error for when polling an authorization or an order times out."""
    # NOTE(review): this name shadows the builtin ``TimeoutError`` within
    # this module.
class IssuanceError(Error):
    """Error sent by the server after requesting issuance of a certificate.

    :ivar messages.Error error: The error provided by the server.
    """
    def __init__(self, error):
        """Initialize.

        :param messages.Error error: The error provided by the server.
        """
        super(IssuanceError, self).__init__()
        self.error = error
class ConflictError(ClientError):
    """Error for when the server returns a 409 (Conflict) HTTP status.

    In the version of ACME implemented by Boulder, this is used to find an
    account if you only have the private key, but don't know the account URL.
    Also used in V2 of the ACME client for the same purpose.

    :ivar location: URI of the already-existing (conflicting) resource.
    """
    def __init__(self, location):
        super(ConflictError, self).__init__()
        self.location = location
class WildcardUnsupportedError(Error):
    """Error for when a wildcard is requested but is unsupported by the ACME CA."""
| {
"repo_name": "letsencrypt/letsencrypt",
"path": "acme/acme/errors.py",
"copies": "2",
"size": "3657",
"license": "apache-2.0",
"hash": 253334883678068600,
"line_mean": 28.9754098361,
"line_max": 79,
"alpha_frac": 0.6529942576,
"autogenerated": false,
"ratio": 4.252325581395349,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5905319838995349,
"avg_score": null,
"num_lines": null
} |
"""ACME Identifier Validation Challenges."""
import abc
import functools
import hashlib
import logging
import socket
import sys
from cryptography.hazmat.primitives import hashes # type: ignore
import josepy as jose
import OpenSSL
import requests
import six
from acme import errors
from acme import crypto_util
from acme import fields
from acme import _TLSSNI01DeprecationModule
# Module-level logger used for challenge (de)serialization diagnostics.
logger = logging.getLogger(__name__)
# pylint: disable=too-few-public-methods
class Challenge(jose.TypedJSONObjectWithFields):
    # _fields_to_partial_json | pylint: disable=abstract-method
    """ACME challenge."""
    # Registry of known challenge types, populated by @Challenge.register.
    TYPES = {} # type: dict
    @classmethod
    def from_json(cls, jobj):
        """Deserialize a challenge, tolerating unknown types.

        Unrecognized challenge types are wrapped in
        `UnrecognizedChallenge` instead of raising, so that peers may
        extend the protocol with new challenge types.
        """
        try:
            return super(Challenge, cls).from_json(jobj)
        except jose.UnrecognizedTypeError as error:
            logger.debug(error)
            return UnrecognizedChallenge.from_json(jobj)
class ChallengeResponse(jose.TypedJSONObjectWithFields):
    # _fields_to_partial_json | pylint: disable=abstract-method
    """ACME challenge response."""
    # Registry of known response types, populated by @ChallengeResponse.register.
    TYPES = {} # type: dict
    resource_type = 'challenge'
    # Fixed "resource" field carried by every challenge response payload.
    resource = fields.Resource(resource_type)
class UnrecognizedChallenge(Challenge):
    """Unrecognized challenge.
    ACME specification defines a generic framework for challenges and
    defines some standard challenges that are implemented in this
    module. However, other implementations (including peers) might
    define additional challenge types, which should be ignored if
    unrecognized.
    :ivar jobj: Original JSON decoded object.
    """
    def __init__(self, jobj):
        super(UnrecognizedChallenge, self).__init__()
        # object.__setattr__ sidesteps the base class's restricted
        # __setattr__ so the raw JSON object can be stored as-is.
        object.__setattr__(self, "jobj", jobj)
    def to_partial_json(self):
        # pylint: disable=no-member
        # Round-trip: serialize back to exactly the JSON that was received.
        return self.jobj
    @classmethod
    def from_json(cls, jobj):
        return cls(jobj)
class _TokenChallenge(Challenge):
    """Challenge with token.
    :ivar bytes token:
    """
    # Floor division keeps the size an int under Python 3, where
    # ``128 / 8`` is true division and yields the float 16.0.
    TOKEN_SIZE = 128 // 8  # Based on the entropy value from the spec
    """Minimum size of the :attr:`token` in bytes."""
    # TODO: acme-spec doesn't specify token as base64-encoded value
    token = jose.Field(
        "token", encoder=jose.encode_b64jose, decoder=functools.partial(
            jose.decode_b64jose, size=TOKEN_SIZE, minimum=True))
    # XXX: rename to ~token_good_for_url
    @property
    def good_token(self): # XXX: @token.decoder
        """Is `token` good?
        .. todo:: acme-spec wants "It MUST NOT contain any non-ASCII
            characters", but it should also warrant that it doesn't
            contain ".." or "/"...
        """
        # TODO: check that path combined with uri does not go above
        # URI_ROOT_PATH!
        return b'..' not in self.token and b'/' not in self.token
class KeyAuthorizationChallengeResponse(ChallengeResponse):
    """Response to Challenges based on Key Authorization.
    :param unicode key_authorization:
    """
    key_authorization = jose.Field("keyAuthorization")
    thumbprint_hash_function = hashes.SHA256

    def __init__(self, *args, **kwargs):
        super(KeyAuthorizationChallengeResponse, self).__init__(*args, **kwargs)
        # keyAuthorization is omitted from the JSON dump by default.
        self._dump_authorization_key(False)

    def verify(self, chall, account_public_key):
        """Verify the key authorization.
        :param KeyAuthorization chall: Challenge that corresponds to
            this response.
        :param JWK account_public_key:
        :return: ``True`` iff verification of the key authorization was
            successful.
        :rtype: bool
        """
        # Key authorization is "<token>.<base64url(key thumbprint)>".
        parts = self.key_authorization.split('.')  # pylint: disable=no-member
        if len(parts) != 2:
            logger.debug("Key authorization (%r) is not well formed",
                         self.key_authorization)
            return False

        if parts[0] != chall.encode("token"):
            logger.debug("Mismatching token in key authorization: "
                         "%r instead of %r", parts[0], chall.encode("token"))
            return False

        thumbprint = jose.b64encode(account_public_key.thumbprint(
            hash_function=self.thumbprint_hash_function)).decode()
        if parts[1] != thumbprint:
            # Bug fix: log the thumbprint part (parts[1]), not the token
            # part (parts[0]) that was previously printed here.
            logger.debug("Mismatching thumbprint in key authorization: "
                         "%r instead of %r", parts[1], thumbprint)
            return False

        return True

    def _dump_authorization_key(self, dump):
        # type: (bool) -> None
        """
        Set if keyAuthorization is dumped in the JSON representation of this ChallengeResponse.
        NB: This method is declared as private because it will eventually be removed.
        :param bool dump: True to dump the keyAuthorization, False otherwise
        """
        object.__setattr__(self, '_dump_auth_key', dump)

    def to_partial_json(self):
        """Serialize, dropping ``keyAuthorization`` unless dumping is enabled."""
        jobj = super(KeyAuthorizationChallengeResponse, self).to_partial_json()
        if not self._dump_auth_key:  # pylint: disable=no-member
            jobj.pop('keyAuthorization', None)
        return jobj
@six.add_metaclass(abc.ABCMeta)
class KeyAuthorizationChallenge(_TokenChallenge):
    # pylint: disable=abstract-class-little-used,too-many-ancestors
    """Challenge based on Key Authorization.
    :param response_cls: Subclass of `KeyAuthorizationChallengeResponse`
        that will be used to generate `response`.
    :param str typ: type of the challenge
    """
    typ = NotImplemented
    response_cls = NotImplemented
    thumbprint_hash_function = (
        KeyAuthorizationChallengeResponse.thumbprint_hash_function)
    def key_authorization(self, account_key):
        """Generate Key Authorization.
        :param JWK account_key:
        :rtype unicode:
        """
        # Key authorization is "<token>.<base64url(key thumbprint)>".
        return self.encode("token") + "." + jose.b64encode(
            account_key.thumbprint(
                hash_function=self.thumbprint_hash_function)).decode()
    def response(self, account_key):
        """Generate response to the challenge.
        :param JWK account_key:
        :returns: Response (initialized `response_cls`) to the challenge.
        :rtype: KeyAuthorizationChallengeResponse
        """
        return self.response_cls(
            key_authorization=self.key_authorization(account_key))
    @abc.abstractmethod
    def validation(self, account_key, **kwargs):
        """Generate validation for the challenge.
        Subclasses must implement this method, but they are likely to
        return completely different data structures, depending on what's
        necessary to complete the challenge. Interpretation of that
        return value must be known to the caller.
        :param JWK account_key:
        :returns: Challenge-specific validation.
        """
        raise NotImplementedError() # pragma: no cover
    def response_and_validation(self, account_key, *args, **kwargs):
        """Generate response and validation.
        Convenience function that return results of `response` and
        `validation`.
        :param JWK account_key:
        :rtype: tuple
        """
        return (self.response(account_key),
                self.validation(account_key, *args, **kwargs))
@ChallengeResponse.register
class DNS01Response(KeyAuthorizationChallengeResponse):
    """ACME dns-01 challenge response."""
    typ = "dns-01"

    def simple_verify(self, chall, domain, account_public_key):
        """Simple verify.

        This method no longer checks DNS records and is a simple wrapper
        around `KeyAuthorizationChallengeResponse.verify`.

        :param challenges.DNS01 chall: Corresponding challenge.
        :param unicode domain: Domain name being verified.
        :param JWK account_public_key: Public key for the key pair
            being authorized.
        :return: ``True`` iff verification of the key authorization was
            successful.
        :rtype: bool
        """
        # pylint: disable=unused-argument
        if self.verify(chall, account_public_key):
            return True
        logger.debug("Verification of key authorization in response failed")
        return False
@Challenge.register  # pylint: disable=too-many-ancestors
class DNS01(KeyAuthorizationChallenge):
    """ACME dns-01 challenge."""
    response_cls = DNS01Response
    typ = response_cls.typ

    LABEL = "_acme-challenge"
    """Label clients prepend to the domain name being validated."""

    def validation(self, account_key, **unused_kwargs):
        """Generate validation.

        :param JWK account_key:
        :rtype: unicode
        """
        key_authz = self.key_authorization(account_key).encode("utf-8")
        digest = hashlib.sha256(key_authz).digest()
        return jose.b64encode(digest).decode()

    def validation_domain_name(self, name):
        """Domain name for TXT validation record.

        :param unicode name: Domain name being validated.
        """
        return ".".join((self.LABEL, name))
@ChallengeResponse.register
class HTTP01Response(KeyAuthorizationChallengeResponse):
    """ACME http-01 challenge response."""
    typ = "http-01"

    PORT = 80
    """Verification port as defined by the protocol.

    You can override it (e.g. for testing) by passing ``port`` to
    `simple_verify`.
    """

    WHITESPACE_CUTSET = "\n\r\t "
    """Whitespace characters which should be ignored at the end of the body."""

    def simple_verify(self, chall, domain, account_public_key, port=None):
        """Simple verify.

        :param challenges.SimpleHTTP chall: Corresponding challenge.
        :param unicode domain: Domain name being verified.
        :param JWK account_public_key: Public key for the key pair
            being authorized.
        :param int port: Port used in the validation.

        :returns: ``True`` iff validation with the files currently served by the
            HTTP server is successful.
        :rtype: bool
        """
        # Check the key authorization itself before doing any network I/O.
        if not self.verify(chall, account_public_key):
            logger.debug("Verification of key authorization in response failed")
            return False
        # TODO: ACME specification defines URI template that doesn't
        # allow to use a custom port... Make sure port is not in the
        # request URI, if it's standard.
        if port is not None and port != self.PORT:
            logger.warning(
                "Using non-standard port for http-01 verification: %s", port)
            domain += ":{0}".format(port)
        uri = chall.uri(domain)
        logger.debug("Verifying %s at %s...", chall.typ, uri)
        try:
            # NOTE(review): no timeout is passed, so this request can block
            # indefinitely on an unresponsive server -- TODO confirm whether
            # a timeout should be added.
            http_response = requests.get(uri)
        except requests.exceptions.RequestException as error:
            logger.error("Unable to reach %s: %s", uri, error)
            return False
        logger.debug("Received %s: %s. Headers: %s", http_response,
                     http_response.text, http_response.headers)
        # Trailing whitespace in the served body is tolerated.
        challenge_response = http_response.text.rstrip(self.WHITESPACE_CUTSET)
        if self.key_authorization != challenge_response:
            logger.debug("Key authorization from response (%r) doesn't match "
                         "HTTP response (%r)", self.key_authorization,
                         challenge_response)
            return False
        return True
@Challenge.register  # pylint: disable=too-many-ancestors
class HTTP01(KeyAuthorizationChallenge):
    """ACME http-01 challenge."""
    response_cls = HTTP01Response
    typ = response_cls.typ

    URI_ROOT_PATH = ".well-known/acme-challenge"
    """URI root path for the server provisioned resource."""

    @property
    def path(self):
        """Path (starting with '/') for provisioned resource.

        :rtype: string
        """
        return "/{0}/{1}".format(self.URI_ROOT_PATH, self.encode('token'))

    def uri(self, domain):
        """Create an URI to the provisioned resource.

        Forms an URI to the HTTPS server provisioned resource
        (containing :attr:`~SimpleHTTP.token`).

        :param unicode domain: Domain name being verified.
        :rtype: string
        """
        return "".join(("http://", domain, self.path))

    def validation(self, account_key, **unused_kwargs):
        """Generate validation.

        :param JWK account_key:
        :rtype: unicode
        """
        return self.key_authorization(account_key)
@ChallengeResponse.register
class TLSSNI01Response(KeyAuthorizationChallengeResponse):
    """ACME tls-sni-01 challenge response."""
    typ = "tls-sni-01"

    DOMAIN_SUFFIX = b".acme.invalid"
    """Domain name suffix."""

    PORT = 443
    """Verification port as defined by the protocol.

    You can override it (e.g. for testing) by passing ``port`` to
    `simple_verify`.
    """

    @property
    def z(self):  # pylint: disable=invalid-name
        """``z`` value used for verification.

        :rtype bytes:
        """
        # SHA-256 hex digest of the key authorization, as lowercase bytes.
        return hashlib.sha256(
            self.key_authorization.encode("utf-8")).hexdigest().lower().encode()

    @property
    def z_domain(self):
        """Domain name used for verification, generated from `z`.

        :rtype bytes:
        """
        # First 32 hex chars, '.', remaining 32 hex chars, ".acme.invalid".
        return self.z[:32] + b'.' + self.z[32:] + self.DOMAIN_SUFFIX

    def gen_cert(self, key=None, bits=2048):
        """Generate tls-sni-01 certificate.

        :param OpenSSL.crypto.PKey key: Optional private key used in
            certificate generation. If not provided (``None``), then
            fresh key will be generated.
        :param int bits: Number of bits for newly generated key.

        :rtype: `tuple` of `OpenSSL.crypto.X509` and `OpenSSL.crypto.PKey`
        """
        if key is None:
            key = OpenSSL.crypto.PKey()
            key.generate_key(OpenSSL.crypto.TYPE_RSA, bits)
        return crypto_util.gen_ss_cert(key, [
            # z_domain is too big to fit into CN, hence first dummy domain
            'dummy', self.z_domain.decode()], force_san=True), key

    def probe_cert(self, domain, **kwargs):
        """Probe tls-sni-01 challenge certificate.

        :param unicode domain:
        """
        # TODO: domain is not necessary if host is provided
        if "host" not in kwargs:
            host = socket.gethostbyname(domain)
            logger.debug('%s resolved to %s', domain, host)
            kwargs["host"] = host
        kwargs.setdefault("port", self.PORT)
        # SNI name the probed server is expected to answer for.
        kwargs["name"] = self.z_domain
        # TODO: try different methods?
        # pylint: disable=protected-access
        return crypto_util.probe_sni(**kwargs)

    def verify_cert(self, cert):
        """Verify tls-sni-01 challenge certificate.

        :param OpensSSL.crypto.X509 cert: Challenge certificate.

        :returns: Whether the certificate was successfully verified.
        :rtype: bool
        """
        # pylint: disable=protected-access
        sans = crypto_util._pyopenssl_cert_or_req_san(cert)
        logger.debug('Certificate %s. SANs: %s', cert.digest('sha256'), sans)
        # Valid iff the expected z_domain appears among the subjectAltNames.
        return self.z_domain.decode() in sans

    def simple_verify(self, chall, domain, account_public_key,
                      cert=None, **kwargs):
        """Simple verify.

        Verify ``validation`` using ``account_public_key``, optionally
        probe tls-sni-01 certificate and check using `verify_cert`.

        :param .challenges.TLSSNI01 chall: Corresponding challenge.
        :param str domain: Domain name being validated.
        :param JWK account_public_key:
        :param OpenSSL.crypto.X509 cert: Optional certificate. If not
            provided (``None``) certificate will be retrieved using
            `probe_cert`.
        :param int port: Port used to probe the certificate.

        :returns: ``True`` iff client's control of the domain has been
            verified.
        :rtype: bool
        """
        # Key authorization is checked before any network probing.
        if not self.verify(chall, account_public_key):
            logger.debug("Verification of key authorization in response failed")
            return False
        if cert is None:
            try:
                cert = self.probe_cert(domain=domain, **kwargs)
            except errors.Error as error:
                logger.debug(str(error), exc_info=True)
                return False
        return self.verify_cert(cert)
@Challenge.register  # pylint: disable=too-many-ancestors
class TLSSNI01(KeyAuthorizationChallenge):
    """ACME tls-sni-01 challenge."""
    response_cls = TLSSNI01Response
    typ = response_cls.typ

    # boulder#962, ietf-wg-acme#22
    #n = jose.Field("n", encoder=int, decoder=int)

    # NOTE: a redundant __init__ that only delegated to super() with the
    # same arguments was removed (useless-super-delegation); behavior is
    # unchanged.

    def validation(self, account_key, **kwargs):
        """Generate validation.

        :param JWK account_key:
        :param OpenSSL.crypto.PKey cert_key: Optional private key used
            in certificate generation. If not provided (``None``), then
            fresh key will be generated.

        :rtype: `tuple` of `OpenSSL.crypto.X509` and `OpenSSL.crypto.PKey`
        """
        return self.response(account_key).gen_cert(key=kwargs.get('cert_key'))
@ChallengeResponse.register
class TLSALPN01Response(KeyAuthorizationChallengeResponse):
    """ACME TLS-ALPN-01 challenge response.

    This class only allows initiating a TLS-ALPN-01 challenge returned from the
    CA. Full support for responding to TLS-ALPN-01 challenges by generating and
    serving the expected response certificate is not currently provided.
    """
    # Challenge type identifier used for (de)serialization dispatch.
    typ = "tls-alpn-01"
@Challenge.register  # pylint: disable=too-many-ancestors
class TLSALPN01(KeyAuthorizationChallenge):
    """ACME tls-alpn-01 challenge.

    This class simply allows parsing the TLS-ALPN-01 challenge returned from
    the CA. Full TLS-ALPN-01 support is not currently provided.
    """
    typ = "tls-alpn-01"
    response_cls = TLSALPN01Response

    def validation(self, account_key, **kwargs):
        """Generate validation for the challenge."""
        # Deliberately unsupported: this is a parsing-only implementation.
        raise NotImplementedError()
@Challenge.register  # pylint: disable=too-many-ancestors
class DNS(_TokenChallenge):
    """ACME "dns" challenge."""
    typ = "dns"

    LABEL = "_acme-challenge"
    """Label clients prepend to the domain name being validated."""

    def gen_validation(self, account_key, alg=jose.RS256, **kwargs):
        """Generate validation.

        :param .JWK account_key: Private account key.
        :param .JWA alg:

        :returns: This challenge wrapped in `.JWS`
        :rtype: .JWS
        """
        payload = self.json_dumps(sort_keys=True).encode('utf-8')
        return jose.JWS.sign(payload=payload, key=account_key, alg=alg,
                             **kwargs)

    def check_validation(self, validation, account_public_key):
        """Check validation.

        :param JWS validation:
        :param JWK account_public_key:
        :rtype: bool
        """
        if not validation.verify(key=account_public_key):
            return False
        try:
            return self == self.json_loads(
                validation.payload.decode('utf-8'))
        except jose.DeserializationError as error:
            logger.debug("Checking validation for DNS failed: %s", error)
            return False

    def gen_response(self, account_key, **kwargs):
        """Generate response.

        :param .JWK account_key: Private account key.
        :param .JWA alg:
        :rtype: DNSResponse
        """
        signed = self.gen_validation(account_key, **kwargs)
        return DNSResponse(validation=signed)

    def validation_domain_name(self, name):
        """Domain name for TXT validation record.

        :param unicode name: Domain name being validated.
        """
        return ".".join((self.LABEL, name))
@ChallengeResponse.register
class DNSResponse(ChallengeResponse):
    """ACME "dns" challenge response.

    :param JWS validation:
    """
    typ = "dns"

    validation = jose.Field("validation", decoder=jose.JWS.from_json)

    def check_validation(self, chall, account_public_key):
        """Check validation.

        Delegates to `DNS.check_validation` on the paired challenge.

        :param challenges.DNS chall:
        :param JWK account_public_key:
        :rtype: bool
        """
        return chall.check_validation(self.validation, account_public_key)
# Patching ourselves to warn about TLS-SNI challenge deprecation and removal.
# NOTE(review): _TLSSNI01DeprecationModule is defined elsewhere in this file;
# it presumably wraps the module object so attribute access to deprecated
# TLS-SNI-01 names can emit a warning -- confirm at its definition.
sys.modules[__name__] = _TLSSNI01DeprecationModule(sys.modules[__name__])
| {
"repo_name": "letsencrypt/letsencrypt",
"path": "acme/acme/challenges.py",
"copies": "1",
"size": "20341",
"license": "apache-2.0",
"hash": -8530197909797558000,
"line_mean": 30.4876160991,
"line_max": 95,
"alpha_frac": 0.632958065,
"autogenerated": false,
"ratio": 4.231537341377158,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.00017914317469773966,
"num_lines": 646
} |
"""ACME Identifier Validation Challenges."""
import abc
import functools
import hashlib
import logging
import socket
from cryptography.hazmat.primitives import hashes
import OpenSSL
import requests
from acme import dns_resolver
from acme import errors
from acme import crypto_util
from acme import fields
from acme import jose
logger = logging.getLogger(__name__)
# pylint: disable=too-few-public-methods
class Challenge(jose.TypedJSONObjectWithFields):
    # _fields_to_partial_json | pylint: disable=abstract-method
    """ACME challenge."""
    # Presumably the registry of known challenge types, filled in via the
    # `Challenge.register` decorator used on subclasses below.
    TYPES = {}

    @classmethod
    def from_json(cls, jobj):
        # Unknown challenge types degrade to `UnrecognizedChallenge`
        # instead of failing deserialization.
        try:
            return super(Challenge, cls).from_json(jobj)
        except jose.UnrecognizedTypeError as error:
            logger.debug(error)
            return UnrecognizedChallenge.from_json(jobj)
class ChallengeResponse(jose.TypedJSONObjectWithFields):
    # _fields_to_partial_json | pylint: disable=abstract-method
    """ACME challenge response."""
    # Registry of known response types (counterpart of `Challenge.TYPES`).
    TYPES = {}
    resource_type = 'challenge'
    resource = fields.Resource(resource_type)
class UnrecognizedChallenge(Challenge):
    """Unrecognized challenge.

    ACME specification defines a generic framework for challenges and
    defines some standard challenges that are implemented in this
    module. However, other implementations (including peers) might
    define additional challenge types, which should be ignored if
    unrecognized.

    :ivar jobj: Original JSON decoded object.
    """

    def __init__(self, jobj):
        super(UnrecognizedChallenge, self).__init__()
        # object.__setattr__ bypasses the base class's __setattr__,
        # presumably because instances are immutable -- TODO confirm
        # against the jose base class.
        object.__setattr__(self, "jobj", jobj)

    def to_partial_json(self):
        # pylint: disable=no-member
        # Round-trip: serialize back to the original JSON object unchanged.
        return self.jobj

    @classmethod
    def from_json(cls, jobj):
        return cls(jobj)
class _TokenChallenge(Challenge):
    """Challenge with token.

    :ivar bytes token:
    """
    # Bug fix: use floor division so TOKEN_SIZE stays an int under
    # Python 3 (128 / 8 is the float 16.0 with true division); the value
    # is passed as ``size`` to the b64 decoder below.
    TOKEN_SIZE = 128 // 8  # Based on the entropy value from the spec
    """Minimum size of the :attr:`token` in bytes."""

    # TODO: acme-spec doesn't specify token as base64-encoded value
    token = jose.Field(
        "token", encoder=jose.encode_b64jose, decoder=functools.partial(
            jose.decode_b64jose, size=TOKEN_SIZE, minimum=True))

    # XXX: rename to ~token_good_for_url
    @property
    def good_token(self):  # XXX: @token.decoder
        """Is `token` good?

        .. todo:: acme-spec wants "It MUST NOT contain any non-ASCII
           characters", but it should also warrant that it doesn't
           contain ".." or "/"...
        """
        # TODO: check that path combined with uri does not go above
        # URI_ROOT_PATH!
        return b'..' not in self.token and b'/' not in self.token
class KeyAuthorizationChallengeResponse(ChallengeResponse):
    """Response to Challenges based on Key Authorization.

    :param unicode key_authorization:
    """
    key_authorization = jose.Field("keyAuthorization")
    thumbprint_hash_function = hashes.SHA256

    def verify(self, chall, account_public_key):
        """Verify the key authorization.

        :param KeyAuthorization chall: Challenge that corresponds to
            this response.
        :param JWK account_public_key:

        :return: ``True`` iff verification of the key authorization was
            successful.
        :rtype: bool
        """
        # Key authorization format: "<token>.<thumbprint>".
        parts = self.key_authorization.split('.')  # pylint: disable=no-member
        if len(parts) != 2:
            logger.debug("Key authorization (%r) is not well formed",
                         self.key_authorization)
            return False

        if parts[0] != chall.encode("token"):
            logger.debug("Mismatching token in key authorization: "
                         "%r instead of %r", parts[0], chall.encode("token"))
            return False

        thumbprint = jose.b64encode(account_public_key.thumbprint(
            hash_function=self.thumbprint_hash_function)).decode()
        if parts[1] != thumbprint:
            # Bug fix: log the thumbprint part (parts[1]), not the token
            # part (parts[0]), when the thumbprint comparison fails.
            logger.debug("Mismatching thumbprint in key authorization: "
                         "%r instead of %r", parts[1], thumbprint)
            return False

        return True
class KeyAuthorizationChallenge(_TokenChallenge):
    # pylint: disable=abstract-class-little-used,too-many-ancestors
    """Challenge based on Key Authorization.

    :param response_cls: Subclass of `KeyAuthorizationChallengeResponse`
        that will be used to generate `response`.
    """
    # NOTE(review): __metaclass__ only has an effect under Python 2; on
    # Python 3 this class would not actually be abstract.
    __metaclass__ = abc.ABCMeta
    response_cls = NotImplemented
    thumbprint_hash_function = (
        KeyAuthorizationChallengeResponse.thumbprint_hash_function)

    def key_authorization(self, account_key):
        """Generate Key Authorization.

        :param JWK account_key:
        :rtype unicode:
        """
        # "<token>.<base64 thumbprint of account key>"
        return self.encode("token") + "." + jose.b64encode(
            account_key.thumbprint(
                hash_function=self.thumbprint_hash_function)).decode()

    def response(self, account_key):
        """Generate response to the challenge.

        :param JWK account_key:

        :returns: Response (initialized `response_cls`) to the challenge.
        :rtype: KeyAuthorizationChallengeResponse
        """
        return self.response_cls(
            key_authorization=self.key_authorization(account_key))

    @abc.abstractmethod
    def validation(self, account_key, **kwargs):
        """Generate validation for the challenge.

        Subclasses must implement this method, but they are likely to
        return completely different data structures, depending on what's
        necessary to complete the challenge. Interpretation of that
        return value must be known to the caller.

        :param JWK account_key:

        :returns: Challenge-specific validation.
        """
        raise NotImplementedError()  # pragma: no cover

    def response_and_validation(self, account_key, *args, **kwargs):
        """Generate response and validation.

        Convenience function that return results of `response` and
        `validation`.

        :param JWK account_key:

        :rtype: tuple
        """
        return (self.response(account_key),
                self.validation(account_key, *args, **kwargs))
@ChallengeResponse.register
class DNS01Response(KeyAuthorizationChallengeResponse):
    """ACME dns-01 challenge response."""
    typ = "dns-01"

    def simple_verify(self, chall, domain, account_public_key):
        """Simple verify.

        :param challenges.DNS01 chall: Corresponding challenge.
        :param unicode domain: Domain name being verified.
        :param JWK account_public_key: Public key for the key pair
            being authorized.

        :returns: ``True`` iff validation with the TXT records resolved from a
            DNS server is successful.
        :rtype: bool
        """
        # Check the key authorization itself before doing any DNS I/O.
        if not self.verify(chall, account_public_key):
            logger.debug("Verification of key authorization in response failed")
            return False
        validation_domain_name = chall.validation_domain_name(domain)
        validation = chall.validation(account_public_key)
        logger.debug("Verifying %s at %s...", chall.typ, validation_domain_name)
        try:
            txt_records = dns_resolver.txt_records_for_name(
                validation_domain_name)
        except errors.DependencyError:
            # Re-raise with a hint about the optional 'dnspython' dependency.
            raise errors.DependencyError("Local validation for 'dns-01' "
                                         "challenges requires 'dnspython'")
        exists = validation in txt_records
        if not exists:
            logger.debug("Key authorization from response (%r) doesn't match "
                         "any DNS response in %r", self.key_authorization,
                         txt_records)
        return exists
@Challenge.register  # pylint: disable=too-many-ancestors
class DNS01(KeyAuthorizationChallenge):
    """ACME dns-01 challenge."""
    response_cls = DNS01Response
    typ = response_cls.typ

    LABEL = "_acme-challenge"
    """Label clients prepend to the domain name being validated."""

    def validation(self, account_key, **unused_kwargs):
        """Generate validation.

        :param JWK account_key:
        :rtype: unicode
        """
        key_authz = self.key_authorization(account_key).encode("utf-8")
        digest = hashlib.sha256(key_authz).digest()
        return jose.b64encode(digest).decode()

    def validation_domain_name(self, name):
        """Domain name for TXT validation record.

        :param unicode name: Domain name being validated.
        """
        return ".".join((self.LABEL, name))
@ChallengeResponse.register
class HTTP01Response(KeyAuthorizationChallengeResponse):
    """ACME http-01 challenge response."""
    typ = "http-01"

    PORT = 80
    """Verification port as defined by the protocol.

    You can override it (e.g. for testing) by passing ``port`` to
    `simple_verify`.
    """

    WHITESPACE_CUTSET = "\n\r\t "
    """Whitespace characters which should be ignored at the end of the body."""

    def simple_verify(self, chall, domain, account_public_key, port=None):
        """Simple verify.

        :param challenges.SimpleHTTP chall: Corresponding challenge.
        :param unicode domain: Domain name being verified.
        :param JWK account_public_key: Public key for the key pair
            being authorized.
        :param int port: Port used in the validation.

        :returns: ``True`` iff validation with the files currently served by the
            HTTP server is successful.
        :rtype: bool
        """
        # Check the key authorization itself before doing any network I/O.
        if not self.verify(chall, account_public_key):
            logger.debug("Verification of key authorization in response failed")
            return False
        # TODO: ACME specification defines URI template that doesn't
        # allow to use a custom port... Make sure port is not in the
        # request URI, if it's standard.
        if port is not None and port != self.PORT:
            logger.warning(
                "Using non-standard port for http-01 verification: %s", port)
            domain += ":{0}".format(port)
        uri = chall.uri(domain)
        logger.debug("Verifying %s at %s...", chall.typ, uri)
        try:
            # NOTE(review): no timeout is passed, so this request can block
            # indefinitely on an unresponsive server -- TODO confirm whether
            # a timeout should be added.
            http_response = requests.get(uri)
        except requests.exceptions.RequestException as error:
            logger.error("Unable to reach %s: %s", uri, error)
            return False
        logger.debug("Received %s: %s. Headers: %s", http_response,
                     http_response.text, http_response.headers)
        # Trailing whitespace in the served body is tolerated.
        challenge_response = http_response.text.rstrip(self.WHITESPACE_CUTSET)
        if self.key_authorization != challenge_response:
            logger.debug("Key authorization from response (%r) doesn't match "
                         "HTTP response (%r)", self.key_authorization,
                         challenge_response)
            return False
        return True
@Challenge.register  # pylint: disable=too-many-ancestors
class HTTP01(KeyAuthorizationChallenge):
    """ACME http-01 challenge."""
    response_cls = HTTP01Response
    typ = response_cls.typ

    URI_ROOT_PATH = ".well-known/acme-challenge"
    """URI root path for the server provisioned resource."""

    @property
    def path(self):
        """Path (starting with '/') for provisioned resource.

        :rtype: string
        """
        return "/{0}/{1}".format(self.URI_ROOT_PATH, self.encode('token'))

    def uri(self, domain):
        """Create an URI to the provisioned resource.

        Forms an URI to the HTTPS server provisioned resource
        (containing :attr:`~SimpleHTTP.token`).

        :param unicode domain: Domain name being verified.
        :rtype: string
        """
        return "".join(("http://", domain, self.path))

    def validation(self, account_key, **unused_kwargs):
        """Generate validation.

        :param JWK account_key:
        :rtype: unicode
        """
        return self.key_authorization(account_key)
@ChallengeResponse.register
class TLSSNI01Response(KeyAuthorizationChallengeResponse):
    """ACME tls-sni-01 challenge response."""
    typ = "tls-sni-01"

    DOMAIN_SUFFIX = b".acme.invalid"
    """Domain name suffix."""

    PORT = 443
    """Verification port as defined by the protocol.

    You can override it (e.g. for testing) by passing ``port`` to
    `simple_verify`.
    """

    @property
    def z(self):  # pylint: disable=invalid-name
        """``z`` value used for verification.

        :rtype bytes:
        """
        # SHA-256 hex digest of the key authorization, as lowercase bytes.
        return hashlib.sha256(
            self.key_authorization.encode("utf-8")).hexdigest().lower().encode()

    @property
    def z_domain(self):
        """Domain name used for verification, generated from `z`.

        :rtype bytes:
        """
        # First 32 hex chars, '.', remaining 32 hex chars, ".acme.invalid".
        return self.z[:32] + b'.' + self.z[32:] + self.DOMAIN_SUFFIX

    def gen_cert(self, key=None, bits=2048):
        """Generate tls-sni-01 certificate.

        :param OpenSSL.crypto.PKey key: Optional private key used in
            certificate generation. If not provided (``None``), then
            fresh key will be generated.
        :param int bits: Number of bits for newly generated key.

        :rtype: `tuple` of `OpenSSL.crypto.X509` and `OpenSSL.crypto.PKey`
        """
        if key is None:
            key = OpenSSL.crypto.PKey()
            key.generate_key(OpenSSL.crypto.TYPE_RSA, bits)
        return crypto_util.gen_ss_cert(key, [
            # z_domain is too big to fit into CN, hence first dummy domain
            'dummy', self.z_domain.decode()], force_san=True), key

    def probe_cert(self, domain, **kwargs):
        """Probe tls-sni-01 challenge certificate.

        :param unicode domain:
        """
        # TODO: domain is not necessary if host is provided
        if "host" not in kwargs:
            host = socket.gethostbyname(domain)
            # Bug fix: use the module-level ``logger`` instead of the root
            # logger (logging.debug), consistent with the rest of the module.
            logger.debug('%s resolved to %s', domain, host)
            kwargs["host"] = host
        kwargs.setdefault("port", self.PORT)
        kwargs["name"] = self.z_domain
        # TODO: try different methods?
        # pylint: disable=protected-access
        return crypto_util.probe_sni(**kwargs)

    def verify_cert(self, cert):
        """Verify tls-sni-01 challenge certificate.

        :param OpensSSL.crypto.X509 cert: Challenge certificate.

        :returns: Whether the certificate was successfully verified.
        :rtype: bool
        """
        # pylint: disable=protected-access
        sans = crypto_util._pyopenssl_cert_or_req_san(cert)
        # Bug fix: module-level ``logger`` instead of the root logger.
        logger.debug('Certificate %s. SANs: %s', cert.digest('sha1'), sans)
        # Valid iff the expected z_domain appears among the subjectAltNames.
        return self.z_domain.decode() in sans

    def simple_verify(self, chall, domain, account_public_key,
                      cert=None, **kwargs):
        """Simple verify.

        Verify ``validation`` using ``account_public_key``, optionally
        probe tls-sni-01 certificate and check using `verify_cert`.

        :param .challenges.TLSSNI01 chall: Corresponding challenge.
        :param str domain: Domain name being validated.
        :param JWK account_public_key:
        :param OpenSSL.crypto.X509 cert: Optional certificate. If not
            provided (``None``) certificate will be retrieved using
            `probe_cert`.
        :param int port: Port used to probe the certificate.

        :returns: ``True`` iff client's control of the domain has been
            verified.
        :rtype: bool
        """
        if not self.verify(chall, account_public_key):
            logger.debug("Verification of key authorization in response failed")
            return False
        if cert is None:
            try:
                cert = self.probe_cert(domain=domain, **kwargs)
            except errors.Error as error:
                logger.debug(error, exc_info=True)
                return False
        return self.verify_cert(cert)
@Challenge.register  # pylint: disable=too-many-ancestors
class TLSSNI01(KeyAuthorizationChallenge):
    """ACME tls-sni-01 challenge."""
    response_cls = TLSSNI01Response
    typ = response_cls.typ

    # boulder#962, ietf-wg-acme#22
    #n = jose.Field("n", encoder=int, decoder=int)

    def validation(self, account_key, **kwargs):
        """Generate validation.

        :param JWK account_key:
        :param OpenSSL.crypto.PKey cert_key: Optional private key used
            in certificate generation. If not provided (``None``), then
            fresh key will be generated.

        :rtype: `tuple` of `OpenSSL.crypto.X509` and `OpenSSL.crypto.PKey`
        """
        cert_key = kwargs.get('cert_key')
        return self.response(account_key).gen_cert(key=cert_key)
@Challenge.register  # pylint: disable=too-many-ancestors
class DNS(_TokenChallenge):
    """ACME "dns" challenge."""
    typ = "dns"

    LABEL = "_acme-challenge"
    """Label clients prepend to the domain name being validated."""

    def gen_validation(self, account_key, alg=jose.RS256, **kwargs):
        """Generate validation.

        :param .JWK account_key: Private account key.
        :param .JWA alg:

        :returns: This challenge wrapped in `.JWS`
        :rtype: .JWS
        """
        # The signed payload is this challenge's canonical (sorted-key)
        # JSON serialization.
        return jose.JWS.sign(
            payload=self.json_dumps(sort_keys=True).encode('utf-8'),
            key=account_key, alg=alg, **kwargs)

    def check_validation(self, validation, account_public_key):
        """Check validation.

        :param JWS validation:
        :param JWK account_public_key:

        :rtype: bool
        """
        if not validation.verify(key=account_public_key):
            return False
        try:
            # The signed payload must deserialize back to this exact
            # challenge.
            return self == self.json_loads(
                validation.payload.decode('utf-8'))
        except jose.DeserializationError as error:
            logger.debug("Checking validation for DNS failed: %s", error)
            return False

    def gen_response(self, account_key, **kwargs):
        """Generate response.

        :param .JWK account_key: Private account key.
        :param .JWA alg:

        :rtype: DNSResponse
        """
        return DNSResponse(validation=self.gen_validation(
            account_key, **kwargs))

    def validation_domain_name(self, name):
        """Domain name for TXT validation record.

        :param unicode name: Domain name being validated.
        """
        return "{0}.{1}".format(self.LABEL, name)
@ChallengeResponse.register
class DNSResponse(ChallengeResponse):
    """ACME "dns" challenge response.

    :param JWS validation:
    """
    typ = "dns"

    validation = jose.Field("validation", decoder=jose.JWS.from_json)

    def check_validation(self, chall, account_public_key):
        """Check validation.

        Delegates to `DNS.check_validation` on the paired challenge.

        :param challenges.DNS chall:
        :param JWK account_public_key:
        :rtype: bool
        """
        return chall.check_validation(self.validation, account_public_key)
| {
"repo_name": "bsmr-misc-forks/letsencrypt",
"path": "acme/acme/challenges.py",
"copies": "3",
"size": "18834",
"license": "apache-2.0",
"hash": -9173462830768785000,
"line_mean": 30.2857142857,
"line_max": 80,
"alpha_frac": 0.6233407667,
"autogenerated": false,
"ratio": 4.2678450033990485,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.6391185770099048,
"avg_score": null,
"num_lines": null
} |
"""ACME Identifier Validation Challenges."""
import abc
import functools
import hashlib
import logging
import socket
from cryptography.hazmat.primitives import hashes # type: ignore
import OpenSSL
import requests
from acme import errors
from acme import crypto_util
from acme import fields
from acme import jose
logger = logging.getLogger(__name__)
# pylint: disable=too-few-public-methods
class Challenge(jose.TypedJSONObjectWithFields):
    # _fields_to_partial_json | pylint: disable=abstract-method
    """ACME challenge."""
    # Presumably the registry of known challenge types, filled in via the
    # `Challenge.register` decorator used on subclasses below.
    TYPES = {}  # type: dict

    @classmethod
    def from_json(cls, jobj):
        # Unknown challenge types degrade to `UnrecognizedChallenge`
        # instead of failing deserialization.
        try:
            return super(Challenge, cls).from_json(jobj)
        except jose.UnrecognizedTypeError as error:
            logger.debug(error)
            return UnrecognizedChallenge.from_json(jobj)
class ChallengeResponse(jose.TypedJSONObjectWithFields):
    # _fields_to_partial_json | pylint: disable=abstract-method
    """ACME challenge response."""
    # Registry of known response types (counterpart of `Challenge.TYPES`).
    TYPES = {}  # type: dict
    resource_type = 'challenge'
    resource = fields.Resource(resource_type)
class UnrecognizedChallenge(Challenge):
    """Unrecognized challenge.

    ACME specification defines a generic framework for challenges and
    defines some standard challenges that are implemented in this
    module. However, other implementations (including peers) might
    define additional challenge types, which should be ignored if
    unrecognized.

    :ivar jobj: Original JSON decoded object.
    """

    def __init__(self, jobj):
        super(UnrecognizedChallenge, self).__init__()
        # object.__setattr__ bypasses the base class's __setattr__,
        # presumably because instances are immutable -- TODO confirm
        # against the jose base class.
        object.__setattr__(self, "jobj", jobj)

    def to_partial_json(self):
        # pylint: disable=no-member
        # Round-trip: serialize back to the original JSON object unchanged.
        return self.jobj

    @classmethod
    def from_json(cls, jobj):
        return cls(jobj)
class _TokenChallenge(Challenge):
    """Challenge with token.

    :ivar bytes token:
    """
    # Bug fix: use floor division so TOKEN_SIZE stays an int under
    # Python 3 (128 / 8 is the float 16.0 with true division); the value
    # is passed as ``size`` to the b64 decoder below.
    TOKEN_SIZE = 128 // 8  # Based on the entropy value from the spec
    """Minimum size of the :attr:`token` in bytes."""

    # TODO: acme-spec doesn't specify token as base64-encoded value
    token = jose.Field(
        "token", encoder=jose.encode_b64jose, decoder=functools.partial(
            jose.decode_b64jose, size=TOKEN_SIZE, minimum=True))

    # XXX: rename to ~token_good_for_url
    @property
    def good_token(self):  # XXX: @token.decoder
        """Is `token` good?

        .. todo:: acme-spec wants "It MUST NOT contain any non-ASCII
           characters", but it should also warrant that it doesn't
           contain ".." or "/"...
        """
        # TODO: check that path combined with uri does not go above
        # URI_ROOT_PATH!
        return b'..' not in self.token and b'/' not in self.token
class KeyAuthorizationChallengeResponse(ChallengeResponse):
    """Response to Challenges based on Key Authorization.

    :param unicode key_authorization:
    """
    key_authorization = jose.Field("keyAuthorization")
    thumbprint_hash_function = hashes.SHA256

    def verify(self, chall, account_public_key):
        """Verify the key authorization.

        :param KeyAuthorization chall: Challenge that corresponds to
            this response.
        :param JWK account_public_key:

        :return: ``True`` iff verification of the key authorization was
            successful.
        :rtype: bool
        """
        # Key authorization format: "<token>.<thumbprint>".
        parts = self.key_authorization.split('.')  # pylint: disable=no-member
        if len(parts) != 2:
            logger.debug("Key authorization (%r) is not well formed",
                         self.key_authorization)
            return False

        if parts[0] != chall.encode("token"):
            logger.debug("Mismatching token in key authorization: "
                         "%r instead of %r", parts[0], chall.encode("token"))
            return False

        thumbprint = jose.b64encode(account_public_key.thumbprint(
            hash_function=self.thumbprint_hash_function)).decode()
        if parts[1] != thumbprint:
            # Bug fix: log the thumbprint part (parts[1]), not the token
            # part (parts[0]), when the thumbprint comparison fails.
            logger.debug("Mismatching thumbprint in key authorization: "
                         "%r instead of %r", parts[1], thumbprint)
            return False

        return True
class KeyAuthorizationChallenge(_TokenChallenge):
    # pylint: disable=abstract-class-little-used,too-many-ancestors
    """Base for challenges that are proven via a Key Authorization string.

    :param response_cls: Subclass of `KeyAuthorizationChallengeResponse`
        that will be used to generate `response`.
    """
    __metaclass__ = abc.ABCMeta

    response_cls = NotImplemented
    thumbprint_hash_function = (
        KeyAuthorizationChallengeResponse.thumbprint_hash_function)

    def key_authorization(self, account_key):
        """Generate Key Authorization.

        :param JWK account_key:
        :rtype unicode:
        """
        token_part = self.encode("token")
        thumbprint_part = jose.b64encode(account_key.thumbprint(
            hash_function=self.thumbprint_hash_function)).decode()
        return token_part + "." + thumbprint_part

    def response(self, account_key):
        """Generate response to the challenge.

        :param JWK account_key:
        :returns: Response (initialized `response_cls`) to the challenge.
        :rtype: KeyAuthorizationChallengeResponse
        """
        key_authorization = self.key_authorization(account_key)
        return self.response_cls(key_authorization=key_authorization)

    @abc.abstractmethod
    def validation(self, account_key, **kwargs):
        """Generate validation for the challenge.

        Subclasses must implement this method, but they are likely to
        return completely different data structures, depending on what's
        necessary to complete the challenge. Interpretation of that
        return value must be known to the caller.

        :param JWK account_key:
        :returns: Challenge-specific validation.
        """
        raise NotImplementedError()  # pragma: no cover

    def response_and_validation(self, account_key, *args, **kwargs):
        """Generate response and validation.

        Convenience function that return results of `response` and
        `validation`.

        :param JWK account_key:
        :rtype: tuple
        """
        response = self.response(account_key)
        validation = self.validation(account_key, *args, **kwargs)
        return (response, validation)
@ChallengeResponse.register
class DNS01Response(KeyAuthorizationChallengeResponse):
    """ACME dns-01 challenge response."""
    typ = "dns-01"

    def simple_verify(self, chall, domain, account_public_key):
        """Simple verify.

        This method no longer checks DNS records and is a simple wrapper
        around `KeyAuthorizationChallengeResponse.verify`.

        :param challenges.DNS01 chall: Corresponding challenge.
        :param unicode domain: Domain name being verified.
        :param JWK account_public_key: Public key for the key pair
            being authorized.

        :return: ``True`` iff verification of the key authorization was
            successful.
        :rtype: bool
        """
        # pylint: disable=unused-argument
        if self.verify(chall, account_public_key):
            return True
        logger.debug("Verification of key authorization in response failed")
        return False
@Challenge.register  # pylint: disable=too-many-ancestors
class DNS01(KeyAuthorizationChallenge):
    """ACME dns-01 challenge."""
    response_cls = DNS01Response
    typ = response_cls.typ

    LABEL = "_acme-challenge"
    """Label clients prepend to the domain name being validated."""

    def validation(self, account_key, **unused_kwargs):
        """Generate validation.

        :param JWK account_key:
        :rtype: unicode
        """
        key_auth = self.key_authorization(account_key).encode("utf-8")
        digest = hashlib.sha256(key_auth).digest()
        return jose.b64encode(digest).decode()

    def validation_domain_name(self, name):
        """Domain name for TXT validation record.

        :param unicode name: Domain name being validated.
        """
        return ".".join((self.LABEL, name))
@ChallengeResponse.register
class HTTP01Response(KeyAuthorizationChallengeResponse):
    """ACME http-01 challenge response."""
    typ = "http-01"

    PORT = 80
    """Verification port as defined by the protocol.

    You can override it (e.g. for testing) by passing ``port`` to
    `simple_verify`.
    """

    WHITESPACE_CUTSET = "\n\r\t "
    """Whitespace characters which should be ignored at the end of the body."""

    def simple_verify(self, chall, domain, account_public_key, port=None):
        """Simple verify.

        Checks the key authorization signature, then fetches the
        provisioned resource over plain HTTP and compares its body
        (modulo trailing whitespace) to the key authorization.

        :param challenges.SimpleHTTP chall: Corresponding challenge.
        :param unicode domain: Domain name being verified.
        :param JWK account_public_key: Public key for the key pair
            being authorized.
        :param int port: Port used in the validation.

        :returns: ``True`` iff validation with the files currently served by the
            HTTP server is successful.
        :rtype: bool
        """
        # Verify the key authorization itself before doing any network I/O.
        if not self.verify(chall, account_public_key):
            logger.debug("Verification of key authorization in response failed")
            return False
        # TODO: ACME specification defines URI template that doesn't
        # allow to use a custom port... Make sure port is not in the
        # request URI, if it's standard.
        if port is not None and port != self.PORT:
            logger.warning(
                "Using non-standard port for http-01 verification: %s", port)
            domain += ":{0}".format(port)
        uri = chall.uri(domain)
        logger.debug("Verifying %s at %s...", chall.typ, uri)
        try:
            http_response = requests.get(uri)
        except requests.exceptions.RequestException as error:
            logger.error("Unable to reach %s: %s", uri, error)
            return False
        logger.debug("Received %s: %s. Headers: %s", http_response,
                     http_response.text, http_response.headers)
        # Trailing whitespace in the served body is tolerated.
        challenge_response = http_response.text.rstrip(self.WHITESPACE_CUTSET)
        if self.key_authorization != challenge_response:
            logger.debug("Key authorization from response (%r) doesn't match "
                         "HTTP response (%r)", self.key_authorization,
                         challenge_response)
            return False
        return True
@Challenge.register  # pylint: disable=too-many-ancestors
class HTTP01(KeyAuthorizationChallenge):
    """ACME http-01 challenge."""
    response_cls = HTTP01Response
    typ = response_cls.typ

    URI_ROOT_PATH = ".well-known/acme-challenge"
    """URI root path for the server provisioned resource."""

    @property
    def path(self):
        """Path (starting with '/') for provisioned resource.

        :rtype: string
        """
        return "/{0}/{1}".format(self.URI_ROOT_PATH, self.encode('token'))

    def uri(self, domain):
        """Create an URI to the provisioned resource.

        Forms an URI to the HTTPS server provisioned resource
        (containing :attr:`~SimpleHTTP.token`).

        :param unicode domain: Domain name being verified.
        :rtype: string
        """
        return "".join(("http://", domain, self.path))

    def validation(self, account_key, **unused_kwargs):
        """Generate validation.

        :param JWK account_key:
        :rtype: unicode
        """
        return self.key_authorization(account_key)
@ChallengeResponse.register
class TLSSNI01Response(KeyAuthorizationChallengeResponse):
    """ACME tls-sni-01 challenge response."""
    typ = "tls-sni-01"

    DOMAIN_SUFFIX = b".acme.invalid"
    """Domain name suffix."""

    PORT = 443
    """Verification port as defined by the protocol.

    You can override it (e.g. for testing) by passing ``port`` to
    `simple_verify`.
    """

    @property
    def z(self):  # pylint: disable=invalid-name
        """``z`` value used for verification.

        :rtype bytes:
        """
        # Lowercase hex SHA-256 digest of the key authorization.
        return hashlib.sha256(
            self.key_authorization.encode("utf-8")).hexdigest().lower().encode()

    @property
    def z_domain(self):
        """Domain name used for verification, generated from `z`.

        :rtype bytes:
        """
        # The 64-char hex digest is split into two 32-char labels.
        return self.z[:32] + b'.' + self.z[32:] + self.DOMAIN_SUFFIX

    def gen_cert(self, key=None, bits=2048):
        """Generate tls-sni-01 certificate.

        :param OpenSSL.crypto.PKey key: Optional private key used in
            certificate generation. If not provided (``None``), then
            fresh key will be generated.
        :param int bits: Number of bits for newly generated key.

        :rtype: `tuple` of `OpenSSL.crypto.X509` and `OpenSSL.crypto.PKey`
        """
        if key is None:
            key = OpenSSL.crypto.PKey()
            key.generate_key(OpenSSL.crypto.TYPE_RSA, bits)
        return crypto_util.gen_ss_cert(key, [
            # z_domain is too big to fit into CN, hence first dummy domain
            'dummy', self.z_domain.decode()], force_san=True), key

    def probe_cert(self, domain, **kwargs):
        """Probe tls-sni-01 challenge certificate.

        :param unicode domain: Domain to resolve (skipped when ``host``
            is already supplied in ``kwargs``).
        """
        # TODO: domain is not necessary if host is provided
        if "host" not in kwargs:
            host = socket.gethostbyname(domain)
            logger.debug('%s resolved to %s', domain, host)
            kwargs["host"] = host
        kwargs.setdefault("port", self.PORT)
        kwargs["name"] = self.z_domain
        # TODO: try different methods?
        # pylint: disable=protected-access
        return crypto_util.probe_sni(**kwargs)

    def verify_cert(self, cert):
        """Verify tls-sni-01 challenge certificate.

        :param OpensSSL.crypto.X509 cert: Challenge certificate.

        :returns: Whether the certificate was successfully verified.
        :rtype: bool
        """
        # pylint: disable=protected-access
        sans = crypto_util._pyopenssl_cert_or_req_san(cert)
        logger.debug('Certificate %s. SANs: %s', cert.digest('sha256'), sans)
        # Verification succeeds iff the z_domain appears among the SANs.
        return self.z_domain.decode() in sans

    def simple_verify(self, chall, domain, account_public_key,
                      cert=None, **kwargs):
        """Simple verify.

        Verify ``validation`` using ``account_public_key``, optionally
        probe tls-sni-01 certificate and check using `verify_cert`.

        :param .challenges.TLSSNI01 chall: Corresponding challenge.
        :param str domain: Domain name being validated.
        :param JWK account_public_key:
        :param OpenSSL.crypto.X509 cert: Optional certificate. If not
            provided (``None``) certificate will be retrieved using
            `probe_cert`.
        :param int port: Port used to probe the certificate.

        :returns: ``True`` iff client's control of the domain has been
            verified.
        :rtype: bool
        """
        if not self.verify(chall, account_public_key):
            logger.debug("Verification of key authorization in response failed")
            return False
        if cert is None:
            try:
                cert = self.probe_cert(domain=domain, **kwargs)
            except errors.Error as error:
                logger.debug(error, exc_info=True)
                return False
        return self.verify_cert(cert)
@Challenge.register  # pylint: disable=too-many-ancestors
class TLSSNI01(KeyAuthorizationChallenge):
    """ACME tls-sni-01 challenge."""
    response_cls = TLSSNI01Response
    typ = response_cls.typ

    # boulder#962, ietf-wg-acme#22
    #n = jose.Field("n", encoder=int, decoder=int)

    def validation(self, account_key, **kwargs):
        """Generate validation.

        :param JWK account_key:
        :param OpenSSL.crypto.PKey cert_key: Optional private key used
            in certificate generation. If not provided (``None``), then
            fresh key will be generated.

        :rtype: `tuple` of `OpenSSL.crypto.X509` and `OpenSSL.crypto.PKey`
        """
        response = self.response(account_key)
        return response.gen_cert(key=kwargs.get('cert_key'))
@Challenge.register  # pylint: disable=too-many-ancestors
class DNS(_TokenChallenge):
    """ACME "dns" challenge."""
    typ = "dns"

    LABEL = "_acme-challenge"
    """Label clients prepend to the domain name being validated."""

    def gen_validation(self, account_key, alg=jose.RS256, **kwargs):
        """Generate validation.

        :param .JWK account_key: Private account key.
        :param .JWA alg:

        :returns: This challenge wrapped in `.JWS`
        :rtype: .JWS
        """
        payload = self.json_dumps(sort_keys=True).encode('utf-8')
        return jose.JWS.sign(payload=payload, key=account_key, alg=alg,
                             **kwargs)

    def check_validation(self, validation, account_public_key):
        """Check validation.

        :param JWS validation:
        :param JWK account_public_key:
        :rtype: bool
        """
        if not validation.verify(key=account_public_key):
            return False
        try:
            deserialized = self.json_loads(validation.payload.decode('utf-8'))
        except jose.DeserializationError as error:
            logger.debug("Checking validation for DNS failed: %s", error)
            return False
        return self == deserialized

    def gen_response(self, account_key, **kwargs):
        """Generate response.

        :param .JWK account_key: Private account key.
        :param .JWA alg:

        :rtype: DNSResponse
        """
        signed = self.gen_validation(account_key, **kwargs)
        return DNSResponse(validation=signed)

    def validation_domain_name(self, name):
        """Domain name for TXT validation record.

        :param unicode name: Domain name being validated.
        """
        return ".".join((self.LABEL, name))
@ChallengeResponse.register
class DNSResponse(ChallengeResponse):
    """ACME "dns" challenge response.

    :param JWS validation: Signed JWS wrapping the challenge.
    """
    typ = "dns"

    validation = jose.Field("validation", decoder=jose.JWS.from_json)

    def check_validation(self, chall, account_public_key):
        """Check validation.

        Delegates to `DNS.check_validation` on the challenge.

        :param challenges.DNS chall:
        :param JWK account_public_key:
        :rtype: bool
        """
        return chall.check_validation(self.validation, account_public_key)
| {
"repo_name": "jsha/letsencrypt",
"path": "acme/acme/challenges.py",
"copies": "2",
"size": "18234",
"license": "apache-2.0",
"hash": 5472439461718068000,
"line_mean": 29.9575551783,
"line_max": 80,
"alpha_frac": 0.6258089284,
"autogenerated": false,
"ratio": 4.256302521008403,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5882111449408404,
"avg_score": null,
"num_lines": null
} |
"""ACME Identifier Validation Challenges."""
import binascii
import functools
import hashlib
import logging
import os
import socket
from cryptography.hazmat.backends import default_backend
from cryptography import x509
import OpenSSL
import requests
from acme import errors
from acme import crypto_util
from acme import fields
from acme import jose
from acme import other
logger = logging.getLogger(__name__)
# pylint: disable=too-few-public-methods
class Challenge(jose.TypedJSONObjectWithFields):
    # _fields_to_partial_json | pylint: disable=abstract-method
    """ACME challenge.

    Polymorphic base class; concrete challenge types register
    themselves in :attr:`TYPES` (keyed by ``typ``) via the
    ``Challenge.register`` decorator used on the subclasses below.
    """
    TYPES = {}  # typ -> challenge class registry
class ContinuityChallenge(Challenge):  # pylint: disable=abstract-method
    """Client validation challenges.

    Marker base for challenges validating continuity of client identity
    (recoveryContact, recoveryToken, proofOfPossession below).
    """
class DVChallenge(Challenge):  # pylint: disable=abstract-method
    """Domain validation challenges.

    Marker base for challenges proving control over a domain name
    (simpleHttp, dvsni, dns below).
    """
class ChallengeResponse(jose.TypedJSONObjectWithFields):
    # _fields_to_partial_json | pylint: disable=abstract-method
    """ACME challenge response.

    Polymorphic base class; concrete response types register
    themselves in :attr:`TYPES` (keyed by ``typ``).
    """
    TYPES = {}  # typ -> response class registry
    resource_type = 'challenge'
    # Serialized responses carry a "resource": "challenge" field.
    resource = fields.Resource(resource_type)
@Challenge.register
class SimpleHTTP(DVChallenge):
    """ACME "simpleHttp" challenge.

    :ivar unicode token: Random token; the verifier expects the HTTP
        server to serve it back (see `SimpleHTTPResponse.simple_verify`).
    """
    typ = "simpleHttp"
    token = jose.Field("token")
@ChallengeResponse.register
class SimpleHTTPResponse(ChallengeResponse):
    """ACME "simpleHttp" challenge response.

    :ivar unicode path:
    :ivar unicode tls:
    """
    typ = "simpleHttp"
    path = jose.Field("path")
    tls = jose.Field("tls", default=True, omitempty=True)

    URI_ROOT_PATH = ".well-known/acme-challenge"
    """URI root path for the server provisioned resource."""

    _URI_TEMPLATE = "{scheme}://{domain}/" + URI_ROOT_PATH + "/{path}"

    MAX_PATH_LEN = 25
    """Maximum allowed `path` length."""

    CONTENT_TYPE = "text/plain"

    @property
    def good_path(self):
        """Is `path` good?

        .. todo:: acme-spec: "The value MUST be comprised entirely of
           characters from the URL-safe alphabet for Base64 encoding
           [RFC4648]", base64.b64decode ignores those characters
        """
        # TODO: check that path combined with uri does not go above
        # URI_ROOT_PATH!
        # Fix: use the named constant instead of duplicating the magic
        # number 25, so the limit and the check cannot drift apart.
        return len(self.path) <= self.MAX_PATH_LEN

    @property
    def scheme(self):
        """URL scheme for the provisioned resource."""
        return "https" if self.tls else "http"

    @property
    def port(self):
        """Port that the ACME client should be listening for validation."""
        return 443 if self.tls else 80

    def uri(self, domain):
        """Create an URI to the provisioned resource.

        Forms an URI to the HTTPS server provisioned resource
        (containing :attr:`~SimpleHTTP.token`).

        :param unicode domain: Domain name being verified.
        """
        return self._URI_TEMPLATE.format(
            scheme=self.scheme, domain=domain, path=self.path)

    def simple_verify(self, chall, domain, port=None):
        """Simple verify.

        According to the ACME specification, "the ACME server MUST
        ignore the certificate provided by the HTTPS server", so
        ``requests.get`` is called with ``verify=False``.

        :param .SimpleHTTP chall: Corresponding challenge.
        :param unicode domain: Domain name being verified.
        :param int port: Port used in the validation.

        :returns: ``True`` iff validation is successful, ``False``
            otherwise.
        :rtype: bool
        """
        # TODO: ACME specification defines URI template that doesn't
        # allow to use a custom port... Make sure port is not in the
        # request URI, if it's standard.
        if port is not None and port != self.port:
            # Fix: ``warn`` is a deprecated alias of ``warning``.
            logger.warning(
                "Using non-standard port for SimpleHTTP verification: %s",
                port)
            domain += ":{0}".format(port)
        uri = self.uri(domain)
        logger.debug("Verifying %s at %s...", chall.typ, uri)
        try:
            http_response = requests.get(uri, verify=False)
        except requests.exceptions.RequestException as error:
            logger.error("Unable to reach %s: %s", uri, error)
            return False
        logger.debug(
            "Received %s. Headers: %s", http_response, http_response.headers)
        good_token = http_response.text == chall.token
        if not good_token:
            logger.error(
                "Unable to verify %s! Expected: %r, returned: %r.",
                uri, chall.token, http_response.text)
        # TODO: spec contradicts itself, c.f.
        # https://github.com/letsencrypt/acme-spec/pull/156/files#r33136438
        good_ct = self.CONTENT_TYPE == http_response.headers.get(
            "Content-Type", self.CONTENT_TYPE)
        return self.good_path and good_ct and good_token
@Challenge.register
class DVSNI(DVChallenge):
    """ACME "dvsni" challenge.

    :ivar bytes r: Random data, **not** base64-encoded.
    :ivar bytes nonce: Random data, **not** hex-encoded.
    """
    typ = "dvsni"

    DOMAIN_SUFFIX = b".acme.invalid"
    """Domain name suffix."""

    R_SIZE = 32
    """Required size of the :attr:`r` in bytes."""

    NONCE_SIZE = 16
    """Required size of the :attr:`nonce` in bytes."""

    PORT = 443
    """Port to perform DVSNI challenge."""

    r = jose.Field("r", encoder=jose.encode_b64jose,  # pylint: disable=invalid-name
                   decoder=functools.partial(jose.decode_b64jose, size=R_SIZE))
    # Fix: the decoder was wrapped in a redundant nested functools.partial
    # (partial(partial(f, size=N))); a single partial is equivalent.
    nonce = jose.Field("nonce", encoder=jose.encode_hex16,
                       decoder=functools.partial(
                           jose.decode_hex16, size=NONCE_SIZE))

    @property
    def nonce_domain(self):
        """Domain name used in SNI.

        :rtype: bytes
        """
        return binascii.hexlify(self.nonce) + self.DOMAIN_SUFFIX

    def probe_cert(self, domain, **kwargs):
        """Probe DVSNI challenge certificate.

        :param unicode domain: Domain name to resolve; ignored when the
            caller already supplies ``host`` in ``kwargs``.
        """
        if "host" not in kwargs:
            # Fix: only resolve DNS when no host was given (mirrors the
            # TLSSNI01Response.probe_cert pattern), and log through the
            # module-level ``logger`` rather than the root logger.
            host = socket.gethostbyname(domain)
            logger.debug('%s resolved to %s', domain, host)
            kwargs["host"] = host
        kwargs.setdefault("port", self.PORT)
        kwargs["name"] = self.nonce_domain
        # TODO: try different methods?
        # pylint: disable=protected-access
        return crypto_util._probe_sni(**kwargs)
@ChallengeResponse.register
class DVSNIResponse(ChallengeResponse):
    """ACME "dvsni" challenge response.

    :param bytes s: Random data, **not** base64-encoded.
    """
    typ = "dvsni"

    DOMAIN_SUFFIX = DVSNI.DOMAIN_SUFFIX
    """Domain name suffix."""

    S_SIZE = 32
    """Required size of the :attr:`s` in bytes."""

    s = jose.Field("s", encoder=jose.encode_b64jose,  # pylint: disable=invalid-name
                   decoder=functools.partial(jose.decode_b64jose, size=S_SIZE))

    def __init__(self, s=None, *args, **kwargs):
        # Draw fresh random bytes when the caller did not supply s.
        s = os.urandom(self.S_SIZE) if s is None else s
        super(DVSNIResponse, self).__init__(s=s, *args, **kwargs)

    def z(self, chall):  # pylint: disable=invalid-name
        """Compute the parameter ``z``: hex SHA-256 over r || s.

        :param challenge: Corresponding challenge.
        :type challenge: :class:`DVSNI`

        :rtype: bytes
        """
        z = hashlib.new("sha256")  # pylint: disable=invalid-name
        z.update(chall.r)
        z.update(self.s)
        return z.hexdigest().encode()

    def z_domain(self, chall):
        """Domain name for certificate subjectAltName.

        :rtype bytes:
        """
        return self.z(chall) + self.DOMAIN_SUFFIX

    def gen_cert(self, chall, domain, key):
        """Generate DVSNI certificate.

        :param .DVSNI chall: Corresponding challenge.
        :param unicode domain:
        :param OpenSSL.crypto.PKey key: Private key for the certificate.
        """
        return crypto_util.gen_ss_cert(key, [
            domain, chall.nonce_domain.decode(), self.z_domain(chall).decode()])

    def simple_verify(self, chall, domain, public_key, **kwargs):
        """Simple verify.

        Probes DVSNI certificate and checks it using `verify_cert`;
        hence all arguments documented in `verify_cert`.
        """
        try:
            cert = chall.probe_cert(domain=domain, **kwargs)
        except errors.Error as error:
            logger.debug(error, exc_info=True)
            return False
        return self.verify_cert(chall, domain, public_key, cert)

    def verify_cert(self, chall, domain, public_key, cert):
        """Verify DVSNI certificate.

        :param .challenges.DVSNI chall: Corresponding challenge.
        :param str domain: Domain name being validated.
        :param public_key: Public key for the key pair
            being authorized. If ``None`` key verification is not
            performed!
        :type public_key:
            `~cryptography.hazmat.primitives.asymmetric.rsa.RSAPublicKey`
            or
            `~cryptography.hazmat.primitives.asymmetric.dsa.DSAPublicKey`
            or
            `~cryptography.hazmat.primitives.asymmetric.ec.EllipticCurvePublicKey`
            wrapped in `.ComparableKey`

        :param OpenSSL.crypto.X509 cert:

        :returns: ``True`` iff client's control of the domain has been
            verified, ``False`` otherwise.
        :rtype: bool
        """
        # TODO: check "It is a valid self-signed certificate" and
        # return False if not
        # pylint: disable=protected-access
        sans = crypto_util._pyopenssl_cert_or_req_san(cert)
        # Fix: use the module-level named logger instead of the root
        # logger (``logging.debug`` / deprecated ``logging.warn``).
        logger.debug('Certificate %s. SANs: %s', cert.digest('sha1'), sans)
        cert = x509.load_der_x509_certificate(
            OpenSSL.crypto.dump_certificate(OpenSSL.crypto.FILETYPE_ASN1, cert),
            default_backend())
        if public_key is None:
            logger.warning('No key verification is performed')
        elif public_key != jose.ComparableKey(cert.public_key()):
            return False
        return domain in sans and self.z_domain(chall).decode() in sans
@Challenge.register
class RecoveryContact(ContinuityChallenge):
    """ACME "recoveryContact" challenge.

    :ivar unicode activation_url:
    :ivar unicode success_url:
    :ivar unicode contact:
    """
    typ = "recoveryContact"

    # All fields are optional on the wire (omitempty).
    activation_url = jose.Field("activationURL", omitempty=True)
    success_url = jose.Field("successURL", omitempty=True)
    contact = jose.Field("contact", omitempty=True)
@ChallengeResponse.register
class RecoveryContactResponse(ChallengeResponse):
    """ACME "recoveryContact" challenge response.

    :ivar unicode token: Optional token echoed back by the client.
    """
    typ = "recoveryContact"
    token = jose.Field("token", omitempty=True)
@Challenge.register
class RecoveryToken(ContinuityChallenge):
    """ACME "recoveryToken" challenge.

    Carries no payload beyond its ``typ``.
    """
    typ = "recoveryToken"
@ChallengeResponse.register
class RecoveryTokenResponse(ChallengeResponse):
    """ACME "recoveryToken" challenge response.

    :ivar unicode token: Optional recovery token supplied by the client.
    """
    typ = "recoveryToken"
    token = jose.Field("token", omitempty=True)
@Challenge.register
class ProofOfPossession(ContinuityChallenge):
    """ACME "proofOfPossession" challenge.

    :ivar .JWAAlgorithm alg:
    :ivar bytes nonce: Random data, **not** base64-encoded.
    :ivar hints: Various clues for the client (:class:`Hints`).
    """
    typ = "proofOfPossession"

    NONCE_SIZE = 16  # required nonce size in bytes (enforced by decoder)

    class Hints(jose.JSONObjectWithFields):
        """Hints for "proofOfPossession" challenge.

        :ivar jwk: JSON Web Key (:class:`acme.jose.JWK`)
        :ivar tuple cert_fingerprints: `tuple` of `unicode`
        :ivar tuple certs: Sequence of :class:`acme.jose.ComparableX509`
            certificates.
        :ivar tuple subject_key_identifiers: `tuple` of `unicode`
        :ivar tuple issuers: `tuple` of `unicode`
        :ivar tuple authorized_for: `tuple` of `unicode`
        """
        jwk = jose.Field("jwk", decoder=jose.JWK.from_json)
        cert_fingerprints = jose.Field(
            "certFingerprints", omitempty=True, default=())
        certs = jose.Field("certs", omitempty=True, default=())
        subject_key_identifiers = jose.Field(
            "subjectKeyIdentifiers", omitempty=True, default=())
        serial_numbers = jose.Field("serialNumbers", omitempty=True, default=())
        issuers = jose.Field("issuers", omitempty=True, default=())
        authorized_for = jose.Field("authorizedFor", omitempty=True, default=())

        # Custom (de)serialization: certs travel base64-encoded on the wire.
        @certs.encoder
        def certs(value):  # pylint: disable=missing-docstring,no-self-argument
            return tuple(jose.encode_cert(cert) for cert in value)

        @certs.decoder
        def certs(value):  # pylint: disable=missing-docstring,no-self-argument
            return tuple(jose.decode_cert(cert) for cert in value)

    alg = jose.Field("alg", decoder=jose.JWASignature.from_json)
    nonce = jose.Field(
        "nonce", encoder=jose.encode_b64jose, decoder=functools.partial(
            jose.decode_b64jose, size=NONCE_SIZE))
    hints = jose.Field("hints", decoder=Hints.from_json)
@ChallengeResponse.register
class ProofOfPossessionResponse(ChallengeResponse):
    """ACME "proofOfPossession" challenge response.

    :ivar bytes nonce: Random data, **not** base64-encoded.
    :ivar acme.other.Signature signature: Signature of this message.
    """
    typ = "proofOfPossession"

    NONCE_SIZE = ProofOfPossession.NONCE_SIZE  # mirror the challenge's size

    nonce = jose.Field(
        "nonce", encoder=jose.encode_b64jose, decoder=functools.partial(
            jose.decode_b64jose, size=NONCE_SIZE))
    signature = jose.Field("signature", decoder=other.Signature.from_json)

    def verify(self):
        """Verify the challenge.

        :returns: whether ``signature`` verifies over ``nonce``.
        :rtype: bool
        """
        # self.signature is not Field | pylint: disable=no-member
        return self.signature.verify(self.nonce)
@Challenge.register
class DNS(DVChallenge):
    """ACME "dns" challenge.

    :ivar unicode token: Random token to be placed in a TXT record.
    """
    typ = "dns"
    token = jose.Field("token")
@ChallengeResponse.register
class DNSResponse(ChallengeResponse):
    """ACME "dns" challenge response.

    Carries no payload beyond its ``typ``.
    """
    typ = "dns"
| {
"repo_name": "tdfischer/lets-encrypt-preview",
"path": "acme/acme/challenges.py",
"copies": "3",
"size": "13959",
"license": "apache-2.0",
"hash": 4274274505383522300,
"line_mean": 29.7466960352,
"line_max": 84,
"alpha_frac": 0.6373665735,
"autogenerated": false,
"ratio": 3.83489010989011,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.597225668339011,
"avg_score": null,
"num_lines": null
} |
"""ACME Identifier Validation Challenges."""
import binascii
import functools
import hashlib
import logging
import os
import requests
from acme import interfaces
from acme import jose
from acme import other
logger = logging.getLogger(__name__)
# pylint: disable=too-few-public-methods
class Challenge(jose.TypedJSONObjectWithFields):
    # _fields_to_partial_json | pylint: disable=abstract-method
    """ACME challenge.

    Polymorphic base class; concrete challenge types register
    themselves in :attr:`TYPES` (keyed by ``typ``) via the
    ``Challenge.register`` decorator used on the subclasses below.
    """
    TYPES = {}  # typ -> challenge class registry
class ContinuityChallenge(Challenge):  # pylint: disable=abstract-method
    """Client validation challenges.

    Marker base for challenges validating continuity of client identity
    (recoveryContact, recoveryToken, proofOfPossession below).
    """
class DVChallenge(Challenge):  # pylint: disable=abstract-method
    """Domain validation challenges.

    Marker base for challenges proving control over a domain name
    (simpleHttp, dvsni, dns below).
    """
class ChallengeResponse(interfaces.ClientRequestableResource,
                        jose.TypedJSONObjectWithFields):
    # _fields_to_partial_json | pylint: disable=abstract-method
    """ACME challenge response."""
    TYPES = {}
    resource_type = 'challenge'

    @classmethod
    def from_json(cls, jobj):
        """Deserialize a response, mapping a null entry to ``None``."""
        if jobj is not None:
            return super(ChallengeResponse, cls).from_json(jobj)
        # if the client chooses not to respond to a given challenge,
        # then the corresponding entry in the response array is set to
        # None (null)
        return None
@Challenge.register
class SimpleHTTP(DVChallenge):
    """ACME "simpleHttp" challenge.

    :ivar unicode token: Random token; the verifier expects the HTTP
        server to serve it back (see `SimpleHTTPResponse.simple_verify`).
    """
    typ = "simpleHttp"
    token = jose.Field("token")
@ChallengeResponse.register
class SimpleHTTPResponse(ChallengeResponse):
    """ACME "simpleHttp" challenge response.

    :ivar unicode path:
    :ivar unicode tls:
    """
    typ = "simpleHttp"
    path = jose.Field("path")
    tls = jose.Field("tls", default=True, omitempty=True)

    URI_ROOT_PATH = ".well-known/acme-challenge"
    """URI root path for the server provisioned resource."""

    _URI_TEMPLATE = "{scheme}://{domain}/" + URI_ROOT_PATH + "/{path}"

    MAX_PATH_LEN = 25
    """Maximum allowed `path` length."""

    CONTENT_TYPE = "text/plain"

    @property
    def good_path(self):
        """Is `path` good?

        .. todo:: acme-spec: "The value MUST be comprised entirely of
           characters from the URL-safe alphabet for Base64 encoding
           [RFC4648]", base64.b64decode ignores those characters
        """
        # TODO: check that path combined with uri does not go above
        # URI_ROOT_PATH!
        # Fix: use the named constant instead of duplicating the magic
        # number 25, so the limit and the check cannot drift apart.
        return len(self.path) <= self.MAX_PATH_LEN

    @property
    def scheme(self):
        """URL scheme for the provisioned resource."""
        return "https" if self.tls else "http"

    @property
    def port(self):
        """Port that the ACME client should be listening for validation."""
        return 443 if self.tls else 80

    def uri(self, domain):
        """Create an URI to the provisioned resource.

        Forms an URI to the HTTPS server provisioned resource
        (containing :attr:`~SimpleHTTP.token`).

        :param unicode domain: Domain name being verified.
        """
        return self._URI_TEMPLATE.format(
            scheme=self.scheme, domain=domain, path=self.path)

    def simple_verify(self, chall, domain, port=None):
        """Simple verify.

        According to the ACME specification, "the ACME server MUST
        ignore the certificate provided by the HTTPS server", so
        ``requests.get`` is called with ``verify=False``.

        :param .SimpleHTTP chall: Corresponding challenge.
        :param unicode domain: Domain name being verified.
        :param int port: Port used in the validation.

        :returns: ``True`` iff validation is successful, ``False``
            otherwise.
        :rtype: bool
        """
        # TODO: ACME specification defines URI template that doesn't
        # allow to use a custom port... Make sure port is not in the
        # request URI, if it's standard.
        if port is not None and port != self.port:
            # Fix: ``warn`` is a deprecated alias of ``warning``.
            logger.warning(
                "Using non-standard port for SimpleHTTP verification: %s",
                port)
            domain += ":{0}".format(port)
        uri = self.uri(domain)
        logger.debug("Verifying %s at %s...", chall.typ, uri)
        try:
            http_response = requests.get(uri, verify=False)
        except requests.exceptions.RequestException as error:
            logger.error("Unable to reach %s: %s", uri, error)
            return False
        logger.debug(
            "Received %s. Headers: %s", http_response, http_response.headers)
        good_token = http_response.text == chall.token
        if not good_token:
            logger.error(
                "Unable to verify %s! Expected: %r, returned: %r.",
                uri, chall.token, http_response.text)
        # TODO: spec contradicts itself, c.f.
        # https://github.com/letsencrypt/acme-spec/pull/156/files#r33136438
        good_ct = self.CONTENT_TYPE == http_response.headers.get(
            "Content-Type", self.CONTENT_TYPE)
        return self.good_path and good_ct and good_token
@Challenge.register
class DVSNI(DVChallenge):
    """ACME "dvsni" challenge.

    :ivar bytes r: Random data, **not** base64-encoded.
    :ivar bytes nonce: Random data, **not** hex-encoded.
    """
    typ = "dvsni"

    DOMAIN_SUFFIX = b".acme.invalid"
    """Domain name suffix."""

    R_SIZE = 32
    """Required size of the :attr:`r` in bytes."""

    NONCE_SIZE = 16
    """Required size of the :attr:`nonce` in bytes."""

    PORT = 443
    """Port to perform DVSNI challenge."""

    r = jose.Field("r", encoder=jose.encode_b64jose,  # pylint: disable=invalid-name
                   decoder=functools.partial(jose.decode_b64jose, size=R_SIZE))
    # Fix: the decoder was wrapped in a redundant nested functools.partial
    # (partial(partial(f, size=N))); a single partial is equivalent.
    nonce = jose.Field("nonce", encoder=jose.encode_hex16,
                       decoder=functools.partial(
                           jose.decode_hex16, size=NONCE_SIZE))

    @property
    def nonce_domain(self):
        """Domain name used in SNI.

        :rtype: bytes
        """
        return binascii.hexlify(self.nonce) + self.DOMAIN_SUFFIX
@ChallengeResponse.register
class DVSNIResponse(ChallengeResponse):
    """ACME "dvsni" challenge response.

    :param bytes s: Random data, **not** base64-encoded.
    """
    typ = "dvsni"

    DOMAIN_SUFFIX = DVSNI.DOMAIN_SUFFIX
    """Domain name suffix."""

    S_SIZE = 32
    """Required size of the :attr:`s` in bytes."""

    s = jose.Field("s", encoder=jose.encode_b64jose,  # pylint: disable=invalid-name
                   decoder=functools.partial(jose.decode_b64jose, size=S_SIZE))

    def __init__(self, s=None, *args, **kwargs):
        """Initialize, drawing ``s`` from ``os.urandom`` when omitted."""
        if s is None:
            s = os.urandom(self.S_SIZE)
        super(DVSNIResponse, self).__init__(s=s, *args, **kwargs)

    def z(self, chall):  # pylint: disable=invalid-name
        """Compute the parameter ``z``.

        :param challenge: Corresponding challenge.
        :type challenge: :class:`DVSNI`

        :rtype: bytes
        """
        digest = hashlib.new("sha256")
        digest.update(chall.r)
        digest.update(self.s)
        return digest.hexdigest().encode()

    def z_domain(self, chall):
        """Domain name for certificate subjectAltName."""
        return self.z(chall) + self.DOMAIN_SUFFIX
@Challenge.register
class RecoveryContact(ContinuityChallenge):
    """ACME "recoveryContact" challenge.

    :ivar unicode activation_url:
    :ivar unicode success_url:
    :ivar unicode contact:
    """
    typ = "recoveryContact"

    # All fields are optional on the wire (omitempty).
    activation_url = jose.Field("activationURL", omitempty=True)
    success_url = jose.Field("successURL", omitempty=True)
    contact = jose.Field("contact", omitempty=True)
@ChallengeResponse.register
class RecoveryContactResponse(ChallengeResponse):
    """ACME "recoveryContact" challenge response.

    :ivar unicode token: Optional token echoed back by the client.
    """
    typ = "recoveryContact"
    token = jose.Field("token", omitempty=True)
@Challenge.register
class RecoveryToken(ContinuityChallenge):
    """ACME "recoveryToken" challenge.

    Carries no payload beyond its ``typ``.
    """
    typ = "recoveryToken"
@ChallengeResponse.register
class RecoveryTokenResponse(ChallengeResponse):
    """ACME "recoveryToken" challenge response.

    :ivar unicode token: Optional recovery token supplied by the client.
    """
    typ = "recoveryToken"
    token = jose.Field("token", omitempty=True)
@Challenge.register
class ProofOfPossession(ContinuityChallenge):
    """ACME "proofOfPossession" challenge.

    :ivar .JWAAlgorithm alg:
    :ivar bytes nonce: Random data, **not** base64-encoded.
    :ivar hints: Various clues for the client (:class:`Hints`).
    """
    typ = "proofOfPossession"

    NONCE_SIZE = 16  # required nonce size in bytes (enforced by decoder)

    class Hints(jose.JSONObjectWithFields):
        """Hints for "proofOfPossession" challenge.

        :ivar jwk: JSON Web Key (:class:`acme.jose.JWK`)
        :ivar tuple cert_fingerprints: `tuple` of `unicode`
        :ivar tuple certs: Sequence of :class:`acme.jose.ComparableX509`
            certificates.
        :ivar tuple subject_key_identifiers: `tuple` of `unicode`
        :ivar tuple issuers: `tuple` of `unicode`
        :ivar tuple authorized_for: `tuple` of `unicode`
        """
        jwk = jose.Field("jwk", decoder=jose.JWK.from_json)
        cert_fingerprints = jose.Field(
            "certFingerprints", omitempty=True, default=())
        certs = jose.Field("certs", omitempty=True, default=())
        subject_key_identifiers = jose.Field(
            "subjectKeyIdentifiers", omitempty=True, default=())
        serial_numbers = jose.Field("serialNumbers", omitempty=True, default=())
        issuers = jose.Field("issuers", omitempty=True, default=())
        authorized_for = jose.Field("authorizedFor", omitempty=True, default=())

        # Custom (de)serialization: certs travel base64-encoded on the wire.
        @certs.encoder
        def certs(value):  # pylint: disable=missing-docstring,no-self-argument
            return tuple(jose.encode_cert(cert) for cert in value)

        @certs.decoder
        def certs(value):  # pylint: disable=missing-docstring,no-self-argument
            return tuple(jose.decode_cert(cert) for cert in value)

    alg = jose.Field("alg", decoder=jose.JWASignature.from_json)
    nonce = jose.Field(
        "nonce", encoder=jose.encode_b64jose, decoder=functools.partial(
            jose.decode_b64jose, size=NONCE_SIZE))
    hints = jose.Field("hints", decoder=Hints.from_json)
@ChallengeResponse.register
class ProofOfPossessionResponse(ChallengeResponse):
    """ACME "proofOfPossession" challenge response.

    :ivar bytes nonce: Random data, **not** base64-encoded.
    :ivar acme.other.Signature signature: Signature of this message.
    """
    typ = "proofOfPossession"

    NONCE_SIZE = ProofOfPossession.NONCE_SIZE  # mirror the challenge's size

    nonce = jose.Field(
        "nonce", encoder=jose.encode_b64jose, decoder=functools.partial(
            jose.decode_b64jose, size=NONCE_SIZE))
    signature = jose.Field("signature", decoder=other.Signature.from_json)

    def verify(self):
        """Verify the challenge.

        :returns: whether ``signature`` verifies over ``nonce``.
        :rtype: bool
        """
        # self.signature is not Field | pylint: disable=no-member
        return self.signature.verify(self.nonce)
@Challenge.register
class DNS(DVChallenge):
    """ACME "dns" challenge.
    :ivar unicode token:
    """
    typ = "dns"
    # Server-supplied token, serialized verbatim (no encoder/decoder).
    token = jose.Field("token")
@ChallengeResponse.register
class DNSResponse(ChallengeResponse):
    """ACME "dns" challenge response (carries no payload fields)."""
    typ = "dns"
| {
"repo_name": "Jonadabe/letsencrypt",
"path": "acme/acme/challenges.py",
"copies": "1",
"size": "11254",
"license": "apache-2.0",
"hash": -9103298142820307000,
"line_mean": 29.0106666667,
"line_max": 84,
"alpha_frac": 0.6392393816,
"autogenerated": false,
"ratio": 3.802027027027027,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9939792189309671,
"avg_score": 0.00029484386347131445,
"num_lines": 375
} |
"""ACME Identifier Validation Challenges."""
import binascii
import functools
import hashlib
import Crypto.Random
from acme import jose
from acme import other
# pylint: disable=too-few-public-methods
class Challenge(jose.TypedJSONObjectWithFields):
    # _fields_to_partial_json | pylint: disable=abstract-method
    """ACME challenge."""
    # Registry mapping "typ" strings to concrete challenge classes,
    # populated by the ``@Challenge.register`` decorations below.
    TYPES = {}
# Marker base class: concrete challenges subclass this to indicate
# client (continuity) validation rather than domain validation.
class ContinuityChallenge(Challenge): # pylint: disable=abstract-method
    """Client validation challenges."""
# Marker base class for challenges that validate control of a domain.
class DVChallenge(Challenge): # pylint: disable=abstract-method
    """Domain validation challenges."""
class ChallengeResponse(jose.TypedJSONObjectWithFields):
    # _fields_to_partial_json | pylint: disable=abstract-method
    """ACME challenge response."""
    TYPES = {}
    @classmethod
    def from_json(cls, jobj):
        """Deserialize a challenge response.
        A ``null`` entry in the response array means the client chose
        not to respond to that challenge, so ``None`` is passed
        through unchanged instead of being deserialized.
        """
        return None if jobj is None else super(
            ChallengeResponse, cls).from_json(jobj)
@Challenge.register
class SimpleHTTP(DVChallenge):
    """ACME "simpleHttp" challenge."""
    typ = "simpleHttp"
    # Server-supplied token, serialized verbatim (no encoder/decoder).
    token = jose.Field("token")
@ChallengeResponse.register
class SimpleHTTPResponse(ChallengeResponse):
    """ACME "simpleHttp" challenge response.
    :ivar str path: Path component of the provisioned resource.
    :ivar bool tls: Whether the resource is served over TLS.
    """
    typ = "simpleHttp"
    path = jose.Field("path")
    tls = jose.Field("tls", default=True, omitempty=True)
    URI_ROOT_PATH = ".well-known/acme-challenge"
    """URI root path for the server provisioned resource."""
    _URI_TEMPLATE = "{scheme}://{domain}/" + URI_ROOT_PATH + "/{path}"
    MAX_PATH_LEN = 25
    """Maximum allowed `path` length."""
    @property
    def good_path(self):
        """Is `path` good?
        .. todo:: acme-spec: "The value MUST be comprised entirely of
            characters from the URL-safe alphabet for Base64 encoding
            [RFC4648]", base64.b64decode ignores those characters
        """
        # Compare against the named constant instead of duplicating the
        # magic number 25, so a future change to MAX_PATH_LEN stays
        # consistent with this check.
        return len(self.path) <= self.MAX_PATH_LEN
    @property
    def scheme(self):
        """URL scheme for the provisioned resource."""
        return "https" if self.tls else "http"
    def uri(self, domain):
        """Create an URI to the provisioned resource.
        Forms an URI to the HTTPS server provisioned resource
        (containing :attr:`~SimpleHTTP.token`).
        :param str domain: Domain name being verified.
        """
        return self._URI_TEMPLATE.format(
            scheme=self.scheme, domain=domain, path=self.path)
@Challenge.register
class DVSNI(DVChallenge):
    """ACME "dvsni" challenge.
    :ivar str r: Random data, **not** base64-encoded.
    :ivar str nonce: Random data, **not** hex-encoded.
    """
    typ = "dvsni"
    DOMAIN_SUFFIX = ".acme.invalid"
    """Domain name suffix."""
    R_SIZE = 32
    """Required size of the :attr:`r` in bytes."""
    NONCE_SIZE = 16
    """Required size of the :attr:`nonce` in bytes."""
    PORT = 443
    """Port to perform DVSNI challenge."""
    r = jose.Field("r", encoder=jose.b64encode, # pylint: disable=invalid-name
                   decoder=functools.partial(jose.decode_b64jose, size=R_SIZE))
    # The decoder was previously wrapped in a redundant second
    # functools.partial; a single partial is behaviorally identical.
    nonce = jose.Field("nonce", encoder=binascii.hexlify,
                       decoder=functools.partial(
                           jose.decode_hex16, size=NONCE_SIZE))
    @property
    def nonce_domain(self):
        """Domain name used in SNI: hex(nonce) + DOMAIN_SUFFIX."""
        return binascii.hexlify(self.nonce) + self.DOMAIN_SUFFIX
@ChallengeResponse.register
class DVSNIResponse(ChallengeResponse):
    """ACME "dvsni" challenge response.
    :param str s: Random data, **not** base64-encoded.
    """
    typ = "dvsni"
    DOMAIN_SUFFIX = DVSNI.DOMAIN_SUFFIX
    """Domain name suffix."""
    S_SIZE = 32
    """Required size of the :attr:`s` in bytes."""
    s = jose.Field("s", encoder=jose.b64encode, # pylint: disable=invalid-name
                   decoder=functools.partial(jose.decode_b64jose, size=S_SIZE))
    def __init__(self, s=None, *args, **kwargs):
        # Generate fresh random bytes for ``s`` unless the caller
        # supplied them (e.g. when deserializing).
        s = Crypto.Random.get_random_bytes(self.S_SIZE) if s is None else s
        super(DVSNIResponse, self).__init__(s=s, *args, **kwargs)
    def z(self, chall): # pylint: disable=invalid-name
        """Compute the parameter ``z``.
        :param challenge: Corresponding challenge.
        :type challenge: :class:`DVSNI`
        """
        # z = hex(SHA-256(r || s)): binds the challenge's secret ``r``
        # to this response's secret ``s``.
        z = hashlib.new("sha256") # pylint: disable=invalid-name
        z.update(chall.r)
        z.update(self.s)
        return z.hexdigest()
    def z_domain(self, chall):
        """Domain name for certificate subjectAltName."""
        return self.z(chall) + self.DOMAIN_SUFFIX
@Challenge.register
class RecoveryContact(ContinuityChallenge):
    """ACME "recoveryContact" challenge."""
    typ = "recoveryContact"
    # All fields are optional; the server may provide any subset.
    activation_url = jose.Field("activationURL", omitempty=True)
    success_url = jose.Field("successURL", omitempty=True)
    contact = jose.Field("contact", omitempty=True)
@ChallengeResponse.register
class RecoveryContactResponse(ChallengeResponse):
    """ACME "recoveryContact" challenge response."""
    typ = "recoveryContact"
    # Token is optional in this response.
    token = jose.Field("token", omitempty=True)
@Challenge.register
class RecoveryToken(ContinuityChallenge):
    """ACME "recoveryToken" challenge (no payload fields)."""
    typ = "recoveryToken"
@ChallengeResponse.register
class RecoveryTokenResponse(ChallengeResponse):
    """ACME "recoveryToken" challenge response."""
    typ = "recoveryToken"
    # Token is optional in this response.
    token = jose.Field("token", omitempty=True)
@Challenge.register
class ProofOfPossession(ContinuityChallenge):
    """ACME "proofOfPossession" challenge.
    :ivar str nonce: Random data, **not** base64-encoded.
    :ivar hints: Various clues for the client (:class:`Hints`).
    """
    typ = "proofOfPossession"
    # Required size of the decoded nonce, in bytes.
    NONCE_SIZE = 16
    class Hints(jose.JSONObjectWithFields):
        """Hints for "proofOfPossession" challenge.
        :ivar jwk: JSON Web Key (:class:`acme.jose.JWK`)
        :ivar list certs: List of :class:`acme.jose.ComparableX509`
            certificates.
        """
        # Every field except "jwk" is optional (omitempty) and defaults
        # to an empty tuple when the server supplies no hint.
        jwk = jose.Field("jwk", decoder=jose.JWK.from_json)
        cert_fingerprints = jose.Field(
            "certFingerprints", omitempty=True, default=())
        certs = jose.Field("certs", omitempty=True, default=())
        subject_key_identifiers = jose.Field(
            "subjectKeyIdentifiers", omitempty=True, default=())
        serial_numbers = jose.Field("serialNumbers", omitempty=True, default=())
        issuers = jose.Field("issuers", omitempty=True, default=())
        authorized_for = jose.Field("authorizedFor", omitempty=True, default=())
        # Certificates round-trip through jose's cert encoding as a tuple.
        @certs.encoder
        def certs(value): # pylint: disable=missing-docstring,no-self-argument
            return tuple(jose.encode_cert(cert) for cert in value)
        @certs.decoder
        def certs(value): # pylint: disable=missing-docstring,no-self-argument
            return tuple(jose.decode_cert(cert) for cert in value)
    alg = jose.Field("alg", decoder=jose.JWASignature.from_json)
    nonce = jose.Field(
        "nonce", encoder=jose.b64encode, decoder=functools.partial(
            jose.decode_b64jose, size=NONCE_SIZE))
    hints = jose.Field("hints", decoder=Hints.from_json)
@ChallengeResponse.register
class ProofOfPossessionResponse(ChallengeResponse):
    """ACME "proofOfPossession" challenge response.
    :ivar str nonce: Random data, **not** base64-encoded.
    :ivar signature: :class:`~acme.other.Signature` of this message.
    """
    typ = "proofOfPossession"
    # Mirrors the nonce size required by the corresponding challenge.
    NONCE_SIZE = ProofOfPossession.NONCE_SIZE
    nonce = jose.Field(
        "nonce", encoder=jose.b64encode, decoder=functools.partial(
            jose.decode_b64jose, size=NONCE_SIZE))
    signature = jose.Field("signature", decoder=other.Signature.from_json)
    def verify(self):
        """Verify that ``signature`` is a valid signature over ``nonce``."""
        # self.signature is not Field | pylint: disable=no-member
        return self.signature.verify(self.nonce)
@Challenge.register
class DNS(DVChallenge):
    """ACME "dns" challenge."""
    typ = "dns"
    # Server-supplied token, serialized verbatim (no encoder/decoder).
    token = jose.Field("token")
@ChallengeResponse.register
class DNSResponse(ChallengeResponse):
    """ACME "dns" challenge response (carries no payload fields)."""
    typ = "dns"
| {
"repo_name": "digideskio/lets-encrypt-preview",
"path": "acme/challenges.py",
"copies": "1",
"size": "8279",
"license": "apache-2.0",
"hash": -3924780379372150000,
"line_mean": 29.1054545455,
"line_max": 80,
"alpha_frac": 0.6518903249,
"autogenerated": false,
"ratio": 3.700938757264193,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4852829082164193,
"avg_score": null,
"num_lines": null
} |
"""ACME Identifier Validation Challenges."""
import binascii
import functools
import hashlib
import Crypto.Random
from letsencrypt.acme import jose
from letsencrypt.acme import other
# pylint: disable=too-few-public-methods
class Challenge(jose.TypedJSONObjectWithFields):
    # _fields_to_partial_json | pylint: disable=abstract-method
    """ACME challenge."""
    # Registry mapping "typ" strings to concrete challenge classes,
    # populated by the ``@Challenge.register`` decorations below.
    TYPES = {}
# Marker base class: concrete challenges subclass this to indicate
# client (continuity) validation rather than domain validation.
class ContinuityChallenge(Challenge): # pylint: disable=abstract-method
    """Client validation challenges."""
# Marker base class for challenges that validate control of a domain.
class DVChallenge(Challenge): # pylint: disable=abstract-method
    """Domain validation challenges."""
class ChallengeResponse(jose.TypedJSONObjectWithFields):
    # _fields_to_partial_json | pylint: disable=abstract-method
    """ACME challenge response."""
    TYPES = {}
    @classmethod
    def from_json(cls, jobj):
        """Deserialize, passing ``None`` (a declined challenge) through."""
        if jobj is None:
            # if the client chooses not to respond to a given
            # challenge, then the corresponding entry in the response
            # array is set to None (null)
            return None
        return super(ChallengeResponse, cls).from_json(jobj)
@Challenge.register
class SimpleHTTPS(DVChallenge):
    """ACME "simpleHttps" challenge."""
    typ = "simpleHttps"
    # Server-supplied token, serialized verbatim (no encoder/decoder).
    token = jose.Field("token")
@ChallengeResponse.register
class SimpleHTTPSResponse(ChallengeResponse):
    """ACME "simpleHttps" challenge response."""
    typ = "simpleHttps"
    # Path component chosen by the client for the provisioned resource.
    path = jose.Field("path")
    URI_TEMPLATE = "https://{domain}/.well-known/acme-challenge/{path}"
    """URI template for HTTPS server provisioned resource."""
    def uri(self, domain):
        """Create an URI to the provisioned resource.
        Forms an URI to the HTTPS server provisioned resource (containing
        :attr:`~SimpleHTTPS.token`) by populating the :attr:`URI_TEMPLATE`.
        :param str domain: Domain name being verified.
        """
        return self.URI_TEMPLATE.format(domain=domain, path=self.path)
@Challenge.register
class DVSNI(DVChallenge):
    """ACME "dvsni" challenge.
    :ivar str r: Random data, **not** base64-encoded.
    :ivar str nonce: Random data, **not** hex-encoded.
    """
    typ = "dvsni"
    DOMAIN_SUFFIX = ".acme.invalid"
    """Domain name suffix."""
    R_SIZE = 32
    """Required size of the :attr:`r` in bytes."""
    NONCE_SIZE = 16
    """Required size of the :attr:`nonce` in bytes."""
    PORT = 443
    """Port to perform DVSNI challenge."""
    r = jose.Field("r", encoder=jose.b64encode, # pylint: disable=invalid-name
                   decoder=functools.partial(jose.decode_b64jose, size=R_SIZE))
    # The decoder was previously wrapped in a redundant second
    # functools.partial; a single partial is behaviorally identical.
    nonce = jose.Field("nonce", encoder=binascii.hexlify,
                       decoder=functools.partial(
                           jose.decode_hex16, size=NONCE_SIZE))
    @property
    def nonce_domain(self):
        """Domain name used in SNI: hex(nonce) + DOMAIN_SUFFIX."""
        return binascii.hexlify(self.nonce) + self.DOMAIN_SUFFIX
@ChallengeResponse.register
class DVSNIResponse(ChallengeResponse):
    """ACME "dvsni" challenge response.
    :param str s: Random data, **not** base64-encoded.
    """
    typ = "dvsni"
    DOMAIN_SUFFIX = DVSNI.DOMAIN_SUFFIX
    """Domain name suffix."""
    S_SIZE = 32
    """Required size of the :attr:`s` in bytes."""
    s = jose.Field("s", encoder=jose.b64encode, # pylint: disable=invalid-name
                   decoder=functools.partial(jose.decode_b64jose, size=S_SIZE))
    def __init__(self, s=None, *args, **kwargs):
        # Generate fresh random bytes for ``s`` unless the caller
        # supplied them (e.g. when deserializing).
        s = Crypto.Random.get_random_bytes(self.S_SIZE) if s is None else s
        super(DVSNIResponse, self).__init__(s=s, *args, **kwargs)
    def z(self, chall): # pylint: disable=invalid-name
        """Compute the parameter ``z``.
        :param challenge: Corresponding challenge.
        :type challenge: :class:`DVSNI`
        """
        # z = hex(SHA-256(r || s)): binds the challenge's secret ``r``
        # to this response's secret ``s``.
        z = hashlib.new("sha256") # pylint: disable=invalid-name
        z.update(chall.r)
        z.update(self.s)
        return z.hexdigest()
    def z_domain(self, chall):
        """Domain name for certificate subjectAltName."""
        return self.z(chall) + self.DOMAIN_SUFFIX
@Challenge.register
class RecoveryContact(ContinuityChallenge):
    """ACME "recoveryContact" challenge."""
    typ = "recoveryContact"
    # All fields are optional; the server may provide any subset.
    activation_url = jose.Field("activationURL", omitempty=True)
    success_url = jose.Field("successURL", omitempty=True)
    contact = jose.Field("contact", omitempty=True)
@ChallengeResponse.register
class RecoveryContactResponse(ChallengeResponse):
    """ACME "recoveryContact" challenge response."""
    typ = "recoveryContact"
    # Token is optional in this response.
    token = jose.Field("token", omitempty=True)
@Challenge.register
class RecoveryToken(ContinuityChallenge):
    """ACME "recoveryToken" challenge (no payload fields)."""
    typ = "recoveryToken"
@ChallengeResponse.register
class RecoveryTokenResponse(ChallengeResponse):
    """ACME "recoveryToken" challenge response."""
    typ = "recoveryToken"
    # Token is optional in this response.
    token = jose.Field("token", omitempty=True)
@Challenge.register
class ProofOfPossession(ContinuityChallenge):
    """ACME "proofOfPossession" challenge.
    :ivar str nonce: Random data, **not** base64-encoded.
    :ivar hints: Various clues for the client (:class:`Hints`).
    """
    typ = "proofOfPossession"
    # Required size of the decoded nonce, in bytes.
    NONCE_SIZE = 16
    class Hints(jose.JSONObjectWithFields):
        """Hints for "proofOfPossession" challenge.
        :ivar jwk: JSON Web Key (:class:`letsencrypt.acme.jose.JWK`)
        :ivar list certs: List of :class:`letsencrypt.acme.jose.ComparableX509`
            certificates.
        """
        # Every field except "jwk" is optional (omitempty) and defaults
        # to an empty tuple when the server supplies no hint.
        jwk = jose.Field("jwk", decoder=jose.JWK.from_json)
        cert_fingerprints = jose.Field(
            "certFingerprints", omitempty=True, default=())
        certs = jose.Field("certs", omitempty=True, default=())
        subject_key_identifiers = jose.Field(
            "subjectKeyIdentifiers", omitempty=True, default=())
        serial_numbers = jose.Field("serialNumbers", omitempty=True, default=())
        issuers = jose.Field("issuers", omitempty=True, default=())
        authorized_for = jose.Field("authorizedFor", omitempty=True, default=())
        # Certificates round-trip through jose's cert encoding as a tuple.
        @certs.encoder
        def certs(value): # pylint: disable=missing-docstring,no-self-argument
            return tuple(jose.encode_cert(cert) for cert in value)
        @certs.decoder
        def certs(value): # pylint: disable=missing-docstring,no-self-argument
            return tuple(jose.decode_cert(cert) for cert in value)
    alg = jose.Field("alg", decoder=jose.JWASignature.from_json)
    nonce = jose.Field(
        "nonce", encoder=jose.b64encode, decoder=functools.partial(
            jose.decode_b64jose, size=NONCE_SIZE))
    hints = jose.Field("hints", decoder=Hints.from_json)
@ChallengeResponse.register
class ProofOfPossessionResponse(ChallengeResponse):
    """ACME "proofOfPossession" challenge response.
    :ivar str nonce: Random data, **not** base64-encoded.
    :ivar signature: :class:`~letsencrypt.acme.other.Signature` of this message.
    """
    typ = "proofOfPossession"
    # Mirrors the nonce size required by the corresponding challenge.
    NONCE_SIZE = ProofOfPossession.NONCE_SIZE
    nonce = jose.Field(
        "nonce", encoder=jose.b64encode, decoder=functools.partial(
            jose.decode_b64jose, size=NONCE_SIZE))
    signature = jose.Field("signature", decoder=other.Signature.from_json)
    def verify(self):
        """Verify that ``signature`` is a valid signature over ``nonce``."""
        # self.signature is not Field | pylint: disable=no-member
        return self.signature.verify(self.nonce)
@Challenge.register
class DNS(DVChallenge):
    """ACME "dns" challenge."""
    typ = "dns"
    # Server-supplied token, serialized verbatim (no encoder/decoder).
    token = jose.Field("token")
@ChallengeResponse.register
class DNSResponse(ChallengeResponse):
    """ACME "dns" challenge response (carries no payload fields)."""
    typ = "dns"
| {
"repo_name": "diracdeltas/lets-encrypt-preview",
"path": "letsencrypt/acme/challenges.py",
"copies": "1",
"size": "7721",
"license": "apache-2.0",
"hash": 8905233111028043000,
"line_mean": 29.6388888889,
"line_max": 80,
"alpha_frac": 0.6610542676,
"autogenerated": false,
"ratio": 3.685441527446301,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.48464957950463006,
"avg_score": null,
"num_lines": null
} |
"""ACME Identifier Validation Challenges."""
import functools
import hashlib
import logging
import socket
import OpenSSL
import requests
from acme import errors
from acme import crypto_util
from acme import fields
from acme import jose
from acme import other
logger = logging.getLogger(__name__)
# pylint: disable=too-few-public-methods
class Challenge(jose.TypedJSONObjectWithFields):
    # _fields_to_partial_json | pylint: disable=abstract-method
    """ACME challenge."""
    # Registry mapping "typ" strings to concrete challenge classes,
    # populated by the ``@Challenge.register`` decorations below.
    TYPES = {}
    @classmethod
    def from_json(cls, jobj):
        """Deserialize; unknown types become `UnrecognizedChallenge`."""
        try:
            return super(Challenge, cls).from_json(jobj)
        except jose.UnrecognizedTypeError as error:
            logger.debug(error)
            return UnrecognizedChallenge.from_json(jobj)
# Marker base class: concrete challenges subclass this to indicate
# client (continuity) validation rather than domain validation.
class ContinuityChallenge(Challenge):  # pylint: disable=abstract-method
    """Client validation challenges."""
# Marker base class for challenges that validate control of a domain.
class DVChallenge(Challenge):  # pylint: disable=abstract-method
    """Domain validation challenges."""
class ChallengeResponse(jose.TypedJSONObjectWithFields):
    # _fields_to_partial_json | pylint: disable=abstract-method
    """ACME challenge response."""
    TYPES = {}
    resource_type = 'challenge'
    # NOTE(review): presumably fixes the JSON "resource" member to
    # "challenge" — confirm against acme.fields.Resource.
    resource = fields.Resource(resource_type)
class UnrecognizedChallenge(Challenge):
    """Unrecognized challenge.
    ACME specification defines a generic framework for challenges and
    defines some standard challenges that are implemented in this
    module. However, other implementations (including peers) might
    define additional challenge types, which should be ignored if
    unrecognized.
    :ivar jobj: Original JSON decoded object.
    """
    def __init__(self, jobj):
        super(UnrecognizedChallenge, self).__init__()
        # NOTE(review): object.__setattr__ bypasses the base class's
        # setattr — presumably because jose objects are immutable;
        # confirm against jose.JSONObjectWithFields.
        object.__setattr__(self, "jobj", jobj)
    def to_partial_json(self):
        # Serialize back to exactly the JSON object we were given.
        # pylint: disable=no-member
        return self.jobj
    @classmethod
    def from_json(cls, jobj):
        # Keep the raw object; no field validation is possible.
        return cls(jobj)
@Challenge.register
class SimpleHTTP(DVChallenge):
    """ACME "simpleHttp" challenge.
    :ivar unicode token:
    """
    typ = "simpleHttp"
    TOKEN_SIZE = 128 / 8 # Based on the entropy value from the spec
    """Minimum size of the :attr:`token` in bytes."""
    URI_ROOT_PATH = ".well-known/acme-challenge"
    """URI root path for the server provisioned resource."""
    # TODO: acme-spec doesn't specify token as base64-encoded value
    token = jose.Field(
        "token", encoder=jose.encode_b64jose, decoder=functools.partial(
            jose.decode_b64jose, size=TOKEN_SIZE, minimum=True))
    @property
    def good_token(self): # XXX: @token.decoder
        """Is `token` good?
        .. todo:: acme-spec wants "It MUST NOT contain any non-ASCII
            characters", but it should also warrant that it doesn't
            contain ".." or "/"...
        """
        # TODO: check that path combined with uri does not go above
        # URI_ROOT_PATH!
        return b'..' not in self.token and b'/' not in self.token
    @property
    def path(self):
        """Path (starting with '/') for provisioned resource."""
        # encode('token') yields the base64url form used in the URI.
        return '/' + self.URI_ROOT_PATH + '/' + self.encode('token')
@ChallengeResponse.register
class SimpleHTTPResponse(ChallengeResponse):
    """ACME "simpleHttp" challenge response.
    :ivar bool tls: Whether the provisioned resource is served over TLS.
    """
    typ = "simpleHttp"
    tls = jose.Field("tls", default=True, omitempty=True)
    URI_ROOT_PATH = SimpleHTTP.URI_ROOT_PATH
    _URI_TEMPLATE = "{scheme}://{domain}/" + URI_ROOT_PATH + "/{token}"
    CONTENT_TYPE = "application/jose+json"
    PORT = 80
    TLS_PORT = 443
    @property
    def scheme(self):
        """URL scheme for the provisioned resource."""
        return "https" if self.tls else "http"
    @property
    def port(self):
        """Port that the ACME client should be listening for validation."""
        return self.TLS_PORT if self.tls else self.PORT
    def uri(self, domain, chall):
        """Create an URI to the provisioned resource.
        Forms an URI to the HTTPS server provisioned resource
        (containing :attr:`~SimpleHTTP.token`).
        :param unicode domain: Domain name being verified.
        :param challenges.SimpleHTTP chall:
        """
        return self._URI_TEMPLATE.format(
            scheme=self.scheme, domain=domain, token=chall.encode("token"))
    def gen_resource(self, chall):
        """Generate provisioned resource.
        :param challenges.SimpleHTTP chall:
        :rtype: SimpleHTTPProvisionedResource
        """
        return SimpleHTTPProvisionedResource(token=chall.token, tls=self.tls)
    def gen_validation(self, chall, account_key, alg=jose.RS256, **kwargs):
        """Generate validation.
        :param challenges.SimpleHTTP chall:
        :param .JWK account_key: Private account key.
        :param .JWA alg:
        :returns: `.SimpleHTTPProvisionedResource` signed in `.JWS`
        :rtype: .JWS
        """
        return jose.JWS.sign(
            payload=self.gen_resource(chall).json_dumps(
                sort_keys=True).encode('utf-8'),
            key=account_key, alg=alg, **kwargs)
    def check_validation(self, validation, chall, account_public_key):
        """Check validation.
        :param .JWS validation:
        :param challenges.SimpleHTTP chall:
        :type account_public_key:
            `~cryptography.hazmat.primitives.asymmetric.rsa.RSAPublicKey`
            or
            `~cryptography.hazmat.primitives.asymmetric.dsa.DSAPublicKey`
            or
            `~cryptography.hazmat.primitives.asymmetric.ec.EllipticCurvePublicKey`
            wrapped in `.ComparableKey`
        :rtype: bool
        """
        if not validation.verify(key=account_public_key):
            return False
        try:
            resource = SimpleHTTPProvisionedResource.json_loads(
                validation.payload.decode('utf-8'))
        except jose.DeserializationError as error:
            logger.debug(error)
            return False
        # Both the echoed token and the tls flag must match.
        return resource.token == chall.token and resource.tls == self.tls
    def simple_verify(self, chall, domain, account_public_key, port=None):
        """Simple verify.
        According to the ACME specification, "the ACME server MUST
        ignore the certificate provided by the HTTPS server", so
        ``requests.get`` is called with ``verify=False``.
        :param challenges.SimpleHTTP chall: Corresponding challenge.
        :param unicode domain: Domain name being verified.
        :param account_public_key: Public key for the key pair
            being authorized. If ``None`` key verification is not
            performed!
        :type account_public_key:
            `~cryptography.hazmat.primitives.asymmetric.rsa.RSAPublicKey`
            or
            `~cryptography.hazmat.primitives.asymmetric.dsa.DSAPublicKey`
            or
            `~cryptography.hazmat.primitives.asymmetric.ec.EllipticCurvePublicKey`
            wrapped in `.ComparableKey`
        :param int port: Port used in the validation.
        :returns: ``True`` iff validation is successful, ``False``
            otherwise.
        :rtype: bool
        """
        # TODO: ACME specification defines URI template that doesn't
        # allow to use a custom port... Make sure port is not in the
        # request URI, if it's standard.
        if port is not None and port != self.port:
            # ``Logger.warn`` is a deprecated alias; use ``warning``.
            logger.warning(
                "Using non-standard port for SimpleHTTP verification: %s", port)
            domain += ":{0}".format(port)
        uri = self.uri(domain, chall)
        logger.debug("Verifying %s at %s...", chall.typ, uri)
        try:
            http_response = requests.get(uri, verify=False)
        except requests.exceptions.RequestException as error:
            logger.error("Unable to reach %s: %s", uri, error)
            return False
        logger.debug("Received %s: %s. Headers: %s", http_response,
                     http_response.text, http_response.headers)
        if self.CONTENT_TYPE != http_response.headers.get(
                "Content-Type", self.CONTENT_TYPE):
            return False
        try:
            validation = jose.JWS.json_loads(http_response.text)
        except jose.DeserializationError as error:
            logger.debug(error)
            return False
        return self.check_validation(validation, chall, account_public_key)
class SimpleHTTPProvisionedResource(jose.JSONObjectWithFields):
    """SimpleHTTP provisioned resource."""
    typ = fields.Fixed("type", SimpleHTTP.typ)
    # Reuse the challenge's token field definition (same name and codec).
    token = SimpleHTTP._fields["token"]
    # If the "tls" field is not included in the response, then
    # validation object MUST have its "tls" field set to "true".
    tls = jose.Field("tls", omitempty=False)
@Challenge.register
class DVSNI(DVChallenge):
    """ACME "dvsni" challenge.
    :ivar bytes token: Random data, **not** base64-encoded.
    """
    typ = "dvsni"
    PORT = 443
    """Port to perform DVSNI challenge."""
    TOKEN_SIZE = 128 / 8 # Based on the entropy value from the spec
    """Minimum size of the :attr:`token` in bytes."""
    token = jose.Field(
        "token", encoder=jose.encode_b64jose, decoder=functools.partial(
            jose.decode_b64jose, size=TOKEN_SIZE, minimum=True))
    def gen_response(self, account_key, alg=jose.RS256, **kwargs):
        """Generate response.
        :param .JWK account_key: Private account key.
        :rtype: .DVSNIResponse
        """
        # The response carries this whole challenge signed as a JWS.
        return DVSNIResponse(validation=jose.JWS.sign(
            payload=self.json_dumps(sort_keys=True).encode('utf-8'),
            key=account_key, alg=alg, **kwargs))
@ChallengeResponse.register
class DVSNIResponse(ChallengeResponse):
    """ACME "dvsni" challenge response.
    :ivar validation: `.JWS` wrapping the corresponding `.DVSNI` challenge.
    """
    typ = "dvsni"
    DOMAIN_SUFFIX = b".acme.invalid"
    """Domain name suffix."""
    PORT = DVSNI.PORT
    """Port to perform DVSNI challenge."""
    validation = jose.Field("validation", decoder=jose.JWS.from_json)
    @property
    def z(self):  # pylint: disable=invalid-name
        """The ``z`` parameter.
        :rtype: bytes
        """
        # z = hex(SHA-256(base64url(signature))) of the validation JWS.
        # Instance of 'Field' has no 'signature' member
        # pylint: disable=no-member
        return hashlib.sha256(self.validation.signature.encode(
            "signature").encode("utf-8")).hexdigest().encode()
    @property
    def z_domain(self):
        """Domain name for certificate subjectAltName.
        :rtype: bytes
        """
        # The 64-char hexdigest is split into two 32-char DNS labels.
        z = self.z  # pylint: disable=invalid-name
        return z[:32] + b'.' + z[32:] + self.DOMAIN_SUFFIX
    @property
    def chall(self):
        """Get challenge encoded in the `validation` payload.
        :rtype: challenges.DVSNI
        """
        # pylint: disable=no-member
        return DVSNI.json_loads(self.validation.payload.decode('utf-8'))
    def gen_cert(self, key=None, bits=2048):
        """Generate DVSNI certificate.
        :param OpenSSL.crypto.PKey key: Optional private key used in
            certificate generation. If not provided (``None``), then
            fresh key will be generated.
        :param int bits: Number of bits for newly generated key.
        :rtype: `tuple` of `OpenSSL.crypto.X509` and
            `OpenSSL.crypto.PKey`
        """
        if key is None:
            key = OpenSSL.crypto.PKey()
            key.generate_key(OpenSSL.crypto.TYPE_RSA, bits)
        return crypto_util.gen_ss_cert(key, [
            # z_domain is too big to fit into CN, hence first dummy domain
            'dummy', self.z_domain.decode()], force_san=True), key
    def probe_cert(self, domain, **kwargs):
        """Probe DVSNI challenge certificate.
        :param unicode domain:
        """
        if "host" not in kwargs:
            host = socket.gethostbyname(domain)
            # Use the module-level ``logger`` (was root ``logging``)
            # for consistency with the rest of this module.
            logger.debug('%s resolved to %s', domain, host)
            kwargs["host"] = host
        kwargs.setdefault("port", self.PORT)
        kwargs["name"] = self.z_domain
        # TODO: try different methods?
        # pylint: disable=protected-access
        return crypto_util.probe_sni(**kwargs)
    def verify_cert(self, cert):
        """Verify DVSNI challenge certificate."""
        # pylint: disable=protected-access
        sans = crypto_util._pyopenssl_cert_or_req_san(cert)
        # Use the module-level ``logger`` (was root ``logging``).
        logger.debug('Certificate %s. SANs: %s', cert.digest('sha1'), sans)
        return self.z_domain.decode() in sans
    def simple_verify(self, chall, domain, account_public_key,
                      cert=None, **kwargs):
        """Simple verify.
        Verify ``validation`` using ``account_public_key``, optionally
        probe DVSNI certificate and check using `verify_cert`.
        :param .challenges.DVSNI chall: Corresponding challenge.
        :param str domain: Domain name being validated.
        :type account_public_key:
            `~cryptography.hazmat.primitives.asymmetric.rsa.RSAPublicKey`
            or
            `~cryptography.hazmat.primitives.asymmetric.dsa.DSAPublicKey`
            or
            `~cryptography.hazmat.primitives.asymmetric.ec.EllipticCurvePublicKey`
            wrapped in `.ComparableKey`
        :param OpenSSL.crypto.X509 cert: Optional certificate. If not
            provided (``None``) certificate will be retrieved using
            `probe_cert`.
        :returns: ``True`` iff client's control of the domain has been
            verified, ``False`` otherwise.
        :rtype: bool
        """
        # pylint: disable=no-member
        if not self.validation.verify(key=account_public_key):
            return False
        # TODO: it's not checked that payload has exactly 2 fields!
        try:
            decoded_chall = self.chall
        except jose.DeserializationError as error:
            logger.debug(error, exc_info=True)
            return False
        if decoded_chall.token != chall.token:
            logger.debug("Wrong token: expected %r, found %r",
                         chall.token, decoded_chall.token)
            return False
        if cert is None:
            try:
                cert = self.probe_cert(domain=domain, **kwargs)
            except errors.Error as error:
                logger.debug(error, exc_info=True)
                return False
        return self.verify_cert(cert)
@Challenge.register
class RecoveryContact(ContinuityChallenge):
    """ACME "recoveryContact" challenge.
    :ivar unicode activation_url:
    :ivar unicode success_url:
    :ivar unicode contact:
    """
    typ = "recoveryContact"
    # All fields are optional; the server may provide any subset.
    activation_url = jose.Field("activationURL", omitempty=True)
    success_url = jose.Field("successURL", omitempty=True)
    contact = jose.Field("contact", omitempty=True)
@ChallengeResponse.register
class RecoveryContactResponse(ChallengeResponse):
    """ACME "recoveryContact" challenge response.
    :ivar unicode token:
    """
    typ = "recoveryContact"
    # Token is optional in this response.
    token = jose.Field("token", omitempty=True)
@Challenge.register
class ProofOfPossession(ContinuityChallenge):
    """ACME "proofOfPossession" challenge.
    :ivar .JWAAlgorithm alg:
    :ivar bytes nonce: Random data, **not** base64-encoded.
    :ivar hints: Various clues for the client (:class:`Hints`).
    """
    typ = "proofOfPossession"
    # Required size of the decoded nonce, in bytes.
    NONCE_SIZE = 16
    class Hints(jose.JSONObjectWithFields):
        """Hints for "proofOfPossession" challenge.
        :ivar jwk: JSON Web Key (:class:`acme.jose.JWK`)
        :ivar tuple cert_fingerprints: `tuple` of `unicode`
        :ivar tuple certs: Sequence of :class:`acme.jose.ComparableX509`
            certificates.
        :ivar tuple subject_key_identifiers: `tuple` of `unicode`
        :ivar tuple issuers: `tuple` of `unicode`
        :ivar tuple authorized_for: `tuple` of `unicode`
        """
        # Every field except "jwk" is optional (omitempty) and defaults
        # to an empty tuple when the server supplies no hint.
        jwk = jose.Field("jwk", decoder=jose.JWK.from_json)
        cert_fingerprints = jose.Field(
            "certFingerprints", omitempty=True, default=())
        certs = jose.Field("certs", omitempty=True, default=())
        subject_key_identifiers = jose.Field(
            "subjectKeyIdentifiers", omitempty=True, default=())
        serial_numbers = jose.Field("serialNumbers", omitempty=True, default=())
        issuers = jose.Field("issuers", omitempty=True, default=())
        authorized_for = jose.Field("authorizedFor", omitempty=True, default=())
        # Certificates round-trip through jose's cert encoding as a tuple.
        @certs.encoder
        def certs(value): # pylint: disable=missing-docstring,no-self-argument
            return tuple(jose.encode_cert(cert) for cert in value)
        @certs.decoder
        def certs(value): # pylint: disable=missing-docstring,no-self-argument
            return tuple(jose.decode_cert(cert) for cert in value)
    alg = jose.Field("alg", decoder=jose.JWASignature.from_json)
    nonce = jose.Field(
        "nonce", encoder=jose.encode_b64jose, decoder=functools.partial(
            jose.decode_b64jose, size=NONCE_SIZE))
    hints = jose.Field("hints", decoder=Hints.from_json)
@ChallengeResponse.register
class ProofOfPossessionResponse(ChallengeResponse):
    """ACME "proofOfPossession" challenge response.
    :ivar bytes nonce: Random data, **not** base64-encoded.
    :ivar acme.other.Signature signature: Signature of this message.
    """
    typ = "proofOfPossession"
    # Mirrors the nonce size required by the corresponding challenge.
    NONCE_SIZE = ProofOfPossession.NONCE_SIZE
    nonce = jose.Field(
        "nonce", encoder=jose.encode_b64jose, decoder=functools.partial(
            jose.decode_b64jose, size=NONCE_SIZE))
    signature = jose.Field("signature", decoder=other.Signature.from_json)
    def verify(self):
        """Verify that ``signature`` is a valid signature over ``nonce``."""
        # self.signature is not Field | pylint: disable=no-member
        return self.signature.verify(self.nonce)
@Challenge.register
class DNS(DVChallenge):
    """ACME "dns" challenge.
    :ivar unicode token:
    """
    typ = "dns"
    LABEL = "_acme-challenge"
    """Label clients prepend to the domain name being validated."""
    TOKEN_SIZE = 128 / 8 # Based on the entropy value from the spec
    """Minimum size of the :attr:`token` in bytes."""
    token = jose.Field(
        "token", encoder=jose.encode_b64jose, decoder=functools.partial(
            jose.decode_b64jose, size=TOKEN_SIZE, minimum=True))
    def gen_validation(self, account_key, alg=jose.RS256, **kwargs):
        """Generate validation.
        :param .JWK account_key: Private account key.
        :param .JWA alg:
        :returns: This challenge wrapped in `.JWS`
        :rtype: .JWS
        """
        return jose.JWS.sign(
            payload=self.json_dumps(sort_keys=True).encode('utf-8'),
            key=account_key, alg=alg, **kwargs)
    def check_validation(self, validation, account_public_key):
        """Check validation.
        :param JWS validation:
        :type account_public_key:
            `~cryptography.hazmat.primitives.asymmetric.rsa.RSAPublicKey`
            or
            `~cryptography.hazmat.primitives.asymmetric.dsa.DSAPublicKey`
            or
            `~cryptography.hazmat.primitives.asymmetric.ec.EllipticCurvePublicKey`
            wrapped in `.ComparableKey`
        :rtype: bool
        """
        if not validation.verify(key=account_public_key):
            return False
        try:
            # The signed payload must deserialize back to this challenge.
            return self == self.json_loads(
                validation.payload.decode('utf-8'))
        except jose.DeserializationError as error:
            logger.debug("Checking validation for DNS failed: %s", error)
            return False
    def gen_response(self, account_key, **kwargs):
        """Generate response.
        :param .JWK account_key: Private account key.
        :param .JWA alg:
        :rtype: DNSResponse
        """
        # BUG FIX: gen_validation is a bound method, so ``self`` must
        # not be passed explicitly — the original call shifted
        # ``account_key`` into the ``alg`` parameter slot.
        return DNSResponse(validation=self.gen_validation(
            account_key, **kwargs))
    def validation_domain_name(self, name):
        """Domain name for TXT validation record.
        :param unicode name: Domain name being validated.
        """
        return "{0}.{1}".format(self.LABEL, name)
@ChallengeResponse.register
class DNSResponse(ChallengeResponse):
    """ACME "dns" challenge response.

    :param JWS validation: Signed validation payload produced by the client.

    """
    typ = "dns"

    # JWS wrapping the serialized challenge; decoded from its JSON form.
    validation = jose.Field("validation", decoder=jose.JWS.from_json)

    def check_validation(self, chall, account_public_key):
        """Check validation.

        Delegates to the challenge's own ``check_validation`` with the
        wrapped JWS.

        :param challenges.DNS chall:

        :type account_public_key:
            `~cryptography.hazmat.primitives.asymmetric.rsa.RSAPublicKey`
            or
            `~cryptography.hazmat.primitives.asymmetric.dsa.DSAPublicKey`
            or
            `~cryptography.hazmat.primitives.asymmetric.ec.EllipticCurvePublicKey`
            wrapped in `.ComparableKey`

        :rtype: bool

        """
        return chall.check_validation(self.validation, account_public_key)
| {
"repo_name": "hsduk/lets-encrypt-preview",
"path": "acme/acme/challenges.py",
"copies": "2",
"size": "20820",
"license": "apache-2.0",
"hash": 7212515072836097000,
"line_mean": 30.786259542,
"line_max": 82,
"alpha_frac": 0.6280979827,
"autogenerated": false,
"ratio": 3.946171341925701,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5574269324625701,
"avg_score": null,
"num_lines": null
} |
"""ACME JOSE JWS."""
from acme import errors
from acme import jose
class Header(jose.Header):
    """ACME JOSE Header.

    Extends the plain JOSE header with the anti-replay ``nonce`` field.

    .. todo:: Implement ``acmePath``.

    """
    nonce = jose.Field('nonce', omitempty=True)

    @classmethod
    def validate_nonce(cls, nonce):
        """Validate nonce.

        :returns: ``None`` if ``nonce`` is valid, decoding errors otherwise.

        """
        try:
            jose.b64decode(nonce)
        except (ValueError, TypeError) as error:
            return error
        return None

    @nonce.decoder
    def nonce(value):  # pylint: disable=missing-docstring,no-self-argument
        problem = Header.validate_nonce(value)
        if problem is None:
            return value
        # TODO: custom error
        raise errors.Error("Invalid nonce: {0}".format(problem))
class Signature(jose.Signature):
    """ACME Signature."""
    __slots__ = jose.Signature._orig_slots  # pylint: disable=no-member

    # TODO: decoder/encoder should accept cls? Otherwise, subclassing
    # JSONObjectWithFields is tricky...
    header_cls = Header
    # Override the base 'header' field so decoding produces the ACME
    # Header subclass (with nonce support) instead of the plain one.
    header = jose.Field(
        'header', omitempty=True, default=header_cls(),
        decoder=header_cls.from_json)
# TODO: decoder should check that nonce is in the protected header
class JWS(jose.JWS):
    """ACME JWS."""
    signature_cls = Signature
    __slots__ = jose.JWS._orig_slots  # pylint: disable=no-member

    @classmethod
    def sign(cls, payload, key, alg, nonce):  # pylint: disable=arguments-differ
        """Sign ``payload``, forcing ``nonce`` into the protected header."""
        return super(JWS, cls).sign(payload, key=key, alg=alg,
                                    protect=frozenset(['nonce']), nonce=nonce)
| {
"repo_name": "digideskio/lets-encrypt-preview",
"path": "acme/jws.py",
"copies": "1",
"size": "1679",
"license": "apache-2.0",
"hash": -4647342334307890000,
"line_mean": 27.4576271186,
"line_max": 80,
"alpha_frac": 0.6098868374,
"autogenerated": false,
"ratio": 3.8955916473317864,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5005478484731787,
"avg_score": null,
"num_lines": null
} |
"""ACME JOSE JWS."""
from acme import jose
class Header(jose.Header):
    """ACME JOSE Header.

    .. todo:: Implement ``acmePath``.

    """
    # Anti-replay nonce; base64url-encoded on the wire.
    nonce = jose.Field('nonce', omitempty=True, encoder=jose.encode_b64jose)

    @nonce.decoder
    def nonce(value):  # pylint: disable=missing-docstring,no-self-argument
        try:
            return jose.decode_b64jose(value)
        except jose.DeserializationError as error:
            # TODO: custom error
            raise jose.DeserializationError("Invalid nonce: {0}".format(error))
class Signature(jose.Signature):
    """ACME Signature."""
    __slots__ = jose.Signature._orig_slots  # pylint: disable=no-member

    # TODO: decoder/encoder should accept cls? Otherwise, subclassing
    # JSONObjectWithFields is tricky...
    header_cls = Header
    # Override the base 'header' field so decoding produces the ACME
    # Header subclass (with nonce support) instead of the plain one.
    header = jose.Field(
        'header', omitempty=True, default=header_cls(),
        decoder=header_cls.from_json)
# TODO: decoder should check that nonce is in the protected header
class JWS(jose.JWS):
    """ACME JWS."""
    signature_cls = Signature
    __slots__ = jose.JWS._orig_slots  # pylint: disable=no-member

    @classmethod
    def sign(cls, payload, key, alg, nonce):  # pylint: disable=arguments-differ
        """Sign ``payload``, forcing ``nonce`` into the protected header."""
        return super(JWS, cls).sign(payload, key=key, alg=alg,
                                    protect=frozenset(['nonce']), nonce=nonce)
| {
"repo_name": "sjerdo/letsencrypt",
"path": "acme/acme/jws.py",
"copies": "64",
"size": "1380",
"license": "apache-2.0",
"hash": -33666575031580732,
"line_mean": 30.3636363636,
"line_max": 80,
"alpha_frac": 0.6369565217,
"autogenerated": false,
"ratio": 3.6507936507936507,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0002805836139169472,
"num_lines": 44
} |
"""ACME JSON fields."""
import logging
import josepy as jose
import pyrfc3339
logger = logging.getLogger(__name__)
class Fixed(jose.Field):
    """JSON field constrained to a single fixed value."""

    def __init__(self, json_name, value):
        self.value = value
        super(Fixed, self).__init__(
            json_name=json_name, default=value, omitempty=False)

    def decode(self, value):
        """Accept only the fixed value; anything else is an error."""
        if value == self.value:
            return self.value
        raise jose.DeserializationError('Expected {0!r}'.format(self.value))

    def encode(self, value):
        """Serialize ``value``, warning loudly if it overrides the fixed one."""
        if value != self.value:
            logger.warning(
                'Overriding fixed field (%s) with %r', self.json_name, value)
        return value
class RFC3339Field(jose.Field):
    """RFC3339 field encoder/decoder.

    Handles decoding/encoding between RFC3339 strings and aware (not
    naive) `datetime.datetime` objects
    (e.g. ``datetime.datetime.now(pytz.utc)``).

    """
    @classmethod
    def default_encoder(cls, value):
        # pyrfc3339 serializes an aware datetime to an RFC3339 string.
        return pyrfc3339.generate(value)

    @classmethod
    def default_decoder(cls, value):
        try:
            return pyrfc3339.parse(value)
        except ValueError as error:
            # Surface parse failures as JOSE deserialization errors.
            raise jose.DeserializationError(error)
class Resource(jose.Field):
    """Resource MITM field."""

    def __init__(self, resource_type, *args, **kwargs):
        self.resource_type = resource_type
        super(Resource, self).__init__(
            'resource', default=resource_type, *args, **kwargs)

    def decode(self, value):
        """Reject any resource type other than the expected one."""
        if value == self.resource_type:
            return value
        raise jose.DeserializationError(
            'Wrong resource type: {0} instead of {1}'.format(
                value, self.resource_type))
| {
"repo_name": "letsencrypt/letsencrypt",
"path": "acme/acme/fields.py",
"copies": "2",
"size": "1744",
"license": "apache-2.0",
"hash": 4220739702821247500,
"line_mean": 25.8307692308,
"line_max": 80,
"alpha_frac": 0.6072247706,
"autogenerated": false,
"ratio": 3.990846681922197,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0001899335232668566,
"num_lines": 65
} |
"""ACME JSON fields."""
import logging
import pyrfc3339
from acme import jose
logger = logging.getLogger(__name__)
class Fixed(jose.Field):
    """Fixed field."""

    def __init__(self, json_name, value):
        self.value = value
        super(Fixed, self).__init__(
            json_name=json_name, default=value, omitempty=False)

    def decode(self, value):
        # Any deserialized value other than the fixed one is an error.
        if value != self.value:
            raise jose.DeserializationError('Expected {0!r}'.format(self.value))
        return self.value

    def encode(self, value):
        # Encoding tolerates an override, but logs it loudly.
        if value != self.value:
            logger.warning(
                'Overriding fixed field (%s) with %r', self.json_name, value)
        return value
class RFC3339Field(jose.Field):
    """RFC3339 field encoder/decoder.

    Handles decoding/encoding between RFC3339 strings and aware (not
    naive) `datetime.datetime` objects
    (e.g. ``datetime.datetime.now(pytz.utc)``).

    """
    @classmethod
    def default_encoder(cls, value):
        # pyrfc3339 serializes an aware datetime to an RFC3339 string.
        return pyrfc3339.generate(value)

    @classmethod
    def default_decoder(cls, value):
        try:
            return pyrfc3339.parse(value)
        except ValueError as error:
            # Surface parse failures as JOSE deserialization errors.
            raise jose.DeserializationError(error)
class Resource(jose.Field):
    """Resource MITM field."""

    def __init__(self, resource_type, *args, **kwargs):
        self.resource_type = resource_type
        super(Resource, self).__init__(
            'resource', default=resource_type, *args, **kwargs)

    def decode(self, value):
        # Only the configured resource type is accepted.
        if value != self.resource_type:
            raise jose.DeserializationError(
                'Wrong resource type: {0} instead of {1}'.format(
                    value, self.resource_type))
        return value
| {
"repo_name": "jsha/letsencrypt",
"path": "acme/acme/fields.py",
"copies": "9",
"size": "1745",
"license": "apache-2.0",
"hash": 8316138989773572000,
"line_mean": 25.4393939394,
"line_max": 80,
"alpha_frac": 0.6068767908,
"autogenerated": false,
"ratio": 4.002293577981652,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9109170368781652,
"avg_score": null,
"num_lines": null
} |
"""ACME protocol client class and helper functions."""
import logging
import os
import pkg_resources
import M2Crypto
import zope.component
from acme import jose
from acme.jose import jwk
from letsencrypt import account
from letsencrypt import auth_handler
from letsencrypt import continuity_auth
from letsencrypt import crypto_util
from letsencrypt import errors
from letsencrypt import interfaces
from letsencrypt import le_util
from letsencrypt import network
from letsencrypt import reverter
from letsencrypt import revoker
from letsencrypt import storage
from letsencrypt.display import ops as display_ops
from letsencrypt.display import enhancements
class Client(object):
    """ACME protocol client.

    :ivar network: Network object for sending and receiving messages
    :type network: :class:`letsencrypt.network.Network`

    :ivar account: Account object used for registration
    :type account: :class:`letsencrypt.account.Account`

    :ivar auth_handler: Object that supports the IAuthenticator interface.
        auth_handler contains both a dv_authenticator and a
        continuity_authenticator
    :type auth_handler: :class:`letsencrypt.auth_handler.AuthHandler`

    :ivar installer: Object supporting the IInstaller interface.
    :type installer: :class:`letsencrypt.interfaces.IInstaller`

    :ivar config: Configuration.
    :type config: :class:`~letsencrypt.interfaces.IConfig`

    """

    def __init__(self, config, account_, dv_auth, installer):
        """Initialize a client.

        :param dv_auth: IAuthenticator that can solve the
            :const:`letsencrypt.constants.DV_CHALLENGES`.
            The :meth:`~letsencrypt.interfaces.IAuthenticator.prepare`
            must have already been run.
        :type dv_auth: :class:`letsencrypt.interfaces.IAuthenticator`

        """
        self.account = account_
        self.installer = installer

        # TODO: Allow for other alg types besides RS256
        self.network = network.Network(
            config.server, jwk.JWKRSA.load(self.account.key.pem),
            verify_ssl=(not config.no_verify_ssl))

        self.config = config

        # TODO: Check if self.config.enroll_autorenew is None. If
        # so, set it based to the default: figure out if dv_auth is
        # standalone (then default is False, otherwise default is True)

        if dv_auth is not None:
            cont_auth = continuity_auth.ContinuityAuthenticator(config,
                                                                installer)
            self.auth_handler = auth_handler.AuthHandler(
                dv_auth, cont_auth, self.network, self.account)
        else:
            self.auth_handler = None

    def register(self):
        """New Registration with the ACME server."""
        self.account = self.network.register_from_account(self.account)

        if self.account.terms_of_service is not None:
            if not self.config.tos:
                # TODO: Replace with self.account.terms_of_service
                eula = pkg_resources.resource_string("letsencrypt", "EULA")
                agree = zope.component.getUtility(interfaces.IDisplay).yesno(
                    eula, "Agree", "Cancel")
            else:
                agree = True

            if agree:
                self.account.regr = self.network.agree_to_tos(self.account.regr)
            else:
                # What is the proper response here...
                raise errors.LetsEncryptClientError("Must agree to TOS")

        self.account.save()
        self._report_new_account()

    def _report_new_account(self):
        """Informs the user about their new Let's Encrypt account."""
        reporter = zope.component.getUtility(interfaces.IReporter)
        reporter.add_message(
            "Your account credentials have been saved in your Let's Encrypt "
            "configuration directory at {0}. You should make a secure backup "
            "of this folder now. This configuration directory will also "
            "contain certificates and private keys obtained by Let's Encrypt "
            "so making regular backups of this folder is ideal.".format(
                self.config.config_dir),
            reporter.MEDIUM_PRIORITY, True)

        assert self.account.recovery_token is not None
        recovery_msg = ("If you lose your account credentials, you can recover "
                        "them using the token \"{0}\". You must write that down "
                        "and put it in a safe place.".format(
                            self.account.recovery_token))
        if self.account.email is not None:
            recovery_msg += (" Another recovery method will be e-mails sent to "
                             "{0}.".format(self.account.email))
        reporter.add_message(recovery_msg, reporter.HIGH_PRIORITY, True)

    def obtain_certificate(self, domains, csr=None):
        """Obtains a certificate from the ACME server.

        :meth:`.register` must be called before :meth:`.obtain_certificate`

        .. todo:: This function does not currently handle CSR correctly.

        :param set domains: domains to get a certificate

        :param csr: CSR must contain requested domains, the key used to generate
            this CSR can be different than self.authkey
        :type csr: :class:`CSR`

        :returns: Certificate, private key, and certificate chain (all
            PEM-encoded).
        :rtype: `tuple` of `str`

        """
        if self.auth_handler is None:
            msg = ("Unable to obtain certificate because authenticator is "
                   "not set.")
            logging.warning(msg)
            raise errors.LetsEncryptClientError(msg)
        if self.account.regr is None:
            raise errors.LetsEncryptClientError(
                "Please register with the ACME server first.")

        # Perform Challenges/Get Authorizations
        authzr = self.auth_handler.get_authorizations(domains)

        # Create CSR from names
        # NOTE: the ``csr`` argument is currently ignored and rebound here
        # (see the todo in the docstring).
        cert_key = crypto_util.init_save_key(
            self.config.rsa_key_size, self.config.key_dir)
        csr = crypto_util.init_save_csr(
            cert_key, domains, self.config.cert_dir)

        # Retrieve certificate
        certr = self.network.request_issuance(
            jose.ComparableX509(
                M2Crypto.X509.load_request_der_string(csr.data)),
            authzr)

        cert_pem = certr.body.as_pem()
        chain_pem = None
        if certr.cert_chain_uri is not None:
            chain_pem = self.network.fetch_chain(certr)

        if chain_pem is None:
            # XXX: just to stop RenewableCert from complaining; this is
            # probably not a good solution
            chain_pem = ""
        else:
            chain_pem = chain_pem.as_pem()

        return cert_pem, cert_key.pem, chain_pem

    def obtain_and_enroll_certificate(
            self, domains, authenticator, installer, plugins, csr=None):
        """Obtain and enroll certificate.

        Get a new certificate for the specified domains using the specified
        authenticator and installer, and then create a new renewable lineage
        containing it.

        :param list domains: Domains to request.
        :param authenticator: The authenticator to use.
        :type authenticator: :class:`letsencrypt.interfaces.IAuthenticator`

        :param installer: The installer to use.
        :type installer: :class:`letsencrypt.interfaces.IInstaller`

        :param plugins: A PluginsFactory object.

        :param str csr: A preexisting CSR to use with this request.

        :returns: A new :class:`letsencrypt.storage.RenewableCert` instance
            referred to the enrolled cert lineage, or False if the cert could
            not be obtained.

        """
        cert, privkey, chain = self.obtain_certificate(domains, csr)

        self.config.namespace.authenticator = plugins.find_init(
            authenticator).name
        if installer is not None:
            self.config.namespace.installer = plugins.find_init(installer).name

        # XXX: We clearly need a more general and correct way of getting
        # options into the configobj for the RenewableCert instance.
        # This is a quick-and-dirty way to do it to allow integration
        # testing to start. (Note that the config parameter to new_lineage
        # ideally should be a ConfigObj, but in this case a dict will be
        # accepted in practice.)
        params = vars(self.config.namespace)
        config = {"renewer_config_file":
                  params["renewer_config_file"]} if "renewer_config_file" in params else None
        renewable_cert = storage.RenewableCert.new_lineage(domains[0], cert, privkey,
                                                           chain, params, config)
        self._report_renewal_status(renewable_cert)
        return renewable_cert

    def _report_renewal_status(self, cert):
        # pylint: disable=no-self-use
        """Informs the user about automatic renewal and deployment.

        :param cert: Newly issued certificate
        :type cert: :class:`letsencrypt.storage.RenewableCert`

        """
        # Missing "autorenew"/"autodeploy" keys are treated as enabled.
        if ("autorenew" not in cert.configuration
                or cert.configuration.as_bool("autorenew")):
            if ("autodeploy" not in cert.configuration or
                    cert.configuration.as_bool("autodeploy")):
                msg = "Automatic renewal and deployment has "
            else:
                msg = "Automatic renewal but not automatic deployment has "
        else:
            if ("autodeploy" not in cert.configuration or
                    cert.configuration.as_bool("autodeploy")):
                msg = "Automatic deployment but not automatic renewal has "
            else:
                msg = "Automatic renewal and deployment has not "

        msg += ("been enabled for your certificate. These settings can be "
                "configured in the directories under {0}.").format(
                    cert.configuration["renewal_configs_dir"])
        reporter = zope.component.getUtility(interfaces.IReporter)
        reporter.add_message(msg, reporter.LOW_PRIORITY, True)

    def save_certificate(self, certr, cert_path, chain_path):
        # pylint: disable=no-self-use
        """Saves the certificate received from the ACME server.

        :param certr: ACME "certificate" resource.
        :type certr: :class:`acme.messages.Certificate`

        :param str cert_path: Path to attempt to save the cert file
        :param str chain_path: Path to attempt to save the chain file

        :returns: cert_path, chain_path (absolute paths to the actual files)
        :rtype: `tuple` of `str`

        :raises IOError: If unable to find room to write the cert files

        """
        # try finally close
        cert_chain_abspath = None
        cert_file, act_cert_path = le_util.unique_file(cert_path, 0o644)
        # TODO: Except
        cert_pem = certr.body.as_pem()
        try:
            cert_file.write(cert_pem)
        finally:
            cert_file.close()
        logging.info("Server issued certificate; certificate written to %s",
                     act_cert_path)

        if certr.cert_chain_uri is not None:
            # TODO: Except
            chain_cert = self.network.fetch_chain(certr)

            if chain_cert is not None:
                chain_file, act_chain_path = le_util.unique_file(
                    chain_path, 0o644)
                chain_pem = chain_cert.as_pem()

                try:
                    chain_file.write(chain_pem)
                finally:
                    chain_file.close()

                logging.info("Cert chain written to %s", act_chain_path)

                # This expects a valid chain file
                cert_chain_abspath = os.path.abspath(act_chain_path)

        return os.path.abspath(act_cert_path), cert_chain_abspath

    def deploy_certificate(self, domains, privkey_path, cert_path, chain_path):
        """Install certificate

        :param list domains: list of domains to install the certificate
        :param str privkey_path: path to certificate private key
        :param str cert_path: certificate file path (optional)
        :param str chain_path: chain file path

        """
        if self.installer is None:
            # BUG FIX: the two literals previously concatenated without a
            # space, logging "...unable to deploythe certificate".
            logging.warning("No installer specified, client is unable to "
                            "deploy the certificate")
            raise errors.LetsEncryptClientError("No installer available")

        chain_path = None if chain_path is None else os.path.abspath(chain_path)

        for dom in domains:
            # TODO: Provide a fullchain reference for installers like
            # nginx that want it
            self.installer.deploy_cert(
                dom, os.path.abspath(cert_path),
                os.path.abspath(privkey_path), chain_path)

        self.installer.save("Deployed Let's Encrypt Certificate")
        # sites may have been enabled / final cleanup
        self.installer.restart()

        display_ops.success_installation(domains)

    def enhance_config(self, domains, redirect=None):
        """Enhance the configuration.

        .. todo:: This needs to handle the specific enhancements offered by the
            installer. We will also have to find a method to pass in the chosen
            values efficiently.

        :param list domains: list of domains to configure

        :param redirect: If traffic should be forwarded from HTTP to HTTPS.
        :type redirect: bool or None

        :raises letsencrypt.errors.LetsEncryptClientError: if
            no installer is specified in the client.

        """
        if self.installer is None:
            logging.warning("No installer is specified, there isn't any "
                            "configuration to enhance.")
            raise errors.LetsEncryptClientError("No installer available")

        if redirect is None:
            redirect = enhancements.ask("redirect")

        if redirect:
            self.redirect_to_ssl(domains)

    def redirect_to_ssl(self, domains):
        """Redirect all traffic from HTTP to HTTPS

        :param vhost: list of ssl_vhosts
        :type vhost: :class:`letsencrypt.interfaces.IInstaller`

        """
        for dom in domains:
            try:
                self.installer.enhance(dom, "redirect")
            except errors.LetsEncryptConfiguratorError:
                # logging.warn is a deprecated alias of logging.warning.
                logging.warning("Unable to perform redirect for %s", dom)

        self.installer.save("Add Redirects")
        self.installer.restart()
def validate_key_csr(privkey, csr=None):
    """Validate Key and CSR files.

    Verifies that the client key and csr arguments are valid and correspond to
    one another. This does not currently check the names in the CSR due to
    the inability to read SANs from CSRs in python crypto libraries.

    If csr is left as None, only the key will be validated.

    :param privkey: Key associated with CSR
    :type privkey: :class:`letsencrypt.le_util.Key`

    :param csr: CSR
    :type csr: :class:`letsencrypt.le_util.CSR`

    :raises letsencrypt.errors.LetsEncryptClientError: when
        validation fails

    """
    # TODO: Handle all of these problems appropriately
    # The client can eventually do things like prompt the user
    # and allow the user to take more appropriate actions

    # Key must be readable and valid.
    if privkey.pem and not crypto_util.valid_privkey(privkey.pem):
        raise errors.LetsEncryptClientError(
            "The provided key is not a valid key")

    if csr:
        if csr.form == "der":
            # Normalize DER input to PEM so the checks below handle one form.
            csr_obj = M2Crypto.X509.load_request_der_string(csr.data)
            csr = le_util.CSR(csr.file, csr_obj.as_pem(), "der")

        # If CSR is provided, it must be readable and valid.
        if csr.data and not crypto_util.valid_csr(csr.data):
            raise errors.LetsEncryptClientError(
                "The provided CSR is not a valid CSR")

        # If both CSR and key are provided, the key must be the same key used
        # in the CSR.
        if csr.data and privkey.pem:
            if not crypto_util.csr_matches_pubkey(
                    csr.data, privkey.pem):
                raise errors.LetsEncryptClientError(
                    "The key and CSR do not match")
def determine_account(config):
    """Determine which account to use.

    Will create an account if necessary.

    :param config: Configuration object
    :type config: :class:`letsencrypt.interfaces.IConfig`

    :returns: Account
    :rtype: :class:`letsencrypt.account.Account`

    """
    existing = account.Account.get_accounts(config)
    if not existing:
        # No account stored yet: create one interactively.
        return account.Account.from_prompts(config)
    if len(existing) == 1:
        return existing[0]
    # Several candidates: let the user pick.
    return display_ops.choose_account(existing)
def rollback(default_installer, checkpoints, config, plugins):
    """Revert configuration the specified number of checkpoints.

    :param int checkpoints: Number of checkpoints to revert.

    :param config: Configuration.
    :type config: :class:`letsencrypt.interfaces.IConfig`

    """
    # Misconfigurations are only a slight problem... let the user roll back.
    installer = display_ops.pick_installer(
        config, default_installer, plugins, question="Which installer "
        "should be used for rollback?")

    # No installer found means there is nothing to roll back.
    if installer is None:
        return

    installer.rollback_checkpoints(checkpoints)
    installer.restart()
def revoke(default_installer, config, plugins, no_confirm, cert, authkey):
    """Revoke certificates.

    :param config: Configuration.
    :type config: :class:`letsencrypt.interfaces.IConfig`

    """
    installer = display_ops.pick_installer(
        config, default_installer, plugins, question="Which installer "
        "should be used for certificate revocation?")

    revocation_tool = revoker.Revoker(installer, config, no_confirm)
    # Selection order: explicit cert first, then key, then interactive menu.
    if cert is not None:
        revocation_tool.revoke_from_cert(cert[0])
    elif authkey is not None:
        revocation_tool.revoke_from_key(le_util.Key(authkey[0], authkey[1]))
    else:
        revocation_tool.revoke_from_menu()
def view_config_changes(config):
    """View checkpoints and associated configuration changes.

    .. note:: This assumes that the installation is using a Reverter object.

    :param config: Configuration.
    :type config: :class:`letsencrypt.interfaces.IConfig`

    """
    state_reverter = reverter.Reverter(config)
    state_reverter.recovery_routine()
    state_reverter.view_config_changes()
| {
"repo_name": "digideskio/lets-encrypt-preview",
"path": "letsencrypt/client.py",
"copies": "1",
"size": "18743",
"license": "apache-2.0",
"hash": 3064397763512932000,
"line_mean": 36.7122736419,
"line_max": 93,
"alpha_frac": 0.6327695673,
"autogenerated": false,
"ratio": 4.326638965835642,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5459408533135641,
"avg_score": null,
"num_lines": null
} |
"""ACME protocol implementation.
This module is an implementation of the `ACME protocol`_.
.. _`ACME protocol`: https://ietf-wg-acme.github.io/acme
"""
import sys
import warnings
# This code exists to keep backwards compatibility with people using acme.jose
# before it became the standalone josepy package.
#
# It is based on
# https://github.com/requests/requests/blob/1278ecdf71a312dc2268f3bfc0aabfab3c006dcf/requests/packages.py
import josepy as jose
# Alias every already-imported josepy module under acme.jose.* so that
# legacy imports keep working and module identity is preserved.
for mod in list(sys.modules):
    # This traversal is apparently necessary such that the identities are
    # preserved (acme.jose.* is josepy.*)
    if mod == 'josepy' or mod.startswith('josepy.'):
        sys.modules['acme.' + mod.replace('josepy', 'jose', 1)] = sys.modules[mod]
# This class takes a similar approach to the cryptography project to deprecate attributes
# in public modules. See the _ModuleWithDeprecation class here:
# https://github.com/pyca/cryptography/blob/91105952739442a74582d3e62b3d2111365b0dc7/src/cryptography/utils.py#L129
class _TLSSNI01DeprecationModule(object):
"""
Internal class delegating to a module, and displaying warnings when
attributes related to TLS-SNI-01 are accessed.
"""
def __init__(self, module):
self.__dict__['_module'] = module
def __getattr__(self, attr):
if 'TLSSNI01' in attr:
warnings.warn('{0} attribute is deprecated, and will be removed soon.'.format(attr),
DeprecationWarning, stacklevel=2)
return getattr(self._module, attr)
def __setattr__(self, attr, value): # pragma: no cover
setattr(self._module, attr, value)
def __delattr__(self, attr): # pragma: no cover
delattr(self._module, attr)
def __dir__(self): # pragma: no cover
return ['_module'] + dir(self._module)
| {
"repo_name": "letsencrypt/letsencrypt",
"path": "acme/acme/__init__.py",
"copies": "2",
"size": "1820",
"license": "apache-2.0",
"hash": 2913773559539316700,
"line_mean": 35.4,
"line_max": 115,
"alpha_frac": 0.689010989,
"autogenerated": false,
"ratio": 3.5756385068762278,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0006693716446542183,
"num_lines": 50
} |
"""ACME protocol messages."""
from acme import challenges
from acme import fields
from acme import jose
class Error(jose.JSONObjectWithFields, Exception):
    """ACME error.

    https://tools.ietf.org/html/draft-ietf-appsawg-http-problem-00

    """
    ERROR_TYPE_NAMESPACE = 'urn:acme:error:'
    ERROR_TYPE_DESCRIPTIONS = {
        'malformed': 'The request message was malformed',
        'unauthorized': 'The client lacks sufficient authorization',
        'serverInternal': 'The server experienced an internal error',
        'badCSR': 'The CSR is unacceptable (e.g., due to a short key)',
    }

    typ = jose.Field('type')
    title = jose.Field('title', omitempty=True)
    detail = jose.Field('detail')

    @typ.encoder
    def typ(value):  # pylint: disable=missing-docstring,no-self-argument
        # Bare error names are prefixed with the ACME URN namespace on the wire.
        return Error.ERROR_TYPE_NAMESPACE + value

    @typ.decoder
    def typ(value):  # pylint: disable=missing-docstring,no-self-argument
        # pylint thinks isinstance(value, Error), so startswith is not found
        # pylint: disable=no-member
        if not value.startswith(Error.ERROR_TYPE_NAMESPACE):
            raise jose.DeserializationError('Missing error type prefix')

        # Strip the namespace and require a known error name.
        without_prefix = value[len(Error.ERROR_TYPE_NAMESPACE):]
        if without_prefix not in Error.ERROR_TYPE_DESCRIPTIONS:
            raise jose.DeserializationError('Error type not recognized')

        return without_prefix

    @property
    def description(self):
        """Hardcoded error description based on its type."""
        return self.ERROR_TYPE_DESCRIPTIONS[self.typ]

    def __str__(self):
        if self.typ is not None:
            return ' :: '.join([self.typ, self.description, self.detail])
        else:
            return str(self.detail)
class _Constant(jose.JSONDeSerializable):
    """ACME constant.

    Subclasses keep a ``POSSIBLE_NAMES`` registry mapping each name to the
    shared instance created for it, so deserialization returns singletons.

    """
    __slots__ = ('name',)
    POSSIBLE_NAMES = NotImplemented

    def __init__(self, name):
        self.POSSIBLE_NAMES[name] = self
        self.name = name

    def to_partial_json(self):
        """Serialize to the constant's bare name."""
        return self.name

    @classmethod
    def from_json(cls, value):
        """Look up the shared instance registered for ``value``."""
        if value in cls.POSSIBLE_NAMES:
            return cls.POSSIBLE_NAMES[value]
        raise jose.DeserializationError(
            '{0} not recognized'.format(cls.__name__))

    def __repr__(self):
        return '{0}({1})'.format(self.__class__.__name__, self.name)

    def __eq__(self, other):
        return isinstance(other, type(self)) and other.name == self.name

    def __ne__(self, other):
        return not self.__eq__(other)
class Status(_Constant):
    """ACME "status" field."""
    POSSIBLE_NAMES = {}  # populated below via _Constant.__init__

# Shared singleton instances for every known status value.
STATUS_UNKNOWN = Status('unknown')
STATUS_PENDING = Status('pending')
STATUS_PROCESSING = Status('processing')
STATUS_VALID = Status('valid')
STATUS_INVALID = Status('invalid')
STATUS_REVOKED = Status('revoked')
class IdentifierType(_Constant):
    """ACME identifier type."""
    POSSIBLE_NAMES = {}  # populated below via _Constant.__init__

IDENTIFIER_FQDN = IdentifierType('dns')  # IdentifierDNS in Boulder
class Identifier(jose.JSONObjectWithFields):
    """ACME identifier.

    :ivar acme.messages2.IdentifierType typ:
    :ivar value: Identifier value (e.g. a domain name for the dns type).

    """
    typ = jose.Field('type', decoder=IdentifierType.from_json)
    value = jose.Field('value')
class Resource(jose.ImmutableMap):
    """ACME Resource.

    :ivar acme.messages2.ResourceBody body: Resource body.
    :ivar str uri: Location of the resource.

    """
    __slots__ = ('body', 'uri')
class ResourceBody(jose.JSONObjectWithFields):
    """ACME Resource Body.

    Marker base class for the JSON bodies carried by ACME resources.

    """
class RegistrationResource(Resource):
    """Registration Resource.

    :ivar acme.messages2.Registration body:
    :ivar str new_authzr_uri: URI found in the 'next' ``Link`` header
    :ivar str terms_of_service: URL for the CA TOS.

    """
    __slots__ = ('body', 'uri', 'new_authzr_uri', 'terms_of_service')
class Registration(ResourceBody):
    """Registration Resource Body.

    :ivar acme.jose.jwk.JWK key: Public key.
    :ivar tuple contact: Contact information following ACME spec

    """
    # on new-reg key server ignores 'key' and populates it based on
    # JWS.signature.combined.jwk
    key = jose.Field('key', omitempty=True, decoder=jose.JWK.from_json)
    contact = jose.Field('contact', omitempty=True, default=())
    recovery_token = jose.Field('recoveryToken', omitempty=True)
    agreement = jose.Field('agreement', omitempty=True)
class ChallengeResource(Resource, jose.JSONObjectWithFields):
    """Challenge Resource.

    :ivar acme.messages2.ChallengeBody body:
    :ivar str authzr_uri: URI found in the 'up' ``Link`` header.

    """
    __slots__ = ('body', 'authzr_uri')

    @property
    def uri(self):  # pylint: disable=missing-docstring,no-self-argument
        # bug? 'method already defined line None'
        # pylint: disable=function-redefined
        # Unlike other resources, the URI is carried on the body itself.
        return self.body.uri
class ChallengeBody(ResourceBody):
    """Challenge Resource Body.

    .. todo::
       Confusingly, this has a similar name to `.challenges.Challenge`,
       as well as `.achallenges.AnnotatedChallenge`. Please use names
       such as ``challb`` to distinguish instances of this class from
       ``achall``.

    :ivar acme.challenges.Challenge: Wrapped challenge.
        Conveniently, all challenge fields are proxied, i.e. you can
        call ``challb.x`` to get ``challb.chall.x`` contents.
    :ivar acme.messages2.Status status:
    :ivar datetime.datetime validated:

    """
    __slots__ = ('chall',)
    uri = jose.Field('uri')
    status = jose.Field('status', decoder=Status.from_json)
    validated = fields.RFC3339Field('validated', omitempty=True)

    def to_partial_json(self):
        # Flatten: own fields and the wrapped challenge's fields share
        # one JSON object.
        jobj = super(ChallengeBody, self).to_partial_json()
        jobj.update(self.chall.to_partial_json())
        return jobj

    @classmethod
    def fields_from_json(cls, jobj):
        jobj_fields = super(ChallengeBody, cls).fields_from_json(jobj)
        # The same JSON object also carries the challenge-specific fields.
        jobj_fields['chall'] = challenges.Challenge.from_json(jobj)
        return jobj_fields

    def __getattr__(self, name):
        # Proxy unknown attribute lookups to the wrapped challenge.
        return getattr(self.chall, name)
class AuthorizationResource(Resource):
    """Authorization Resource.

    :ivar acme.messages2.Authorization body:
    :ivar str new_cert_uri: URI found in the 'next' ``Link`` header

    """
    __slots__ = ('body', 'uri', 'new_cert_uri')
class Authorization(ResourceBody):
    """Authorization Resource Body.

    :ivar acme.messages2.Identifier identifier:
    :ivar list challenges: `list` of `.ChallengeBody`
    :ivar tuple combinations: Challenge combinations (`tuple` of `tuple`
        of `int`, as opposed to `list` of `list` from the spec).
    :ivar acme.messages2.Status status:
    :ivar datetime.datetime expires:

    """
    identifier = jose.Field('identifier', decoder=Identifier.from_json)
    challenges = jose.Field('challenges', omitempty=True)
    combinations = jose.Field('combinations', omitempty=True)
    status = jose.Field('status', omitempty=True, decoder=Status.from_json)
    # TODO: 'expires' is allowed for Authorization Resources in
    # general, but for Key Authorization '[t]he "expires" field MUST
    # be absent'... then acme-spec gives example with 'expires'
    # present... That's confusing!
    expires = fields.RFC3339Field('expires', omitempty=True)

    @challenges.decoder
    def challenges(value):  # pylint: disable=missing-docstring,no-self-argument
        # Each JSON challenge dict becomes a ChallengeBody; stored as an
        # immutable tuple.
        return tuple(ChallengeBody.from_json(chall) for chall in value)

    @property
    def resolved_combinations(self):
        """Combinations with challenges instead of indices."""
        return tuple(tuple(self.challenges[idx] for idx in combo)
                     for combo in self.combinations)
class CertificateRequest(jose.JSONObjectWithFields):
    """ACME new-cert request.

    :ivar acme.jose.util.ComparableX509 csr:
        `M2Crypto.X509.Request` wrapped in `.ComparableX509`
    :ivar tuple authorizations: `tuple` of URIs (`str`)

    """
    csr = jose.Field('csr', decoder=jose.decode_csr, encoder=jose.encode_csr)
    authorizations = jose.Field('authorizations', decoder=tuple)


class CertificateResource(Resource):
    """Certificate Resource.

    :ivar acme.jose.util.ComparableX509 body:
        `M2Crypto.X509.X509` wrapped in `.ComparableX509`
    :ivar str cert_chain_uri: URI found in the 'up' ``Link`` header
    :ivar tuple authzrs: `tuple` of `AuthorizationResource`.

    """
    __slots__ = ('body', 'uri', 'cert_chain_uri', 'authzrs')
class Revocation(jose.JSONObjectWithFields):
    """Revocation message.

    :ivar revoke: Either a `datetime.datetime` or `Revocation.NOW`.
    :ivar tuple authorizations: Same as `CertificateRequest.authorizations`

    """
    NOW = 'now'
    """A possible value for `revoke`, denoting that certificate should
    be revoked now."""

    revoke = jose.Field('revoke')
    # Reuse the field definition from CertificateRequest so the two
    # messages cannot drift apart.
    authorizations = CertificateRequest._fields['authorizations']

    @revoke.decoder
    def revoke(value):  # pylint: disable=missing-docstring,no-self-argument
        # The literal sentinel 'now' passes through unchanged; anything
        # else is parsed as an RFC 3339 timestamp.
        if value == Revocation.NOW:
            return value
        else:
            return fields.RFC3339Field.default_decoder(value)

    @revoke.encoder
    def revoke(value):  # pylint: disable=missing-docstring,no-self-argument
        # Mirror of the decoder above.
        if value == Revocation.NOW:
            return value
        else:
            return fields.RFC3339Field.default_encoder(value)
| {
"repo_name": "felixrieseberg/lets-encrypt-preview",
"path": "acme/messages2.py",
"copies": "1",
"size": "9440",
"license": "apache-2.0",
"hash": -5438037687231989000,
"line_mean": 30.7845117845,
"line_max": 80,
"alpha_frac": 0.6629237288,
"autogenerated": false,
"ratio": 3.759458383114297,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9921044864208131,
"avg_score": 0.00026744954123312874,
"num_lines": 297
} |
"""ACME protocol messages."""
from letsencrypt.acme import challenges
from letsencrypt.acme import fields
from letsencrypt.acme import jose
class Error(jose.JSONObjectWithFields, Exception):
    """ACME error.

    https://tools.ietf.org/html/draft-ietf-appsawg-http-problem-00

    """
    ERROR_TYPE_NAMESPACE = 'urn:acme:error:'
    ERROR_TYPE_DESCRIPTIONS = {
        'malformed': 'The request message was malformed',
        'unauthorized': 'The client lacks sufficient authorization',
        'serverInternal': 'The server experienced an internal error',
        'badCSR': 'The CSR is unacceptable (e.g., due to a short key)',
    }

    # TODO: Boulder omits 'type' and 'instance', spec requires, boulder#128
    typ = jose.Field('type', omitempty=True)
    title = jose.Field('title', omitempty=True)
    detail = jose.Field('detail')
    instance = jose.Field('instance', omitempty=True)

    @typ.encoder
    def typ(value):  # pylint: disable=missing-docstring,no-self-argument
        # Wire format carries the full URN, e.g. 'urn:acme:error:malformed'.
        return Error.ERROR_TYPE_NAMESPACE + value

    @typ.decoder
    def typ(value):  # pylint: disable=missing-docstring,no-self-argument
        # pylint thinks isinstance(value, Error), so startswith is not found
        # pylint: disable=no-member
        if not value.startswith(Error.ERROR_TYPE_NAMESPACE):
            raise jose.DeserializationError('Missing error type prefix')
        # Only the bare error code (namespace stripped) is stored.
        without_prefix = value[len(Error.ERROR_TYPE_NAMESPACE):]
        if without_prefix not in Error.ERROR_TYPE_DESCRIPTIONS:
            raise jose.DeserializationError('Error type not recognized')
        return without_prefix

    @property
    def description(self):
        """Hardcoded error description based on its type.

        Raises ``KeyError`` for unrecognized (or ``None``) types.
        """
        return self.ERROR_TYPE_DESCRIPTIONS[self.typ]

    def __str__(self):
        if self.typ is not None:
            return ' :: '.join([self.typ, self.description, self.detail])
        else:
            # No type available: fall back to the bare detail message.
            return str(self.detail)
class _Constant(jose.JSONDeSerializable):
    """ACME constant.

    Each subclass keeps a registry of its singleton instances in
    ``POSSIBLE_NAMES`` (a `dict` mapping name to instance), so decoding
    a JSON value yields the already-constructed singleton.
    """
    __slots__ = ('name',)
    # Subclasses must override with a real dict before instantiation.
    POSSIBLE_NAMES = NotImplemented

    def __init__(self, name):
        # Register this instance so from_json can return the singleton.
        self.POSSIBLE_NAMES[name] = self
        self.name = name

    def to_partial_json(self):
        return self.name

    @classmethod
    def from_json(cls, value):
        if value not in cls.POSSIBLE_NAMES:
            raise jose.DeserializationError(
                '{0} not recognized'.format(cls.__name__))
        return cls.POSSIBLE_NAMES[value]

    def __repr__(self):
        return '{0}({1})'.format(self.__class__.__name__, self.name)

    def __eq__(self, other):
        return isinstance(other, type(self)) and other.name == self.name

    def __hash__(self):
        # Value-based hash to match __eq__: equal constants must hash
        # equally when used as dict keys or set members.  (Matches the
        # later revisions of this class in acme.messages.)
        return hash((self.__class__, self.name))

    def __ne__(self, other):
        # Delegate through == (rather than calling __eq__ directly) so
        # reflected comparison is honored.
        return not self == other
class Status(_Constant):
    """ACME "status" field."""
    POSSIBLE_NAMES = {}

# Singleton status constants; constructing each one registers it in
# Status.POSSIBLE_NAMES.
STATUS_UNKNOWN = Status('unknown')
STATUS_PENDING = Status('pending')
STATUS_PROCESSING = Status('processing')
STATUS_VALID = Status('valid')
STATUS_INVALID = Status('invalid')
STATUS_REVOKED = Status('revoked')


class IdentifierType(_Constant):
    """ACME identifier type."""
    POSSIBLE_NAMES = {}

IDENTIFIER_FQDN = IdentifierType('dns')  # IdentifierDNS in Boulder


class Identifier(jose.JSONObjectWithFields):
    """ACME identifier.

    :ivar letsencrypt.acme.messages2.IdentifierType typ:
    :ivar value: Identifier value, e.g. a domain name for 'dns'.

    """
    typ = jose.Field('type', decoder=IdentifierType.from_json)
    value = jose.Field('value')
class Resource(jose.ImmutableMap):
    """ACME Resource.

    :ivar letsencrypt.acme.messages2.ResourceBody body: Resource body.
    :ivar str uri: Location of the resource.

    """
    __slots__ = ('body', 'uri')


class ResourceBody(jose.JSONObjectWithFields):
    """ACME Resource Body."""


class RegistrationResource(Resource):
    """Registration Resource.

    :ivar letsencrypt.acme.messages2.Registration body:
    :ivar str new_authzr_uri: URI found in the 'next' ``Link`` header
    :ivar str terms_of_service: URL for the CA TOS.

    """
    __slots__ = ('body', 'uri', 'new_authzr_uri', 'terms_of_service')
class Registration(ResourceBody):
    """Registration Resource Body.

    :ivar letsencrypt.acme.jose.jwk.JWK key: Public key.
    :ivar tuple contact: Contact information following ACME spec

    """
    # on new-reg key server ignores 'key' and populates it based on
    # JWS.signature.combined.jwk
    key = jose.Field('key', omitempty=True, decoder=jose.JWK.from_json)
    contact = jose.Field('contact', omitempty=True, default=())
    recovery_token = jose.Field('recoveryToken', omitempty=True)
    agreement = jose.Field('agreement', omitempty=True)
class ChallengeResource(Resource, jose.JSONObjectWithFields):
    """Challenge Resource.

    :ivar letsencrypt.acme.messages2.ChallengeBody body: Wrapped body.
    :ivar str authzr_uri: URI found in the 'up' ``Link`` header.

    """
    __slots__ = ('body', 'authzr_uri')

    @property
    def uri(self):  # pylint: disable=missing-docstring,no-self-argument
        # bug? 'method already defined line None'
        # pylint: disable=function-redefined
        # The URI lives on the challenge body; proxy it for convenience.
        return self.body.uri
class ChallengeBody(ResourceBody):
    """Challenge Resource Body.

    .. todo::
       Confusingly, this has a similar name to `.challenges.Challenge`,
       as well as `.achallenges.AnnotatedChallenge`. Please use names
       such as ``challb`` to distinguish instances of this class from
       ``achall``.

    :ivar letsencrypt.acme.challenges.Challenge: Wrapped challenge.
        Conveniently, all challenge fields are proxied, i.e. you can
        call ``challb.x`` to get ``challb.chall.x`` contents.
    :ivar letsencrypt.acme.messages2.Status status:
    :ivar datetime.datetime validated:

    """
    __slots__ = ('chall',)
    uri = jose.Field('uri')
    status = jose.Field('status', decoder=Status.from_json)
    validated = fields.RFC3339Field('validated', omitempty=True)

    def to_partial_json(self):
        # Serialize own fields first, then merge the wrapped challenge's
        # fields so both appear flat in a single JSON object.
        jobj = super(ChallengeBody, self).to_partial_json()
        jobj.update(self.chall.to_partial_json())
        return jobj

    @classmethod
    def fields_from_json(cls, jobj):
        # Mirror of to_partial_json: decode the wrapped challenge from
        # the same flat JSON object as the body fields.
        jobj_fields = super(ChallengeBody, cls).fields_from_json(jobj)
        jobj_fields['chall'] = challenges.Challenge.from_json(jobj)
        return jobj_fields

    def __getattr__(self, name):
        # Proxy any unknown attribute to the wrapped challenge.
        return getattr(self.chall, name)
class AuthorizationResource(Resource):
    """Authorization Resource.

    :ivar letsencrypt.acme.messages2.Authorization body:
    :ivar str new_cert_uri: URI found in the 'next' ``Link`` header

    """
    __slots__ = ('body', 'uri', 'new_cert_uri')


class Authorization(ResourceBody):
    """Authorization Resource Body.

    :ivar letsencrypt.acme.messages2.Identifier identifier:
    :ivar list challenges: `list` of `.ChallengeBody`
    :ivar tuple combinations: Challenge combinations (`tuple` of `tuple`
        of `int`, as opposed to `list` of `list` from the spec).
    :ivar letsencrypt.acme.jose.jwk.JWK key: Public key.
    :ivar tuple contact:
    :ivar letsencrypt.acme.messages2.Status status:
    :ivar datetime.datetime expires:

    """
    identifier = jose.Field('identifier', decoder=Identifier.from_json)
    challenges = jose.Field('challenges', omitempty=True)
    combinations = jose.Field('combinations', omitempty=True)

    # TODO: acme-spec #92, #98
    # Reuse Registration's field definitions so the two schemas agree.
    key = Registration._fields['key']
    contact = Registration._fields['contact']

    status = jose.Field('status', omitempty=True, decoder=Status.from_json)
    # TODO: 'expires' is allowed for Authorization Resources in
    # general, but for Key Authorization '[t]he "expires" field MUST
    # be absent'... then acme-spec gives example with 'expires'
    # present... That's confusing!
    expires = fields.RFC3339Field('expires', omitempty=True)

    @challenges.decoder
    def challenges(value):  # pylint: disable=missing-docstring,no-self-argument
        # Each JSON challenge dict becomes a ChallengeBody tuple entry.
        return tuple(ChallengeBody.from_json(chall) for chall in value)

    @property
    def resolved_combinations(self):
        """Combinations with challenges instead of indices."""
        return tuple(tuple(self.challenges[idx] for idx in combo)
                     for combo in self.combinations)
class CertificateRequest(jose.JSONObjectWithFields):
    """ACME new-cert request.

    :ivar letsencrypt.acme.jose.util.ComparableX509 csr:
        `M2Crypto.X509.Request` wrapped in `.ComparableX509`
    :ivar tuple authorizations: `tuple` of URIs (`str`)

    """
    csr = jose.Field('csr', decoder=jose.decode_csr, encoder=jose.encode_csr)
    authorizations = jose.Field('authorizations', decoder=tuple)


class CertificateResource(Resource):
    """Certificate Resource.

    :ivar letsencrypt.acme.jose.util.ComparableX509 body:
        `M2Crypto.X509.X509` wrapped in `.ComparableX509`
    :ivar str cert_chain_uri: URI found in the 'up' ``Link`` header
    :ivar tuple authzrs: `tuple` of `AuthorizationResource`.

    """
    __slots__ = ('body', 'uri', 'cert_chain_uri', 'authzrs')
class Revocation(jose.JSONObjectWithFields):
    """Revocation message.

    :ivar revoke: Either a `datetime.datetime` or `Revocation.NOW`.
    :ivar tuple authorizations: Same as `CertificateRequest.authorizations`

    """
    NOW = 'now'
    """A possible value for `revoke`, denoting that certificate should
    be revoked now."""

    revoke = jose.Field('revoke')
    # Reuse the field definition from CertificateRequest so the two
    # messages cannot drift apart.
    authorizations = CertificateRequest._fields['authorizations']

    @revoke.decoder
    def revoke(value):  # pylint: disable=missing-docstring,no-self-argument
        # The literal sentinel 'now' passes through unchanged; anything
        # else is parsed as an RFC 3339 timestamp.
        if value == Revocation.NOW:
            return value
        else:
            return fields.RFC3339Field.default_decoder(value)

    @revoke.encoder
    def revoke(value):  # pylint: disable=missing-docstring,no-self-argument
        # Mirror of the decoder above.
        if value == Revocation.NOW:
            return value
        else:
            return fields.RFC3339Field.default_encoder(value)
| {
"repo_name": "diracdeltas/lets-encrypt-preview",
"path": "letsencrypt/acme/messages2.py",
"copies": "1",
"size": "9894",
"license": "apache-2.0",
"hash": -7658667820793373000,
"line_mean": 31.6534653465,
"line_max": 80,
"alpha_frac": 0.6680816657,
"autogenerated": false,
"ratio": 3.751990898748578,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.49200725644485777,
"avg_score": null,
"num_lines": null
} |
"""ACME protocol messages."""
import collections
import six
from acme import challenges
from acme import errors
from acme import fields
from acme import jose
from acme import util
# Deprecated error-type URN prefix, still emitted by older servers.
OLD_ERROR_PREFIX = "urn:acme:error:"
# Current IETF error-type URN prefix.
ERROR_PREFIX = "urn:ietf:params:acme:error:"

# Bare ACME error codes mapped to human-readable descriptions.
ERROR_CODES = {
    'badCSR': 'The CSR is unacceptable (e.g., due to a short key)',
    'badNonce': 'The client sent an unacceptable anti-replay nonce',
    'connection': ('The server could not connect to the client to verify the'
                   ' domain'),
    'dnssec': 'The server could not validate a DNSSEC signed domain',
    # deprecate invalidEmail
    'invalidEmail': 'The provided email for a registration was invalid',
    'invalidContact': 'The provided contact URI was invalid',
    'malformed': 'The request message was malformed',
    'rateLimited': 'There were too many requests of a given type',
    'serverInternal': 'The server experienced an internal error',
    'tls': 'The server experienced a TLS error during domain verification',
    'unauthorized': 'The client lacks sufficient authorization',
    'unknownHost': 'The server could not resolve a domain name',
}

# Full error type URNs (both prefixes) mapped to descriptions.
ERROR_TYPE_DESCRIPTIONS = dict(
    (ERROR_PREFIX + name, desc) for name, desc in ERROR_CODES.items())
ERROR_TYPE_DESCRIPTIONS.update(dict(  # add errors with old prefix, deprecate me
    (OLD_ERROR_PREFIX + name, desc) for name, desc in ERROR_CODES.items()))
def is_acme_error(err):
    """Check if argument is an ACME error."""
    # Anything that is not an Error instance with a populated type
    # cannot be an ACME error.
    if not isinstance(err, Error):
        return False
    if err.typ is None:
        return False
    # Accept both the current IETF prefix and the deprecated one.
    return ERROR_PREFIX in err.typ or OLD_ERROR_PREFIX in err.typ
@six.python_2_unicode_compatible
class Error(jose.JSONObjectWithFields, errors.Error):
    """ACME error.

    https://tools.ietf.org/html/draft-ietf-appsawg-http-problem-00

    :ivar unicode typ:
    :ivar unicode title:
    :ivar unicode detail:

    """
    typ = jose.Field('type', omitempty=True, default='about:blank')
    title = jose.Field('title', omitempty=True)
    detail = jose.Field('detail', omitempty=True)

    @classmethod
    def with_code(cls, code, **kwargs):
        """Create an Error instance with an ACME Error code.

        :unicode code: An ACME error code, like 'dnssec'.
        :kwargs: kwargs to pass to Error.

        :raises ValueError: if ``code`` is not a known ACME error code.

        """
        if code not in ERROR_CODES:
            raise ValueError("The supplied code: %s is not a known ACME error"
                             " code" % code)
        typ = ERROR_PREFIX + code
        return cls(typ=typ, **kwargs)

    @property
    def description(self):
        """Hardcoded error description based on its type.

        :returns: Description if standard ACME error or ``None``.
        :rtype: unicode

        """
        return ERROR_TYPE_DESCRIPTIONS.get(self.typ)

    @property
    def code(self):
        """ACME error code.

        Basically self.typ without the ERROR_PREFIX.

        :returns: error code if standard ACME code or ``None``.
        :rtype: unicode

        """
        # The code is the last colon-separated segment of the URN.
        code = str(self.typ).split(':')[-1]
        if code in ERROR_CODES:
            return code
        # Falls through (returns None) for non-standard codes.

    def __str__(self):
        # Encode each part with backslashreplace so arbitrary unicode in
        # any field cannot raise while stringifying the error.
        return b' :: '.join(
            part.encode('ascii', 'backslashreplace') for part in
            (self.typ, self.description, self.detail, self.title)
            if part is not None).decode()
class _Constant(jose.JSONDeSerializable, collections.Hashable):  # type: ignore
    """ACME constant."""
    __slots__ = ('name',)
    # Subclasses override with a dict mapping name -> singleton instance.
    POSSIBLE_NAMES = NotImplemented

    def __init__(self, name):
        # Register this instance so from_json returns the singleton.
        self.POSSIBLE_NAMES[name] = self
        self.name = name

    def to_partial_json(self):
        return self.name

    @classmethod
    def from_json(cls, value):
        if value not in cls.POSSIBLE_NAMES:
            raise jose.DeserializationError(
                '{0} not recognized'.format(cls.__name__))
        return cls.POSSIBLE_NAMES[value]

    def __repr__(self):
        return '{0}({1})'.format(self.__class__.__name__, self.name)

    def __eq__(self, other):
        return isinstance(other, type(self)) and other.name == self.name

    def __hash__(self):
        # Value-based hash, consistent with __eq__ above.
        return hash((self.__class__, self.name))

    def __ne__(self, other):
        # Delegate through == so reflected comparison is honored.
        return not self == other
class Status(_Constant):
    """ACME "status" field."""
    POSSIBLE_NAMES = {}  # type: dict

# Singleton status constants; constructing each one registers it in
# Status.POSSIBLE_NAMES.
STATUS_UNKNOWN = Status('unknown')
STATUS_PENDING = Status('pending')
STATUS_PROCESSING = Status('processing')
STATUS_VALID = Status('valid')
STATUS_INVALID = Status('invalid')
STATUS_REVOKED = Status('revoked')


class IdentifierType(_Constant):
    """ACME identifier type."""
    POSSIBLE_NAMES = {}  # type: dict

IDENTIFIER_FQDN = IdentifierType('dns')  # IdentifierDNS in Boulder


class Identifier(jose.JSONObjectWithFields):
    """ACME identifier.

    :ivar IdentifierType typ:
    :ivar unicode value:

    """
    typ = jose.Field('type', decoder=IdentifierType.from_json)
    value = jose.Field('value')
class Directory(jose.JSONDeSerializable):
    """Directory."""
    # Registry of resource body classes keyed by their resource_type.
    _REGISTERED_TYPES = {}  # type: dict

    class Meta(jose.JSONObjectWithFields):
        """Directory Meta."""
        terms_of_service = jose.Field('terms-of-service', omitempty=True)
        website = jose.Field('website', omitempty=True)
        caa_identities = jose.Field('caa-identities', omitempty=True)

    @classmethod
    def _canon_key(cls, key):
        # Accept either a plain string key or a registered resource
        # class (which carries a .resource_type attribute).
        return getattr(key, 'resource_type', key)

    @classmethod
    def register(cls, resource_body_cls):
        """Register resource."""
        resource_type = resource_body_cls.resource_type
        # Registering the same resource_type twice is a programming error.
        assert resource_type not in cls._REGISTERED_TYPES
        cls._REGISTERED_TYPES[resource_type] = resource_body_cls
        return resource_body_cls

    def __init__(self, jobj):
        canon_jobj = util.map_keys(jobj, self._canon_key)
        # TODO: check that everything is an absolute URL; acme-spec is
        # not clear on that
        self._jobj = canon_jobj

    def __getattr__(self, name):
        # Attribute access maps underscores to dashes, e.g.
        # directory.new_reg looks up directory['new-reg'].
        try:
            return self[name.replace('_', '-')]
        except KeyError as error:
            raise AttributeError(str(error) + ': ' + name)

    def __getitem__(self, name):
        try:
            return self._jobj[self._canon_key(name)]
        except KeyError:
            raise KeyError('Directory field not found')

    def to_partial_json(self):
        return self._jobj

    @classmethod
    def from_json(cls, jobj):
        # Decode 'meta' into a Meta object; a missing meta key becomes
        # an empty Meta.
        jobj['meta'] = cls.Meta.from_json(jobj.pop('meta', {}))
        return cls(jobj)
class Resource(jose.JSONObjectWithFields):
    """ACME Resource.

    :ivar acme.messages.ResourceBody body: Resource body.

    """
    body = jose.Field('body')


class ResourceWithURI(Resource):
    """ACME Resource with URI.

    :ivar unicode uri: Location of the resource.

    """
    uri = jose.Field('uri')  # no ChallengeResource.uri


class ResourceBody(jose.JSONObjectWithFields):
    """ACME Resource Body."""
class Registration(ResourceBody):
    """Registration Resource Body.

    :ivar acme.jose.jwk.JWK key: Public key.
    :ivar tuple contact: Contact information following ACME spec,
        `tuple` of `unicode`.
    :ivar unicode agreement:

    """
    # on new-reg key server ignores 'key' and populates it based on
    # JWS.signature.combined.jwk
    key = jose.Field('key', omitempty=True, decoder=jose.JWK.from_json)
    contact = jose.Field('contact', omitempty=True, default=())
    agreement = jose.Field('agreement', omitempty=True)
    status = jose.Field('status', omitempty=True)

    # URI schemes used to tag entries in 'contact'.
    phone_prefix = 'tel:'
    email_prefix = 'mailto:'

    @classmethod
    def from_data(cls, phone=None, email=None, **kwargs):
        """Create registration resource from contact details."""
        # Start from any explicitly supplied contact entries, then
        # append prefixed phone/email entries.
        details = list(kwargs.pop('contact', ()))
        if phone is not None:
            details.append(cls.phone_prefix + phone)
        if email is not None:
            details.append(cls.email_prefix + email)
        kwargs['contact'] = tuple(details)
        return cls(**kwargs)

    def _filter_contact(self, prefix):
        # Contact entries matching `prefix`, with the prefix stripped.
        return tuple(
            detail[len(prefix):] for detail in self.contact
            if detail.startswith(prefix))

    @property
    def phones(self):
        """All phones found in the ``contact`` field."""
        return self._filter_contact(self.phone_prefix)

    @property
    def emails(self):
        """All emails found in the ``contact`` field."""
        return self._filter_contact(self.email_prefix)
@Directory.register
class NewRegistration(Registration):
    """New registration."""
    resource_type = 'new-reg'
    resource = fields.Resource(resource_type)


class UpdateRegistration(Registration):
    """Update registration."""
    # Not registered with Directory: 'reg' has no directory endpoint.
    resource_type = 'reg'
    resource = fields.Resource(resource_type)
class RegistrationResource(ResourceWithURI):
    """Registration Resource.

    :ivar acme.messages.Registration body:
    :ivar unicode new_authzr_uri: Deprecated. Do not use.
    :ivar unicode terms_of_service: URL for the CA TOS.

    """
    body = jose.Field('body', decoder=Registration.from_json)
    new_authzr_uri = jose.Field('new_authzr_uri', omitempty=True)
    terms_of_service = jose.Field('terms_of_service', omitempty=True)
class ChallengeBody(ResourceBody):
    """Challenge Resource Body.

    .. todo::
       Confusingly, this has a similar name to `.challenges.Challenge`,
       as well as `.achallenges.AnnotatedChallenge`. Please use names
       such as ``challb`` to distinguish instances of this class from
       ``achall``.

    :ivar acme.challenges.Challenge: Wrapped challenge.
        Conveniently, all challenge fields are proxied, i.e. you can
        call ``challb.x`` to get ``challb.chall.x`` contents.
    :ivar acme.messages.Status status:
    :ivar datetime.datetime validated:
    :ivar messages.Error error:

    """
    __slots__ = ('chall',)
    uri = jose.Field('uri')
    status = jose.Field('status', decoder=Status.from_json,
                        omitempty=True, default=STATUS_PENDING)
    validated = fields.RFC3339Field('validated', omitempty=True)
    error = jose.Field('error', decoder=Error.from_json,
                       omitempty=True, default=None)

    def to_partial_json(self):
        # Serialize own fields first, then merge the wrapped challenge's
        # fields so both appear flat in a single JSON object.
        jobj = super(ChallengeBody, self).to_partial_json()
        jobj.update(self.chall.to_partial_json())
        return jobj

    @classmethod
    def fields_from_json(cls, jobj):
        # Mirror of to_partial_json: decode the wrapped challenge from
        # the same flat JSON object as the body fields.
        jobj_fields = super(ChallengeBody, cls).fields_from_json(jobj)
        jobj_fields['chall'] = challenges.Challenge.from_json(jobj)
        return jobj_fields

    def __getattr__(self, name):
        # Proxy any unknown attribute to the wrapped challenge.
        return getattr(self.chall, name)
class ChallengeResource(Resource):
    """Challenge Resource.

    :ivar acme.messages.ChallengeBody body:
    :ivar unicode authzr_uri: URI found in the 'up' ``Link`` header.

    """
    body = jose.Field('body', decoder=ChallengeBody.from_json)
    authzr_uri = jose.Field('authzr_uri')

    @property
    def uri(self):  # pylint: disable=missing-docstring,no-self-argument
        # bug? 'method already defined line None'
        # pylint: disable=function-redefined
        # The URI lives on the challenge body; proxy it for convenience.
        return self.body.uri  # pylint: disable=no-member
class Authorization(ResourceBody):
    """Authorization Resource Body.

    :ivar acme.messages.Identifier identifier:
    :ivar list challenges: `list` of `.ChallengeBody`
    :ivar tuple combinations: Challenge combinations (`tuple` of `tuple`
        of `int`, as opposed to `list` of `list` from the spec).
    :ivar acme.messages.Status status:
    :ivar datetime.datetime expires:

    """
    identifier = jose.Field('identifier', decoder=Identifier.from_json)
    challenges = jose.Field('challenges', omitempty=True)
    combinations = jose.Field('combinations', omitempty=True)
    status = jose.Field('status', omitempty=True, decoder=Status.from_json)
    # TODO: 'expires' is allowed for Authorization Resources in
    # general, but for Key Authorization '[t]he "expires" field MUST
    # be absent'... then acme-spec gives example with 'expires'
    # present... That's confusing!
    expires = fields.RFC3339Field('expires', omitempty=True)

    @challenges.decoder
    def challenges(value):  # pylint: disable=missing-docstring,no-self-argument
        # Each JSON challenge dict becomes a ChallengeBody tuple entry.
        return tuple(ChallengeBody.from_json(chall) for chall in value)

    @property
    def resolved_combinations(self):
        """Combinations with challenges instead of indices."""
        return tuple(tuple(self.challenges[idx] for idx in combo)
                     for combo in self.combinations)
@Directory.register
class NewAuthorization(Authorization):
    """New authorization."""
    resource_type = 'new-authz'
    resource = fields.Resource(resource_type)


class AuthorizationResource(ResourceWithURI):
    """Authorization Resource.

    :ivar acme.messages.Authorization body:
    :ivar unicode new_cert_uri: Deprecated. Do not use.

    """
    body = jose.Field('body', decoder=Authorization.from_json)
    new_cert_uri = jose.Field('new_cert_uri', omitempty=True)
@Directory.register
class CertificateRequest(jose.JSONObjectWithFields):
    """ACME new-cert request.

    :ivar acme.jose.util.ComparableX509 csr:
        `OpenSSL.crypto.X509Req` wrapped in `.ComparableX509`

    """
    resource_type = 'new-cert'
    resource = fields.Resource(resource_type)
    csr = jose.Field('csr', decoder=jose.decode_csr, encoder=jose.encode_csr)


class CertificateResource(ResourceWithURI):
    """Certificate Resource.

    :ivar acme.jose.util.ComparableX509 body:
        `OpenSSL.crypto.X509` wrapped in `.ComparableX509`
    :ivar unicode cert_chain_uri: URI found in the 'up' ``Link`` header
    :ivar tuple authzrs: `tuple` of `AuthorizationResource`.

    """
    cert_chain_uri = jose.Field('cert_chain_uri')
    authzrs = jose.Field('authzrs')
@Directory.register
class Revocation(jose.JSONObjectWithFields):
    """Revocation message.

    :ivar .ComparableX509 certificate: `OpenSSL.crypto.X509` wrapped in
        `.ComparableX509`

    """
    resource_type = 'revoke-cert'
    resource = fields.Resource(resource_type)
    certificate = jose.Field(
        'certificate', decoder=jose.decode_cert, encoder=jose.encode_cert)
    reason = jose.Field('reason')
| {
"repo_name": "jsha/letsencrypt",
"path": "acme/acme/messages.py",
"copies": "1",
"size": "14264",
"license": "apache-2.0",
"hash": -799563591542973000,
"line_mean": 30.2807017544,
"line_max": 80,
"alpha_frac": 0.6517807067,
"autogenerated": false,
"ratio": 3.7805459846276173,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.49323266913276176,
"avg_score": null,
"num_lines": null
} |
"""ACME protocol messages."""
import collections
from acme import challenges
from acme import errors
from acme import fields
from acme import jose
from acme import util
class Error(jose.JSONObjectWithFields, errors.Error):
    """ACME error.

    https://tools.ietf.org/html/draft-ietf-appsawg-http-problem-00

    :ivar unicode typ:
    :ivar unicode title:
    :ivar unicode detail:

    """
    # Full error type URNs mapped to human-readable descriptions.
    ERROR_TYPE_DESCRIPTIONS = dict(
        ('urn:acme:error:' + name, description) for name, description in (
            ('badCSR', 'The CSR is unacceptable (e.g., due to a short key)'),
            ('badNonce', 'The client sent an unacceptable anti-replay nonce'),
            ('connection', 'The server could not connect to the client for DV'),
            ('dnssec', 'The server could not validate a DNSSEC signed domain'),
            ('malformed', 'The request message was malformed'),
            ('rateLimited', 'There were too many requests of a given type'),
            ('serverInternal', 'The server experienced an internal error'),
            ('tls', 'The server experienced a TLS error during DV'),
            ('unauthorized', 'The client lacks sufficient authorization'),
            ('unknownHost', 'The server could not resolve a domain name'),
        )
    )

    typ = jose.Field('type')
    title = jose.Field('title', omitempty=True)
    detail = jose.Field('detail')

    @property
    def description(self):
        """Hardcoded error description based on its type.

        :returns: Description if standard ACME error or ``None``.
        :rtype: unicode

        """
        return self.ERROR_TYPE_DESCRIPTIONS.get(self.typ)

    def __str__(self):
        # Join only the populated parts of the error.
        return ' :: '.join(
            part for part in
            (self.typ, self.description, self.detail, self.title)
            if part is not None)
class _Constant(jose.JSONDeSerializable, collections.Hashable):
    """ACME constant."""
    __slots__ = ('name',)
    # Subclasses override with a dict mapping name -> singleton instance.
    POSSIBLE_NAMES = NotImplemented

    def __init__(self, name):
        # Register this instance so from_json returns the singleton.
        self.POSSIBLE_NAMES[name] = self
        self.name = name

    def to_partial_json(self):
        return self.name

    @classmethod
    def from_json(cls, value):
        if value not in cls.POSSIBLE_NAMES:
            raise jose.DeserializationError(
                '{0} not recognized'.format(cls.__name__))
        return cls.POSSIBLE_NAMES[value]

    def __repr__(self):
        return '{0}({1})'.format(self.__class__.__name__, self.name)

    def __eq__(self, other):
        return isinstance(other, type(self)) and other.name == self.name

    def __hash__(self):
        # Value-based hash, consistent with __eq__ above.
        return hash((self.__class__, self.name))

    def __ne__(self, other):
        # Delegate through == so reflected comparison is honored.
        return not self == other
class Status(_Constant):
    """ACME "status" field."""
    POSSIBLE_NAMES = {}

# Singleton status constants; constructing each one registers it in
# Status.POSSIBLE_NAMES.
STATUS_UNKNOWN = Status('unknown')
STATUS_PENDING = Status('pending')
STATUS_PROCESSING = Status('processing')
STATUS_VALID = Status('valid')
STATUS_INVALID = Status('invalid')
STATUS_REVOKED = Status('revoked')


class IdentifierType(_Constant):
    """ACME identifier type."""
    POSSIBLE_NAMES = {}

IDENTIFIER_FQDN = IdentifierType('dns')  # IdentifierDNS in Boulder


class Identifier(jose.JSONObjectWithFields):
    """ACME identifier.

    :ivar IdentifierType typ:
    :ivar unicode value:

    """
    typ = jose.Field('type', decoder=IdentifierType.from_json)
    value = jose.Field('value')
class Directory(jose.JSONDeSerializable):
    """Directory."""
    # Registry of resource body classes keyed by their resource_type.
    _REGISTERED_TYPES = {}

    @classmethod
    def _canon_key(cls, key):
        # Accept either a plain string key or a registered resource
        # class (which carries a .resource_type attribute).
        return getattr(key, 'resource_type', key)

    @classmethod
    def register(cls, resource_body_cls):
        """Register resource."""
        # Registering the same resource_type twice is a programming error.
        assert resource_body_cls.resource_type not in cls._REGISTERED_TYPES
        cls._REGISTERED_TYPES[resource_body_cls.resource_type] = resource_body_cls
        return resource_body_cls

    def __init__(self, jobj):
        canon_jobj = util.map_keys(jobj, self._canon_key)
        # Unlike later revisions, this version rejects unknown fields.
        if not set(canon_jobj).issubset(self._REGISTERED_TYPES):
            # TODO: acme-spec is not clear about this: 'It is a JSON
            # dictionary, whose keys are the "resource" values listed
            # in {{https-requests}}'z
            raise ValueError('Wrong directory fields')
        # TODO: check that everything is an absolute URL; acme-spec is
        # not clear on that
        self._jobj = canon_jobj

    def __getattr__(self, name):
        # Attribute access maps underscores to dashes, e.g.
        # directory.new_reg looks up directory['new-reg'].
        try:
            return self[name.replace('_', '-')]
        except KeyError as error:
            raise AttributeError(str(error))

    def __getitem__(self, name):
        try:
            return self._jobj[self._canon_key(name)]
        except KeyError:
            raise KeyError('Directory field not found')

    def to_partial_json(self):
        return self._jobj

    @classmethod
    def from_json(cls, jobj):
        # Translate validation failures into deserialization errors.
        try:
            return cls(jobj)
        except ValueError as error:
            raise jose.DeserializationError(str(error))
class Resource(jose.JSONObjectWithFields):
    """ACME Resource.

    :ivar acme.messages.ResourceBody body: Resource body.

    """
    body = jose.Field('body')


class ResourceWithURI(Resource):
    """ACME Resource with URI.

    :ivar unicode uri: Location of the resource.

    """
    uri = jose.Field('uri')  # no ChallengeResource.uri


class ResourceBody(jose.JSONObjectWithFields):
    """ACME Resource Body."""
class Registration(ResourceBody):
    """Registration Resource Body.

    :ivar acme.jose.jwk.JWK key: Public key.
    :ivar tuple contact: Contact information following ACME spec,
        `tuple` of `unicode`.
    :ivar unicode agreement:
    :ivar unicode authorizations: URI where
        `messages.Registration.Authorizations` can be found.
    :ivar unicode certificates: URI where
        `messages.Registration.Certificates` can be found.

    """
    # on new-reg key server ignores 'key' and populates it based on
    # JWS.signature.combined.jwk
    key = jose.Field('key', omitempty=True, decoder=jose.JWK.from_json)
    contact = jose.Field('contact', omitempty=True, default=())
    agreement = jose.Field('agreement', omitempty=True)
    authorizations = jose.Field('authorizations', omitempty=True)
    certificates = jose.Field('certificates', omitempty=True)

    class Authorizations(jose.JSONObjectWithFields):
        """Authorizations granted to Account in the process of registration.

        :ivar tuple authorizations: URIs to Authorization Resources.

        """
        authorizations = jose.Field('authorizations')

    class Certificates(jose.JSONObjectWithFields):
        """Certificates granted to Account in the process of registration.

        :ivar tuple certificates: URIs to Certificate Resources.

        """
        certificates = jose.Field('certificates')

    # URI schemes used to tag entries in 'contact'.
    phone_prefix = 'tel:'
    email_prefix = 'mailto:'

    @classmethod
    def from_data(cls, phone=None, email=None, **kwargs):
        """Create registration resource from contact details."""
        # Start from any explicitly supplied contact entries, then
        # append prefixed phone/email entries.
        details = list(kwargs.pop('contact', ()))
        if phone is not None:
            details.append(cls.phone_prefix + phone)
        if email is not None:
            details.append(cls.email_prefix + email)
        kwargs['contact'] = tuple(details)
        return cls(**kwargs)

    def _filter_contact(self, prefix):
        # Contact entries matching `prefix`, with the prefix stripped.
        return tuple(
            detail[len(prefix):] for detail in self.contact
            if detail.startswith(prefix))

    @property
    def phones(self):
        """All phones found in the ``contact`` field."""
        return self._filter_contact(self.phone_prefix)

    @property
    def emails(self):
        """All emails found in the ``contact`` field."""
        return self._filter_contact(self.email_prefix)
@Directory.register
class NewRegistration(Registration):
    """New registration."""
    resource_type = 'new-reg'
    resource = fields.Resource(resource_type)


class UpdateRegistration(Registration):
    """Update registration."""
    # Not registered with Directory: 'reg' has no directory endpoint.
    resource_type = 'reg'
    resource = fields.Resource(resource_type)


class RegistrationResource(ResourceWithURI):
    """Registration Resource.

    :ivar acme.messages.Registration body:
    :ivar unicode new_authzr_uri: URI found in the 'next' ``Link`` header
    :ivar unicode terms_of_service: URL for the CA TOS.

    """
    body = jose.Field('body', decoder=Registration.from_json)
    new_authzr_uri = jose.Field('new_authzr_uri')
    terms_of_service = jose.Field('terms_of_service', omitempty=True)
class ChallengeBody(ResourceBody):
"""Challenge Resource Body.
.. todo::
Confusingly, this has a similar name to `.challenges.Challenge`,
as well as `.achallenges.AnnotatedChallenge`. Please use names
such as ``challb`` to distinguish instances of this class from
``achall``.
:ivar acme.challenges.Challenge: Wrapped challenge.
Conveniently, all challenge fields are proxied, i.e. you can
call ``challb.x`` to get ``challb.chall.x`` contents.
:ivar acme.messages.Status status:
:ivar datetime.datetime validated:
:ivar messages.Error error:
"""
# 'chall' is stored outside the jose field machinery, hence __slots__.
__slots__ = ('chall',)
uri = jose.Field('uri')
status = jose.Field('status', decoder=Status.from_json,
omitempty=True, default=STATUS_PENDING)
validated = fields.RFC3339Field('validated', omitempty=True)
error = jose.Field('error', decoder=Error.from_json,
omitempty=True, default=None)
# Serialization merges the wrapped challenge's fields into this body's.
def to_partial_json(self):
jobj = super(ChallengeBody, self).to_partial_json()
jobj.update(self.chall.to_partial_json())
return jobj
# Deserialization parses the same JSON object twice: once for the
# declared body fields, once for the wrapped challenge itself.
@classmethod
def fields_from_json(cls, jobj):
jobj_fields = super(ChallengeBody, cls).fields_from_json(jobj)
jobj_fields['chall'] = challenges.Challenge.from_json(jobj)
return jobj_fields
# Proxy unknown attributes to the wrapped challenge.
def __getattr__(self, name):
return getattr(self.chall, name)
class ChallengeResource(Resource):
"""Challenge Resource.
:ivar acme.messages.ChallengeBody body:
:ivar unicode authzr_uri: URI found in the 'up' ``Link`` header.
"""
body = jose.Field('body', decoder=ChallengeBody.from_json)
authzr_uri = jose.Field('authzr_uri')
# Unlike ResourceWithURI, the URI lives on the body, so it is exposed
# through a read-only property instead of a jose field.
@property
def uri(self): # pylint: disable=missing-docstring,no-self-argument
# bug? 'method already defined line None'
# pylint: disable=function-redefined
return self.body.uri # pylint: disable=no-member
class Authorization(ResourceBody):
"""Authorization Resource Body.
:ivar acme.messages.Identifier identifier:
:ivar list challenges: `list` of `.ChallengeBody`
:ivar tuple combinations: Challenge combinations (`tuple` of `tuple`
of `int`, as opposed to `list` of `list` from the spec).
:ivar acme.messages.Status status:
:ivar datetime.datetime expires:
"""
identifier = jose.Field('identifier', decoder=Identifier.from_json)
challenges = jose.Field('challenges', omitempty=True)
combinations = jose.Field('combinations', omitempty=True)
status = jose.Field('status', omitempty=True, decoder=Status.from_json)
# TODO: 'expires' is allowed for Authorization Resources in
# general, but for Key Authorization '[t]he "expires" field MUST
# be absent'... then acme-spec gives example with 'expires'
# present... That's confusing!
expires = fields.RFC3339Field('expires', omitempty=True)
# Field-decoder hook: converts each raw JSON challenge into a
# ChallengeBody when the authorization is deserialized.
@challenges.decoder
def challenges(value): # pylint: disable=missing-docstring,no-self-argument
return tuple(ChallengeBody.from_json(chall) for chall in value)
# Maps every index in 'combinations' to its ChallengeBody.
@property
def resolved_combinations(self):
"""Combinations with challenges instead of indices."""
return tuple(tuple(self.challenges[idx] for idx in combo)
for combo in self.combinations)
@Directory.register
class NewAuthorization(Authorization):
"""New authorization."""
resource_type = 'new-authz'
resource = fields.Resource(resource_type)
class AuthorizationResource(ResourceWithURI):
"""Authorization Resource.
:ivar acme.messages.Authorization body:
:ivar unicode new_cert_uri: URI found in the 'next' ``Link`` header
"""
body = jose.Field('body', decoder=Authorization.from_json)
new_cert_uri = jose.Field('new_cert_uri')
@Directory.register
class CertificateRequest(jose.JSONObjectWithFields):
"""ACME new-cert request.
:ivar acme.jose.util.ComparableX509 csr:
`OpenSSL.crypto.X509Req` wrapped in `.ComparableX509`
"""
resource_type = 'new-cert'
resource = fields.Resource(resource_type)
# csr is transported base64url/DER-encoded via the jose codec helpers.
csr = jose.Field('csr', decoder=jose.decode_csr, encoder=jose.encode_csr)
class CertificateResource(ResourceWithURI):
"""Certificate Resource.
:ivar acme.jose.util.ComparableX509 body:
`OpenSSL.crypto.X509` wrapped in `.ComparableX509`
:ivar unicode cert_chain_uri: URI found in the 'up' ``Link`` header
:ivar tuple authzrs: `tuple` of `AuthorizationResource`.
"""
cert_chain_uri = jose.Field('cert_chain_uri')
authzrs = jose.Field('authzrs')
@Directory.register
class Revocation(jose.JSONObjectWithFields):
"""Revocation message.
:ivar .ComparableX509 certificate: `OpenSSL.crypto.X509` wrapped in
`.ComparableX509`
"""
resource_type = 'revoke-cert'
resource = fields.Resource(resource_type)
certificate = jose.Field(
'certificate', decoder=jose.decode_cert, encoder=jose.encode_cert)
| {
"repo_name": "piru/letsencrypt",
"path": "acme/acme/messages.py",
"copies": "3",
"size": "13501",
"license": "apache-2.0",
"hash": -7834323424986547000,
"line_mean": 30.6182669789,
"line_max": 82,
"alpha_frac": 0.6527664617,
"autogenerated": false,
"ratio": 3.8986427952642217,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.00018739311878254202,
"num_lines": 427
} |
"""ACME protocol messages."""
import collections
from acme import challenges
from acme import errors
from acme import fields
from acme import jose
from acme import util
# Legacy error-type prefix still emitted by pre-RFC ACME servers.
OLD_ERROR_PREFIX = "urn:acme:error:"
# IETF error-type prefix.
ERROR_PREFIX = "urn:ietf:params:acme:error:"
# Human-readable description for each known ACME error code.
ERROR_CODES = {
'badCSR': 'The CSR is unacceptable (e.g., due to a short key)',
'badNonce': 'The client sent an unacceptable anti-replay nonce',
'connection': ('The server could not connect to the client to verify the'
' domain'),
'dnssec': 'The server could not validate a DNSSEC signed domain',
# deprecate invalidEmail
'invalidEmail': 'The provided email for a registration was invalid',
'invalidContact': 'The provided contact URI was invalid',
'malformed': 'The request message was malformed',
'rateLimited': 'There were too many requests of a given type',
'serverInternal': 'The server experienced an internal error',
'tls': 'The server experienced a TLS error during domain verification',
'unauthorized': 'The client lacks sufficient authorization',
'unknownHost': 'The server could not resolve a domain name',
}
# Full type URN -> description, for both the new and the old prefix.
ERROR_TYPE_DESCRIPTIONS = dict(
(ERROR_PREFIX + name, desc) for name, desc in ERROR_CODES.items())
ERROR_TYPE_DESCRIPTIONS.update(dict( # add errors with old prefix, deprecate me
(OLD_ERROR_PREFIX + name, desc) for name, desc in ERROR_CODES.items()))
def is_acme_error(err):
    """Check if argument is an ACME error."""
    # Matches both the IETF and the legacy "urn:acme:error:" prefixes.
    text = str(err)
    return ERROR_PREFIX in text or OLD_ERROR_PREFIX in text
class Error(jose.JSONObjectWithFields, errors.Error):
    """ACME error.

    https://tools.ietf.org/html/draft-ietf-appsawg-http-problem-00

    :ivar unicode typ:
    :ivar unicode title:
    :ivar unicode detail:
    """
    typ = jose.Field('type', omitempty=True, default='about:blank')
    title = jose.Field('title', omitempty=True)
    detail = jose.Field('detail', omitempty=True)

    @classmethod
    def with_code(cls, code, **kwargs):
        """Create an Error instance with an ACME Error code.

        :unicode code: An ACME error code, like 'dnssec'.
        :kwargs: kwargs to pass to Error.
        """
        if code not in ERROR_CODES:
            raise ValueError(
                "The supplied code: %s is not a known ACME error code" % code)
        return cls(typ=ERROR_PREFIX + code, **kwargs)

    @property
    def description(self):
        """Hardcoded error description based on its type.

        :returns: Description if standard ACME error or ``None``.
        :rtype: unicode
        """
        return ERROR_TYPE_DESCRIPTIONS.get(self.typ)

    @property
    def code(self):
        """ACME error code.

        Basically self.typ without the ERROR_PREFIX.

        :returns: error code if standard ACME code or ``None``.
        :rtype: unicode
        """
        # Last URN component, e.g. 'urn:...:badCSR' -> 'badCSR'.
        candidate = str(self.typ).rsplit(':', 1)[-1]
        if candidate in ERROR_CODES:
            return candidate
        return None

    def __str__(self):
        # Join only the components that are actually set.
        components = (self.typ, self.description, self.detail, self.title)
        return ' :: '.join(part for part in components if part is not None)
class _Constant(jose.JSONDeSerializable, collections.Hashable):
    """ACME constant."""
    # NOTE(review): ``collections.Hashable`` is a deprecated alias removed
    # in Python 3.10 (use ``collections.abc.Hashable``); left unchanged to
    # keep Python 2 compatibility of this file.
    __slots__ = ('name',)
    # Subclasses supply a dict; instances register themselves in it on
    # construction, enabling name -> singleton lookup in from_json.
    POSSIBLE_NAMES = NotImplemented

    def __init__(self, name):
        self.POSSIBLE_NAMES[name] = self
        self.name = name

    def to_partial_json(self):
        return self.name

    @classmethod
    def from_json(cls, value):
        if value not in cls.POSSIBLE_NAMES:
            raise jose.DeserializationError(
                '{0} not recognized'.format(cls.__name__))
        return cls.POSSIBLE_NAMES[value]

    def __repr__(self):
        return '{0}({1})'.format(self.__class__.__name__, self.name)

    def __eq__(self, other):
        return isinstance(other, type(self)) and other.name == self.name

    def __ne__(self, other):
        return not self == other

    def __hash__(self):
        return hash((self.__class__, self.name))
class Status(_Constant):
"""ACME "status" field."""
POSSIBLE_NAMES = {}
# Module-level singletons; constructing each one registers its name in
# Status.POSSIBLE_NAMES (see _Constant.__init__).
STATUS_UNKNOWN = Status('unknown')
STATUS_PENDING = Status('pending')
STATUS_PROCESSING = Status('processing')
STATUS_VALID = Status('valid')
STATUS_INVALID = Status('invalid')
STATUS_REVOKED = Status('revoked')
class IdentifierType(_Constant):
"""ACME identifier type."""
POSSIBLE_NAMES = {}
IDENTIFIER_FQDN = IdentifierType('dns') # IdentifierDNS in Boulder
class Identifier(jose.JSONObjectWithFields):
"""ACME identifier.
:ivar IdentifierType typ:
:ivar unicode value:
"""
typ = jose.Field('type', decoder=IdentifierType.from_json)
value = jose.Field('value')
class Directory(jose.JSONDeSerializable):
    """Directory."""
    # resource_type string -> registered resource class.
    _REGISTERED_TYPES = {}

    class Meta(jose.JSONObjectWithFields):
        """Directory Meta."""
        terms_of_service = jose.Field('terms-of-service', omitempty=True)
        website = jose.Field('website', omitempty=True)
        caa_identities = jose.Field('caa-identities', omitempty=True)

    @classmethod
    def _canon_key(cls, key):
        # Accept either a registered resource class (keyed by its
        # resource_type) or a plain string key.
        return getattr(key, 'resource_type', key)

    @classmethod
    def register(cls, resource_body_cls):
        """Register resource."""
        resource_type = resource_body_cls.resource_type
        assert resource_type not in cls._REGISTERED_TYPES
        cls._REGISTERED_TYPES[resource_type] = resource_body_cls
        return resource_body_cls

    def __init__(self, jobj):
        # TODO: check that everything is an absolute URL; acme-spec is
        # not clear on that
        self._jobj = util.map_keys(jobj, self._canon_key)

    def __getattr__(self, name):
        # Attribute access maps snake_case to the dash-separated JSON key.
        try:
            return self[name.replace('_', '-')]
        except KeyError as error:
            raise AttributeError(str(error))

    def __getitem__(self, name):
        try:
            return self._jobj[self._canon_key(name)]
        except KeyError:
            raise KeyError('Directory field not found')

    def to_partial_json(self):
        return self._jobj

    @classmethod
    def from_json(cls, jobj):
        # 'meta' is parsed into the structured Meta object before the
        # remaining fields are canonicalized.
        jobj['meta'] = cls.Meta.from_json(jobj.pop('meta', {}))
        return cls(jobj)
class Resource(jose.JSONObjectWithFields):
"""ACME Resource.
:ivar acme.messages.ResourceBody body: Resource body.
"""
body = jose.Field('body')
# Adds the server-assigned location on top of the plain resource.
class ResourceWithURI(Resource):
"""ACME Resource with URI.
:ivar unicode uri: Location of the resource.
"""
uri = jose.Field('uri') # no ChallengeResource.uri
# Marker base class; concrete bodies (Registration, Authorization, ...)
# subclass this and declare their own jose fields.
class ResourceBody(jose.JSONObjectWithFields):
"""ACME Resource Body."""
class Registration(ResourceBody):
    """Registration Resource Body.

    :ivar acme.jose.jwk.JWK key: Public key.
    :ivar tuple contact: Contact information following ACME spec,
        `tuple` of `unicode`.
    :ivar unicode agreement:
    :ivar unicode authorizations: URI where
        `messages.Registration.Authorizations` can be found.
    :ivar unicode certificates: URI where
        `messages.Registration.Certificates` can be found.
    """
    # The server ignores 'key' on new-reg and fills it in from
    # JWS.signature.combined.jwk instead.
    key = jose.Field('key', omitempty=True, decoder=jose.JWK.from_json)
    contact = jose.Field('contact', omitempty=True, default=())
    agreement = jose.Field('agreement', omitempty=True)
    authorizations = jose.Field('authorizations', omitempty=True)
    certificates = jose.Field('certificates', omitempty=True)

    class Authorizations(jose.JSONObjectWithFields):
        """Authorizations granted to Account in the process of registration.

        :ivar tuple authorizations: URIs to Authorization Resources.
        """
        authorizations = jose.Field('authorizations')

    class Certificates(jose.JSONObjectWithFields):
        """Certificates granted to Account in the process of registration.

        :ivar tuple certificates: URIs to Certificate Resources.
        """
        certificates = jose.Field('certificates')

    # URI scheme prefixes used to tag entries in 'contact'.
    phone_prefix = 'tel:'
    email_prefix = 'mailto:'

    @classmethod
    def from_data(cls, phone=None, email=None, **kwargs):
        """Create registration resource from contact details."""
        contact = list(kwargs.pop('contact', ()))
        if phone is not None:
            contact.append(cls.phone_prefix + phone)
        if email is not None:
            contact.append(cls.email_prefix + email)
        kwargs['contact'] = tuple(contact)
        return cls(**kwargs)

    def _filter_contact(self, prefix):
        # Strip the scheme prefix from every matching contact entry.
        matching = (detail for detail in self.contact
                    if detail.startswith(prefix))
        return tuple(detail[len(prefix):] for detail in matching)

    @property
    def phones(self):
        """All phones found in the ``contact`` field."""
        return self._filter_contact(self.phone_prefix)

    @property
    def emails(self):
        """All emails found in the ``contact`` field."""
        return self._filter_contact(self.email_prefix)
# Registered with the Directory so clients can map the 'new-reg'
# resource type to its server URL.
@Directory.register
class NewRegistration(Registration):
"""New registration."""
resource_type = 'new-reg'
resource = fields.Resource(resource_type)
# Same payload as Registration, only the 'resource' type differs.
# Not Directory-registered: updates are POSTed to an existing URI.
class UpdateRegistration(Registration):
"""Update registration."""
resource_type = 'reg'
resource = fields.Resource(resource_type)
class RegistrationResource(ResourceWithURI):
"""Registration Resource.
:ivar acme.messages.Registration body:
:ivar unicode new_authzr_uri: URI found in the 'next' ``Link`` header
:ivar unicode terms_of_service: URL for the CA TOS.
"""
body = jose.Field('body', decoder=Registration.from_json)
new_authzr_uri = jose.Field('new_authzr_uri')
terms_of_service = jose.Field('terms_of_service', omitempty=True)
class ChallengeBody(ResourceBody):
"""Challenge Resource Body.
.. todo::
Confusingly, this has a similar name to `.challenges.Challenge`,
as well as `.achallenges.AnnotatedChallenge`. Please use names
such as ``challb`` to distinguish instances of this class from
``achall``.
:ivar acme.challenges.Challenge: Wrapped challenge.
Conveniently, all challenge fields are proxied, i.e. you can
call ``challb.x`` to get ``challb.chall.x`` contents.
:ivar acme.messages.Status status:
:ivar datetime.datetime validated:
:ivar messages.Error error:
"""
# 'chall' is stored outside the jose field machinery, hence __slots__.
__slots__ = ('chall',)
uri = jose.Field('uri')
status = jose.Field('status', decoder=Status.from_json,
omitempty=True, default=STATUS_PENDING)
validated = fields.RFC3339Field('validated', omitempty=True)
error = jose.Field('error', decoder=Error.from_json,
omitempty=True, default=None)
# Serialization merges the wrapped challenge's fields into this body's.
def to_partial_json(self):
jobj = super(ChallengeBody, self).to_partial_json()
jobj.update(self.chall.to_partial_json())
return jobj
# Deserialization parses the same JSON object twice: once for the
# declared body fields, once for the wrapped challenge itself.
@classmethod
def fields_from_json(cls, jobj):
jobj_fields = super(ChallengeBody, cls).fields_from_json(jobj)
jobj_fields['chall'] = challenges.Challenge.from_json(jobj)
return jobj_fields
# Proxy unknown attributes to the wrapped challenge.
def __getattr__(self, name):
return getattr(self.chall, name)
class ChallengeResource(Resource):
"""Challenge Resource.
:ivar acme.messages.ChallengeBody body:
:ivar unicode authzr_uri: URI found in the 'up' ``Link`` header.
"""
body = jose.Field('body', decoder=ChallengeBody.from_json)
authzr_uri = jose.Field('authzr_uri')
# Unlike ResourceWithURI, the URI lives on the body, so it is exposed
# through a read-only property instead of a jose field.
@property
def uri(self): # pylint: disable=missing-docstring,no-self-argument
# bug? 'method already defined line None'
# pylint: disable=function-redefined
return self.body.uri # pylint: disable=no-member
class Authorization(ResourceBody):
"""Authorization Resource Body.
:ivar acme.messages.Identifier identifier:
:ivar list challenges: `list` of `.ChallengeBody`
:ivar tuple combinations: Challenge combinations (`tuple` of `tuple`
of `int`, as opposed to `list` of `list` from the spec).
:ivar acme.messages.Status status:
:ivar datetime.datetime expires:
"""
identifier = jose.Field('identifier', decoder=Identifier.from_json)
challenges = jose.Field('challenges', omitempty=True)
combinations = jose.Field('combinations', omitempty=True)
status = jose.Field('status', omitempty=True, decoder=Status.from_json)
# TODO: 'expires' is allowed for Authorization Resources in
# general, but for Key Authorization '[t]he "expires" field MUST
# be absent'... then acme-spec gives example with 'expires'
# present... That's confusing!
expires = fields.RFC3339Field('expires', omitempty=True)
# Field-decoder hook: converts each raw JSON challenge into a
# ChallengeBody when the authorization is deserialized.
@challenges.decoder
def challenges(value): # pylint: disable=missing-docstring,no-self-argument
return tuple(ChallengeBody.from_json(chall) for chall in value)
# Maps every index in 'combinations' to its ChallengeBody.
@property
def resolved_combinations(self):
"""Combinations with challenges instead of indices."""
return tuple(tuple(self.challenges[idx] for idx in combo)
for combo in self.combinations)
@Directory.register
class NewAuthorization(Authorization):
"""New authorization."""
resource_type = 'new-authz'
resource = fields.Resource(resource_type)
class AuthorizationResource(ResourceWithURI):
"""Authorization Resource.
:ivar acme.messages.Authorization body:
:ivar unicode new_cert_uri: URI found in the 'next' ``Link`` header
"""
body = jose.Field('body', decoder=Authorization.from_json)
new_cert_uri = jose.Field('new_cert_uri')
@Directory.register
class CertificateRequest(jose.JSONObjectWithFields):
"""ACME new-cert request.
:ivar acme.jose.util.ComparableX509 csr:
`OpenSSL.crypto.X509Req` wrapped in `.ComparableX509`
"""
resource_type = 'new-cert'
resource = fields.Resource(resource_type)
# csr is transported base64url/DER-encoded via the jose codec helpers.
csr = jose.Field('csr', decoder=jose.decode_csr, encoder=jose.encode_csr)
class CertificateResource(ResourceWithURI):
"""Certificate Resource.
:ivar acme.jose.util.ComparableX509 body:
`OpenSSL.crypto.X509` wrapped in `.ComparableX509`
:ivar unicode cert_chain_uri: URI found in the 'up' ``Link`` header
:ivar tuple authzrs: `tuple` of `AuthorizationResource`.
"""
cert_chain_uri = jose.Field('cert_chain_uri')
authzrs = jose.Field('authzrs')
@Directory.register
class Revocation(jose.JSONObjectWithFields):
"""Revocation message.
:ivar .ComparableX509 certificate: `OpenSSL.crypto.X509` wrapped in
`.ComparableX509`
"""
resource_type = 'revoke-cert'
resource = fields.Resource(resource_type)
certificate = jose.Field(
'certificate', decoder=jose.decode_cert, encoder=jose.encode_cert)
| {
"repo_name": "bsmr-misc-forks/letsencrypt",
"path": "acme/acme/messages.py",
"copies": "2",
"size": "14786",
"license": "apache-2.0",
"hash": 6368198455206582000,
"line_mean": 30.3927813163,
"line_max": 80,
"alpha_frac": 0.6547409712,
"autogenerated": false,
"ratio": 3.8405194805194807,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0001443071527580915,
"num_lines": 471
} |
"""ACME protocol messages."""
import collections
from acme import challenges
from acme import fields
from acme import jose
from acme import util
class Error(jose.JSONObjectWithFields, Exception):
"""ACME error.
https://tools.ietf.org/html/draft-ietf-appsawg-http-problem-00
:ivar unicode typ:
:ivar unicode title:
:ivar unicode detail:
"""
# Prefix added/stripped by the 'type' field encoder/decoder below.
ERROR_TYPE_NAMESPACE = 'urn:acme:error:'
ERROR_TYPE_DESCRIPTIONS = {
'badCSR': 'The CSR is unacceptable (e.g., due to a short key)',
'badNonce': 'The client sent an unacceptable anti-replay nonce',
'connection': 'The server could not connect to the client for DV',
'dnssec': 'The server could not validate a DNSSEC signed domain',
'malformed': 'The request message was malformed',
'rateLimited': 'There were too many requests of a given type',
'serverInternal': 'The server experienced an internal error',
'tls': 'The server experienced a TLS error during DV',
'unauthorized': 'The client lacks sufficient authorization',
'unknownHost': 'The server could not resolve a domain name',
}
typ = jose.Field('type')
title = jose.Field('title', omitempty=True)
detail = jose.Field('detail')
# In-memory typ holds only the bare code; the namespace is re-added
# on serialization.
@typ.encoder
def typ(value): # pylint: disable=missing-docstring,no-self-argument
return Error.ERROR_TYPE_NAMESPACE + value
@typ.decoder
def typ(value): # pylint: disable=missing-docstring,no-self-argument
# pylint thinks isinstance(value, Error), so startswith is not found
# pylint: disable=no-member
if not value.startswith(Error.ERROR_TYPE_NAMESPACE):
raise jose.DeserializationError('Missing error type prefix')
without_prefix = value[len(Error.ERROR_TYPE_NAMESPACE):]
if without_prefix not in Error.ERROR_TYPE_DESCRIPTIONS:
raise jose.DeserializationError('Error type not recognized')
return without_prefix
@property
def description(self):
"""Hardcoded error description based on its type.
:rtype: unicode
"""
# Raises KeyError for unknown codes; typ is validated by the
# decoder on deserialization.
return self.ERROR_TYPE_DESCRIPTIONS[self.typ]
def __str__(self):
# NOTE(review): when typ is set, join assumes detail is a string;
# 'detail' is declared as a required field here, so presumably it
# is never None — confirm against callers.
if self.typ is not None:
return ' :: '.join([self.typ, self.description, self.detail])
else:
return str(self.detail)
class _Constant(jose.JSONDeSerializable, collections.Hashable):
    """ACME constant."""
    # NOTE(review): ``collections.Hashable`` is a deprecated alias removed
    # in Python 3.10 (use ``collections.abc.Hashable``); left unchanged to
    # keep Python 2 compatibility of this file.
    __slots__ = ('name',)
    # Subclasses supply a dict; instances register themselves in it on
    # construction, enabling name -> singleton lookup in from_json.
    POSSIBLE_NAMES = NotImplemented

    def __init__(self, name):
        self.POSSIBLE_NAMES[name] = self
        self.name = name

    def to_partial_json(self):
        return self.name

    @classmethod
    def from_json(cls, value):
        if value not in cls.POSSIBLE_NAMES:
            raise jose.DeserializationError(
                '{0} not recognized'.format(cls.__name__))
        return cls.POSSIBLE_NAMES[value]

    def __repr__(self):
        return '{0}({1})'.format(self.__class__.__name__, self.name)

    def __eq__(self, other):
        return isinstance(other, type(self)) and other.name == self.name

    def __ne__(self, other):
        return not self == other

    def __hash__(self):
        return hash((self.__class__, self.name))
class Status(_Constant):
"""ACME "status" field."""
POSSIBLE_NAMES = {}
# Module-level singletons; constructing each one registers its name in
# Status.POSSIBLE_NAMES (see _Constant.__init__).
STATUS_UNKNOWN = Status('unknown')
STATUS_PENDING = Status('pending')
STATUS_PROCESSING = Status('processing')
STATUS_VALID = Status('valid')
STATUS_INVALID = Status('invalid')
STATUS_REVOKED = Status('revoked')
class IdentifierType(_Constant):
"""ACME identifier type."""
POSSIBLE_NAMES = {}
IDENTIFIER_FQDN = IdentifierType('dns') # IdentifierDNS in Boulder
class Identifier(jose.JSONObjectWithFields):
"""ACME identifier.
:ivar IdentifierType typ:
:ivar unicode value:
"""
typ = jose.Field('type', decoder=IdentifierType.from_json)
value = jose.Field('value')
class Directory(jose.JSONDeSerializable):
"""Directory."""
# resource_type string -> registered resource class.
_REGISTERED_TYPES = {}
@classmethod
def _canon_key(cls, key):
# Accept either a registered resource class (keyed by its
# resource_type) or a plain string key.
return getattr(key, 'resource_type', key)
@classmethod
def register(cls, resource_body_cls):
"""Register resource."""
# NOTE(review): assert is stripped under 'python -O'; a duplicate
# registration would then go unnoticed.
assert resource_body_cls.resource_type not in cls._REGISTERED_TYPES
cls._REGISTERED_TYPES[resource_body_cls.resource_type] = resource_body_cls
return resource_body_cls
def __init__(self, jobj):
canon_jobj = util.map_keys(jobj, self._canon_key)
# Reject any directory key that is not a registered resource type.
if not set(canon_jobj).issubset(self._REGISTERED_TYPES):
# TODO: acme-spec is not clear about this: 'It is a JSON
# dictionary, whose keys are the "resource" values listed
# in {{https-requests}}'z
raise ValueError('Wrong directory fields')
# TODO: check that everything is an absolute URL; acme-spec is
# not clear on that
self._jobj = canon_jobj
def __getattr__(self, name):
# Attribute access maps snake_case to the dash-separated JSON key.
try:
return self[name.replace('_', '-')]
except KeyError as error:
raise AttributeError(str(error))
def __getitem__(self, name):
try:
return self._jobj[self._canon_key(name)]
except KeyError:
raise KeyError('Directory field not found')
def to_partial_json(self):
return self._jobj
@classmethod
def from_json(cls, jobj):
# Translate the constructor's ValueError into the deserialization
# error callers of from_json expect.
try:
return cls(jobj)
except ValueError as error:
raise jose.DeserializationError(str(error))
class Resource(jose.JSONObjectWithFields):
"""ACME Resource.
:ivar acme.messages.ResourceBody body: Resource body.
"""
body = jose.Field('body')
# Adds the server-assigned location on top of the plain resource.
class ResourceWithURI(Resource):
"""ACME Resource with URI.
:ivar unicode uri: Location of the resource.
"""
uri = jose.Field('uri') # no ChallengeResource.uri
# Marker base class; concrete bodies (Registration, Authorization, ...)
# subclass this and declare their own jose fields.
class ResourceBody(jose.JSONObjectWithFields):
"""ACME Resource Body."""
class Registration(ResourceBody):
    """Registration Resource Body.

    :ivar acme.jose.jwk.JWK key: Public key.
    :ivar tuple contact: Contact information following ACME spec,
        `tuple` of `unicode`.
    :ivar unicode agreement:
    :ivar unicode authorizations: URI where
        `messages.Registration.Authorizations` can be found.
    :ivar unicode certificates: URI where
        `messages.Registration.Certificates` can be found.
    """
    # The server ignores 'key' on new-reg and fills it in from
    # JWS.signature.combined.jwk instead.
    key = jose.Field('key', omitempty=True, decoder=jose.JWK.from_json)
    contact = jose.Field('contact', omitempty=True, default=())
    agreement = jose.Field('agreement', omitempty=True)
    authorizations = jose.Field('authorizations', omitempty=True)
    certificates = jose.Field('certificates', omitempty=True)

    class Authorizations(jose.JSONObjectWithFields):
        """Authorizations granted to Account in the process of registration.

        :ivar tuple authorizations: URIs to Authorization Resources.
        """
        authorizations = jose.Field('authorizations')

    class Certificates(jose.JSONObjectWithFields):
        """Certificates granted to Account in the process of registration.

        :ivar tuple certificates: URIs to Certificate Resources.
        """
        certificates = jose.Field('certificates')

    # URI scheme prefixes used to tag entries in 'contact'.
    phone_prefix = 'tel:'
    email_prefix = 'mailto:'

    @classmethod
    def from_data(cls, phone=None, email=None, **kwargs):
        """Create registration resource from contact details."""
        contact = list(kwargs.pop('contact', ()))
        if phone is not None:
            contact.append(cls.phone_prefix + phone)
        if email is not None:
            contact.append(cls.email_prefix + email)
        kwargs['contact'] = tuple(contact)
        return cls(**kwargs)

    def _filter_contact(self, prefix):
        # Strip the scheme prefix from every matching contact entry.
        matching = (detail for detail in self.contact
                    if detail.startswith(prefix))
        return tuple(detail[len(prefix):] for detail in matching)

    @property
    def phones(self):
        """All phones found in the ``contact`` field."""
        return self._filter_contact(self.phone_prefix)

    @property
    def emails(self):
        """All emails found in the ``contact`` field."""
        return self._filter_contact(self.email_prefix)
# Registered with the Directory so clients can map the 'new-reg'
# resource type to its server URL.
@Directory.register
class NewRegistration(Registration):
"""New registration."""
resource_type = 'new-reg'
resource = fields.Resource(resource_type)
# Same payload as Registration, only the 'resource' type differs.
# Not Directory-registered: updates are POSTed to an existing URI.
class UpdateRegistration(Registration):
"""Update registration."""
resource_type = 'reg'
resource = fields.Resource(resource_type)
class RegistrationResource(ResourceWithURI):
"""Registration Resource.
:ivar acme.messages.Registration body:
:ivar unicode new_authzr_uri: URI found in the 'next' ``Link`` header
:ivar unicode terms_of_service: URL for the CA TOS.
"""
body = jose.Field('body', decoder=Registration.from_json)
new_authzr_uri = jose.Field('new_authzr_uri')
terms_of_service = jose.Field('terms_of_service', omitempty=True)
class ChallengeBody(ResourceBody):
"""Challenge Resource Body.
.. todo::
Confusingly, this has a similar name to `.challenges.Challenge`,
as well as `.achallenges.AnnotatedChallenge`. Please use names
such as ``challb`` to distinguish instances of this class from
``achall``.
:ivar acme.challenges.Challenge: Wrapped challenge.
Conveniently, all challenge fields are proxied, i.e. you can
call ``challb.x`` to get ``challb.chall.x`` contents.
:ivar acme.messages.Status status:
:ivar datetime.datetime validated:
:ivar messages.Error error:
"""
# 'chall' is stored outside the jose field machinery, hence __slots__.
__slots__ = ('chall',)
uri = jose.Field('uri')
status = jose.Field('status', decoder=Status.from_json,
omitempty=True, default=STATUS_PENDING)
validated = fields.RFC3339Field('validated', omitempty=True)
error = jose.Field('error', decoder=Error.from_json,
omitempty=True, default=None)
# Serialization merges the wrapped challenge's fields into this body's.
def to_partial_json(self):
jobj = super(ChallengeBody, self).to_partial_json()
jobj.update(self.chall.to_partial_json())
return jobj
# Deserialization parses the same JSON object twice: once for the
# declared body fields, once for the wrapped challenge itself.
@classmethod
def fields_from_json(cls, jobj):
jobj_fields = super(ChallengeBody, cls).fields_from_json(jobj)
jobj_fields['chall'] = challenges.Challenge.from_json(jobj)
return jobj_fields
# Proxy unknown attributes to the wrapped challenge.
def __getattr__(self, name):
return getattr(self.chall, name)
class ChallengeResource(Resource):
"""Challenge Resource.
:ivar acme.messages.ChallengeBody body:
:ivar unicode authzr_uri: URI found in the 'up' ``Link`` header.
"""
body = jose.Field('body', decoder=ChallengeBody.from_json)
authzr_uri = jose.Field('authzr_uri')
# Unlike ResourceWithURI, the URI lives on the body, so it is exposed
# through a read-only property instead of a jose field.
@property
def uri(self): # pylint: disable=missing-docstring,no-self-argument
# bug? 'method already defined line None'
# pylint: disable=function-redefined
return self.body.uri # pylint: disable=no-member
class Authorization(ResourceBody):
"""Authorization Resource Body.
:ivar acme.messages.Identifier identifier:
:ivar list challenges: `list` of `.ChallengeBody`
:ivar tuple combinations: Challenge combinations (`tuple` of `tuple`
of `int`, as opposed to `list` of `list` from the spec).
:ivar acme.messages.Status status:
:ivar datetime.datetime expires:
"""
identifier = jose.Field('identifier', decoder=Identifier.from_json)
challenges = jose.Field('challenges', omitempty=True)
combinations = jose.Field('combinations', omitempty=True)
status = jose.Field('status', omitempty=True, decoder=Status.from_json)
# TODO: 'expires' is allowed for Authorization Resources in
# general, but for Key Authorization '[t]he "expires" field MUST
# be absent'... then acme-spec gives example with 'expires'
# present... That's confusing!
expires = fields.RFC3339Field('expires', omitempty=True)
# Field-decoder hook: converts each raw JSON challenge into a
# ChallengeBody when the authorization is deserialized.
@challenges.decoder
def challenges(value): # pylint: disable=missing-docstring,no-self-argument
return tuple(ChallengeBody.from_json(chall) for chall in value)
# Maps every index in 'combinations' to its ChallengeBody.
@property
def resolved_combinations(self):
"""Combinations with challenges instead of indices."""
return tuple(tuple(self.challenges[idx] for idx in combo)
for combo in self.combinations)
@Directory.register
class NewAuthorization(Authorization):
"""New authorization."""
resource_type = 'new-authz'
resource = fields.Resource(resource_type)
class AuthorizationResource(ResourceWithURI):
"""Authorization Resource.
:ivar acme.messages.Authorization body:
:ivar unicode new_cert_uri: URI found in the 'next' ``Link`` header
"""
body = jose.Field('body', decoder=Authorization.from_json)
new_cert_uri = jose.Field('new_cert_uri')
@Directory.register
class CertificateRequest(jose.JSONObjectWithFields):
"""ACME new-cert request.
:ivar acme.jose.util.ComparableX509 csr:
`OpenSSL.crypto.X509Req` wrapped in `.ComparableX509`
"""
resource_type = 'new-cert'
resource = fields.Resource(resource_type)
# csr is transported base64url/DER-encoded via the jose codec helpers.
csr = jose.Field('csr', decoder=jose.decode_csr, encoder=jose.encode_csr)
class CertificateResource(ResourceWithURI):
"""Certificate Resource.
:ivar acme.jose.util.ComparableX509 body:
`OpenSSL.crypto.X509` wrapped in `.ComparableX509`
:ivar unicode cert_chain_uri: URI found in the 'up' ``Link`` header
:ivar tuple authzrs: `tuple` of `AuthorizationResource`.
"""
cert_chain_uri = jose.Field('cert_chain_uri')
authzrs = jose.Field('authzrs')
@Directory.register
class Revocation(jose.JSONObjectWithFields):
"""Revocation message.
:ivar .ComparableX509 certificate: `OpenSSL.crypto.X509` wrapped in
`.ComparableX509`
"""
resource_type = 'revoke-cert'
resource = fields.Resource(resource_type)
certificate = jose.Field(
'certificate', decoder=jose.decode_cert, encoder=jose.encode_cert)
| {
"repo_name": "ghyde/letsencrypt",
"path": "acme/acme/messages.py",
"copies": "4",
"size": "14016",
"license": "apache-2.0",
"hash": 5908525603681151000,
"line_mean": 30.7823129252,
"line_max": 82,
"alpha_frac": 0.6577482877,
"autogenerated": false,
"ratio": 3.8890122086570478,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.6546760496357047,
"avg_score": null,
"num_lines": null
} |
"""ACME protocol messages."""
import collections
from six.moves.urllib import parse as urllib_parse # pylint: disable=import-error
from acme import challenges
from acme import fields
from acme import interfaces
from acme import jose
class Error(jose.JSONObjectWithFields, Exception):
"""ACME error.
https://tools.ietf.org/html/draft-ietf-appsawg-http-problem-00
:ivar unicode typ:
:ivar unicode title:
:ivar unicode detail:
"""
# Prefix added/stripped by the 'type' field encoder/decoder below.
ERROR_TYPE_NAMESPACE = 'urn:acme:error:'
ERROR_TYPE_DESCRIPTIONS = {
'badCSR': 'The CSR is unacceptable (e.g., due to a short key)',
'badNonce': 'The client sent an unacceptable anti-replay nonce',
'connection': 'The server could not connect to the client for DV',
'dnssec': 'The server could not validate a DNSSEC signed domain',
'malformed': 'The request message was malformed',
'serverInternal': 'The server experienced an internal error',
'tls': 'The server experienced a TLS error during DV',
'unauthorized': 'The client lacks sufficient authorization',
'unknownHost': 'The server could not resolve a domain name',
}
typ = jose.Field('type')
title = jose.Field('title', omitempty=True)
detail = jose.Field('detail')
# In-memory typ holds only the bare code; the namespace is re-added
# on serialization.
@typ.encoder
def typ(value): # pylint: disable=missing-docstring,no-self-argument
return Error.ERROR_TYPE_NAMESPACE + value
@typ.decoder
def typ(value): # pylint: disable=missing-docstring,no-self-argument
# pylint thinks isinstance(value, Error), so startswith is not found
# pylint: disable=no-member
if not value.startswith(Error.ERROR_TYPE_NAMESPACE):
raise jose.DeserializationError('Missing error type prefix')
without_prefix = value[len(Error.ERROR_TYPE_NAMESPACE):]
if without_prefix not in Error.ERROR_TYPE_DESCRIPTIONS:
raise jose.DeserializationError('Error type not recognized')
return without_prefix
@property
def description(self):
"""Hardcoded error description based on its type.
:rtype: unicode
"""
# Raises KeyError for unknown codes; typ is validated by the
# decoder on deserialization.
return self.ERROR_TYPE_DESCRIPTIONS[self.typ]
def __str__(self):
# NOTE(review): when typ is set, join assumes detail is a string;
# 'detail' is declared as a required field here, so presumably it
# is never None — confirm against callers.
if self.typ is not None:
return ' :: '.join([self.typ, self.description, self.detail])
else:
return str(self.detail)
class _Constant(jose.JSONDeSerializable, collections.Hashable):
    """ACME constant.

    A named, hashable singleton; instantiating a subclass registers the
    instance in that subclass's ``POSSIBLE_NAMES`` map, which backs
    :meth:`from_json` lookups.

    """
    __slots__ = ('name',)
    POSSIBLE_NAMES = NotImplemented
    def __init__(self, name):
        self.name = name
        # Registering here makes each constant round-trip through JSON.
        self.POSSIBLE_NAMES[name] = self
    def to_partial_json(self):
        return self.name
    @classmethod
    def from_json(cls, value):
        if value in cls.POSSIBLE_NAMES:
            return cls.POSSIBLE_NAMES[value]
        raise jose.DeserializationError(
            '{0} not recognized'.format(cls.__name__))
    def __repr__(self):
        return '%s(%s)' % (self.__class__.__name__, self.name)
    def __eq__(self, other):
        same_kind = isinstance(other, type(self))
        return same_kind and self.name == other.name
    def __hash__(self):
        return hash((self.__class__, self.name))
    def __ne__(self, other):
        return not (self == other)
class Status(_Constant):
    """ACME "status" field."""
    POSSIBLE_NAMES = {}
# Singleton status constants; instantiation registers each one in
# Status.POSSIBLE_NAMES (see _Constant.__init__).
STATUS_UNKNOWN = Status('unknown')
STATUS_PENDING = Status('pending')
STATUS_PROCESSING = Status('processing')
STATUS_VALID = Status('valid')
STATUS_INVALID = Status('invalid')
STATUS_REVOKED = Status('revoked')
class IdentifierType(_Constant):
    """ACME identifier type."""
    POSSIBLE_NAMES = {}
IDENTIFIER_FQDN = IdentifierType('dns')  # IdentifierDNS in Boulder
class Identifier(jose.JSONObjectWithFields):
    """ACME identifier.

    :ivar IdentifierType typ:
    :ivar unicode value:

    """
    typ = jose.Field('type', decoder=IdentifierType.from_json)
    value = jose.Field('value')
class Resource(jose.JSONObjectWithFields):
    """ACME Resource.

    :ivar acme.messages.ResourceBody body: Resource body.

    """
    body = jose.Field('body')
class ResourceWithURI(Resource):
    """ACME Resource with URI.

    :ivar unicode uri: Location of the resource.

    """
    uri = jose.Field('uri')  # no ChallengeResource.uri
class ResourceBody(jose.JSONObjectWithFields):
    """ACME Resource Body."""
    # Marker base class for resource bodies; declares no fields itself.
class Registration(interfaces.ClientRequestableResource, ResourceBody):
    """Registration Resource Body.

    :ivar acme.jose.jwk.JWK key: Public key.
    :ivar tuple contact: Contact information following ACME spec,
        `tuple` of `unicode`.
    :ivar unicode recovery_token:
    :ivar unicode agreement:

    """
    resource_type = 'new-reg'
    # on new-reg key server ignores 'key' and populates it based on
    # JWS.signature.combined.jwk
    key = jose.Field('key', omitempty=True, decoder=jose.JWK.from_json)
    contact = jose.Field('contact', omitempty=True, default=())
    recovery_token = jose.Field('recoveryToken', omitempty=True)
    agreement = jose.Field('agreement', omitempty=True)
    # URI scheme prefixes used to tag entries in the 'contact' field.
    phone_prefix = 'tel:'
    email_prefix = 'mailto:'
    @classmethod
    def from_data(cls, phone=None, email=None, **kwargs):
        """Create registration resource from contact details."""
        details = list(kwargs.pop('contact', ()))
        if phone is not None:
            details.append(cls.phone_prefix + phone)
        if email is not None:
            details.append(cls.email_prefix + email)
        kwargs['contact'] = tuple(details)
        return cls(**kwargs)
    def _filter_contact(self, prefix):
        # Return prefix-matching contact entries with the prefix stripped.
        return tuple(
            detail[len(prefix):] for detail in self.contact
            if detail.startswith(prefix))
    @property
    def phones(self):
        """All phones found in the ``contact`` field."""
        return self._filter_contact(self.phone_prefix)
    @property
    def emails(self):
        """All emails found in the ``contact`` field."""
        return self._filter_contact(self.email_prefix)
class RegistrationResource(interfaces.ClientRequestableResource,
                           ResourceWithURI):
    """Registration Resource.

    :ivar acme.messages.Registration body:
    :ivar unicode new_authzr_uri: URI found in the 'next' ``Link`` header
    :ivar unicode terms_of_service: URL for the CA TOS.

    """
    resource_type = 'reg'
    body = jose.Field('body', decoder=Registration.from_json)
    new_authzr_uri = jose.Field('new_authzr_uri')
    terms_of_service = jose.Field('terms_of_service', omitempty=True)
class ChallengeBody(ResourceBody):
    """Challenge Resource Body.

    .. todo::
       Confusingly, this has a similar name to `.challenges.Challenge`,
       as well as `.achallenges.AnnotatedChallenge`. Please use names
       such as ``challb`` to distinguish instances of this class from
       ``achall``.

    :ivar acme.challenges.Challenge: Wrapped challenge.
        Conveniently, all challenge fields are proxied, i.e. you can
        call ``challb.x`` to get ``challb.chall.x`` contents.
    :ivar acme.messages.Status status:
    :ivar datetime.datetime validated:
    :ivar Error error:

    """
    __slots__ = ('chall',)
    uri = jose.Field('uri')
    status = jose.Field('status', decoder=Status.from_json,
                        omitempty=True, default=STATUS_PENDING)
    validated = fields.RFC3339Field('validated', omitempty=True)
    error = jose.Field('error', decoder=Error.from_json,
                       omitempty=True, default=None)
    def to_partial_json(self):
        # Merge the wrapped challenge's fields into this body's JSON.
        jobj = super(ChallengeBody, self).to_partial_json()
        jobj.update(self.chall.to_partial_json())
        return jobj
    @classmethod
    def fields_from_json(cls, jobj):
        jobj_fields = super(ChallengeBody, cls).fields_from_json(jobj)
        jobj_fields['chall'] = challenges.Challenge.from_json(jobj)
        return jobj_fields
    def __getattr__(self, name):
        # Proxy unknown attribute lookups to the wrapped challenge.
        return getattr(self.chall, name)
class ChallengeResource(Resource):
    """Challenge Resource.

    :ivar acme.messages.ChallengeBody body:
    :ivar unicode authzr_uri: URI found in the 'up' ``Link`` header.

    """
    body = jose.Field('body', decoder=ChallengeBody.from_json)
    authzr_uri = jose.Field('authzr_uri')
    @property
    def uri(self):  # pylint: disable=missing-docstring,no-self-argument
        # Convenience accessor: the challenge's URI lives on the body.
        # bug? 'method already defined line None'
        # pylint: disable=function-redefined
        return self.body.uri  # pylint: disable=no-member
class Authorization(interfaces.ClientRequestableResource, ResourceBody):
    """Authorization Resource Body.

    :ivar acme.messages.Identifier identifier:
    :ivar list challenges: `list` of `.ChallengeBody`
    :ivar tuple combinations: Challenge combinations (`tuple` of `tuple`
        of `int`, as opposed to `list` of `list` from the spec).
    :ivar acme.messages.Status status:
    :ivar datetime.datetime expires:

    """
    resource_type = 'new-authz'
    identifier = jose.Field('identifier', decoder=Identifier.from_json)
    challenges = jose.Field('challenges', omitempty=True)
    combinations = jose.Field('combinations', omitempty=True)
    status = jose.Field('status', omitempty=True, decoder=Status.from_json)
    # TODO: 'expires' is allowed for Authorization Resources in
    # general, but for Key Authorization '[t]he "expires" field MUST
    # be absent'... then acme-spec gives example with 'expires'
    # present... That's confusing!
    expires = fields.RFC3339Field('expires', omitempty=True)
    @challenges.decoder
    def challenges(value):  # pylint: disable=missing-docstring,no-self-argument
        return tuple(ChallengeBody.from_json(chall) for chall in value)
    @property
    def resolved_combinations(self):
        """Combinations with challenges instead of indices."""
        # Replace each index with the corresponding ChallengeBody.
        return tuple(tuple(self.challenges[idx] for idx in combo)
                     for combo in self.combinations)
class AuthorizationResource(ResourceWithURI):
    """Authorization Resource.

    :ivar acme.messages.Authorization body:
    :ivar unicode new_cert_uri: URI found in the 'next' ``Link`` header

    """
    body = jose.Field('body', decoder=Authorization.from_json)
    new_cert_uri = jose.Field('new_cert_uri')
class CertificateRequest(interfaces.ClientRequestableResource,
                         jose.JSONObjectWithFields):
    """ACME new-cert request.

    :ivar acme.jose.util.ComparableX509 csr:
        `OpenSSL.crypto.X509Req` wrapped in `.ComparableX509`
    :ivar tuple authorizations: `tuple` of URIs (`str`)

    """
    resource_type = 'new-cert'
    csr = jose.Field('csr', decoder=jose.decode_csr, encoder=jose.encode_csr)
    # decoder=tuple: normalizes the JSON list of URIs into a tuple.
    authorizations = jose.Field('authorizations', decoder=tuple)
class CertificateResource(interfaces.ClientRequestableResource,
                          ResourceWithURI):
    """Certificate Resource.

    :ivar acme.jose.util.ComparableX509 body:
        `OpenSSL.crypto.X509` wrapped in `.ComparableX509`
    :ivar unicode cert_chain_uri: URI found in the 'up' ``Link`` header
    :ivar tuple authzrs: `tuple` of `AuthorizationResource`.

    """
    resource_type = 'cert'
    cert_chain_uri = jose.Field('cert_chain_uri')
    authzrs = jose.Field('authzrs')
class Revocation(interfaces.ClientRequestableResource,
                 jose.JSONObjectWithFields):
    """Revocation message.

    :ivar .ComparableX509 certificate: `OpenSSL.crypto.X509` wrapped in
        `.ComparableX509`

    """
    resource_type = 'revoke-cert'
    certificate = jose.Field(
        'certificate', decoder=jose.decode_cert, encoder=jose.encode_cert)
    # TODO: acme-spec#138, this allows only one ACME server instance per domain
    PATH = '/acme/revoke-cert'
    """Path to revocation URL, see `url`"""
    @classmethod
    def url(cls, base):
        """Get revocation URL.

        :param str base: New Registration Resource or server (root) URL.

        """
        # Joined with urljoin, so an absolute PATH replaces base's path.
        return urllib_parse.urljoin(base, cls.PATH)
| {
"repo_name": "Jonadabe/letsencrypt",
"path": "acme/acme/messages.py",
"copies": "1",
"size": "11999",
"license": "apache-2.0",
"hash": 8189312234952371000,
"line_mean": 31.2553763441,
"line_max": 82,
"alpha_frac": 0.658388199,
"autogenerated": false,
"ratio": 3.8433696348494553,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5001757833849455,
"avg_score": null,
"num_lines": null
} |
"""ACME protocol messages."""
import six
import json
try:
from collections.abc import Hashable # pylint: disable=no-name-in-module
except ImportError: # pragma: no cover
from collections import Hashable
import josepy as jose
from acme import challenges
from acme import errors
from acme import fields
from acme import util
from acme import jws
# Both the legacy ACME error namespace and the IETF one are recognized.
OLD_ERROR_PREFIX = "urn:acme:error:"
ERROR_PREFIX = "urn:ietf:params:acme:error:"
ERROR_CODES = {
    'badCSR': 'The CSR is unacceptable (e.g., due to a short key)',
    'badNonce': 'The client sent an unacceptable anti-replay nonce',
    'connection': ('The server could not connect to the client to verify the'
                   ' domain'),
    'dnssec': 'The server could not validate a DNSSEC signed domain',
    # deprecate invalidEmail
    'invalidEmail': 'The provided email for a registration was invalid',
    'invalidContact': 'The provided contact URI was invalid',
    'malformed': 'The request message was malformed',
    'rateLimited': 'There were too many requests of a given type',
    'serverInternal': 'The server experienced an internal error',
    'tls': 'The server experienced a TLS error during domain verification',
    'unauthorized': 'The client lacks sufficient authorization',
    'unknownHost': 'The server could not resolve a domain name',
    'externalAccountRequired': 'The server requires external account binding',
}
# Map fully-qualified error types (both prefixes) to their descriptions.
ERROR_TYPE_DESCRIPTIONS = dict(
    (ERROR_PREFIX + name, desc) for name, desc in ERROR_CODES.items())
ERROR_TYPE_DESCRIPTIONS.update(dict(  # add errors with old prefix, deprecate me
    (OLD_ERROR_PREFIX + name, desc) for name, desc in ERROR_CODES.items()))
def is_acme_error(err):
    """Check if argument is an ACME error."""
    # Only Error instances with a populated 'type' can be ACME errors.
    if not isinstance(err, Error) or err.typ is None:
        return False
    return ERROR_PREFIX in err.typ or OLD_ERROR_PREFIX in err.typ
@six.python_2_unicode_compatible
class Error(jose.JSONObjectWithFields, errors.Error):
    """ACME error.

    https://tools.ietf.org/html/draft-ietf-appsawg-http-problem-00

    :ivar unicode typ:
    :ivar unicode title:
    :ivar unicode detail:

    """
    typ = jose.Field('type', omitempty=True, default='about:blank')
    title = jose.Field('title', omitempty=True)
    detail = jose.Field('detail', omitempty=True)
    @classmethod
    def with_code(cls, code, **kwargs):
        """Create an Error instance with an ACME Error code.

        :unicode code: An ACME error code, like 'dnssec'.
        :kwargs: kwargs to pass to Error.

        """
        if code not in ERROR_CODES:
            raise ValueError("The supplied code: %s is not a known ACME error"
                             " code" % code)
        typ = ERROR_PREFIX + code
        return cls(typ=typ, **kwargs)
    @property
    def description(self):
        """Hardcoded error description based on its type.

        :returns: Description if standard ACME error or ``None``.
        :rtype: unicode

        """
        return ERROR_TYPE_DESCRIPTIONS.get(self.typ)
    @property
    def code(self):
        """ACME error code.

        Basically self.typ without the ERROR_PREFIX.

        :returns: error code if standard ACME code or ``None``.
        :rtype: unicode

        """
        code = str(self.typ).split(':')[-1]
        if code in ERROR_CODES:
            return code
        # falls through to an implicit None for unknown codes
    def __str__(self):
        # Encode each present part to ASCII (backslash-escaping the rest),
        # join with ' :: ', then decode back to text.
        return b' :: '.join(
            part.encode('ascii', 'backslashreplace') for part in
            (self.typ, self.description, self.detail, self.title)
            if part is not None).decode()
class _Constant(jose.JSONDeSerializable, Hashable):  # type: ignore
    """ACME constant."""
    __slots__ = ('name',)
    # Subclasses override with a dict; instantiation registers there,
    # which is what backs from_json lookups.
    POSSIBLE_NAMES = NotImplemented
    def __init__(self, name):
        self.POSSIBLE_NAMES[name] = self
        self.name = name
    def to_partial_json(self):
        return self.name
    @classmethod
    def from_json(cls, value):
        if value not in cls.POSSIBLE_NAMES:
            raise jose.DeserializationError(
                '{0} not recognized'.format(cls.__name__))
        return cls.POSSIBLE_NAMES[value]
    def __repr__(self):
        return '{0}({1})'.format(self.__class__.__name__, self.name)
    def __eq__(self, other):
        return isinstance(other, type(self)) and other.name == self.name
    def __hash__(self):
        return hash((self.__class__, self.name))
    def __ne__(self, other):
        return not self == other
class Status(_Constant):
    """ACME "status" field."""
    POSSIBLE_NAMES = {}  # type: dict
# Singleton status constants; instantiation registers each one in
# Status.POSSIBLE_NAMES (see _Constant.__init__).
STATUS_UNKNOWN = Status('unknown')
STATUS_PENDING = Status('pending')
STATUS_PROCESSING = Status('processing')
STATUS_VALID = Status('valid')
STATUS_INVALID = Status('invalid')
STATUS_REVOKED = Status('revoked')
STATUS_READY = Status('ready')
class IdentifierType(_Constant):
    """ACME identifier type."""
    POSSIBLE_NAMES = {}  # type: dict
IDENTIFIER_FQDN = IdentifierType('dns')  # IdentifierDNS in Boulder
class Identifier(jose.JSONObjectWithFields):
    """ACME identifier.

    :ivar IdentifierType typ:
    :ivar unicode value:

    """
    typ = jose.Field('type', decoder=IdentifierType.from_json)
    value = jose.Field('value')
class Directory(jose.JSONDeSerializable):
    """Directory."""
    # Maps resource type name (e.g. 'new-reg') -> registered body class.
    _REGISTERED_TYPES = {}  # type: dict
    class Meta(jose.JSONObjectWithFields):
        """Directory Meta."""
        # ACMEv1 spells this key 'terms-of-service'; ACMEv2 uses
        # 'termsOfService'. Both are accepted; see terms_of_service below.
        _terms_of_service = jose.Field('terms-of-service', omitempty=True)
        _terms_of_service_v2 = jose.Field('termsOfService', omitempty=True)
        website = jose.Field('website', omitempty=True)
        caa_identities = jose.Field('caaIdentities', omitempty=True)
        external_account_required = jose.Field('externalAccountRequired', omitempty=True)
        def __init__(self, **kwargs):
            # Accept the public kwarg name 'terms_of_service' and store it
            # under the internal underscore-prefixed field name.
            kwargs = dict((self._internal_name(k), v) for k, v in kwargs.items())
            # pylint: disable=star-args
            super(Directory.Meta, self).__init__(**kwargs)
        @property
        def terms_of_service(self):
            """URL for the CA TOS"""
            return self._terms_of_service or self._terms_of_service_v2
        def __iter__(self):
            # When iterating over fields, use the external name 'terms_of_service' instead of
            # the internal '_terms_of_service'.
            for name in super(Directory.Meta, self).__iter__():
                yield name[1:] if name == '_terms_of_service' else name
        def _internal_name(self, name):
            return '_' + name if name == 'terms_of_service' else name
    @classmethod
    def _canon_key(cls, key):
        # Accept either a registered resource class (uses its
        # resource_type) or a plain string key.
        return getattr(key, 'resource_type', key)
    @classmethod
    def register(cls, resource_body_cls):
        """Register resource."""
        resource_type = resource_body_cls.resource_type
        assert resource_type not in cls._REGISTERED_TYPES
        cls._REGISTERED_TYPES[resource_type] = resource_body_cls
        return resource_body_cls
    def __init__(self, jobj):
        canon_jobj = util.map_keys(jobj, self._canon_key)
        # TODO: check that everything is an absolute URL; acme-spec is
        # not clear on that
        self._jobj = canon_jobj
    def __getattr__(self, name):
        # Attribute access maps underscores to hyphens: d.new_reg -> d['new-reg'].
        try:
            return self[name.replace('_', '-')]
        except KeyError as error:
            raise AttributeError(str(error) + ': ' + name)
    def __getitem__(self, name):
        try:
            return self._jobj[self._canon_key(name)]
        except KeyError:
            raise KeyError('Directory field not found')
    def to_partial_json(self):
        return self._jobj
    @classmethod
    def from_json(cls, jobj):
        # Parse the nested 'meta' object before wrapping the rest.
        jobj['meta'] = cls.Meta.from_json(jobj.pop('meta', {}))
        return cls(jobj)
class Resource(jose.JSONObjectWithFields):
    """ACME Resource.

    :ivar acme.messages.ResourceBody body: Resource body.

    """
    body = jose.Field('body')
class ResourceWithURI(Resource):
    """ACME Resource with URI.

    :ivar unicode uri: Location of the resource.

    """
    uri = jose.Field('uri')  # no ChallengeResource.uri
class ResourceBody(jose.JSONObjectWithFields):
    """ACME Resource Body."""
    # Marker base class for resource bodies; declares no fields itself.
class ExternalAccountBinding(object):
    """ACME External Account Binding"""
    @classmethod
    def from_data(cls, account_public_key, kid, hmac_key, directory):
        """Create External Account Binding Resource from contact details, kid and hmac."""
        # Sign the account public key (serialized as JSON) with the
        # CA-provided HMAC key; the resulting JWS binds this ACME account
        # to the external account identified by ``kid``.
        key_json = json.dumps(account_public_key.to_partial_json()).encode()
        decoded_hmac_key = jose.b64.b64decode(hmac_key)
        url = directory["newAccount"]
        eab = jws.JWS.sign(key_json, jose.jwk.JWKOct(key=decoded_hmac_key),
                           jose.jwa.HS256, None,
                           url, kid)
        return eab.to_partial_json()
class Registration(ResourceBody):
    """Registration Resource Body.

    :ivar josepy.jwk.JWK key: Public key.
    :ivar tuple contact: Contact information following ACME spec,
        `tuple` of `unicode`.
    :ivar unicode agreement:

    """
    # on new-reg key server ignores 'key' and populates it based on
    # JWS.signature.combined.jwk
    key = jose.Field('key', omitempty=True, decoder=jose.JWK.from_json)
    contact = jose.Field('contact', omitempty=True, default=())
    agreement = jose.Field('agreement', omitempty=True)
    status = jose.Field('status', omitempty=True)
    terms_of_service_agreed = jose.Field('termsOfServiceAgreed', omitempty=True)
    only_return_existing = jose.Field('onlyReturnExisting', omitempty=True)
    external_account_binding = jose.Field('externalAccountBinding', omitempty=True)
    # URI scheme prefixes used to tag entries in the 'contact' field.
    phone_prefix = 'tel:'
    email_prefix = 'mailto:'
    @classmethod
    def from_data(cls, phone=None, email=None, external_account_binding=None, **kwargs):
        """Create registration resource from contact details."""
        details = list(kwargs.pop('contact', ()))
        if phone is not None:
            details.append(cls.phone_prefix + phone)
        if email is not None:
            # 'email' may be a comma-separated list of addresses.
            details.extend([cls.email_prefix + mail for mail in email.split(',')])
        kwargs['contact'] = tuple(details)
        if external_account_binding:
            kwargs['external_account_binding'] = external_account_binding
        return cls(**kwargs)
    def _filter_contact(self, prefix):
        # Return prefix-matching contact entries with the prefix stripped.
        return tuple(
            detail[len(prefix):] for detail in self.contact
            if detail.startswith(prefix))
    @property
    def phones(self):
        """All phones found in the ``contact`` field."""
        return self._filter_contact(self.phone_prefix)
    @property
    def emails(self):
        """All emails found in the ``contact`` field."""
        return self._filter_contact(self.email_prefix)
@Directory.register
class NewRegistration(Registration):
    """New registration."""
    resource_type = 'new-reg'
    resource = fields.Resource(resource_type)
class UpdateRegistration(Registration):
    """Update registration."""
    resource_type = 'reg'
    resource = fields.Resource(resource_type)
class RegistrationResource(ResourceWithURI):
    """Registration Resource.

    :ivar acme.messages.Registration body:
    :ivar unicode new_authzr_uri: Deprecated. Do not use.
    :ivar unicode terms_of_service: URL for the CA TOS.

    """
    body = jose.Field('body', decoder=Registration.from_json)
    new_authzr_uri = jose.Field('new_authzr_uri', omitempty=True)
    terms_of_service = jose.Field('terms_of_service', omitempty=True)
class ChallengeBody(ResourceBody):
    """Challenge Resource Body.

    .. todo::
       Confusingly, this has a similar name to `.challenges.Challenge`,
       as well as `.achallenges.AnnotatedChallenge`. Please use names
       such as ``challb`` to distinguish instances of this class from
       ``achall``.

    :ivar acme.challenges.Challenge: Wrapped challenge.
        Conveniently, all challenge fields are proxied, i.e. you can
        call ``challb.x`` to get ``challb.chall.x`` contents.
    :ivar acme.messages.Status status:
    :ivar datetime.datetime validated:
    :ivar messages.Error error:

    """
    __slots__ = ('chall',)
    # ACMEv1 has a "uri" field in challenges. ACMEv2 has a "url" field. This
    # challenge object supports either one, but should be accessed through the
    # name "uri". In Client.answer_challenge, whichever one is set will be
    # used.
    _uri = jose.Field('uri', omitempty=True, default=None)
    _url = jose.Field('url', omitempty=True, default=None)
    status = jose.Field('status', decoder=Status.from_json,
                        omitempty=True, default=STATUS_PENDING)
    validated = fields.RFC3339Field('validated', omitempty=True)
    error = jose.Field('error', decoder=Error.from_json,
                       omitempty=True, default=None)
    def __init__(self, **kwargs):
        # Map the public kwarg name 'uri' onto the internal '_uri' field.
        kwargs = dict((self._internal_name(k), v) for k, v in kwargs.items())
        # pylint: disable=star-args
        super(ChallengeBody, self).__init__(**kwargs)
    def encode(self, name):
        return super(ChallengeBody, self).encode(self._internal_name(name))
    def to_partial_json(self):
        # Merge the wrapped challenge's fields into this body's JSON.
        jobj = super(ChallengeBody, self).to_partial_json()
        jobj.update(self.chall.to_partial_json())
        return jobj
    @classmethod
    def fields_from_json(cls, jobj):
        jobj_fields = super(ChallengeBody, cls).fields_from_json(jobj)
        jobj_fields['chall'] = challenges.Challenge.from_json(jobj)
        return jobj_fields
    @property
    def uri(self):
        """The URL of this challenge."""
        # Prefer the ACMEv2 'url' field, fall back to the ACMEv1 'uri'.
        return self._url or self._uri
    def __getattr__(self, name):
        # Proxy unknown attribute lookups to the wrapped challenge.
        return getattr(self.chall, name)
    def __iter__(self):
        # When iterating over fields, use the external name 'uri' instead of
        # the internal '_uri'.
        for name in super(ChallengeBody, self).__iter__():
            yield name[1:] if name == '_uri' else name
    def _internal_name(self, name):
        return '_' + name if name == 'uri' else name
class ChallengeResource(Resource):
    """Challenge Resource.

    :ivar acme.messages.ChallengeBody body:
    :ivar unicode authzr_uri: URI found in the 'up' ``Link`` header.

    """
    body = jose.Field('body', decoder=ChallengeBody.from_json)
    authzr_uri = jose.Field('authzr_uri')
    @property
    def uri(self):
        """The URL of the challenge body."""
        # Convenience accessor: the URL lives on the body.
        # pylint: disable=function-redefined,no-member
        return self.body.uri
class Authorization(ResourceBody):
    """Authorization Resource Body.

    :ivar acme.messages.Identifier identifier:
    :ivar list challenges: `list` of `.ChallengeBody`
    :ivar tuple combinations: Challenge combinations (`tuple` of `tuple`
        of `int`, as opposed to `list` of `list` from the spec).
    :ivar acme.messages.Status status:
    :ivar datetime.datetime expires:

    """
    identifier = jose.Field('identifier', decoder=Identifier.from_json)
    challenges = jose.Field('challenges', omitempty=True)
    combinations = jose.Field('combinations', omitempty=True)
    status = jose.Field('status', omitempty=True, decoder=Status.from_json)
    # TODO: 'expires' is allowed for Authorization Resources in
    # general, but for Key Authorization '[t]he "expires" field MUST
    # be absent'... then acme-spec gives example with 'expires'
    # present... That's confusing!
    expires = fields.RFC3339Field('expires', omitempty=True)
    # NOTE(review): presumably the ACMEv2 'wildcard' indicator -- verify
    # against the server's order/authorization semantics.
    wildcard = jose.Field('wildcard', omitempty=True)
    @challenges.decoder
    def challenges(value):  # pylint: disable=missing-docstring,no-self-argument
        return tuple(ChallengeBody.from_json(chall) for chall in value)
    @property
    def resolved_combinations(self):
        """Combinations with challenges instead of indices."""
        # Replace each index with the corresponding ChallengeBody.
        return tuple(tuple(self.challenges[idx] for idx in combo)
                     for combo in self.combinations)
@Directory.register
class NewAuthorization(Authorization):
    """New authorization."""
    resource_type = 'new-authz'
    resource = fields.Resource(resource_type)
class AuthorizationResource(ResourceWithURI):
    """Authorization Resource.

    :ivar acme.messages.Authorization body:
    :ivar unicode new_cert_uri: Deprecated. Do not use.

    """
    body = jose.Field('body', decoder=Authorization.from_json)
    new_cert_uri = jose.Field('new_cert_uri', omitempty=True)
@Directory.register
class CertificateRequest(jose.JSONObjectWithFields):
    """ACME new-cert request.

    :ivar josepy.util.ComparableX509 csr:
        `OpenSSL.crypto.X509Req` wrapped in `.ComparableX509`

    """
    resource_type = 'new-cert'
    resource = fields.Resource(resource_type)
    csr = jose.Field('csr', decoder=jose.decode_csr, encoder=jose.encode_csr)
class CertificateResource(ResourceWithURI):
    """Certificate Resource.

    :ivar josepy.util.ComparableX509 body:
        `OpenSSL.crypto.X509` wrapped in `.ComparableX509`
    :ivar unicode cert_chain_uri: URI found in the 'up' ``Link`` header
    :ivar tuple authzrs: `tuple` of `AuthorizationResource`.

    """
    cert_chain_uri = jose.Field('cert_chain_uri')
    authzrs = jose.Field('authzrs')
@Directory.register
class Revocation(jose.JSONObjectWithFields):
    """Revocation message.

    :ivar .ComparableX509 certificate: `OpenSSL.crypto.X509` wrapped in
        `.ComparableX509`

    """
    resource_type = 'revoke-cert'
    resource = fields.Resource(resource_type)
    certificate = jose.Field(
        'certificate', decoder=jose.decode_cert, encoder=jose.encode_cert)
    reason = jose.Field('reason')
class Order(ResourceBody):
    """Order Resource Body.

    :ivar list of .Identifier: List of identifiers for the certificate.
    :ivar acme.messages.Status status:
    :ivar list of str authorizations: URLs of authorizations.
    :ivar str certificate: URL to download certificate as a fullchain PEM.
    :ivar str finalize: URL to POST to to request issuance once all
        authorizations have "valid" status.
    :ivar datetime.datetime expires: When the order expires.
    :ivar .Error error: Any error that occurred during finalization, if applicable.

    """
    identifiers = jose.Field('identifiers', omitempty=True)
    status = jose.Field('status', decoder=Status.from_json,
                        omitempty=True)
    authorizations = jose.Field('authorizations', omitempty=True)
    certificate = jose.Field('certificate', omitempty=True)
    finalize = jose.Field('finalize', omitempty=True)
    expires = fields.RFC3339Field('expires', omitempty=True)
    error = jose.Field('error', omitempty=True, decoder=Error.from_json)
    @identifiers.decoder
    def identifiers(value):  # pylint: disable=missing-docstring,no-self-argument
        return tuple(Identifier.from_json(identifier) for identifier in value)
class OrderResource(ResourceWithURI):
    """Order Resource.

    :ivar acme.messages.Order body:
    :ivar str csr_pem: The CSR this Order will be finalized with.
    :ivar list of acme.messages.AuthorizationResource authorizations:
        Fully-fetched AuthorizationResource objects.
    :ivar str fullchain_pem: The fetched contents of the certificate URL
        produced once the order was finalized, if it's present.

    """
    body = jose.Field('body', decoder=Order.from_json)
    csr_pem = jose.Field('csr_pem', omitempty=True)
    authorizations = jose.Field('authorizations')
    fullchain_pem = jose.Field('fullchain_pem', omitempty=True)
@Directory.register
class NewOrder(Order):
    """New order."""
    resource_type = 'new-order'
| {
"repo_name": "letsencrypt/letsencrypt",
"path": "acme/acme/messages.py",
"copies": "1",
"size": "19593",
"license": "apache-2.0",
"hash": -4691894942386358000,
"line_mean": 32.4923076923,
"line_max": 93,
"alpha_frac": 0.6534986985,
"autogenerated": false,
"ratio": 3.7882830626450117,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9939049487620579,
"avg_score": 0.0005464547048864021,
"num_lines": 585
} |
"""ACME protocol messages."""
import urlparse
from acme import challenges
from acme import fields
from acme import jose
class Error(jose.JSONObjectWithFields, Exception):
    """ACME error.

    https://tools.ietf.org/html/draft-ietf-appsawg-http-problem-00

    :ivar unicode typ:
    :ivar unicode title:
    :ivar unicode detail:

    """
    # All ACME error types live under this URN namespace; only the bare
    # code (e.g. 'badCSR') is stored in ``typ``.
    ERROR_TYPE_NAMESPACE = 'urn:acme:error:'
    ERROR_TYPE_DESCRIPTIONS = {
        'malformed': 'The request message was malformed',
        'unauthorized': 'The client lacks sufficient authorization',
        'serverInternal': 'The server experienced an internal error',
        'badCSR': 'The CSR is unacceptable (e.g., due to a short key)',
        'badNonce': 'The client sent an unacceptable anti-replay nonce',
    }
    typ = jose.Field('type')
    title = jose.Field('title', omitempty=True)
    detail = jose.Field('detail')
    @typ.encoder
    def typ(value):  # pylint: disable=missing-docstring,no-self-argument
        # Serialization re-attaches the namespace prefix.
        return Error.ERROR_TYPE_NAMESPACE + value
    @typ.decoder
    def typ(value):  # pylint: disable=missing-docstring,no-self-argument
        # pylint thinks isinstance(value, Error), so startswith is not found
        # pylint: disable=no-member
        if not value.startswith(Error.ERROR_TYPE_NAMESPACE):
            raise jose.DeserializationError('Missing error type prefix')
        without_prefix = value[len(Error.ERROR_TYPE_NAMESPACE):]
        if without_prefix not in Error.ERROR_TYPE_DESCRIPTIONS:
            raise jose.DeserializationError('Error type not recognized')
        return without_prefix
    @property
    def description(self):
        """Hardcoded error description based on its type."""
        return self.ERROR_TYPE_DESCRIPTIONS[self.typ]
    def __str__(self):
        # NOTE(review): the join raises TypeError if ``detail`` is None
        # while ``typ`` is set -- confirm 'detail' is always populated.
        if self.typ is not None:
            return ' :: '.join([self.typ, self.description, self.detail])
        else:
            return str(self.detail)
class _Constant(jose.JSONDeSerializable):
    """ACME constant.

    A named singleton; instantiating a subclass registers the instance in
    that subclass's ``POSSIBLE_NAMES`` map, which backs :meth:`from_json`.
    """
    __slots__ = ('name',)
    POSSIBLE_NAMES = NotImplemented
    def __init__(self, name):
        self.POSSIBLE_NAMES[name] = self
        self.name = name
    def to_partial_json(self):
        return self.name
    @classmethod
    def from_json(cls, value):
        if value not in cls.POSSIBLE_NAMES:
            raise jose.DeserializationError(
                '{0} not recognized'.format(cls.__name__))
        return cls.POSSIBLE_NAMES[value]
    def __repr__(self):
        return '{0}({1})'.format(self.__class__.__name__, self.name)
    def __eq__(self, other):
        return isinstance(other, type(self)) and other.name == self.name
    def __hash__(self):
        # Fix: __eq__ without __hash__ left the default identity hash, so
        # equal constants could hash differently and misbehave as dict/set
        # keys. Matches the other copies of this class in the file.
        return hash((self.__class__, self.name))
    def __ne__(self, other):
        # Consistent with the sibling copies: negate == rather than
        # calling __eq__ directly.
        return not self == other
class Status(_Constant):
    """ACME "status" field."""
    POSSIBLE_NAMES = {}
# Singleton status constants; instantiation registers each one in
# Status.POSSIBLE_NAMES (see _Constant.__init__).
STATUS_UNKNOWN = Status('unknown')
STATUS_PENDING = Status('pending')
STATUS_PROCESSING = Status('processing')
STATUS_VALID = Status('valid')
STATUS_INVALID = Status('invalid')
STATUS_REVOKED = Status('revoked')
class IdentifierType(_Constant):
    """ACME identifier type."""
    POSSIBLE_NAMES = {}
IDENTIFIER_FQDN = IdentifierType('dns')  # IdentifierDNS in Boulder
class Identifier(jose.JSONObjectWithFields):
    """ACME identifier.

    :ivar acme.messages.IdentifierType typ:
    :ivar str value:

    """
    typ = jose.Field('type', decoder=IdentifierType.from_json)
    value = jose.Field('value')
class Resource(jose.JSONObjectWithFields):
    """ACME Resource.

    :ivar acme.messages.ResourceBody body: Resource body.

    """
    body = jose.Field('body')
class ResourceWithURI(Resource):
    """ACME Resource with URI.

    :ivar str uri: Location of the resource.

    """
    uri = jose.Field('uri')  # no ChallengeResource.uri
class ResourceBody(jose.JSONObjectWithFields):
    """ACME Resource Body."""
    # Marker base class for resource bodies; declares no fields itself.
class Registration(ResourceBody):
    """Registration Resource Body.

    :ivar acme.jose.jwk.JWK key: Public key.
    :ivar tuple contact: Contact information following ACME spec

    """
    # on new-reg key server ignores 'key' and populates it based on
    # JWS.signature.combined.jwk
    key = jose.Field('key', omitempty=True, decoder=jose.JWK.from_json)
    contact = jose.Field('contact', omitempty=True, default=())
    recovery_token = jose.Field('recoveryToken', omitempty=True)
    agreement = jose.Field('agreement', omitempty=True)
    # URI scheme prefixes used to tag entries in the 'contact' field.
    phone_prefix = 'tel:'
    email_prefix = 'mailto:'
    @classmethod
    def from_data(cls, phone=None, email=None, **kwargs):
        """Create registration resource from contact details."""
        details = list(kwargs.pop('contact', ()))
        if phone is not None:
            details.append(cls.phone_prefix + phone)
        if email is not None:
            details.append(cls.email_prefix + email)
        kwargs['contact'] = tuple(details)
        return cls(**kwargs)
    def _filter_contact(self, prefix):
        # Return prefix-matching contact entries with the prefix stripped.
        return tuple(
            detail[len(prefix):] for detail in self.contact
            if detail.startswith(prefix))
    @property
    def phones(self):
        """All phones found in the ``contact`` field."""
        return self._filter_contact(self.phone_prefix)
    @property
    def emails(self):
        """All emails found in the ``contact`` field."""
        return self._filter_contact(self.email_prefix)
    @property
    def phone(self):
        """Phone."""
        # NOTE(review): assert is stripped under ``python -O``; relies on
        # exactly one tel: contact being present -- confirm with callers.
        assert len(self.phones) == 1
        return self.phones[0]
    @property
    def email(self):
        """Email."""
        # NOTE(review): same single-entry assumption as ``phone`` above.
        assert len(self.emails) == 1
        return self.emails[0]
class RegistrationResource(ResourceWithURI):
    """Registration Resource.
    :ivar acme.messages.Registration body:
    :ivar str new_authzr_uri: URI found in the 'next' ``Link`` header
    :ivar str terms_of_service: URL for the CA TOS.
    """
    body = jose.Field('body', decoder=Registration.from_json)
    new_authzr_uri = jose.Field('new_authzr_uri')
    # terms_of_service is only present when the server sends a TOS link.
    terms_of_service = jose.Field('terms_of_service', omitempty=True)
class ChallengeBody(ResourceBody):
    """Challenge Resource Body.
    .. todo::
       Confusingly, this has a similar name to `.challenges.Challenge`,
       as well as `.achallenges.AnnotatedChallenge`. Please use names
       such as ``challb`` to distinguish instances of this class from
       ``achall``.
    :ivar acme.challenges.Challenge: Wrapped challenge.
        Conveniently, all challenge fields are proxied, i.e. you can
        call ``challb.x`` to get ``challb.chall.x`` contents.
    :ivar acme.messages.Status status:
    :ivar datetime.datetime validated:
    """
    # 'chall' is held in a slot (not a jose.Field) because its JSON is
    # merged inline with this object's JSON rather than nested under a key.
    __slots__ = ('chall',)
    uri = jose.Field('uri')
    status = jose.Field('status', decoder=Status.from_json)
    validated = fields.RFC3339Field('validated', omitempty=True)
    def to_partial_json(self):
        # Serialize own fields, then splice the wrapped challenge's JSON
        # into the same (flat) object.
        jobj = super(ChallengeBody, self).to_partial_json()
        jobj.update(self.chall.to_partial_json())
        return jobj
    @classmethod
    def fields_from_json(cls, jobj):
        # The wrapped challenge is decoded from the *whole* JSON object,
        # mirroring the flat serialization in to_partial_json.
        jobj_fields = super(ChallengeBody, cls).fields_from_json(jobj)
        jobj_fields['chall'] = challenges.Challenge.from_json(jobj)
        return jobj_fields
    def __getattr__(self, name):
        # Proxy unknown attributes to the wrapped challenge.
        return getattr(self.chall, name)
class ChallengeResource(Resource):
    """Challenge Resource.
    :ivar acme.messages.ChallengeBody body:
    :ivar str authzr_uri: URI found in the 'up' ``Link`` header.
    """
    body = jose.Field('body', decoder=ChallengeBody.from_json)
    authzr_uri = jose.Field('authzr_uri')
    @property
    def uri(self):  # pylint: disable=missing-docstring,no-self-argument
        # The challenge's own URI lives on the body, so expose it as a
        # read-only property instead of a duplicate field.
        # bug? 'method already defined line None'
        # pylint: disable=function-redefined
        return self.body.uri  # pylint: disable=no-member
class Authorization(ResourceBody):
    """Authorization Resource Body.
    :ivar acme.messages.Identifier identifier:
    :ivar list challenges: `list` of `.ChallengeBody`
    :ivar tuple combinations: Challenge combinations (`tuple` of `tuple`
        of `int`, as opposed to `list` of `list` from the spec).
    :ivar acme.jose.jwk.JWK key: Public key.
    :ivar tuple contact:
    :ivar acme.messages.Status status:
    :ivar datetime.datetime expires:
    """
    identifier = jose.Field('identifier', decoder=Identifier.from_json)
    challenges = jose.Field('challenges', omitempty=True)
    combinations = jose.Field('combinations', omitempty=True)
    status = jose.Field('status', omitempty=True, decoder=Status.from_json)
    # TODO: 'expires' is allowed for Authorization Resources in
    # general, but for Key Authorization '[t]he "expires" field MUST
    # be absent'... then acme-spec gives example with 'expires'
    # present... That's confusing!
    expires = fields.RFC3339Field('expires', omitempty=True)
    @challenges.decoder
    def challenges(value):  # pylint: disable=missing-docstring,no-self-argument
        # Each list element is decoded into a ChallengeBody.
        return tuple(ChallengeBody.from_json(chall) for chall in value)
    @property
    def resolved_combinations(self):
        """Combinations with challenges instead of indices."""
        return tuple(tuple(self.challenges[idx] for idx in combo)
                     for combo in self.combinations)
class AuthorizationResource(ResourceWithURI):
    """Authorization Resource.
    :ivar acme.messages.Authorization body:
    :ivar str new_cert_uri: URI found in the 'next' ``Link`` header
    """
    body = jose.Field('body', decoder=Authorization.from_json)
    new_cert_uri = jose.Field('new_cert_uri')
class CertificateRequest(jose.JSONObjectWithFields):
    """ACME new-cert request.
    :ivar acme.jose.util.ComparableX509 csr:
        `M2Crypto.X509.Request` wrapped in `.ComparableX509`
    :ivar tuple authorizations: `tuple` of URIs (`str`)
    """
    csr = jose.Field('csr', decoder=jose.decode_csr, encoder=jose.encode_csr)
    authorizations = jose.Field('authorizations', decoder=tuple)
class CertificateResource(ResourceWithURI):
    """Certificate Resource.
    :ivar acme.jose.util.ComparableX509 body:
        `M2Crypto.X509.X509` wrapped in `.ComparableX509`
    :ivar str cert_chain_uri: URI found in the 'up' ``Link`` header
    :ivar tuple authzrs: `tuple` of `AuthorizationResource`.
    """
    cert_chain_uri = jose.Field('cert_chain_uri')
    # authzrs are kept as opaque objects; no decoder is applied.
    authzrs = jose.Field('authzrs')
class Revocation(jose.JSONObjectWithFields):
    """Revocation message.
    :ivar .ComparableX509 certificate: `M2Crypto.X509.X509` wrapped in
        `.ComparableX509`
    """
    certificate = jose.Field(
        'certificate', decoder=jose.decode_cert, encoder=jose.encode_cert)
    # TODO: acme-spec#138, this allows only one ACME server instance per domain
    PATH = '/acme/revoke-cert'
    """Path to revocation URL, see `url`"""
    @classmethod
    def url(cls, base):
        """Get revocation URL.
        :param str base: New Registration Resource or server (root) URL.
        :returns: ``base`` joined with :attr:`PATH`.
        :rtype: str
        """
        return urlparse.urljoin(base, cls.PATH)
| {
"repo_name": "digideskio/lets-encrypt-preview",
"path": "acme/messages.py",
"copies": "1",
"size": "10928",
"license": "apache-2.0",
"hash": -1158059670026365700,
"line_mean": 30.2228571429,
"line_max": 80,
"alpha_frac": 0.6575768668,
"autogenerated": false,
"ratio": 3.7579092159559835,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.991469146861403,
"avg_score": 0.0001589228283906155,
"num_lines": 350
} |
"""ACME protocol v00 messages.
.. warning:: This module is an implementation of the draft `ACME
protocol version 00`_, and not the "RESTified" `ACME protocol version
01`_ or later. It should work with `older Node.js implementation`_,
but will definitely not work with Boulder_. It is kept for reference
purposes only.
.. _`ACME protocol version 00`:
https://github.com/letsencrypt/acme-spec/blob/v00/draft-barnes-acme.md
.. _`ACME protocol version 01`:
https://github.com/letsencrypt/acme-spec/blob/v01/draft-barnes-acme.md
.. _Boulder: https://github.com/letsencrypt/boulder
.. _`older Node.js implementation`:
https://github.com/letsencrypt/node-acme/commit/f42aa5b7fad4cd2fc289653c4ab14f18052367b3
"""
import jsonschema
from acme import challenges
from acme import errors
from acme import jose
from acme import other
from acme import util
class Message(jose.TypedJSONObjectWithFields):
    # _fields_to_partial_json | pylint: disable=abstract-method
    # pylint: disable=too-few-public-methods
    """ACME message."""
    # Registry of type name -> message class; populated via @Message.register.
    TYPES = {}
    type_field_name = "type"
    schema = NotImplemented
    """JSON schema the object is tested against in :meth:`from_json`.
    Subclasses must override it with a value that is acceptable by
    :func:`jsonschema.validate`, most probably using
    :func:`acme.util.load_schema`.
    """
    @classmethod
    def from_json(cls, jobj):
        """Deserialize from (possibly invalid) JSON object.
        Note that the input ``jobj`` has not been sanitized in any way.
        :param jobj: JSON object.
        :raises acme.errors.SchemaValidationError: if the input
            JSON object could not be validated against JSON schema specified
            in :attr:`schema`.
        :raises acme.jose.errors.DeserializationError: for any
            other generic error in decoding.
        :returns: instance of the class
        """
        # Resolve the concrete message subclass first so its schema is used.
        msg_cls = cls.get_type_cls(jobj)
        # TODO: is that schema testing still relevant?
        try:
            jsonschema.validate(jobj, msg_cls.schema)
        except jsonschema.ValidationError as error:
            raise errors.SchemaValidationError(error)
        return super(Message, cls).from_json(jobj)
@Message.register  # pylint: disable=too-few-public-methods
class Challenge(Message):
    """ACME "challenge" message.
    :ivar str nonce: Random data, **not** base64-encoded.
    :ivar list challenges: List of
        :class:`~acme.challenges.Challenge` objects.
    .. todo::
       1. can challenges contain two challenges of the same type?
       2. can challenges contain duplicates?
       3. check "combinations" indices are in valid range
       4. turn "combinations" elements into sets?
       5. turn "combinations" into set?
    """
    typ = "challenge"
    schema = util.load_schema(typ)
    session_id = jose.Field("sessionID")
    # nonce travels base64url-encoded on the wire; decoded here.
    nonce = jose.Field("nonce", encoder=jose.b64encode,
                       decoder=jose.decode_b64jose)
    challenges = jose.Field("challenges")
    combinations = jose.Field("combinations", omitempty=True, default=())
    @challenges.decoder
    def challenges(value):  # pylint: disable=missing-docstring,no-self-argument
        # ``challenges`` here resolves to the module, not this field.
        return tuple(challenges.Challenge.from_json(chall) for chall in value)
    @property
    def resolved_combinations(self):
        """Combinations with challenges instead of indices."""
        return tuple(tuple(self.challenges[idx] for idx in combo)
                     for combo in self.combinations)
@Message.register  # pylint: disable=too-few-public-methods
class ChallengeRequest(Message):
    """ACME "challengeRequest" message.
    :ivar str identifier: Domain name to be challenged.
    """
    typ = "challengeRequest"
    schema = util.load_schema(typ)
    identifier = jose.Field("identifier")
@Message.register  # pylint: disable=too-few-public-methods
class Authorization(Message):
    """ACME "authorization" message.
    :ivar jwk: :class:`acme.jose.JWK`
    """
    typ = "authorization"
    schema = util.load_schema(typ)
    recovery_token = jose.Field("recoveryToken", omitempty=True)
    identifier = jose.Field("identifier", omitempty=True)
    jwk = jose.Field("jwk", decoder=jose.JWK.from_json, omitempty=True)
@Message.register
class AuthorizationRequest(Message):
    """ACME "authorizationRequest" message.
    :ivar str nonce: Random data from the corresponding
        :attr:`Challenge.nonce`, **not** base64-encoded.
    :ivar list responses: List of completed challenges (
        :class:`acme.challenges.ChallengeResponse`).
    :ivar signature: Signature (:class:`acme.other.Signature`).
    """
    typ = "authorizationRequest"
    schema = util.load_schema(typ)
    session_id = jose.Field("sessionID")
    nonce = jose.Field("nonce", encoder=jose.b64encode,
                       decoder=jose.decode_b64jose)
    responses = jose.Field("responses")
    signature = jose.Field("signature", decoder=other.Signature.from_json)
    contact = jose.Field("contact", omitempty=True, default=())
    @responses.decoder
    def responses(value):  # pylint: disable=missing-docstring,no-self-argument
        return tuple(challenges.ChallengeResponse.from_json(chall)
                     for chall in value)
    @classmethod
    def create(cls, name, key, sig_nonce=None, **kwargs):
        """Create signed "authorizationRequest".
        :param str name: Hostname
        :param key: Key used for signing.
        :type key: :class:`Crypto.PublicKey.RSA`
        :param str sig_nonce: Nonce used for signature. Useful for testing.
        :kwargs: Any other arguments accepted by the class constructor.
        :returns: Signed "authorizationRequest" ACME message.
        :rtype: :class:`AuthorizationRequest`
        """
        # pylint: disable=too-many-arguments
        # The signature covers hostname + challenge nonce (see verify()).
        signature = other.Signature.from_msg(
            name + kwargs["nonce"], key, sig_nonce)
        return cls(
            signature=signature, contact=kwargs.pop("contact", ()), **kwargs)
    def verify(self, name):
        """Verify signature.
        .. warning:: Caller must check that the public key encoded in the
            :attr:`signature`'s :class:`acme.jose.JWK` object
            is the correct key for a given context.
        :param str name: Hostname
        :returns: True iff ``signature`` can be verified, False otherwise.
        :rtype: bool
        """
        # self.signature is not Field | pylint: disable=no-member
        return self.signature.verify(name + self.nonce)
@Message.register  # pylint: disable=too-few-public-methods
class Certificate(Message):
    """ACME "certificate" message.
    :ivar certificate: The certificate (:class:`M2Crypto.X509.X509`
        wrapped in :class:`acme.util.ComparableX509`).
    :ivar list chain: Chain of certificates (:class:`M2Crypto.X509.X509`
        wrapped in :class:`acme.util.ComparableX509` ).
    """
    typ = "certificate"
    schema = util.load_schema(typ)
    certificate = jose.Field("certificate", encoder=jose.encode_cert,
                             decoder=jose.decode_cert)
    chain = jose.Field("chain", omitempty=True, default=())
    refresh = jose.Field("refresh", omitempty=True)
    # Chain entries are encoded/decoded element-wise.
    @chain.decoder
    def chain(value):  # pylint: disable=missing-docstring,no-self-argument
        return tuple(jose.decode_cert(cert) for cert in value)
    @chain.encoder
    def chain(value):  # pylint: disable=missing-docstring,no-self-argument
        return tuple(jose.encode_cert(cert) for cert in value)
@Message.register
class CertificateRequest(Message):
    """ACME "certificateRequest" message.
    :ivar csr: Certificate Signing Request (:class:`M2Crypto.X509.Request`
        wrapped in :class:`acme.util.ComparableX509`.
    :ivar signature: Signature (:class:`acme.other.Signature`).
    """
    typ = "certificateRequest"
    schema = util.load_schema(typ)
    csr = jose.Field("csr", encoder=jose.encode_csr,
                     decoder=jose.decode_csr)
    signature = jose.Field("signature", decoder=other.Signature.from_json)
    @classmethod
    def create(cls, key, sig_nonce=None, **kwargs):
        """Create signed "certificateRequest".
        :param key: Key used for signing.
        :type key: :class:`Crypto.PublicKey.RSA`
        :param str sig_nonce: Nonce used for signature. Useful for testing.
        :kwargs: Any other arguments accepted by the class constructor.
        :returns: Signed "certificateRequest" ACME message.
        :rtype: :class:`CertificateRequest`
        """
        # The signature covers the DER encoding of the CSR (see verify()).
        return cls(signature=other.Signature.from_msg(
            kwargs["csr"].as_der(), key, sig_nonce), **kwargs)
    def verify(self):
        """Verify signature.
        .. warning:: Caller must check that the public key encoded in the
            :attr:`signature`'s :class:`acme.jose.JWK` object
            is the correct key for a given context.
        :returns: True iff ``signature`` can be verified, False otherwise.
        :rtype: bool
        """
        # self.signature is not Field | pylint: disable=no-member
        return self.signature.verify(self.csr.as_der())
@Message.register  # pylint: disable=too-few-public-methods
class Defer(Message):
    """ACME "defer" message.
    :ivar str token: Token to poll with.
    :ivar interval: Suggested polling interval (optional).
    :ivar message: Human-readable explanation (optional).
    """
    typ = "defer"
    schema = util.load_schema(typ)
    token = jose.Field("token")
    interval = jose.Field("interval", omitempty=True)
    message = jose.Field("message", omitempty=True)
@Message.register  # pylint: disable=too-few-public-methods
class Error(Message):
    """ACME "error" message.
    :ivar str error: Error code; see :attr:`MESSAGE_CODES`.
    """
    typ = "error"
    schema = util.load_schema(typ)
    error = jose.Field("error")
    message = jose.Field("message", omitempty=True)
    more_info = jose.Field("moreInfo", omitempty=True)
    # Human-readable descriptions for the error codes defined by the spec.
    MESSAGE_CODES = {
        "malformed": "The request message was malformed",
        "unauthorized": "The client lacks sufficient authorization",
        "serverInternal": "The server experienced an internal error",
        "notSupported": "The request type is not supported",
        "unknown": "The server does not recognize an ID/token in the request",
        "badCSR": "The CSR is unacceptable (e.g., due to a short key)",
    }
@Message.register  # pylint: disable=too-few-public-methods
class Revocation(Message):
    """ACME "revocation" message.
    Empty acknowledgment body; carries no fields beyond the type.
    """
    typ = "revocation"
    schema = util.load_schema(typ)
@Message.register
class RevocationRequest(Message):
    """ACME "revocationRequest" message.
    :ivar certificate: Certificate (:class:`M2Crypto.X509.X509`
        wrapped in :class:`acme.util.ComparableX509`).
    :ivar signature: Signature (:class:`acme.other.Signature`).
    """
    typ = "revocationRequest"
    schema = util.load_schema(typ)
    certificate = jose.Field("certificate", decoder=jose.decode_cert,
                             encoder=jose.encode_cert)
    signature = jose.Field("signature", decoder=other.Signature.from_json)
    @classmethod
    def create(cls, key, sig_nonce=None, **kwargs):
        """Create signed "revocationRequest".
        :param key: Key used for signing.
        :type key: :class:`Crypto.PublicKey.RSA`
        :param str sig_nonce: Nonce used for signature. Useful for testing.
        :kwargs: Any other arguments accepted by the class constructor.
        :returns: Signed "revocationRequest" ACME message.
        :rtype: :class:`RevocationRequest`
        """
        # The signature covers the DER encoding of the certificate.
        return cls(signature=other.Signature.from_msg(
            kwargs["certificate"].as_der(), key, sig_nonce), **kwargs)
    def verify(self):
        """Verify signature.
        .. warning:: Caller must check that the public key encoded in the
            :attr:`signature`'s :class:`acme.jose.JWK` object
            is the correct key for a given context.
        :returns: True iff ``signature`` can be verified, False otherwise.
        :rtype: bool
        """
        # self.signature is not Field | pylint: disable=no-member
        return self.signature.verify(self.certificate.as_der())
@Message.register  # pylint: disable=too-few-public-methods
class StatusRequest(Message):
    """ACME "statusRequest" message.
    :ivar str token: Token received in a "defer" message.
    """
    typ = "statusRequest"
    schema = util.load_schema(typ)
    token = jose.Field("token")
| {
"repo_name": "felixrieseberg/lets-encrypt-preview",
"path": "acme/messages.py",
"copies": "1",
"size": "12199",
"license": "apache-2.0",
"hash": 2642605497927141400,
"line_mean": 32.2397820163,
"line_max": 90,
"alpha_frac": 0.659808181,
"autogenerated": false,
"ratio": 3.858001265022138,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5017809446022138,
"avg_score": null,
"num_lines": null
} |
"""ACME-specific JWS.
The JWS implementation in josepy only implements the base JOSE standard. In
order to support the new header fields defined in ACME, this module defines some
ACME-specific classes that layer on top of josepy.
"""
import josepy as jose
class Header(jose.Header):
    """ACME-specific JOSE Header. Implements nonce, kid, and url.
    """
    nonce = jose.Field('nonce', omitempty=True, encoder=jose.encode_b64jose)
    kid = jose.Field('kid', omitempty=True)
    url = jose.Field('url', omitempty=True)
    @nonce.decoder
    def nonce(value):  # pylint: disable=missing-docstring,no-self-argument
        try:
            return jose.decode_b64jose(value)
        except jose.DeserializationError as error:
            # TODO: custom error
            # Re-raise with context so the caller sees which value failed.
            raise jose.DeserializationError("Invalid nonce: {0}".format(error))
class Signature(jose.Signature):
    """ACME-specific Signature. Uses ACME-specific Header for custom fields."""
    __slots__ = jose.Signature._orig_slots  # pylint: disable=no-member
    # TODO: decoder/encoder should accept cls? Otherwise, subclassing
    # JSONObjectWithFields is tricky...
    header_cls = Header
    # Default to an empty ACME Header and decode with the ACME-aware class.
    header = jose.Field(
        'header', omitempty=True, default=header_cls(),
        decoder=header_cls.from_json)
    # TODO: decoder should check that nonce is in the protected header
# TODO: decoder should check that nonce is in the protected header
class JWS(jose.JWS):
    """ACME-specific JWS. Includes nonce, url, and kid in protected header."""
    signature_cls = Signature
    __slots__ = jose.JWS._orig_slots  # pylint: disable=no-member
    @classmethod
    # pylint: disable=arguments-differ,too-many-arguments
    def sign(cls, payload, key, alg, nonce, url=None, kid=None):
        # Per ACME spec, jwk and kid are mutually exclusive, so only include a
        # jwk field if kid is not provided.
        include_jwk = kid is None
        # All ACME header fields must live in the *protected* header.
        return super(JWS, cls).sign(payload, key=key, alg=alg,
                                    protect=frozenset(['nonce', 'url', 'kid', 'jwk', 'alg']),
                                    nonce=nonce, url=url, kid=kid,
                                    include_jwk=include_jwk)
| {
"repo_name": "letsencrypt/letsencrypt",
"path": "acme/acme/jws.py",
"copies": "2",
"size": "2139",
"license": "apache-2.0",
"hash": 9201598451566115000,
"line_mean": 38.6111111111,
"line_max": 93,
"alpha_frac": 0.6479663394,
"autogenerated": false,
"ratio": 3.6192893401015227,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0006514647932575114,
"num_lines": 54
} |
"""ACME utilities for testing."""
import datetime
import itertools
import os
import pkg_resources
import Crypto.PublicKey.RSA
from acme import challenges
from acme import jose
from acme import messages
# Shared 512-bit RSA test key loaded from the acme.jose test data.
KEY = jose.HashableRSAKey(Crypto.PublicKey.RSA.importKey(
    pkg_resources.resource_string(
        "acme.jose", os.path.join("testdata", "rsa512_key.pem"))))
# Challenges
SIMPLE_HTTP = challenges.SimpleHTTP(
    token="evaGxfADs6pSRb2LAv9IZf17Dt3juxGJ+PCt92wr+oA")
DVSNI = challenges.DVSNI(
    r="O*\xb4-\xad\xec\x95>\xed\xa9\r0\x94\xe8\x97\x9c&6\xbf'\xb3"
    "\xed\x9a9nX\x0f'\\m\xe7\x12", nonce="a82d5ff8ef740d12881f6d3c2277ab2e")
DNS = challenges.DNS(token="17817c66b60ce2e4012dfad92657527a")
RECOVERY_CONTACT = challenges.RecoveryContact(
    activation_url="https://example.ca/sendrecovery/a5bd99383fb0",
    success_url="https://example.ca/confirmrecovery/bb1b9928932",
    contact="c********n@example.com")
RECOVERY_TOKEN = challenges.RecoveryToken()
POP = challenges.ProofOfPossession(
    alg="RS256", nonce="xD\xf9\xb9\xdbU\xed\xaa\x17\xf1y|\x81\x88\x99 ",
    hints=challenges.ProofOfPossession.Hints(
        jwk=jose.JWKRSA(key=KEY.publickey()),
        cert_fingerprints=(
            "93416768eb85e33adc4277f4c9acd63e7418fcfe",
            "16d95b7b63f1972b980b14c20291f3c0d1855d95",
            "48b46570d9fc6358108af43ad1649484def0debf"
        ),
        certs=(), # TODO
        # NOTE(review): this is a plain str, not a 1-tuple — the plural name
        # suggests a missing trailing comma; TODO confirm intended type.
        subject_key_identifiers=("d0083162dcc4c8a23ecb8aecbd86120e56fd24e5"),
        serial_numbers=(34234239832, 23993939911, 17),
        issuers=(
            "C=US, O=SuperT LLC, CN=SuperTrustworthy Public CA",
            "O=LessTrustworthy CA Inc, CN=LessTrustworthy But StillSecure",
        ),
        authorized_for=("www.example.com", "example.net"),
    )
)
CHALLENGES = [SIMPLE_HTTP, DVSNI, DNS, RECOVERY_CONTACT, RECOVERY_TOKEN, POP]
# Split by spec category: domain-validation vs continuity challenges.
DV_CHALLENGES = [chall for chall in CHALLENGES
                 if isinstance(chall, challenges.DVChallenge)]
CONT_CHALLENGES = [chall for chall in CHALLENGES
                   if isinstance(chall, challenges.ContinuityChallenge)]
def gen_combos(challbs):
    """Generate natural combinations for challbs.

    Pairs every DV challenge index with every continuity challenge index;
    each pair is emitted lowest index first (makes testing easier).
    """
    # pylint: disable=redefined-outer-name
    dv_indices = [idx for idx, challb in enumerate(challbs)
                  if isinstance(challb.chall, challenges.DVChallenge)]
    cont_indices = [idx for idx, challb in enumerate(challbs)
                    if not isinstance(challb.chall, challenges.DVChallenge)]
    combos = []
    for dv_idx in dv_indices:
        for cont_idx in cont_indices:
            combos.append(tuple(sorted((dv_idx, cont_idx))))
    return tuple(combos)
def chall_to_challb(chall, status):  # pylint: disable=redefined-outer-name
    """Return ChallengeBody from Challenge."""
    kwargs = {
        "chall": chall,
        # Synthetic URI derived from the challenge type, for tests only.
        "uri": chall.typ + "_uri",
        "status": status,
    }
    if status == messages.STATUS_VALID:
        # Valid challenges record when validation happened.
        kwargs.update({"validated": datetime.datetime.now()})
    return messages.ChallengeBody(**kwargs)  # pylint: disable=star-args
# Pending ChallengeBody objects, one per challenge constant above.
DVSNI_P = chall_to_challb(DVSNI, messages.STATUS_PENDING)
SIMPLE_HTTP_P = chall_to_challb(SIMPLE_HTTP, messages.STATUS_PENDING)
DNS_P = chall_to_challb(DNS, messages.STATUS_PENDING)
RECOVERY_CONTACT_P = chall_to_challb(RECOVERY_CONTACT, messages.STATUS_PENDING)
RECOVERY_TOKEN_P = chall_to_challb(RECOVERY_TOKEN, messages.STATUS_PENDING)
POP_P = chall_to_challb(POP, messages.STATUS_PENDING)
CHALLENGES_P = [SIMPLE_HTTP_P, DVSNI_P, DNS_P,
                RECOVERY_CONTACT_P, RECOVERY_TOKEN_P, POP_P]
DV_CHALLENGES_P = [challb for challb in CHALLENGES_P
                   if isinstance(challb.chall, challenges.DVChallenge)]
CONT_CHALLENGES_P = [
    challb for challb in CHALLENGES_P
    if isinstance(challb.chall, challenges.ContinuityChallenge)
]
def gen_authzr(authz_status, domain, challs, statuses, combos=True):
    """Generate an authorization resource.
    :param authz_status: Status object
    :type authz_status: :class:`acme.messages.Status`
    :param str domain: Domain name used for the FQDN identifier.
    :param list challs: Challenge objects
    :param list statuses: status of each challenge object
    :param bool combos: Whether or not to add combinations
    :returns: :class:`acme.messages.AuthorizationResource`
    """
    # pylint: disable=redefined-outer-name
    # Use the builtin ``zip`` rather than ``itertools.izip``: izip does not
    # exist on Python 3, and tuple() consumes the pairs eagerly either way.
    challbs = tuple(
        chall_to_challb(chall, status)
        for chall, status in zip(challs, statuses)
    )
    authz_kwargs = {
        "identifier": messages.Identifier(
            typ=messages.IDENTIFIER_FQDN, value=domain),
        "challenges": challbs,
    }
    if combos:
        authz_kwargs.update({"combinations": gen_combos(challbs)})
    if authz_status == messages.STATUS_VALID:
        # Valid authorizations also carry an expiry ~1 month out.
        authz_kwargs.update({
            "status": authz_status,
            "expires": datetime.datetime.now() + datetime.timedelta(days=31),
        })
    else:
        authz_kwargs.update({
            "status": authz_status,
        })
    # pylint: disable=star-args
    return messages.AuthorizationResource(
        uri="https://trusted.ca/new-authz-resource",
        new_cert_uri="https://trusted.ca/new-cert",
        body=messages.Authorization(**authz_kwargs)
    )
| {
"repo_name": "digideskio/lets-encrypt-preview",
"path": "letsencrypt/tests/acme_util.py",
"copies": "1",
"size": "5112",
"license": "apache-2.0",
"hash": 141404234350382600,
"line_mean": 35,
"line_max": 80,
"alpha_frac": 0.6752738654,
"autogenerated": false,
"ratio": 3.015929203539823,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.41912030689398233,
"avg_score": null,
"num_lines": null
} |
"""ACME utilities for testing."""
import datetime
import itertools
from acme import challenges
from acme import jose
from acme import messages
from certbot.tests import test_util
# Shared 512-bit RSA test key.
KEY = test_util.load_rsa_private_key('rsa512_key.pem')
# Challenges
HTTP01 = challenges.HTTP01(
    token="evaGxfADs6pSRb2LAv9IZf17Dt3juxGJ+PCt92wr+oA")
TLSSNI01 = challenges.TLSSNI01(
    token=jose.b64decode(b"evaGxfADs6pSRb2LAv9IZf17Dt3juxGJyPCt92wrDoA"))
DNS = challenges.DNS(token="17817c66b60ce2e4012dfad92657527a")
CHALLENGES = [HTTP01, TLSSNI01, DNS]
def gen_combos(challbs):
    """Generate natural combinations for challbs."""
    # A single completed DV challenge satisfies the CA, so every
    # challenge index forms its own one-element combination.
    return tuple((index,) for index in range(len(challbs)))
def chall_to_challb(chall, status):  # pylint: disable=redefined-outer-name
    """Return ChallengeBody from Challenge."""
    kwargs = {
        "chall": chall,
        # Synthetic URI derived from the challenge type, for tests only.
        "uri": chall.typ + "_uri",
        "status": status,
    }
    if status == messages.STATUS_VALID:
        # Valid challenges record when validation happened.
        kwargs.update({"validated": datetime.datetime.now()})
    return messages.ChallengeBody(**kwargs)  # pylint: disable=star-args
# Pending ChallengeBody objects, one per challenge constant above.
TLSSNI01_P = chall_to_challb(TLSSNI01, messages.STATUS_PENDING)
HTTP01_P = chall_to_challb(HTTP01, messages.STATUS_PENDING)
DNS_P = chall_to_challb(DNS, messages.STATUS_PENDING)
CHALLENGES_P = [HTTP01_P, TLSSNI01_P, DNS_P]
def gen_authzr(authz_status, domain, challs, statuses, combos=True):
    """Generate an authorization resource.
    :param authz_status: Status object
    :type authz_status: :class:`acme.messages.Status`
    :param str domain: Domain name used for the FQDN identifier.
    :param list challs: Challenge objects
    :param list statuses: status of each challenge object
    :param bool combos: Whether or not to add combinations
    :returns: :class:`acme.messages.AuthorizationResource`
    """
    # pylint: disable=redefined-outer-name
    # Use the builtin ``zip`` rather than ``itertools.izip``: izip does not
    # exist on Python 3, and tuple() consumes the pairs eagerly either way.
    challbs = tuple(
        chall_to_challb(chall, status)
        for chall, status in zip(challs, statuses)
    )
    authz_kwargs = {
        "identifier": messages.Identifier(
            typ=messages.IDENTIFIER_FQDN, value=domain),
        "challenges": challbs,
    }
    if combos:
        authz_kwargs.update({"combinations": gen_combos(challbs)})
    if authz_status == messages.STATUS_VALID:
        # Valid authorizations also carry an expiry ~1 month out.
        authz_kwargs.update({
            "status": authz_status,
            "expires": datetime.datetime.now() + datetime.timedelta(days=31),
        })
    else:
        authz_kwargs.update({
            "status": authz_status,
        })
    # pylint: disable=star-args
    return messages.AuthorizationResource(
        uri="https://trusted.ca/new-authz-resource",
        new_cert_uri="https://trusted.ca/new-cert",
        body=messages.Authorization(**authz_kwargs)
    )
| {
"repo_name": "DavidGarciaCat/letsencrypt",
"path": "certbot/tests/acme_util.py",
"copies": "4",
"size": "2714",
"license": "apache-2.0",
"hash": -9097599617326374000,
"line_mean": 29.4943820225,
"line_max": 77,
"alpha_frac": 0.6805453206,
"autogenerated": false,
"ratio": 3.230952380952381,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0,
"num_lines": 89
} |
"""ACME utilities for testing."""
import datetime
import itertools
from acme import challenges
from acme import jose
from acme import messages
from letsencrypt.tests import test_util
# Shared 512-bit RSA test key.
KEY = test_util.load_rsa_private_key('rsa512_key.pem')
# Challenges
HTTP01 = challenges.HTTP01(
    token="evaGxfADs6pSRb2LAv9IZf17Dt3juxGJ+PCt92wr+oA")
DVSNI = challenges.DVSNI(
    token=jose.b64decode(b"evaGxfADs6pSRb2LAv9IZf17Dt3juxGJyPCt92wrDoA"))
DNS = challenges.DNS(token="17817c66b60ce2e4012dfad92657527a")
RECOVERY_CONTACT = challenges.RecoveryContact(
    activation_url="https://example.ca/sendrecovery/a5bd99383fb0",
    success_url="https://example.ca/confirmrecovery/bb1b9928932",
    contact="c********n@example.com")
POP = challenges.ProofOfPossession(
    alg="RS256", nonce=jose.b64decode("eET5udtV7aoX8Xl8gYiZIA"),
    hints=challenges.ProofOfPossession.Hints(
        jwk=jose.JWKRSA(key=KEY.public_key()),
        cert_fingerprints=(
            "93416768eb85e33adc4277f4c9acd63e7418fcfe",
            "16d95b7b63f1972b980b14c20291f3c0d1855d95",
            "48b46570d9fc6358108af43ad1649484def0debf"
        ),
        certs=(), # TODO
        # NOTE(review): this is a plain str, not a 1-tuple — the plural name
        # suggests a missing trailing comma; TODO confirm intended type.
        subject_key_identifiers=("d0083162dcc4c8a23ecb8aecbd86120e56fd24e5"),
        serial_numbers=(34234239832, 23993939911, 17),
        issuers=(
            "C=US, O=SuperT LLC, CN=SuperTrustworthy Public CA",
            "O=LessTrustworthy CA Inc, CN=LessTrustworthy But StillSecure",
        ),
        authorized_for=("www.example.com", "example.net"),
    )
)
CHALLENGES = [HTTP01, DVSNI, DNS, RECOVERY_CONTACT, POP]
# Split by spec category: domain-validation vs continuity challenges.
DV_CHALLENGES = [chall for chall in CHALLENGES
                 if isinstance(chall, challenges.DVChallenge)]
CONT_CHALLENGES = [chall for chall in CHALLENGES
                   if isinstance(chall, challenges.ContinuityChallenge)]
def gen_combos(challbs):
    """Generate natural combinations for challbs."""
    dv_chall = []
    cont_chall = []
    # Partition challenge indices by category.
    for i, challb in enumerate(challbs):  # pylint: disable=redefined-outer-name
        if isinstance(challb.chall, challenges.DVChallenge):
            dv_chall.append(i)
        else:
            cont_chall.append(i)
    # Gen combos for 1 of each type, lowest index first (makes testing easier)
    return tuple((i, j) if i < j else (j, i)
                 for i in dv_chall for j in cont_chall)
def chall_to_challb(chall, status):  # pylint: disable=redefined-outer-name
    """Return ChallengeBody from Challenge."""
    kwargs = {
        "chall": chall,
        # Synthetic URI derived from the challenge type, for tests only.
        "uri": chall.typ + "_uri",
        "status": status,
    }
    if status == messages.STATUS_VALID:
        # Valid challenges record when validation happened.
        kwargs.update({"validated": datetime.datetime.now()})
    return messages.ChallengeBody(**kwargs)  # pylint: disable=star-args
# Pending ChallengeBody objects, one per challenge constant above.
DVSNI_P = chall_to_challb(DVSNI, messages.STATUS_PENDING)
HTTP01_P = chall_to_challb(HTTP01, messages.STATUS_PENDING)
DNS_P = chall_to_challb(DNS, messages.STATUS_PENDING)
RECOVERY_CONTACT_P = chall_to_challb(RECOVERY_CONTACT, messages.STATUS_PENDING)
POP_P = chall_to_challb(POP, messages.STATUS_PENDING)
CHALLENGES_P = [HTTP01_P, DVSNI_P, DNS_P, RECOVERY_CONTACT_P, POP_P]
DV_CHALLENGES_P = [challb for challb in CHALLENGES_P
                   if isinstance(challb.chall, challenges.DVChallenge)]
CONT_CHALLENGES_P = [
    challb for challb in CHALLENGES_P
    if isinstance(challb.chall, challenges.ContinuityChallenge)
]
def gen_authzr(authz_status, domain, challs, statuses, combos=True):
    """Generate an authorization resource.
    :param authz_status: Status object
    :type authz_status: :class:`acme.messages.Status`
    :param str domain: Domain name used for the FQDN identifier.
    :param list challs: Challenge objects
    :param list statuses: status of each challenge object
    :param bool combos: Whether or not to add combinations
    :returns: :class:`acme.messages.AuthorizationResource`
    """
    # pylint: disable=redefined-outer-name
    # Use the builtin ``zip`` rather than ``itertools.izip``: izip does not
    # exist on Python 3, and tuple() consumes the pairs eagerly either way.
    challbs = tuple(
        chall_to_challb(chall, status)
        for chall, status in zip(challs, statuses)
    )
    authz_kwargs = {
        "identifier": messages.Identifier(
            typ=messages.IDENTIFIER_FQDN, value=domain),
        "challenges": challbs,
    }
    if combos:
        authz_kwargs.update({"combinations": gen_combos(challbs)})
    if authz_status == messages.STATUS_VALID:
        # Valid authorizations also carry an expiry ~1 month out.
        authz_kwargs.update({
            "status": authz_status,
            "expires": datetime.datetime.now() + datetime.timedelta(days=31),
        })
    else:
        authz_kwargs.update({
            "status": authz_status,
        })
    # pylint: disable=star-args
    return messages.AuthorizationResource(
        uri="https://trusted.ca/new-authz-resource",
        new_cert_uri="https://trusted.ca/new-cert",
        body=messages.Authorization(**authz_kwargs)
    )
| {
"repo_name": "BillKeenan/lets-encrypt-preview",
"path": "letsencrypt/tests/acme_util.py",
"copies": "1",
"size": "4711",
"license": "apache-2.0",
"hash": 4094822574345121300,
"line_mean": 34.1567164179,
"line_max": 80,
"alpha_frac": 0.6754404585,
"autogenerated": false,
"ratio": 3.0551232166018156,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.42305636751018155,
"avg_score": null,
"num_lines": null
} |
"""ACME utilities for testing."""
import datetime
import josepy as jose
import six
from acme import challenges
from acme import messages
from certbot import auth_handler
from certbot.tests import util
# Shared 512-bit RSA test key, both as a josepy JWK and a raw private key.
JWK = jose.JWK.load(util.load_vector('rsa512_key.pem'))
KEY = util.load_rsa_private_key('rsa512_key.pem')
# Challenges
HTTP01 = challenges.HTTP01(
    token=b"evaGxfADs6pSRb2LAv9IZf17Dt3juxGJ+PCt92wr+oA")
TLSSNI01 = challenges.TLSSNI01(
    token=jose.b64decode(b"evaGxfADs6pSRb2LAv9IZf17Dt3juxGJyPCt92wrDoA"))
DNS01 = challenges.DNS01(token=b"17817c66b60ce2e4012dfad92657527a")
DNS01_2 = challenges.DNS01(token=b"cafecafecafecafecafecafe0feedbac")
CHALLENGES = [HTTP01, TLSSNI01, DNS01]
def gen_combos(challbs):
    """Generate natural combinations for challbs."""
    # Completing any one challenge satisfies the CA, so emit a singleton
    # combination per challenge index.
    singles = []
    for index, _unused in enumerate(challbs):
        singles.append((index,))
    return tuple(singles)
def chall_to_challb(chall, status):  # pylint: disable=redefined-outer-name
    """Return ChallengeBody from Challenge."""
    body_kwargs = {
        "chall": chall,
        "uri": chall.typ + "_uri",
        "status": status,
    }
    # Valid challenges carry a validation timestamp.
    if status == messages.STATUS_VALID:
        body_kwargs["validated"] = datetime.datetime.now()
    return messages.ChallengeBody(**body_kwargs)  # pylint: disable=star-args
# Pending ChallengeBody objects
TLSSNI01_P = chall_to_challb(TLSSNI01, messages.STATUS_PENDING)
HTTP01_P = chall_to_challb(HTTP01, messages.STATUS_PENDING)
DNS01_P = chall_to_challb(DNS01, messages.STATUS_PENDING)
DNS01_P_2 = chall_to_challb(DNS01_2, messages.STATUS_PENDING)
CHALLENGES_P = [HTTP01_P, TLSSNI01_P, DNS01_P]
# AnnotatedChallenge objects
HTTP01_A = auth_handler.challb_to_achall(HTTP01_P, JWK, "example.com")
TLSSNI01_A = auth_handler.challb_to_achall(TLSSNI01_P, JWK, "example.net")
DNS01_A = auth_handler.challb_to_achall(DNS01_P, JWK, "example.org")
DNS01_A_2 = auth_handler.challb_to_achall(DNS01_P_2, JWK, "esimerkki.example.org")
ACHALLENGES = [HTTP01_A, TLSSNI01_A, DNS01_A]
def gen_authzr(authz_status, domain, challs, statuses, combos=True):
    """Generate an authorization resource.

    :param authz_status: Status object
    :type authz_status: :class:`acme.messages.Status`
    :param list challs: Challenge objects
    :param list statuses: status of each challenge object
    :param bool combos: Whether or not to add combinations
    """
    # pylint: disable=redefined-outer-name
    challbs = tuple(chall_to_challb(chall, chall_status)
                    for chall, chall_status in six.moves.zip(challs, statuses))
    authz_kwargs = {
        "identifier": messages.Identifier(
            typ=messages.IDENTIFIER_FQDN, value=domain),
        "challenges": challbs,
        "status": authz_status,
    }
    if combos:
        authz_kwargs["combinations"] = gen_combos(challbs)
    # Valid authorizations additionally carry an expiry ~a month out.
    if authz_status == messages.STATUS_VALID:
        authz_kwargs["expires"] = (datetime.datetime.now()
                                   + datetime.timedelta(days=31))
    # pylint: disable=star-args
    return messages.AuthorizationResource(
        uri="https://trusted.ca/new-authz-resource",
        body=messages.Authorization(**authz_kwargs)
    )
| {
"repo_name": "letsencrypt/letsencrypt",
"path": "certbot/tests/acme_util.py",
"copies": "1",
"size": "3258",
"license": "apache-2.0",
"hash": 8556647702328403000,
"line_mean": 30.6310679612,
"line_max": 82,
"alpha_frac": 0.6952117864,
"autogenerated": false,
"ratio": 2.9484162895927604,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.414362807599276,
"avg_score": null,
"num_lines": null
} |
"""ACME utilities for testing."""
import datetime
import six
from acme import challenges
from acme import jose
from acme import messages
from certbot.tests import test_util
KEY = test_util.load_rsa_private_key('rsa512_key.pem')
# Challenges
HTTP01 = challenges.HTTP01(
token=b"evaGxfADs6pSRb2LAv9IZf17Dt3juxGJ+PCt92wr+oA")
TLSSNI01 = challenges.TLSSNI01(
token=jose.b64decode(b"evaGxfADs6pSRb2LAv9IZf17Dt3juxGJyPCt92wrDoA"))
DNS01 = challenges.DNS01(token=b"17817c66b60ce2e4012dfad92657527a")
CHALLENGES = [HTTP01, TLSSNI01, DNS01]
def gen_combos(challbs):
    """Generate natural combinations for challbs.

    Completing any single DV challenge satisfies the CA, so offer each
    index on its own.
    """
    return tuple(map(lambda idx: (idx,), range(len(challbs))))
def chall_to_challb(chall, status):  # pylint: disable=redefined-outer-name
    """Return ChallengeBody from Challenge."""
    kwargs = {
        "chall": chall,
        # Fake URI derived from the challenge type, e.g. "http-01_uri".
        "uri": chall.typ + "_uri",
        "status": status,
    }
    # Valid challenges carry a validation timestamp.
    if status == messages.STATUS_VALID:
        kwargs.update({"validated": datetime.datetime.now()})
    return messages.ChallengeBody(**kwargs)  # pylint: disable=star-args
# Pending ChallengeBody objects
TLSSNI01_P = chall_to_challb(TLSSNI01, messages.STATUS_PENDING)
HTTP01_P = chall_to_challb(HTTP01, messages.STATUS_PENDING)
DNS01_P = chall_to_challb(DNS01, messages.STATUS_PENDING)
CHALLENGES_P = [HTTP01_P, TLSSNI01_P, DNS01_P]
def gen_authzr(authz_status, domain, challs, statuses, combos=True):
    """Generate an authorization resource.

    :param authz_status: Status object
    :type authz_status: :class:`acme.messages.Status`
    :param list challs: Challenge objects
    :param list statuses: status of each challenge object
    :param bool combos: Whether or not to add combinations
    """
    # pylint: disable=redefined-outer-name
    # Pair each challenge with its status and wrap them in ChallengeBodys.
    challbs = tuple(
        chall_to_challb(chall, status)
        for chall, status in six.moves.zip(challs, statuses)
    )
    authz_kwargs = {
        "identifier": messages.Identifier(
            typ=messages.IDENTIFIER_FQDN, value=domain),
        "challenges": challbs,
    }
    if combos:
        authz_kwargs.update({"combinations": gen_combos(challbs)})
    # Valid authorizations additionally carry an expiry ~a month out.
    if authz_status == messages.STATUS_VALID:
        authz_kwargs.update({
            "status": authz_status,
            "expires": datetime.datetime.now() + datetime.timedelta(days=31),
        })
    else:
        authz_kwargs.update({
            "status": authz_status,
        })
    # pylint: disable=star-args
    return messages.AuthorizationResource(
        uri="https://trusted.ca/new-authz-resource",
        new_cert_uri="https://trusted.ca/new-cert",
        body=messages.Authorization(**authz_kwargs)
    )
| {
"repo_name": "jtl999/certbot",
"path": "certbot/tests/acme_util.py",
"copies": "2",
"size": "2722",
"license": "apache-2.0",
"hash": 605440515213098000,
"line_mean": 29.2444444444,
"line_max": 77,
"alpha_frac": 0.6807494489,
"autogenerated": false,
"ratio": 3.1948356807511735,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4875585129651173,
"avg_score": null,
"num_lines": null
} |
# acm spider
from scrapy.spider import BaseSpider
from scrapy.selector import Selector
from codechef_scrape.items import CodechefScrapeItem
class ACMSpider(BaseSpider):
    """Scrapy spider that scrapes the Codechef ACMAMR14 team ranklist."""
    name = "acm"
    allowed_domains = ["codechef.com"]
    CONCURRENT_REQUESTS = 1  # one request at a time, to be polite
    # One URL per ranklist page; xrange -> this is Python 2 / old Scrapy code.
    start_urls = [
        "http://www.codechef.com/teams/list/ACMAMR14?page=%d" % i for i in xrange(1,63)
    ]
    download_delay = 5  # seconds between requests
    def parse(self, response):
        """Parse one ranklist page, yielding one item per team row."""
        hxs = Selector(response)
        table = hxs.xpath('//table[@class="rank-table"]')
        ranks = table.xpath('.//td/b/text()').extract()
        teams_temp = table.xpath('.//td/a/text()').extract()
        # Drop the placeholder account that appears in the link column.
        teams_temp = [x for x in teams_temp if x != 'acmicpcuser']
        team_names = teams_temp
        # NOTE(review): [7:] presumably skips leading header/chrome text
        # nodes of the table -- confirm against the live page markup.
        institutions_temp = table.xpath('.//td/text()').extract()[7:]
        institutions_temp = [x for x in institutions_temp if x != ', ']
        # Remaining text nodes alternate: institution, score, institution, ...
        institutions = institutions_temp[::2]
        scores = institutions_temp[1::2]
        final = zip(ranks,team_names,scores,institutions)
        for i in final:
            item = CodechefScrapeItem()
            item['rank'] = i[0]
            item['team_name'] = i[1]
            item['institution'] = i[3]
            item['score'] = i[2]
            yield item
| {
"repo_name": "supercr7/ranklist_scrape",
"path": "codechef_scrape_amrita/amritapuri/spiders/acm_spider.py",
"copies": "1",
"size": "1168",
"license": "mit",
"hash": -8879266066904678000,
"line_mean": 26.8095238095,
"line_max": 86,
"alpha_frac": 0.6275684932,
"autogenerated": false,
"ratio": 2.956962025316456,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.8840825798015308,
"avg_score": 0.048740944100229416,
"num_lines": 42
} |
# acodec.py
import encodings, codecs
# Our StreamReader
class aStreamReader(codecs.StreamReader):
    """UTF-8 StreamReader that buffers the whole stream on first read and
    serves it back line by line, optionally transformed by
    ``outputFromInput``.
    """
    def outputFromInput(self, input):
        """Hook for transforming the decoded source; identity by default."""
        output = input
        return output
    def readline(self, size = None, keepends = True):
        """Return the next line (always '\\n'-terminated), or u'' at EOF.

        ``size`` and ``keepends`` are accepted for interface compatibility
        but ignored: the whole stream is read and split on first call.
        """
        # Lazily slurp and decode the stream once, caching the lines.
        # (Fix: compare to None with `is`, not `==`.)
        if getattr(self, "pysrc", None) is None:
            input = self.stream.read().decode("utf8")
            output = self.outputFromInput(input)
            self.pysrc = output.splitlines()
        # Pop lines front-to-back; a trailing newline is re-appended
        # because splitlines() stripped it.
        return u'%s\n' % self.pysrc.pop(0) if self.pysrc else u''
def search_function(s):
    """Codec search hook: resolve only the name 'acodec'.

    The returned codec behaves exactly like utf8 except that it reads
    through our aStreamReader.
    """
    if s != "acodec":
        return None
    base = encodings.search_function("utf8")
    return codecs.CodecInfo(
        name='acodec',
        encode=base.encode,
        decode=base.decode,
        incrementalencoder=base.incrementalencoder,
        incrementaldecoder=base.incrementaldecoder,
        streamreader=aStreamReader,  # acodec StreamReader
        streamwriter=base.streamwriter,
    )
# Make the codec discoverable through codecs.lookup()/open(..., encoding=...).
codecs.register(search_function)
| {
"repo_name": "jamesthiele/acodec",
"path": "acodec.py",
"copies": "1",
"size": "1146",
"license": "bsd-2-clause",
"hash": 4858196904928144000,
"line_mean": 37.2,
"line_max": 87,
"alpha_frac": 0.5619546248,
"autogenerated": false,
"ratio": 4.5476190476190474,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5609573672419047,
"avg_score": null,
"num_lines": null
} |
# -*- coding: utf-8 -*-
"""
Created on Sat Aug 19 21:40:35 2017
@author: michael
Simple script that draws the graph of a function
I was figuring this out as I went along, just to see if I can do it.
"""
import matplotlib.pyplot as plt
import numpy as np
import math
#Constants
# Coefficients shared by the function definitions below.
a = 1
b = 8
c = 5#10
d = 6
#plt.plot([2,4,6,8],[1,2,3,4],'ro')
#plt.ylabel('Some Numbers')
def sinFunc(x):
    """Sine wave: f(x) = sin(x)."""
    value = math.sin(x)
    return value
def quadraticFunc(x):
    """Quadratic y = a*x^2 + b*x + c (module-level coefficients)."""
    quadratic_term = a * x ** 2
    linear_term = b * x
    return quadratic_term + linear_term + c
def powerFunc(x):
    """Power function y = a*x^c (module-level coefficients)."""
    return a * (x ** c)
def polinomialFunc(x):
    """Quintic y = a*x^5 - b*x^3 + c*x + d (module-level coefficients)."""
    fifth_term = a * x ** 5
    third_term = b * x ** 3
    return fifth_term - third_term + c * x + d
def someFunc(x):
    """Reciprocal: y = 1/x."""
    result = 1 / x
    return result
# Sample the x axis from -3 to 3 in 0.2 steps (endpoint excluded).
x = np.arange(-3., 3., .2)
# Evaluate each function over the sample grid.
sy = [someFunc(y) for y in x]
qy = [quadraticFunc(y) for y in x]
py = [powerFunc(y) for y in x]
ply = [polinomialFunc(y) for y in x]
#T = np.matrix(t)
# Draw each curve in its own panel of a 2x2 grid.
plt.subplot(2,2,1)
plt.plot(x, sy,'r-.')
plt.grid(True)
plt.subplot(2,2,2)
plt.plot(x,qy,'b-.')
plt.grid(True)
plt.subplot(2,2,3)
plt.plot(x,py,'g-.')
plt.grid(True)
plt.subplot(2,2,4)
plt.plot(x,ply,'y-.')
plt.grid(True)
plt.show() | {
"repo_name": "phoexer/Kelly",
"path": "scripts/Maths/plotting_functions.py",
"copies": "1",
"size": "1139",
"license": "mit",
"hash": 568217566805803650,
"line_mean": 15.5217391304,
"line_max": 68,
"alpha_frac": 0.5820895522,
"autogenerated": false,
"ratio": 2.237721021611002,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.8184579165675852,
"avg_score": 0.027046281627030103,
"num_lines": 69
} |
# -*- coding: utf-8 -*-
import numpy as np
from numpy.random import choice, seed
import sys
import time
import codecs
# Seed the RNG from argv[1] if given, otherwise from the clock.
if len(sys.argv) < 2:
    seed(int(time.time()))
else:
    seed(int(sys.argv[1]))
CODEC = "ISO-8859-1"
filepath = "__wordsFR.txt"      # dictionary of real words, one per line
outfile = "output.txt"          # generated words are appended here
probafile = "count2D_EN.bin"    # 256x256x256 int32 trigram counts
dico = []
with codecs.open(filepath, "r", "ISO-8859-1") as lines:
    for l in lines:
        dico.append(l[:-1])  # strip the trailing newline
# Build conditional byte probabilities p[i, j, k] = P(k | i, j) from counts.
count = np.fromfile(probafile,dtype="int32").reshape(256,256,256)
s = count.sum(axis=2)
st = np.tile(s.T,(256,1,1)).T
p = count.astype('float')/st
p[np.isnan(p)] = 0  # unseen (i, j) contexts would divide 0/0
f = codecs.open(outfile,"w",CODEC)
K = 100
col = 0
# For each target word length, generate words until 6 of that length hit.
for TGT in range(3,13):
    #K = 100
    #for TGT in range(4,11):
    total = 0
    while total<6:
        i=0
        j=0
        res = u''
        # Walk the Markov chain until a newline byte (code 10) is drawn.
        while not j==10:
            k = choice(range(256),1,p=p[i,j,:])[0]
            res = res + chr(k)
            i = j
            j = k
        # Keep only words of exactly the target length (res ends in '\n').
        if len(res) == 1+TGT:
            x=res[:-1]
            # Words that exist in the dictionary are marked with '*'.
            if res[:-1] in dico:
                x = res[:-1]+"*"
            total += 1
            # col alternates 0/1 each hit (apparently unused elsewhere).
            if col == 0:
                col = 1
            else:
                col = 0
            print(x)
            f.write(x+"\n")
f.close() | {
"repo_name": "tomMoulard/python-projetcs",
"path": "worldGen/mots_2D_2.py",
"copies": "1",
"size": "1294",
"license": "apache-2.0",
"hash": -3533516344559282700,
"line_mean": 22.5454545455,
"line_max": 74,
"alpha_frac": 0.4590417311,
"autogenerated": false,
"ratio": 2.9815668202764978,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.39406085513764977,
"avg_score": null,
"num_lines": null
} |
"""A cog allowing users to steal credits from each other."""
import os
import asyncio
import copy
import random
import time
import datetime
import discord
from discord.ext import commands
from __main__ import send_cmd_help
from .utils import checks
from .utils.dataIO import dataIO
# Path of the cog's persistent JSON save file.
SAVE_FILEPATH = "data/KeaneCogs/steal/steal.json"
# Top-level save-file schema.
SAVE_DEFAULT = {
    "Servers": {},
    "Global": {
        "CreditsGivenTime": "1970-01-01T00:00:00.0",  # last hourly payout, ISO format
        "Version": "1.2",
    },
}
# Per-server schema.
SERVER_DEFAULT = {
    "Players": {},
    "TheftCount": 0, # Reset daily by daily_report()
    "Thieves": [], # Reset daily by daily_report()
}
# Per-player schema; the three path names double as level counters (0-99).
PLAYER_DEFAULT = {
    "Active": "Advanced Security",
    "Elite Raid": 0,
    "Advanced Security": 0,
    "Blackmarket Finances": 0,
    "StealTime": 0, # The time that the user last attempted to steal, assigned to dummy value
    "ActivateTime": 0, # The time that the users last activated an upgrade, assigned to dummy value
}
# The selectable upgrade paths.
PRIMARY_UPGRADES = [
    "Elite Raid",
    "Advanced Security",
    "Blackmarket Finances",
]
class Steal:
"""Steal credits from other users and spend credits on upgrades."""
def __init__(self, bot):
    # Persistent state loaded from disk (see SAVE_DEFAULT for the schema).
    self.save_file = dataIO.load_json(SAVE_FILEPATH)
    self.bot = bot
    self.update_version()
    # player id -> that player's menu state machine while `steal` is running.
    self.menu_users = {}
    # Flag read by the menu loop; presumably set on cog unload -- TODO confirm.
    self.unloading = False
    # Background loops: hourly credit payouts and the daily theft report.
    self.loop_task = bot.loop.create_task(self.give_credits())
    self.loop_task2 = bot.loop.create_task(self.daily_report())
@commands.command(pass_context=True, no_pm=True)
async def steal(self, ctx):
    """Steal's main menu. Everything you can do with this cog
    is accessed through this command.

    Runs a per-player state machine over direct messages: each state is a
    dict in self.menu_users[player.id]; the loop below drives transitions
    until the "done" (empty) state is reached.
    """
    server = ctx.message.server
    player = ctx.message.author
    servers = self.save_file["Servers"]
    bank = self.bot.get_cog("Economy").bank
    # Add server
    if server.id not in servers:
        servers[server.id] = copy.deepcopy(SERVER_DEFAULT)
        # doesn't need save_json() because it'll be saved when player is added below
    # Check for bank account
    if not bank.account_exists(player):
        return await self.bot.say("You don't have a bank account. "
                                  "Use `{0}bank register` to open one, "
                                  "then try `{0}steal` again.".format(ctx.prefix))
    # Check if main_menu is already running for them
    if player.id in self.menu_users:
        message = "The command is already running for you here."
        return await self.bot.send_message(player, message)
    # Add user to menu_users.  Choice tuples are either
    # (label, next_menu_key) or (label, coroutine, args) -- the coroutine
    # returns the next menu key.
    self.menu_users[player.id] = {
        "main_menu": {
            "prompt": "What would you like to do?",
            "choice_type": "multi",
            "choices": [
                ("Steal from someone", self.generate_steal_menu, []),
                ("Buy an upgrade", self.generate_upgrade_menu, []),
                ("Activate an upgrade path", self.generate_activate_menu, []),
                ("Quit", "done")
            ]
        },
        "target_not_found": {
            "prompt": "Target not found. Try again?",
            "choice_type": "multi",
            "choices": [
                ("Yes", "steal_menu"),
                ("No", "main_menu")
            ]
        },
        "done": {}
    }
    # Add player, display newbie introduction
    if player.id not in servers[server.id]["Players"]:
        d_message = await self.bot.say("Check for a direct message from me.")
        servers[server.id]["Players"][player.id] = copy.deepcopy(PLAYER_DEFAULT)
        dataIO.save_json(SAVE_FILEPATH, self.save_file)
        message = ("Welcome to the world of crime!\n"
                   "There are three upgrade paths you can choose from. "
                   "You can upgrade in multiple paths at once, but only one "
                   "upgrade path can be active at once. Activating an upgrade "
                   "path means turning on the benefits that path provides "
                   "(and turning off the benefits your previous path provided).\n\n"
                   "Right now, your active path is Advanced Security. Learn more "
                   "about each path at https://github.com/keanemind/Keane-Cogs/wiki/Commands#steal \n\n"
                   "**NOTICE: immediately deleting your `!steal` message "
                   "that invoked this command is recommended every time you "
                   "use steal. This will prevent other members of the server from "
                   "learning that you are using the command, possibly to steal from them.**")
        await self.bot.send_message(player, message)
        await asyncio.sleep(4)
        await self.bot.delete_message(d_message)
    # Menu loop: an empty dict ("done") is falsy and ends the loop.
    current_menu = self.menu_users[player.id]["main_menu"]
    while current_menu and not self.unloading:
        if current_menu["choice_type"] == "multi":
            # Display prompt and choices
            message = current_menu["prompt"]
            for index, choice_tuple in enumerate(current_menu["choices"]):
                message += "\n{}. {}".format(index + 1, choice_tuple[0])
            d_message = await self.bot.send_message(player, message)
            # Receive user's choice
            response = await self.bot.wait_for_message(timeout=60,
                                                       author=player,
                                                       channel=d_message.channel)
            if response is None:
                # Timed out: exit via the "done" state.
                current_menu = self.menu_users[player.id]["done"]
                continue
            try:
                user_choice = int(response.content)
            except ValueError:
                await self.bot.send_message(player, "Please choose a number from the menu.")
                continue
            if user_choice < 1 or user_choice > len(current_menu["choices"]):
                await self.bot.send_message(player, "Please choose a number from the menu.")
                continue
            # Move to next state based on their choice
            choice = current_menu["choices"][user_choice - 1]
            if len(choice) == 3: # a function needs to be called
                next_menu = await choice[1](ctx, *choice[2])
                current_menu = self.menu_users[player.id][next_menu]
            else:
                current_menu = self.menu_users[player.id][choice[1]]
        elif current_menu["choice_type"] == "free": # all free choice menus have a function to call
            # Display prompt
            d_message = await self.bot.send_message(player, current_menu["prompt"])
            # Receive user's choice
            response = await self.bot.wait_for_message(timeout=30,
                                                       author=player,
                                                       channel=d_message.channel)
            if response is None:
                current_menu = self.menu_users[player.id]["done"]
                continue
            # Call function and move to next state based on the function's return
            next_menu = await current_menu["action"](ctx, response, *current_menu["args"])
            current_menu = self.menu_users[player.id][next_menu]
    await self.bot.send_message(player, "Goodbye!")
    del self.menu_users[player.id]
# Beginning of menu functions
async def generate_steal_menu(self, ctx):
    """Build the steal target-selection menu if the one-hour steal
    cooldown has elapsed, and return the key of the menu to show next."""
    author = ctx.message.author
    guild = ctx.message.server
    record = self.save_file["Servers"][guild.id]["Players"][author.id]
    # Enforce the one-hour steal cooldown.
    elapsed = round(time.time() - record["StealTime"])
    if elapsed < 60 * 60:
        remaining = time_left_str(elapsed)
        await self.bot.send_message(
            author, "Steal is on cooldown. Time left: " + remaining)
        return "main_menu"
    # Register a free-text menu whose handler resolves the target name.
    self.menu_users[author.id]["steal_menu"] = {
        "prompt": ("Who do you want to steal from? The user must be on the "
                   "server you used `!steal` in. Enter a nickname, username, "
                   "or for best results, a full tag like Keane#8251."),
        "choice_type": "free",
        "action": self.get_target,
        "args": []
    }
    return "steal_menu"
async def generate_upgrade_menu(self, ctx):
    """Create an upgrade menu dict and add it to the user's menus.
    Return the new menu's key."""
    player = ctx.message.author
    server = ctx.message.server
    playersave = self.save_file["Servers"][server.id]["Players"][player.id]
    # Create menu
    self.menu_users[player.id]["upgrade_menu"] = {
        "prompt": "What would you like to upgrade? \*currently active",
        "choice_type": "multi",
        "choices": []
    }
    # Generate choices and add them to the list; the currently active
    # path is marked with a trailing '*'.
    choices = self.menu_users[player.id]["upgrade_menu"]["choices"]
    for upgrade_name in PRIMARY_UPGRADES:
        option_text = "{} (lvl {})".format(upgrade_name, playersave[upgrade_name])
        if upgrade_name == playersave["Active"]:
            option_text += "*"
        choices.append((option_text, self.attempt_upgrade, [upgrade_name]))
    choices.append(("Go back", "main_menu"))
    return "upgrade_menu"
async def generate_activate_menu(self, ctx):
    """If off cooldown, create an activate menu dict and add it
    to the user's menus. Return the new menu's key."""
    player = ctx.message.author
    server = ctx.message.server
    playersave = self.save_file["Servers"][server.id]["Players"][player.id]
    # Check cooldown (one hour between activations).
    since_activate = round(time.time() - playersave["ActivateTime"])
    if since_activate < 60 * 60:
        time_left = time_left_str(since_activate)
        message = "Activate is on cooldown. Time left: " + time_left
        await self.bot.send_message(player, message)
        return "main_menu"
    # Create menu
    self.menu_users[player.id]["activate_menu"] = {
        "prompt": ("{} is active. What would you like to activate?"
                   .format(playersave["Active"])),
        "choice_type": "multi",
        "choices": []
    }
    # Generate choices and add them to the list -- every path except the
    # one that is already active.
    choices = self.menu_users[player.id]["activate_menu"]["choices"]
    for upgrade_name in PRIMARY_UPGRADES:
        if upgrade_name != playersave["Active"]:
            option_text = "{} (lvl {})".format(upgrade_name, playersave[upgrade_name])
            choices.append((option_text, self.activate, [upgrade_name]))
    choices.append(("Go back", "main_menu"))
    return "activate_menu"
async def get_target(self, ctx, response):
    """Convert the user's response to a target, then steal from the target."""
    server = ctx.message.server
    player = ctx.message.author
    target = server.get_member_named(response.content)
    if target is None:
        return "target_not_found"
    else:
        # No '#discriminator' given, so the name match may be ambiguous:
        # ask the user to confirm before stealing.
        if "#" not in response.content:
            self.menu_users[player.id]["target_confirmation"] = {
                "prompt": "Is {} the correct target?".format(target.mention),
                "choice_type": "multi",
                "choices": [
                    ("Yes", self.attempt_steal, [target]),
                    ("No", "steal_menu")
                ]
            }
            return "target_confirmation"
        return await self.attempt_steal(ctx, target)
async def attempt_steal(self, ctx, target):
    """Wrapper function with safety checks that steals and returns main_menu."""
    server = ctx.message.server
    player = ctx.message.author
    player_data = self.save_file["Servers"][server.id]["Players"]
    bank = self.bot.get_cog("Economy").bank
    # The target must have an Economy bank account to be robbed.
    if not bank.account_exists(target):
        await self.bot.send_message(player, "That person doesn't have a bank account.")
        return "main_menu"
    # Lazily create a save record for targets who never ran `steal`.
    if target.id not in player_data:
        player_data[target.id] = copy.deepcopy(PLAYER_DEFAULT)
        dataIO.save_json(SAVE_FILEPATH, self.save_file)
    await self.steal_credits(ctx, target)
    # Start the steal cooldown only after the attempt completes.
    player_data[player.id]["StealTime"] = time.time()
    dataIO.save_json(SAVE_FILEPATH, self.save_file)
    return "main_menu"
async def attempt_upgrade(self, ctx, upgrade_name):
    """Check if the path is already max level. Returns a menu key."""
    player = ctx.message.author
    server = ctx.message.server
    playersave = self.save_file["Servers"][server.id]["Players"][player.id]
    # 99 is the level cap for every path.
    if playersave[upgrade_name] >= 99:
        await self.bot.send_message(player, "That path is already max level.")
        return "upgrade_menu"
    # Ask how many levels to buy via a free-text menu.
    self.menu_users[player.id]["num_levels_menu"] = {
        "prompt": ("How many levels would you like to upgrade? "
                   "Must be between 1 and {} inclusive. Reply with a non-number "
                   "to cancel.".format(99 - playersave[upgrade_name])),
        "choice_type": "free",
        "action": self.attempt_upgrade2,
        "args": [upgrade_name]
    }
    return "num_levels_menu"
async def attempt_upgrade2(self, ctx, response, upgrade_name):
    """Check if the response is a valid number of levels to upgrade.
    Returns a menu key."""
    player = ctx.message.author
    server = ctx.message.server
    playersave = self.save_file["Servers"][server.id]["Players"][player.id]
    # Check the user's response: any non-number cancels the upgrade.
    try:
        lvls = int(response.content)
    except ValueError:
        await self.bot.send_message(player, "Upgrade cancelled.")
        return "upgrade_menu"
    if lvls < 1 or lvls > 99 - playersave[upgrade_name]:
        await self.bot.send_message(player, "Please choose a number within the range.")
        return "num_levels_menu"
    # Price is the difference of the cumulative cost curve 5 * lvl**1.933;
    # a maxed Blackmarket Finances path halves the price.
    current_lvl = playersave[upgrade_name]
    cost = (5 * (current_lvl + lvls)**1.933) - (5 * current_lvl**1.933)
    if playersave["Blackmarket Finances"] == 99:
        cost = round(cost / 2)
    else:
        cost = round(cost)
    self.menu_users[player.id]["upgrade_confirm_menu"] = {
        "prompt": "This will cost {} credits. If you cannot afford the cost, "
                  "the maximum number of levels you can afford will be upgraded."
                  .format(cost),
        "choice_type": "multi",
        "choices": [
            ("Continue", self.attempt_upgrade3, [upgrade_name, lvls, cost]),
            ("Cancel", "upgrade_menu")
        ]
    }
    return "upgrade_confirm_menu"
async def attempt_upgrade3(self, ctx, upgrade_name, lvls, cost):
    """Try to upgrade the number of levels passed.

    If the player cannot afford `cost`, falls back to the largest number
    of levels their balance covers (possibly zero).
    """
    player = ctx.message.author
    server = ctx.message.server
    playersave = self.save_file["Servers"][server.id]["Players"][player.id]
    bank = self.bot.get_cog("Economy").bank
    if not bank.can_spend(player, cost):
        balance = bank.get_balance(player)
        current_lvl = playersave[upgrade_name]
        # Find how many levels the user can afford to upgrade by
        # inverting the cumulative cost curve 5 * lvl**1.933.
        num = balance / 5
        if playersave["Blackmarket Finances"] == 99:
            num *= 2
        num += current_lvl**1.933
        num = num**(1/1.933) - current_lvl
        # num is the exact number of levels the user can afford to upgrade
        lvls = int(num) # round down
        if lvls == 0:
            message = "You cannot afford to upgrade this path at all."
            await self.bot.send_message(player, message)
            return "upgrade_menu"
        # Calculate reduced cost (same formula as attempt_upgrade2).
        cost = (5 * (current_lvl + lvls)**1.933) - (5 * current_lvl**1.933)
        if playersave["Blackmarket Finances"] == 99:
            cost = round(cost / 2)
        else:
            cost = round(cost)
    bank.withdraw_credits(player, cost)
    playersave[upgrade_name] += lvls
    dataIO.save_json(SAVE_FILEPATH, self.save_file)
    await self.bot.send_message(player, "Upgrade complete.")
    return "main_menu"
async def activate(self, ctx, upgrade_name):
    """Make `upgrade_name` the player's active path, start the
    activation cooldown, and persist the change."""
    author = ctx.message.author
    guild = ctx.message.server
    record = self.save_file["Servers"][guild.id]["Players"][author.id]
    record["Active"] = upgrade_name
    record["ActivateTime"] = time.time()
    dataIO.save_json(SAVE_FILEPATH, self.save_file)
    await self.bot.send_message(author, "Activation complete.")
    return "main_menu"
# End of menu functions
async def steal_credits(self, ctx, target):
    """Steal credits. Contains all the matchup logic.

    First runs a timed keypad mini-game; on success, the outcome depends
    on the attacker's and defender's active upgrade paths.
    """
    player = ctx.message.author
    server = ctx.message.server
    playersave = self.save_file["Servers"][server.id]["Players"][player.id]
    targetsave = self.save_file["Servers"][server.id]["Players"][target.id]
    bank = self.bot.get_cog("Economy").bank
    # Helldivers-like code thing: the player must retype a 13-digit code
    # (shown with dashes) within 15 seconds.
    message = ("Quick! You have 15 seconds to unlock the "
               "door's keypad to get inside! Type the code "
               "below without the dashes. Keep trying until "
               "you're in or time is up.\n")
    await self.bot.send_message(player, message)
    await asyncio.sleep(3)
    code = []
    for _ in range(13):
        code.append(str(random.randint(0, 9)))
    message = "-".join(code)
    d_message = await self.bot.send_message(player, message)
    # Only a message whose content exactly matches the code resolves this.
    response = await self.bot.wait_for_message(timeout=15,
                                               author=player,
                                               channel=d_message.channel,
                                               content="".join(code))
    if response is None:
        await self.bot.send_message(player, "You failed!")
        # Advanced Security defenders may collect insurance on the attempt.
        if (targetsave["Active"] == "Advanced Security"
                and random.randint(1, 100) <= targetsave["Advanced Security"]):
            bank.deposit_credits(target, 1000)
        return
    else:
        await self.bot.send_message(player, "You're in!")
        await asyncio.sleep(1)
    # ATTACKER: ELITE RAID
    if playersave["Active"] == "Elite Raid":
        # Elite Raid v Elite Raid
        if targetsave["Active"] == "Elite Raid":
            if random.randint(1, 100) <= 66:
                await self.er_steal(ctx, target)
            else:
                await self.steal_failure(ctx)
        # Elite Raid v Advanced Security
        elif targetsave["Active"] == "Advanced Security":
            # NOTE(review): at level 99, 33 / 2 = 16.5, so randint <= 16.5
            # behaves as a 16% success chance.
            if targetsave["Advanced Security"] == 99:
                success_chance = 33 / 2
            else:
                success_chance = 33
            if random.randint(1, 100) <= success_chance:
                await self.er_steal(ctx, target)
            else:
                await self.steal_failure(ctx)
            if random.randint(1, 100) <= targetsave["Advanced Security"]:
                bank.deposit_credits(target, 1000)
            # Elite Raid is immune to Advanced Security's cameras
            if playersave["Elite Raid"] >= 66:
                # Chance to sabotage the defender's security level.
                if random.randint(1, 100) <= 33:
                    if targetsave["Advanced Security"] > 5:
                        targetsave["Advanced Security"] -= 5
                    else:
                        targetsave["Advanced Security"] = 0
        # Elite Raid v Blackmarket Finances
        elif targetsave["Active"] == "Blackmarket Finances":
            if random.randint(1, 100) <= 66:
                await self.er_steal(ctx, target)
            else:
                await self.steal_failure(ctx)
            # High-level Blackmarket defenders can damage the raider's path.
            if targetsave["Blackmarket Finances"] >= 66:
                if random.randint(1, 100) <= 33:
                    if playersave["Elite Raid"] > 5:
                        playersave["Elite Raid"] -= 5
                    else:
                        playersave["Elite Raid"] = 0
    # ATTACKER: ADVANCED SECURITY
    elif playersave["Active"] == "Advanced Security":
        # Advanced Security v Elite Raid
        if targetsave["Active"] == "Elite Raid":
            if random.randint(1, 100) <= 33:
                await self.regular_steal(ctx, target)
            else:
                await self.steal_failure(ctx)
        # Advanced Security v Advanced Security
        elif targetsave["Active"] == "Advanced Security":
            if targetsave["Advanced Security"] == 99:
                success_chance = 33 / 2
            else:
                success_chance = 33
            if random.randint(1, 100) <= success_chance:
                await self.regular_steal(ctx, target)
            else:
                await self.steal_failure(ctx)
            if random.randint(1, 100) <= targetsave["Advanced Security"]:
                bank.deposit_credits(target, 1000)
            # Cameras: the defender learns who attacked them.
            if targetsave["Advanced Security"] >= 33:
                await self.reveal_attacker(ctx, target)
        # Advanced Security v Blackmarket Finances
        elif targetsave["Active"] == "Blackmarket Finances":
            if random.randint(1, 100) <= 33:
                await self.regular_steal(ctx, target)
            else:
                await self.steal_failure(ctx)
    # ATTACKER: BLACKMARKET FINANCES
    elif playersave["Active"] == "Blackmarket Finances":
        # Blackmarket Finances v Elite Raid
        if targetsave["Active"] == "Elite Raid":
            if random.randint(1, 100) <= 50:
                await self.regular_steal(ctx, target)
            else:
                await self.steal_failure(ctx)
        # Blackmarket Finances v Advanced Security
        elif targetsave["Active"] == "Advanced Security":
            if targetsave["Advanced Security"] == 99:
                success_chance = 33 / 2
            else:
                success_chance = 33
            if random.randint(1, 100) <= success_chance:
                await self.regular_steal(ctx, target)
            else:
                await self.steal_failure(ctx)
            if random.randint(1, 100) <= targetsave["Advanced Security"]:
                bank.deposit_credits(target, 1000)
            if targetsave["Advanced Security"] >= 33:
                await self.reveal_attacker(ctx, target)
            # High-level defenders can damage the attacker's path.
            if targetsave["Advanced Security"] >= 66:
                if random.randint(1, 100) <= 33:
                    if playersave["Blackmarket Finances"] > 5:
                        playersave["Blackmarket Finances"] -= 5
                    else:
                        playersave["Blackmarket Finances"] = 0
        # Blackmarket Finances v Blackmarket Finances
        elif targetsave["Active"] == "Blackmarket Finances":
            if random.randint(1, 100) <= 50:
                await self.regular_steal(ctx, target)
            else:
                await self.steal_failure(ctx)
    dataIO.save_json(SAVE_FILEPATH, self.save_file)
async def er_steal(self, ctx, target):
    """Elite Raid steal.

    Assumes the matchup roll in steal_credits already succeeded.
    """
    player = ctx.message.author
    server = ctx.message.server
    playersave = self.save_file["Servers"][server.id]["Players"][player.id]
    bank = self.bot.get_cog("Economy").bank
    if playersave["Elite Raid"] == 99:
        # 1/10 chance to steal 110% of wealth, if steal successful in the first place
        if random.randint(1, 100) <= 10:
            # NOTE(review): the extra 10% is deposited without being
            # withdrawn from anyone, and this branch skips the daily
            # report bookkeeping below -- confirm both are intended.
            amt_stolen = round(bank.get_balance(target) * 1.1)
            bank.set_credits(target, 0)
            bank.deposit_credits(player, amt_stolen)
            message = ("You captured a good friend of {0}'s as hostage "
                       "and demanded ransom, which was promptly paid. "
                       "You graciously accepted every credit {0} had, "
                       "plus some that the poor soul took out on a loan "
                       "to meet your demands. All in all, you earned "
                       "yourself {1} credits."
                       .format(target.mention, amt_stolen))
            await self.bot.send_message(player, message)
            return
    amt_stolen = random.randint(1, random.randint(1, 2000))
    # Chance to double the haul scales with the Elite Raid level.
    if random.randint(1, 100) <= playersave["Elite Raid"]:
        amt_stolen *= 2
    if playersave["Elite Raid"] >= 33:
        # steal a bonus 10% of target's wealth
        amt_stolen += round(bank.get_balance(target) * 0.1)
    # Never take more than the target actually has.
    if amt_stolen > bank.get_balance(target):
        amt_stolen = bank.get_balance(target)
    bank.transfer_credits(target, player, amt_stolen)
    message = ("Mission accomplished! You stole {} credits "
               "from {}!".format(amt_stolen, target.mention))
    await self.bot.send_message(player, message)
    # Add to daily report data
    if player.id not in self.save_file["Servers"][server.id]["Thieves"]:
        self.save_file["Servers"][server.id]["Thieves"].append(player.id)
    self.save_file["Servers"][server.id]["TheftCount"] += 1
    dataIO.save_json(SAVE_FILEPATH, self.save_file)
async def regular_steal(self, ctx, target):
    """Regular steal by classes other than Elite Raid."""
    thief = ctx.message.author
    guild = ctx.message.server
    bank = self.bot.get_cog("Economy").bank
    # The nested randint skews the haul toward small amounts; never take
    # more than the target actually has.
    haul = random.randint(1, random.randint(1, 2000))
    if haul > bank.get_balance(target):
        haul = bank.get_balance(target)
    bank.transfer_credits(target, thief, haul)
    notice = ("Mission accomplished! You stole {} credits "
              "from {}!".format(haul, target.mention))
    await self.bot.send_message(thief, notice)
    # Record the theft for the daily report.
    server_save = self.save_file["Servers"][guild.id]
    if thief.id not in server_save["Thieves"]:
        server_save["Thieves"].append(thief.id)
    server_save["TheftCount"] += 1
    dataIO.save_json(SAVE_FILEPATH, self.save_file)
async def reveal_attacker(self, ctx, target):
    """DM the victim the identity of the robber and which upgrade path
    the robber had active."""
    attacker = ctx.message.author
    guild = ctx.message.server
    attacker_save = self.save_file["Servers"][guild.id]["Players"][attacker.id]
    notice = (
        "{}, who had {} active, was spotted by your guard "
        "stealing credits from your bank safe! Your guard "
        "was unable to catch the fiend before they fled."
        .format(attacker.mention, attacker_save["Active"])
    )
    await self.bot.send_message(target, notice)
async def steal_failure(self, ctx):
"""Send a steal failure message to the person who attempted it."""
player = ctx.message.author
messages = [
("Right as you're about to open the safe, you hear footsteps. "
"You and your team flee the scene."),
("You pull hard on the door, making a loud clang, but it seems "
"to be jammed. Maybe there's some kind of hidden mechanism, but "
"guards may have heard you. You scram and and live to see another day."),
("Something about this operation smells fishy. It might be a trap. "
"You call it off."),
("There's nothing in the safe! Maybe its owner knew you were coming?"),
("What in the world!? Two armed guards jump out at you. You and the "
"team run like the wind and barely get out with your heads on your necks.")
]
message = random.choice(messages) + "\n**Steal failed.**"
await self.bot.send_message(player, message)
    async def give_credits(self):
        """Background loop: deposit hourly credits to Blackmarket Finances users.

        Each payout happens at a random minute/second within the hour so
        players cannot predict the exact deposit time.
        """
        await self.bot.wait_until_ready()
        while True:
            now = datetime.datetime.utcnow()
            bank = self.bot.get_cog("Economy").bank
            # CreditsGivenTime is stored as an ISO-format string; parse it back.
            last_given = datetime.datetime.strptime(self.save_file["Global"]["CreditsGivenTime"],
                                                    "%Y-%m-%dT%H:%M:%S.%f")
            # If credits were already given this hour, sleep until a random
            # moment in the next hour; otherwise fall through and pay out
            # immediately (e.g. after a restart that missed an hour).
            if last_given.hour == now.hour and last_given.date() == now.date():
                next_time = now + datetime.timedelta(hours=1)
                next_time = next_time.replace(minute=random.randint(0, 59),
                                              second=random.randint(1, 59),
                                              microsecond=0)
                # If next_time is X:00:00 and the sleep below is slightly short,
                # the hour will still be the previous hour and credits could be given
                # twice in the same hour. To be safe, the minimum second is 1.
                await asyncio.sleep((next_time - now).total_seconds())
            for serverid in self.save_file["Servers"]:
                server = self.bot.get_server(serverid)
                for playerid in self.save_file["Servers"][serverid]["Players"]:
                    playersave = self.save_file["Servers"][serverid]["Players"][playerid]
                    # Only pay users whose active upgrade is Blackmarket
                    # Finances with a positive level; the level doubles as
                    # the hourly payout amount.
                    if (playersave["Active"] == "Blackmarket Finances"
                            and playersave["Blackmarket Finances"] > 0):
                        player = server.get_member(playerid)
                        bank.deposit_credits(player, playersave["Blackmarket Finances"])
            self.save_file["Global"]["CreditsGivenTime"] = datetime.datetime.utcnow().isoformat()
            dataIO.save_json(SAVE_FILEPATH, self.save_file)
    async def daily_report(self):
        """Background loop: announce each server's daily theft statistics.

        Runs once per day at 02:00 UTC, then resets the per-server theft
        counters.
        """
        await self.bot.wait_until_ready()
        now = datetime.datetime.utcnow()
        # First report fires at the next 02:00 UTC.
        wake_time = now.replace(hour=2, minute=0, second=0, microsecond=0)
        if now.time() > wake_time.time():
            wake_time = wake_time + datetime.timedelta(days=1)
        while True:
            await asyncio.sleep((wake_time - datetime.datetime.utcnow()).total_seconds())
            wake_time = wake_time + datetime.timedelta(days=1)
            for serverid in self.save_file["Servers"]:
                serverdata = self.save_file["Servers"][serverid]
                message = ("Announcement from the Royal Navy: \n"
                           "Today there were {} counts of theft "
                           "perpetrated by {} members of this server. "
                           "The Royal Navy cautions all members to remain "
                           "vigilant in these lawless times."
                           .format(serverdata["TheftCount"], len(serverdata["Thieves"])))
                await self.bot.send_message(self.bot.get_server(serverid), message)
                # Reset the daily tallies once reported.
                serverdata["TheftCount"] = 0
                serverdata["Thieves"].clear()
            dataIO.save_json(SAVE_FILEPATH, self.save_file)
def update_version(self):
"""Update the save file if necessary."""
if "Version" not in self.save_file["Global"]: # if Version 1.0
for serverid in self.save_file["Servers"]:
for playerid in self.save_file["Servers"][serverid]["Players"]:
playersave = self.save_file["Servers"][serverid]["Players"][playerid]
playersave["StealTime"] = playersave["LatestSteal"]
del playersave["LatestSteal"]
playersave["ActivateTime"] = 0
self.save_file["Global"]["Version"] = "1.1"
if self.save_file["Global"]["Version"] == "1.1":
for serverid in self.save_file["Servers"]:
for playerid in self.save_file["Servers"][serverid]["Players"]:
playersave = self.save_file["Servers"][serverid]["Players"][playerid]
convert_dict = {
"AS": "Advanced Security",
"ER": "Elite Raid",
"BF": "Blackmarket Finances"
}
playersave["Active"] = convert_dict[playersave["Active"]]
for key, value in convert_dict.items():
playersave[value] = playersave[key]
del playersave[key]
self.save_file["Global"]["Version"] = "1.2"
dataIO.save_json(SAVE_FILEPATH, self.save_file)
def __unload(self):
self.unloading = True
self.loop_task.cancel()
self.loop_task2.cancel()
def time_left_str(since_time):
    """Return an H:MM:SS string for the time remaining until the
    1-hour steal cooldown is over.

    Parameters
    ----------
    since_time: seconds elapsed since the last steal. Expected to be in
        [0, 3600]. Fractional seconds are truncated — the original
        version raised ValueError for float input because "{:d}" rejects
        the floats that divmod then produces (e.g. values derived from
        timedelta.total_seconds()).
    """
    until_available = (60 * 60) - int(since_time)
    minute, second = divmod(until_available, 60)
    hour, minute = divmod(minute, 60)
    return "{:d}:{:02d}:{:02d}".format(hour, minute, second)
def dir_check():
    """Ensure the cog's data folder and save file exist, creating defaults otherwise."""
    folder = "data/KeaneCogs/steal"
    if not os.path.exists(folder):
        print("Creating data/KeaneCogs/steal folder...")
        os.makedirs(folder)
    # A missing or corrupt save file is replaced with the defaults.
    if not dataIO.is_valid_json(SAVE_FILEPATH):
        print("Creating default steal.json...")
        dataIO.save_json(SAVE_FILEPATH, SAVE_DEFAULT)
def setup(bot):
    """Entry point used by the bot to load this cog."""
    # Make sure the data directory and save file exist before the cog
    # tries to load them in its constructor.
    dir_check()
    bot.add_cog(Steal(bot))
| {
"repo_name": "keanemind/Keane-Cogs",
"path": "steal/steal.py",
"copies": "1",
"size": "34786",
"license": "mit",
"hash": 7638766245869105000,
"line_mean": 41.5256723716,
"line_max": 108,
"alpha_frac": 0.5463117346,
"autogenerated": false,
"ratio": 4.208832425892317,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5255144160492317,
"avg_score": null,
"num_lines": null
} |
"""A cog that requires server users to feed the bot in return for benefits."""
import os
import random
import asyncio
import copy
import datetime
import discord
from discord.ext import commands
from __main__ import send_cmd_help
from .utils import checks
from .utils.dataIO import dataIO
# Location of the cog's persisted state on disk.
SAVE_FILEPATH = "data/KeaneCogs/parrot/parrot.json"

# Shape of a brand-new save file: per-server data plus global settings.
SAVE_DEFAULT = {
    "Servers": {},
    "Global": {
        "StarveTime": [5, 0], # the hour and minute of the day that starve_check runs
        "PerchInterval": 20, # the number of minutes between perches
        "Version": "2.3"
    }
}

# Default data created for each server the first time Parrot sees it.
SERVER_DEFAULT = {
    "Parrot": {
        "Appetite": 0, # max number of pellets Parrot can be fed (reset by starve_check)
        "ChecksAlive": 0, # number of starve_checks survived
        "HoursAlive": 0, # number of hours Parrot has been alive in the server
        "UserWith": "", # ID of user Parrot is perched on (reset by starve_check)
        "Fullness": 0, # number of pellets Parrot has in his belly (reset by starve_check)
        "Cost": 5, # cost of feeding Parrot 1 pellet
        "StarvedLoops": 0, # phase of starvation Parrot is in
        "WarnedYet": False, # whether the server has been warned for the current self.checktime
        "StealAvailable": True # whether steal is available for the perched user (reset by perch_loop)
    },
    "Feeders": {} # contains user IDs as keys and dicts as values (reset by starve_check)
}

# Default per-user record stored under a server's "Feeders" dict.
FEEDER_DEFAULT = {
    "PelletsFed": 0,
    "HeistBoostAvailable": True,
    "AirhornUses": 0,
    "StolenFrom": [],
    "CreditsCollected": 0.0
}
class Parrot:
"""Commands related to feeding the bot."""
    def __init__(self, bot):
        """Load saved state, migrate old save formats, and start the loop.

        Note: update_version() must run before update_looptimes() so the
        loop times are computed against the migrated save format.
        """
        self.save_file = dataIO.load_json(SAVE_FILEPATH)
        self.bot = bot
        self.update_version()
        self.checktime = datetime.datetime.utcnow() # dummy value
        self.perchtime = datetime.datetime.utcnow() # dummy value
        self.update_looptimes(False) # change checktime to what it should be
                                     # without causing a new warning
        self.loop_task = bot.loop.create_task(self.loop()) # remember to change __unload()
    @commands.command(pass_context=True, no_pm=True)
    async def feed(self, ctx, amount: int):
        """Feed the parrot! Use \"{prefix}help parrot\" for more information."""
        # NOTE: the docstring above is shown as the command's help text,
        # so it must stay user-facing.
        server = ctx.message.server
        # make sure the server is in the data file
        self.add_server(server)
        parrot = self.save_file["Servers"][server.id]["Parrot"]
        feeders = self.save_file["Servers"][server.id]["Feeders"]
        bank = self.bot.get_cog('Economy').bank
        # Checks: reject users without a bank account, non-positive
        # amounts, and feeding when Parrot is already full.
        error_msg = ""
        if not bank.account_exists(ctx.message.author):
            error_msg = ("You need to have a bank account with credits to feed me. "
                         "Use `{}bank register` to open one.".format(ctx.prefix))
        elif amount <= 0:
            error_msg = "You can't feed me nothing!"
        elif parrot["Fullness"] == parrot["Appetite"]:
            error_msg = "I'm full! I don't want to get fat."
        if error_msg:
            return await self.bot.say(error_msg)
        # make sure parrot doesn't get overfed: clamp to remaining appetite
        if parrot["Fullness"] + amount > parrot["Appetite"]:
            amount = parrot["Appetite"] - parrot["Fullness"]
            await self.bot.say("I don't want to be too full. I'll only eat {} pellets, "
                               "and you can keep the rest.".format(amount))
        usercost = amount * parrot["Cost"]
        # confirmation prompt (15-second window, only "yes" proceeds)
        await self.bot.say("You are about to spend {} credits to feed me {} pellets. "
                           "Reply \"yes\" to confirm.".format(usercost, amount))
        response = await self.bot.wait_for_message(timeout=15, author=ctx.message.author)
        if response is None or response.content.lower().strip() != "yes":
            return await self.bot.say("Okay then, but don't let me starve!")
        # deduct usercost from their credits account
        if bank.can_spend(ctx.message.author, usercost):
            bank.withdraw_credits(ctx.message.author, usercost)
        else:
            return await self.bot.say("You don't have enough credits to feed me that much.")
        # set up user's dict in the data file
        if ctx.message.author.id not in feeders:
            feeders[ctx.message.author.id] = copy.deepcopy(FEEDER_DEFAULT)
        # record how much the user has fed for the day
        feeders[ctx.message.author.id]["PelletsFed"] += amount
        # change parrot's fullness level
        parrot["Fullness"] += amount
        dataIO.save_json(SAVE_FILEPATH, self.save_file)
        return await self.bot.say("Om nom nom. Thanks!")
    @commands.group(pass_context=True)
    async def parrot(self, ctx):
        """Parrot needs to be fed! Every day, Parrot has a different appetite value,
        which is how many food pellets he would like to be fed for the day.
        Spend your credits to feed Parrot pellets using the "{prefix}feed" command,
        and find out how full Parrot is or what his appetite is by using the "{prefix}parrot info" command.
        Every so often, Parrot perches on the shoulder of a random user who has fed him.
        The fraction of Parrot's appetite that you have fed is your chance of being perched on by Parrot.
        In return for providing your shoulder to him, Parrot will help you and give you powers.
        For example, he can assist you with Heists."""
        # Command group container: if invoked without a subcommand, show
        # the help text above instead of doing anything.
        if ctx.invoked_subcommand is None:
            await send_cmd_help(ctx)
    @parrot.command(name="starvetime", pass_context=True) # no_pm=False
    @checks.is_owner()
    async def parrot_starve_time(self, ctx, hour: int = None, minute: int = 0):
        """View or change the time at which Parrot checks whether he has starved
        and resets his appetite. This command takes UTC time.
        (0 <= hour <= 23) (0 <= minute <= 59)"""
        # With no arguments, just display the current setting.
        if hour is None:
            cur_hour = self.save_file["Global"]["StarveTime"][0]
            cur_minute = self.save_file["Global"]["StarveTime"][1]
            cur_time = datetime.time(cur_hour, cur_minute)
            return await self.bot.say("Current setting: {} UTC".format(cur_time.strftime("%H:%M")))
        # Validate the requested wall-clock time.
        if not (0 <= hour <= 23 and 0 <= minute <= 59):
            return await self.bot.say("Hour must be greater than -1 and less than 24. "
                                      "Minute must be greater than -1 and less than 60. "
                                      "Both numbers must be integers.")
        # confirmation prompt (global setting, so require an explicit "yes")
        await self.bot.say("This is a global setting that affects all servers the bot is connected to. "
                           "Parrot checks whether he has starved every day at a certain time. "
                           "Parrot will check every day (including today if possible) at {} UTC. "
                           "Reply \"yes\" to confirm."
                           .format(datetime.time(hour, minute).strftime("%H:%M")))
        response = await self.bot.wait_for_message(timeout=15, author=ctx.message.author)
        if response is None or response.content.lower().strip() != "yes":
            return await self.bot.say("Setting change cancelled.")
        self.save_file["Global"]["StarveTime"] = [hour, minute]
        dataIO.save_json(SAVE_FILEPATH, self.save_file)
        # Recompute checktime/perchtime immediately so the new StarveTime
        # takes effect without a restart.
        self.update_looptimes()
        return await self.bot.say("Setting change successful.")
    @parrot.command(name="perchinterval", pass_context=True) # no_pm=False
    @checks.is_owner()
    async def parrot_perch_interval(self, ctx, minutes: int = None):
        """View or change how many minutes pass between perches."""
        # With no argument, just display the current setting.
        if minutes is None:
            interval = self.save_file["Global"]["PerchInterval"]
            return await self.bot.say("Current setting: {} minutes".format(interval))
        if not 0 < minutes <= 1440:
            return await self.bot.say("The number of minutes must be greater than 0 "
                                      "and less than or equal to 1440.")
        # The interval must divide evenly into (or be a multiple of) an
        # hour so perch times stay aligned with checktime.
        if not (60 % minutes == 0 or minutes % 60 == 0):
            return await self.bot.say("The number of minutes must be a factor or "
                                      "multiple of 60.")
        # confirmation prompt (global setting, so require an explicit "yes")
        await self.bot.say("This is a global setting that affects all servers the bot is connected to. "
                           "Every day, the first perch is at Parrot's starve time. For the rest of the day, "
                           "Parrot will wait {} minutes between perches. Reply \"yes\" "
                           "to confirm.".format(minutes))
        response = await self.bot.wait_for_message(timeout=15, author=ctx.message.author)
        if response is None or response.content.lower().strip() != "yes":
            return await self.bot.say("Setting change cancelled.")
        self.save_file["Global"]["PerchInterval"] = minutes
        dataIO.save_json(SAVE_FILEPATH, self.save_file)
        self.update_looptimes() # this updates self.perchtime with the new interval
        return await self.bot.say("Setting change successful.")
    @parrot.command(name="checknow", pass_context=True) # no_pm=False
    @checks.is_owner()
    async def parrot_check_now(self, ctx):
        """Execute a starve check immediately. This will move Parrot to the next
        appetite loop if he survives."""
        await self.starve_check()
        # Confirm via DM so the check isn't announced in a channel.
        return await self.bot.send_message(ctx.message.author,
                                           "starve_check was executed.")
@parrot.command(name="setcost", pass_context=True, no_pm=True)
@checks.admin_or_permissions(manage_server=True)
async def parrot_set_cost(self, ctx, cost: int):
"""Change how much it costs to feed the parrot 1 pellet."""
server = ctx.message.server
self.add_server(server) # make sure the server is in the data file
if cost >= 0:
self.save_file["Servers"][server.id]["Parrot"]["Cost"] = cost
dataIO.save_json(SAVE_FILEPATH, self.save_file)
return await self.bot.say("Set cost of feeding to {} credits per pellet.".format(cost))
else:
return await self.bot.say("Cost must be at least 0.")
    @parrot.command(name="steal", pass_context=True, no_pm=True)
    async def parrot_steal(self, ctx, target: discord.Member):
        """Get Parrot to steal up to 1000 of someone's credits for you.
        One use per perch. Parrot will not steal from people who have
        fed him. Parrot will not steal from someone twice in a day."""
        self.add_server(ctx.message.server) # make sure the server is in the data file
        feeders = self.save_file["Servers"][ctx.message.server.id]["Feeders"]
        parrot = self.save_file["Servers"][ctx.message.server.id]["Parrot"]
        bank = self.bot.get_cog('Economy').bank
        # checks — evaluated in priority order; only the first failure is
        # reported.
        error_msg = ""
        if ctx.message.author.id != parrot["UserWith"]:
            error_msg = ("Parrot needs to be perched on you to use this command. "
                         "Use `{}help parrot` for more information.".format(ctx.prefix))
        elif not parrot["StealAvailable"]:
            error_msg = ("You have already used steal. You must wait until "
                         "the next time you are perched on.")
        elif not bank.account_exists(target):
            error_msg = "Your target doesn't have a bank account to steal credits from."
        elif target.id in feeders and feeders[target.id]["PelletsFed"] > 0:
            error_msg = ("Parrot refuses to steal from someone "
                         "who has fed him in the current fullness cycle.")
        # NOTE(review): indexing feeders by the author's id looks safe here
        # because this branch is only reached when the author is the perched
        # user, and perched users appear to always have a feeder entry —
        # confirm against loop()/starve_check().
        elif target.id in feeders[ctx.message.author.id]["StolenFrom"]:
            error_msg = ("You have already stolen from this person today. "
                         "It is too risky to try a second time.")
        if error_msg:
            return await self.bot.say(error_msg)
        await self.bot.say("Parrot flies off...")
        await asyncio.sleep(3)
        # Nested uniform draw biases the haul toward smaller amounts.
        stolen = round(random.uniform(1, random.uniform(1, 1000)))
        target_balance = bank.get_balance(target)
        # Cap the theft at the target's full balance.
        if stolen >= target_balance:
            bank.transfer_credits(target, ctx.message.author, target_balance)
            msg = ("Parrot stole every last credit ({} credits) from "
                   "{}'s bank account and deposited it in your account!"
                   .format(target_balance, target.mention))
        else:
            bank.transfer_credits(target, ctx.message.author, stolen)
            msg = ("Parrot stole {} credits from {}'s bank account "
                   "and deposited it in your account!"
                   .format(stolen, target.mention))
        # One steal per perch; remember the victim to block repeats today.
        parrot["StealAvailable"] = False
        feeders[ctx.message.author.id]["StolenFrom"].append(target.id)
        dataIO.save_json(SAVE_FILEPATH, self.save_file)
        return await self.bot.say(msg)
    @parrot.command(name="airhorn", pass_context=True, no_pm=True)
    async def parrot_airhorn(self, ctx, channel: discord.Channel):
        """Play an airhorn sound to the target voice channel."""
        # This is copy-pasted from audio.py's play() function and has
        # been modified to always play an airhorn.
        # Audio.py is a part of Red Bot, which is licensed under GPL v3
        # https://www.gnu.org/licenses/gpl-3.0.en.html
        # CHANGES:
        # The function has been renamed to parrot_airhorn, and takes a
        # channel instead of a URL as an argument now.
        # The URL is now hard-coded to be a YouTube link.
        # The try-except clause has been commented out. The calls for
        # functions within audio.py have been changed
        # Changes from self.function() to audio.function() .
        # Newly added lines are labeled with a comment "NEW".
        # No other changes were made.
        server = ctx.message.server
        self.add_server(server) # NEW
        # Only the user Parrot is perched on may use the airhorn, and only
        # three times per fullness cycle.
        if ctx.message.author.id != self.save_file["Servers"][server.id]["Parrot"]["UserWith"]: # NEW
            return await self.bot.say("Parrot needs to be perched on you to use this command. "
                                      "Use `{}help parrot` for more information.".format(ctx.prefix)) # NEW
        if self.save_file["Servers"][server.id]["Feeders"][ctx.message.author.id]["AirhornUses"] >= 3: # NEW
            return await self.bot.say("You have already used airhorn 3 times. You must wait until "
                                      "Parrot's fullness resets, and be perched on by him again.") # NEW
        audio = self.bot.get_cog('Audio') # NEW
        url = "https://www.youtube.com/watch?v=XDvuAYySJj0" # This line was changed to be a hard-coded
                                                            # YouTube link instead of being a URL argument.
        # Checking if playing in current server
        if audio.is_playing(server):
            await self.bot.say("Parrot is already playing music in a channel on this server.")
            return # Default to queue
        # Checking already connected, will join if not
        # try:
        #     audio.has_connect_perm(target, server)
        # except AuthorNotConnected:
        #     await self.bot.say("You must join a voice channel before I can"
        #                        " play anything.")
        #     return
        # except UnauthorizedConnect:
        #     await self.bot.say("I don't have permissions to join your"
        #                        " voice channel.")
        #     return
        # except UnauthorizedSpeak:
        #     await self.bot.say("I don't have permissions to speak in your"
        #                        " voice channel.")
        #     return
        # except ChannelUserLimit:
        #     await self.bot.say("Your voice channel is full.")
        #     return
        if not audio.voice_connected(server):
            await audio._join_voice_channel(channel)
        else: # We are connected but not to the right channel
            if audio.voice_client(server).channel != channel:
                await audio._stop_and_disconnect(server)
                await audio._join_voice_channel(channel)
        # If not playing, spawn a downloader if it doesn't exist and begin
        # downloading the next song
        if audio.currently_downloading(server):
            await audio.bot.say("I'm already downloading a file!")
            return
        url = url.strip("<>")
        if audio._match_any_url(url):
            if not audio._valid_playable_url(url):
                await self.bot.say("That's not a valid URL.")
                return
        else:
            # NOTE(review): this replace() is a no-op kept from the original
            # audio.py code path (which replaced a different character).
            url = url.replace("/", "/")
            url = "[SEARCH:]" + url
        if "[SEARCH:]" not in url and "youtube" in url:
            url = url.split("&")[0] # Temp fix for the &list issue
        audio._stop_player(server)
        audio._clear_queue(server)
        audio._add_to_queue(server, url)
        # Track the per-cycle airhorn usage for the perched user.
        self.save_file["Servers"][server.id]["Feeders"][ctx.message.author.id]["AirhornUses"] += 1 # NEW
        dataIO.save_json(SAVE_FILEPATH, self.save_file) # NEW
    @parrot.command(name="info", pass_context=True, no_pm=True, aliases=["stats"])
    async def parrot_info(self, ctx):
        """Information about the parrot."""
        server = ctx.message.server
        self.add_server(server) # make sure the server is in the data file
        parrot = self.save_file["Servers"][server.id]["Parrot"]
        fullness_str = "{} out of {} pellets".format(parrot["Fullness"], parrot["Appetite"])
        feed_cost_str = "{} credits per pellet".format(parrot["Cost"])
        days_living_str = "{} days".format(round(parrot["HoursAlive"] / 24))
        # status and time_until_starved depend on which starvation phase
        # Parrot is in (0 = healthy, 1 = starving, 2 = deathbed).
        if parrot["StarvedLoops"] == 0:
            status_str = "healthy"
            time_until_starved_str = "until Parrot begins\nstarving: "
        elif parrot["StarvedLoops"] == 1:
            status_str = "starving"
            time_until_starved_str = "until Parrot becomes\ndeathly hungry:\n"
        else:
            status_str = "deathbed\n(will die if not fed!)"
            time_until_starved_str = "until Parrot dies of\nstarvation: "
        # At half fullness or more, Parrot is safe for this cycle.
        # NOTE(review): assumes Appetite > 0 here — add_server always sets a
        # positive appetite, but verify no path leaves it at 0.
        if parrot["Fullness"] / parrot["Appetite"] >= 0.5:
            description_str = ("Parrot has been fed enough food that he won't starve for now. "
                               "Use `{}help parrot` for more information.".format(ctx.prefix))
            time_until_starved_str = "until fullness resets:\n"
            if parrot["StarvedLoops"] > 0:
                status_str = "recovering"
        else:
            description_str = ("If Parrot is not fed enough to be half full by the time "
                               "the timer reaches 0, he will enter the next phase of "
                               "starvation. Use `{}help parrot` for more information.".format(ctx.prefix))
        if parrot["ChecksAlive"] == 0:
            # add an extra day because the first check won't starve or change Parrot's appetite
            until_starved = (self.checktime + datetime.timedelta(days=1)
                             - datetime.datetime.utcnow())
        else:
            until_starved = self.checktime - datetime.datetime.utcnow()
        seconds = round(until_starved.total_seconds())
        time_until_starved_str += str(datetime.timedelta(seconds=seconds))
        if parrot["UserWith"]:
            userwith_str = server.get_member(parrot["UserWith"]).mention
        else:
            userwith_str = "nobody"
        # Assemble the embed shown to the user.
        embed = discord.Embed(color=discord.Color.teal(), description=description_str)
        embed.title = "Parrot Information"
        # Timestamp is the cog file's last-modified time (shows the build age).
        embed.timestamp = datetime.datetime.utcfromtimestamp(os.path.getmtime(os.path.abspath(__file__)))
        embed.set_thumbnail(url="{}".format(self.bot.user.avatar_url if self.bot.user.avatar_url
                                            else self.bot.user.default_avatar_url))
        embed.set_footer(text="Made by Keane")
        embed.add_field(name="Fullness", value=fullness_str)
        embed.add_field(name="Cost to feed", value=feed_cost_str)
        embed.add_field(name="Age", value=days_living_str)
        embed.add_field(name="Status", value=status_str)
        embed.add_field(name="Perched on", value=userwith_str)
        embed.add_field(name="Countdown", value=time_until_starved_str)
        return await self.bot.say(embed=embed)
    @parrot.command(name="feeders", pass_context=True, no_pm=True)
    async def parrot_feeders(self, ctx):
        """Display a list of people who have fed Parrot in the current appetite
        loop, with the number of pellets they have fed and the percent chance
        they have of being perched on."""
        server = ctx.message.server
        self.add_server(server) # make sure the server is in the data file
        output = "```py\n"
        feeders = self.save_file["Servers"][server.id]["Feeders"]
        parrot = self.save_file["Servers"][server.id]["Parrot"]
        # The first perched user of the day is in feeders
        # but may not have fed any pellets. If so, ignore them.
        fedparrot = [feederid for feederid in feeders
                     if feeders[feederid]["PelletsFed"] > 0]
        if not fedparrot:
            return await self.bot.say("```Nobody has fed Parrot yet.```")
        # Sort feeders by pellets fed, descending.
        idlist = sorted(fedparrot,
                        key=(lambda idnum: feeders[idnum]["PelletsFed"]),
                        reverse=True)
        # Column widths are derived from the top feeder so every row lines up
        # in the fixed 26-character layout.
        max_chance = (feeders[idlist[0]]["PelletsFed"] / parrot["Appetite"]) * 100
        max_chance_len = len(str(round(max_chance)))
        max_pellets = feeders[idlist[0]]["PelletsFed"]
        max_pellets_len = len(str(max_pellets))
        # example: "  155/100%"
        max_end_len = 1 + max_pellets_len + 1 + max_chance_len + 1
        for feederid in idlist:
            feeder = server.get_member(feederid)
            chance = (feeders[feederid]["PelletsFed"] / parrot["Appetite"]) * 100
            chance_str = str(round(chance))
            if len(feeder.display_name) > 26 - max_end_len:
                # 26 - 3 to leave room for the ellipsis
                name = feeder.display_name[:23 - max_end_len] + "..."
            else:
                name = feeder.display_name
            output += name
            pellets_str = str(feeders[feederid]["PelletsFed"])
            # example: "  1/  1%"
            end_len = 1 + len(pellets_str) + 1 + max_chance_len + 1
            # Pad between the name and the right-aligned numbers.
            output += " " * (26 - len(name) - end_len)
            # append the end
            output += " " + pellets_str + "|"
            output += " " * (max_chance_len - len(chance_str))
            output += chance_str + "%"
            output += "\n"
        output += "```"
        return await self.bot.say(output)
    async def loop(self):
        """Loop forever to do four tasks:
        Update HoursAlive, warn servers when Parrot is starving soon,
        perch on users at perchtime, and reset Parrot at checktime."""
        await self.bot.wait_until_ready()
        self.update_looptimes()
        current_hour = datetime.datetime.utcnow().hour
        while True:
            now = datetime.datetime.utcnow()
            # Update HoursAlive whenever the wall-clock hour rolls over.
            if current_hour != now.hour:
                current_hour = now.hour
                for serverid in self.save_file["Servers"]:
                    self.save_file["Servers"][serverid]["Parrot"]["HoursAlive"] += 1
                dataIO.save_json(SAVE_FILEPATH, self.save_file)
            # Send starvation warnings to each server (if they haven't been sent yet)
            # starting 4 hours before checktime.
            stoptime = self.checktime + datetime.timedelta(hours=-4)
            if stoptime <= now:
                change = False
                for serverid in self.save_file["Servers"]:
                    parrot = self.save_file["Servers"][serverid]["Parrot"]
                    if (parrot["ChecksAlive"] > 0
                            and (parrot["Fullness"] / parrot["Appetite"]) < 0.5
                            and not parrot["WarnedYet"]):
                        # Warning severity scales with the starvation phase.
                        if parrot["StarvedLoops"] == 0:
                            await self.bot.send_message(
                                self.bot.get_server(serverid),
                                "*I'm quite hungry...*")
                        elif parrot["StarvedLoops"] == 1:
                            await self.bot.send_message(
                                self.bot.get_server(serverid),
                                "*I'm so hungry I feel weak...*")
                        else:
                            await self.bot.send_message(
                                self.bot.get_server(serverid),
                                "*I'm going to* ***DIE*** *of starvation very "
                                "soon if I don't get fed!*")
                        parrot["WarnedYet"] = True
                        change = True
                if change:
                    dataIO.save_json(SAVE_FILEPATH, self.save_file)
            # Perch
            if self.perchtime <= now:
                # Choose perched user, weighted by each feeder's share of
                # Parrot's appetite; the remainder weights "nobody".
                for serverid in self.save_file["Servers"]:
                    feeders = self.save_file["Servers"][serverid]["Feeders"]
                    parrot = self.save_file["Servers"][serverid]["Parrot"]
                    weights = [(feeders[feederid]["PelletsFed"] / parrot["Appetite"])
                               * 100 for feederid in feeders]
                    population = list(feeders)
                    weights.append(100 - sum(weights))
                    population.append("")
                    # Randomly choose who Parrot is with. This could be nobody, represented by ""
                    try:
                        parrot["UserWith"] = random.choices(population, weights)[0] #random.choices returns a list
                    except AttributeError:
                        # DIY random.choices alternative for scrubs who don't have Python 3.6
                        total = 0
                        cum_weights = []
                        for num in weights:
                            total += num
                            cum_weights.append(total)
                        rand = random.uniform(0, 100)
                        for index, weight in enumerate(cum_weights):
                            if weight >= rand:
                                parrot["UserWith"] = population[index]
                                break
                # Reset at checktime (checktime is always on a perchtime)
                if self.checktime <= now:
                    await self.display_collected()
                    await self.starve_check()
                    self.update_looptimes() # checktime must be updated daily
                # Collect coins for perched user
                for serverid in self.save_file["Servers"]:
                    self.collect_credits(serverid)
                    self.save_file["Servers"][serverid]["Parrot"]["StealAvailable"] = True
                # Update perchtime
                interval = self.save_file["Global"]["PerchInterval"]
                self.perchtime = self.perchtime + datetime.timedelta(minutes=interval)
                dataIO.save_json(SAVE_FILEPATH, self.save_file)
            # Poll roughly once a second.
            await asyncio.sleep(1)
    async def starve_check(self):
        """Check if Parrot has starved or not.
        If Parrot has starved, leave the server. If he has survived,
        reset for the next loop."""
        for serverid in list(self.save_file["Servers"]): # generate a list because servers might
                                                         # be removed from the dict while iterating
            parrot = self.save_file["Servers"][serverid]["Parrot"]
            feeders = self.save_file["Servers"][serverid]["Feeders"]
            # don't check on the first loop to give new servers a chance
            # in case they got added at an unlucky time (right before the check happens)
            reset = False
            if parrot["ChecksAlive"] == 0:
                parrot["ChecksAlive"] += 1
            elif parrot["Fullness"] / parrot["Appetite"] < 0.5:
                # Underfed: after surviving two starved loops, Parrot dies
                # and the bot leaves the server entirely.
                if parrot["StarvedLoops"] == 2:
                    await self.bot.send_message(
                        self.bot.get_server(serverid),
                        "Oh no! I've starved to death!\n"
                        "Goodbye, cruel world!")
                    await self.bot.leave_server(self.bot.get_server(serverid))
                    del self.save_file["Servers"][serverid]
                else:
                    # advance to the next stage of starvation
                    parrot["StarvedLoops"] += 1
                    reset = True
            else:
                # healthy; reset for the next loop
                parrot["StarvedLoops"] = 0
                reset = True
            if reset:
                parrot["ChecksAlive"] += 1
                # New appetite: mean 50 scaled by 1.75 per starvation phase,
                # standard deviation 6.
                parrot["Appetite"] = round(random.normalvariate(50*(1.75**parrot["StarvedLoops"]), 6))
                parrot["Fullness"] = 0
                parrot["WarnedYet"] = False
                self.save_file["Servers"][serverid]["Feeders"].clear()
                # https://stackoverflow.com/questions/369898/difference-between-dict-clear-and-assigning-in-python
                # Re-seed the currently perched user so commands that index
                # feeders by UserWith keep working after the wipe.
                if parrot["UserWith"]:
                    feeders[parrot["UserWith"]] = copy.deepcopy(FEEDER_DEFAULT)
        dataIO.save_json(SAVE_FILEPATH, self.save_file)
async def display_collected(self):
"""Display a leaderboard in each server with how many credits
Parrot collected for users. Award CreditsCollected to each feeder."""
bank = self.bot.get_cog('Economy').bank
for serverid in self.save_file["Servers"]:
server = self.bot.get_server(serverid)
leaderboard = ("Here's how many credits I collected for "
"everyone I perched on today:\n\n")
leaderboard += "```py\n"
feeders = self.save_file["Servers"][serverid]["Feeders"]
perched_users = [feederid for feederid in feeders
if round(feeders[feederid]["CreditsCollected"]) > 0]
if not perched_users:
continue # nobody gets credits, skip this server
ranked = sorted(perched_users,
key=lambda idnum: feeders[idnum]["CreditsCollected"],
reverse=True)
max_creds_len = len(str(round(feeders[ranked[0]]["CreditsCollected"])))
for user_id in ranked:
user = server.get_member(user_id)
if len(user.display_name) > 26 - max_creds_len - 1:
name = user.display_name[22 - max_creds_len] + "..."
else:
name = user.display_name
leaderboard += name
collected = round(feeders[user_id]["CreditsCollected"])
bank.deposit_credits(user, collected)
leaderboard += " " * (26 - len(name) - len(str(collected)))
leaderboard += str(collected) + "\n"
leaderboard += "```"
await self.bot.send_message(server, leaderboard)
def add_server(self, server):
"""Add the server to the file if it isn't already in it."""
if server.id not in self.save_file["Servers"]:
self.save_file["Servers"][server.id] = copy.deepcopy(SERVER_DEFAULT)
self.save_file["Servers"][server.id]["Parrot"]["Appetite"] = round(random.normalvariate(50, 6))
dataIO.save_json(SAVE_FILEPATH, self.save_file)
print("{} New server \"{}\" found and added to Parrot data file!"
.format(datetime.datetime.now(), server.name))
    def update_looptimes(self, warn=True):
        """Update self.checktime for the latest StarveTime. If StarveTime
        has already passed today, self.checktime will be StarveTime tomorrow.
        Update self.perchtime for the latest StarveTime or PerchInterval.

        When *warn* is True and checktime changed, every server's WarnedYet
        flag is cleared so a fresh starvation warning can be sent for the
        new checktime.
        """
        # Update self.checktime
        starvetime = self.save_file["Global"]["StarveTime"]
        checktime = datetime.datetime.utcnow().replace(hour=starvetime[0],
                                                       minute=starvetime[1],
                                                       second=0,
                                                       microsecond=0)
        # If today's StarveTime already passed, the next check is tomorrow.
        if datetime.datetime.utcnow().time() >= checktime.time():
            checktime = checktime + datetime.timedelta(days=1)
        if self.checktime != checktime: # if StarveTime changed (this will always be true
                                        # when Parrot is first loaded due to self.checktime's
                                        # initial value)
            self.checktime = checktime
            if warn:
                for serverid in self.save_file["Servers"]:
                    self.save_file["Servers"][serverid]["Parrot"]["WarnedYet"] = False
                dataIO.save_json(SAVE_FILEPATH, self.save_file)
        # Update self.perchtime: start from yesterday's checktime and step
        # forward by PerchInterval until we reach the next future perch.
        interval = self.save_file["Global"]["PerchInterval"]
        self.perchtime = self.checktime + datetime.timedelta(days=-1)
        while self.perchtime < datetime.datetime.utcnow():
            self.perchtime = self.perchtime + datetime.timedelta(minutes=interval)
def collect_credits(self, serverid):
    """Calculate how many credits Parrot will collect during the perch."""
    server_data = self.save_file["Servers"][serverid]
    parrot = server_data["Parrot"]
    feeders = server_data["Feeders"]
    interval = self.save_file["Global"]["PerchInterval"]
    # Position (in minutes) within the 24h cycle relative to checktime.
    since_checktime = datetime.datetime.utcnow() - self.checktime
    current_minute = round(since_checktime.total_seconds() / 60) % 1440
    # Exponentially growing per-minute weight summed over this perch
    # interval, normalized by ~24568 (approximately the total weight of a
    # full 1440-minute day: sum of 1.003**i for i in range(1440)).
    multiplier = sum(1.003 ** minute
                     for minute in range(current_minute,
                                         current_minute + interval))
    multiplier = multiplier / 24568
    for feederid in feeders:
        # Feeding more than 50 pellets (average healthy appetite) is ignored.
        pellets = min(feeders[feederid]["PelletsFed"], 50)
        # 1.5 * parrot["Cost"] * pellets is exactly
        # how much the feeder would earn at the end of
        # the day if they fed right after checktime
        feeders[feederid]["CreditsCollected"] += 1.5 * parrot["Cost"] * pellets * multiplier
    dataIO.save_json(SAVE_FILEPATH, self.save_file)
def update_version(self):
    """Update the save file if necessary.

    Runs each schema migration in sequence (1 -> 2 -> 2.1 -> 2.2 -> 2.3),
    so an arbitrarily old save file is brought fully up to date in one
    call, then persists the result.
    """
    if "Version" not in self.save_file["Global"]:  # if version == 1
        for serverid in self.save_file["Servers"]:
            parrot = self.save_file["Servers"][serverid]["Parrot"]
            # In v1, StarveTime was a duration in seconds (later versions
            # store it as an [hour, minute] pair).
            starvetime = self.save_file["Global"]["StarveTime"]
            parrot["HoursAlive"] = round((starvetime * parrot["LoopsAlive"]) / 3600)
            # "LoopsAlive" is renamed to "ChecksAlive".
            parrot["ChecksAlive"] = parrot["LoopsAlive"]
            del parrot["LoopsAlive"]
            parrot["WarnedYet"] = False
        self.save_file["Global"]["StarveTime"] = [5, 0]
        self.save_file["Global"]["Version"] = "2"
    if self.save_file["Global"]["Version"] == "2":
        # NOTE(review): "StealAvailable" appears to move from per-feeder
        # data to the parrot itself here — confirm the intended nesting
        # against the upstream repository.
        for serverid in self.save_file["Servers"]:
            parrot = self.save_file["Servers"][serverid]["Parrot"]
            feeders = self.save_file["Servers"][serverid]["Feeders"]
            for feederid in feeders:
                if "StealAvailable" in feeders[feederid]:
                    feeders[feederid]["StolenFrom"] = []
            parrot["StealAvailable"] = True
        self.save_file["Global"]["Version"] = "2.1"
    if self.save_file["Global"]["Version"] == "2.1":
        # v2.2 adds per-feeder bookkeeping fields with fresh defaults.
        for serverid in self.save_file["Servers"]:
            feeders = self.save_file["Servers"][serverid]["Feeders"]
            for feederid in feeders:
                feeders[feederid]["CreditsCollected"] = 0
                feeders[feederid]["StolenFrom"] = []
                feeders[feederid]["AirhornUses"] = 0
                feeders[feederid]["HeistBoostAvailable"] = True
        self.save_file["Global"]["Version"] = "2.2"
    if self.save_file["Global"]["Version"] == "2.2":
        # v2.3 introduces the global perch interval (minutes).
        self.save_file["Global"]["PerchInterval"] = 20
        self.save_file["Global"]["Version"] = "2.3"
    dataIO.save_json(SAVE_FILEPATH, self.save_file)
def parrot_perched_on(self, server):
    """Return the user ID of whoever Parrot is perched on.
    This is for Heist.py to use for heist boost."""
    # Ensure the server has an entry before reading from it.
    self.add_server(server)
    server_entry = self.save_file["Servers"][server.id]
    return server_entry["Parrot"]["UserWith"]
def heist_boost_available(self, server, user, availability=True):
    """Return whether the user has a Heist boost available.
    Optionally set availability to False to set the user's HeistBoostAvailable to False.
    This is for Heist.py to use for heist boost."""
    self.add_server(server)  # make sure the server is in the data file
    feeder = self.save_file["Servers"][server.id]["Feeders"][user.id]
    # Strict identity check: only an explicit False consumes the boost.
    if availability is False:
        feeder["HeistBoostAvailable"] = False
        dataIO.save_json(SAVE_FILEPATH, self.save_file)
    return feeder["HeistBoostAvailable"]
def __unload(self):
    # Cancel the background loop task so it does not keep running after
    # this cog is removed (``__unload`` is presumably invoked by the bot
    # framework on cog unload — confirm against the framework docs).
    self.loop_task.cancel()
def dir_check():
    """Create a folder and save file for the cog if they don't exist."""
    folder = "data/KeaneCogs/parrot"
    if not os.path.exists(folder):
        print("Creating data/KeaneCogs/parrot folder...")
        os.makedirs(folder)
    # Also (re)create the save file when it is missing or unparseable.
    if not dataIO.is_valid_json(SAVE_FILEPATH):
        print("Creating default parrot.json...")
        dataIO.save_json(SAVE_FILEPATH, SAVE_DEFAULT)
def setup(bot):
    """Create a Parrot object.

    Cog entry point: ensures the data folder and save file exist,
    then registers the Parrot cog with the bot.
    """
    dir_check()
    bot.add_cog(Parrot(bot))
| {
"repo_name": "keanemind/Keane-Cogs",
"path": "parrot/parrot.py",
"copies": "1",
"size": "38270",
"license": "mit",
"hash": -723571083212629400,
"line_mean": 45.6138855055,
"line_max": 114,
"alpha_frac": 0.5722759342,
"autogenerated": false,
"ratio": 4.007329842931937,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5079605777131937,
"avg_score": null,
"num_lines": null
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.