id int32 0 252k | repo stringlengths 7 55 | path stringlengths 4 127 | func_name stringlengths 1 88 | original_string stringlengths 75 19.8k | language stringclasses 1
value | code stringlengths 51 19.8k | code_tokens list | docstring stringlengths 3 17.3k | docstring_tokens list | sha stringlengths 40 40 | url stringlengths 87 242 |
|---|---|---|---|---|---|---|---|---|---|---|---|
231,900 | odlgroup/odl | odl/tomo/backends/astra_cuda.py | astra_cuda_bp_scaling_factor | def astra_cuda_bp_scaling_factor(proj_space, reco_space, geometry):
"""Volume scaling accounting for differing adjoint definitions.
ASTRA defines the adjoint operator in terms of a fully discrete
setting (transposed "projection matrix") without any relation to
physical dimensions, which makes a re-scaling necessary to
translate it to spaces with physical dimensions.
Behavior of ASTRA changes slightly between versions, so we keep
track of it and adapt the scaling accordingly.
"""
# Angular integration weighting factor
# angle interval weight by approximate cell volume
angle_extent = geometry.motion_partition.extent
num_angles = geometry.motion_partition.shape
# TODO: this gives the wrong factor for Parallel3dEulerGeometry with
# 2 angles
scaling_factor = (angle_extent / num_angles).prod()
# Correct in case of non-weighted spaces
proj_extent = float(proj_space.partition.extent.prod())
proj_size = float(proj_space.partition.size)
proj_weighting = proj_extent / proj_size
scaling_factor *= (proj_space.weighting.const /
proj_weighting)
scaling_factor /= (reco_space.weighting.const /
reco_space.cell_volume)
if parse_version(ASTRA_VERSION) < parse_version('1.8rc1'):
if isinstance(geometry, Parallel2dGeometry):
# Scales with 1 / cell_volume
scaling_factor *= float(reco_space.cell_volume)
elif (isinstance(geometry, FanBeamGeometry)
and geometry.det_curvature_radius is None):
# Scales with 1 / cell_volume
scaling_factor *= float(reco_space.cell_volume)
# Additional magnification correction
src_radius = geometry.src_radius
det_radius = geometry.det_radius
scaling_factor *= ((src_radius + det_radius) / src_radius)
elif isinstance(geometry, Parallel3dAxisGeometry):
# Scales with voxel stride
# In 1.7, only cubic voxels are supported
voxel_stride = reco_space.cell_sides[0]
scaling_factor /= float(voxel_stride)
elif isinstance(geometry, ConeFlatGeometry):
# Scales with 1 / cell_volume
# In 1.7, only cubic voxels are supported
voxel_stride = reco_space.cell_sides[0]
scaling_factor /= float(voxel_stride)
# Magnification correction
src_radius = geometry.src_radius
det_radius = geometry.det_radius
scaling_factor *= ((src_radius + det_radius) / src_radius) ** 2
# Check if the development version of astra is used
if parse_version(ASTRA_VERSION) == parse_version('1.9.0dev'):
if isinstance(geometry, Parallel2dGeometry):
# Scales with 1 / cell_volume
scaling_factor *= float(reco_space.cell_volume)
elif (isinstance(geometry, FanBeamGeometry)
and geometry.det_curvature_radius is None):
# Scales with 1 / cell_volume
scaling_factor *= float(reco_space.cell_volume)
# Magnification correction
src_radius = geometry.src_radius
det_radius = geometry.det_radius
scaling_factor *= ((src_radius + det_radius) / src_radius)
elif isinstance(geometry, Parallel3dAxisGeometry):
# Scales with cell volume
# currently only square voxels are supported
scaling_factor /= reco_space.cell_volume
elif isinstance(geometry, ConeFlatGeometry):
# Scales with cell volume
scaling_factor /= reco_space.cell_volume
# Magnification correction (scaling = 1 / magnification ** 2)
src_radius = geometry.src_radius
det_radius = geometry.det_radius
scaling_factor *= ((src_radius + det_radius) / src_radius) ** 2
# Correction for scaled 1/r^2 factor in ASTRA's density weighting.
# This compensates for scaled voxels and pixels, as well as a
# missing factor src_radius ** 2 in the ASTRA BP with
# density weighting.
det_px_area = geometry.det_partition.cell_volume
scaling_factor *= (src_radius ** 2 * det_px_area ** 2)
else:
if isinstance(geometry, Parallel2dGeometry):
# Scales with 1 / cell_volume
scaling_factor *= float(reco_space.cell_volume)
elif (isinstance(geometry, FanBeamGeometry)
and geometry.det_curvature_radius is None):
# Scales with 1 / cell_volume
scaling_factor *= float(reco_space.cell_volume)
# Magnification correction
src_radius = geometry.src_radius
det_radius = geometry.det_radius
scaling_factor *= ((src_radius + det_radius) / src_radius)
elif isinstance(geometry, Parallel3dAxisGeometry):
# Scales with cell volume
# currently only square voxels are supported
scaling_factor /= reco_space.cell_volume
elif isinstance(geometry, ConeFlatGeometry):
# Scales with cell volume
scaling_factor /= reco_space.cell_volume
# Magnification correction (scaling = 1 / magnification ** 2)
src_radius = geometry.src_radius
det_radius = geometry.det_radius
scaling_factor *= ((src_radius + det_radius) / src_radius) ** 2
# Correction for scaled 1/r^2 factor in ASTRA's density weighting.
# This compensates for scaled voxels and pixels, as well as a
# missing factor src_radius ** 2 in the ASTRA BP with
# density weighting.
det_px_area = geometry.det_partition.cell_volume
scaling_factor *= (src_radius ** 2 * det_px_area ** 2 /
reco_space.cell_volume ** 2)
# TODO: add case with new ASTRA release
return scaling_factor | python | def astra_cuda_bp_scaling_factor(proj_space, reco_space, geometry):
# Angular integration weighting factor
# angle interval weight by approximate cell volume
angle_extent = geometry.motion_partition.extent
num_angles = geometry.motion_partition.shape
# TODO: this gives the wrong factor for Parallel3dEulerGeometry with
# 2 angles
scaling_factor = (angle_extent / num_angles).prod()
# Correct in case of non-weighted spaces
proj_extent = float(proj_space.partition.extent.prod())
proj_size = float(proj_space.partition.size)
proj_weighting = proj_extent / proj_size
scaling_factor *= (proj_space.weighting.const /
proj_weighting)
scaling_factor /= (reco_space.weighting.const /
reco_space.cell_volume)
if parse_version(ASTRA_VERSION) < parse_version('1.8rc1'):
if isinstance(geometry, Parallel2dGeometry):
# Scales with 1 / cell_volume
scaling_factor *= float(reco_space.cell_volume)
elif (isinstance(geometry, FanBeamGeometry)
and geometry.det_curvature_radius is None):
# Scales with 1 / cell_volume
scaling_factor *= float(reco_space.cell_volume)
# Additional magnification correction
src_radius = geometry.src_radius
det_radius = geometry.det_radius
scaling_factor *= ((src_radius + det_radius) / src_radius)
elif isinstance(geometry, Parallel3dAxisGeometry):
# Scales with voxel stride
# In 1.7, only cubic voxels are supported
voxel_stride = reco_space.cell_sides[0]
scaling_factor /= float(voxel_stride)
elif isinstance(geometry, ConeFlatGeometry):
# Scales with 1 / cell_volume
# In 1.7, only cubic voxels are supported
voxel_stride = reco_space.cell_sides[0]
scaling_factor /= float(voxel_stride)
# Magnification correction
src_radius = geometry.src_radius
det_radius = geometry.det_radius
scaling_factor *= ((src_radius + det_radius) / src_radius) ** 2
# Check if the development version of astra is used
if parse_version(ASTRA_VERSION) == parse_version('1.9.0dev'):
if isinstance(geometry, Parallel2dGeometry):
# Scales with 1 / cell_volume
scaling_factor *= float(reco_space.cell_volume)
elif (isinstance(geometry, FanBeamGeometry)
and geometry.det_curvature_radius is None):
# Scales with 1 / cell_volume
scaling_factor *= float(reco_space.cell_volume)
# Magnification correction
src_radius = geometry.src_radius
det_radius = geometry.det_radius
scaling_factor *= ((src_radius + det_radius) / src_radius)
elif isinstance(geometry, Parallel3dAxisGeometry):
# Scales with cell volume
# currently only square voxels are supported
scaling_factor /= reco_space.cell_volume
elif isinstance(geometry, ConeFlatGeometry):
# Scales with cell volume
scaling_factor /= reco_space.cell_volume
# Magnification correction (scaling = 1 / magnification ** 2)
src_radius = geometry.src_radius
det_radius = geometry.det_radius
scaling_factor *= ((src_radius + det_radius) / src_radius) ** 2
# Correction for scaled 1/r^2 factor in ASTRA's density weighting.
# This compensates for scaled voxels and pixels, as well as a
# missing factor src_radius ** 2 in the ASTRA BP with
# density weighting.
det_px_area = geometry.det_partition.cell_volume
scaling_factor *= (src_radius ** 2 * det_px_area ** 2)
else:
if isinstance(geometry, Parallel2dGeometry):
# Scales with 1 / cell_volume
scaling_factor *= float(reco_space.cell_volume)
elif (isinstance(geometry, FanBeamGeometry)
and geometry.det_curvature_radius is None):
# Scales with 1 / cell_volume
scaling_factor *= float(reco_space.cell_volume)
# Magnification correction
src_radius = geometry.src_radius
det_radius = geometry.det_radius
scaling_factor *= ((src_radius + det_radius) / src_radius)
elif isinstance(geometry, Parallel3dAxisGeometry):
# Scales with cell volume
# currently only square voxels are supported
scaling_factor /= reco_space.cell_volume
elif isinstance(geometry, ConeFlatGeometry):
# Scales with cell volume
scaling_factor /= reco_space.cell_volume
# Magnification correction (scaling = 1 / magnification ** 2)
src_radius = geometry.src_radius
det_radius = geometry.det_radius
scaling_factor *= ((src_radius + det_radius) / src_radius) ** 2
# Correction for scaled 1/r^2 factor in ASTRA's density weighting.
# This compensates for scaled voxels and pixels, as well as a
# missing factor src_radius ** 2 in the ASTRA BP with
# density weighting.
det_px_area = geometry.det_partition.cell_volume
scaling_factor *= (src_radius ** 2 * det_px_area ** 2 /
reco_space.cell_volume ** 2)
# TODO: add case with new ASTRA release
return scaling_factor | [
"def",
"astra_cuda_bp_scaling_factor",
"(",
"proj_space",
",",
"reco_space",
",",
"geometry",
")",
":",
"# Angular integration weighting factor",
"# angle interval weight by approximate cell volume",
"angle_extent",
"=",
"geometry",
".",
"motion_partition",
".",
"extent",
"num_... | Volume scaling accounting for differing adjoint definitions.
ASTRA defines the adjoint operator in terms of a fully discrete
setting (transposed "projection matrix") without any relation to
physical dimensions, which makes a re-scaling necessary to
translate it to spaces with physical dimensions.
Behavior of ASTRA changes slightly between versions, so we keep
track of it and adapt the scaling accordingly. | [
"Volume",
"scaling",
"accounting",
"for",
"differing",
"adjoint",
"definitions",
"."
] | b8443f6aca90e191ba36c91d32253c5a36249a6c | https://github.com/odlgroup/odl/blob/b8443f6aca90e191ba36c91d32253c5a36249a6c/odl/tomo/backends/astra_cuda.py#L342-L462 |
231,901 | odlgroup/odl | odl/tomo/backends/astra_cuda.py | AstraCudaProjectorImpl.call_forward | def call_forward(self, vol_data, out=None):
"""Run an ASTRA forward projection on the given data using the GPU.
Parameters
----------
vol_data : ``reco_space`` element
Volume data to which the projector is applied.
out : ``proj_space`` element, optional
Element of the projection space to which the result is written. If
``None``, an element in `proj_space` is created.
Returns
-------
out : ``proj_space`` element
Projection data resulting from the application of the projector.
If ``out`` was provided, the returned object is a reference to it.
"""
with self._mutex:
assert vol_data in self.reco_space
if out is not None:
assert out in self.proj_space
else:
out = self.proj_space.element()
# Copy data to GPU memory
if self.geometry.ndim == 2:
astra.data2d.store(self.vol_id, vol_data.asarray())
elif self.geometry.ndim == 3:
astra.data3d.store(self.vol_id, vol_data.asarray())
else:
raise RuntimeError('unknown ndim')
# Run algorithm
astra.algorithm.run(self.algo_id)
# Copy result to host
if self.geometry.ndim == 2:
out[:] = self.out_array
elif self.geometry.ndim == 3:
out[:] = np.swapaxes(self.out_array, 0, 1).reshape(
self.proj_space.shape)
# Fix scaling to weight by pixel size
if isinstance(self.geometry, Parallel2dGeometry):
# parallel2d scales with pixel stride
out *= 1 / float(self.geometry.det_partition.cell_volume)
return out | python | def call_forward(self, vol_data, out=None):
with self._mutex:
assert vol_data in self.reco_space
if out is not None:
assert out in self.proj_space
else:
out = self.proj_space.element()
# Copy data to GPU memory
if self.geometry.ndim == 2:
astra.data2d.store(self.vol_id, vol_data.asarray())
elif self.geometry.ndim == 3:
astra.data3d.store(self.vol_id, vol_data.asarray())
else:
raise RuntimeError('unknown ndim')
# Run algorithm
astra.algorithm.run(self.algo_id)
# Copy result to host
if self.geometry.ndim == 2:
out[:] = self.out_array
elif self.geometry.ndim == 3:
out[:] = np.swapaxes(self.out_array, 0, 1).reshape(
self.proj_space.shape)
# Fix scaling to weight by pixel size
if isinstance(self.geometry, Parallel2dGeometry):
# parallel2d scales with pixel stride
out *= 1 / float(self.geometry.det_partition.cell_volume)
return out | [
"def",
"call_forward",
"(",
"self",
",",
"vol_data",
",",
"out",
"=",
"None",
")",
":",
"with",
"self",
".",
"_mutex",
":",
"assert",
"vol_data",
"in",
"self",
".",
"reco_space",
"if",
"out",
"is",
"not",
"None",
":",
"assert",
"out",
"in",
"self",
"... | Run an ASTRA forward projection on the given data using the GPU.
Parameters
----------
vol_data : ``reco_space`` element
Volume data to which the projector is applied.
out : ``proj_space`` element, optional
Element of the projection space to which the result is written. If
``None``, an element in `proj_space` is created.
Returns
-------
out : ``proj_space`` element
Projection data resulting from the application of the projector.
If ``out`` was provided, the returned object is a reference to it. | [
"Run",
"an",
"ASTRA",
"forward",
"projection",
"on",
"the",
"given",
"data",
"using",
"the",
"GPU",
"."
] | b8443f6aca90e191ba36c91d32253c5a36249a6c | https://github.com/odlgroup/odl/blob/b8443f6aca90e191ba36c91d32253c5a36249a6c/odl/tomo/backends/astra_cuda.py#L73-L120 |
231,902 | odlgroup/odl | odl/tomo/backends/astra_cuda.py | AstraCudaProjectorImpl.create_ids | def create_ids(self):
"""Create ASTRA objects."""
# Create input and output arrays
if self.geometry.motion_partition.ndim == 1:
motion_shape = self.geometry.motion_partition.shape
else:
# Need to flatten 2- or 3-dimensional angles into one axis
motion_shape = (np.prod(self.geometry.motion_partition.shape),)
proj_shape = motion_shape + self.geometry.det_partition.shape
proj_ndim = len(proj_shape)
if proj_ndim == 2:
astra_proj_shape = proj_shape
astra_vol_shape = self.reco_space.shape
elif proj_ndim == 3:
# The `u` and `v` axes of the projection data are swapped,
# see explanation in `astra_*_3d_geom_to_vec`.
astra_proj_shape = (proj_shape[1], proj_shape[0], proj_shape[2])
astra_vol_shape = self.reco_space.shape
self.in_array = np.empty(astra_vol_shape,
dtype='float32', order='C')
self.out_array = np.empty(astra_proj_shape,
dtype='float32', order='C')
# Create ASTRA data structures
vol_geom = astra_volume_geometry(self.reco_space)
proj_geom = astra_projection_geometry(self.geometry)
self.vol_id = astra_data(vol_geom,
datatype='volume',
ndim=self.reco_space.ndim,
data=self.in_array,
allow_copy=False)
self.proj_id = astra_projector('nearest', vol_geom, proj_geom,
ndim=proj_ndim, impl='cuda')
self.sino_id = astra_data(proj_geom,
datatype='projection',
ndim=proj_ndim,
data=self.out_array,
allow_copy=False)
# Create algorithm
self.algo_id = astra_algorithm(
'forward', proj_ndim, self.vol_id, self.sino_id,
proj_id=self.proj_id, impl='cuda') | python | def create_ids(self):
# Create input and output arrays
if self.geometry.motion_partition.ndim == 1:
motion_shape = self.geometry.motion_partition.shape
else:
# Need to flatten 2- or 3-dimensional angles into one axis
motion_shape = (np.prod(self.geometry.motion_partition.shape),)
proj_shape = motion_shape + self.geometry.det_partition.shape
proj_ndim = len(proj_shape)
if proj_ndim == 2:
astra_proj_shape = proj_shape
astra_vol_shape = self.reco_space.shape
elif proj_ndim == 3:
# The `u` and `v` axes of the projection data are swapped,
# see explanation in `astra_*_3d_geom_to_vec`.
astra_proj_shape = (proj_shape[1], proj_shape[0], proj_shape[2])
astra_vol_shape = self.reco_space.shape
self.in_array = np.empty(astra_vol_shape,
dtype='float32', order='C')
self.out_array = np.empty(astra_proj_shape,
dtype='float32', order='C')
# Create ASTRA data structures
vol_geom = astra_volume_geometry(self.reco_space)
proj_geom = astra_projection_geometry(self.geometry)
self.vol_id = astra_data(vol_geom,
datatype='volume',
ndim=self.reco_space.ndim,
data=self.in_array,
allow_copy=False)
self.proj_id = astra_projector('nearest', vol_geom, proj_geom,
ndim=proj_ndim, impl='cuda')
self.sino_id = astra_data(proj_geom,
datatype='projection',
ndim=proj_ndim,
data=self.out_array,
allow_copy=False)
# Create algorithm
self.algo_id = astra_algorithm(
'forward', proj_ndim, self.vol_id, self.sino_id,
proj_id=self.proj_id, impl='cuda') | [
"def",
"create_ids",
"(",
"self",
")",
":",
"# Create input and output arrays",
"if",
"self",
".",
"geometry",
".",
"motion_partition",
".",
"ndim",
"==",
"1",
":",
"motion_shape",
"=",
"self",
".",
"geometry",
".",
"motion_partition",
".",
"shape",
"else",
":... | Create ASTRA objects. | [
"Create",
"ASTRA",
"objects",
"."
] | b8443f6aca90e191ba36c91d32253c5a36249a6c | https://github.com/odlgroup/odl/blob/b8443f6aca90e191ba36c91d32253c5a36249a6c/odl/tomo/backends/astra_cuda.py#L122-L169 |
231,903 | odlgroup/odl | odl/tomo/backends/astra_cuda.py | AstraCudaBackProjectorImpl.call_backward | def call_backward(self, proj_data, out=None):
"""Run an ASTRA back-projection on the given data using the GPU.
Parameters
----------
proj_data : ``proj_space`` element
Projection data to which the back-projector is applied.
out : ``reco_space`` element, optional
Element of the reconstruction space to which the result is written.
If ``None``, an element in ``reco_space`` is created.
Returns
-------
out : ``reco_space`` element
Reconstruction data resulting from the application of the
back-projector. If ``out`` was provided, the returned object is a
reference to it.
"""
with self._mutex:
assert proj_data in self.proj_space
if out is not None:
assert out in self.reco_space
else:
out = self.reco_space.element()
# Copy data to GPU memory
if self.geometry.ndim == 2:
astra.data2d.store(self.sino_id, proj_data.asarray())
elif self.geometry.ndim == 3:
shape = (-1,) + self.geometry.det_partition.shape
reshaped_proj_data = proj_data.asarray().reshape(shape)
swapped_proj_data = np.ascontiguousarray(
np.swapaxes(reshaped_proj_data, 0, 1))
astra.data3d.store(self.sino_id, swapped_proj_data)
# Run algorithm
astra.algorithm.run(self.algo_id)
# Copy result to CPU memory
out[:] = self.out_array
# Fix scaling to weight by pixel/voxel size
out *= astra_cuda_bp_scaling_factor(
self.proj_space, self.reco_space, self.geometry)
return out | python | def call_backward(self, proj_data, out=None):
with self._mutex:
assert proj_data in self.proj_space
if out is not None:
assert out in self.reco_space
else:
out = self.reco_space.element()
# Copy data to GPU memory
if self.geometry.ndim == 2:
astra.data2d.store(self.sino_id, proj_data.asarray())
elif self.geometry.ndim == 3:
shape = (-1,) + self.geometry.det_partition.shape
reshaped_proj_data = proj_data.asarray().reshape(shape)
swapped_proj_data = np.ascontiguousarray(
np.swapaxes(reshaped_proj_data, 0, 1))
astra.data3d.store(self.sino_id, swapped_proj_data)
# Run algorithm
astra.algorithm.run(self.algo_id)
# Copy result to CPU memory
out[:] = self.out_array
# Fix scaling to weight by pixel/voxel size
out *= astra_cuda_bp_scaling_factor(
self.proj_space, self.reco_space, self.geometry)
return out | [
"def",
"call_backward",
"(",
"self",
",",
"proj_data",
",",
"out",
"=",
"None",
")",
":",
"with",
"self",
".",
"_mutex",
":",
"assert",
"proj_data",
"in",
"self",
".",
"proj_space",
"if",
"out",
"is",
"not",
"None",
":",
"assert",
"out",
"in",
"self",
... | Run an ASTRA back-projection on the given data using the GPU.
Parameters
----------
proj_data : ``proj_space`` element
Projection data to which the back-projector is applied.
out : ``reco_space`` element, optional
Element of the reconstruction space to which the result is written.
If ``None``, an element in ``reco_space`` is created.
Returns
-------
out : ``reco_space`` element
Reconstruction data resulting from the application of the
back-projector. If ``out`` was provided, the returned object is a
reference to it. | [
"Run",
"an",
"ASTRA",
"back",
"-",
"projection",
"on",
"the",
"given",
"data",
"using",
"the",
"GPU",
"."
] | b8443f6aca90e191ba36c91d32253c5a36249a6c | https://github.com/odlgroup/odl/blob/b8443f6aca90e191ba36c91d32253c5a36249a6c/odl/tomo/backends/astra_cuda.py#L226-L271 |
231,904 | odlgroup/odl | odl/ufunc_ops/ufunc_ops.py | find_min_signature | def find_min_signature(ufunc, dtypes_in):
"""Determine the minimum matching ufunc signature for given dtypes.
Parameters
----------
ufunc : str or numpy.ufunc
Ufunc whose signatures are to be considered.
dtypes_in :
Sequence of objects specifying input dtypes. Its length must match
the number of inputs of ``ufunc``, and its entries must be understood
by `numpy.dtype`.
Returns
-------
signature : str
Minimum matching ufunc signature, see, e.g., ``np.add.types``
for examples.
Raises
------
TypeError
If no valid signature is found.
"""
if not isinstance(ufunc, np.ufunc):
ufunc = getattr(np, str(ufunc))
dtypes_in = [np.dtype(dt_in) for dt_in in dtypes_in]
tcs_in = [dt.base.char for dt in dtypes_in]
if len(tcs_in) != ufunc.nin:
raise ValueError('expected {} input dtype(s) for {}, got {}'
''.format(ufunc.nin, ufunc, len(tcs_in)))
valid_sigs = []
for sig in ufunc.types:
sig_tcs_in, sig_tcs_out = sig.split('->')
if all(np.dtype(tc_in) <= np.dtype(sig_tc_in) and
sig_tc_in in SUPP_TYPECODES
for tc_in, sig_tc_in in zip(tcs_in, sig_tcs_in)):
valid_sigs.append(sig)
if not valid_sigs:
raise TypeError('no valid signature found for {} and input dtypes {}'
''.format(ufunc, tuple(dt.name for dt in dtypes_in)))
def in_dtypes(sig):
"""Comparison key function for input dtypes of a signature."""
sig_tcs_in = sig.split('->')[0]
return tuple(np.dtype(tc) for tc in sig_tcs_in)
return min(valid_sigs, key=in_dtypes) | python | def find_min_signature(ufunc, dtypes_in):
if not isinstance(ufunc, np.ufunc):
ufunc = getattr(np, str(ufunc))
dtypes_in = [np.dtype(dt_in) for dt_in in dtypes_in]
tcs_in = [dt.base.char for dt in dtypes_in]
if len(tcs_in) != ufunc.nin:
raise ValueError('expected {} input dtype(s) for {}, got {}'
''.format(ufunc.nin, ufunc, len(tcs_in)))
valid_sigs = []
for sig in ufunc.types:
sig_tcs_in, sig_tcs_out = sig.split('->')
if all(np.dtype(tc_in) <= np.dtype(sig_tc_in) and
sig_tc_in in SUPP_TYPECODES
for tc_in, sig_tc_in in zip(tcs_in, sig_tcs_in)):
valid_sigs.append(sig)
if not valid_sigs:
raise TypeError('no valid signature found for {} and input dtypes {}'
''.format(ufunc, tuple(dt.name for dt in dtypes_in)))
def in_dtypes(sig):
"""Comparison key function for input dtypes of a signature."""
sig_tcs_in = sig.split('->')[0]
return tuple(np.dtype(tc) for tc in sig_tcs_in)
return min(valid_sigs, key=in_dtypes) | [
"def",
"find_min_signature",
"(",
"ufunc",
",",
"dtypes_in",
")",
":",
"if",
"not",
"isinstance",
"(",
"ufunc",
",",
"np",
".",
"ufunc",
")",
":",
"ufunc",
"=",
"getattr",
"(",
"np",
",",
"str",
"(",
"ufunc",
")",
")",
"dtypes_in",
"=",
"[",
"np",
... | Determine the minimum matching ufunc signature for given dtypes.
Parameters
----------
ufunc : str or numpy.ufunc
Ufunc whose signatures are to be considered.
dtypes_in :
Sequence of objects specifying input dtypes. Its length must match
the number of inputs of ``ufunc``, and its entries must be understood
by `numpy.dtype`.
Returns
-------
signature : str
Minimum matching ufunc signature, see, e.g., ``np.add.types``
for examples.
Raises
------
TypeError
If no valid signature is found. | [
"Determine",
"the",
"minimum",
"matching",
"ufunc",
"signature",
"for",
"given",
"dtypes",
"."
] | b8443f6aca90e191ba36c91d32253c5a36249a6c | https://github.com/odlgroup/odl/blob/b8443f6aca90e191ba36c91d32253c5a36249a6c/odl/ufunc_ops/ufunc_ops.py#L27-L77 |
231,905 | odlgroup/odl | odl/ufunc_ops/ufunc_ops.py | gradient_factory | def gradient_factory(name):
"""Create gradient `Functional` for some ufuncs."""
if name == 'sin':
def gradient(self):
"""Return the gradient operator."""
return cos(self.domain)
elif name == 'cos':
def gradient(self):
"""Return the gradient operator."""
return -sin(self.domain)
elif name == 'tan':
def gradient(self):
"""Return the gradient operator."""
return 1 + square(self.domain) * self
elif name == 'sqrt':
def gradient(self):
"""Return the gradient operator."""
return FunctionalQuotient(ConstantFunctional(self.domain, 0.5),
self)
elif name == 'square':
def gradient(self):
"""Return the gradient operator."""
return ScalingFunctional(self.domain, 2.0)
elif name == 'log':
def gradient(self):
"""Return the gradient operator."""
return reciprocal(self.domain)
elif name == 'exp':
def gradient(self):
"""Return the gradient operator."""
return self
elif name == 'reciprocal':
def gradient(self):
"""Return the gradient operator."""
return FunctionalQuotient(ConstantFunctional(self.domain, -1.0),
square(self.domain))
elif name == 'sinh':
def gradient(self):
"""Return the gradient operator."""
return cosh(self.domain)
elif name == 'cosh':
def gradient(self):
"""Return the gradient operator."""
return sinh(self.domain)
else:
# Fallback to default
gradient = Functional.gradient
return gradient | python | def gradient_factory(name):
if name == 'sin':
def gradient(self):
"""Return the gradient operator."""
return cos(self.domain)
elif name == 'cos':
def gradient(self):
"""Return the gradient operator."""
return -sin(self.domain)
elif name == 'tan':
def gradient(self):
"""Return the gradient operator."""
return 1 + square(self.domain) * self
elif name == 'sqrt':
def gradient(self):
"""Return the gradient operator."""
return FunctionalQuotient(ConstantFunctional(self.domain, 0.5),
self)
elif name == 'square':
def gradient(self):
"""Return the gradient operator."""
return ScalingFunctional(self.domain, 2.0)
elif name == 'log':
def gradient(self):
"""Return the gradient operator."""
return reciprocal(self.domain)
elif name == 'exp':
def gradient(self):
"""Return the gradient operator."""
return self
elif name == 'reciprocal':
def gradient(self):
"""Return the gradient operator."""
return FunctionalQuotient(ConstantFunctional(self.domain, -1.0),
square(self.domain))
elif name == 'sinh':
def gradient(self):
"""Return the gradient operator."""
return cosh(self.domain)
elif name == 'cosh':
def gradient(self):
"""Return the gradient operator."""
return sinh(self.domain)
else:
# Fallback to default
gradient = Functional.gradient
return gradient | [
"def",
"gradient_factory",
"(",
"name",
")",
":",
"if",
"name",
"==",
"'sin'",
":",
"def",
"gradient",
"(",
"self",
")",
":",
"\"\"\"Return the gradient operator.\"\"\"",
"return",
"cos",
"(",
"self",
".",
"domain",
")",
"elif",
"name",
"==",
"'cos'",
":",
... | Create gradient `Functional` for some ufuncs. | [
"Create",
"gradient",
"Functional",
"for",
"some",
"ufuncs",
"."
] | b8443f6aca90e191ba36c91d32253c5a36249a6c | https://github.com/odlgroup/odl/blob/b8443f6aca90e191ba36c91d32253c5a36249a6c/odl/ufunc_ops/ufunc_ops.py#L105-L154 |
231,906 | odlgroup/odl | odl/ufunc_ops/ufunc_ops.py | derivative_factory | def derivative_factory(name):
"""Create derivative function for some ufuncs."""
if name == 'sin':
def derivative(self, point):
"""Return the derivative operator."""
return MultiplyOperator(cos(self.domain)(point))
elif name == 'cos':
def derivative(self, point):
"""Return the derivative operator."""
point = self.domain.element(point)
return MultiplyOperator(-sin(self.domain)(point))
elif name == 'tan':
def derivative(self, point):
"""Return the derivative operator."""
return MultiplyOperator(1 + self(point) ** 2)
elif name == 'sqrt':
def derivative(self, point):
"""Return the derivative operator."""
return MultiplyOperator(0.5 / self(point))
elif name == 'square':
def derivative(self, point):
"""Return the derivative operator."""
point = self.domain.element(point)
return MultiplyOperator(2.0 * point)
elif name == 'log':
def derivative(self, point):
"""Return the derivative operator."""
point = self.domain.element(point)
return MultiplyOperator(1.0 / point)
elif name == 'exp':
def derivative(self, point):
"""Return the derivative operator."""
return MultiplyOperator(self(point))
elif name == 'reciprocal':
def derivative(self, point):
"""Return the derivative operator."""
point = self.domain.element(point)
return MultiplyOperator(-self(point) ** 2)
elif name == 'sinh':
def derivative(self, point):
"""Return the derivative operator."""
point = self.domain.element(point)
return MultiplyOperator(cosh(self.domain)(point))
elif name == 'cosh':
def derivative(self, point):
"""Return the derivative operator."""
return MultiplyOperator(sinh(self.domain)(point))
else:
# Fallback to default
derivative = Operator.derivative
return derivative | python | def derivative_factory(name):
if name == 'sin':
def derivative(self, point):
"""Return the derivative operator."""
return MultiplyOperator(cos(self.domain)(point))
elif name == 'cos':
def derivative(self, point):
"""Return the derivative operator."""
point = self.domain.element(point)
return MultiplyOperator(-sin(self.domain)(point))
elif name == 'tan':
def derivative(self, point):
"""Return the derivative operator."""
return MultiplyOperator(1 + self(point) ** 2)
elif name == 'sqrt':
def derivative(self, point):
"""Return the derivative operator."""
return MultiplyOperator(0.5 / self(point))
elif name == 'square':
def derivative(self, point):
"""Return the derivative operator."""
point = self.domain.element(point)
return MultiplyOperator(2.0 * point)
elif name == 'log':
def derivative(self, point):
"""Return the derivative operator."""
point = self.domain.element(point)
return MultiplyOperator(1.0 / point)
elif name == 'exp':
def derivative(self, point):
"""Return the derivative operator."""
return MultiplyOperator(self(point))
elif name == 'reciprocal':
def derivative(self, point):
"""Return the derivative operator."""
point = self.domain.element(point)
return MultiplyOperator(-self(point) ** 2)
elif name == 'sinh':
def derivative(self, point):
"""Return the derivative operator."""
point = self.domain.element(point)
return MultiplyOperator(cosh(self.domain)(point))
elif name == 'cosh':
def derivative(self, point):
"""Return the derivative operator."""
return MultiplyOperator(sinh(self.domain)(point))
else:
# Fallback to default
derivative = Operator.derivative
return derivative | [
"def",
"derivative_factory",
"(",
"name",
")",
":",
"if",
"name",
"==",
"'sin'",
":",
"def",
"derivative",
"(",
"self",
",",
"point",
")",
":",
"\"\"\"Return the derivative operator.\"\"\"",
"return",
"MultiplyOperator",
"(",
"cos",
"(",
"self",
".",
"domain",
... | Create derivative function for some ufuncs. | [
"Create",
"derivative",
"function",
"for",
"some",
"ufuncs",
"."
] | b8443f6aca90e191ba36c91d32253c5a36249a6c | https://github.com/odlgroup/odl/blob/b8443f6aca90e191ba36c91d32253c5a36249a6c/odl/ufunc_ops/ufunc_ops.py#L157-L209 |
231,907 | odlgroup/odl | odl/ufunc_ops/ufunc_ops.py | ufunc_functional_factory | def ufunc_functional_factory(name, nargin, nargout, docstring):
"""Create a ufunc `Functional` from a given specification."""
assert 0 <= nargin <= 2
def __init__(self, field):
"""Initialize an instance.
Parameters
----------
field : `Field`
The domain of the functional.
"""
if not isinstance(field, Field):
raise TypeError('`field` {!r} not a `Field`'.format(space))
if _is_integer_only_ufunc(name):
raise ValueError("ufunc '{}' only defined with integral dtype"
"".format(name))
linear = name in LINEAR_UFUNCS
Functional.__init__(self, space=field, linear=linear)
def _call(self, x):
"""Return ``self(x)``."""
if nargin == 1:
return getattr(np, name)(x)
else:
return getattr(np, name)(*x)
def __repr__(self):
"""Return ``repr(self)``."""
return '{}({!r})'.format(name, self.domain)
# Create example (also functions as doctest)
if nargin != 1:
raise NotImplementedError('Currently not suppored')
if nargout != 1:
raise NotImplementedError('Currently not suppored')
space = RealNumbers()
val = 1.0
arg = '{}'.format(val)
with np.errstate(all='ignore'):
result = np.float64(getattr(np, name)(val))
examples_docstring = RAW_EXAMPLES_DOCSTRING.format(space=space, name=name,
arg=arg, result=result)
full_docstring = docstring + examples_docstring
attributes = {"__init__": __init__,
"_call": _call,
"gradient": property(gradient_factory(name)),
"__repr__": __repr__,
"__doc__": full_docstring}
full_name = name + '_op'
return type(full_name, (Functional,), attributes) | python | def ufunc_functional_factory(name, nargin, nargout, docstring):
assert 0 <= nargin <= 2
def __init__(self, field):
"""Initialize an instance.
Parameters
----------
field : `Field`
The domain of the functional.
"""
if not isinstance(field, Field):
raise TypeError('`field` {!r} not a `Field`'.format(space))
if _is_integer_only_ufunc(name):
raise ValueError("ufunc '{}' only defined with integral dtype"
"".format(name))
linear = name in LINEAR_UFUNCS
Functional.__init__(self, space=field, linear=linear)
def _call(self, x):
"""Return ``self(x)``."""
if nargin == 1:
return getattr(np, name)(x)
else:
return getattr(np, name)(*x)
def __repr__(self):
"""Return ``repr(self)``."""
return '{}({!r})'.format(name, self.domain)
# Create example (also functions as doctest)
if nargin != 1:
raise NotImplementedError('Currently not suppored')
if nargout != 1:
raise NotImplementedError('Currently not suppored')
space = RealNumbers()
val = 1.0
arg = '{}'.format(val)
with np.errstate(all='ignore'):
result = np.float64(getattr(np, name)(val))
examples_docstring = RAW_EXAMPLES_DOCSTRING.format(space=space, name=name,
arg=arg, result=result)
full_docstring = docstring + examples_docstring
attributes = {"__init__": __init__,
"_call": _call,
"gradient": property(gradient_factory(name)),
"__repr__": __repr__,
"__doc__": full_docstring}
full_name = name + '_op'
return type(full_name, (Functional,), attributes) | [
"def",
"ufunc_functional_factory",
"(",
"name",
",",
"nargin",
",",
"nargout",
",",
"docstring",
")",
":",
"assert",
"0",
"<=",
"nargin",
"<=",
"2",
"def",
"__init__",
"(",
"self",
",",
"field",
")",
":",
"\"\"\"Initialize an instance.\n\n Parameters\n ... | Create a ufunc `Functional` from a given specification. | [
"Create",
"a",
"ufunc",
"Functional",
"from",
"a",
"given",
"specification",
"."
] | b8443f6aca90e191ba36c91d32253c5a36249a6c | https://github.com/odlgroup/odl/blob/b8443f6aca90e191ba36c91d32253c5a36249a6c/odl/ufunc_ops/ufunc_ops.py#L308-L368 |
231,908 | odlgroup/odl | odl/solvers/nonsmooth/primal_dual_hybrid_gradient.py | pdhg_stepsize | def pdhg_stepsize(L, tau=None, sigma=None):
r"""Default step sizes for `pdhg`.
Parameters
----------
L : `Operator` or float
Operator or norm of the operator that are used in the `pdhg` method.
If it is an `Operator`, the norm is computed with
``Operator.norm(estimate=True)``.
tau : positive float, optional
Use this value for ``tau`` instead of computing it from the
operator norms, see Notes.
sigma : positive float, optional
The ``sigma`` step size parameters for the dual update.
Returns
-------
tau : float
The ``tau`` step size parameter for the primal update.
sigma : tuple of float
The ``sigma`` step size parameter for the dual update.
Notes
-----
To guarantee convergence, the parameters :math:`\tau`, :math:`\sigma`
and :math:`L` need to satisfy
.. math::
\tau \sigma \|L\|^2 < 1
This function has 4 options, :math:`\tau`/:math:`\sigma` given or not
given.
- Neither :math:`\tau` nor :math:`\sigma` are given, they are chosen as
.. math::
\tau = \sigma = \frac{\sqrt{0.9}}{\|L\|}
- If only :math:`\sigma` is given, :math:`\tau` is set to
.. math::
\tau = \frac{0.9}{\sigma \|L\|^2}
- If only :math:`\tau` is given, :math:`\sigma` is set
to
.. math::
\sigma = \frac{0.9}{\tau \|L\|^2}
- If both are given, they are returned as-is without further validation.
"""
if tau is not None and sigma is not None:
return float(tau), float(sigma)
L_norm = L.norm(estimate=True) if isinstance(L, Operator) else float(L)
if tau is None and sigma is None:
tau = sigma = np.sqrt(0.9) / L_norm
return tau, sigma
elif tau is None:
tau = 0.9 / (sigma * L_norm ** 2)
return tau, float(sigma)
else: # sigma is None
sigma = 0.9 / (tau * L_norm ** 2)
return float(tau), sigma | python | def pdhg_stepsize(L, tau=None, sigma=None):
r"""Default step sizes for `pdhg`.
Parameters
----------
L : `Operator` or float
Operator or norm of the operator that are used in the `pdhg` method.
If it is an `Operator`, the norm is computed with
``Operator.norm(estimate=True)``.
tau : positive float, optional
Use this value for ``tau`` instead of computing it from the
operator norms, see Notes.
sigma : positive float, optional
The ``sigma`` step size parameters for the dual update.
Returns
-------
tau : float
The ``tau`` step size parameter for the primal update.
sigma : tuple of float
The ``sigma`` step size parameter for the dual update.
Notes
-----
To guarantee convergence, the parameters :math:`\tau`, :math:`\sigma`
and :math:`L` need to satisfy
.. math::
\tau \sigma \|L\|^2 < 1
This function has 4 options, :math:`\tau`/:math:`\sigma` given or not
given.
- Neither :math:`\tau` nor :math:`\sigma` are given, they are chosen as
.. math::
\tau = \sigma = \frac{\sqrt{0.9}}{\|L\|}
- If only :math:`\sigma` is given, :math:`\tau` is set to
.. math::
\tau = \frac{0.9}{\sigma \|L\|^2}
- If only :math:`\tau` is given, :math:`\sigma` is set
to
.. math::
\sigma = \frac{0.9}{\tau \|L\|^2}
- If both are given, they are returned as-is without further validation.
"""
if tau is not None and sigma is not None:
return float(tau), float(sigma)
L_norm = L.norm(estimate=True) if isinstance(L, Operator) else float(L)
if tau is None and sigma is None:
tau = sigma = np.sqrt(0.9) / L_norm
return tau, sigma
elif tau is None:
tau = 0.9 / (sigma * L_norm ** 2)
return tau, float(sigma)
else: # sigma is None
sigma = 0.9 / (tau * L_norm ** 2)
return float(tau), sigma | [
"def",
"pdhg_stepsize",
"(",
"L",
",",
"tau",
"=",
"None",
",",
"sigma",
"=",
"None",
")",
":",
"if",
"tau",
"is",
"not",
"None",
"and",
"sigma",
"is",
"not",
"None",
":",
"return",
"float",
"(",
"tau",
")",
",",
"float",
"(",
"sigma",
")",
"L_no... | r"""Default step sizes for `pdhg`.
Parameters
----------
L : `Operator` or float
Operator or norm of the operator that are used in the `pdhg` method.
If it is an `Operator`, the norm is computed with
``Operator.norm(estimate=True)``.
tau : positive float, optional
Use this value for ``tau`` instead of computing it from the
operator norms, see Notes.
sigma : positive float, optional
The ``sigma`` step size parameters for the dual update.
Returns
-------
tau : float
The ``tau`` step size parameter for the primal update.
sigma : tuple of float
The ``sigma`` step size parameter for the dual update.
Notes
-----
To guarantee convergence, the parameters :math:`\tau`, :math:`\sigma`
and :math:`L` need to satisfy
.. math::
\tau \sigma \|L\|^2 < 1
This function has 4 options, :math:`\tau`/:math:`\sigma` given or not
given.
- Neither :math:`\tau` nor :math:`\sigma` are given, they are chosen as
.. math::
\tau = \sigma = \frac{\sqrt{0.9}}{\|L\|}
- If only :math:`\sigma` is given, :math:`\tau` is set to
.. math::
\tau = \frac{0.9}{\sigma \|L\|^2}
- If only :math:`\tau` is given, :math:`\sigma` is set
to
.. math::
\sigma = \frac{0.9}{\tau \|L\|^2}
- If both are given, they are returned as-is without further validation. | [
"r",
"Default",
"step",
"sizes",
"for",
"pdhg",
"."
] | b8443f6aca90e191ba36c91d32253c5a36249a6c | https://github.com/odlgroup/odl/blob/b8443f6aca90e191ba36c91d32253c5a36249a6c/odl/solvers/nonsmooth/primal_dual_hybrid_gradient.py#L308-L371 |
231,909 | odlgroup/odl | odl/contrib/fom/util.py | haarpsi_similarity_map | def haarpsi_similarity_map(img1, img2, axis, c, a):
r"""Local similarity map for directional features along an axis.
Parameters
----------
img1, img2 : array-like
The images to compare. They must have equal shape.
axis : {0, 1}
Direction in which to look for edge similarities.
c : positive float
Constant determining the score of maximally dissimilar values.
Smaller constant means higher penalty for dissimilarity.
See Notes for details.
a : positive float
Parameter in the logistic function. Larger value leads to a
steeper curve, thus lowering the threshold for an input to
be mapped to an output close to 1. See Notes for details.
Returns
-------
local_sim : `numpy.ndarray`
Pointwise similarity of directional edge features of ``img1`` and
``img2``, measured using two Haar wavelet detail levels.
Notes
-----
For input images :math:`f_1, f_2` this function is defined as
.. math::
\mathrm{HS}_{f_1, f_2}^{(k)}(x) =
l_a \left(
\frac{1}{2} \sum_{j=1}^2
S\left(\left|g_j^{(k)} \ast f_1 \right|(x),
\left|g_j^{(k)} \ast f_2 \right|(x), c\right)
\right),
see `[Rei+2016] <https://arxiv.org/abs/1607.06140>`_ equation (10).
Here, the superscript :math:`(k)` refers to the axis (0 or 1)
in which edge features are compared, :math:`l_a` is the logistic
function :math:`l_a(x) = (1 + \mathrm{e}^{-a x})^{-1}`, and :math:`S`
is the pointwise similarity score
.. math::
S(x, y, c) = \frac{2xy + c^2}{x^2 + y^2 + c^2},
Hence, :math:`c` is the :math:`y`-value at which the score
drops to :math:`1 / 2` for :math:`x = 0`. In other words, the smaller
:math:`c` is chosen, the more dissimilarity is penalized.
The filters :math:`g_j^{(k)}` are high-pass Haar wavelet filters in the
axis :math:`k` and low-pass Haar wavelet filters in the other axes.
The index :math:`j` refers to the scaling level of the wavelet.
In code, these filters can be computed as ::
f_lo_level1 = [np.sqrt(2), np.sqrt(2)] # low-pass Haar filter
f_hi_level1 = [-np.sqrt(2), np.sqrt(2)] # high-pass Haar filter
f_lo_level2 = np.repeat(f_lo_level1, 2)
f_hi_level2 = np.repeat(f_hi_level1, 2)
f_lo_level3 = np.repeat(f_lo_level2, 2)
f_hi_level3 = np.repeat(f_hi_level2, 2)
...
The logistic function :math:`l_a` transforms values in
:math:`[0, \infty)` to :math:`[1/2, 1)`, where the parameter
:math:`a` determines how fast the curve attains values close
to 1. Larger :math:`a` means that smaller :math:`x` will yield
a value :math:`l_a(x)` close to 1 (and thus result in a higher
score). In other words, the larger :math:`a`, the more forgiving
the similarity measure.
References
----------
[Rei+2016] Reisenhofer, R, Bosse, S, Kutyniok, G, and Wiegand, T.
*A Haar Wavelet-Based Perceptual Similarity Index for Image Quality
Assessment*. arXiv:1607.06140 [cs], Jul. 2016.
"""
# TODO: generalize for nD
import scipy.special
impl = 'pyfftw' if PYFFTW_AVAILABLE else 'numpy'
# Haar wavelet filters for levels 1 and 2
dec_lo_lvl1 = np.array([np.sqrt(2), np.sqrt(2)])
dec_lo_lvl2 = np.repeat(dec_lo_lvl1, 2)
dec_hi_lvl1 = np.array([-np.sqrt(2), np.sqrt(2)])
dec_hi_lvl2 = np.repeat(dec_hi_lvl1, 2)
if axis == 0:
# High-pass in axis 0, low-pass in axis 1
fh_lvl1 = dec_hi_lvl1
fv_lvl1 = dec_lo_lvl1
fh_lvl2 = dec_hi_lvl2
fv_lvl2 = dec_lo_lvl2
elif axis == 1:
# Low-pass in axis 0, high-pass in axis 1
fh_lvl1 = dec_lo_lvl1
fv_lvl1 = dec_hi_lvl1
fh_lvl2 = dec_lo_lvl2
fv_lvl2 = dec_hi_lvl2
else:
raise ValueError('`axis` out of the valid range 0 -> 1')
# Filter images with level 1 and 2 filters
img1_lvl1 = filter_image_sep2d(img1, fh_lvl1, fv_lvl1, impl=impl)
img1_lvl2 = filter_image_sep2d(img1, fh_lvl2, fv_lvl2, impl=impl)
img2_lvl1 = filter_image_sep2d(img2, fh_lvl1, fv_lvl1, impl=impl)
img2_lvl2 = filter_image_sep2d(img2, fh_lvl2, fv_lvl2, impl=impl)
c = float(c)
def S(x, y):
"""Return ``(2 * x * y + c ** 2) / (x ** 2 + y ** 2 + c ** 2)``."""
num = 2 * x
num *= y
num += c ** 2
denom = x ** 2
denom += y ** 2
denom += c ** 2
frac = num
frac /= denom
return frac
# Compute similarity scores for both levels
np.abs(img1_lvl1, out=img1_lvl1)
np.abs(img2_lvl1, out=img2_lvl1)
np.abs(img1_lvl2, out=img1_lvl2)
np.abs(img2_lvl2, out=img2_lvl2)
sim_lvl1 = S(img1_lvl1, img2_lvl1)
sim_lvl2 = S(img1_lvl2, img2_lvl2)
# Return logistic of the mean value
sim = sim_lvl1
sim += sim_lvl2
sim /= 2
sim *= a
return scipy.special.expit(sim) | python | def haarpsi_similarity_map(img1, img2, axis, c, a):
r"""Local similarity map for directional features along an axis.
Parameters
----------
img1, img2 : array-like
The images to compare. They must have equal shape.
axis : {0, 1}
Direction in which to look for edge similarities.
c : positive float
Constant determining the score of maximally dissimilar values.
Smaller constant means higher penalty for dissimilarity.
See Notes for details.
a : positive float
Parameter in the logistic function. Larger value leads to a
steeper curve, thus lowering the threshold for an input to
be mapped to an output close to 1. See Notes for details.
Returns
-------
local_sim : `numpy.ndarray`
Pointwise similarity of directional edge features of ``img1`` and
``img2``, measured using two Haar wavelet detail levels.
Notes
-----
For input images :math:`f_1, f_2` this function is defined as
.. math::
\mathrm{HS}_{f_1, f_2}^{(k)}(x) =
l_a \left(
\frac{1}{2} \sum_{j=1}^2
S\left(\left|g_j^{(k)} \ast f_1 \right|(x),
\left|g_j^{(k)} \ast f_2 \right|(x), c\right)
\right),
see `[Rei+2016] <https://arxiv.org/abs/1607.06140>`_ equation (10).
Here, the superscript :math:`(k)` refers to the axis (0 or 1)
in which edge features are compared, :math:`l_a` is the logistic
function :math:`l_a(x) = (1 + \mathrm{e}^{-a x})^{-1}`, and :math:`S`
is the pointwise similarity score
.. math::
S(x, y, c) = \frac{2xy + c^2}{x^2 + y^2 + c^2},
Hence, :math:`c` is the :math:`y`-value at which the score
drops to :math:`1 / 2` for :math:`x = 0`. In other words, the smaller
:math:`c` is chosen, the more dissimilarity is penalized.
The filters :math:`g_j^{(k)}` are high-pass Haar wavelet filters in the
axis :math:`k` and low-pass Haar wavelet filters in the other axes.
The index :math:`j` refers to the scaling level of the wavelet.
In code, these filters can be computed as ::
f_lo_level1 = [np.sqrt(2), np.sqrt(2)] # low-pass Haar filter
f_hi_level1 = [-np.sqrt(2), np.sqrt(2)] # high-pass Haar filter
f_lo_level2 = np.repeat(f_lo_level1, 2)
f_hi_level2 = np.repeat(f_hi_level1, 2)
f_lo_level3 = np.repeat(f_lo_level2, 2)
f_hi_level3 = np.repeat(f_hi_level2, 2)
...
The logistic function :math:`l_a` transforms values in
:math:`[0, \infty)` to :math:`[1/2, 1)`, where the parameter
:math:`a` determines how fast the curve attains values close
to 1. Larger :math:`a` means that smaller :math:`x` will yield
a value :math:`l_a(x)` close to 1 (and thus result in a higher
score). In other words, the larger :math:`a`, the more forgiving
the similarity measure.
References
----------
[Rei+2016] Reisenhofer, R, Bosse, S, Kutyniok, G, and Wiegand, T.
*A Haar Wavelet-Based Perceptual Similarity Index for Image Quality
Assessment*. arXiv:1607.06140 [cs], Jul. 2016.
"""
# TODO: generalize for nD
import scipy.special
impl = 'pyfftw' if PYFFTW_AVAILABLE else 'numpy'
# Haar wavelet filters for levels 1 and 2
dec_lo_lvl1 = np.array([np.sqrt(2), np.sqrt(2)])
dec_lo_lvl2 = np.repeat(dec_lo_lvl1, 2)
dec_hi_lvl1 = np.array([-np.sqrt(2), np.sqrt(2)])
dec_hi_lvl2 = np.repeat(dec_hi_lvl1, 2)
if axis == 0:
# High-pass in axis 0, low-pass in axis 1
fh_lvl1 = dec_hi_lvl1
fv_lvl1 = dec_lo_lvl1
fh_lvl2 = dec_hi_lvl2
fv_lvl2 = dec_lo_lvl2
elif axis == 1:
# Low-pass in axis 0, high-pass in axis 1
fh_lvl1 = dec_lo_lvl1
fv_lvl1 = dec_hi_lvl1
fh_lvl2 = dec_lo_lvl2
fv_lvl2 = dec_hi_lvl2
else:
raise ValueError('`axis` out of the valid range 0 -> 1')
# Filter images with level 1 and 2 filters
img1_lvl1 = filter_image_sep2d(img1, fh_lvl1, fv_lvl1, impl=impl)
img1_lvl2 = filter_image_sep2d(img1, fh_lvl2, fv_lvl2, impl=impl)
img2_lvl1 = filter_image_sep2d(img2, fh_lvl1, fv_lvl1, impl=impl)
img2_lvl2 = filter_image_sep2d(img2, fh_lvl2, fv_lvl2, impl=impl)
c = float(c)
def S(x, y):
"""Return ``(2 * x * y + c ** 2) / (x ** 2 + y ** 2 + c ** 2)``."""
num = 2 * x
num *= y
num += c ** 2
denom = x ** 2
denom += y ** 2
denom += c ** 2
frac = num
frac /= denom
return frac
# Compute similarity scores for both levels
np.abs(img1_lvl1, out=img1_lvl1)
np.abs(img2_lvl1, out=img2_lvl1)
np.abs(img1_lvl2, out=img1_lvl2)
np.abs(img2_lvl2, out=img2_lvl2)
sim_lvl1 = S(img1_lvl1, img2_lvl1)
sim_lvl2 = S(img1_lvl2, img2_lvl2)
# Return logistic of the mean value
sim = sim_lvl1
sim += sim_lvl2
sim /= 2
sim *= a
return scipy.special.expit(sim) | [
"def",
"haarpsi_similarity_map",
"(",
"img1",
",",
"img2",
",",
"axis",
",",
"c",
",",
"a",
")",
":",
"# TODO: generalize for nD",
"import",
"scipy",
".",
"special",
"impl",
"=",
"'pyfftw'",
"if",
"PYFFTW_AVAILABLE",
"else",
"'numpy'",
"# Haar wavelet filters for ... | r"""Local similarity map for directional features along an axis.
Parameters
----------
img1, img2 : array-like
The images to compare. They must have equal shape.
axis : {0, 1}
Direction in which to look for edge similarities.
c : positive float
Constant determining the score of maximally dissimilar values.
Smaller constant means higher penalty for dissimilarity.
See Notes for details.
a : positive float
Parameter in the logistic function. Larger value leads to a
steeper curve, thus lowering the threshold for an input to
be mapped to an output close to 1. See Notes for details.
Returns
-------
local_sim : `numpy.ndarray`
Pointwise similarity of directional edge features of ``img1`` and
``img2``, measured using two Haar wavelet detail levels.
Notes
-----
For input images :math:`f_1, f_2` this function is defined as
.. math::
\mathrm{HS}_{f_1, f_2}^{(k)}(x) =
l_a \left(
\frac{1}{2} \sum_{j=1}^2
S\left(\left|g_j^{(k)} \ast f_1 \right|(x),
\left|g_j^{(k)} \ast f_2 \right|(x), c\right)
\right),
see `[Rei+2016] <https://arxiv.org/abs/1607.06140>`_ equation (10).
Here, the superscript :math:`(k)` refers to the axis (0 or 1)
in which edge features are compared, :math:`l_a` is the logistic
function :math:`l_a(x) = (1 + \mathrm{e}^{-a x})^{-1}`, and :math:`S`
is the pointwise similarity score
.. math::
S(x, y, c) = \frac{2xy + c^2}{x^2 + y^2 + c^2},
Hence, :math:`c` is the :math:`y`-value at which the score
drops to :math:`1 / 2` for :math:`x = 0`. In other words, the smaller
:math:`c` is chosen, the more dissimilarity is penalized.
The filters :math:`g_j^{(k)}` are high-pass Haar wavelet filters in the
axis :math:`k` and low-pass Haar wavelet filters in the other axes.
The index :math:`j` refers to the scaling level of the wavelet.
In code, these filters can be computed as ::
f_lo_level1 = [np.sqrt(2), np.sqrt(2)] # low-pass Haar filter
f_hi_level1 = [-np.sqrt(2), np.sqrt(2)] # high-pass Haar filter
f_lo_level2 = np.repeat(f_lo_level1, 2)
f_hi_level2 = np.repeat(f_hi_level1, 2)
f_lo_level3 = np.repeat(f_lo_level2, 2)
f_hi_level3 = np.repeat(f_hi_level2, 2)
...
The logistic function :math:`l_a` transforms values in
:math:`[0, \infty)` to :math:`[1/2, 1)`, where the parameter
:math:`a` determines how fast the curve attains values close
to 1. Larger :math:`a` means that smaller :math:`x` will yield
a value :math:`l_a(x)` close to 1 (and thus result in a higher
score). In other words, the larger :math:`a`, the more forgiving
the similarity measure.
References
----------
[Rei+2016] Reisenhofer, R, Bosse, S, Kutyniok, G, and Wiegand, T.
*A Haar Wavelet-Based Perceptual Similarity Index for Image Quality
Assessment*. arXiv:1607.06140 [cs], Jul. 2016. | [
"r",
"Local",
"similarity",
"map",
"for",
"directional",
"features",
"along",
"an",
"axis",
"."
] | b8443f6aca90e191ba36c91d32253c5a36249a6c | https://github.com/odlgroup/odl/blob/b8443f6aca90e191ba36c91d32253c5a36249a6c/odl/contrib/fom/util.py#L191-L327 |
231,910 | odlgroup/odl | odl/contrib/fom/util.py | haarpsi_weight_map | def haarpsi_weight_map(img1, img2, axis):
r"""Weighting map for directional features along an axis.
Parameters
----------
img1, img2 : array-like
The images to compare. They must have equal shape.
axis : {0, 1}
Direction in which to look for edge similarities.
Returns
-------
weight_map : `numpy.ndarray`
The pointwise weight map. See Notes for details.
Notes
-----
The pointwise weight map of associated with input images :math:`f_1, f_2`
and axis :math:`k` is defined
as
.. math::
\mathrm{W}_{f_1, f_2}^{(k)}(x) =
\max \left\{
\left|g_3^{(k)} \ast f_1 \right|(x),
\left|g_3^{(k)} \ast f_2 \right|(x)
\right\},
see `[Rei+2016] <https://arxiv.org/abs/1607.06140>`_ equations (11)
and (13).
Here, :math:`g_3^{(k)}` is a Haar wavelet filter for scaling level 3
that performs high-pass filtering in axis :math:`k` and low-pass
filtering in the other axes. Such a filter can be computed as ::
f_lo_level1 = [np.sqrt(2), np.sqrt(2)] # low-pass Haar filter
f_hi_level1 = [-np.sqrt(2), np.sqrt(2)] # high-pass Haar filter
f_lo_level3 = np.repeat(f_lo_level1, 4)
f_hi_level3 = np.repeat(f_hi_level1, 4)
References
----------
[Rei+2016] Reisenhofer, R, Bosse, S, Kutyniok, G, and Wiegand, T.
*A Haar Wavelet-Based Perceptual Similarity Index for Image Quality
Assessment*. arXiv:1607.06140 [cs], Jul. 2016.
"""
# TODO: generalize for nD
impl = 'pyfftw' if PYFFTW_AVAILABLE else 'numpy'
# Haar wavelet filters for level 3
dec_lo_lvl3 = np.repeat([np.sqrt(2), np.sqrt(2)], 4)
dec_hi_lvl3 = np.repeat([-np.sqrt(2), np.sqrt(2)], 4)
if axis == 0:
fh_lvl3 = dec_hi_lvl3
fv_lvl3 = dec_lo_lvl3
elif axis == 1:
fh_lvl3 = dec_lo_lvl3
fv_lvl3 = dec_hi_lvl3
else:
raise ValueError('`axis` out of the valid range 0 -> 1')
# Filter with level 3 wavelet filter
img1_lvl3 = filter_image_sep2d(img1, fh_lvl3, fv_lvl3, impl=impl)
img2_lvl3 = filter_image_sep2d(img2, fh_lvl3, fv_lvl3, impl=impl)
# Return the pointwise maximum of the filtered images
np.abs(img1_lvl3, out=img1_lvl3)
np.abs(img2_lvl3, out=img2_lvl3)
return np.maximum(img1_lvl3, img2_lvl3) | python | def haarpsi_weight_map(img1, img2, axis):
r"""Weighting map for directional features along an axis.
Parameters
----------
img1, img2 : array-like
The images to compare. They must have equal shape.
axis : {0, 1}
Direction in which to look for edge similarities.
Returns
-------
weight_map : `numpy.ndarray`
The pointwise weight map. See Notes for details.
Notes
-----
The pointwise weight map of associated with input images :math:`f_1, f_2`
and axis :math:`k` is defined
as
.. math::
\mathrm{W}_{f_1, f_2}^{(k)}(x) =
\max \left\{
\left|g_3^{(k)} \ast f_1 \right|(x),
\left|g_3^{(k)} \ast f_2 \right|(x)
\right\},
see `[Rei+2016] <https://arxiv.org/abs/1607.06140>`_ equations (11)
and (13).
Here, :math:`g_3^{(k)}` is a Haar wavelet filter for scaling level 3
that performs high-pass filtering in axis :math:`k` and low-pass
filtering in the other axes. Such a filter can be computed as ::
f_lo_level1 = [np.sqrt(2), np.sqrt(2)] # low-pass Haar filter
f_hi_level1 = [-np.sqrt(2), np.sqrt(2)] # high-pass Haar filter
f_lo_level3 = np.repeat(f_lo_level1, 4)
f_hi_level3 = np.repeat(f_hi_level1, 4)
References
----------
[Rei+2016] Reisenhofer, R, Bosse, S, Kutyniok, G, and Wiegand, T.
*A Haar Wavelet-Based Perceptual Similarity Index for Image Quality
Assessment*. arXiv:1607.06140 [cs], Jul. 2016.
"""
# TODO: generalize for nD
impl = 'pyfftw' if PYFFTW_AVAILABLE else 'numpy'
# Haar wavelet filters for level 3
dec_lo_lvl3 = np.repeat([np.sqrt(2), np.sqrt(2)], 4)
dec_hi_lvl3 = np.repeat([-np.sqrt(2), np.sqrt(2)], 4)
if axis == 0:
fh_lvl3 = dec_hi_lvl3
fv_lvl3 = dec_lo_lvl3
elif axis == 1:
fh_lvl3 = dec_lo_lvl3
fv_lvl3 = dec_hi_lvl3
else:
raise ValueError('`axis` out of the valid range 0 -> 1')
# Filter with level 3 wavelet filter
img1_lvl3 = filter_image_sep2d(img1, fh_lvl3, fv_lvl3, impl=impl)
img2_lvl3 = filter_image_sep2d(img2, fh_lvl3, fv_lvl3, impl=impl)
# Return the pointwise maximum of the filtered images
np.abs(img1_lvl3, out=img1_lvl3)
np.abs(img2_lvl3, out=img2_lvl3)
return np.maximum(img1_lvl3, img2_lvl3) | [
"def",
"haarpsi_weight_map",
"(",
"img1",
",",
"img2",
",",
"axis",
")",
":",
"# TODO: generalize for nD",
"impl",
"=",
"'pyfftw'",
"if",
"PYFFTW_AVAILABLE",
"else",
"'numpy'",
"# Haar wavelet filters for level 3",
"dec_lo_lvl3",
"=",
"np",
".",
"repeat",
"(",
"[",
... | r"""Weighting map for directional features along an axis.
Parameters
----------
img1, img2 : array-like
The images to compare. They must have equal shape.
axis : {0, 1}
Direction in which to look for edge similarities.
Returns
-------
weight_map : `numpy.ndarray`
The pointwise weight map. See Notes for details.
Notes
-----
The pointwise weight map of associated with input images :math:`f_1, f_2`
and axis :math:`k` is defined
as
.. math::
\mathrm{W}_{f_1, f_2}^{(k)}(x) =
\max \left\{
\left|g_3^{(k)} \ast f_1 \right|(x),
\left|g_3^{(k)} \ast f_2 \right|(x)
\right\},
see `[Rei+2016] <https://arxiv.org/abs/1607.06140>`_ equations (11)
and (13).
Here, :math:`g_3^{(k)}` is a Haar wavelet filter for scaling level 3
that performs high-pass filtering in axis :math:`k` and low-pass
filtering in the other axes. Such a filter can be computed as ::
f_lo_level1 = [np.sqrt(2), np.sqrt(2)] # low-pass Haar filter
f_hi_level1 = [-np.sqrt(2), np.sqrt(2)] # high-pass Haar filter
f_lo_level3 = np.repeat(f_lo_level1, 4)
f_hi_level3 = np.repeat(f_hi_level1, 4)
References
----------
[Rei+2016] Reisenhofer, R, Bosse, S, Kutyniok, G, and Wiegand, T.
*A Haar Wavelet-Based Perceptual Similarity Index for Image Quality
Assessment*. arXiv:1607.06140 [cs], Jul. 2016. | [
"r",
"Weighting",
"map",
"for",
"directional",
"features",
"along",
"an",
"axis",
"."
] | b8443f6aca90e191ba36c91d32253c5a36249a6c | https://github.com/odlgroup/odl/blob/b8443f6aca90e191ba36c91d32253c5a36249a6c/odl/contrib/fom/util.py#L330-L400 |
231,911 | odlgroup/odl | odl/contrib/fom/util.py | spherical_sum | def spherical_sum(image, binning_factor=1.0):
"""Sum image values over concentric annuli.
Parameters
----------
image : `DiscreteLp` element
Input data whose radial sum should be computed.
binning_factor : positive float, optional
Reduce the number of output bins by this factor. Increasing this
number can help reducing fluctuations due to the variance of points
that fall in a particular annulus.
A binning factor of ``1`` corresponds to a bin size equal to
image pixel size for images with square pixels, otherwise ::
max(norm2(c)) / norm2(shape)
where the maximum is taken over all corners of the image domain.
Returns
-------
spherical_sum : 1D `DiscreteLp` element
The spherical sum of ``image``. Its space is one-dimensional with
domain ``[0, rmax]``, where ``rmax`` is the radius of the smallest
ball containing ``image.space.domain``. Its shape is ``(N,)`` with ::
N = int(sqrt(sum(n ** 2 for n in image.shape)) / binning_factor)
"""
r = np.sqrt(sum(xi ** 2 for xi in image.space.meshgrid))
rmax = max(np.linalg.norm(c) for c in image.space.domain.corners())
n_bins = int(np.sqrt(sum(n ** 2 for n in image.shape)) / binning_factor)
rad_sum, _ = np.histogram(r, weights=image, bins=n_bins, range=(0, rmax))
out_spc = uniform_discr(min_pt=0, max_pt=rmax, shape=n_bins,
impl=image.space.impl, dtype=image.space.dtype,
interp="linear", axis_labels=["$r$"])
return out_spc.element(rad_sum) | python | def spherical_sum(image, binning_factor=1.0):
r = np.sqrt(sum(xi ** 2 for xi in image.space.meshgrid))
rmax = max(np.linalg.norm(c) for c in image.space.domain.corners())
n_bins = int(np.sqrt(sum(n ** 2 for n in image.shape)) / binning_factor)
rad_sum, _ = np.histogram(r, weights=image, bins=n_bins, range=(0, rmax))
out_spc = uniform_discr(min_pt=0, max_pt=rmax, shape=n_bins,
impl=image.space.impl, dtype=image.space.dtype,
interp="linear", axis_labels=["$r$"])
return out_spc.element(rad_sum) | [
"def",
"spherical_sum",
"(",
"image",
",",
"binning_factor",
"=",
"1.0",
")",
":",
"r",
"=",
"np",
".",
"sqrt",
"(",
"sum",
"(",
"xi",
"**",
"2",
"for",
"xi",
"in",
"image",
".",
"space",
".",
"meshgrid",
")",
")",
"rmax",
"=",
"max",
"(",
"np",
... | Sum image values over concentric annuli.
Parameters
----------
image : `DiscreteLp` element
Input data whose radial sum should be computed.
binning_factor : positive float, optional
Reduce the number of output bins by this factor. Increasing this
number can help reducing fluctuations due to the variance of points
that fall in a particular annulus.
A binning factor of ``1`` corresponds to a bin size equal to
image pixel size for images with square pixels, otherwise ::
max(norm2(c)) / norm2(shape)
where the maximum is taken over all corners of the image domain.
Returns
-------
spherical_sum : 1D `DiscreteLp` element
The spherical sum of ``image``. Its space is one-dimensional with
domain ``[0, rmax]``, where ``rmax`` is the radius of the smallest
ball containing ``image.space.domain``. Its shape is ``(N,)`` with ::
N = int(sqrt(sum(n ** 2 for n in image.shape)) / binning_factor) | [
"Sum",
"image",
"values",
"over",
"concentric",
"annuli",
"."
] | b8443f6aca90e191ba36c91d32253c5a36249a6c | https://github.com/odlgroup/odl/blob/b8443f6aca90e191ba36c91d32253c5a36249a6c/odl/contrib/fom/util.py#L403-L439 |
231,912 | odlgroup/odl | odl/solvers/functional/functional.py | simple_functional | def simple_functional(space, fcall=None, grad=None, prox=None, grad_lip=np.nan,
convex_conj_fcall=None, convex_conj_grad=None,
convex_conj_prox=None, convex_conj_grad_lip=np.nan,
linear=False):
"""Simplified interface to create a functional with specific properties.
Users may specify as many properties as-is needed by the application.
Parameters
----------
space : `LinearSpace`
Space that the functional should act on.
fcall : callable, optional
Function to evaluate when calling the functional.
grad : callable or `Operator`, optional
Gradient operator of the functional.
prox : `proximal factory`, optional
Proximal factory for the functional.
grad_lip : float, optional
lipschitz constant of the functional.
convex_conj_fcall : callable, optional
Function to evaluate when calling the convex conjugate functional.
convex_conj_grad : callable or `Operator`, optional
Gradient operator of the convex conjugate functional
convex_conj_prox : `proximal factory`, optional
Proximal factory for the convex conjugate functional.
convex_conj_grad_lip : float, optional
lipschitz constant of the convex conjugate functional.
linear : bool, optional
True if the operator is linear.
Examples
--------
Create squared sum functional on rn:
>>> def f(x):
... return sum(xi**2 for xi in x)
>>> def dfdx(x):
... return 2 * x
>>> space = odl.rn(3)
>>> func = simple_functional(space, f, grad=dfdx)
>>> func.domain
rn(3)
>>> func.range
RealNumbers()
>>> func([1, 2, 3])
14.0
>>> func.gradient([1, 2, 3])
rn(3).element([ 2., 4., 6.])
"""
if grad is not None and not isinstance(grad, Operator):
grad_in = grad
class SimpleFunctionalGradient(Operator):
"""Gradient of a `SimpleFunctional`."""
def _call(self, x):
"""Return ``self(x)``."""
return grad_in(x)
grad = SimpleFunctionalGradient(space, space, linear=False)
if (convex_conj_grad is not None and
not isinstance(convex_conj_grad, Operator)):
convex_conj_grad_in = convex_conj_grad
class SimpleFunctionalConvexConjGradient(Operator):
"""Gradient of the convex conj of a `SimpleFunctional`."""
def _call(self, x):
"""Return ``self(x)``."""
return convex_conj_grad_in(x)
convex_conj_grad = SimpleFunctionalConvexConjGradient(
space, space, linear=False)
class SimpleFunctional(Functional):
"""A simplified functional for examples."""
def __init__(self):
"""Initialize an instance."""
super(SimpleFunctional, self).__init__(
space, linear=linear, grad_lipschitz=grad_lip)
def _call(self, x):
"""Return ``self(x)``."""
if fcall is None:
raise NotImplementedError('call not implemented')
else:
return fcall(x)
@property
def proximal(self):
"""Return the proximal of the operator."""
if prox is None:
raise NotImplementedError('proximal not implemented')
else:
return prox
@property
def gradient(self):
"""Return the gradient of the operator."""
if grad is None:
raise NotImplementedError('gradient not implemented')
else:
return grad
@property
def convex_conj(self):
return simple_functional(space, fcall=convex_conj_fcall,
grad=convex_conj_grad,
prox=convex_conj_prox,
grad_lip=convex_conj_grad_lip,
convex_conj_fcall=fcall,
convex_conj_grad=grad,
convex_conj_prox=prox,
convex_conj_grad_lip=grad_lip,
linear=linear)
return SimpleFunctional() | python | def simple_functional(space, fcall=None, grad=None, prox=None, grad_lip=np.nan,
convex_conj_fcall=None, convex_conj_grad=None,
convex_conj_prox=None, convex_conj_grad_lip=np.nan,
linear=False):
if grad is not None and not isinstance(grad, Operator):
grad_in = grad
class SimpleFunctionalGradient(Operator):
"""Gradient of a `SimpleFunctional`."""
def _call(self, x):
"""Return ``self(x)``."""
return grad_in(x)
grad = SimpleFunctionalGradient(space, space, linear=False)
if (convex_conj_grad is not None and
not isinstance(convex_conj_grad, Operator)):
convex_conj_grad_in = convex_conj_grad
class SimpleFunctionalConvexConjGradient(Operator):
"""Gradient of the convex conj of a `SimpleFunctional`."""
def _call(self, x):
"""Return ``self(x)``."""
return convex_conj_grad_in(x)
convex_conj_grad = SimpleFunctionalConvexConjGradient(
space, space, linear=False)
class SimpleFunctional(Functional):
"""A simplified functional for examples."""
def __init__(self):
"""Initialize an instance."""
super(SimpleFunctional, self).__init__(
space, linear=linear, grad_lipschitz=grad_lip)
def _call(self, x):
"""Return ``self(x)``."""
if fcall is None:
raise NotImplementedError('call not implemented')
else:
return fcall(x)
@property
def proximal(self):
"""Return the proximal of the operator."""
if prox is None:
raise NotImplementedError('proximal not implemented')
else:
return prox
@property
def gradient(self):
"""Return the gradient of the operator."""
if grad is None:
raise NotImplementedError('gradient not implemented')
else:
return grad
@property
def convex_conj(self):
return simple_functional(space, fcall=convex_conj_fcall,
grad=convex_conj_grad,
prox=convex_conj_prox,
grad_lip=convex_conj_grad_lip,
convex_conj_fcall=fcall,
convex_conj_grad=grad,
convex_conj_prox=prox,
convex_conj_grad_lip=grad_lip,
linear=linear)
return SimpleFunctional() | [
"def",
"simple_functional",
"(",
"space",
",",
"fcall",
"=",
"None",
",",
"grad",
"=",
"None",
",",
"prox",
"=",
"None",
",",
"grad_lip",
"=",
"np",
".",
"nan",
",",
"convex_conj_fcall",
"=",
"None",
",",
"convex_conj_grad",
"=",
"None",
",",
"convex_con... | Simplified interface to create a functional with specific properties.
Users may specify as many properties as-is needed by the application.
Parameters
----------
space : `LinearSpace`
Space that the functional should act on.
fcall : callable, optional
Function to evaluate when calling the functional.
grad : callable or `Operator`, optional
Gradient operator of the functional.
prox : `proximal factory`, optional
Proximal factory for the functional.
grad_lip : float, optional
lipschitz constant of the functional.
convex_conj_fcall : callable, optional
Function to evaluate when calling the convex conjugate functional.
convex_conj_grad : callable or `Operator`, optional
Gradient operator of the convex conjugate functional
convex_conj_prox : `proximal factory`, optional
Proximal factory for the convex conjugate functional.
convex_conj_grad_lip : float, optional
lipschitz constant of the convex conjugate functional.
linear : bool, optional
True if the operator is linear.
Examples
--------
Create squared sum functional on rn:
>>> def f(x):
... return sum(xi**2 for xi in x)
>>> def dfdx(x):
... return 2 * x
>>> space = odl.rn(3)
>>> func = simple_functional(space, f, grad=dfdx)
>>> func.domain
rn(3)
>>> func.range
RealNumbers()
>>> func([1, 2, 3])
14.0
>>> func.gradient([1, 2, 3])
rn(3).element([ 2., 4., 6.]) | [
"Simplified",
"interface",
"to",
"create",
"a",
"functional",
"with",
"specific",
"properties",
"."
] | b8443f6aca90e191ba36c91d32253c5a36249a6c | https://github.com/odlgroup/odl/blob/b8443f6aca90e191ba36c91d32253c5a36249a6c/odl/solvers/functional/functional.py#L1470-L1592 |
231,913 | odlgroup/odl | odl/solvers/functional/functional.py | FunctionalLeftScalarMult.convex_conj | def convex_conj(self):
"""Convex conjugate functional of the scaled functional.
``Functional.__rmul__`` takes care of the case scalar = 0.
"""
if self.scalar <= 0:
raise ValueError('scaling with nonpositive values have no convex '
'conjugate. Current value: {}.'
''.format(self.scalar))
return self.scalar * self.functional.convex_conj * (1.0 / self.scalar) | python | def convex_conj(self):
if self.scalar <= 0:
raise ValueError('scaling with nonpositive values have no convex '
'conjugate. Current value: {}.'
''.format(self.scalar))
return self.scalar * self.functional.convex_conj * (1.0 / self.scalar) | [
"def",
"convex_conj",
"(",
"self",
")",
":",
"if",
"self",
".",
"scalar",
"<=",
"0",
":",
"raise",
"ValueError",
"(",
"'scaling with nonpositive values have no convex '",
"'conjugate. Current value: {}.'",
"''",
".",
"format",
"(",
"self",
".",
"scalar",
")",
")",... | Convex conjugate functional of the scaled functional.
``Functional.__rmul__`` takes care of the case scalar = 0. | [
"Convex",
"conjugate",
"functional",
"of",
"the",
"scaled",
"functional",
"."
] | b8443f6aca90e191ba36c91d32253c5a36249a6c | https://github.com/odlgroup/odl/blob/b8443f6aca90e191ba36c91d32253c5a36249a6c/odl/solvers/functional/functional.py#L486-L496 |
231,914 | odlgroup/odl | odl/solvers/functional/functional.py | FunctionalLeftScalarMult.proximal | def proximal(self):
"""Proximal factory of the scaled functional.
``Functional.__rmul__`` takes care of the case scalar = 0
See Also
--------
odl.solvers.nonsmooth.proximal_operators.proximal_const_func
"""
if self.scalar < 0:
raise ValueError('proximal operator of functional scaled with a '
'negative value {} is not well-defined'
''.format(self.scalar))
elif self.scalar == 0:
# Should not get here. `Functional.__rmul__` takes care of the case
# scalar = 0
return proximal_const_func(self.domain)
else:
def proximal_left_scalar_mult(sigma=1.0):
"""Proximal operator for left scalar multiplication.
Parameters
----------
sigma : positive float, optional
Step size parameter. Default: 1.0
"""
return self.functional.proximal(sigma * self.scalar)
return proximal_left_scalar_mult | python | def proximal(self):
if self.scalar < 0:
raise ValueError('proximal operator of functional scaled with a '
'negative value {} is not well-defined'
''.format(self.scalar))
elif self.scalar == 0:
# Should not get here. `Functional.__rmul__` takes care of the case
# scalar = 0
return proximal_const_func(self.domain)
else:
def proximal_left_scalar_mult(sigma=1.0):
"""Proximal operator for left scalar multiplication.
Parameters
----------
sigma : positive float, optional
Step size parameter. Default: 1.0
"""
return self.functional.proximal(sigma * self.scalar)
return proximal_left_scalar_mult | [
"def",
"proximal",
"(",
"self",
")",
":",
"if",
"self",
".",
"scalar",
"<",
"0",
":",
"raise",
"ValueError",
"(",
"'proximal operator of functional scaled with a '",
"'negative value {} is not well-defined'",
"''",
".",
"format",
"(",
"self",
".",
"scalar",
")",
"... | Proximal factory of the scaled functional.
``Functional.__rmul__`` takes care of the case scalar = 0
See Also
--------
odl.solvers.nonsmooth.proximal_operators.proximal_const_func | [
"Proximal",
"factory",
"of",
"the",
"scaled",
"functional",
"."
] | b8443f6aca90e191ba36c91d32253c5a36249a6c | https://github.com/odlgroup/odl/blob/b8443f6aca90e191ba36c91d32253c5a36249a6c/odl/solvers/functional/functional.py#L499-L530 |
231,915 | odlgroup/odl | odl/solvers/functional/functional.py | FunctionalComp.gradient | def gradient(self):
"""Gradient of the compositon according to the chain rule."""
func = self.left
op = self.right
class FunctionalCompositionGradient(Operator):
"""Gradient of the compositon according to the chain rule."""
def __init__(self):
"""Initialize a new instance."""
super(FunctionalCompositionGradient, self).__init__(
op.domain, op.domain, linear=False)
def _call(self, x):
"""Apply the gradient operator to the given point."""
return op.derivative(x).adjoint(func.gradient(op(x)))
def derivative(self, x):
"""The derivative in point ``x``.
This is only defined
"""
if not op.is_linear:
raise NotImplementedError('derivative only implemented '
'for linear opertors.')
else:
return (op.adjoint * func.gradient * op).derivative(x)
return FunctionalCompositionGradient() | python | def gradient(self):
func = self.left
op = self.right
class FunctionalCompositionGradient(Operator):
def __init__(self):
"""Initialize a new instance."""
super(FunctionalCompositionGradient, self).__init__(
op.domain, op.domain, linear=False)
def _call(self, x):
"""Apply the gradient operator to the given point."""
return op.derivative(x).adjoint(func.gradient(op(x)))
def derivative(self, x):
"""The derivative in point ``x``.
This is only defined
"""
if not op.is_linear:
raise NotImplementedError('derivative only implemented '
'for linear opertors.')
else:
return (op.adjoint * func.gradient * op).derivative(x)
return FunctionalCompositionGradient() | [
"def",
"gradient",
"(",
"self",
")",
":",
"func",
"=",
"self",
".",
"left",
"op",
"=",
"self",
".",
"right",
"class",
"FunctionalCompositionGradient",
"(",
"Operator",
")",
":",
"\"\"\"Gradient of the compositon according to the chain rule.\"\"\"",
"def",
"__init__",
... | Gradient of the compositon according to the chain rule. | [
"Gradient",
"of",
"the",
"compositon",
"according",
"to",
"the",
"chain",
"rule",
"."
] | b8443f6aca90e191ba36c91d32253c5a36249a6c | https://github.com/odlgroup/odl/blob/b8443f6aca90e191ba36c91d32253c5a36249a6c/odl/solvers/functional/functional.py#L628-L657 |
231,916 | odlgroup/odl | odl/solvers/functional/functional.py | FunctionalQuadraticPerturb.proximal | def proximal(self):
"""Proximal factory of the quadratically perturbed functional."""
if self.quadratic_coeff < 0:
raise TypeError('`quadratic_coeff` {} must be non-negative'
''.format(self.quadratic_coeff))
return proximal_quadratic_perturbation(
self.functional.proximal,
a=self.quadratic_coeff, u=self.linear_term) | python | def proximal(self):
if self.quadratic_coeff < 0:
raise TypeError('`quadratic_coeff` {} must be non-negative'
''.format(self.quadratic_coeff))
return proximal_quadratic_perturbation(
self.functional.proximal,
a=self.quadratic_coeff, u=self.linear_term) | [
"def",
"proximal",
"(",
"self",
")",
":",
"if",
"self",
".",
"quadratic_coeff",
"<",
"0",
":",
"raise",
"TypeError",
"(",
"'`quadratic_coeff` {} must be non-negative'",
"''",
".",
"format",
"(",
"self",
".",
"quadratic_coeff",
")",
")",
"return",
"proximal_quadr... | Proximal factory of the quadratically perturbed functional. | [
"Proximal",
"factory",
"of",
"the",
"quadratically",
"perturbed",
"functional",
"."
] | b8443f6aca90e191ba36c91d32253c5a36249a6c | https://github.com/odlgroup/odl/blob/b8443f6aca90e191ba36c91d32253c5a36249a6c/odl/solvers/functional/functional.py#L1059-L1067 |
231,917 | odlgroup/odl | odl/solvers/functional/functional.py | FunctionalQuadraticPerturb.convex_conj | def convex_conj(self):
r"""Convex conjugate functional of the functional.
Notes
-----
Given a functional :math:`f`, the convex conjugate of a linearly
perturbed version :math:`f(x) + <y, x>` is given by a translation of
the convex conjugate of :math:`f`:
.. math::
(f + \langle y, \cdot \rangle)^* (x^*) = f^*(x^* - y).
For reference on the identity used, see `[KP2015]`_. Moreover, the
convex conjugate of :math:`f + c` is by definition
.. math::
(f + c)^* (x^*) = f^*(x^*) - c.
References
----------
[KP2015] Komodakis, N, and Pesquet, J-C. *Playing with Duality: An
overview of recent primal-dual approaches for solving large-scale
optimization problems*. IEEE Signal Processing Magazine, 32.6 (2015),
pp 31--54.
.. _[KP2015]: https://arxiv.org/abs/1406.5429
"""
if self.quadratic_coeff == 0:
cconj = self.functional.convex_conj.translated(self.linear_term)
if self.constant != 0:
cconj = cconj - self.constant
return cconj
else:
return super(FunctionalQuadraticPerturb, self).convex_conj | python | def convex_conj(self):
r"""Convex conjugate functional of the functional.
Notes
-----
Given a functional :math:`f`, the convex conjugate of a linearly
perturbed version :math:`f(x) + <y, x>` is given by a translation of
the convex conjugate of :math:`f`:
.. math::
(f + \langle y, \cdot \rangle)^* (x^*) = f^*(x^* - y).
For reference on the identity used, see `[KP2015]`_. Moreover, the
convex conjugate of :math:`f + c` is by definition
.. math::
(f + c)^* (x^*) = f^*(x^*) - c.
References
----------
[KP2015] Komodakis, N, and Pesquet, J-C. *Playing with Duality: An
overview of recent primal-dual approaches for solving large-scale
optimization problems*. IEEE Signal Processing Magazine, 32.6 (2015),
pp 31--54.
.. _[KP2015]: https://arxiv.org/abs/1406.5429
"""
if self.quadratic_coeff == 0:
cconj = self.functional.convex_conj.translated(self.linear_term)
if self.constant != 0:
cconj = cconj - self.constant
return cconj
else:
return super(FunctionalQuadraticPerturb, self).convex_conj | [
"def",
"convex_conj",
"(",
"self",
")",
":",
"if",
"self",
".",
"quadratic_coeff",
"==",
"0",
":",
"cconj",
"=",
"self",
".",
"functional",
".",
"convex_conj",
".",
"translated",
"(",
"self",
".",
"linear_term",
")",
"if",
"self",
".",
"constant",
"!=",
... | r"""Convex conjugate functional of the functional.
Notes
-----
Given a functional :math:`f`, the convex conjugate of a linearly
perturbed version :math:`f(x) + <y, x>` is given by a translation of
the convex conjugate of :math:`f`:
.. math::
(f + \langle y, \cdot \rangle)^* (x^*) = f^*(x^* - y).
For reference on the identity used, see `[KP2015]`_. Moreover, the
convex conjugate of :math:`f + c` is by definition
.. math::
(f + c)^* (x^*) = f^*(x^*) - c.
References
----------
[KP2015] Komodakis, N, and Pesquet, J-C. *Playing with Duality: An
overview of recent primal-dual approaches for solving large-scale
optimization problems*. IEEE Signal Processing Magazine, 32.6 (2015),
pp 31--54.
.. _[KP2015]: https://arxiv.org/abs/1406.5429 | [
"r",
"Convex",
"conjugate",
"functional",
"of",
"the",
"functional",
"."
] | b8443f6aca90e191ba36c91d32253c5a36249a6c | https://github.com/odlgroup/odl/blob/b8443f6aca90e191ba36c91d32253c5a36249a6c/odl/solvers/functional/functional.py#L1070-L1104 |
231,918 | odlgroup/odl | odl/contrib/fom/unsupervised.py | estimate_noise_std | def estimate_noise_std(img, average=True):
"""Estimate standard deviation of noise in ``img``.
The algorithm, given in [Immerkaer1996], estimates the noise in an image.
Parameters
----------
img : array-like
Array to estimate noise in.
average : bool
If ``True``, return the mean noise in the image, otherwise give a
pointwise estimate.
Returns
-------
noise : float
Examples
--------
Create image with noise 1.0, verify result
>>> img = np.random.randn(10, 10)
>>> result = estimate_noise_std(img) # should be about 1
Also works with higher dimensional arrays
>>> img = np.random.randn(3, 3, 3)
>>> result = estimate_noise_std(img)
The method can also estimate the noise pointwise (but with high
uncertainity):
>>> img = np.random.randn(3, 3, 3)
>>> result = estimate_noise_std(img, average=False)
References
----------
[Immerkaer1996] Immerkaer, J. *Fast Noise Variance Estimation*.
Computer Vision and Image Understanding, 1996.
"""
import scipy.signal
import functools
img = np.asarray(img, dtype='float')
M = functools.reduce(np.add.outer, [[-1, 2, -1]] * img.ndim)
convolved = scipy.signal.fftconvolve(img, M, mode='valid')
if average:
conv_var = np.sum(convolved ** 2) / convolved.size
else:
conv_var = convolved ** 2
# Pad in order to retain shape
conv_var = np.pad(conv_var, pad_width=1, mode='edge')
scale = np.sum(np.square(M))
sigma = np.sqrt(conv_var / scale)
return sigma | python | def estimate_noise_std(img, average=True):
import scipy.signal
import functools
img = np.asarray(img, dtype='float')
M = functools.reduce(np.add.outer, [[-1, 2, -1]] * img.ndim)
convolved = scipy.signal.fftconvolve(img, M, mode='valid')
if average:
conv_var = np.sum(convolved ** 2) / convolved.size
else:
conv_var = convolved ** 2
# Pad in order to retain shape
conv_var = np.pad(conv_var, pad_width=1, mode='edge')
scale = np.sum(np.square(M))
sigma = np.sqrt(conv_var / scale)
return sigma | [
"def",
"estimate_noise_std",
"(",
"img",
",",
"average",
"=",
"True",
")",
":",
"import",
"scipy",
".",
"signal",
"import",
"functools",
"img",
"=",
"np",
".",
"asarray",
"(",
"img",
",",
"dtype",
"=",
"'float'",
")",
"M",
"=",
"functools",
".",
"reduc... | Estimate standard deviation of noise in ``img``.
The algorithm, given in [Immerkaer1996], estimates the noise in an image.
Parameters
----------
img : array-like
Array to estimate noise in.
average : bool
If ``True``, return the mean noise in the image, otherwise give a
pointwise estimate.
Returns
-------
noise : float
Examples
--------
Create image with noise 1.0, verify result
>>> img = np.random.randn(10, 10)
>>> result = estimate_noise_std(img) # should be about 1
Also works with higher dimensional arrays
>>> img = np.random.randn(3, 3, 3)
>>> result = estimate_noise_std(img)
The method can also estimate the noise pointwise (but with high
uncertainity):
>>> img = np.random.randn(3, 3, 3)
>>> result = estimate_noise_std(img, average=False)
References
----------
[Immerkaer1996] Immerkaer, J. *Fast Noise Variance Estimation*.
Computer Vision and Image Understanding, 1996. | [
"Estimate",
"standard",
"deviation",
"of",
"noise",
"in",
"img",
"."
] | b8443f6aca90e191ba36c91d32253c5a36249a6c | https://github.com/odlgroup/odl/blob/b8443f6aca90e191ba36c91d32253c5a36249a6c/odl/contrib/fom/unsupervised.py#L17-L75 |
231,919 | odlgroup/odl | odl/tomo/geometry/conebeam.py | cone_beam_geometry | def cone_beam_geometry(space, src_radius, det_radius, num_angles=None,
short_scan=False, det_shape=None):
r"""Create a default fan or cone beam geometry from ``space``.
This function is intended for simple test cases where users do not
need the full flexibility of the geometries, but simply wants a
geometry that works.
The geometry returned by this function has equidistant angles
that lie (strictly) between 0 and either ``2 * pi`` (full scan)
or ``pi + fan_angle`` (short scan).
The detector is centered around 0, and its size is chosen such that
the whole ``space`` is covered with lines.
The number of angles and detector elements is chosen such that
the resulting sinogram is fully sampled according to the
Nyquist criterion, which in general results in a very large number of
samples. In particular, a ``space`` that is not centered at the origin
can result in very large detectors since the latter is always
origin-centered.
Parameters
----------
space : `DiscreteLp`
Reconstruction space, the space of the volumetric data to be
projected. Must be 2- or 3-dimensional.
src_radius : nonnegative float
Radius of the source circle. Must be larger than the radius of
the smallest vertical cylinder containing ``space.domain``,
i.e., the source must be outside the volume for all rotations.
det_radius : nonnegative float
Radius of the detector circle.
short_scan : bool, optional
Use the minimum required angular range ``[0, pi + fan_angle]``.
For ``True``, the `parker_weighting` should be used in FBP.
By default, the range ``[0, 2 * pi]`` is used.
num_angles : int, optional
Number of angles.
Default: Enough to fully sample the data, see Notes.
det_shape : int or sequence of ints, optional
Number of detector pixels.
Default: Enough to fully sample the data, see Notes.
Returns
-------
geometry : `DivergentBeamGeometry`
Projection geometry with equidistant angles and zero-centered
detector as determined by sampling criteria.
- If ``space`` is 2D, the result is a `FanBeamGeometry`.
- If ``space`` is 3D, the result is a `ConeFlatGeometry`.
Examples
--------
Create a fan beam geometry from a 2d space:
>>> space = odl.uniform_discr([-1, -1], [1, 1], (20, 20))
>>> geometry = cone_beam_geometry(space, src_radius=5, det_radius=5)
>>> geometry.angles.size
78
>>> geometry.detector.size
57
For a short scan geometry (from 0 to ``pi + fan_angle``), the
``short_scan`` flag can be set, resulting in a smaller number of
angles:
>>> geometry = cone_beam_geometry(space, src_radius=5, det_radius=5,
... short_scan=True)
>>> geometry.angles.size
46
If the source is close to the object, the detector becomes larger due
to more magnification:
>>> geometry = cone_beam_geometry(space, src_radius=3, det_radius=9)
>>> geometry.angles.size
80
>>> geometry.detector.size
105
Notes
-----
According to [NW2001]_, pages 75--76, a function
:math:`f : \mathbb{R}^2 \to \mathbb{R}` that has compact support
.. math::
\| x \| > \rho \implies f(x) = 0,
and is essentially bandlimited
.. math::
\| \xi \| > \Omega \implies \hat{f}(\xi) \approx 0,
can be fully reconstructed from a fan beam ray transform with
source-detector distance :math:`r` (assuming all detector
points have the same distance to the source) if (1) the projection
angles are sampled with a spacing of :math:`\Delta \psi` such that
.. math::
\Delta \psi \leq \frac{r + \rho}{r}\, \frac{\pi}{\rho \Omega},
and (2) the detector is sampled with an angular interval
:math:`\Delta \alpha` that satisfies
.. math::
\Delta \alpha \leq \frac{\pi}{r \Omega}.
For a flat detector, the angular interval is smallest in the center
of the fan and largest at the boundaries. The worst-case relation
between the linear and angular sampling intervals are
.. math::
\Delta s = R \Delta \alpha, \quad R^2 = r^2 + (w / 2)^2,
where :math:`w` is the width of the detector.
Thus, to satisfy the angular detector condition one can choose
.. math::
\Delta s \leq \frac{\pi \sqrt{r^2 + (w / 2)^2}}{r \Omega}.
The geometry returned by this function satisfies these conditions exactly.
If the domain is 3-dimensional, a circular cone beam geometry is
created with the third coordinate axis as rotation axis. This does,
of course, not yield complete data, but is equivalent to the
2D fan beam case in the :math:`z = 0` slice. The vertical size of
the detector is chosen such that it covers the object vertically
with rays, using a containing cuboid
:math:`[-\rho, \rho]^2 \times [z_{\mathrm{min}}, z_{\mathrm{min}}]`
to compute the cone angle.
References
----------
.. [NW2001] Natterer, F and Wuebbeling, F.
*Mathematical Methods in Image Reconstruction*.
SIAM, 2001.
https://dx.doi.org/10.1137/1.9780898718324
"""
# Find maximum distance from rotation axis
corners = space.domain.corners()[:, :2]
rho = np.max(np.linalg.norm(corners, axis=1))
# Find default values according to Nyquist criterion.
# We assume that the function is bandlimited by a wave along the x or y
# axis. The highest frequency we can measure is then a standing wave with
# period of twice the inter-node distance.
min_side = min(space.partition.cell_sides[:2])
omega = np.pi / min_side
# Compute minimum width of the detector to cover the object. The relation
# used here is (w/2)/(rs+rd) = rho/rs since both are equal to tan(alpha),
# where alpha is the half fan angle.
rs = float(src_radius)
if (rs <= rho):
raise ValueError('source too close to the object, resulting in '
'infinite detector for full coverage')
rd = float(det_radius)
r = src_radius + det_radius
w = 2 * rho * (rs + rd) / rs
# Compute minimum number of pixels given the constraint on the
# sampling interval and the computed width
rb = np.hypot(r, w / 2) # length of the boundary ray to the flat detector
num_px_horiz = 2 * int(np.ceil(w * omega * r / (2 * np.pi * rb))) + 1
if space.ndim == 2:
det_min_pt = -w / 2
det_max_pt = w / 2
if det_shape is None:
det_shape = num_px_horiz
elif space.ndim == 3:
# Compute number of vertical pixels required to cover the object,
# using the same sampling interval vertically as horizontally.
# The reasoning is the same as for the computation of w.
# Minimum distance of the containing cuboid edges to the source
dist = rs - rho
# Take angle of the rays going through the top and bottom corners
# in that edge
half_cone_angle = max(np.arctan(abs(space.partition.min_pt[2]) / dist),
np.arctan(abs(space.partition.max_pt[2]) / dist))
h = 2 * np.sin(half_cone_angle) * (rs + rd)
# Use the vertical spacing from the reco space, corrected for
# magnification at the "back" of the object, i.e., where it is
# minimal
min_mag = (rs + rd) / (rs + rho)
delta_h = min_mag * space.cell_sides[2]
num_px_vert = int(np.ceil(h / delta_h))
h = num_px_vert * delta_h # make multiple of spacing
det_min_pt = [-w / 2, -h / 2]
det_max_pt = [w / 2, h / 2]
if det_shape is None:
det_shape = [num_px_horiz, num_px_vert]
fan_angle = 2 * np.arctan(rho / rs)
if short_scan:
max_angle = min(np.pi + fan_angle, 2 * np.pi)
else:
max_angle = 2 * np.pi
if num_angles is None:
num_angles = int(np.ceil(max_angle * omega * rho / np.pi
* r / (r + rho)))
angle_partition = uniform_partition(0, max_angle, num_angles)
det_partition = uniform_partition(det_min_pt, det_max_pt, det_shape)
if space.ndim == 2:
return FanBeamGeometry(angle_partition, det_partition,
src_radius, det_radius)
elif space.ndim == 3:
return ConeFlatGeometry(angle_partition, det_partition,
src_radius, det_radius)
else:
raise ValueError('``space.ndim`` must be 2 or 3.') | python | def cone_beam_geometry(space, src_radius, det_radius, num_angles=None,
short_scan=False, det_shape=None):
r"""Create a default fan or cone beam geometry from ``space``.
This function is intended for simple test cases where users do not
need the full flexibility of the geometries, but simply wants a
geometry that works.
The geometry returned by this function has equidistant angles
that lie (strictly) between 0 and either ``2 * pi`` (full scan)
or ``pi + fan_angle`` (short scan).
The detector is centered around 0, and its size is chosen such that
the whole ``space`` is covered with lines.
The number of angles and detector elements is chosen such that
the resulting sinogram is fully sampled according to the
Nyquist criterion, which in general results in a very large number of
samples. In particular, a ``space`` that is not centered at the origin
can result in very large detectors since the latter is always
origin-centered.
Parameters
----------
space : `DiscreteLp`
Reconstruction space, the space of the volumetric data to be
projected. Must be 2- or 3-dimensional.
src_radius : nonnegative float
Radius of the source circle. Must be larger than the radius of
the smallest vertical cylinder containing ``space.domain``,
i.e., the source must be outside the volume for all rotations.
det_radius : nonnegative float
Radius of the detector circle.
short_scan : bool, optional
Use the minimum required angular range ``[0, pi + fan_angle]``.
For ``True``, the `parker_weighting` should be used in FBP.
By default, the range ``[0, 2 * pi]`` is used.
num_angles : int, optional
Number of angles.
Default: Enough to fully sample the data, see Notes.
det_shape : int or sequence of ints, optional
Number of detector pixels.
Default: Enough to fully sample the data, see Notes.
Returns
-------
geometry : `DivergentBeamGeometry`
Projection geometry with equidistant angles and zero-centered
detector as determined by sampling criteria.
- If ``space`` is 2D, the result is a `FanBeamGeometry`.
- If ``space`` is 3D, the result is a `ConeFlatGeometry`.
Examples
--------
Create a fan beam geometry from a 2d space:
>>> space = odl.uniform_discr([-1, -1], [1, 1], (20, 20))
>>> geometry = cone_beam_geometry(space, src_radius=5, det_radius=5)
>>> geometry.angles.size
78
>>> geometry.detector.size
57
For a short scan geometry (from 0 to ``pi + fan_angle``), the
``short_scan`` flag can be set, resulting in a smaller number of
angles:
>>> geometry = cone_beam_geometry(space, src_radius=5, det_radius=5,
... short_scan=True)
>>> geometry.angles.size
46
If the source is close to the object, the detector becomes larger due
to more magnification:
>>> geometry = cone_beam_geometry(space, src_radius=3, det_radius=9)
>>> geometry.angles.size
80
>>> geometry.detector.size
105
Notes
-----
According to [NW2001]_, pages 75--76, a function
:math:`f : \mathbb{R}^2 \to \mathbb{R}` that has compact support
.. math::
\| x \| > \rho \implies f(x) = 0,
and is essentially bandlimited
.. math::
\| \xi \| > \Omega \implies \hat{f}(\xi) \approx 0,
can be fully reconstructed from a fan beam ray transform with
source-detector distance :math:`r` (assuming all detector
points have the same distance to the source) if (1) the projection
angles are sampled with a spacing of :math:`\Delta \psi` such that
.. math::
\Delta \psi \leq \frac{r + \rho}{r}\, \frac{\pi}{\rho \Omega},
and (2) the detector is sampled with an angular interval
:math:`\Delta \alpha` that satisfies
.. math::
\Delta \alpha \leq \frac{\pi}{r \Omega}.
For a flat detector, the angular interval is smallest in the center
of the fan and largest at the boundaries. The worst-case relation
between the linear and angular sampling intervals are
.. math::
\Delta s = R \Delta \alpha, \quad R^2 = r^2 + (w / 2)^2,
where :math:`w` is the width of the detector.
Thus, to satisfy the angular detector condition one can choose
.. math::
\Delta s \leq \frac{\pi \sqrt{r^2 + (w / 2)^2}}{r \Omega}.
The geometry returned by this function satisfies these conditions exactly.
If the domain is 3-dimensional, a circular cone beam geometry is
created with the third coordinate axis as rotation axis. This does,
of course, not yield complete data, but is equivalent to the
2D fan beam case in the :math:`z = 0` slice. The vertical size of
the detector is chosen such that it covers the object vertically
with rays, using a containing cuboid
:math:`[-\rho, \rho]^2 \times [z_{\mathrm{min}}, z_{\mathrm{min}}]`
to compute the cone angle.
References
----------
.. [NW2001] Natterer, F and Wuebbeling, F.
*Mathematical Methods in Image Reconstruction*.
SIAM, 2001.
https://dx.doi.org/10.1137/1.9780898718324
"""
# Find maximum distance from rotation axis
corners = space.domain.corners()[:, :2]
rho = np.max(np.linalg.norm(corners, axis=1))
# Find default values according to Nyquist criterion.
# We assume that the function is bandlimited by a wave along the x or y
# axis. The highest frequency we can measure is then a standing wave with
# period of twice the inter-node distance.
min_side = min(space.partition.cell_sides[:2])
omega = np.pi / min_side
# Compute minimum width of the detector to cover the object. The relation
# used here is (w/2)/(rs+rd) = rho/rs since both are equal to tan(alpha),
# where alpha is the half fan angle.
rs = float(src_radius)
if (rs <= rho):
raise ValueError('source too close to the object, resulting in '
'infinite detector for full coverage')
rd = float(det_radius)
r = src_radius + det_radius
w = 2 * rho * (rs + rd) / rs
# Compute minimum number of pixels given the constraint on the
# sampling interval and the computed width
rb = np.hypot(r, w / 2) # length of the boundary ray to the flat detector
num_px_horiz = 2 * int(np.ceil(w * omega * r / (2 * np.pi * rb))) + 1
if space.ndim == 2:
det_min_pt = -w / 2
det_max_pt = w / 2
if det_shape is None:
det_shape = num_px_horiz
elif space.ndim == 3:
# Compute number of vertical pixels required to cover the object,
# using the same sampling interval vertically as horizontally.
# The reasoning is the same as for the computation of w.
# Minimum distance of the containing cuboid edges to the source
dist = rs - rho
# Take angle of the rays going through the top and bottom corners
# in that edge
half_cone_angle = max(np.arctan(abs(space.partition.min_pt[2]) / dist),
np.arctan(abs(space.partition.max_pt[2]) / dist))
h = 2 * np.sin(half_cone_angle) * (rs + rd)
# Use the vertical spacing from the reco space, corrected for
# magnification at the "back" of the object, i.e., where it is
# minimal
min_mag = (rs + rd) / (rs + rho)
delta_h = min_mag * space.cell_sides[2]
num_px_vert = int(np.ceil(h / delta_h))
h = num_px_vert * delta_h # make multiple of spacing
det_min_pt = [-w / 2, -h / 2]
det_max_pt = [w / 2, h / 2]
if det_shape is None:
det_shape = [num_px_horiz, num_px_vert]
fan_angle = 2 * np.arctan(rho / rs)
if short_scan:
max_angle = min(np.pi + fan_angle, 2 * np.pi)
else:
max_angle = 2 * np.pi
if num_angles is None:
num_angles = int(np.ceil(max_angle * omega * rho / np.pi
* r / (r + rho)))
angle_partition = uniform_partition(0, max_angle, num_angles)
det_partition = uniform_partition(det_min_pt, det_max_pt, det_shape)
if space.ndim == 2:
return FanBeamGeometry(angle_partition, det_partition,
src_radius, det_radius)
elif space.ndim == 3:
return ConeFlatGeometry(angle_partition, det_partition,
src_radius, det_radius)
else:
raise ValueError('``space.ndim`` must be 2 or 3.') | [
"def",
"cone_beam_geometry",
"(",
"space",
",",
"src_radius",
",",
"det_radius",
",",
"num_angles",
"=",
"None",
",",
"short_scan",
"=",
"False",
",",
"det_shape",
"=",
"None",
")",
":",
"# Find maximum distance from rotation axis",
"corners",
"=",
"space",
".",
... | r"""Create a default fan or cone beam geometry from ``space``.
This function is intended for simple test cases where users do not
need the full flexibility of the geometries, but simply wants a
geometry that works.
The geometry returned by this function has equidistant angles
that lie (strictly) between 0 and either ``2 * pi`` (full scan)
or ``pi + fan_angle`` (short scan).
The detector is centered around 0, and its size is chosen such that
the whole ``space`` is covered with lines.
The number of angles and detector elements is chosen such that
the resulting sinogram is fully sampled according to the
Nyquist criterion, which in general results in a very large number of
samples. In particular, a ``space`` that is not centered at the origin
can result in very large detectors since the latter is always
origin-centered.
Parameters
----------
space : `DiscreteLp`
Reconstruction space, the space of the volumetric data to be
projected. Must be 2- or 3-dimensional.
src_radius : nonnegative float
Radius of the source circle. Must be larger than the radius of
the smallest vertical cylinder containing ``space.domain``,
i.e., the source must be outside the volume for all rotations.
det_radius : nonnegative float
Radius of the detector circle.
short_scan : bool, optional
Use the minimum required angular range ``[0, pi + fan_angle]``.
For ``True``, the `parker_weighting` should be used in FBP.
By default, the range ``[0, 2 * pi]`` is used.
num_angles : int, optional
Number of angles.
Default: Enough to fully sample the data, see Notes.
det_shape : int or sequence of ints, optional
Number of detector pixels.
Default: Enough to fully sample the data, see Notes.
Returns
-------
geometry : `DivergentBeamGeometry`
Projection geometry with equidistant angles and zero-centered
detector as determined by sampling criteria.
- If ``space`` is 2D, the result is a `FanBeamGeometry`.
- If ``space`` is 3D, the result is a `ConeFlatGeometry`.
Examples
--------
Create a fan beam geometry from a 2d space:
>>> space = odl.uniform_discr([-1, -1], [1, 1], (20, 20))
>>> geometry = cone_beam_geometry(space, src_radius=5, det_radius=5)
>>> geometry.angles.size
78
>>> geometry.detector.size
57
For a short scan geometry (from 0 to ``pi + fan_angle``), the
``short_scan`` flag can be set, resulting in a smaller number of
angles:
>>> geometry = cone_beam_geometry(space, src_radius=5, det_radius=5,
... short_scan=True)
>>> geometry.angles.size
46
If the source is close to the object, the detector becomes larger due
to more magnification:
>>> geometry = cone_beam_geometry(space, src_radius=3, det_radius=9)
>>> geometry.angles.size
80
>>> geometry.detector.size
105
Notes
-----
According to [NW2001]_, pages 75--76, a function
:math:`f : \mathbb{R}^2 \to \mathbb{R}` that has compact support
.. math::
\| x \| > \rho \implies f(x) = 0,
and is essentially bandlimited
.. math::
\| \xi \| > \Omega \implies \hat{f}(\xi) \approx 0,
can be fully reconstructed from a fan beam ray transform with
source-detector distance :math:`r` (assuming all detector
points have the same distance to the source) if (1) the projection
angles are sampled with a spacing of :math:`\Delta \psi` such that
.. math::
\Delta \psi \leq \frac{r + \rho}{r}\, \frac{\pi}{\rho \Omega},
and (2) the detector is sampled with an angular interval
:math:`\Delta \alpha` that satisfies
.. math::
\Delta \alpha \leq \frac{\pi}{r \Omega}.
For a flat detector, the angular interval is smallest in the center
of the fan and largest at the boundaries. The worst-case relation
between the linear and angular sampling intervals are
.. math::
\Delta s = R \Delta \alpha, \quad R^2 = r^2 + (w / 2)^2,
where :math:`w` is the width of the detector.
Thus, to satisfy the angular detector condition one can choose
.. math::
\Delta s \leq \frac{\pi \sqrt{r^2 + (w / 2)^2}}{r \Omega}.
The geometry returned by this function satisfies these conditions exactly.
If the domain is 3-dimensional, a circular cone beam geometry is
created with the third coordinate axis as rotation axis. This does,
of course, not yield complete data, but is equivalent to the
2D fan beam case in the :math:`z = 0` slice. The vertical size of
the detector is chosen such that it covers the object vertically
with rays, using a containing cuboid
:math:`[-\rho, \rho]^2 \times [z_{\mathrm{min}}, z_{\mathrm{min}}]`
to compute the cone angle.
References
----------
.. [NW2001] Natterer, F and Wuebbeling, F.
*Mathematical Methods in Image Reconstruction*.
SIAM, 2001.
https://dx.doi.org/10.1137/1.9780898718324 | [
"r",
"Create",
"a",
"default",
"fan",
"or",
"cone",
"beam",
"geometry",
"from",
"space",
"."
] | b8443f6aca90e191ba36c91d32253c5a36249a6c | https://github.com/odlgroup/odl/blob/b8443f6aca90e191ba36c91d32253c5a36249a6c/odl/tomo/geometry/conebeam.py#L1245-L1463 |
231,920 | odlgroup/odl | odl/tomo/geometry/conebeam.py | helical_geometry | def helical_geometry(space, src_radius, det_radius, num_turns,
n_pi=1, num_angles=None, det_shape=None):
"""Create a default helical geometry from ``space``.
This function is intended for simple test cases where users do not
need the full flexibility of the geometries, but simply wants a
geometry that works.
The geometry returned by this function has equidistant angles
that lie (strictly) between 0 and ``2 * pi * num_turns``.
The detector is centered around 0, and its size is chosen such that
the whole ``space`` is covered with lines.
The number of angles and detector elements is chosen such that
the resulting sinogram is fully sampled according to the
Nyquist criterion, which in general results in a very large number of
samples. In particular, a ``space`` that is not centered at the origin
can result in very large detectors since the latter is always
origin-centered.
Parameters
----------
space : `DiscreteLp`
Reconstruction space, the space of the volumetric data to be
projected. Must be 3-dimensional.
src_radius : nonnegative float
Radius of the source circle. Must be larger than the radius of
the smallest vertical cylinder containing ``space.domain``,
i.e., the source must be outside the volume for all rotations.
det_radius : nonnegative float
Radius of the detector circle.
num_turns : positive float
Total number of helical turns.
num_angles : int, optional
Number of angles.
Default: Enough to fully sample the data, see Notes.
n_pi : odd int, optional
Total number of half rotations to include in the window. Values larger
than 1 should be used if the pitch is much smaller than the detector
height.
det_shape : int or sequence of ints, optional
Number of detector pixels.
Default: Enough to fully sample the data, see Notes.
Returns
-------
geometry : `ConeFlatGeometry`
Projection geometry with equidistant angles and zero-centered
detector as determined by sampling criteria.
Examples
--------
Create a helical beam geometry from space:
>>> space = odl.uniform_discr([-1, -1, -1], [1, 1, 1], (20, 20, 20))
>>> geometry = helical_geometry(space, src_radius=5, det_radius=5,
... num_turns=3)
>>> geometry.angles.size
234
>>> geometry.detector.shape
(57, 9)
Notes
-----
In the "fan beam direction", the sampling exactly follows the
two-dimensional case see `cone_beam_geometry` for a description.
In the "axial direction", e.g. along the [0, 0, 1] axis, the geometry is
sampled according to two criteria. First, the bounds of the detector
are chosen to satisfy the tuy condition.
See `[TSS1998]`_ for a full description.
Second, the sampling rate is selected according to the nyquist criterion
to give a full sampling. This is done by sampling such that the pixel
size is half of the size of the projection of the smallest voxel onto the
detector.
References
----------
[TSS1998] Tam, K C, Samarasekera, S and Sauer, F.
*Exact cone beam CT with a spiral scan*.
Physics in Medicine & Biology 4 (1998), p 1015.
.. _[TSS1998]: https://dx.doi.org/10.1088/0031-9155/43/4/028
"""
# Find maximum distance from rotation axis
corners = space.domain.corners()[:, :2]
rho = np.max(np.linalg.norm(corners, axis=1))
offset_along_axis = space.partition.min_pt[2]
pitch = space.partition.extent[2] / num_turns
# Find default values according to Nyquist criterion.
# We assume that the function is bandlimited by a wave along the x or y
# axis. The highest frequency we can measure is then a standing wave with
# period of twice the inter-node distance.
min_side = min(space.partition.cell_sides[:2])
omega = np.pi / min_side
# Compute minimum width of the detector to cover the object. The relation
# used here is (w/2)/(rs+rd) = rho/rs since both are equal to tan(alpha),
# where alpha is the half fan angle.
rs = float(src_radius)
if (rs <= rho):
raise ValueError('source too close to the object, resulting in '
'infinite detector for full coverage')
rd = float(det_radius)
r = rs + rd
w = 2 * rho * (rs + rd) / rs
# Compute minimum number of pixels given the constraint on the
# sampling interval and the computed width
rb = np.hypot(r, w / 2) # length of the boundary ray to the flat detector
num_px_horiz = 2 * int(np.ceil(w * omega * r / (2 * np.pi * rb))) + 1
# Compute lower and upper bound needed to fully sample the object.
# In particular, since in a helical geometry several turns are used,
# this is selected so that the field of view of two opposing projections,
# separated by theta = 180 deg, overlap, but as little as possible.
# See `tam_danielson_window` for more information.
h_axis = (pitch / (2 * np.pi) *
(1 + (-rho / src_radius) ** 2) *
(n_pi * np.pi / 2.0 - np.arctan(-rho / src_radius)))
h = 2 * h_axis * (rs + rd) / rs
# Compute number of pixels
min_mag = r / rs
dh = 0.5 * space.partition.cell_sides[2] * min_mag
num_px_vert = int(np.ceil(h / dh))
det_min_pt = [-w / 2, -h / 2]
det_max_pt = [w / 2, h / 2]
if det_shape is None:
det_shape = [num_px_horiz, num_px_vert]
max_angle = 2 * np.pi * num_turns
if num_angles is None:
num_angles = int(np.ceil(max_angle * omega * rho / np.pi
* r / (r + rho)))
angle_partition = uniform_partition(0, max_angle, num_angles)
det_partition = uniform_partition(det_min_pt, det_max_pt, det_shape)
return ConeFlatGeometry(angle_partition, det_partition,
src_radius, det_radius,
offset_along_axis=offset_along_axis,
pitch=pitch) | python | def helical_geometry(space, src_radius, det_radius, num_turns,
n_pi=1, num_angles=None, det_shape=None):
# Find maximum distance from rotation axis
corners = space.domain.corners()[:, :2]
rho = np.max(np.linalg.norm(corners, axis=1))
offset_along_axis = space.partition.min_pt[2]
pitch = space.partition.extent[2] / num_turns
# Find default values according to Nyquist criterion.
# We assume that the function is bandlimited by a wave along the x or y
# axis. The highest frequency we can measure is then a standing wave with
# period of twice the inter-node distance.
min_side = min(space.partition.cell_sides[:2])
omega = np.pi / min_side
# Compute minimum width of the detector to cover the object. The relation
# used here is (w/2)/(rs+rd) = rho/rs since both are equal to tan(alpha),
# where alpha is the half fan angle.
rs = float(src_radius)
if (rs <= rho):
raise ValueError('source too close to the object, resulting in '
'infinite detector for full coverage')
rd = float(det_radius)
r = rs + rd
w = 2 * rho * (rs + rd) / rs
# Compute minimum number of pixels given the constraint on the
# sampling interval and the computed width
rb = np.hypot(r, w / 2) # length of the boundary ray to the flat detector
num_px_horiz = 2 * int(np.ceil(w * omega * r / (2 * np.pi * rb))) + 1
# Compute lower and upper bound needed to fully sample the object.
# In particular, since in a helical geometry several turns are used,
# this is selected so that the field of view of two opposing projections,
# separated by theta = 180 deg, overlap, but as little as possible.
# See `tam_danielson_window` for more information.
h_axis = (pitch / (2 * np.pi) *
(1 + (-rho / src_radius) ** 2) *
(n_pi * np.pi / 2.0 - np.arctan(-rho / src_radius)))
h = 2 * h_axis * (rs + rd) / rs
# Compute number of pixels
min_mag = r / rs
dh = 0.5 * space.partition.cell_sides[2] * min_mag
num_px_vert = int(np.ceil(h / dh))
det_min_pt = [-w / 2, -h / 2]
det_max_pt = [w / 2, h / 2]
if det_shape is None:
det_shape = [num_px_horiz, num_px_vert]
max_angle = 2 * np.pi * num_turns
if num_angles is None:
num_angles = int(np.ceil(max_angle * omega * rho / np.pi
* r / (r + rho)))
angle_partition = uniform_partition(0, max_angle, num_angles)
det_partition = uniform_partition(det_min_pt, det_max_pt, det_shape)
return ConeFlatGeometry(angle_partition, det_partition,
src_radius, det_radius,
offset_along_axis=offset_along_axis,
pitch=pitch) | [
"def",
"helical_geometry",
"(",
"space",
",",
"src_radius",
",",
"det_radius",
",",
"num_turns",
",",
"n_pi",
"=",
"1",
",",
"num_angles",
"=",
"None",
",",
"det_shape",
"=",
"None",
")",
":",
"# Find maximum distance from rotation axis",
"corners",
"=",
"space"... | Create a default helical geometry from ``space``.
This function is intended for simple test cases where users do not
need the full flexibility of the geometries, but simply wants a
geometry that works.
The geometry returned by this function has equidistant angles
that lie (strictly) between 0 and ``2 * pi * num_turns``.
The detector is centered around 0, and its size is chosen such that
the whole ``space`` is covered with lines.
The number of angles and detector elements is chosen such that
the resulting sinogram is fully sampled according to the
Nyquist criterion, which in general results in a very large number of
samples. In particular, a ``space`` that is not centered at the origin
can result in very large detectors since the latter is always
origin-centered.
Parameters
----------
space : `DiscreteLp`
Reconstruction space, the space of the volumetric data to be
projected. Must be 3-dimensional.
src_radius : nonnegative float
Radius of the source circle. Must be larger than the radius of
the smallest vertical cylinder containing ``space.domain``,
i.e., the source must be outside the volume for all rotations.
det_radius : nonnegative float
Radius of the detector circle.
num_turns : positive float
Total number of helical turns.
num_angles : int, optional
Number of angles.
Default: Enough to fully sample the data, see Notes.
n_pi : odd int, optional
Total number of half rotations to include in the window. Values larger
than 1 should be used if the pitch is much smaller than the detector
height.
det_shape : int or sequence of ints, optional
Number of detector pixels.
Default: Enough to fully sample the data, see Notes.
Returns
-------
geometry : `ConeFlatGeometry`
Projection geometry with equidistant angles and zero-centered
detector as determined by sampling criteria.
Examples
--------
Create a helical beam geometry from space:
>>> space = odl.uniform_discr([-1, -1, -1], [1, 1, 1], (20, 20, 20))
>>> geometry = helical_geometry(space, src_radius=5, det_radius=5,
... num_turns=3)
>>> geometry.angles.size
234
>>> geometry.detector.shape
(57, 9)
Notes
-----
In the "fan beam direction", the sampling exactly follows the
two-dimensional case see `cone_beam_geometry` for a description.
In the "axial direction", e.g. along the [0, 0, 1] axis, the geometry is
sampled according to two criteria. First, the bounds of the detector
are chosen to satisfy the tuy condition.
See `[TSS1998]`_ for a full description.
Second, the sampling rate is selected according to the nyquist criterion
to give a full sampling. This is done by sampling such that the pixel
size is half of the size of the projection of the smallest voxel onto the
detector.
References
----------
[TSS1998] Tam, K C, Samarasekera, S and Sauer, F.
*Exact cone beam CT with a spiral scan*.
Physics in Medicine & Biology 4 (1998), p 1015.
.. _[TSS1998]: https://dx.doi.org/10.1088/0031-9155/43/4/028 | [
"Create",
"a",
"default",
"helical",
"geometry",
"from",
"space",
"."
] | b8443f6aca90e191ba36c91d32253c5a36249a6c | https://github.com/odlgroup/odl/blob/b8443f6aca90e191ba36c91d32253c5a36249a6c/odl/tomo/geometry/conebeam.py#L1466-L1614 |
231,921 | odlgroup/odl | odl/tomo/geometry/conebeam.py | FanBeamGeometry.frommatrix | def frommatrix(cls, apart, dpart, src_radius, det_radius, init_matrix,
det_curvature_radius=None, **kwargs):
"""Create an instance of `FanBeamGeometry` using a matrix.
This alternative constructor uses a matrix to rotate and
translate the default configuration. It is most useful when
the transformation to be applied is already given as a matrix.
Parameters
----------
apart : 1-dim. `RectPartition`
Partition of the angle interval.
dpart : 1-dim. `RectPartition`
Partition of the detector parameter interval.
src_radius : nonnegative float
Radius of the source circle.
det_radius : nonnegative float
Radius of the detector circle. Must be nonzero if ``src_radius``
is zero.
init_matrix : `array_like`, shape ``(2, 2)`` or ``(2, 3)``, optional
Transformation matrix whose left ``(2, 2)`` block is multiplied
with the default ``det_pos_init`` and ``det_axis_init`` to
determine the new vectors. If present, the third column acts
as a translation after the initial transformation.
The resulting ``det_axis_init`` will be normalized.
det_curvature_radius : nonnegative float, optional
Radius of the detector curvature.
If ``None``, flat detector is used, otherwise must be positive.
kwargs :
Further keyword arguments passed to the class constructor.
Returns
-------
geometry : `FanBeamGeometry`
Examples
--------
Mirror the second unit vector, creating a left-handed system:
>>> apart = odl.uniform_partition(0, np.pi, 10)
>>> dpart = odl.uniform_partition(-1, 1, 20)
>>> matrix = np.array([[1, 0],
... [0, -1]])
>>> geom = FanBeamGeometry.frommatrix(
... apart, dpart, src_radius=1, det_radius=5, init_matrix=matrix)
>>> geom.det_refpoint(0)
array([ 0., -5.])
>>> geom.det_axis_init
array([ 1., 0.])
>>> geom.translation
array([ 0., 0.])
Adding a translation with a third matrix column:
>>> matrix = np.array([[1, 0, 1],
... [0, -1, 1]])
>>> geom = FanBeamGeometry.frommatrix(
... apart, dpart, src_radius=1, det_radius=5, init_matrix=matrix)
>>> geom.translation
array([ 1., 1.])
>>> geom.det_refpoint(0) # (0, -5) + (1, 1)
array([ 1., -4.])
"""
# Get transformation and translation parts from `init_matrix`
init_matrix = np.asarray(init_matrix, dtype=float)
if init_matrix.shape not in ((2, 2), (2, 3)):
raise ValueError('`matrix` must have shape (2, 2) or (2, 3), '
'got array with shape {}'
''.format(init_matrix.shape))
trafo_matrix = init_matrix[:, :2]
translation = init_matrix[:, 2:].squeeze()
# Transform the default vectors
default_src_to_det_init = cls._default_config['src_to_det_init']
default_det_axis_init = cls._default_config['det_axis_init']
vecs_to_transform = [default_det_axis_init]
transformed_vecs = transform_system(
default_src_to_det_init, None, vecs_to_transform,
matrix=trafo_matrix)
# Use the standard constructor with these vectors
src_to_det, det_axis = transformed_vecs
if translation.size != 0:
kwargs['translation'] = translation
return cls(apart, dpart, src_radius, det_radius, det_curvature_radius,
src_to_det, det_axis_init=det_axis, **kwargs) | python | def frommatrix(cls, apart, dpart, src_radius, det_radius, init_matrix,
det_curvature_radius=None, **kwargs):
# Get transformation and translation parts from `init_matrix`
init_matrix = np.asarray(init_matrix, dtype=float)
if init_matrix.shape not in ((2, 2), (2, 3)):
raise ValueError('`matrix` must have shape (2, 2) or (2, 3), '
'got array with shape {}'
''.format(init_matrix.shape))
trafo_matrix = init_matrix[:, :2]
translation = init_matrix[:, 2:].squeeze()
# Transform the default vectors
default_src_to_det_init = cls._default_config['src_to_det_init']
default_det_axis_init = cls._default_config['det_axis_init']
vecs_to_transform = [default_det_axis_init]
transformed_vecs = transform_system(
default_src_to_det_init, None, vecs_to_transform,
matrix=trafo_matrix)
# Use the standard constructor with these vectors
src_to_det, det_axis = transformed_vecs
if translation.size != 0:
kwargs['translation'] = translation
return cls(apart, dpart, src_radius, det_radius, det_curvature_radius,
src_to_det, det_axis_init=det_axis, **kwargs) | [
"def",
"frommatrix",
"(",
"cls",
",",
"apart",
",",
"dpart",
",",
"src_radius",
",",
"det_radius",
",",
"init_matrix",
",",
"det_curvature_radius",
"=",
"None",
",",
"*",
"*",
"kwargs",
")",
":",
"# Get transformation and translation parts from `init_matrix`",
"init... | Create an instance of `FanBeamGeometry` using a matrix.
This alternative constructor uses a matrix to rotate and
translate the default configuration. It is most useful when
the transformation to be applied is already given as a matrix.
Parameters
----------
apart : 1-dim. `RectPartition`
Partition of the angle interval.
dpart : 1-dim. `RectPartition`
Partition of the detector parameter interval.
src_radius : nonnegative float
Radius of the source circle.
det_radius : nonnegative float
Radius of the detector circle. Must be nonzero if ``src_radius``
is zero.
init_matrix : `array_like`, shape ``(2, 2)`` or ``(2, 3)``, optional
Transformation matrix whose left ``(2, 2)`` block is multiplied
with the default ``det_pos_init`` and ``det_axis_init`` to
determine the new vectors. If present, the third column acts
as a translation after the initial transformation.
The resulting ``det_axis_init`` will be normalized.
det_curvature_radius : nonnegative float, optional
Radius of the detector curvature.
If ``None``, flat detector is used, otherwise must be positive.
kwargs :
Further keyword arguments passed to the class constructor.
Returns
-------
geometry : `FanBeamGeometry`
Examples
--------
Mirror the second unit vector, creating a left-handed system:
>>> apart = odl.uniform_partition(0, np.pi, 10)
>>> dpart = odl.uniform_partition(-1, 1, 20)
>>> matrix = np.array([[1, 0],
... [0, -1]])
>>> geom = FanBeamGeometry.frommatrix(
... apart, dpart, src_radius=1, det_radius=5, init_matrix=matrix)
>>> geom.det_refpoint(0)
array([ 0., -5.])
>>> geom.det_axis_init
array([ 1., 0.])
>>> geom.translation
array([ 0., 0.])
Adding a translation with a third matrix column:
>>> matrix = np.array([[1, 0, 1],
... [0, -1, 1]])
>>> geom = FanBeamGeometry.frommatrix(
... apart, dpart, src_radius=1, det_radius=5, init_matrix=matrix)
>>> geom.translation
array([ 1., 1.])
>>> geom.det_refpoint(0) # (0, -5) + (1, 1)
array([ 1., -4.]) | [
"Create",
"an",
"instance",
"of",
"FanBeamGeometry",
"using",
"a",
"matrix",
"."
] | b8443f6aca90e191ba36c91d32253c5a36249a6c | https://github.com/odlgroup/odl/blob/b8443f6aca90e191ba36c91d32253c5a36249a6c/odl/tomo/geometry/conebeam.py#L239-L325 |
231,922 | odlgroup/odl | odl/tomo/geometry/conebeam.py | FanBeamGeometry.src_position | def src_position(self, angle):
"""Return the source position at ``angle``.
For an angle ``phi``, the source position is given by ::
src(phi) = translation +
rot_matrix(phi) * (-src_rad * src_to_det_init)
where ``src_to_det_init`` is the initial unit vector pointing
from source to detector.
Parameters
----------
angle : float or `array-like`
Angle(s) in radians describing the counter-clockwise
rotation of source and detector.
Returns
-------
pos : `numpy.ndarray`
Vector(s) pointing from the origin to the source.
If ``angle`` is a single parameter, the returned array has
shape ``(2,)``, otherwise ``angle.shape + (2,)``.
See Also
--------
det_refpoint
Examples
--------
With default arguments, the source starts at ``src_rad * (-e_y)``
and rotates to ``src_rad * e_x`` at 90 degrees:
>>> apart = odl.uniform_partition(0, 2 * np.pi, 10)
>>> dpart = odl.uniform_partition(-1, 1, 20)
>>> geom = FanBeamGeometry(apart, dpart, src_radius=2, det_radius=5)
>>> geom.src_position(0)
array([ 0., -2.])
>>> np.allclose(geom.src_position(np.pi / 2), [2, 0])
True
The method is vectorized, i.e., it can be called with multiple
angles at once:
>>> points = geom.src_position([0, np.pi / 2])
>>> np.allclose(points[0], [0, -2])
True
>>> np.allclose(points[1], [2, 0])
True
"""
squeeze_out = (np.shape(angle) == ())
angle = np.array(angle, dtype=float, copy=False, ndmin=1)
# Initial vector from the rotation center to the source. It can be
# computed this way since source and detector are at maximum distance,
# i.e. the connecting line passes the origin.
center_to_src_init = -self.src_radius * self.src_to_det_init
pos_vec = (self.translation[None, :]
+ self.rotation_matrix(angle).dot(center_to_src_init))
if squeeze_out:
pos_vec = pos_vec.squeeze()
return pos_vec | python | def src_position(self, angle):
squeeze_out = (np.shape(angle) == ())
angle = np.array(angle, dtype=float, copy=False, ndmin=1)
# Initial vector from the rotation center to the source. It can be
# computed this way since source and detector are at maximum distance,
# i.e. the connecting line passes the origin.
center_to_src_init = -self.src_radius * self.src_to_det_init
pos_vec = (self.translation[None, :]
+ self.rotation_matrix(angle).dot(center_to_src_init))
if squeeze_out:
pos_vec = pos_vec.squeeze()
return pos_vec | [
"def",
"src_position",
"(",
"self",
",",
"angle",
")",
":",
"squeeze_out",
"=",
"(",
"np",
".",
"shape",
"(",
"angle",
")",
"==",
"(",
")",
")",
"angle",
"=",
"np",
".",
"array",
"(",
"angle",
",",
"dtype",
"=",
"float",
",",
"copy",
"=",
"False"... | Return the source position at ``angle``.
For an angle ``phi``, the source position is given by ::
src(phi) = translation +
rot_matrix(phi) * (-src_rad * src_to_det_init)
where ``src_to_det_init`` is the initial unit vector pointing
from source to detector.
Parameters
----------
angle : float or `array-like`
Angle(s) in radians describing the counter-clockwise
rotation of source and detector.
Returns
-------
pos : `numpy.ndarray`
Vector(s) pointing from the origin to the source.
If ``angle`` is a single parameter, the returned array has
shape ``(2,)``, otherwise ``angle.shape + (2,)``.
See Also
--------
det_refpoint
Examples
--------
With default arguments, the source starts at ``src_rad * (-e_y)``
and rotates to ``src_rad * e_x`` at 90 degrees:
>>> apart = odl.uniform_partition(0, 2 * np.pi, 10)
>>> dpart = odl.uniform_partition(-1, 1, 20)
>>> geom = FanBeamGeometry(apart, dpart, src_radius=2, det_radius=5)
>>> geom.src_position(0)
array([ 0., -2.])
>>> np.allclose(geom.src_position(np.pi / 2), [2, 0])
True
The method is vectorized, i.e., it can be called with multiple
angles at once:
>>> points = geom.src_position([0, np.pi / 2])
>>> np.allclose(points[0], [0, -2])
True
>>> np.allclose(points[1], [2, 0])
True | [
"Return",
"the",
"source",
"position",
"at",
"angle",
"."
] | b8443f6aca90e191ba36c91d32253c5a36249a6c | https://github.com/odlgroup/odl/blob/b8443f6aca90e191ba36c91d32253c5a36249a6c/odl/tomo/geometry/conebeam.py#L361-L423 |
231,923 | odlgroup/odl | odl/tomo/geometry/conebeam.py | ConeFlatGeometry.frommatrix | def frommatrix(cls, apart, dpart, src_radius, det_radius, init_matrix,
pitch=0, **kwargs):
"""Create an instance of `ConeFlatGeometry` using a matrix.
This alternative constructor uses a matrix to rotate and
translate the default configuration. It is most useful when
the transformation to be applied is already given as a matrix.
Parameters
----------
apart : 1-dim. `RectPartition`
Partition of the parameter interval.
dpart : 2-dim. `RectPartition`
Partition of the detector parameter set.
src_radius : nonnegative float
Radius of the source circle.
det_radius : nonnegative float
Radius of the detector circle. Must be nonzero if ``src_radius``
is zero.
init_matrix : `array_like`, shape ``(3, 3)`` or ``(3, 4)``, optional
Transformation matrix whose left ``(3, 3)`` block is multiplied
with the default ``det_pos_init`` and ``det_axes_init`` to
determine the new vectors. If present, the fourth column acts
as a translation after the initial transformation.
The resulting ``det_axes_init`` will be normalized.
pitch : float, optional
Constant distance along the rotation axis that a point on the
helix traverses when increasing the angle parameter by
``2 * pi``. The default case ``pitch=0`` results in a circular
cone beam geometry.
kwargs :
Further keyword arguments passed to the class constructor.
Returns
-------
geometry : `ConeFlatGeometry`
Examples
--------
Map unit vectors ``e_y -> e_z`` and ``e_z -> -e_y``, keeping the
right-handedness:
>>> apart = odl.uniform_partition(0, 2 * np.pi, 10)
>>> dpart = odl.uniform_partition([-1, -1], [1, 1], (20, 20))
>>> matrix = np.array([[1, 0, 0],
... [0, 0, -1],
... [0, 1, 0]])
>>> geom = ConeFlatGeometry.frommatrix(
... apart, dpart, src_radius=5, det_radius=10, pitch=2,
... init_matrix=matrix)
>>> geom.axis
array([ 0., -1., 0.])
>>> geom.src_to_det_init
array([ 0., 0., 1.])
>>> geom.det_axes_init
array([[ 1., 0., 0.],
[ 0., -1., 0.]])
Adding a translation with a fourth matrix column:
>>> matrix = np.array([[0, 0, -1, 0],
... [0, 1, 0, 1],
... [1, 0, 0, 1]])
>>> geom = ConeFlatGeometry.frommatrix(
... apart, dpart, src_radius=5, det_radius=10, pitch=2,
... init_matrix=matrix)
>>> geom.translation
array([ 0., 1., 1.])
>>> geom.det_refpoint(0) # (0, 10, 0) + (0, 1, 1)
array([ 0., 11., 1.])
"""
for key in ('axis', 'src_to_det_init', 'det_axes_init', 'translation'):
if key in kwargs:
raise TypeError('got unknown keyword argument {!r}'
''.format(key))
# Get transformation and translation parts from `init_matrix`
init_matrix = np.asarray(init_matrix, dtype=float)
if init_matrix.shape not in ((3, 3), (3, 4)):
raise ValueError('`matrix` must have shape (3, 3) or (3, 4), '
'got array with shape {}'
''.format(init_matrix.shape))
trafo_matrix = init_matrix[:, :3]
translation = init_matrix[:, 3:].squeeze()
# Transform the default vectors
default_axis = cls._default_config['axis']
default_src_to_det_init = cls._default_config['src_to_det_init']
default_det_axes_init = cls._default_config['det_axes_init']
vecs_to_transform = (default_src_to_det_init,) + default_det_axes_init
transformed_vecs = transform_system(
default_axis, None, vecs_to_transform, matrix=trafo_matrix)
# Use the standard constructor with these vectors
axis, src_to_det, det_axis_0, det_axis_1 = transformed_vecs
if translation.size == 0:
pass
else:
kwargs['translation'] = translation
return cls(apart, dpart, src_radius, det_radius, pitch, axis,
src_to_det_init=src_to_det,
det_axes_init=[det_axis_0, det_axis_1],
**kwargs) | python | def frommatrix(cls, apart, dpart, src_radius, det_radius, init_matrix,
pitch=0, **kwargs):
for key in ('axis', 'src_to_det_init', 'det_axes_init', 'translation'):
if key in kwargs:
raise TypeError('got unknown keyword argument {!r}'
''.format(key))
# Get transformation and translation parts from `init_matrix`
init_matrix = np.asarray(init_matrix, dtype=float)
if init_matrix.shape not in ((3, 3), (3, 4)):
raise ValueError('`matrix` must have shape (3, 3) or (3, 4), '
'got array with shape {}'
''.format(init_matrix.shape))
trafo_matrix = init_matrix[:, :3]
translation = init_matrix[:, 3:].squeeze()
# Transform the default vectors
default_axis = cls._default_config['axis']
default_src_to_det_init = cls._default_config['src_to_det_init']
default_det_axes_init = cls._default_config['det_axes_init']
vecs_to_transform = (default_src_to_det_init,) + default_det_axes_init
transformed_vecs = transform_system(
default_axis, None, vecs_to_transform, matrix=trafo_matrix)
# Use the standard constructor with these vectors
axis, src_to_det, det_axis_0, det_axis_1 = transformed_vecs
if translation.size == 0:
pass
else:
kwargs['translation'] = translation
return cls(apart, dpart, src_radius, det_radius, pitch, axis,
src_to_det_init=src_to_det,
det_axes_init=[det_axis_0, det_axis_1],
**kwargs) | [
"def",
"frommatrix",
"(",
"cls",
",",
"apart",
",",
"dpart",
",",
"src_radius",
",",
"det_radius",
",",
"init_matrix",
",",
"pitch",
"=",
"0",
",",
"*",
"*",
"kwargs",
")",
":",
"for",
"key",
"in",
"(",
"'axis'",
",",
"'src_to_det_init'",
",",
"'det_ax... | Create an instance of `ConeFlatGeometry` using a matrix.
This alternative constructor uses a matrix to rotate and
translate the default configuration. It is most useful when
the transformation to be applied is already given as a matrix.
Parameters
----------
apart : 1-dim. `RectPartition`
Partition of the parameter interval.
dpart : 2-dim. `RectPartition`
Partition of the detector parameter set.
src_radius : nonnegative float
Radius of the source circle.
det_radius : nonnegative float
Radius of the detector circle. Must be nonzero if ``src_radius``
is zero.
init_matrix : `array_like`, shape ``(3, 3)`` or ``(3, 4)``, optional
Transformation matrix whose left ``(3, 3)`` block is multiplied
with the default ``det_pos_init`` and ``det_axes_init`` to
determine the new vectors. If present, the fourth column acts
as a translation after the initial transformation.
The resulting ``det_axes_init`` will be normalized.
pitch : float, optional
Constant distance along the rotation axis that a point on the
helix traverses when increasing the angle parameter by
``2 * pi``. The default case ``pitch=0`` results in a circular
cone beam geometry.
kwargs :
Further keyword arguments passed to the class constructor.
Returns
-------
geometry : `ConeFlatGeometry`
Examples
--------
Map unit vectors ``e_y -> e_z`` and ``e_z -> -e_y``, keeping the
right-handedness:
>>> apart = odl.uniform_partition(0, 2 * np.pi, 10)
>>> dpart = odl.uniform_partition([-1, -1], [1, 1], (20, 20))
>>> matrix = np.array([[1, 0, 0],
... [0, 0, -1],
... [0, 1, 0]])
>>> geom = ConeFlatGeometry.frommatrix(
... apart, dpart, src_radius=5, det_radius=10, pitch=2,
... init_matrix=matrix)
>>> geom.axis
array([ 0., -1., 0.])
>>> geom.src_to_det_init
array([ 0., 0., 1.])
>>> geom.det_axes_init
array([[ 1., 0., 0.],
[ 0., -1., 0.]])
Adding a translation with a fourth matrix column:
>>> matrix = np.array([[0, 0, -1, 0],
... [0, 1, 0, 1],
... [1, 0, 0, 1]])
>>> geom = ConeFlatGeometry.frommatrix(
... apart, dpart, src_radius=5, det_radius=10, pitch=2,
... init_matrix=matrix)
>>> geom.translation
array([ 0., 1., 1.])
>>> geom.det_refpoint(0) # (0, 10, 0) + (0, 1, 1)
array([ 0., 11., 1.]) | [
"Create",
"an",
"instance",
"of",
"ConeFlatGeometry",
"using",
"a",
"matrix",
"."
] | b8443f6aca90e191ba36c91d32253c5a36249a6c | https://github.com/odlgroup/odl/blob/b8443f6aca90e191ba36c91d32253c5a36249a6c/odl/tomo/geometry/conebeam.py#L823-L926 |
231,924 | odlgroup/odl | odl/deform/linearized.py | linear_deform | def linear_deform(template, displacement, out=None):
"""Linearized deformation of a template with a displacement field.
The function maps a given template ``I`` and a given displacement
field ``v`` to the new function ``x --> I(x + v(x))``.
Parameters
----------
template : `DiscreteLpElement`
Template to be deformed by a displacement field.
displacement : element of power space of ``template.space``
Vector field (displacement field) used to deform the
template.
out : `numpy.ndarray`, optional
Array to which the function values of the deformed template
are written. It must have the same shape as ``template`` and
a data type compatible with ``template.dtype``.
Returns
-------
deformed_template : `numpy.ndarray`
Function values of the deformed template. If ``out`` was given,
the returned object is a reference to it.
Examples
--------
Create a simple 1D template to initialize the operator and
apply it to a displacement field. Where the displacement is zero,
the output value is the same as the input value.
In the 4-th point, the value is taken from 0.2 (one cell) to the
left, i.e. 1.0.
>>> space = odl.uniform_discr(0, 1, 5)
>>> disp_field_space = space.tangent_bundle
>>> template = space.element([0, 0, 1, 0, 0])
>>> displacement_field = disp_field_space.element([[0, 0, 0, -0.2, 0]])
>>> linear_deform(template, displacement_field)
array([ 0., 0., 1., 1., 0.])
The result depends on the chosen interpolation. With 'linear'
interpolation and an offset equal to half the distance between two
points, 0.1, one gets the mean of the values.
>>> space = odl.uniform_discr(0, 1, 5, interp='linear')
>>> disp_field_space = space.tangent_bundle
>>> template = space.element([0, 0, 1, 0, 0])
>>> displacement_field = disp_field_space.element([[0, 0, 0, -0.1, 0]])
>>> linear_deform(template, displacement_field)
array([ 0. , 0. , 1. , 0.5, 0. ])
"""
image_pts = template.space.points()
for i, vi in enumerate(displacement):
image_pts[:, i] += vi.asarray().ravel()
values = template.interpolation(image_pts.T, out=out, bounds_check=False)
return values.reshape(template.space.shape) | python | def linear_deform(template, displacement, out=None):
image_pts = template.space.points()
for i, vi in enumerate(displacement):
image_pts[:, i] += vi.asarray().ravel()
values = template.interpolation(image_pts.T, out=out, bounds_check=False)
return values.reshape(template.space.shape) | [
"def",
"linear_deform",
"(",
"template",
",",
"displacement",
",",
"out",
"=",
"None",
")",
":",
"image_pts",
"=",
"template",
".",
"space",
".",
"points",
"(",
")",
"for",
"i",
",",
"vi",
"in",
"enumerate",
"(",
"displacement",
")",
":",
"image_pts",
... | Linearized deformation of a template with a displacement field.
The function maps a given template ``I`` and a given displacement
field ``v`` to the new function ``x --> I(x + v(x))``.
Parameters
----------
template : `DiscreteLpElement`
Template to be deformed by a displacement field.
displacement : element of power space of ``template.space``
Vector field (displacement field) used to deform the
template.
out : `numpy.ndarray`, optional
Array to which the function values of the deformed template
are written. It must have the same shape as ``template`` and
a data type compatible with ``template.dtype``.
Returns
-------
deformed_template : `numpy.ndarray`
Function values of the deformed template. If ``out`` was given,
the returned object is a reference to it.
Examples
--------
Create a simple 1D template to initialize the operator and
apply it to a displacement field. Where the displacement is zero,
the output value is the same as the input value.
In the 4-th point, the value is taken from 0.2 (one cell) to the
left, i.e. 1.0.
>>> space = odl.uniform_discr(0, 1, 5)
>>> disp_field_space = space.tangent_bundle
>>> template = space.element([0, 0, 1, 0, 0])
>>> displacement_field = disp_field_space.element([[0, 0, 0, -0.2, 0]])
>>> linear_deform(template, displacement_field)
array([ 0., 0., 1., 1., 0.])
The result depends on the chosen interpolation. With 'linear'
interpolation and an offset equal to half the distance between two
points, 0.1, one gets the mean of the values.
>>> space = odl.uniform_discr(0, 1, 5, interp='linear')
>>> disp_field_space = space.tangent_bundle
>>> template = space.element([0, 0, 1, 0, 0])
>>> displacement_field = disp_field_space.element([[0, 0, 0, -0.1, 0]])
>>> linear_deform(template, displacement_field)
array([ 0. , 0. , 1. , 0.5, 0. ]) | [
"Linearized",
"deformation",
"of",
"a",
"template",
"with",
"a",
"displacement",
"field",
"."
] | b8443f6aca90e191ba36c91d32253c5a36249a6c | https://github.com/odlgroup/odl/blob/b8443f6aca90e191ba36c91d32253c5a36249a6c/odl/deform/linearized.py#L23-L77 |
231,925 | odlgroup/odl | odl/deform/linearized.py | LinDeformFixedTempl.derivative | def derivative(self, displacement):
"""Derivative of the operator at ``displacement``.
Parameters
----------
displacement : `domain` `element-like`
Point at which the derivative is computed.
Returns
-------
derivative : `PointwiseInner`
The derivative evaluated at ``displacement``.
"""
# To implement the complex case we need to be able to embed the real
# vector field space into the range of the gradient. Issue #59.
if not self.range.is_real:
raise NotImplementedError('derivative not implemented for complex '
'spaces.')
displacement = self.domain.element(displacement)
# TODO: allow users to select what method to use here.
grad = Gradient(domain=self.range, method='central',
pad_mode='symmetric')
grad_templ = grad(self.template)
def_grad = self.domain.element(
[linear_deform(gf, displacement) for gf in grad_templ])
return PointwiseInner(self.domain, def_grad) | python | def derivative(self, displacement):
# To implement the complex case we need to be able to embed the real
# vector field space into the range of the gradient. Issue #59.
if not self.range.is_real:
raise NotImplementedError('derivative not implemented for complex '
'spaces.')
displacement = self.domain.element(displacement)
# TODO: allow users to select what method to use here.
grad = Gradient(domain=self.range, method='central',
pad_mode='symmetric')
grad_templ = grad(self.template)
def_grad = self.domain.element(
[linear_deform(gf, displacement) for gf in grad_templ])
return PointwiseInner(self.domain, def_grad) | [
"def",
"derivative",
"(",
"self",
",",
"displacement",
")",
":",
"# To implement the complex case we need to be able to embed the real",
"# vector field space into the range of the gradient. Issue #59.",
"if",
"not",
"self",
".",
"range",
".",
"is_real",
":",
"raise",
"NotImple... | Derivative of the operator at ``displacement``.
Parameters
----------
displacement : `domain` `element-like`
Point at which the derivative is computed.
Returns
-------
derivative : `PointwiseInner`
The derivative evaluated at ``displacement``. | [
"Derivative",
"of",
"the",
"operator",
"at",
"displacement",
"."
] | b8443f6aca90e191ba36c91d32253c5a36249a6c | https://github.com/odlgroup/odl/blob/b8443f6aca90e191ba36c91d32253c5a36249a6c/odl/deform/linearized.py#L203-L231 |
231,926 | odlgroup/odl | odl/deform/linearized.py | LinDeformFixedDisp.adjoint | def adjoint(self):
"""Adjoint of the linear operator.
Note that this implementation uses an approximation that is only
valid for small displacements.
"""
# TODO allow users to select what method to use here.
div_op = Divergence(domain=self.displacement.space, method='forward',
pad_mode='symmetric')
jacobian_det = self.domain.element(
np.exp(-div_op(self.displacement)))
return jacobian_det * self.inverse | python | def adjoint(self):
# TODO allow users to select what method to use here.
div_op = Divergence(domain=self.displacement.space, method='forward',
pad_mode='symmetric')
jacobian_det = self.domain.element(
np.exp(-div_op(self.displacement)))
return jacobian_det * self.inverse | [
"def",
"adjoint",
"(",
"self",
")",
":",
"# TODO allow users to select what method to use here.",
"div_op",
"=",
"Divergence",
"(",
"domain",
"=",
"self",
".",
"displacement",
".",
"space",
",",
"method",
"=",
"'forward'",
",",
"pad_mode",
"=",
"'symmetric'",
")",... | Adjoint of the linear operator.
Note that this implementation uses an approximation that is only
valid for small displacements. | [
"Adjoint",
"of",
"the",
"linear",
"operator",
"."
] | b8443f6aca90e191ba36c91d32253c5a36249a6c | https://github.com/odlgroup/odl/blob/b8443f6aca90e191ba36c91d32253c5a36249a6c/odl/deform/linearized.py#L362-L374 |
231,927 | odlgroup/odl | odl/phantom/phantom_utils.py | cylinders_from_ellipses | def cylinders_from_ellipses(ellipses):
"""Create 3d cylinders from ellipses."""
ellipses = np.asarray(ellipses)
ellipsoids = np.zeros((ellipses.shape[0], 10))
ellipsoids[:, [0, 1, 2, 4, 5, 7]] = ellipses
ellipsoids[:, 3] = 100000.0
return ellipsoids | python | def cylinders_from_ellipses(ellipses):
ellipses = np.asarray(ellipses)
ellipsoids = np.zeros((ellipses.shape[0], 10))
ellipsoids[:, [0, 1, 2, 4, 5, 7]] = ellipses
ellipsoids[:, 3] = 100000.0
return ellipsoids | [
"def",
"cylinders_from_ellipses",
"(",
"ellipses",
")",
":",
"ellipses",
"=",
"np",
".",
"asarray",
"(",
"ellipses",
")",
"ellipsoids",
"=",
"np",
".",
"zeros",
"(",
"(",
"ellipses",
".",
"shape",
"[",
"0",
"]",
",",
"10",
")",
")",
"ellipsoids",
"[",
... | Create 3d cylinders from ellipses. | [
"Create",
"3d",
"cylinders",
"from",
"ellipses",
"."
] | b8443f6aca90e191ba36c91d32253c5a36249a6c | https://github.com/odlgroup/odl/blob/b8443f6aca90e191ba36c91d32253c5a36249a6c/odl/phantom/phantom_utils.py#L18-L25 |
231,928 | internetarchive/brozzler | brozzler/model.py | ElapsedMixIn.elapsed | def elapsed(self):
'''
Returns elapsed crawl time as a float in seconds.
This metric includes all the time that a site was in active rotation,
including any time it spent waiting for its turn to be brozzled.
In contrast `Site.active_brozzling_time` only counts time when a
brozzler worker claimed the site and was actively brozzling it.
'''
dt = 0
for ss in self.starts_and_stops[:-1]:
dt += (ss['stop'] - ss['start']).total_seconds()
ss = self.starts_and_stops[-1]
if ss['stop']:
dt += (ss['stop'] - ss['start']).total_seconds()
else: # crawl is active
dt += (doublethink.utcnow() - ss['start']).total_seconds()
return dt | python | def elapsed(self):
'''
Returns elapsed crawl time as a float in seconds.
This metric includes all the time that a site was in active rotation,
including any time it spent waiting for its turn to be brozzled.
In contrast `Site.active_brozzling_time` only counts time when a
brozzler worker claimed the site and was actively brozzling it.
'''
dt = 0
for ss in self.starts_and_stops[:-1]:
dt += (ss['stop'] - ss['start']).total_seconds()
ss = self.starts_and_stops[-1]
if ss['stop']:
dt += (ss['stop'] - ss['start']).total_seconds()
else: # crawl is active
dt += (doublethink.utcnow() - ss['start']).total_seconds()
return dt | [
"def",
"elapsed",
"(",
"self",
")",
":",
"dt",
"=",
"0",
"for",
"ss",
"in",
"self",
".",
"starts_and_stops",
"[",
":",
"-",
"1",
"]",
":",
"dt",
"+=",
"(",
"ss",
"[",
"'stop'",
"]",
"-",
"ss",
"[",
"'start'",
"]",
")",
".",
"total_seconds",
"("... | Returns elapsed crawl time as a float in seconds.
This metric includes all the time that a site was in active rotation,
including any time it spent waiting for its turn to be brozzled.
In contrast `Site.active_brozzling_time` only counts time when a
brozzler worker claimed the site and was actively brozzling it. | [
"Returns",
"elapsed",
"crawl",
"time",
"as",
"a",
"float",
"in",
"seconds",
"."
] | 411b3f266a38b9bb942021c0121ebd8e5ca66447 | https://github.com/internetarchive/brozzler/blob/411b3f266a38b9bb942021c0121ebd8e5ca66447/brozzler/model.py#L137-L155 |
231,929 | internetarchive/brozzler | brozzler/frontier.py | RethinkDbFrontier.enforce_time_limit | def enforce_time_limit(self, site):
'''
Raises `brozzler.ReachedTimeLimit` if appropriate.
'''
if (site.time_limit and site.time_limit > 0
and site.elapsed() > site.time_limit):
self.logger.debug(
"site FINISHED_TIME_LIMIT! time_limit=%s "
"elapsed=%s %s", site.time_limit, site.elapsed(), site)
raise brozzler.ReachedTimeLimit | python | def enforce_time_limit(self, site):
'''
Raises `brozzler.ReachedTimeLimit` if appropriate.
'''
if (site.time_limit and site.time_limit > 0
and site.elapsed() > site.time_limit):
self.logger.debug(
"site FINISHED_TIME_LIMIT! time_limit=%s "
"elapsed=%s %s", site.time_limit, site.elapsed(), site)
raise brozzler.ReachedTimeLimit | [
"def",
"enforce_time_limit",
"(",
"self",
",",
"site",
")",
":",
"if",
"(",
"site",
".",
"time_limit",
"and",
"site",
".",
"time_limit",
">",
"0",
"and",
"site",
".",
"elapsed",
"(",
")",
">",
"site",
".",
"time_limit",
")",
":",
"self",
".",
"logger... | Raises `brozzler.ReachedTimeLimit` if appropriate. | [
"Raises",
"brozzler",
".",
"ReachedTimeLimit",
"if",
"appropriate",
"."
] | 411b3f266a38b9bb942021c0121ebd8e5ca66447 | https://github.com/internetarchive/brozzler/blob/411b3f266a38b9bb942021c0121ebd8e5ca66447/brozzler/frontier.py#L155-L164 |
231,930 | internetarchive/brozzler | brozzler/frontier.py | RethinkDbFrontier.honor_stop_request | def honor_stop_request(self, site):
"""Raises brozzler.CrawlStopped if stop has been requested."""
site.refresh()
if (site.stop_requested
and site.stop_requested <= doublethink.utcnow()):
self.logger.info("stop requested for site %s", site.id)
raise brozzler.CrawlStopped
if site.job_id:
job = brozzler.Job.load(self.rr, site.job_id)
if (job and job.stop_requested
and job.stop_requested <= doublethink.utcnow()):
self.logger.info("stop requested for job %s", site.job_id)
raise brozzler.CrawlStopped | python | def honor_stop_request(self, site):
site.refresh()
if (site.stop_requested
and site.stop_requested <= doublethink.utcnow()):
self.logger.info("stop requested for site %s", site.id)
raise brozzler.CrawlStopped
if site.job_id:
job = brozzler.Job.load(self.rr, site.job_id)
if (job and job.stop_requested
and job.stop_requested <= doublethink.utcnow()):
self.logger.info("stop requested for job %s", site.job_id)
raise brozzler.CrawlStopped | [
"def",
"honor_stop_request",
"(",
"self",
",",
"site",
")",
":",
"site",
".",
"refresh",
"(",
")",
"if",
"(",
"site",
".",
"stop_requested",
"and",
"site",
".",
"stop_requested",
"<=",
"doublethink",
".",
"utcnow",
"(",
")",
")",
":",
"self",
".",
"log... | Raises brozzler.CrawlStopped if stop has been requested. | [
"Raises",
"brozzler",
".",
"CrawlStopped",
"if",
"stop",
"has",
"been",
"requested",
"."
] | 411b3f266a38b9bb942021c0121ebd8e5ca66447 | https://github.com/internetarchive/brozzler/blob/411b3f266a38b9bb942021c0121ebd8e5ca66447/brozzler/frontier.py#L207-L220 |
231,931 | internetarchive/brozzler | brozzler/frontier.py | RethinkDbFrontier._maybe_finish_job | def _maybe_finish_job(self, job_id):
"""Returns True if job is finished."""
job = brozzler.Job.load(self.rr, job_id)
if not job:
return False
if job.status.startswith("FINISH"):
self.logger.warn("%s is already %s", job, job.status)
return True
results = self.rr.table("sites").get_all(job_id, index="job_id").run()
n = 0
for result in results:
site = brozzler.Site(self.rr, result)
if not site.status.startswith("FINISH"):
results.close()
return False
n += 1
self.logger.info(
"all %s sites finished, job %s is FINISHED!", n, job.id)
job.finish()
job.save()
return True | python | def _maybe_finish_job(self, job_id):
job = brozzler.Job.load(self.rr, job_id)
if not job:
return False
if job.status.startswith("FINISH"):
self.logger.warn("%s is already %s", job, job.status)
return True
results = self.rr.table("sites").get_all(job_id, index="job_id").run()
n = 0
for result in results:
site = brozzler.Site(self.rr, result)
if not site.status.startswith("FINISH"):
results.close()
return False
n += 1
self.logger.info(
"all %s sites finished, job %s is FINISHED!", n, job.id)
job.finish()
job.save()
return True | [
"def",
"_maybe_finish_job",
"(",
"self",
",",
"job_id",
")",
":",
"job",
"=",
"brozzler",
".",
"Job",
".",
"load",
"(",
"self",
".",
"rr",
",",
"job_id",
")",
"if",
"not",
"job",
":",
"return",
"False",
"if",
"job",
".",
"status",
".",
"startswith",
... | Returns True if job is finished. | [
"Returns",
"True",
"if",
"job",
"is",
"finished",
"."
] | 411b3f266a38b9bb942021c0121ebd8e5ca66447 | https://github.com/internetarchive/brozzler/blob/411b3f266a38b9bb942021c0121ebd8e5ca66447/brozzler/frontier.py#L222-L244 |
231,932 | internetarchive/brozzler | brozzler/frontier.py | RethinkDbFrontier._merge_page | def _merge_page(self, existing_page, fresh_page):
'''
Utility method for merging info from `brozzler.Page` instances
representing the same url but with possibly different metadata.
'''
existing_page.priority += fresh_page.priority
existing_page.hashtags = list(set(
existing_page.hashtags + fresh_page.hashtags))
existing_page.hops_off = min(
existing_page.hops_off, fresh_page.hops_off) | python | def _merge_page(self, existing_page, fresh_page):
'''
Utility method for merging info from `brozzler.Page` instances
representing the same url but with possibly different metadata.
'''
existing_page.priority += fresh_page.priority
existing_page.hashtags = list(set(
existing_page.hashtags + fresh_page.hashtags))
existing_page.hops_off = min(
existing_page.hops_off, fresh_page.hops_off) | [
"def",
"_merge_page",
"(",
"self",
",",
"existing_page",
",",
"fresh_page",
")",
":",
"existing_page",
".",
"priority",
"+=",
"fresh_page",
".",
"priority",
"existing_page",
".",
"hashtags",
"=",
"list",
"(",
"set",
"(",
"existing_page",
".",
"hashtags",
"+",
... | Utility method for merging info from `brozzler.Page` instances
representing the same url but with possibly different metadata. | [
"Utility",
"method",
"for",
"merging",
"info",
"from",
"brozzler",
".",
"Page",
"instances",
"representing",
"the",
"same",
"url",
"but",
"with",
"possibly",
"different",
"metadata",
"."
] | 411b3f266a38b9bb942021c0121ebd8e5ca66447 | https://github.com/internetarchive/brozzler/blob/411b3f266a38b9bb942021c0121ebd8e5ca66447/brozzler/frontier.py#L310-L319 |
231,933 | internetarchive/brozzler | brozzler/__init__.py | behaviors | def behaviors(behaviors_dir=None):
"""Return list of JS behaviors loaded from YAML file.
:param behaviors_dir: Directory containing `behaviors.yaml` and
`js-templates/`. Defaults to brozzler dir.
"""
import os, yaml, string
global _behaviors
if _behaviors is None:
d = behaviors_dir or os.path.dirname(__file__)
behaviors_yaml = os.path.join(d, 'behaviors.yaml')
with open(behaviors_yaml) as fin:
_behaviors = yaml.safe_load(fin)
return _behaviors | python | def behaviors(behaviors_dir=None):
import os, yaml, string
global _behaviors
if _behaviors is None:
d = behaviors_dir or os.path.dirname(__file__)
behaviors_yaml = os.path.join(d, 'behaviors.yaml')
with open(behaviors_yaml) as fin:
_behaviors = yaml.safe_load(fin)
return _behaviors | [
"def",
"behaviors",
"(",
"behaviors_dir",
"=",
"None",
")",
":",
"import",
"os",
",",
"yaml",
",",
"string",
"global",
"_behaviors",
"if",
"_behaviors",
"is",
"None",
":",
"d",
"=",
"behaviors_dir",
"or",
"os",
".",
"path",
".",
"dirname",
"(",
"__file__... | Return list of JS behaviors loaded from YAML file.
:param behaviors_dir: Directory containing `behaviors.yaml` and
`js-templates/`. Defaults to brozzler dir. | [
"Return",
"list",
"of",
"JS",
"behaviors",
"loaded",
"from",
"YAML",
"file",
"."
] | 411b3f266a38b9bb942021c0121ebd8e5ca66447 | https://github.com/internetarchive/brozzler/blob/411b3f266a38b9bb942021c0121ebd8e5ca66447/brozzler/__init__.py#L97-L110 |
231,934 | internetarchive/brozzler | brozzler/__init__.py | behavior_script | def behavior_script(url, template_parameters=None, behaviors_dir=None):
'''
Returns the javascript behavior string populated with template_parameters.
'''
import re, logging, json
for behavior in behaviors(behaviors_dir=behaviors_dir):
if re.match(behavior['url_regex'], url):
parameters = dict()
if 'default_parameters' in behavior:
parameters.update(behavior['default_parameters'])
if template_parameters:
parameters.update(template_parameters)
template = jinja2_environment(behaviors_dir).get_template(
behavior['behavior_js_template'])
script = template.render(parameters)
logging.info(
'using template=%r populated with parameters=%r for %r',
behavior['behavior_js_template'], json.dumps(parameters), url)
return script
return None | python | def behavior_script(url, template_parameters=None, behaviors_dir=None):
'''
Returns the javascript behavior string populated with template_parameters.
'''
import re, logging, json
for behavior in behaviors(behaviors_dir=behaviors_dir):
if re.match(behavior['url_regex'], url):
parameters = dict()
if 'default_parameters' in behavior:
parameters.update(behavior['default_parameters'])
if template_parameters:
parameters.update(template_parameters)
template = jinja2_environment(behaviors_dir).get_template(
behavior['behavior_js_template'])
script = template.render(parameters)
logging.info(
'using template=%r populated with parameters=%r for %r',
behavior['behavior_js_template'], json.dumps(parameters), url)
return script
return None | [
"def",
"behavior_script",
"(",
"url",
",",
"template_parameters",
"=",
"None",
",",
"behaviors_dir",
"=",
"None",
")",
":",
"import",
"re",
",",
"logging",
",",
"json",
"for",
"behavior",
"in",
"behaviors",
"(",
"behaviors_dir",
"=",
"behaviors_dir",
")",
":... | Returns the javascript behavior string populated with template_parameters. | [
"Returns",
"the",
"javascript",
"behavior",
"string",
"populated",
"with",
"template_parameters",
"."
] | 411b3f266a38b9bb942021c0121ebd8e5ca66447 | https://github.com/internetarchive/brozzler/blob/411b3f266a38b9bb942021c0121ebd8e5ca66447/brozzler/__init__.py#L112-L131 |
231,935 | internetarchive/brozzler | brozzler/__init__.py | thread_raise | def thread_raise(thread, exctype):
'''
Raises or queues the exception `exctype` for the thread `thread`.
See the documentation on the function `thread_exception_gate()` for more
information.
Adapted from http://tomerfiliba.com/recipes/Thread2/ which explains:
"The exception will be raised only when executing python bytecode. If your
thread calls a native/built-in blocking function, the exception will be
raised only when execution returns to the python code."
Raises:
TypeError if `exctype` is not a class
ValueError, SystemError in case of unexpected problems
'''
import ctypes, inspect, threading, logging
if not inspect.isclass(exctype):
raise TypeError(
'cannot raise %s, only exception types can be raised (not '
'instances)' % exctype)
gate = thread_exception_gate(thread)
with gate.lock:
if gate.ok_to_raise.is_set() and thread.is_alive():
gate.ok_to_raise.clear()
logging.info('raising %s in thread %s', exctype, thread)
res = ctypes.pythonapi.PyThreadState_SetAsyncExc(
ctypes.c_long(thread.ident), ctypes.py_object(exctype))
if res == 0:
raise ValueError(
'invalid thread id? thread.ident=%s' % thread.ident)
elif res != 1:
# if it returns a number greater than one, you're in trouble,
# and you should call it again with exc=NULL to revert the effect
ctypes.pythonapi.PyThreadState_SetAsyncExc(thread.ident, 0)
raise SystemError('PyThreadState_SetAsyncExc failed')
else:
logging.info('queueing %s for thread %s', exctype, thread)
gate.queue_exception(exctype) | python | def thread_raise(thread, exctype):
'''
Raises or queues the exception `exctype` for the thread `thread`.
See the documentation on the function `thread_exception_gate()` for more
information.
Adapted from http://tomerfiliba.com/recipes/Thread2/ which explains:
"The exception will be raised only when executing python bytecode. If your
thread calls a native/built-in blocking function, the exception will be
raised only when execution returns to the python code."
Raises:
TypeError if `exctype` is not a class
ValueError, SystemError in case of unexpected problems
'''
import ctypes, inspect, threading, logging
if not inspect.isclass(exctype):
raise TypeError(
'cannot raise %s, only exception types can be raised (not '
'instances)' % exctype)
gate = thread_exception_gate(thread)
with gate.lock:
if gate.ok_to_raise.is_set() and thread.is_alive():
gate.ok_to_raise.clear()
logging.info('raising %s in thread %s', exctype, thread)
res = ctypes.pythonapi.PyThreadState_SetAsyncExc(
ctypes.c_long(thread.ident), ctypes.py_object(exctype))
if res == 0:
raise ValueError(
'invalid thread id? thread.ident=%s' % thread.ident)
elif res != 1:
# if it returns a number greater than one, you're in trouble,
# and you should call it again with exc=NULL to revert the effect
ctypes.pythonapi.PyThreadState_SetAsyncExc(thread.ident, 0)
raise SystemError('PyThreadState_SetAsyncExc failed')
else:
logging.info('queueing %s for thread %s', exctype, thread)
gate.queue_exception(exctype) | [
"def",
"thread_raise",
"(",
"thread",
",",
"exctype",
")",
":",
"import",
"ctypes",
",",
"inspect",
",",
"threading",
",",
"logging",
"if",
"not",
"inspect",
".",
"isclass",
"(",
"exctype",
")",
":",
"raise",
"TypeError",
"(",
"'cannot raise %s, only exception... | Raises or queues the exception `exctype` for the thread `thread`.
See the documentation on the function `thread_exception_gate()` for more
information.
Adapted from http://tomerfiliba.com/recipes/Thread2/ which explains:
"The exception will be raised only when executing python bytecode. If your
thread calls a native/built-in blocking function, the exception will be
raised only when execution returns to the python code."
Raises:
TypeError if `exctype` is not a class
ValueError, SystemError in case of unexpected problems | [
"Raises",
"or",
"queues",
"the",
"exception",
"exctype",
"for",
"the",
"thread",
"thread",
"."
] | 411b3f266a38b9bb942021c0121ebd8e5ca66447 | https://github.com/internetarchive/brozzler/blob/411b3f266a38b9bb942021c0121ebd8e5ca66447/brozzler/__init__.py#L206-L246 |
231,936 | internetarchive/brozzler | brozzler/__init__.py | sleep | def sleep(duration):
'''
Sleeps for duration seconds in increments of 0.5 seconds.
Use this so that the sleep can be interrupted by thread_raise().
'''
import time
start = time.time()
while True:
elapsed = time.time() - start
if elapsed >= duration:
break
time.sleep(min(duration - elapsed, 0.5)) | python | def sleep(duration):
'''
Sleeps for duration seconds in increments of 0.5 seconds.
Use this so that the sleep can be interrupted by thread_raise().
'''
import time
start = time.time()
while True:
elapsed = time.time() - start
if elapsed >= duration:
break
time.sleep(min(duration - elapsed, 0.5)) | [
"def",
"sleep",
"(",
"duration",
")",
":",
"import",
"time",
"start",
"=",
"time",
".",
"time",
"(",
")",
"while",
"True",
":",
"elapsed",
"=",
"time",
".",
"time",
"(",
")",
"-",
"start",
"if",
"elapsed",
">=",
"duration",
":",
"break",
"time",
".... | Sleeps for duration seconds in increments of 0.5 seconds.
Use this so that the sleep can be interrupted by thread_raise(). | [
"Sleeps",
"for",
"duration",
"seconds",
"in",
"increments",
"of",
"0",
".",
"5",
"seconds",
"."
] | 411b3f266a38b9bb942021c0121ebd8e5ca66447 | https://github.com/internetarchive/brozzler/blob/411b3f266a38b9bb942021c0121ebd8e5ca66447/brozzler/__init__.py#L248-L260 |
231,937 | internetarchive/brozzler | brozzler/browser.py | BrowserPool.acquire_multi | def acquire_multi(self, n=1):
'''
Returns a list of up to `n` browsers.
Raises:
NoBrowsersAvailable if none available
'''
browsers = []
with self._lock:
if len(self._in_use) >= self.size:
raise NoBrowsersAvailable
while len(self._in_use) < self.size and len(browsers) < n:
browser = self._fresh_browser()
browsers.append(browser)
self._in_use.add(browser)
return browsers | python | def acquire_multi(self, n=1):
'''
Returns a list of up to `n` browsers.
Raises:
NoBrowsersAvailable if none available
'''
browsers = []
with self._lock:
if len(self._in_use) >= self.size:
raise NoBrowsersAvailable
while len(self._in_use) < self.size and len(browsers) < n:
browser = self._fresh_browser()
browsers.append(browser)
self._in_use.add(browser)
return browsers | [
"def",
"acquire_multi",
"(",
"self",
",",
"n",
"=",
"1",
")",
":",
"browsers",
"=",
"[",
"]",
"with",
"self",
".",
"_lock",
":",
"if",
"len",
"(",
"self",
".",
"_in_use",
")",
">=",
"self",
".",
"size",
":",
"raise",
"NoBrowsersAvailable",
"while",
... | Returns a list of up to `n` browsers.
Raises:
NoBrowsersAvailable if none available | [
"Returns",
"a",
"list",
"of",
"up",
"to",
"n",
"browsers",
"."
] | 411b3f266a38b9bb942021c0121ebd8e5ca66447 | https://github.com/internetarchive/brozzler/blob/411b3f266a38b9bb942021c0121ebd8e5ca66447/brozzler/browser.py#L74-L89 |
231,938 | internetarchive/brozzler | brozzler/browser.py | BrowserPool.acquire | def acquire(self):
'''
Returns an available instance.
Returns:
browser from pool, if available
Raises:
NoBrowsersAvailable if none available
'''
with self._lock:
if len(self._in_use) >= self.size:
raise NoBrowsersAvailable
browser = self._fresh_browser()
self._in_use.add(browser)
return browser | python | def acquire(self):
'''
Returns an available instance.
Returns:
browser from pool, if available
Raises:
NoBrowsersAvailable if none available
'''
with self._lock:
if len(self._in_use) >= self.size:
raise NoBrowsersAvailable
browser = self._fresh_browser()
self._in_use.add(browser)
return browser | [
"def",
"acquire",
"(",
"self",
")",
":",
"with",
"self",
".",
"_lock",
":",
"if",
"len",
"(",
"self",
".",
"_in_use",
")",
">=",
"self",
".",
"size",
":",
"raise",
"NoBrowsersAvailable",
"browser",
"=",
"self",
".",
"_fresh_browser",
"(",
")",
"self",
... | Returns an available instance.
Returns:
browser from pool, if available
Raises:
NoBrowsersAvailable if none available | [
"Returns",
"an",
"available",
"instance",
"."
] | 411b3f266a38b9bb942021c0121ebd8e5ca66447 | https://github.com/internetarchive/brozzler/blob/411b3f266a38b9bb942021c0121ebd8e5ca66447/brozzler/browser.py#L91-L106 |
231,939 | internetarchive/brozzler | brozzler/browser.py | WebsockReceiverThread._on_error | def _on_error(self, websock, e):
'''
Raises BrowsingException in the thread that created this instance.
'''
if isinstance(e, (
websocket.WebSocketConnectionClosedException,
ConnectionResetError)):
self.logger.error('websocket closed, did chrome die?')
else:
self.logger.error(
'exception from websocket receiver thread',
exc_info=1)
brozzler.thread_raise(self.calling_thread, BrowsingException) | python | def _on_error(self, websock, e):
'''
Raises BrowsingException in the thread that created this instance.
'''
if isinstance(e, (
websocket.WebSocketConnectionClosedException,
ConnectionResetError)):
self.logger.error('websocket closed, did chrome die?')
else:
self.logger.error(
'exception from websocket receiver thread',
exc_info=1)
brozzler.thread_raise(self.calling_thread, BrowsingException) | [
"def",
"_on_error",
"(",
"self",
",",
"websock",
",",
"e",
")",
":",
"if",
"isinstance",
"(",
"e",
",",
"(",
"websocket",
".",
"WebSocketConnectionClosedException",
",",
"ConnectionResetError",
")",
")",
":",
"self",
".",
"logger",
".",
"error",
"(",
"'web... | Raises BrowsingException in the thread that created this instance. | [
"Raises",
"BrowsingException",
"in",
"the",
"thread",
"that",
"created",
"this",
"instance",
"."
] | 411b3f266a38b9bb942021c0121ebd8e5ca66447 | https://github.com/internetarchive/brozzler/blob/411b3f266a38b9bb942021c0121ebd8e5ca66447/brozzler/browser.py#L175-L187 |
231,940 | internetarchive/brozzler | brozzler/browser.py | Browser.start | def start(self, **kwargs):
'''
Starts chrome if it's not running.
Args:
**kwargs: arguments for self.chrome.start(...)
'''
if not self.is_running():
self.websock_url = self.chrome.start(**kwargs)
self.websock = websocket.WebSocketApp(self.websock_url)
self.websock_thread = WebsockReceiverThread(
self.websock, name='WebsockThread:%s' % self.chrome.port)
self.websock_thread.start()
self._wait_for(lambda: self.websock_thread.is_open, timeout=30)
# tell browser to send us messages we're interested in
self.send_to_chrome(method='Network.enable')
self.send_to_chrome(method='Page.enable')
self.send_to_chrome(method='Console.enable')
self.send_to_chrome(method='Runtime.enable')
self.send_to_chrome(method='ServiceWorker.enable')
self.send_to_chrome(method='ServiceWorker.setForceUpdateOnPageLoad')
# disable google analytics
self.send_to_chrome(
method='Network.setBlockedURLs',
params={'urls': ['*google-analytics.com/analytics.js',
'*google-analytics.com/ga.js']}) | python | def start(self, **kwargs):
'''
Starts chrome if it's not running.
Args:
**kwargs: arguments for self.chrome.start(...)
'''
if not self.is_running():
self.websock_url = self.chrome.start(**kwargs)
self.websock = websocket.WebSocketApp(self.websock_url)
self.websock_thread = WebsockReceiverThread(
self.websock, name='WebsockThread:%s' % self.chrome.port)
self.websock_thread.start()
self._wait_for(lambda: self.websock_thread.is_open, timeout=30)
# tell browser to send us messages we're interested in
self.send_to_chrome(method='Network.enable')
self.send_to_chrome(method='Page.enable')
self.send_to_chrome(method='Console.enable')
self.send_to_chrome(method='Runtime.enable')
self.send_to_chrome(method='ServiceWorker.enable')
self.send_to_chrome(method='ServiceWorker.setForceUpdateOnPageLoad')
# disable google analytics
self.send_to_chrome(
method='Network.setBlockedURLs',
params={'urls': ['*google-analytics.com/analytics.js',
'*google-analytics.com/ga.js']}) | [
"def",
"start",
"(",
"self",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"not",
"self",
".",
"is_running",
"(",
")",
":",
"self",
".",
"websock_url",
"=",
"self",
".",
"chrome",
".",
"start",
"(",
"*",
"*",
"kwargs",
")",
"self",
".",
"websock",
"="... | Starts chrome if it's not running.
Args:
**kwargs: arguments for self.chrome.start(...) | [
"Starts",
"chrome",
"if",
"it",
"s",
"not",
"running",
"."
] | 411b3f266a38b9bb942021c0121ebd8e5ca66447 | https://github.com/internetarchive/brozzler/blob/411b3f266a38b9bb942021c0121ebd8e5ca66447/brozzler/browser.py#L333-L361 |
231,941 | internetarchive/brozzler | brozzler/browser.py | Browser.stop | def stop(self):
'''
Stops chrome if it's running.
'''
try:
if (self.websock and self.websock.sock
and self.websock.sock.connected):
self.logger.info('shutting down websocket connection')
try:
self.websock.close()
except BaseException as e:
self.logger.error(
'exception closing websocket %s - %s',
self.websock, e)
self.chrome.stop()
if self.websock_thread and (
self.websock_thread != threading.current_thread()):
self.websock_thread.join(timeout=30)
if self.websock_thread.is_alive():
self.logger.error(
'%s still alive 30 seconds after closing %s, will '
'forcefully nudge it again', self.websock_thread,
self.websock)
self.websock.keep_running = False
self.websock_thread.join(timeout=30)
if self.websock_thread.is_alive():
self.logger.critical(
'%s still alive 60 seconds after closing %s',
self.websock_thread, self.websock)
self.websock_url = None
except:
self.logger.error('problem stopping', exc_info=True) | python | def stop(self):
'''
Stops chrome if it's running.
'''
try:
if (self.websock and self.websock.sock
and self.websock.sock.connected):
self.logger.info('shutting down websocket connection')
try:
self.websock.close()
except BaseException as e:
self.logger.error(
'exception closing websocket %s - %s',
self.websock, e)
self.chrome.stop()
if self.websock_thread and (
self.websock_thread != threading.current_thread()):
self.websock_thread.join(timeout=30)
if self.websock_thread.is_alive():
self.logger.error(
'%s still alive 30 seconds after closing %s, will '
'forcefully nudge it again', self.websock_thread,
self.websock)
self.websock.keep_running = False
self.websock_thread.join(timeout=30)
if self.websock_thread.is_alive():
self.logger.critical(
'%s still alive 60 seconds after closing %s',
self.websock_thread, self.websock)
self.websock_url = None
except:
self.logger.error('problem stopping', exc_info=True) | [
"def",
"stop",
"(",
"self",
")",
":",
"try",
":",
"if",
"(",
"self",
".",
"websock",
"and",
"self",
".",
"websock",
".",
"sock",
"and",
"self",
".",
"websock",
".",
"sock",
".",
"connected",
")",
":",
"self",
".",
"logger",
".",
"info",
"(",
"'sh... | Stops chrome if it's running. | [
"Stops",
"chrome",
"if",
"it",
"s",
"running",
"."
] | 411b3f266a38b9bb942021c0121ebd8e5ca66447 | https://github.com/internetarchive/brozzler/blob/411b3f266a38b9bb942021c0121ebd8e5ca66447/brozzler/browser.py#L363-L397 |
231,942 | internetarchive/brozzler | brozzler/browser.py | Browser.browse_page | def browse_page(
self, page_url, extra_headers=None,
user_agent=None, behavior_parameters=None, behaviors_dir=None,
on_request=None, on_response=None,
on_service_worker_version_updated=None, on_screenshot=None,
username=None, password=None, hashtags=None,
skip_extract_outlinks=False, skip_visit_hashtags=False,
skip_youtube_dl=False, page_timeout=300, behavior_timeout=900):
'''
Browses page in browser.
Browser should already be running, i.e. start() should have been
called. Opens the page_url in the browser, runs behaviors, takes a
screenshot, extracts outlinks.
Args:
page_url: url of the page to browse
extra_headers: dict of extra http headers to configure the browser
to send with every request (default None)
user_agent: user agent string, replaces browser default if
supplied (default None)
behavior_parameters: dict of parameters for populating the
javascript behavior template (default None)
behaviors_dir: Directory containing behaviors.yaml and JS templates
(default None loads Brozzler default JS behaviors)
on_request: callback to invoke on every Network.requestWillBeSent
event, takes one argument, the json-decoded message (default
None)
on_response: callback to invoke on every Network.responseReceived
event, takes one argument, the json-decoded message (default
None)
on_service_worker_version_updated: callback to invoke on every
ServiceWorker.workerVersionUpdated event, takes one argument,
the json-decoded message (default None)
on_screenshot: callback to invoke when screenshot is obtained,
takes one argument, the the raw jpeg bytes (default None)
# XXX takes two arguments, the url of the page at the time the
# screenshot was taken, and the raw jpeg bytes (default None)
username: username string to use to try logging in if a login form
is found in the page (default None)
password: password string to use to try logging in if a login form
is found in the page (default None)
... (there are more)
Returns:
A tuple (final_page_url, outlinks).
final_page_url: the url in the location bar at the end of the
browse_page cycle, which could be different from the original
page url if the page redirects, javascript has changed the url
in the location bar, etc
outlinks: a list of navigational links extracted from the page
Raises:
brozzler.ProxyError: in case of proxy connection error
BrowsingException: if browsing the page fails in some other way
'''
if not self.is_running():
raise BrowsingException('browser has not been started')
if self.is_browsing:
raise BrowsingException('browser is already busy browsing a page')
self.is_browsing = True
if on_request:
self.websock_thread.on_request = on_request
if on_response:
self.websock_thread.on_response = on_response
if on_service_worker_version_updated:
self.websock_thread.on_service_worker_version_updated = \
on_service_worker_version_updated
try:
with brozzler.thread_accept_exceptions():
self.configure_browser(
extra_headers=extra_headers,
user_agent=user_agent)
self.navigate_to_page(page_url, timeout=page_timeout)
if password:
self.try_login(username, password, timeout=page_timeout)
# if login redirected us, return to page_url
if page_url != self.url().split('#')[0]:
self.logger.debug(
'login navigated away from %s; returning!',
page_url)
self.navigate_to_page(page_url, timeout=page_timeout)
if on_screenshot:
self._try_screenshot(on_screenshot)
behavior_script = brozzler.behavior_script(
page_url, behavior_parameters,
behaviors_dir=behaviors_dir)
self.run_behavior(behavior_script, timeout=behavior_timeout)
if skip_extract_outlinks:
outlinks = []
else:
outlinks = self.extract_outlinks()
if not skip_visit_hashtags:
self.visit_hashtags(self.url(), hashtags, outlinks)
final_page_url = self.url()
return final_page_url, outlinks
except brozzler.ReachedLimit:
# websock_thread has stashed the ReachedLimit exception with
# more information, raise that one
raise self.websock_thread.reached_limit
except websocket.WebSocketConnectionClosedException as e:
self.logger.error('websocket closed, did chrome die?')
raise BrowsingException(e)
finally:
self.is_browsing = False
self.websock_thread.on_request = None
self.websock_thread.on_response = None | python | def browse_page(
self, page_url, extra_headers=None,
user_agent=None, behavior_parameters=None, behaviors_dir=None,
on_request=None, on_response=None,
on_service_worker_version_updated=None, on_screenshot=None,
username=None, password=None, hashtags=None,
skip_extract_outlinks=False, skip_visit_hashtags=False,
skip_youtube_dl=False, page_timeout=300, behavior_timeout=900):
'''
Browses page in browser.
Browser should already be running, i.e. start() should have been
called. Opens the page_url in the browser, runs behaviors, takes a
screenshot, extracts outlinks.
Args:
page_url: url of the page to browse
extra_headers: dict of extra http headers to configure the browser
to send with every request (default None)
user_agent: user agent string, replaces browser default if
supplied (default None)
behavior_parameters: dict of parameters for populating the
javascript behavior template (default None)
behaviors_dir: Directory containing behaviors.yaml and JS templates
(default None loads Brozzler default JS behaviors)
on_request: callback to invoke on every Network.requestWillBeSent
event, takes one argument, the json-decoded message (default
None)
on_response: callback to invoke on every Network.responseReceived
event, takes one argument, the json-decoded message (default
None)
on_service_worker_version_updated: callback to invoke on every
ServiceWorker.workerVersionUpdated event, takes one argument,
the json-decoded message (default None)
on_screenshot: callback to invoke when screenshot is obtained,
takes one argument, the the raw jpeg bytes (default None)
# XXX takes two arguments, the url of the page at the time the
# screenshot was taken, and the raw jpeg bytes (default None)
username: username string to use to try logging in if a login form
is found in the page (default None)
password: password string to use to try logging in if a login form
is found in the page (default None)
... (there are more)
Returns:
A tuple (final_page_url, outlinks).
final_page_url: the url in the location bar at the end of the
browse_page cycle, which could be different from the original
page url if the page redirects, javascript has changed the url
in the location bar, etc
outlinks: a list of navigational links extracted from the page
Raises:
brozzler.ProxyError: in case of proxy connection error
BrowsingException: if browsing the page fails in some other way
'''
if not self.is_running():
raise BrowsingException('browser has not been started')
if self.is_browsing:
raise BrowsingException('browser is already busy browsing a page')
self.is_browsing = True
if on_request:
self.websock_thread.on_request = on_request
if on_response:
self.websock_thread.on_response = on_response
if on_service_worker_version_updated:
self.websock_thread.on_service_worker_version_updated = \
on_service_worker_version_updated
try:
with brozzler.thread_accept_exceptions():
self.configure_browser(
extra_headers=extra_headers,
user_agent=user_agent)
self.navigate_to_page(page_url, timeout=page_timeout)
if password:
self.try_login(username, password, timeout=page_timeout)
# if login redirected us, return to page_url
if page_url != self.url().split('#')[0]:
self.logger.debug(
'login navigated away from %s; returning!',
page_url)
self.navigate_to_page(page_url, timeout=page_timeout)
if on_screenshot:
self._try_screenshot(on_screenshot)
behavior_script = brozzler.behavior_script(
page_url, behavior_parameters,
behaviors_dir=behaviors_dir)
self.run_behavior(behavior_script, timeout=behavior_timeout)
if skip_extract_outlinks:
outlinks = []
else:
outlinks = self.extract_outlinks()
if not skip_visit_hashtags:
self.visit_hashtags(self.url(), hashtags, outlinks)
final_page_url = self.url()
return final_page_url, outlinks
except brozzler.ReachedLimit:
# websock_thread has stashed the ReachedLimit exception with
# more information, raise that one
raise self.websock_thread.reached_limit
except websocket.WebSocketConnectionClosedException as e:
self.logger.error('websocket closed, did chrome die?')
raise BrowsingException(e)
finally:
self.is_browsing = False
self.websock_thread.on_request = None
self.websock_thread.on_response = None | [
"def",
"browse_page",
"(",
"self",
",",
"page_url",
",",
"extra_headers",
"=",
"None",
",",
"user_agent",
"=",
"None",
",",
"behavior_parameters",
"=",
"None",
",",
"behaviors_dir",
"=",
"None",
",",
"on_request",
"=",
"None",
",",
"on_response",
"=",
"None"... | Browses page in browser.
Browser should already be running, i.e. start() should have been
called. Opens the page_url in the browser, runs behaviors, takes a
screenshot, extracts outlinks.
Args:
page_url: url of the page to browse
extra_headers: dict of extra http headers to configure the browser
to send with every request (default None)
user_agent: user agent string, replaces browser default if
supplied (default None)
behavior_parameters: dict of parameters for populating the
javascript behavior template (default None)
behaviors_dir: Directory containing behaviors.yaml and JS templates
(default None loads Brozzler default JS behaviors)
on_request: callback to invoke on every Network.requestWillBeSent
event, takes one argument, the json-decoded message (default
None)
on_response: callback to invoke on every Network.responseReceived
event, takes one argument, the json-decoded message (default
None)
on_service_worker_version_updated: callback to invoke on every
ServiceWorker.workerVersionUpdated event, takes one argument,
the json-decoded message (default None)
on_screenshot: callback to invoke when screenshot is obtained,
takes one argument, the the raw jpeg bytes (default None)
# XXX takes two arguments, the url of the page at the time the
# screenshot was taken, and the raw jpeg bytes (default None)
username: username string to use to try logging in if a login form
is found in the page (default None)
password: password string to use to try logging in if a login form
is found in the page (default None)
... (there are more)
Returns:
A tuple (final_page_url, outlinks).
final_page_url: the url in the location bar at the end of the
browse_page cycle, which could be different from the original
page url if the page redirects, javascript has changed the url
in the location bar, etc
outlinks: a list of navigational links extracted from the page
Raises:
brozzler.ProxyError: in case of proxy connection error
BrowsingException: if browsing the page fails in some other way | [
"Browses",
"page",
"in",
"browser",
"."
] | 411b3f266a38b9bb942021c0121ebd8e5ca66447 | https://github.com/internetarchive/brozzler/blob/411b3f266a38b9bb942021c0121ebd8e5ca66447/brozzler/browser.py#L402-L508 |
231,943 | internetarchive/brozzler | brozzler/browser.py | Browser.url | def url(self, timeout=30):
'''
Returns value of document.URL from the browser.
'''
self.websock_thread.expect_result(self._command_id.peek())
msg_id = self.send_to_chrome(
method='Runtime.evaluate',
params={'expression': 'document.URL'})
self._wait_for(
lambda: self.websock_thread.received_result(msg_id),
timeout=timeout)
message = self.websock_thread.pop_result(msg_id)
return message['result']['result']['value'] | python | def url(self, timeout=30):
'''
Returns value of document.URL from the browser.
'''
self.websock_thread.expect_result(self._command_id.peek())
msg_id = self.send_to_chrome(
method='Runtime.evaluate',
params={'expression': 'document.URL'})
self._wait_for(
lambda: self.websock_thread.received_result(msg_id),
timeout=timeout)
message = self.websock_thread.pop_result(msg_id)
return message['result']['result']['value'] | [
"def",
"url",
"(",
"self",
",",
"timeout",
"=",
"30",
")",
":",
"self",
".",
"websock_thread",
".",
"expect_result",
"(",
"self",
".",
"_command_id",
".",
"peek",
"(",
")",
")",
"msg_id",
"=",
"self",
".",
"send_to_chrome",
"(",
"method",
"=",
"'Runtim... | Returns value of document.URL from the browser. | [
"Returns",
"value",
"of",
"document",
".",
"URL",
"from",
"the",
"browser",
"."
] | 411b3f266a38b9bb942021c0121ebd8e5ca66447 | https://github.com/internetarchive/brozzler/blob/411b3f266a38b9bb942021c0121ebd8e5ca66447/brozzler/browser.py#L600-L612 |
231,944 | internetarchive/brozzler | brozzler/cli.py | brozzler_new_job | def brozzler_new_job(argv=None):
'''
Command line utility entry point for queuing a new brozzler job. Takes a
yaml brozzler job configuration file, creates job, sites, and pages objects
in rethinkdb, which brozzler-workers will look at and start crawling.
'''
argv = argv or sys.argv
arg_parser = argparse.ArgumentParser(
prog=os.path.basename(argv[0]),
description='brozzler-new-job - queue new job with brozzler',
formatter_class=BetterArgumentDefaultsHelpFormatter)
arg_parser.add_argument(
'job_conf_file', metavar='JOB_CONF_FILE',
help='brozzler job configuration file in yaml')
add_rethinkdb_options(arg_parser)
add_common_options(arg_parser, argv)
args = arg_parser.parse_args(args=argv[1:])
configure_logging(args)
rr = rethinker(args)
frontier = brozzler.RethinkDbFrontier(rr)
try:
brozzler.new_job_file(frontier, args.job_conf_file)
except brozzler.InvalidJobConf as e:
print('brozzler-new-job: invalid job file:', args.job_conf_file, file=sys.stderr)
print(' ' + yaml.dump(e.errors).rstrip().replace('\n', '\n '), file=sys.stderr)
sys.exit(1) | python | def brozzler_new_job(argv=None):
'''
Command line utility entry point for queuing a new brozzler job. Takes a
yaml brozzler job configuration file, creates job, sites, and pages objects
in rethinkdb, which brozzler-workers will look at and start crawling.
'''
argv = argv or sys.argv
arg_parser = argparse.ArgumentParser(
prog=os.path.basename(argv[0]),
description='brozzler-new-job - queue new job with brozzler',
formatter_class=BetterArgumentDefaultsHelpFormatter)
arg_parser.add_argument(
'job_conf_file', metavar='JOB_CONF_FILE',
help='brozzler job configuration file in yaml')
add_rethinkdb_options(arg_parser)
add_common_options(arg_parser, argv)
args = arg_parser.parse_args(args=argv[1:])
configure_logging(args)
rr = rethinker(args)
frontier = brozzler.RethinkDbFrontier(rr)
try:
brozzler.new_job_file(frontier, args.job_conf_file)
except brozzler.InvalidJobConf as e:
print('brozzler-new-job: invalid job file:', args.job_conf_file, file=sys.stderr)
print(' ' + yaml.dump(e.errors).rstrip().replace('\n', '\n '), file=sys.stderr)
sys.exit(1) | [
"def",
"brozzler_new_job",
"(",
"argv",
"=",
"None",
")",
":",
"argv",
"=",
"argv",
"or",
"sys",
".",
"argv",
"arg_parser",
"=",
"argparse",
".",
"ArgumentParser",
"(",
"prog",
"=",
"os",
".",
"path",
".",
"basename",
"(",
"argv",
"[",
"0",
"]",
")",... | Command line utility entry point for queuing a new brozzler job. Takes a
yaml brozzler job configuration file, creates job, sites, and pages objects
in rethinkdb, which brozzler-workers will look at and start crawling. | [
"Command",
"line",
"utility",
"entry",
"point",
"for",
"queuing",
"a",
"new",
"brozzler",
"job",
".",
"Takes",
"a",
"yaml",
"brozzler",
"job",
"configuration",
"file",
"creates",
"job",
"sites",
"and",
"pages",
"objects",
"in",
"rethinkdb",
"which",
"brozzler"... | 411b3f266a38b9bb942021c0121ebd8e5ca66447 | https://github.com/internetarchive/brozzler/blob/411b3f266a38b9bb942021c0121ebd8e5ca66447/brozzler/cli.py#L206-L233 |
231,945 | internetarchive/brozzler | brozzler/cli.py | brozzler_new_site | def brozzler_new_site(argv=None):
'''
Command line utility entry point for queuing a new brozzler site.
Takes a seed url and creates a site and page object in rethinkdb, which
brozzler-workers will look at and start crawling.
'''
argv = argv or sys.argv
arg_parser = argparse.ArgumentParser(
prog=os.path.basename(argv[0]),
description='brozzler-new-site - register site to brozzle',
formatter_class=BetterArgumentDefaultsHelpFormatter)
arg_parser.add_argument('seed', metavar='SEED', help='seed url')
add_rethinkdb_options(arg_parser)
arg_parser.add_argument(
'--time-limit', dest='time_limit', default=None,
help='time limit in seconds for this site')
arg_parser.add_argument(
'--ignore-robots', dest='ignore_robots', action='store_true',
help='ignore robots.txt for this site')
arg_parser.add_argument(
'--warcprox-meta', dest='warcprox_meta',
help=(
'Warcprox-Meta http request header to send with each request; '
'must be a json blob, ignored unless warcprox features are '
'enabled'))
arg_parser.add_argument(
'--behavior-parameters', dest='behavior_parameters',
default=None, help=(
'json blob of parameters to populate the javascript behavior '
'template, e.g. {"parameter_username":"x",'
'"parameter_password":"y"}'))
arg_parser.add_argument(
'--username', dest='username', default=None,
help='use this username to try to log in if a login form is found')
arg_parser.add_argument(
'--password', dest='password', default=None,
help='use this password to try to log in if a login form is found')
add_common_options(arg_parser, argv)
args = arg_parser.parse_args(args=argv[1:])
configure_logging(args)
rr = rethinker(args)
site = brozzler.Site(rr, {
'seed': args.seed,
'time_limit': int(args.time_limit) if args.time_limit else None,
'ignore_robots': args.ignore_robots,
'warcprox_meta': json.loads(
args.warcprox_meta) if args.warcprox_meta else None,
'behavior_parameters': json.loads(
args.behavior_parameters) if args.behavior_parameters else None,
'username': args.username,
'password': args.password})
frontier = brozzler.RethinkDbFrontier(rr)
brozzler.new_site(frontier, site) | python | def brozzler_new_site(argv=None):
'''
Command line utility entry point for queuing a new brozzler site.
Takes a seed url and creates a site and page object in rethinkdb, which
brozzler-workers will look at and start crawling.
'''
argv = argv or sys.argv
arg_parser = argparse.ArgumentParser(
prog=os.path.basename(argv[0]),
description='brozzler-new-site - register site to brozzle',
formatter_class=BetterArgumentDefaultsHelpFormatter)
arg_parser.add_argument('seed', metavar='SEED', help='seed url')
add_rethinkdb_options(arg_parser)
arg_parser.add_argument(
'--time-limit', dest='time_limit', default=None,
help='time limit in seconds for this site')
arg_parser.add_argument(
'--ignore-robots', dest='ignore_robots', action='store_true',
help='ignore robots.txt for this site')
arg_parser.add_argument(
'--warcprox-meta', dest='warcprox_meta',
help=(
'Warcprox-Meta http request header to send with each request; '
'must be a json blob, ignored unless warcprox features are '
'enabled'))
arg_parser.add_argument(
'--behavior-parameters', dest='behavior_parameters',
default=None, help=(
'json blob of parameters to populate the javascript behavior '
'template, e.g. {"parameter_username":"x",'
'"parameter_password":"y"}'))
arg_parser.add_argument(
'--username', dest='username', default=None,
help='use this username to try to log in if a login form is found')
arg_parser.add_argument(
'--password', dest='password', default=None,
help='use this password to try to log in if a login form is found')
add_common_options(arg_parser, argv)
args = arg_parser.parse_args(args=argv[1:])
configure_logging(args)
rr = rethinker(args)
site = brozzler.Site(rr, {
'seed': args.seed,
'time_limit': int(args.time_limit) if args.time_limit else None,
'ignore_robots': args.ignore_robots,
'warcprox_meta': json.loads(
args.warcprox_meta) if args.warcprox_meta else None,
'behavior_parameters': json.loads(
args.behavior_parameters) if args.behavior_parameters else None,
'username': args.username,
'password': args.password})
frontier = brozzler.RethinkDbFrontier(rr)
brozzler.new_site(frontier, site) | [
"def",
"brozzler_new_site",
"(",
"argv",
"=",
"None",
")",
":",
"argv",
"=",
"argv",
"or",
"sys",
".",
"argv",
"arg_parser",
"=",
"argparse",
".",
"ArgumentParser",
"(",
"prog",
"=",
"os",
".",
"path",
".",
"basename",
"(",
"argv",
"[",
"0",
"]",
")"... | Command line utility entry point for queuing a new brozzler site.
Takes a seed url and creates a site and page object in rethinkdb, which
brozzler-workers will look at and start crawling. | [
"Command",
"line",
"utility",
"entry",
"point",
"for",
"queuing",
"a",
"new",
"brozzler",
"site",
".",
"Takes",
"a",
"seed",
"url",
"and",
"creates",
"a",
"site",
"and",
"page",
"object",
"in",
"rethinkdb",
"which",
"brozzler",
"-",
"workers",
"will",
"loo... | 411b3f266a38b9bb942021c0121ebd8e5ca66447 | https://github.com/internetarchive/brozzler/blob/411b3f266a38b9bb942021c0121ebd8e5ca66447/brozzler/cli.py#L235-L290 |
231,946 | internetarchive/brozzler | brozzler/cli.py | brozzler_list_captures | def brozzler_list_captures(argv=None):
'''
Handy utility for looking up entries in the rethinkdb "captures" table by
url or sha1.
'''
import urlcanon
argv = argv or sys.argv
arg_parser = argparse.ArgumentParser(
prog=os.path.basename(argv[0]),
formatter_class=BetterArgumentDefaultsHelpFormatter)
arg_parser.add_argument(
'-p', '--prefix', dest='prefix', action='store_true', help=(
'use prefix match for url (n.b. may not work as expected if '
'searching key has query string because canonicalization can '
'reorder query parameters)'))
arg_parser.add_argument(
'--yaml', dest='yaml', action='store_true', help=(
'yaml output (default is json)'))
add_rethinkdb_options(arg_parser)
add_common_options(arg_parser, argv)
arg_parser.add_argument(
'url_or_sha1', metavar='URL_or_SHA1',
help='url or sha1 to look up in captures table')
args = arg_parser.parse_args(args=argv[1:])
configure_logging(args)
rr = rethinker(args)
if args.url_or_sha1[:5] == 'sha1:':
if args.prefix:
logging.warn(
'ignoring supplied --prefix option which does not apply '
'to lookup by sha1')
# assumes it's already base32 (XXX could detect if hex and convert)
sha1base32 = args.url_or_sha1[5:].upper()
reql = rr.table('captures').between(
[sha1base32, r.minval, r.minval],
[sha1base32, r.maxval, r.maxval],
index='sha1_warc_type')
logging.debug('querying rethinkdb: %s', reql)
results = reql.run()
else:
key = urlcanon.semantic(args.url_or_sha1).surt().decode('ascii')
abbr_start_key = key[:150]
if args.prefix:
# surt is necessarily ascii and \x7f is the last ascii character
abbr_end_key = key[:150] + '\x7f'
end_key = key + '\x7f'
else:
abbr_end_key = key[:150]
end_key = key
reql = rr.table('captures').between(
[abbr_start_key, r.minval],
[abbr_end_key, r.maxval],
index='abbr_canon_surt_timestamp', right_bound='closed')
reql = reql.order_by(index='abbr_canon_surt_timestamp')
reql = reql.filter(
lambda capture: (capture['canon_surt'] >= key)
& (capture['canon_surt'] <= end_key))
logging.debug('querying rethinkdb: %s', reql)
results = reql.run()
if args.yaml:
yaml.dump_all(
results, stream=sys.stdout, explicit_start=True,
default_flow_style=False)
else:
for result in results:
print(json.dumps(result, cls=Jsonner, indent=2)) | python | def brozzler_list_captures(argv=None):
'''
Handy utility for looking up entries in the rethinkdb "captures" table by
url or sha1.
'''
import urlcanon
argv = argv or sys.argv
arg_parser = argparse.ArgumentParser(
prog=os.path.basename(argv[0]),
formatter_class=BetterArgumentDefaultsHelpFormatter)
arg_parser.add_argument(
'-p', '--prefix', dest='prefix', action='store_true', help=(
'use prefix match for url (n.b. may not work as expected if '
'searching key has query string because canonicalization can '
'reorder query parameters)'))
arg_parser.add_argument(
'--yaml', dest='yaml', action='store_true', help=(
'yaml output (default is json)'))
add_rethinkdb_options(arg_parser)
add_common_options(arg_parser, argv)
arg_parser.add_argument(
'url_or_sha1', metavar='URL_or_SHA1',
help='url or sha1 to look up in captures table')
args = arg_parser.parse_args(args=argv[1:])
configure_logging(args)
rr = rethinker(args)
if args.url_or_sha1[:5] == 'sha1:':
if args.prefix:
logging.warn(
'ignoring supplied --prefix option which does not apply '
'to lookup by sha1')
# assumes it's already base32 (XXX could detect if hex and convert)
sha1base32 = args.url_or_sha1[5:].upper()
reql = rr.table('captures').between(
[sha1base32, r.minval, r.minval],
[sha1base32, r.maxval, r.maxval],
index='sha1_warc_type')
logging.debug('querying rethinkdb: %s', reql)
results = reql.run()
else:
key = urlcanon.semantic(args.url_or_sha1).surt().decode('ascii')
abbr_start_key = key[:150]
if args.prefix:
# surt is necessarily ascii and \x7f is the last ascii character
abbr_end_key = key[:150] + '\x7f'
end_key = key + '\x7f'
else:
abbr_end_key = key[:150]
end_key = key
reql = rr.table('captures').between(
[abbr_start_key, r.minval],
[abbr_end_key, r.maxval],
index='abbr_canon_surt_timestamp', right_bound='closed')
reql = reql.order_by(index='abbr_canon_surt_timestamp')
reql = reql.filter(
lambda capture: (capture['canon_surt'] >= key)
& (capture['canon_surt'] <= end_key))
logging.debug('querying rethinkdb: %s', reql)
results = reql.run()
if args.yaml:
yaml.dump_all(
results, stream=sys.stdout, explicit_start=True,
default_flow_style=False)
else:
for result in results:
print(json.dumps(result, cls=Jsonner, indent=2)) | [
"def",
"brozzler_list_captures",
"(",
"argv",
"=",
"None",
")",
":",
"import",
"urlcanon",
"argv",
"=",
"argv",
"or",
"sys",
".",
"argv",
"arg_parser",
"=",
"argparse",
".",
"ArgumentParser",
"(",
"prog",
"=",
"os",
".",
"path",
".",
"basename",
"(",
"ar... | Handy utility for looking up entries in the rethinkdb "captures" table by
url or sha1. | [
"Handy",
"utility",
"for",
"looking",
"up",
"entries",
"in",
"the",
"rethinkdb",
"captures",
"table",
"by",
"url",
"or",
"sha1",
"."
] | 411b3f266a38b9bb942021c0121ebd8e5ca66447 | https://github.com/internetarchive/brozzler/blob/411b3f266a38b9bb942021c0121ebd8e5ca66447/brozzler/cli.py#L683-L753 |
231,947 | internetarchive/brozzler | brozzler/easy.py | BrozzlerEasyController._warcprox_opts | def _warcprox_opts(self, args):
'''
Takes args as produced by the argument parser built by
_build_arg_parser and builds warcprox arguments object suitable to pass
to warcprox.main.init_controller. Copies some arguments, renames some,
populates some with defaults appropriate for brozzler-easy, etc.
'''
warcprox_opts = warcprox.Options()
warcprox_opts.address = 'localhost'
# let the OS choose an available port; discover it later using
# sock.getsockname()[1]
warcprox_opts.port = 0
warcprox_opts.cacert = args.cacert
warcprox_opts.certs_dir = args.certs_dir
warcprox_opts.directory = args.warcs_dir
warcprox_opts.gzip = True
warcprox_opts.prefix = 'brozzler'
warcprox_opts.size = 1000 * 1000* 1000
warcprox_opts.rollover_idle_time = 3 * 60
warcprox_opts.digest_algorithm = 'sha1'
warcprox_opts.base32 = True
warcprox_opts.stats_db_file = None
warcprox_opts.playback_port = None
warcprox_opts.playback_index_db_file = None
warcprox_opts.rethinkdb_big_table_url = (
'rethinkdb://%s/%s/captures' % (
args.rethinkdb_servers, args.rethinkdb_db))
warcprox_opts.queue_size = 500
warcprox_opts.max_threads = None
warcprox_opts.profile = False
warcprox_opts.onion_tor_socks_proxy = args.onion_tor_socks_proxy
return warcprox_opts | python | def _warcprox_opts(self, args):
'''
Takes args as produced by the argument parser built by
_build_arg_parser and builds warcprox arguments object suitable to pass
to warcprox.main.init_controller. Copies some arguments, renames some,
populates some with defaults appropriate for brozzler-easy, etc.
'''
warcprox_opts = warcprox.Options()
warcprox_opts.address = 'localhost'
# let the OS choose an available port; discover it later using
# sock.getsockname()[1]
warcprox_opts.port = 0
warcprox_opts.cacert = args.cacert
warcprox_opts.certs_dir = args.certs_dir
warcprox_opts.directory = args.warcs_dir
warcprox_opts.gzip = True
warcprox_opts.prefix = 'brozzler'
warcprox_opts.size = 1000 * 1000* 1000
warcprox_opts.rollover_idle_time = 3 * 60
warcprox_opts.digest_algorithm = 'sha1'
warcprox_opts.base32 = True
warcprox_opts.stats_db_file = None
warcprox_opts.playback_port = None
warcprox_opts.playback_index_db_file = None
warcprox_opts.rethinkdb_big_table_url = (
'rethinkdb://%s/%s/captures' % (
args.rethinkdb_servers, args.rethinkdb_db))
warcprox_opts.queue_size = 500
warcprox_opts.max_threads = None
warcprox_opts.profile = False
warcprox_opts.onion_tor_socks_proxy = args.onion_tor_socks_proxy
return warcprox_opts | [
"def",
"_warcprox_opts",
"(",
"self",
",",
"args",
")",
":",
"warcprox_opts",
"=",
"warcprox",
".",
"Options",
"(",
")",
"warcprox_opts",
".",
"address",
"=",
"'localhost'",
"# let the OS choose an available port; discover it later using",
"# sock.getsockname()[1]",
"warc... | Takes args as produced by the argument parser built by
_build_arg_parser and builds warcprox arguments object suitable to pass
to warcprox.main.init_controller. Copies some arguments, renames some,
populates some with defaults appropriate for brozzler-easy, etc. | [
"Takes",
"args",
"as",
"produced",
"by",
"the",
"argument",
"parser",
"built",
"by",
"_build_arg_parser",
"and",
"builds",
"warcprox",
"arguments",
"object",
"suitable",
"to",
"pass",
"to",
"warcprox",
".",
"main",
".",
"init_controller",
".",
"Copies",
"some",
... | 411b3f266a38b9bb942021c0121ebd8e5ca66447 | https://github.com/internetarchive/brozzler/blob/411b3f266a38b9bb942021c0121ebd8e5ca66447/brozzler/easy.py#L224-L255 |
231,948 | internetarchive/brozzler | brozzler/robots.py | _reppy_rules_getitem | def _reppy_rules_getitem(self, agent):
'''
Find the user-agent token matching the supplied full user-agent, using
a case-insensitive substring search.
'''
lc_agent = agent.lower()
for s in self.agents:
if s in lc_agent:
return self.agents[s]
return self.agents.get('*') | python | def _reppy_rules_getitem(self, agent):
'''
Find the user-agent token matching the supplied full user-agent, using
a case-insensitive substring search.
'''
lc_agent = agent.lower()
for s in self.agents:
if s in lc_agent:
return self.agents[s]
return self.agents.get('*') | [
"def",
"_reppy_rules_getitem",
"(",
"self",
",",
"agent",
")",
":",
"lc_agent",
"=",
"agent",
".",
"lower",
"(",
")",
"for",
"s",
"in",
"self",
".",
"agents",
":",
"if",
"s",
"in",
"lc_agent",
":",
"return",
"self",
".",
"agents",
"[",
"s",
"]",
"r... | Find the user-agent token matching the supplied full user-agent, using
a case-insensitive substring search. | [
"Find",
"the",
"user",
"-",
"agent",
"token",
"matching",
"the",
"supplied",
"full",
"user",
"-",
"agent",
"using",
"a",
"case",
"-",
"insensitive",
"substring",
"search",
"."
] | 411b3f266a38b9bb942021c0121ebd8e5ca66447 | https://github.com/internetarchive/brozzler/blob/411b3f266a38b9bb942021c0121ebd8e5ca66447/brozzler/robots.py#L37-L46 |
231,949 | internetarchive/brozzler | brozzler/robots.py | is_permitted_by_robots | def is_permitted_by_robots(site, url, proxy=None):
'''
Checks if `url` is permitted by robots.txt.
Treats any kind of error fetching robots.txt as "allow all". See
http://builds.archive.org/javadoc/heritrix-3.x-snapshot/org/archive/modules/net/CrawlServer.html#updateRobots(org.archive.modules.CrawlURI)
for some background on that policy.
Returns:
bool: `True` if `site.ignore_robots` is set, or if `url` is permitted
by robots.txt, `False` otherwise
Raises:
brozzler.ReachedLimit: if warcprox responded with 420 Reached Limit
requests.exceptions.ProxyError: if the proxy is down
'''
if site.ignore_robots:
return True
try:
result = _robots_cache(site, proxy).allowed(
url, site.user_agent or "brozzler")
return result
except Exception as e:
if isinstance(e, reppy.exceptions.ServerError) and isinstance(
e.args[0], brozzler.ReachedLimit):
raise e.args[0]
elif hasattr(e, 'args') and isinstance(
e.args[0], requests.exceptions.ProxyError):
# reppy has wrapped an exception that we want to bubble up
raise brozzler.ProxyError(e)
else:
logging.warn(
"returning true (permitted) after problem fetching "
"robots.txt for %r: %r", url, e)
return True | python | def is_permitted_by_robots(site, url, proxy=None):
'''
Checks if `url` is permitted by robots.txt.
Treats any kind of error fetching robots.txt as "allow all". See
http://builds.archive.org/javadoc/heritrix-3.x-snapshot/org/archive/modules/net/CrawlServer.html#updateRobots(org.archive.modules.CrawlURI)
for some background on that policy.
Returns:
bool: `True` if `site.ignore_robots` is set, or if `url` is permitted
by robots.txt, `False` otherwise
Raises:
brozzler.ReachedLimit: if warcprox responded with 420 Reached Limit
requests.exceptions.ProxyError: if the proxy is down
'''
if site.ignore_robots:
return True
try:
result = _robots_cache(site, proxy).allowed(
url, site.user_agent or "brozzler")
return result
except Exception as e:
if isinstance(e, reppy.exceptions.ServerError) and isinstance(
e.args[0], brozzler.ReachedLimit):
raise e.args[0]
elif hasattr(e, 'args') and isinstance(
e.args[0], requests.exceptions.ProxyError):
# reppy has wrapped an exception that we want to bubble up
raise brozzler.ProxyError(e)
else:
logging.warn(
"returning true (permitted) after problem fetching "
"robots.txt for %r: %r", url, e)
return True | [
"def",
"is_permitted_by_robots",
"(",
"site",
",",
"url",
",",
"proxy",
"=",
"None",
")",
":",
"if",
"site",
".",
"ignore_robots",
":",
"return",
"True",
"try",
":",
"result",
"=",
"_robots_cache",
"(",
"site",
",",
"proxy",
")",
".",
"allowed",
"(",
"... | Checks if `url` is permitted by robots.txt.
Treats any kind of error fetching robots.txt as "allow all". See
http://builds.archive.org/javadoc/heritrix-3.x-snapshot/org/archive/modules/net/CrawlServer.html#updateRobots(org.archive.modules.CrawlURI)
for some background on that policy.
Returns:
bool: `True` if `site.ignore_robots` is set, or if `url` is permitted
by robots.txt, `False` otherwise
Raises:
brozzler.ReachedLimit: if warcprox responded with 420 Reached Limit
requests.exceptions.ProxyError: if the proxy is down | [
"Checks",
"if",
"url",
"is",
"permitted",
"by",
"robots",
".",
"txt",
"."
] | 411b3f266a38b9bb942021c0121ebd8e5ca66447 | https://github.com/internetarchive/brozzler/blob/411b3f266a38b9bb942021c0121ebd8e5ca66447/brozzler/robots.py#L77-L112 |
231,950 | internetarchive/brozzler | brozzler/ydl.py | final_bounces | def final_bounces(fetches, url):
"""
Resolves redirect chains in `fetches` and returns a list of fetches
representing the final redirect destinations of the given url. There could
be more than one if for example youtube-dl hit the same url with HEAD and
then GET requests.
"""
redirects = {}
for fetch in fetches:
# XXX check http status 301,302,303,307? check for "uri" header
# as well as "location"? see urllib.request.HTTPRedirectHandler
if 'location' in fetch['response_headers']:
redirects[fetch['url']] = fetch
final_url = url
while final_url in redirects:
fetch = redirects.pop(final_url)
final_url = urllib.parse.urljoin(
fetch['url'], fetch['response_headers']['location'])
final_bounces = []
for fetch in fetches:
if fetch['url'] == final_url:
final_bounces.append(fetch)
return final_bounces | python | def final_bounces(fetches, url):
redirects = {}
for fetch in fetches:
# XXX check http status 301,302,303,307? check for "uri" header
# as well as "location"? see urllib.request.HTTPRedirectHandler
if 'location' in fetch['response_headers']:
redirects[fetch['url']] = fetch
final_url = url
while final_url in redirects:
fetch = redirects.pop(final_url)
final_url = urllib.parse.urljoin(
fetch['url'], fetch['response_headers']['location'])
final_bounces = []
for fetch in fetches:
if fetch['url'] == final_url:
final_bounces.append(fetch)
return final_bounces | [
"def",
"final_bounces",
"(",
"fetches",
",",
"url",
")",
":",
"redirects",
"=",
"{",
"}",
"for",
"fetch",
"in",
"fetches",
":",
"# XXX check http status 301,302,303,307? check for \"uri\" header",
"# as well as \"location\"? see urllib.request.HTTPRedirectHandler",
"if",
"'lo... | Resolves redirect chains in `fetches` and returns a list of fetches
representing the final redirect destinations of the given url. There could
be more than one if for example youtube-dl hit the same url with HEAD and
then GET requests. | [
"Resolves",
"redirect",
"chains",
"in",
"fetches",
"and",
"returns",
"a",
"list",
"of",
"fetches",
"representing",
"the",
"final",
"redirect",
"destinations",
"of",
"the",
"given",
"url",
".",
"There",
"could",
"be",
"more",
"than",
"one",
"if",
"for",
"exam... | 411b3f266a38b9bb942021c0121ebd8e5ca66447 | https://github.com/internetarchive/brozzler/blob/411b3f266a38b9bb942021c0121ebd8e5ca66447/brozzler/ydl.py#L91-L116 |
231,951 | internetarchive/brozzler | brozzler/ydl.py | _remember_videos | def _remember_videos(page, fetches, stitch_ups=None):
'''
Saves info about videos captured by youtube-dl in `page.videos`.
'''
if not 'videos' in page:
page.videos = []
for fetch in fetches or []:
content_type = fetch['response_headers'].get_content_type()
if (content_type.startswith('video/')
# skip manifests of DASH segmented video -
# see https://github.com/internetarchive/brozzler/pull/70
and content_type != 'video/vnd.mpeg.dash.mpd'
and fetch['method'] == 'GET'
and fetch['response_code'] in (200, 206)):
video = {
'blame': 'youtube-dl',
'url': fetch['url'],
'response_code': fetch['response_code'],
'content-type': content_type,
}
if 'content-length' in fetch['response_headers']:
video['content-length'] = int(
fetch['response_headers']['content-length'])
if 'content-range' in fetch['response_headers']:
video['content-range'] = fetch[
'response_headers']['content-range']
logging.debug('embedded video %s', video)
page.videos.append(video)
for stitch_up in stitch_ups or []:
if stitch_up['content-type'].startswith('video/'):
video = {
'blame': 'youtube-dl',
'url': stitch_up['url'],
'response_code': stitch_up['response_code'],
'content-type': stitch_up['content-type'],
'content-length': stitch_up['content-length'],
}
logging.debug('embedded video %s', video)
page.videos.append(video) | python | def _remember_videos(page, fetches, stitch_ups=None):
'''
Saves info about videos captured by youtube-dl in `page.videos`.
'''
if not 'videos' in page:
page.videos = []
for fetch in fetches or []:
content_type = fetch['response_headers'].get_content_type()
if (content_type.startswith('video/')
# skip manifests of DASH segmented video -
# see https://github.com/internetarchive/brozzler/pull/70
and content_type != 'video/vnd.mpeg.dash.mpd'
and fetch['method'] == 'GET'
and fetch['response_code'] in (200, 206)):
video = {
'blame': 'youtube-dl',
'url': fetch['url'],
'response_code': fetch['response_code'],
'content-type': content_type,
}
if 'content-length' in fetch['response_headers']:
video['content-length'] = int(
fetch['response_headers']['content-length'])
if 'content-range' in fetch['response_headers']:
video['content-range'] = fetch[
'response_headers']['content-range']
logging.debug('embedded video %s', video)
page.videos.append(video)
for stitch_up in stitch_ups or []:
if stitch_up['content-type'].startswith('video/'):
video = {
'blame': 'youtube-dl',
'url': stitch_up['url'],
'response_code': stitch_up['response_code'],
'content-type': stitch_up['content-type'],
'content-length': stitch_up['content-length'],
}
logging.debug('embedded video %s', video)
page.videos.append(video) | [
"def",
"_remember_videos",
"(",
"page",
",",
"fetches",
",",
"stitch_ups",
"=",
"None",
")",
":",
"if",
"not",
"'videos'",
"in",
"page",
":",
"page",
".",
"videos",
"=",
"[",
"]",
"for",
"fetch",
"in",
"fetches",
"or",
"[",
"]",
":",
"content_type",
... | Saves info about videos captured by youtube-dl in `page.videos`. | [
"Saves",
"info",
"about",
"videos",
"captured",
"by",
"youtube",
"-",
"dl",
"in",
"page",
".",
"videos",
"."
] | 411b3f266a38b9bb942021c0121ebd8e5ca66447 | https://github.com/internetarchive/brozzler/blob/411b3f266a38b9bb942021c0121ebd8e5ca66447/brozzler/ydl.py#L274-L312 |
231,952 | internetarchive/brozzler | brozzler/ydl.py | do_youtube_dl | def do_youtube_dl(worker, site, page):
'''
Runs youtube-dl configured for `worker` and `site` to download videos from
`page`.
Args:
worker (brozzler.BrozzlerWorker): the calling brozzler worker
site (brozzler.Site): the site we are brozzling
page (brozzler.Page): the page we are brozzling
Returns:
tuple with two entries:
`list` of `dict`: with info about urls fetched:
[{
'url': ...,
'method': ...,
'response_code': ...,
'response_headers': ...,
}, ...]
`list` of `str`: outlink urls
'''
with tempfile.TemporaryDirectory(prefix='brzl-ydl-') as tempdir:
ydl = _build_youtube_dl(worker, tempdir, site)
ie_result = _try_youtube_dl(worker, ydl, site, page)
outlinks = set()
if ie_result and ie_result.get('extractor') == 'youtube:playlist':
# youtube watch pages as outlinks
outlinks = {'https://www.youtube.com/watch?v=%s' % e['id']
for e in ie_result.get('entries_no_dl', [])}
# any outlinks for other cases?
return ydl.fetch_spy.fetches, outlinks | python | def do_youtube_dl(worker, site, page):
'''
Runs youtube-dl configured for `worker` and `site` to download videos from
`page`.
Args:
worker (brozzler.BrozzlerWorker): the calling brozzler worker
site (brozzler.Site): the site we are brozzling
page (brozzler.Page): the page we are brozzling
Returns:
tuple with two entries:
`list` of `dict`: with info about urls fetched:
[{
'url': ...,
'method': ...,
'response_code': ...,
'response_headers': ...,
}, ...]
`list` of `str`: outlink urls
'''
with tempfile.TemporaryDirectory(prefix='brzl-ydl-') as tempdir:
ydl = _build_youtube_dl(worker, tempdir, site)
ie_result = _try_youtube_dl(worker, ydl, site, page)
outlinks = set()
if ie_result and ie_result.get('extractor') == 'youtube:playlist':
# youtube watch pages as outlinks
outlinks = {'https://www.youtube.com/watch?v=%s' % e['id']
for e in ie_result.get('entries_no_dl', [])}
# any outlinks for other cases?
return ydl.fetch_spy.fetches, outlinks | [
"def",
"do_youtube_dl",
"(",
"worker",
",",
"site",
",",
"page",
")",
":",
"with",
"tempfile",
".",
"TemporaryDirectory",
"(",
"prefix",
"=",
"'brzl-ydl-'",
")",
"as",
"tempdir",
":",
"ydl",
"=",
"_build_youtube_dl",
"(",
"worker",
",",
"tempdir",
",",
"si... | Runs youtube-dl configured for `worker` and `site` to download videos from
`page`.
Args:
worker (brozzler.BrozzlerWorker): the calling brozzler worker
site (brozzler.Site): the site we are brozzling
page (brozzler.Page): the page we are brozzling
Returns:
tuple with two entries:
`list` of `dict`: with info about urls fetched:
[{
'url': ...,
'method': ...,
'response_code': ...,
'response_headers': ...,
}, ...]
`list` of `str`: outlink urls | [
"Runs",
"youtube",
"-",
"dl",
"configured",
"for",
"worker",
"and",
"site",
"to",
"download",
"videos",
"from",
"page",
"."
] | 411b3f266a38b9bb942021c0121ebd8e5ca66447 | https://github.com/internetarchive/brozzler/blob/411b3f266a38b9bb942021c0121ebd8e5ca66447/brozzler/ydl.py#L357-L387 |
231,953 | internetarchive/brozzler | brozzler/dashboard/__init__.py | pages | def pages(site_id):
"""Pages already crawled."""
start = int(flask.request.args.get("start", 0))
end = int(flask.request.args.get("end", start + 90))
reql = rr.table("pages").between(
[site_id, 1, r.minval], [site_id, r.maxval, r.maxval],
index="least_hops").order_by(index="least_hops")[start:end]
logging.debug("querying rethinkdb: %s", reql)
pages_ = reql.run()
return flask.jsonify(pages=list(pages_)) | python | def pages(site_id):
start = int(flask.request.args.get("start", 0))
end = int(flask.request.args.get("end", start + 90))
reql = rr.table("pages").between(
[site_id, 1, r.minval], [site_id, r.maxval, r.maxval],
index="least_hops").order_by(index="least_hops")[start:end]
logging.debug("querying rethinkdb: %s", reql)
pages_ = reql.run()
return flask.jsonify(pages=list(pages_)) | [
"def",
"pages",
"(",
"site_id",
")",
":",
"start",
"=",
"int",
"(",
"flask",
".",
"request",
".",
"args",
".",
"get",
"(",
"\"start\"",
",",
"0",
")",
")",
"end",
"=",
"int",
"(",
"flask",
".",
"request",
".",
"args",
".",
"get",
"(",
"\"end\"",
... | Pages already crawled. | [
"Pages",
"already",
"crawled",
"."
] | 411b3f266a38b9bb942021c0121ebd8e5ca66447 | https://github.com/internetarchive/brozzler/blob/411b3f266a38b9bb942021c0121ebd8e5ca66447/brozzler/dashboard/__init__.py#L95-L104 |
231,954 | internetarchive/brozzler | brozzler/chrome.py | check_version | def check_version(chrome_exe):
'''
Raises SystemExit if `chrome_exe` is not a supported browser version.
Must run in the main thread to have the desired effect.
'''
# mac$ /Applications/Google\ Chrome.app/Contents/MacOS/Google\ Chrome --version
# Google Chrome 64.0.3282.140
# mac$ /Applications/Google\ Chrome\ Canary.app/Contents/MacOS/Google\ Chrome\ Canary --version
# Google Chrome 66.0.3341.0 canary
# linux$ chromium-browser --version
# Using PPAPI flash.
# --ppapi-flash-path=/usr/lib/adobe-flashplugin/libpepflashplayer.so --ppapi-flash-version=
# Chromium 61.0.3163.100 Built on Ubuntu , running on Ubuntu 16.04
cmd = [chrome_exe, '--version']
out = subprocess.check_output(cmd, timeout=60)
m = re.search(br'(Chromium|Google Chrome) ([\d.]+)', out)
if not m:
sys.exit(
'unable to parse browser version from output of '
'%r: %r' % (subprocess.list2cmdline(cmd), out))
version_str = m.group(2).decode()
major_version = int(version_str.split('.')[0])
if major_version < 64:
sys.exit('brozzler requires chrome/chromium version 64 or '
'later but %s reports version %s' % (
chrome_exe, version_str)) | python | def check_version(chrome_exe):
'''
Raises SystemExit if `chrome_exe` is not a supported browser version.
Must run in the main thread to have the desired effect.
'''
# mac$ /Applications/Google\ Chrome.app/Contents/MacOS/Google\ Chrome --version
# Google Chrome 64.0.3282.140
# mac$ /Applications/Google\ Chrome\ Canary.app/Contents/MacOS/Google\ Chrome\ Canary --version
# Google Chrome 66.0.3341.0 canary
# linux$ chromium-browser --version
# Using PPAPI flash.
# --ppapi-flash-path=/usr/lib/adobe-flashplugin/libpepflashplayer.so --ppapi-flash-version=
# Chromium 61.0.3163.100 Built on Ubuntu , running on Ubuntu 16.04
cmd = [chrome_exe, '--version']
out = subprocess.check_output(cmd, timeout=60)
m = re.search(br'(Chromium|Google Chrome) ([\d.]+)', out)
if not m:
sys.exit(
'unable to parse browser version from output of '
'%r: %r' % (subprocess.list2cmdline(cmd), out))
version_str = m.group(2).decode()
major_version = int(version_str.split('.')[0])
if major_version < 64:
sys.exit('brozzler requires chrome/chromium version 64 or '
'later but %s reports version %s' % (
chrome_exe, version_str)) | [
"def",
"check_version",
"(",
"chrome_exe",
")",
":",
"# mac$ /Applications/Google\\ Chrome.app/Contents/MacOS/Google\\ Chrome --version",
"# Google Chrome 64.0.3282.140 ",
"# mac$ /Applications/Google\\ Chrome\\ Canary.app/Contents/MacOS/Google\\ Chrome\\ Canary --version",
"# Google Chrome 66.0.3... | Raises SystemExit if `chrome_exe` is not a supported browser version.
Must run in the main thread to have the desired effect. | [
"Raises",
"SystemExit",
"if",
"chrome_exe",
"is",
"not",
"a",
"supported",
"browser",
"version",
"."
] | 411b3f266a38b9bb942021c0121ebd8e5ca66447 | https://github.com/internetarchive/brozzler/blob/411b3f266a38b9bb942021c0121ebd8e5ca66447/brozzler/chrome.py#L34-L60 |
231,955 | internetarchive/brozzler | brozzler/worker.py | BrozzlerWorker._service_heartbeat_if_due | def _service_heartbeat_if_due(self):
'''Sends service registry heartbeat if due'''
due = False
if self._service_registry:
if not hasattr(self, "status_info"):
due = True
else:
d = doublethink.utcnow() - self.status_info["last_heartbeat"]
due = d.total_seconds() > self.HEARTBEAT_INTERVAL
if due:
self._service_heartbeat() | python | def _service_heartbeat_if_due(self):
'''Sends service registry heartbeat if due'''
due = False
if self._service_registry:
if not hasattr(self, "status_info"):
due = True
else:
d = doublethink.utcnow() - self.status_info["last_heartbeat"]
due = d.total_seconds() > self.HEARTBEAT_INTERVAL
if due:
self._service_heartbeat() | [
"def",
"_service_heartbeat_if_due",
"(",
"self",
")",
":",
"due",
"=",
"False",
"if",
"self",
".",
"_service_registry",
":",
"if",
"not",
"hasattr",
"(",
"self",
",",
"\"status_info\"",
")",
":",
"due",
"=",
"True",
"else",
":",
"d",
"=",
"doublethink",
... | Sends service registry heartbeat if due | [
"Sends",
"service",
"registry",
"heartbeat",
"if",
"due"
] | 411b3f266a38b9bb942021c0121ebd8e5ca66447 | https://github.com/internetarchive/brozzler/blob/411b3f266a38b9bb942021c0121ebd8e5ca66447/brozzler/worker.py#L448-L459 |
231,956 | internetarchive/brozzler | brozzler/worker.py | BrozzlerWorker._start_browsing_some_sites | def _start_browsing_some_sites(self):
'''
Starts browsing some sites.
Raises:
NoBrowsersAvailable if none available
'''
# acquire_multi() raises NoBrowsersAvailable if none available
browsers = self._browser_pool.acquire_multi(
(self._browser_pool.num_available() + 1) // 2)
try:
sites = self._frontier.claim_sites(len(browsers))
except:
self._browser_pool.release_all(browsers)
raise
for i in range(len(browsers)):
if i < len(sites):
th = threading.Thread(
target=self._brozzle_site_thread_target,
args=(browsers[i], sites[i]),
name="BrozzlingThread:%s" % browsers[i].chrome.port,
daemon=True)
with self._browsing_threads_lock:
self._browsing_threads.add(th)
th.start()
else:
self._browser_pool.release(browsers[i]) | python | def _start_browsing_some_sites(self):
'''
Starts browsing some sites.
Raises:
NoBrowsersAvailable if none available
'''
# acquire_multi() raises NoBrowsersAvailable if none available
browsers = self._browser_pool.acquire_multi(
(self._browser_pool.num_available() + 1) // 2)
try:
sites = self._frontier.claim_sites(len(browsers))
except:
self._browser_pool.release_all(browsers)
raise
for i in range(len(browsers)):
if i < len(sites):
th = threading.Thread(
target=self._brozzle_site_thread_target,
args=(browsers[i], sites[i]),
name="BrozzlingThread:%s" % browsers[i].chrome.port,
daemon=True)
with self._browsing_threads_lock:
self._browsing_threads.add(th)
th.start()
else:
self._browser_pool.release(browsers[i]) | [
"def",
"_start_browsing_some_sites",
"(",
"self",
")",
":",
"# acquire_multi() raises NoBrowsersAvailable if none available",
"browsers",
"=",
"self",
".",
"_browser_pool",
".",
"acquire_multi",
"(",
"(",
"self",
".",
"_browser_pool",
".",
"num_available",
"(",
")",
"+"... | Starts browsing some sites.
Raises:
NoBrowsersAvailable if none available | [
"Starts",
"browsing",
"some",
"sites",
"."
] | 411b3f266a38b9bb942021c0121ebd8e5ca66447 | https://github.com/internetarchive/brozzler/blob/411b3f266a38b9bb942021c0121ebd8e5ca66447/brozzler/worker.py#L461-L488 |
231,957 | idank/bashlex | bashlex/tokenizer.py | tokenizer._createtoken | def _createtoken(self, type_, value, flags=None):
'''create a token with position information'''
pos = None
assert len(self._positions) >= 2, (type_, value)
p2 = self._positions.pop()
p1 = self._positions.pop()
pos = [p1, p2]
return token(type_, value, pos, flags) | python | def _createtoken(self, type_, value, flags=None):
'''create a token with position information'''
pos = None
assert len(self._positions) >= 2, (type_, value)
p2 = self._positions.pop()
p1 = self._positions.pop()
pos = [p1, p2]
return token(type_, value, pos, flags) | [
"def",
"_createtoken",
"(",
"self",
",",
"type_",
",",
"value",
",",
"flags",
"=",
"None",
")",
":",
"pos",
"=",
"None",
"assert",
"len",
"(",
"self",
".",
"_positions",
")",
">=",
"2",
",",
"(",
"type_",
",",
"value",
")",
"p2",
"=",
"self",
"."... | create a token with position information | [
"create",
"a",
"token",
"with",
"position",
"information"
] | 800cb7e3c634eaa3c81f8a8648fd7fd4e27050ac | https://github.com/idank/bashlex/blob/800cb7e3c634eaa3c81f8a8648fd7fd4e27050ac/bashlex/tokenizer.py#L257-L264 |
231,958 | idank/bashlex | bashlex/parser.py | parse | def parse(s, strictmode=True, expansionlimit=None, convertpos=False):
'''parse the input string, returning a list of nodes
top level node kinds are:
- command - a simple command
- pipeline - a series of simple commands
- list - a series of one or more pipelines
- compound - contains constructs for { list; }, (list), if, for..
leafs are word nodes (which in turn can also contain any of the
aforementioned nodes due to command substitutions).
when strictmode is set to False, we will:
- skip reading a heredoc if we're at the end of the input
expansionlimit is used to limit the amount of recursive parsing done due to
command substitutions found during word expansion.
'''
p = _parser(s, strictmode=strictmode, expansionlimit=expansionlimit)
parts = [p.parse()]
class endfinder(ast.nodevisitor):
def __init__(self):
self.end = -1
def visitheredoc(self, node, value):
self.end = node.pos[1]
# find the 'real' end incase we have a heredoc in there
ef = _endfinder()
ef.visit(parts[-1])
index = max(parts[-1].pos[1], ef.end) + 1
while index < len(s):
part = _parser(s[index:], strictmode=strictmode).parse()
if not isinstance(part, ast.node):
break
ast.posshifter(index).visit(part)
parts.append(part)
ef = _endfinder()
ef.visit(parts[-1])
index = max(parts[-1].pos[1], ef.end) + 1
if convertpos:
for tree in parts:
ast.posconverter(s).visit(tree)
return parts | python | def parse(s, strictmode=True, expansionlimit=None, convertpos=False):
'''parse the input string, returning a list of nodes
top level node kinds are:
- command - a simple command
- pipeline - a series of simple commands
- list - a series of one or more pipelines
- compound - contains constructs for { list; }, (list), if, for..
leafs are word nodes (which in turn can also contain any of the
aforementioned nodes due to command substitutions).
when strictmode is set to False, we will:
- skip reading a heredoc if we're at the end of the input
expansionlimit is used to limit the amount of recursive parsing done due to
command substitutions found during word expansion.
'''
p = _parser(s, strictmode=strictmode, expansionlimit=expansionlimit)
parts = [p.parse()]
class endfinder(ast.nodevisitor):
def __init__(self):
self.end = -1
def visitheredoc(self, node, value):
self.end = node.pos[1]
# find the 'real' end incase we have a heredoc in there
ef = _endfinder()
ef.visit(parts[-1])
index = max(parts[-1].pos[1], ef.end) + 1
while index < len(s):
part = _parser(s[index:], strictmode=strictmode).parse()
if not isinstance(part, ast.node):
break
ast.posshifter(index).visit(part)
parts.append(part)
ef = _endfinder()
ef.visit(parts[-1])
index = max(parts[-1].pos[1], ef.end) + 1
if convertpos:
for tree in parts:
ast.posconverter(s).visit(tree)
return parts | [
"def",
"parse",
"(",
"s",
",",
"strictmode",
"=",
"True",
",",
"expansionlimit",
"=",
"None",
",",
"convertpos",
"=",
"False",
")",
":",
"p",
"=",
"_parser",
"(",
"s",
",",
"strictmode",
"=",
"strictmode",
",",
"expansionlimit",
"=",
"expansionlimit",
")... | parse the input string, returning a list of nodes
top level node kinds are:
- command - a simple command
- pipeline - a series of simple commands
- list - a series of one or more pipelines
- compound - contains constructs for { list; }, (list), if, for..
leafs are word nodes (which in turn can also contain any of the
aforementioned nodes due to command substitutions).
when strictmode is set to False, we will:
- skip reading a heredoc if we're at the end of the input
expansionlimit is used to limit the amount of recursive parsing done due to
command substitutions found during word expansion. | [
"parse",
"the",
"input",
"string",
"returning",
"a",
"list",
"of",
"nodes"
] | 800cb7e3c634eaa3c81f8a8648fd7fd4e27050ac | https://github.com/idank/bashlex/blob/800cb7e3c634eaa3c81f8a8648fd7fd4e27050ac/bashlex/parser.py#L581-L629 |
231,959 | idank/bashlex | bashlex/parser.py | split | def split(s):
'''a utility function that mimics shlex.split but handles more
complex shell constructs such as command substitutions inside words
>>> list(split('a b"c"\\'d\\''))
['a', 'bcd']
>>> list(split('a "b $(c)" $(d) \\'$(e)\\''))
['a', 'b $(c)', '$(d)', '$(e)']
>>> list(split('a b\\n'))
['a', 'b', '\\n']
'''
p = _parser(s)
for t in p.tok:
if t.ttype == tokenizer.tokentype.WORD:
quoted = bool(t.flags & flags.word.QUOTED)
doublequoted = quoted and t.value[0] == '"'
parts, expandedword = subst._expandwordinternal(p, t, 0,
doublequoted, 0, 0)
yield expandedword
else:
yield s[t.lexpos:t.endlexpos] | python | def split(s):
'''a utility function that mimics shlex.split but handles more
complex shell constructs such as command substitutions inside words
>>> list(split('a b"c"\\'d\\''))
['a', 'bcd']
>>> list(split('a "b $(c)" $(d) \\'$(e)\\''))
['a', 'b $(c)', '$(d)', '$(e)']
>>> list(split('a b\\n'))
['a', 'b', '\\n']
'''
p = _parser(s)
for t in p.tok:
if t.ttype == tokenizer.tokentype.WORD:
quoted = bool(t.flags & flags.word.QUOTED)
doublequoted = quoted and t.value[0] == '"'
parts, expandedword = subst._expandwordinternal(p, t, 0,
doublequoted, 0, 0)
yield expandedword
else:
yield s[t.lexpos:t.endlexpos] | [
"def",
"split",
"(",
"s",
")",
":",
"p",
"=",
"_parser",
"(",
"s",
")",
"for",
"t",
"in",
"p",
".",
"tok",
":",
"if",
"t",
".",
"ttype",
"==",
"tokenizer",
".",
"tokentype",
".",
"WORD",
":",
"quoted",
"=",
"bool",
"(",
"t",
".",
"flags",
"&"... | a utility function that mimics shlex.split but handles more
complex shell constructs such as command substitutions inside words
>>> list(split('a b"c"\\'d\\''))
['a', 'bcd']
>>> list(split('a "b $(c)" $(d) \\'$(e)\\''))
['a', 'b $(c)', '$(d)', '$(e)']
>>> list(split('a b\\n'))
['a', 'b', '\\n'] | [
"a",
"utility",
"function",
"that",
"mimics",
"shlex",
".",
"split",
"but",
"handles",
"more",
"complex",
"shell",
"constructs",
"such",
"as",
"command",
"substitutions",
"inside",
"words"
] | 800cb7e3c634eaa3c81f8a8648fd7fd4e27050ac | https://github.com/idank/bashlex/blob/800cb7e3c634eaa3c81f8a8648fd7fd4e27050ac/bashlex/parser.py#L631-L651 |
231,960 | tomasbasham/ratelimit | ratelimit/decorators.py | sleep_and_retry | def sleep_and_retry(func):
'''
Return a wrapped function that rescues rate limit exceptions, sleeping the
current thread until rate limit resets.
:param function func: The function to decorate.
:return: Decorated function.
:rtype: function
'''
@wraps(func)
def wrapper(*args, **kargs):
'''
Call the rate limited function. If the function raises a rate limit
exception sleep for the remaing time period and retry the function.
:param args: non-keyword variable length argument list to the decorated function.
:param kargs: keyworded variable length argument list to the decorated function.
'''
while True:
try:
return func(*args, **kargs)
except RateLimitException as exception:
time.sleep(exception.period_remaining)
return wrapper | python | def sleep_and_retry(func):
'''
Return a wrapped function that rescues rate limit exceptions, sleeping the
current thread until rate limit resets.
:param function func: The function to decorate.
:return: Decorated function.
:rtype: function
'''
@wraps(func)
def wrapper(*args, **kargs):
'''
Call the rate limited function. If the function raises a rate limit
exception sleep for the remaing time period and retry the function.
:param args: non-keyword variable length argument list to the decorated function.
:param kargs: keyworded variable length argument list to the decorated function.
'''
while True:
try:
return func(*args, **kargs)
except RateLimitException as exception:
time.sleep(exception.period_remaining)
return wrapper | [
"def",
"sleep_and_retry",
"(",
"func",
")",
":",
"@",
"wraps",
"(",
"func",
")",
"def",
"wrapper",
"(",
"*",
"args",
",",
"*",
"*",
"kargs",
")",
":",
"'''\n Call the rate limited function. If the function raises a rate limit\n exception sleep for the remain... | Return a wrapped function that rescues rate limit exceptions, sleeping the
current thread until rate limit resets.
:param function func: The function to decorate.
:return: Decorated function.
:rtype: function | [
"Return",
"a",
"wrapped",
"function",
"that",
"rescues",
"rate",
"limit",
"exceptions",
"sleeping",
"the",
"current",
"thread",
"until",
"rate",
"limit",
"resets",
"."
] | 18d5f3382724a8ae2d4f066a1bd51c74d5ae1524 | https://github.com/tomasbasham/ratelimit/blob/18d5f3382724a8ae2d4f066a1bd51c74d5ae1524/ratelimit/decorators.py#L98-L121 |
231,961 | tomasbasham/ratelimit | ratelimit/decorators.py | RateLimitDecorator.__period_remaining | def __period_remaining(self):
'''
Return the period remaining for the current rate limit window.
:return: The remaing period.
:rtype: float
'''
elapsed = self.clock() - self.last_reset
return self.period - elapsed | python | def __period_remaining(self):
'''
Return the period remaining for the current rate limit window.
:return: The remaing period.
:rtype: float
'''
elapsed = self.clock() - self.last_reset
return self.period - elapsed | [
"def",
"__period_remaining",
"(",
"self",
")",
":",
"elapsed",
"=",
"self",
".",
"clock",
"(",
")",
"-",
"self",
".",
"last_reset",
"return",
"self",
".",
"period",
"-",
"elapsed"
] | Return the period remaining for the current rate limit window.
:return: The remaing period.
:rtype: float | [
"Return",
"the",
"period",
"remaining",
"for",
"the",
"current",
"rate",
"limit",
"window",
"."
] | 18d5f3382724a8ae2d4f066a1bd51c74d5ae1524 | https://github.com/tomasbasham/ratelimit/blob/18d5f3382724a8ae2d4f066a1bd51c74d5ae1524/ratelimit/decorators.py#L88-L96 |
231,962 | Alexis-benoist/eralchemy | eralchemy/parser.py | filter_lines_from_comments | def filter_lines_from_comments(lines):
""" Filter the lines from comments and non code lines. """
for line_nb, raw_line in enumerate(lines):
clean_line = remove_comments_from_line(raw_line)
if clean_line == '':
continue
yield line_nb, clean_line, raw_line | python | def filter_lines_from_comments(lines):
for line_nb, raw_line in enumerate(lines):
clean_line = remove_comments_from_line(raw_line)
if clean_line == '':
continue
yield line_nb, clean_line, raw_line | [
"def",
"filter_lines_from_comments",
"(",
"lines",
")",
":",
"for",
"line_nb",
",",
"raw_line",
"in",
"enumerate",
"(",
"lines",
")",
":",
"clean_line",
"=",
"remove_comments_from_line",
"(",
"raw_line",
")",
"if",
"clean_line",
"==",
"''",
":",
"continue",
"y... | Filter the lines from comments and non code lines. | [
"Filter",
"the",
"lines",
"from",
"comments",
"and",
"non",
"code",
"lines",
"."
] | d6fcdc67d6d413bb174bf008fd360044e1dff5a7 | https://github.com/Alexis-benoist/eralchemy/blob/d6fcdc67d6d413bb174bf008fd360044e1dff5a7/eralchemy/parser.py#L43-L49 |
231,963 | Alexis-benoist/eralchemy | eralchemy/parser.py | _check_no_current_table | def _check_no_current_table(new_obj, current_table):
""" Raises exception if we try to add a relation or a column
with no current table. """
if current_table is None:
msg = 'Cannot add {} before adding table'
if isinstance(new_obj, Relation):
raise NoCurrentTableException(msg.format('relation'))
if isinstance(new_obj, Column):
raise NoCurrentTableException(msg.format('column')) | python | def _check_no_current_table(new_obj, current_table):
if current_table is None:
msg = 'Cannot add {} before adding table'
if isinstance(new_obj, Relation):
raise NoCurrentTableException(msg.format('relation'))
if isinstance(new_obj, Column):
raise NoCurrentTableException(msg.format('column')) | [
"def",
"_check_no_current_table",
"(",
"new_obj",
",",
"current_table",
")",
":",
"if",
"current_table",
"is",
"None",
":",
"msg",
"=",
"'Cannot add {} before adding table'",
"if",
"isinstance",
"(",
"new_obj",
",",
"Relation",
")",
":",
"raise",
"NoCurrentTableExce... | Raises exception if we try to add a relation or a column
with no current table. | [
"Raises",
"exception",
"if",
"we",
"try",
"to",
"add",
"a",
"relation",
"or",
"a",
"column",
"with",
"no",
"current",
"table",
"."
] | d6fcdc67d6d413bb174bf008fd360044e1dff5a7 | https://github.com/Alexis-benoist/eralchemy/blob/d6fcdc67d6d413bb174bf008fd360044e1dff5a7/eralchemy/parser.py#L61-L69 |
231,964 | Alexis-benoist/eralchemy | eralchemy/parser.py | update_models | def update_models(new_obj, current_table, tables, relations):
""" Update the state of the parsing. """
_update_check_inputs(current_table, tables, relations)
_check_no_current_table(new_obj, current_table)
if isinstance(new_obj, Table):
tables_names = [t.name for t in tables]
_check_not_creating_duplicates(new_obj.name, tables_names, 'table', DuplicateTableException)
return new_obj, tables + [new_obj], relations
if isinstance(new_obj, Relation):
tables_names = [t.name for t in tables]
_check_colname_in_lst(new_obj.right_col, tables_names)
_check_colname_in_lst(new_obj.left_col, tables_names)
return current_table, tables, relations + [new_obj]
if isinstance(new_obj, Column):
columns_names = [c.name for c in current_table.columns]
_check_not_creating_duplicates(new_obj.name, columns_names, 'column', DuplicateColumnException)
current_table.columns.append(new_obj)
return current_table, tables, relations
msg = "new_obj cannot be of type {}"
raise ValueError(msg.format(new_obj.__class__.__name__)) | python | def update_models(new_obj, current_table, tables, relations):
_update_check_inputs(current_table, tables, relations)
_check_no_current_table(new_obj, current_table)
if isinstance(new_obj, Table):
tables_names = [t.name for t in tables]
_check_not_creating_duplicates(new_obj.name, tables_names, 'table', DuplicateTableException)
return new_obj, tables + [new_obj], relations
if isinstance(new_obj, Relation):
tables_names = [t.name for t in tables]
_check_colname_in_lst(new_obj.right_col, tables_names)
_check_colname_in_lst(new_obj.left_col, tables_names)
return current_table, tables, relations + [new_obj]
if isinstance(new_obj, Column):
columns_names = [c.name for c in current_table.columns]
_check_not_creating_duplicates(new_obj.name, columns_names, 'column', DuplicateColumnException)
current_table.columns.append(new_obj)
return current_table, tables, relations
msg = "new_obj cannot be of type {}"
raise ValueError(msg.format(new_obj.__class__.__name__)) | [
"def",
"update_models",
"(",
"new_obj",
",",
"current_table",
",",
"tables",
",",
"relations",
")",
":",
"_update_check_inputs",
"(",
"current_table",
",",
"tables",
",",
"relations",
")",
"_check_no_current_table",
"(",
"new_obj",
",",
"current_table",
")",
"if",... | Update the state of the parsing. | [
"Update",
"the",
"state",
"of",
"the",
"parsing",
"."
] | d6fcdc67d6d413bb174bf008fd360044e1dff5a7 | https://github.com/Alexis-benoist/eralchemy/blob/d6fcdc67d6d413bb174bf008fd360044e1dff5a7/eralchemy/parser.py#L93-L116 |
231,965 | Alexis-benoist/eralchemy | eralchemy/parser.py | markdown_file_to_intermediary | def markdown_file_to_intermediary(filename):
""" Parse a file and return to intermediary syntax. """
with open(filename) as f:
lines = f.readlines()
return line_iterator_to_intermediary(lines) | python | def markdown_file_to_intermediary(filename):
with open(filename) as f:
lines = f.readlines()
return line_iterator_to_intermediary(lines) | [
"def",
"markdown_file_to_intermediary",
"(",
"filename",
")",
":",
"with",
"open",
"(",
"filename",
")",
"as",
"f",
":",
"lines",
"=",
"f",
".",
"readlines",
"(",
")",
"return",
"line_iterator_to_intermediary",
"(",
"lines",
")"
] | Parse a file and return to intermediary syntax. | [
"Parse",
"a",
"file",
"and",
"return",
"to",
"intermediary",
"syntax",
"."
] | d6fcdc67d6d413bb174bf008fd360044e1dff5a7 | https://github.com/Alexis-benoist/eralchemy/blob/d6fcdc67d6d413bb174bf008fd360044e1dff5a7/eralchemy/parser.py#L119-L123 |
231,966 | Alexis-benoist/eralchemy | eralchemy/helpers.py | check_args | def check_args(args):
"""Checks that the args are coherent."""
check_args_has_attributes(args)
if args.v:
non_version_attrs = [v for k, v in args.__dict__.items() if k != 'v']
print('non_version_attrs', non_version_attrs)
if len([v for v in non_version_attrs if v is not None]) != 0:
fail('Cannot show the version number with another command.')
return
if args.i is None:
fail('Cannot draw ER diagram of no database.')
if args.o is None:
fail('Cannot draw ER diagram with no output file.') | python | def check_args(args):
check_args_has_attributes(args)
if args.v:
non_version_attrs = [v for k, v in args.__dict__.items() if k != 'v']
print('non_version_attrs', non_version_attrs)
if len([v for v in non_version_attrs if v is not None]) != 0:
fail('Cannot show the version number with another command.')
return
if args.i is None:
fail('Cannot draw ER diagram of no database.')
if args.o is None:
fail('Cannot draw ER diagram with no output file.') | [
"def",
"check_args",
"(",
"args",
")",
":",
"check_args_has_attributes",
"(",
"args",
")",
"if",
"args",
".",
"v",
":",
"non_version_attrs",
"=",
"[",
"v",
"for",
"k",
",",
"v",
"in",
"args",
".",
"__dict__",
".",
"items",
"(",
")",
"if",
"k",
"!=",
... | Checks that the args are coherent. | [
"Checks",
"that",
"the",
"args",
"are",
"coherent",
"."
] | d6fcdc67d6d413bb174bf008fd360044e1dff5a7 | https://github.com/Alexis-benoist/eralchemy/blob/d6fcdc67d6d413bb174bf008fd360044e1dff5a7/eralchemy/helpers.py#L11-L23 |
231,967 | Alexis-benoist/eralchemy | eralchemy/sqla.py | relation_to_intermediary | def relation_to_intermediary(fk):
"""Transform an SQLAlchemy ForeignKey object to it's intermediary representation. """
return Relation(
right_col=format_name(fk.parent.table.fullname),
left_col=format_name(fk._column_tokens[1]),
right_cardinality='?',
left_cardinality='*',
) | python | def relation_to_intermediary(fk):
return Relation(
right_col=format_name(fk.parent.table.fullname),
left_col=format_name(fk._column_tokens[1]),
right_cardinality='?',
left_cardinality='*',
) | [
"def",
"relation_to_intermediary",
"(",
"fk",
")",
":",
"return",
"Relation",
"(",
"right_col",
"=",
"format_name",
"(",
"fk",
".",
"parent",
".",
"table",
".",
"fullname",
")",
",",
"left_col",
"=",
"format_name",
"(",
"fk",
".",
"_column_tokens",
"[",
"1... | Transform an SQLAlchemy ForeignKey object to it's intermediary representation. | [
"Transform",
"an",
"SQLAlchemy",
"ForeignKey",
"object",
"to",
"it",
"s",
"intermediary",
"representation",
"."
] | d6fcdc67d6d413bb174bf008fd360044e1dff5a7 | https://github.com/Alexis-benoist/eralchemy/blob/d6fcdc67d6d413bb174bf008fd360044e1dff5a7/eralchemy/sqla.py#L14-L21 |
231,968 | Alexis-benoist/eralchemy | eralchemy/sqla.py | column_to_intermediary | def column_to_intermediary(col, type_formatter=format_type):
"""Transform an SQLAlchemy Column object to it's intermediary representation. """
return Column(
name=col.name,
type=type_formatter(col.type),
is_key=col.primary_key,
) | python | def column_to_intermediary(col, type_formatter=format_type):
return Column(
name=col.name,
type=type_formatter(col.type),
is_key=col.primary_key,
) | [
"def",
"column_to_intermediary",
"(",
"col",
",",
"type_formatter",
"=",
"format_type",
")",
":",
"return",
"Column",
"(",
"name",
"=",
"col",
".",
"name",
",",
"type",
"=",
"type_formatter",
"(",
"col",
".",
"type",
")",
",",
"is_key",
"=",
"col",
".",
... | Transform an SQLAlchemy Column object to it's intermediary representation. | [
"Transform",
"an",
"SQLAlchemy",
"Column",
"object",
"to",
"it",
"s",
"intermediary",
"representation",
"."
] | d6fcdc67d6d413bb174bf008fd360044e1dff5a7 | https://github.com/Alexis-benoist/eralchemy/blob/d6fcdc67d6d413bb174bf008fd360044e1dff5a7/eralchemy/sqla.py#L37-L43 |
231,969 | Alexis-benoist/eralchemy | eralchemy/sqla.py | table_to_intermediary | def table_to_intermediary(table):
"""Transform an SQLAlchemy Table object to it's intermediary representation. """
return Table(
name=table.fullname,
columns=[column_to_intermediary(col) for col in table.c._data.values()]
) | python | def table_to_intermediary(table):
return Table(
name=table.fullname,
columns=[column_to_intermediary(col) for col in table.c._data.values()]
) | [
"def",
"table_to_intermediary",
"(",
"table",
")",
":",
"return",
"Table",
"(",
"name",
"=",
"table",
".",
"fullname",
",",
"columns",
"=",
"[",
"column_to_intermediary",
"(",
"col",
")",
"for",
"col",
"in",
"table",
".",
"c",
".",
"_data",
".",
"values"... | Transform an SQLAlchemy Table object to it's intermediary representation. | [
"Transform",
"an",
"SQLAlchemy",
"Table",
"object",
"to",
"it",
"s",
"intermediary",
"representation",
"."
] | d6fcdc67d6d413bb174bf008fd360044e1dff5a7 | https://github.com/Alexis-benoist/eralchemy/blob/d6fcdc67d6d413bb174bf008fd360044e1dff5a7/eralchemy/sqla.py#L46-L51 |
231,970 | Alexis-benoist/eralchemy | eralchemy/sqla.py | metadata_to_intermediary | def metadata_to_intermediary(metadata):
""" Transforms SQLAlchemy metadata to the intermediary representation. """
tables = [table_to_intermediary(table) for table in metadata.tables.values()]
relationships = [relation_to_intermediary(fk) for table in metadata.tables.values() for fk in table.foreign_keys]
return tables, relationships | python | def metadata_to_intermediary(metadata):
tables = [table_to_intermediary(table) for table in metadata.tables.values()]
relationships = [relation_to_intermediary(fk) for table in metadata.tables.values() for fk in table.foreign_keys]
return tables, relationships | [
"def",
"metadata_to_intermediary",
"(",
"metadata",
")",
":",
"tables",
"=",
"[",
"table_to_intermediary",
"(",
"table",
")",
"for",
"table",
"in",
"metadata",
".",
"tables",
".",
"values",
"(",
")",
"]",
"relationships",
"=",
"[",
"relation_to_intermediary",
... | Transforms SQLAlchemy metadata to the intermediary representation. | [
"Transforms",
"SQLAlchemy",
"metadata",
"to",
"the",
"intermediary",
"representation",
"."
] | d6fcdc67d6d413bb174bf008fd360044e1dff5a7 | https://github.com/Alexis-benoist/eralchemy/blob/d6fcdc67d6d413bb174bf008fd360044e1dff5a7/eralchemy/sqla.py#L54-L58 |
231,971 | Alexis-benoist/eralchemy | eralchemy/sqla.py | name_for_scalar_relationship | def name_for_scalar_relationship(base, local_cls, referred_cls, constraint):
""" Overriding naming schemes. """
name = referred_cls.__name__.lower() + "_ref"
return name | python | def name_for_scalar_relationship(base, local_cls, referred_cls, constraint):
name = referred_cls.__name__.lower() + "_ref"
return name | [
"def",
"name_for_scalar_relationship",
"(",
"base",
",",
"local_cls",
",",
"referred_cls",
",",
"constraint",
")",
":",
"name",
"=",
"referred_cls",
".",
"__name__",
".",
"lower",
"(",
")",
"+",
"\"_ref\"",
"return",
"name"
] | Overriding naming schemes. | [
"Overriding",
"naming",
"schemes",
"."
] | d6fcdc67d6d413bb174bf008fd360044e1dff5a7 | https://github.com/Alexis-benoist/eralchemy/blob/d6fcdc67d6d413bb174bf008fd360044e1dff5a7/eralchemy/sqla.py#L66-L69 |
231,972 | Alexis-benoist/eralchemy | eralchemy/main.py | intermediary_to_markdown | def intermediary_to_markdown(tables, relationships, output):
""" Saves the intermediary representation to markdown. """
er_markup = _intermediary_to_markdown(tables, relationships)
with open(output, "w") as file_out:
file_out.write(er_markup) | python | def intermediary_to_markdown(tables, relationships, output):
er_markup = _intermediary_to_markdown(tables, relationships)
with open(output, "w") as file_out:
file_out.write(er_markup) | [
"def",
"intermediary_to_markdown",
"(",
"tables",
",",
"relationships",
",",
"output",
")",
":",
"er_markup",
"=",
"_intermediary_to_markdown",
"(",
"tables",
",",
"relationships",
")",
"with",
"open",
"(",
"output",
",",
"\"w\"",
")",
"as",
"file_out",
":",
"... | Saves the intermediary representation to markdown. | [
"Saves",
"the",
"intermediary",
"representation",
"to",
"markdown",
"."
] | d6fcdc67d6d413bb174bf008fd360044e1dff5a7 | https://github.com/Alexis-benoist/eralchemy/blob/d6fcdc67d6d413bb174bf008fd360044e1dff5a7/eralchemy/main.py#L55-L59 |
231,973 | Alexis-benoist/eralchemy | eralchemy/main.py | intermediary_to_dot | def intermediary_to_dot(tables, relationships, output):
""" Save the intermediary representation to dot format. """
dot_file = _intermediary_to_dot(tables, relationships)
with open(output, "w") as file_out:
file_out.write(dot_file) | python | def intermediary_to_dot(tables, relationships, output):
dot_file = _intermediary_to_dot(tables, relationships)
with open(output, "w") as file_out:
file_out.write(dot_file) | [
"def",
"intermediary_to_dot",
"(",
"tables",
",",
"relationships",
",",
"output",
")",
":",
"dot_file",
"=",
"_intermediary_to_dot",
"(",
"tables",
",",
"relationships",
")",
"with",
"open",
"(",
"output",
",",
"\"w\"",
")",
"as",
"file_out",
":",
"file_out",
... | Save the intermediary representation to dot format. | [
"Save",
"the",
"intermediary",
"representation",
"to",
"dot",
"format",
"."
] | d6fcdc67d6d413bb174bf008fd360044e1dff5a7 | https://github.com/Alexis-benoist/eralchemy/blob/d6fcdc67d6d413bb174bf008fd360044e1dff5a7/eralchemy/main.py#L62-L66 |
231,974 | Alexis-benoist/eralchemy | eralchemy/main.py | intermediary_to_schema | def intermediary_to_schema(tables, relationships, output):
""" Transforms and save the intermediary representation to the file chosen. """
dot_file = _intermediary_to_dot(tables, relationships)
graph = AGraph()
graph = graph.from_string(dot_file)
extension = output.split('.')[-1]
graph.draw(path=output, prog='dot', format=extension) | python | def intermediary_to_schema(tables, relationships, output):
dot_file = _intermediary_to_dot(tables, relationships)
graph = AGraph()
graph = graph.from_string(dot_file)
extension = output.split('.')[-1]
graph.draw(path=output, prog='dot', format=extension) | [
"def",
"intermediary_to_schema",
"(",
"tables",
",",
"relationships",
",",
"output",
")",
":",
"dot_file",
"=",
"_intermediary_to_dot",
"(",
"tables",
",",
"relationships",
")",
"graph",
"=",
"AGraph",
"(",
")",
"graph",
"=",
"graph",
".",
"from_string",
"(",
... | Transforms and save the intermediary representation to the file chosen. | [
"Transforms",
"and",
"save",
"the",
"intermediary",
"representation",
"to",
"the",
"file",
"chosen",
"."
] | d6fcdc67d6d413bb174bf008fd360044e1dff5a7 | https://github.com/Alexis-benoist/eralchemy/blob/d6fcdc67d6d413bb174bf008fd360044e1dff5a7/eralchemy/main.py#L69-L75 |
231,975 | Alexis-benoist/eralchemy | eralchemy/main.py | _intermediary_to_markdown | def _intermediary_to_markdown(tables, relationships):
""" Returns the er markup source in a string. """
t = '\n'.join(t.to_markdown() for t in tables)
r = '\n'.join(r.to_markdown() for r in relationships)
return '{}\n{}'.format(t, r) | python | def _intermediary_to_markdown(tables, relationships):
t = '\n'.join(t.to_markdown() for t in tables)
r = '\n'.join(r.to_markdown() for r in relationships)
return '{}\n{}'.format(t, r) | [
"def",
"_intermediary_to_markdown",
"(",
"tables",
",",
"relationships",
")",
":",
"t",
"=",
"'\\n'",
".",
"join",
"(",
"t",
".",
"to_markdown",
"(",
")",
"for",
"t",
"in",
"tables",
")",
"r",
"=",
"'\\n'",
".",
"join",
"(",
"r",
".",
"to_markdown",
... | Returns the er markup source in a string. | [
"Returns",
"the",
"er",
"markup",
"source",
"in",
"a",
"string",
"."
] | d6fcdc67d6d413bb174bf008fd360044e1dff5a7 | https://github.com/Alexis-benoist/eralchemy/blob/d6fcdc67d6d413bb174bf008fd360044e1dff5a7/eralchemy/main.py#L78-L82 |
231,976 | Alexis-benoist/eralchemy | eralchemy/main.py | _intermediary_to_dot | def _intermediary_to_dot(tables, relationships):
""" Returns the dot source representing the database in a string. """
t = '\n'.join(t.to_dot() for t in tables)
r = '\n'.join(r.to_dot() for r in relationships)
return '{}\n{}\n{}\n}}'.format(GRAPH_BEGINNING, t, r) | python | def _intermediary_to_dot(tables, relationships):
t = '\n'.join(t.to_dot() for t in tables)
r = '\n'.join(r.to_dot() for r in relationships)
return '{}\n{}\n{}\n}}'.format(GRAPH_BEGINNING, t, r) | [
"def",
"_intermediary_to_dot",
"(",
"tables",
",",
"relationships",
")",
":",
"t",
"=",
"'\\n'",
".",
"join",
"(",
"t",
".",
"to_dot",
"(",
")",
"for",
"t",
"in",
"tables",
")",
"r",
"=",
"'\\n'",
".",
"join",
"(",
"r",
".",
"to_dot",
"(",
")",
"... | Returns the dot source representing the database in a string. | [
"Returns",
"the",
"dot",
"source",
"representing",
"the",
"database",
"in",
"a",
"string",
"."
] | d6fcdc67d6d413bb174bf008fd360044e1dff5a7 | https://github.com/Alexis-benoist/eralchemy/blob/d6fcdc67d6d413bb174bf008fd360044e1dff5a7/eralchemy/main.py#L85-L89 |
231,977 | Alexis-benoist/eralchemy | eralchemy/main.py | all_to_intermediary | def all_to_intermediary(filename_or_input, schema=None):
""" Dispatch the filename_or_input to the different function to produce the intermediary syntax.
All the supported classes names are in `swich_input_class_to_method`.
The input can also be a list of strings in markdown format or a filename finishing by '.er' containing markdown
format.
"""
# Try to convert from the name of the class
input_class_name = filename_or_input.__class__.__name__
try:
this_to_intermediary = switch_input_class_to_method[input_class_name]
tables, relationships = this_to_intermediary(filename_or_input)
return tables, relationships
except KeyError:
pass
# try to read markdown file.
if isinstance(filename_or_input, basestring):
if filename_or_input.split('.')[-1] == 'er':
return markdown_file_to_intermediary(filename_or_input)
# try to read a markdown in a string
if not isinstance(filename_or_input, basestring):
if all(isinstance(e, basestring) for e in filename_or_input):
return line_iterator_to_intermediary(filename_or_input)
# try to read DB URI.
try:
make_url(filename_or_input)
return database_to_intermediary(filename_or_input, schema=schema)
except ArgumentError:
pass
msg = 'Cannot process filename_or_input {}'.format(input_class_name)
raise ValueError(msg) | python | def all_to_intermediary(filename_or_input, schema=None):
# Try to convert from the name of the class
input_class_name = filename_or_input.__class__.__name__
try:
this_to_intermediary = switch_input_class_to_method[input_class_name]
tables, relationships = this_to_intermediary(filename_or_input)
return tables, relationships
except KeyError:
pass
# try to read markdown file.
if isinstance(filename_or_input, basestring):
if filename_or_input.split('.')[-1] == 'er':
return markdown_file_to_intermediary(filename_or_input)
# try to read a markdown in a string
if not isinstance(filename_or_input, basestring):
if all(isinstance(e, basestring) for e in filename_or_input):
return line_iterator_to_intermediary(filename_or_input)
# try to read DB URI.
try:
make_url(filename_or_input)
return database_to_intermediary(filename_or_input, schema=schema)
except ArgumentError:
pass
msg = 'Cannot process filename_or_input {}'.format(input_class_name)
raise ValueError(msg) | [
"def",
"all_to_intermediary",
"(",
"filename_or_input",
",",
"schema",
"=",
"None",
")",
":",
"# Try to convert from the name of the class",
"input_class_name",
"=",
"filename_or_input",
".",
"__class__",
".",
"__name__",
"try",
":",
"this_to_intermediary",
"=",
"switch_i... | Dispatch the filename_or_input to the different function to produce the intermediary syntax.
All the supported classes names are in `swich_input_class_to_method`.
The input can also be a list of strings in markdown format or a filename finishing by '.er' containing markdown
format. | [
"Dispatch",
"the",
"filename_or_input",
"to",
"the",
"different",
"function",
"to",
"produce",
"the",
"intermediary",
"syntax",
".",
"All",
"the",
"supported",
"classes",
"names",
"are",
"in",
"swich_input_class_to_method",
".",
"The",
"input",
"can",
"also",
"be"... | d6fcdc67d6d413bb174bf008fd360044e1dff5a7 | https://github.com/Alexis-benoist/eralchemy/blob/d6fcdc67d6d413bb174bf008fd360044e1dff5a7/eralchemy/main.py#L119-L152 |
231,978 | Alexis-benoist/eralchemy | eralchemy/main.py | get_output_mode | def get_output_mode(output, mode):
"""
From the output name and the mode returns a the function that will transform the intermediary
representation to the output.
"""
if mode != 'auto':
try:
return switch_output_mode_auto[mode]
except KeyError:
raise ValueError('Mode "{}" is not supported.')
extension = output.split('.')[-1]
try:
return switch_output_mode[extension]
except KeyError:
return intermediary_to_schema | python | def get_output_mode(output, mode):
if mode != 'auto':
try:
return switch_output_mode_auto[mode]
except KeyError:
raise ValueError('Mode "{}" is not supported.')
extension = output.split('.')[-1]
try:
return switch_output_mode[extension]
except KeyError:
return intermediary_to_schema | [
"def",
"get_output_mode",
"(",
"output",
",",
"mode",
")",
":",
"if",
"mode",
"!=",
"'auto'",
":",
"try",
":",
"return",
"switch_output_mode_auto",
"[",
"mode",
"]",
"except",
"KeyError",
":",
"raise",
"ValueError",
"(",
"'Mode \"{}\" is not supported.'",
")",
... | From the output name and the mode returns a the function that will transform the intermediary
representation to the output. | [
"From",
"the",
"output",
"name",
"and",
"the",
"mode",
"returns",
"a",
"the",
"function",
"that",
"will",
"transform",
"the",
"intermediary",
"representation",
"to",
"the",
"output",
"."
] | d6fcdc67d6d413bb174bf008fd360044e1dff5a7 | https://github.com/Alexis-benoist/eralchemy/blob/d6fcdc67d6d413bb174bf008fd360044e1dff5a7/eralchemy/main.py#L155-L170 |
231,979 | notifiers/notifiers | notifiers_cli/utils/dynamic_click.py | handle_oneof | def handle_oneof(oneof_schema: list) -> tuple:
"""
Custom handle of `oneOf` JSON schema validator. Tried to match primitive type and see if it should be allowed
to be passed multiple timns into a command
:param oneof_schema: `oneOf` JSON schema
:return: Tuple of :class:`click.ParamType`, ``multiple`` flag and ``description`` of option
"""
oneof_dict = {schema["type"]: schema for schema in oneof_schema}
click_type = None
multiple = False
description = None
for key, value in oneof_dict.items():
if key == "array":
continue
elif key in SCHEMA_BASE_MAP:
if oneof_dict.get("array") and oneof_dict["array"]["items"]["type"] == key:
multiple = True
# Found a match to a primitive type
click_type = SCHEMA_BASE_MAP[key]
description = value.get("title")
break
return click_type, multiple, description | python | def handle_oneof(oneof_schema: list) -> tuple:
oneof_dict = {schema["type"]: schema for schema in oneof_schema}
click_type = None
multiple = False
description = None
for key, value in oneof_dict.items():
if key == "array":
continue
elif key in SCHEMA_BASE_MAP:
if oneof_dict.get("array") and oneof_dict["array"]["items"]["type"] == key:
multiple = True
# Found a match to a primitive type
click_type = SCHEMA_BASE_MAP[key]
description = value.get("title")
break
return click_type, multiple, description | [
"def",
"handle_oneof",
"(",
"oneof_schema",
":",
"list",
")",
"->",
"tuple",
":",
"oneof_dict",
"=",
"{",
"schema",
"[",
"\"type\"",
"]",
":",
"schema",
"for",
"schema",
"in",
"oneof_schema",
"}",
"click_type",
"=",
"None",
"multiple",
"=",
"False",
"descr... | Custom handle of `oneOf` JSON schema validator. Tried to match primitive type and see if it should be allowed
to be passed multiple timns into a command
:param oneof_schema: `oneOf` JSON schema
:return: Tuple of :class:`click.ParamType`, ``multiple`` flag and ``description`` of option | [
"Custom",
"handle",
"of",
"oneOf",
"JSON",
"schema",
"validator",
".",
"Tried",
"to",
"match",
"primitive",
"type",
"and",
"see",
"if",
"it",
"should",
"be",
"allowed",
"to",
"be",
"passed",
"multiple",
"timns",
"into",
"a",
"command"
] | 6dd8aafff86935dbb4763db9c56f9cdd7fc08b65 | https://github.com/notifiers/notifiers/blob/6dd8aafff86935dbb4763db9c56f9cdd7fc08b65/notifiers_cli/utils/dynamic_click.py#L24-L46 |
231,980 | notifiers/notifiers | notifiers_cli/utils/dynamic_click.py | clean_data | def clean_data(data: dict) -> dict:
"""Removes all empty values and converts tuples into lists"""
new_data = {}
for key, value in data.items():
# Verify that only explicitly passed args get passed on
if not isinstance(value, bool) and not value:
continue
# Multiple choice command are passed as tuples, convert to list to match schema
if isinstance(value, tuple):
value = list(value)
new_data[key] = value
return new_data | python | def clean_data(data: dict) -> dict:
new_data = {}
for key, value in data.items():
# Verify that only explicitly passed args get passed on
if not isinstance(value, bool) and not value:
continue
# Multiple choice command are passed as tuples, convert to list to match schema
if isinstance(value, tuple):
value = list(value)
new_data[key] = value
return new_data | [
"def",
"clean_data",
"(",
"data",
":",
"dict",
")",
"->",
"dict",
":",
"new_data",
"=",
"{",
"}",
"for",
"key",
",",
"value",
"in",
"data",
".",
"items",
"(",
")",
":",
"# Verify that only explicitly passed args get passed on",
"if",
"not",
"isinstance",
"("... | Removes all empty values and converts tuples into lists | [
"Removes",
"all",
"empty",
"values",
"and",
"converts",
"tuples",
"into",
"lists"
] | 6dd8aafff86935dbb4763db9c56f9cdd7fc08b65 | https://github.com/notifiers/notifiers/blob/6dd8aafff86935dbb4763db9c56f9cdd7fc08b65/notifiers_cli/utils/dynamic_click.py#L70-L82 |
231,981 | notifiers/notifiers | notifiers/core.py | SchemaResource.schema | def schema(self) -> dict:
"""
A property method that'll return the constructed provider schema.
Schema MUST be an object and this method must be overridden
:return: JSON schema of the provider
"""
if not self._merged_schema:
log.debug("merging required dict into schema for %s", self.name)
self._merged_schema = self._schema.copy()
self._merged_schema.update(self._required)
return self._merged_schema | python | def schema(self) -> dict:
if not self._merged_schema:
log.debug("merging required dict into schema for %s", self.name)
self._merged_schema = self._schema.copy()
self._merged_schema.update(self._required)
return self._merged_schema | [
"def",
"schema",
"(",
"self",
")",
"->",
"dict",
":",
"if",
"not",
"self",
".",
"_merged_schema",
":",
"log",
".",
"debug",
"(",
"\"merging required dict into schema for %s\"",
",",
"self",
".",
"name",
")",
"self",
".",
"_merged_schema",
"=",
"self",
".",
... | A property method that'll return the constructed provider schema.
Schema MUST be an object and this method must be overridden
:return: JSON schema of the provider | [
"A",
"property",
"method",
"that",
"ll",
"return",
"the",
"constructed",
"provider",
"schema",
".",
"Schema",
"MUST",
"be",
"an",
"object",
"and",
"this",
"method",
"must",
"be",
"overridden"
] | 6dd8aafff86935dbb4763db9c56f9cdd7fc08b65 | https://github.com/notifiers/notifiers/blob/6dd8aafff86935dbb4763db9c56f9cdd7fc08b65/notifiers/core.py#L96-L107 |
231,982 | notifiers/notifiers | notifiers/core.py | SchemaResource._process_data | def _process_data(self, **data) -> dict:
"""
The main method that process all resources data. Validates schema, gets environs, validates data, prepares
it via provider requirements, merges defaults and check for data dependencies
:param data: The raw data passed by the notifiers client
:return: Processed data
"""
env_prefix = data.pop("env_prefix", None)
environs = self._get_environs(env_prefix)
if environs:
data = merge_dicts(data, environs)
data = self._merge_defaults(data)
self._validate_data(data)
data = self._validate_data_dependencies(data)
data = self._prepare_data(data)
return data | python | def _process_data(self, **data) -> dict:
env_prefix = data.pop("env_prefix", None)
environs = self._get_environs(env_prefix)
if environs:
data = merge_dicts(data, environs)
data = self._merge_defaults(data)
self._validate_data(data)
data = self._validate_data_dependencies(data)
data = self._prepare_data(data)
return data | [
"def",
"_process_data",
"(",
"self",
",",
"*",
"*",
"data",
")",
"->",
"dict",
":",
"env_prefix",
"=",
"data",
".",
"pop",
"(",
"\"env_prefix\"",
",",
"None",
")",
"environs",
"=",
"self",
".",
"_get_environs",
"(",
"env_prefix",
")",
"if",
"environs",
... | The main method that process all resources data. Validates schema, gets environs, validates data, prepares
it via provider requirements, merges defaults and check for data dependencies
:param data: The raw data passed by the notifiers client
:return: Processed data | [
"The",
"main",
"method",
"that",
"process",
"all",
"resources",
"data",
".",
"Validates",
"schema",
"gets",
"environs",
"validates",
"data",
"prepares",
"it",
"via",
"provider",
"requirements",
"merges",
"defaults",
"and",
"check",
"for",
"data",
"dependencies"
] | 6dd8aafff86935dbb4763db9c56f9cdd7fc08b65 | https://github.com/notifiers/notifiers/blob/6dd8aafff86935dbb4763db9c56f9cdd7fc08b65/notifiers/core.py#L221-L238 |
231,983 | notifiers/notifiers | notifiers/utils/schema/formats.py | is_iso8601 | def is_iso8601(instance: str):
"""Validates ISO8601 format"""
if not isinstance(instance, str):
return True
return ISO8601.match(instance) is not None | python | def is_iso8601(instance: str):
if not isinstance(instance, str):
return True
return ISO8601.match(instance) is not None | [
"def",
"is_iso8601",
"(",
"instance",
":",
"str",
")",
":",
"if",
"not",
"isinstance",
"(",
"instance",
",",
"str",
")",
":",
"return",
"True",
"return",
"ISO8601",
".",
"match",
"(",
"instance",
")",
"is",
"not",
"None"
] | Validates ISO8601 format | [
"Validates",
"ISO8601",
"format"
] | 6dd8aafff86935dbb4763db9c56f9cdd7fc08b65 | https://github.com/notifiers/notifiers/blob/6dd8aafff86935dbb4763db9c56f9cdd7fc08b65/notifiers/utils/schema/formats.py#L25-L29 |
231,984 | notifiers/notifiers | notifiers/utils/schema/formats.py | is_rfc2822 | def is_rfc2822(instance: str):
"""Validates RFC2822 format"""
if not isinstance(instance, str):
return True
return email.utils.parsedate(instance) is not None | python | def is_rfc2822(instance: str):
if not isinstance(instance, str):
return True
return email.utils.parsedate(instance) is not None | [
"def",
"is_rfc2822",
"(",
"instance",
":",
"str",
")",
":",
"if",
"not",
"isinstance",
"(",
"instance",
",",
"str",
")",
":",
"return",
"True",
"return",
"email",
".",
"utils",
".",
"parsedate",
"(",
"instance",
")",
"is",
"not",
"None"
] | Validates RFC2822 format | [
"Validates",
"RFC2822",
"format"
] | 6dd8aafff86935dbb4763db9c56f9cdd7fc08b65 | https://github.com/notifiers/notifiers/blob/6dd8aafff86935dbb4763db9c56f9cdd7fc08b65/notifiers/utils/schema/formats.py#L33-L37 |
231,985 | notifiers/notifiers | notifiers/utils/schema/formats.py | is_valid_port | def is_valid_port(instance: int):
"""Validates data is a valid port"""
if not isinstance(instance, (int, str)):
return True
return int(instance) in range(65535) | python | def is_valid_port(instance: int):
if not isinstance(instance, (int, str)):
return True
return int(instance) in range(65535) | [
"def",
"is_valid_port",
"(",
"instance",
":",
"int",
")",
":",
"if",
"not",
"isinstance",
"(",
"instance",
",",
"(",
"int",
",",
"str",
")",
")",
":",
"return",
"True",
"return",
"int",
"(",
"instance",
")",
"in",
"range",
"(",
"65535",
")"
] | Validates data is a valid port | [
"Validates",
"data",
"is",
"a",
"valid",
"port"
] | 6dd8aafff86935dbb4763db9c56f9cdd7fc08b65 | https://github.com/notifiers/notifiers/blob/6dd8aafff86935dbb4763db9c56f9cdd7fc08b65/notifiers/utils/schema/formats.py#L57-L61 |
231,986 | notifiers/notifiers | notifiers/utils/schema/formats.py | is_timestamp | def is_timestamp(instance):
"""Validates data is a timestamp"""
if not isinstance(instance, (int, str)):
return True
return datetime.fromtimestamp(int(instance)) | python | def is_timestamp(instance):
if not isinstance(instance, (int, str)):
return True
return datetime.fromtimestamp(int(instance)) | [
"def",
"is_timestamp",
"(",
"instance",
")",
":",
"if",
"not",
"isinstance",
"(",
"instance",
",",
"(",
"int",
",",
"str",
")",
")",
":",
"return",
"True",
"return",
"datetime",
".",
"fromtimestamp",
"(",
"int",
"(",
"instance",
")",
")"
] | Validates data is a timestamp | [
"Validates",
"data",
"is",
"a",
"timestamp"
] | 6dd8aafff86935dbb4763db9c56f9cdd7fc08b65 | https://github.com/notifiers/notifiers/blob/6dd8aafff86935dbb4763db9c56f9cdd7fc08b65/notifiers/utils/schema/formats.py#L65-L69 |
231,987 | notifiers/notifiers | notifiers_cli/utils/callbacks.py | func_factory | def func_factory(p, method: str) -> callable:
"""
Dynamically generates callback commands to correlate to provider public methods
:param p: A :class:`notifiers.core.Provider` object
:param method: A string correlating to a provider method
:return: A callback func
"""
def callback(pretty: bool = False):
res = getattr(p, method)
dump = partial(json.dumps, indent=4) if pretty else partial(json.dumps)
click.echo(dump(res))
return callback | python | def func_factory(p, method: str) -> callable:
def callback(pretty: bool = False):
res = getattr(p, method)
dump = partial(json.dumps, indent=4) if pretty else partial(json.dumps)
click.echo(dump(res))
return callback | [
"def",
"func_factory",
"(",
"p",
",",
"method",
":",
"str",
")",
"->",
"callable",
":",
"def",
"callback",
"(",
"pretty",
":",
"bool",
"=",
"False",
")",
":",
"res",
"=",
"getattr",
"(",
"p",
",",
"method",
")",
"dump",
"=",
"partial",
"(",
"json",... | Dynamically generates callback commands to correlate to provider public methods
:param p: A :class:`notifiers.core.Provider` object
:param method: A string correlating to a provider method
:return: A callback func | [
"Dynamically",
"generates",
"callback",
"commands",
"to",
"correlate",
"to",
"provider",
"public",
"methods"
] | 6dd8aafff86935dbb4763db9c56f9cdd7fc08b65 | https://github.com/notifiers/notifiers/blob/6dd8aafff86935dbb4763db9c56f9cdd7fc08b65/notifiers_cli/utils/callbacks.py#L14-L28 |
231,988 | notifiers/notifiers | notifiers_cli/utils/callbacks.py | _notify | def _notify(p, **data):
"""The callback func that will be hooked to the ``notify`` command"""
message = data.get("message")
if not message and not sys.stdin.isatty():
message = click.get_text_stream("stdin").read()
data["message"] = message
data = clean_data(data)
ctx = click.get_current_context()
if ctx.obj.get("env_prefix"):
data["env_prefix"] = ctx.obj["env_prefix"]
rsp = p.notify(**data)
rsp.raise_on_errors()
click.secho(f"Succesfully sent a notification to {p.name}!", fg="green") | python | def _notify(p, **data):
message = data.get("message")
if not message and not sys.stdin.isatty():
message = click.get_text_stream("stdin").read()
data["message"] = message
data = clean_data(data)
ctx = click.get_current_context()
if ctx.obj.get("env_prefix"):
data["env_prefix"] = ctx.obj["env_prefix"]
rsp = p.notify(**data)
rsp.raise_on_errors()
click.secho(f"Succesfully sent a notification to {p.name}!", fg="green") | [
"def",
"_notify",
"(",
"p",
",",
"*",
"*",
"data",
")",
":",
"message",
"=",
"data",
".",
"get",
"(",
"\"message\"",
")",
"if",
"not",
"message",
"and",
"not",
"sys",
".",
"stdin",
".",
"isatty",
"(",
")",
":",
"message",
"=",
"click",
".",
"get_... | The callback func that will be hooked to the ``notify`` command | [
"The",
"callback",
"func",
"that",
"will",
"be",
"hooked",
"to",
"the",
"notify",
"command"
] | 6dd8aafff86935dbb4763db9c56f9cdd7fc08b65 | https://github.com/notifiers/notifiers/blob/6dd8aafff86935dbb4763db9c56f9cdd7fc08b65/notifiers_cli/utils/callbacks.py#L31-L46 |
231,989 | notifiers/notifiers | notifiers_cli/utils/callbacks.py | _resource | def _resource(resource, pretty: bool = None, **data):
"""The callback func that will be hooked to the generic resource commands"""
data = clean_data(data)
ctx = click.get_current_context()
if ctx.obj.get("env_prefix"):
data["env_prefix"] = ctx.obj["env_prefix"]
rsp = resource(**data)
dump = partial(json.dumps, indent=4) if pretty else partial(json.dumps)
click.echo(dump(rsp)) | python | def _resource(resource, pretty: bool = None, **data):
data = clean_data(data)
ctx = click.get_current_context()
if ctx.obj.get("env_prefix"):
data["env_prefix"] = ctx.obj["env_prefix"]
rsp = resource(**data)
dump = partial(json.dumps, indent=4) if pretty else partial(json.dumps)
click.echo(dump(rsp)) | [
"def",
"_resource",
"(",
"resource",
",",
"pretty",
":",
"bool",
"=",
"None",
",",
"*",
"*",
"data",
")",
":",
"data",
"=",
"clean_data",
"(",
"data",
")",
"ctx",
"=",
"click",
".",
"get_current_context",
"(",
")",
"if",
"ctx",
".",
"obj",
".",
"ge... | The callback func that will be hooked to the generic resource commands | [
"The",
"callback",
"func",
"that",
"will",
"be",
"hooked",
"to",
"the",
"generic",
"resource",
"commands"
] | 6dd8aafff86935dbb4763db9c56f9cdd7fc08b65 | https://github.com/notifiers/notifiers/blob/6dd8aafff86935dbb4763db9c56f9cdd7fc08b65/notifiers_cli/utils/callbacks.py#L49-L59 |
231,990 | notifiers/notifiers | notifiers_cli/utils/callbacks.py | _resources | def _resources(p):
"""Callback func to display provider resources"""
if p.resources:
click.echo(",".join(p.resources))
else:
click.echo(f"Provider '{p.name}' does not have resource helpers") | python | def _resources(p):
if p.resources:
click.echo(",".join(p.resources))
else:
click.echo(f"Provider '{p.name}' does not have resource helpers") | [
"def",
"_resources",
"(",
"p",
")",
":",
"if",
"p",
".",
"resources",
":",
"click",
".",
"echo",
"(",
"\",\"",
".",
"join",
"(",
"p",
".",
"resources",
")",
")",
"else",
":",
"click",
".",
"echo",
"(",
"f\"Provider '{p.name}' does not have resource helpers... | Callback func to display provider resources | [
"Callback",
"func",
"to",
"display",
"provider",
"resources"
] | 6dd8aafff86935dbb4763db9c56f9cdd7fc08b65 | https://github.com/notifiers/notifiers/blob/6dd8aafff86935dbb4763db9c56f9cdd7fc08b65/notifiers_cli/utils/callbacks.py#L62-L67 |
231,991 | notifiers/notifiers | notifiers/utils/schema/helpers.py | one_or_more | def one_or_more(
schema: dict, unique_items: bool = True, min: int = 1, max: int = None
) -> dict:
"""
Helper function to construct a schema that validates items matching
`schema` or an array containing items matching `schema`.
:param schema: The schema to use
:param unique_items: Flag if array items should be unique
:param min: Correlates to ``minLength`` attribute of JSON Schema array
:param max: Correlates to ``maxLength`` attribute of JSON Schema array
"""
multi_schema = {
"type": "array",
"items": schema,
"minItems": min,
"uniqueItems": unique_items,
}
if max:
multi_schema["maxItems"] = max
return {"oneOf": [multi_schema, schema]} | python | def one_or_more(
schema: dict, unique_items: bool = True, min: int = 1, max: int = None
) -> dict:
multi_schema = {
"type": "array",
"items": schema,
"minItems": min,
"uniqueItems": unique_items,
}
if max:
multi_schema["maxItems"] = max
return {"oneOf": [multi_schema, schema]} | [
"def",
"one_or_more",
"(",
"schema",
":",
"dict",
",",
"unique_items",
":",
"bool",
"=",
"True",
",",
"min",
":",
"int",
"=",
"1",
",",
"max",
":",
"int",
"=",
"None",
")",
"->",
"dict",
":",
"multi_schema",
"=",
"{",
"\"type\"",
":",
"\"array\"",
... | Helper function to construct a schema that validates items matching
`schema` or an array containing items matching `schema`.
:param schema: The schema to use
:param unique_items: Flag if array items should be unique
:param min: Correlates to ``minLength`` attribute of JSON Schema array
:param max: Correlates to ``maxLength`` attribute of JSON Schema array | [
"Helper",
"function",
"to",
"construct",
"a",
"schema",
"that",
"validates",
"items",
"matching",
"schema",
"or",
"an",
"array",
"containing",
"items",
"matching",
"schema",
"."
] | 6dd8aafff86935dbb4763db9c56f9cdd7fc08b65 | https://github.com/notifiers/notifiers/blob/6dd8aafff86935dbb4763db9c56f9cdd7fc08b65/notifiers/utils/schema/helpers.py#L1-L21 |
231,992 | notifiers/notifiers | notifiers/utils/helpers.py | text_to_bool | def text_to_bool(value: str) -> bool:
"""
Tries to convert a text value to a bool. If unsuccessful returns if value is None or not
:param value: Value to check
"""
try:
return bool(strtobool(value))
except (ValueError, AttributeError):
return value is not None | python | def text_to_bool(value: str) -> bool:
try:
return bool(strtobool(value))
except (ValueError, AttributeError):
return value is not None | [
"def",
"text_to_bool",
"(",
"value",
":",
"str",
")",
"->",
"bool",
":",
"try",
":",
"return",
"bool",
"(",
"strtobool",
"(",
"value",
")",
")",
"except",
"(",
"ValueError",
",",
"AttributeError",
")",
":",
"return",
"value",
"is",
"not",
"None"
] | Tries to convert a text value to a bool. If unsuccessful returns if value is None or not
:param value: Value to check | [
"Tries",
"to",
"convert",
"a",
"text",
"value",
"to",
"a",
"bool",
".",
"If",
"unsuccessful",
"returns",
"if",
"value",
"is",
"None",
"or",
"not"
] | 6dd8aafff86935dbb4763db9c56f9cdd7fc08b65 | https://github.com/notifiers/notifiers/blob/6dd8aafff86935dbb4763db9c56f9cdd7fc08b65/notifiers/utils/helpers.py#L9-L18 |
231,993 | notifiers/notifiers | notifiers/utils/helpers.py | merge_dicts | def merge_dicts(target_dict: dict, merge_dict: dict) -> dict:
"""
Merges ``merge_dict`` into ``target_dict`` if the latter does not already contain a value for each of the key
names in ``merge_dict``. Used to cleanly merge default and environ data into notification payload.
:param target_dict: The target dict to merge into and return, the user provided data for example
:param merge_dict: The data that should be merged into the target data
:return: A dict of merged data
"""
log.debug("merging dict %s into %s", merge_dict, target_dict)
for key, value in merge_dict.items():
if key not in target_dict:
target_dict[key] = value
return target_dict | python | def merge_dicts(target_dict: dict, merge_dict: dict) -> dict:
log.debug("merging dict %s into %s", merge_dict, target_dict)
for key, value in merge_dict.items():
if key not in target_dict:
target_dict[key] = value
return target_dict | [
"def",
"merge_dicts",
"(",
"target_dict",
":",
"dict",
",",
"merge_dict",
":",
"dict",
")",
"->",
"dict",
":",
"log",
".",
"debug",
"(",
"\"merging dict %s into %s\"",
",",
"merge_dict",
",",
"target_dict",
")",
"for",
"key",
",",
"value",
"in",
"merge_dict"... | Merges ``merge_dict`` into ``target_dict`` if the latter does not already contain a value for each of the key
names in ``merge_dict``. Used to cleanly merge default and environ data into notification payload.
:param target_dict: The target dict to merge into and return, the user provided data for example
:param merge_dict: The data that should be merged into the target data
:return: A dict of merged data | [
"Merges",
"merge_dict",
"into",
"target_dict",
"if",
"the",
"latter",
"does",
"not",
"already",
"contain",
"a",
"value",
"for",
"each",
"of",
"the",
"key",
"names",
"in",
"merge_dict",
".",
"Used",
"to",
"cleanly",
"merge",
"default",
"and",
"environ",
"data... | 6dd8aafff86935dbb4763db9c56f9cdd7fc08b65 | https://github.com/notifiers/notifiers/blob/6dd8aafff86935dbb4763db9c56f9cdd7fc08b65/notifiers/utils/helpers.py#L21-L34 |
231,994 | notifiers/notifiers | notifiers/utils/helpers.py | snake_to_camel_case | def snake_to_camel_case(value: str) -> str:
"""
Convert a snake case param to CamelCase
:param value: The value to convert
:return: A CamelCase value
"""
log.debug("trying to convert %s to camel case", value)
return "".join(word.capitalize() for word in value.split("_")) | python | def snake_to_camel_case(value: str) -> str:
log.debug("trying to convert %s to camel case", value)
return "".join(word.capitalize() for word in value.split("_")) | [
"def",
"snake_to_camel_case",
"(",
"value",
":",
"str",
")",
"->",
"str",
":",
"log",
".",
"debug",
"(",
"\"trying to convert %s to camel case\"",
",",
"value",
")",
"return",
"\"\"",
".",
"join",
"(",
"word",
".",
"capitalize",
"(",
")",
"for",
"word",
"i... | Convert a snake case param to CamelCase
:param value: The value to convert
:return: A CamelCase value | [
"Convert",
"a",
"snake",
"case",
"param",
"to",
"CamelCase"
] | 6dd8aafff86935dbb4763db9c56f9cdd7fc08b65 | https://github.com/notifiers/notifiers/blob/6dd8aafff86935dbb4763db9c56f9cdd7fc08b65/notifiers/utils/helpers.py#L56-L64 |
231,995 | notifiers/notifiers | notifiers/utils/helpers.py | valid_file | def valid_file(path: str) -> bool:
"""
Verifies that a string path actually exists and is a file
:param path: The path to verify
:return: **True** if path exist and is a file
"""
path = Path(path).expanduser()
log.debug("checking if %s is a valid file", path)
return path.exists() and path.is_file() | python | def valid_file(path: str) -> bool:
path = Path(path).expanduser()
log.debug("checking if %s is a valid file", path)
return path.exists() and path.is_file() | [
"def",
"valid_file",
"(",
"path",
":",
"str",
")",
"->",
"bool",
":",
"path",
"=",
"Path",
"(",
"path",
")",
".",
"expanduser",
"(",
")",
"log",
".",
"debug",
"(",
"\"checking if %s is a valid file\"",
",",
"path",
")",
"return",
"path",
".",
"exists",
... | Verifies that a string path actually exists and is a file
:param path: The path to verify
:return: **True** if path exist and is a file | [
"Verifies",
"that",
"a",
"string",
"path",
"actually",
"exists",
"and",
"is",
"a",
"file"
] | 6dd8aafff86935dbb4763db9c56f9cdd7fc08b65 | https://github.com/notifiers/notifiers/blob/6dd8aafff86935dbb4763db9c56f9cdd7fc08b65/notifiers/utils/helpers.py#L67-L76 |
231,996 | notifiers/notifiers | notifiers/logging.py | NotificationHandler.init_providers | def init_providers(self, provider, kwargs):
"""
Inits main and fallback provider if relevant
:param provider: Provider name to use
:param kwargs: Additional kwargs
:raises ValueError: If provider name or fallback names are not valid providers, a :exc:`ValueError` will
be raised
"""
self.provider = notifiers.get_notifier(provider, strict=True)
if kwargs.get("fallback"):
self.fallback = notifiers.get_notifier(kwargs.pop("fallback"), strict=True)
self.fallback_defaults = kwargs.pop("fallback_defaults", {}) | python | def init_providers(self, provider, kwargs):
self.provider = notifiers.get_notifier(provider, strict=True)
if kwargs.get("fallback"):
self.fallback = notifiers.get_notifier(kwargs.pop("fallback"), strict=True)
self.fallback_defaults = kwargs.pop("fallback_defaults", {}) | [
"def",
"init_providers",
"(",
"self",
",",
"provider",
",",
"kwargs",
")",
":",
"self",
".",
"provider",
"=",
"notifiers",
".",
"get_notifier",
"(",
"provider",
",",
"strict",
"=",
"True",
")",
"if",
"kwargs",
".",
"get",
"(",
"\"fallback\"",
")",
":",
... | Inits main and fallback provider if relevant
:param provider: Provider name to use
:param kwargs: Additional kwargs
:raises ValueError: If provider name or fallback names are not valid providers, a :exc:`ValueError` will
be raised | [
"Inits",
"main",
"and",
"fallback",
"provider",
"if",
"relevant"
] | 6dd8aafff86935dbb4763db9c56f9cdd7fc08b65 | https://github.com/notifiers/notifiers/blob/6dd8aafff86935dbb4763db9c56f9cdd7fc08b65/notifiers/logging.py#L27-L39 |
231,997 | notifiers/notifiers | notifiers_cli/core.py | provider_group_factory | def provider_group_factory():
"""Dynamically generate provider groups for all providers, and add all basic command to it"""
for provider in all_providers():
p = get_notifier(provider)
provider_name = p.name
help = f"Options for '{provider_name}'"
group = click.Group(name=provider_name, help=help)
# Notify command
notify = partial(_notify, p=p)
group.add_command(schema_to_command(p, "notify", notify, add_message=True))
# Resources command
resources_callback = partial(_resources, p=p)
resources_cmd = click.Command(
"resources",
callback=resources_callback,
help="Show provider resources list",
)
group.add_command(resources_cmd)
pretty_opt = click.Option(
["--pretty/--not-pretty"], help="Output a pretty version of the JSON"
)
# Add any provider resources
for resource in p.resources:
rsc = getattr(p, resource)
rsrc_callback = partial(_resource, rsc)
rsrc_command = schema_to_command(
rsc, resource, rsrc_callback, add_message=False
)
rsrc_command.params.append(pretty_opt)
group.add_command(rsrc_command)
for name, description in CORE_COMMANDS.items():
callback = func_factory(p, name)
params = [pretty_opt]
command = click.Command(
name,
callback=callback,
help=description.format(provider_name),
params=params,
)
group.add_command(command)
notifiers_cli.add_command(group) | python | def provider_group_factory():
for provider in all_providers():
p = get_notifier(provider)
provider_name = p.name
help = f"Options for '{provider_name}'"
group = click.Group(name=provider_name, help=help)
# Notify command
notify = partial(_notify, p=p)
group.add_command(schema_to_command(p, "notify", notify, add_message=True))
# Resources command
resources_callback = partial(_resources, p=p)
resources_cmd = click.Command(
"resources",
callback=resources_callback,
help="Show provider resources list",
)
group.add_command(resources_cmd)
pretty_opt = click.Option(
["--pretty/--not-pretty"], help="Output a pretty version of the JSON"
)
# Add any provider resources
for resource in p.resources:
rsc = getattr(p, resource)
rsrc_callback = partial(_resource, rsc)
rsrc_command = schema_to_command(
rsc, resource, rsrc_callback, add_message=False
)
rsrc_command.params.append(pretty_opt)
group.add_command(rsrc_command)
for name, description in CORE_COMMANDS.items():
callback = func_factory(p, name)
params = [pretty_opt]
command = click.Command(
name,
callback=callback,
help=description.format(provider_name),
params=params,
)
group.add_command(command)
notifiers_cli.add_command(group) | [
"def",
"provider_group_factory",
"(",
")",
":",
"for",
"provider",
"in",
"all_providers",
"(",
")",
":",
"p",
"=",
"get_notifier",
"(",
"provider",
")",
"provider_name",
"=",
"p",
".",
"name",
"help",
"=",
"f\"Options for '{provider_name}'\"",
"group",
"=",
"c... | Dynamically generate provider groups for all providers, and add all basic command to it | [
"Dynamically",
"generate",
"provider",
"groups",
"for",
"all",
"providers",
"and",
"add",
"all",
"basic",
"command",
"to",
"it"
] | 6dd8aafff86935dbb4763db9c56f9cdd7fc08b65 | https://github.com/notifiers/notifiers/blob/6dd8aafff86935dbb4763db9c56f9cdd7fc08b65/notifiers_cli/core.py#L12-L58 |
231,998 | notifiers/notifiers | notifiers_cli/core.py | entry_point | def entry_point():
"""The entry that CLI is executed from"""
try:
provider_group_factory()
notifiers_cli(obj={})
except NotifierException as e:
click.secho(f"ERROR: {e.message}", bold=True, fg="red")
exit(1) | python | def entry_point():
try:
provider_group_factory()
notifiers_cli(obj={})
except NotifierException as e:
click.secho(f"ERROR: {e.message}", bold=True, fg="red")
exit(1) | [
"def",
"entry_point",
"(",
")",
":",
"try",
":",
"provider_group_factory",
"(",
")",
"notifiers_cli",
"(",
"obj",
"=",
"{",
"}",
")",
"except",
"NotifierException",
"as",
"e",
":",
"click",
".",
"secho",
"(",
"f\"ERROR: {e.message}\"",
",",
"bold",
"=",
"T... | The entry that CLI is executed from | [
"The",
"entry",
"that",
"CLI",
"is",
"executed",
"from"
] | 6dd8aafff86935dbb4763db9c56f9cdd7fc08b65 | https://github.com/notifiers/notifiers/blob/6dd8aafff86935dbb4763db9c56f9cdd7fc08b65/notifiers_cli/core.py#L78-L85 |
231,999 | ucfopen/canvasapi | canvasapi/requester.py | Requester.request | def request(
self, method, endpoint=None, headers=None, use_auth=True,
_url=None, _kwargs=None, **kwargs):
"""
Make a request to the Canvas API and return the response.
:param method: The HTTP method for the request.
:type method: str
:param endpoint: The endpoint to call.
:type endpoint: str
:param headers: Optional HTTP headers to be sent with the request.
:type headers: dict
:param use_auth: Optional flag to remove the authentication
header from the request.
:type use_auth: bool
:param _url: Optional argument to send a request to a URL
outside of the Canvas API. If this is selected and an
endpoint is provided, the endpoint will be ignored and
only the _url argument will be used.
:type _url: str
:param _kwargs: A list of 2-tuples representing processed
keyword arguments to be sent to Canvas as params or data.
:type _kwargs: `list`
:rtype: str
"""
full_url = _url if _url else "{}{}".format(self.base_url, endpoint)
if not headers:
headers = {}
if use_auth:
auth_header = {'Authorization': 'Bearer {}'.format(self.access_token)}
headers.update(auth_header)
# Convert kwargs into list of 2-tuples and combine with _kwargs.
_kwargs = _kwargs or []
_kwargs.extend(kwargs.items())
# Do any final argument processing before sending to request method.
for i, kwarg in enumerate(_kwargs):
kw, arg = kwarg
# Convert boolean objects to a lowercase string.
if isinstance(arg, bool):
_kwargs[i] = (kw, str(arg).lower())
# Convert any datetime objects into ISO 8601 formatted strings.
elif isinstance(arg, datetime):
_kwargs[i] = (kw, arg.isoformat())
# Determine the appropriate request method.
if method == 'GET':
req_method = self._get_request
elif method == 'POST':
req_method = self._post_request
elif method == 'DELETE':
req_method = self._delete_request
elif method == 'PUT':
req_method = self._put_request
# Call the request method
response = req_method(full_url, headers, _kwargs)
# Add response to internal cache
if len(self._cache) > 4:
self._cache.pop()
self._cache.insert(0, response)
# Raise for status codes
if response.status_code == 400:
raise BadRequest(response.text)
elif response.status_code == 401:
if 'WWW-Authenticate' in response.headers:
raise InvalidAccessToken(response.json())
else:
raise Unauthorized(response.json())
elif response.status_code == 403:
raise Forbidden(response.text)
elif response.status_code == 404:
raise ResourceDoesNotExist('Not Found')
elif response.status_code == 409:
raise Conflict(response.text)
elif response.status_code == 500:
raise CanvasException("API encountered an error processing your request")
return response | python | def request(
self, method, endpoint=None, headers=None, use_auth=True,
_url=None, _kwargs=None, **kwargs):
full_url = _url if _url else "{}{}".format(self.base_url, endpoint)
if not headers:
headers = {}
if use_auth:
auth_header = {'Authorization': 'Bearer {}'.format(self.access_token)}
headers.update(auth_header)
# Convert kwargs into list of 2-tuples and combine with _kwargs.
_kwargs = _kwargs or []
_kwargs.extend(kwargs.items())
# Do any final argument processing before sending to request method.
for i, kwarg in enumerate(_kwargs):
kw, arg = kwarg
# Convert boolean objects to a lowercase string.
if isinstance(arg, bool):
_kwargs[i] = (kw, str(arg).lower())
# Convert any datetime objects into ISO 8601 formatted strings.
elif isinstance(arg, datetime):
_kwargs[i] = (kw, arg.isoformat())
# Determine the appropriate request method.
if method == 'GET':
req_method = self._get_request
elif method == 'POST':
req_method = self._post_request
elif method == 'DELETE':
req_method = self._delete_request
elif method == 'PUT':
req_method = self._put_request
# Call the request method
response = req_method(full_url, headers, _kwargs)
# Add response to internal cache
if len(self._cache) > 4:
self._cache.pop()
self._cache.insert(0, response)
# Raise for status codes
if response.status_code == 400:
raise BadRequest(response.text)
elif response.status_code == 401:
if 'WWW-Authenticate' in response.headers:
raise InvalidAccessToken(response.json())
else:
raise Unauthorized(response.json())
elif response.status_code == 403:
raise Forbidden(response.text)
elif response.status_code == 404:
raise ResourceDoesNotExist('Not Found')
elif response.status_code == 409:
raise Conflict(response.text)
elif response.status_code == 500:
raise CanvasException("API encountered an error processing your request")
return response | [
"def",
"request",
"(",
"self",
",",
"method",
",",
"endpoint",
"=",
"None",
",",
"headers",
"=",
"None",
",",
"use_auth",
"=",
"True",
",",
"_url",
"=",
"None",
",",
"_kwargs",
"=",
"None",
",",
"*",
"*",
"kwargs",
")",
":",
"full_url",
"=",
"_url"... | Make a request to the Canvas API and return the response.
:param method: The HTTP method for the request.
:type method: str
:param endpoint: The endpoint to call.
:type endpoint: str
:param headers: Optional HTTP headers to be sent with the request.
:type headers: dict
:param use_auth: Optional flag to remove the authentication
header from the request.
:type use_auth: bool
:param _url: Optional argument to send a request to a URL
outside of the Canvas API. If this is selected and an
endpoint is provided, the endpoint will be ignored and
only the _url argument will be used.
:type _url: str
:param _kwargs: A list of 2-tuples representing processed
keyword arguments to be sent to Canvas as params or data.
:type _kwargs: `list`
:rtype: requests.Response
"Make",
"a",
"request",
"to",
"the",
"Canvas",
"API",
"and",
"return",
"the",
"response",
"."
] | 319064b5fc97ba54250af683eb98723ef3f76cf8 | https://github.com/ucfopen/canvasapi/blob/319064b5fc97ba54250af683eb98723ef3f76cf8/canvasapi/requester.py#L29-L115 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.