repo stringlengths 7 55 | path stringlengths 4 127 | func_name stringlengths 1 88 | original_string stringlengths 75 19.8k | language stringclasses 1
value | code stringlengths 75 19.8k | code_tokens listlengths 20 707 | docstring stringlengths 3 17.3k | docstring_tokens listlengths 3 222 | sha stringlengths 40 40 | url stringlengths 87 242 | partition stringclasses 1
value | idx int64 0 252k |
|---|---|---|---|---|---|---|---|---|---|---|---|---|
PMEAL/porespy | porespy/io/__funcs__.py | to_palabos | def to_palabos(im, filename, solid=0):
r"""
Converts an ND-array image to a text file that Palabos can read in as a
geometry for Lattice Boltzmann simulations. Uses a Euclidean distance
transform to identify solid voxels neighboring fluid voxels and labels
them as the interface.
Parameters
----------
im : ND-array
The image of the porous material
filename : string
Path to output file
solid : int
The value of the solid voxels in the image used to convert image to
binary with all other voxels assumed to be fluid.
Notes
-----
File produced contains 3 values: 2 = Solid, 1 = Interface, 0 = Pore
Palabos will run the simulation applying the specified pressure drop from
x = 0 to x = -1.
"""
# Create binary image for fluid and solid phases
bin_im = im == solid
# Transform to integer for distance transform
bin_im = bin_im.astype(int)
# Distance Transform computes Euclidean distance in lattice units to
# Nearest fluid for every solid voxel
dt = nd.distance_transform_edt(bin_im)
dt[dt > np.sqrt(2)] = 2
dt[(dt > 0)*(dt <= np.sqrt(2))] = 1
dt = dt.astype(int)
# Write out data
with open(filename, 'w') as f:
out_data = dt.flatten().tolist()
f.write('\n'.join(map(repr, out_data))) | python | def to_palabos(im, filename, solid=0):
r"""
Converts an ND-array image to a text file that Palabos can read in as a
geometry for Lattice Boltzmann simulations. Uses a Euclidean distance
transform to identify solid voxels neighboring fluid voxels and labels
them as the interface.
Parameters
----------
im : ND-array
The image of the porous material
filename : string
Path to output file
solid : int
The value of the solid voxels in the image used to convert image to
binary with all other voxels assumed to be fluid.
Notes
-----
File produced contains 3 values: 2 = Solid, 1 = Interface, 0 = Pore
Palabos will run the simulation applying the specified pressure drop from
x = 0 to x = -1.
"""
# Create binary image for fluid and solid phases
bin_im = im == solid
# Transform to integer for distance transform
bin_im = bin_im.astype(int)
# Distance Transform computes Euclidean distance in lattice units to
# Nearest fluid for every solid voxel
dt = nd.distance_transform_edt(bin_im)
dt[dt > np.sqrt(2)] = 2
dt[(dt > 0)*(dt <= np.sqrt(2))] = 1
dt = dt.astype(int)
# Write out data
with open(filename, 'w') as f:
out_data = dt.flatten().tolist()
f.write('\n'.join(map(repr, out_data))) | [
"def",
"to_palabos",
"(",
"im",
",",
"filename",
",",
"solid",
"=",
"0",
")",
":",
"# Create binary image for fluid and solid phases",
"bin_im",
"=",
"im",
"==",
"solid",
"# Transform to integer for distance transform",
"bin_im",
"=",
"bin_im",
".",
"astype",
"(",
"... | r"""
Converts an ND-array image to a text file that Palabos can read in as a
geometry for Lattice Boltzmann simulations. Uses a Euclidean distance
transform to identify solid voxels neighboring fluid voxels and labels
them as the interface.
Parameters
----------
im : ND-array
The image of the porous material
filename : string
Path to output file
solid : int
The value of the solid voxels in the image used to convert image to
binary with all other voxels assumed to be fluid.
Notes
-----
File produced contains 3 values: 2 = Solid, 1 = Interface, 0 = Pore
Palabos will run the simulation applying the specified pressure drop from
x = 0 to x = -1. | [
"r",
"Converts",
"an",
"ND",
"-",
"array",
"image",
"to",
"a",
"text",
"file",
"that",
"Palabos",
"can",
"read",
"in",
"as",
"a",
"geometry",
"for",
"Lattice",
"Boltzmann",
"simulations",
".",
"Uses",
"a",
"Euclidean",
"distance",
"transform",
"to",
"ident... | 1e13875b56787d8f5b7ffdabce8c4342c33ba9f8 | https://github.com/PMEAL/porespy/blob/1e13875b56787d8f5b7ffdabce8c4342c33ba9f8/porespy/io/__funcs__.py#L116-L155 | train | 213,400 |
PMEAL/porespy | porespy/filters/__funcs__.py | distance_transform_lin | def distance_transform_lin(im, axis=0, mode='both'):
r"""
Replaces each void voxel with the linear distance to the nearest solid
voxel along the specified axis.
Parameters
----------
im : ND-array
The image of the porous material with ``True`` values indicating the
void phase (or phase of interest)
axis : int
The direction along which the distance should be measured, the default
is 0 (i.e. along the x-direction)
mode : string
Controls how the distance is measured. Options are:
'forward' - Distances are measured in the increasing direction along
the specified axis
'reverse' - Distances are measured in the reverse direction.
*'backward'* is also accepted.
'both' - Distances are calculated in both directions (by recursively
calling itself), then reporting the minimum value of the two results.
Returns
-------
image : ND-array
A copy of ``im`` with each foreground voxel containing the distance to
the nearest background along the specified axis.
"""
if im.ndim != im.squeeze().ndim:
warnings.warn('Input image conains a singleton axis:' + str(im.shape) +
' Reduce dimensionality with np.squeeze(im) to avoid' +
' unexpected behavior.')
if mode in ['backward', 'reverse']:
im = sp.flip(im, axis)
im = distance_transform_lin(im=im, axis=axis, mode='forward')
im = sp.flip(im, axis)
return im
elif mode in ['both']:
im_f = distance_transform_lin(im=im, axis=axis, mode='forward')
im_b = distance_transform_lin(im=im, axis=axis, mode='backward')
return sp.minimum(im_f, im_b)
else:
b = sp.cumsum(im > 0, axis=axis)
c = sp.diff(b*(im == 0), axis=axis)
d = sp.minimum.accumulate(c, axis=axis)
if im.ndim == 1:
e = sp.pad(d, pad_width=[1, 0], mode='constant', constant_values=0)
elif im.ndim == 2:
ax = [[[1, 0], [0, 0]], [[0, 0], [1, 0]]]
e = sp.pad(d, pad_width=ax[axis], mode='constant', constant_values=0)
elif im.ndim == 3:
ax = [[[1, 0], [0, 0], [0, 0]],
[[0, 0], [1, 0], [0, 0]],
[[0, 0], [0, 0], [1, 0]]]
e = sp.pad(d, pad_width=ax[axis], mode='constant', constant_values=0)
f = im*(b + e)
return f | python | def distance_transform_lin(im, axis=0, mode='both'):
r"""
Replaces each void voxel with the linear distance to the nearest solid
voxel along the specified axis.
Parameters
----------
im : ND-array
The image of the porous material with ``True`` values indicating the
void phase (or phase of interest)
axis : int
The direction along which the distance should be measured, the default
is 0 (i.e. along the x-direction)
mode : string
Controls how the distance is measured. Options are:
'forward' - Distances are measured in the increasing direction along
the specified axis
'reverse' - Distances are measured in the reverse direction.
*'backward'* is also accepted.
'both' - Distances are calculated in both directions (by recursively
calling itself), then reporting the minimum value of the two results.
Returns
-------
image : ND-array
A copy of ``im`` with each foreground voxel containing the distance to
the nearest background along the specified axis.
"""
if im.ndim != im.squeeze().ndim:
warnings.warn('Input image conains a singleton axis:' + str(im.shape) +
' Reduce dimensionality with np.squeeze(im) to avoid' +
' unexpected behavior.')
if mode in ['backward', 'reverse']:
im = sp.flip(im, axis)
im = distance_transform_lin(im=im, axis=axis, mode='forward')
im = sp.flip(im, axis)
return im
elif mode in ['both']:
im_f = distance_transform_lin(im=im, axis=axis, mode='forward')
im_b = distance_transform_lin(im=im, axis=axis, mode='backward')
return sp.minimum(im_f, im_b)
else:
b = sp.cumsum(im > 0, axis=axis)
c = sp.diff(b*(im == 0), axis=axis)
d = sp.minimum.accumulate(c, axis=axis)
if im.ndim == 1:
e = sp.pad(d, pad_width=[1, 0], mode='constant', constant_values=0)
elif im.ndim == 2:
ax = [[[1, 0], [0, 0]], [[0, 0], [1, 0]]]
e = sp.pad(d, pad_width=ax[axis], mode='constant', constant_values=0)
elif im.ndim == 3:
ax = [[[1, 0], [0, 0], [0, 0]],
[[0, 0], [1, 0], [0, 0]],
[[0, 0], [0, 0], [1, 0]]]
e = sp.pad(d, pad_width=ax[axis], mode='constant', constant_values=0)
f = im*(b + e)
return f | [
"def",
"distance_transform_lin",
"(",
"im",
",",
"axis",
"=",
"0",
",",
"mode",
"=",
"'both'",
")",
":",
"if",
"im",
".",
"ndim",
"!=",
"im",
".",
"squeeze",
"(",
")",
".",
"ndim",
":",
"warnings",
".",
"warn",
"(",
"'Input image conains a singleton axis... | r"""
Replaces each void voxel with the linear distance to the nearest solid
voxel along the specified axis.
Parameters
----------
im : ND-array
The image of the porous material with ``True`` values indicating the
void phase (or phase of interest)
axis : int
The direction along which the distance should be measured, the default
is 0 (i.e. along the x-direction)
mode : string
Controls how the distance is measured. Options are:
'forward' - Distances are measured in the increasing direction along
the specified axis
'reverse' - Distances are measured in the reverse direction.
*'backward'* is also accepted.
'both' - Distances are calculated in both directions (by recursively
calling itself), then reporting the minimum value of the two results.
Returns
-------
image : ND-array
A copy of ``im`` with each foreground voxel containing the distance to
the nearest background along the specified axis. | [
"r",
"Replaces",
"each",
"void",
"voxel",
"with",
"the",
"linear",
"distance",
"to",
"the",
"nearest",
"solid",
"voxel",
"along",
"the",
"specified",
"axis",
"."
] | 1e13875b56787d8f5b7ffdabce8c4342c33ba9f8 | https://github.com/PMEAL/porespy/blob/1e13875b56787d8f5b7ffdabce8c4342c33ba9f8/porespy/filters/__funcs__.py#L19-L80 | train | 213,401 |
PMEAL/porespy | porespy/filters/__funcs__.py | snow_partitioning | def snow_partitioning(im, dt=None, r_max=4, sigma=0.4, return_all=False,
mask=True, randomize=True):
r"""
Partitions the void space into pore regions using a marker-based watershed
algorithm, with specially filtered peaks as markers.
The SNOW network extraction algorithm (Sub-Network of an Over-segmented
Watershed) was designed to handle to perculiarities of high porosity
materials, but it applies well to other materials as well.
Parameters
----------
im : array_like
A boolean image of the domain, with ``True`` indicating the pore space
and ``False`` elsewhere.
dt : array_like, optional
The distance transform of the pore space. This is done automatically
if not provided, but if the distance transform has already been
computed then supplying it can save some time.
r_max : int
The radius of the spherical structuring element to use in the Maximum
filter stage that is used to find peaks. The default is 4
sigma : float
The standard deviation of the Gaussian filter used in step 1. The
default is 0.4. If 0 is given then the filter is not applied, which is
useful if a distance transform is supplied as the ``im`` argument that
has already been processed.
return_all : boolean
If set to ``True`` a named tuple is returned containing the original
image, the distance transform, the filtered peaks, and the final
pore regions. The default is ``False``
mask : boolean
Apply a mask to the regions where the solid phase is. Default is
``True``
randomize : boolean
If ``True`` (default), then the region colors will be randomized before
returning. This is helpful for visualizing otherwise neighboring
regions have simlar coloring are are hard to distinguish.
Returns
-------
image : ND-array
An image the same shape as ``im`` with the void space partitioned into
pores using a marker based watershed with the peaks found by the
SNOW algorithm [1].
Notes
-----
If ``return_all`` is ``True`` then a **named tuple** is returned containing
all of the images used during the process. They can be access as
attriutes with the following names:
* ``im``: The binary image of the void space
* ``dt``: The distance transform of the image
* ``peaks``: The peaks of the distance transform after applying the
steps of the SNOW algorithm
* ``regions``: The void space partitioned into pores using a marker
based watershed with the peaks found by the SNOW algorithm
References
----------
[1] Gostick, J. "A versatile and efficient network extraction algorithm
using marker-based watershed segmenation". Physical Review E. (2017)
"""
tup = namedtuple('results', field_names=['im', 'dt', 'peaks', 'regions'])
print('_'*60)
print("Beginning SNOW Algorithm")
im_shape = sp.array(im.shape)
if im.dtype is not bool:
print('Converting supplied image (im) to boolean')
im = im > 0
if dt is None:
print('Peforming Distance Transform')
if sp.any(im_shape == 1):
ax = sp.where(im_shape == 1)[0][0]
dt = spim.distance_transform_edt(input=im.squeeze())
dt = sp.expand_dims(dt, ax)
else:
dt = spim.distance_transform_edt(input=im)
tup.im = im
tup.dt = dt
if sigma > 0:
print('Applying Gaussian blur with sigma =', str(sigma))
dt = spim.gaussian_filter(input=dt, sigma=sigma)
peaks = find_peaks(dt=dt, r_max=r_max)
print('Initial number of peaks: ', spim.label(peaks)[1])
peaks = trim_saddle_points(peaks=peaks, dt=dt, max_iters=500)
print('Peaks after trimming saddle points: ', spim.label(peaks)[1])
peaks = trim_nearby_peaks(peaks=peaks, dt=dt)
peaks, N = spim.label(peaks)
print('Peaks after trimming nearby peaks: ', N)
tup.peaks = peaks
if mask:
mask_solid = im > 0
else:
mask_solid = None
regions = watershed(image=-dt, markers=peaks, mask=mask_solid)
if randomize:
regions = randomize_colors(regions)
if return_all:
tup.regions = regions
return tup
else:
return regions | python | def snow_partitioning(im, dt=None, r_max=4, sigma=0.4, return_all=False,
mask=True, randomize=True):
r"""
Partitions the void space into pore regions using a marker-based watershed
algorithm, with specially filtered peaks as markers.
The SNOW network extraction algorithm (Sub-Network of an Over-segmented
Watershed) was designed to handle to perculiarities of high porosity
materials, but it applies well to other materials as well.
Parameters
----------
im : array_like
A boolean image of the domain, with ``True`` indicating the pore space
and ``False`` elsewhere.
dt : array_like, optional
The distance transform of the pore space. This is done automatically
if not provided, but if the distance transform has already been
computed then supplying it can save some time.
r_max : int
The radius of the spherical structuring element to use in the Maximum
filter stage that is used to find peaks. The default is 4
sigma : float
The standard deviation of the Gaussian filter used in step 1. The
default is 0.4. If 0 is given then the filter is not applied, which is
useful if a distance transform is supplied as the ``im`` argument that
has already been processed.
return_all : boolean
If set to ``True`` a named tuple is returned containing the original
image, the distance transform, the filtered peaks, and the final
pore regions. The default is ``False``
mask : boolean
Apply a mask to the regions where the solid phase is. Default is
``True``
randomize : boolean
If ``True`` (default), then the region colors will be randomized before
returning. This is helpful for visualizing otherwise neighboring
regions have simlar coloring are are hard to distinguish.
Returns
-------
image : ND-array
An image the same shape as ``im`` with the void space partitioned into
pores using a marker based watershed with the peaks found by the
SNOW algorithm [1].
Notes
-----
If ``return_all`` is ``True`` then a **named tuple** is returned containing
all of the images used during the process. They can be access as
attriutes with the following names:
* ``im``: The binary image of the void space
* ``dt``: The distance transform of the image
* ``peaks``: The peaks of the distance transform after applying the
steps of the SNOW algorithm
* ``regions``: The void space partitioned into pores using a marker
based watershed with the peaks found by the SNOW algorithm
References
----------
[1] Gostick, J. "A versatile and efficient network extraction algorithm
using marker-based watershed segmenation". Physical Review E. (2017)
"""
tup = namedtuple('results', field_names=['im', 'dt', 'peaks', 'regions'])
print('_'*60)
print("Beginning SNOW Algorithm")
im_shape = sp.array(im.shape)
if im.dtype is not bool:
print('Converting supplied image (im) to boolean')
im = im > 0
if dt is None:
print('Peforming Distance Transform')
if sp.any(im_shape == 1):
ax = sp.where(im_shape == 1)[0][0]
dt = spim.distance_transform_edt(input=im.squeeze())
dt = sp.expand_dims(dt, ax)
else:
dt = spim.distance_transform_edt(input=im)
tup.im = im
tup.dt = dt
if sigma > 0:
print('Applying Gaussian blur with sigma =', str(sigma))
dt = spim.gaussian_filter(input=dt, sigma=sigma)
peaks = find_peaks(dt=dt, r_max=r_max)
print('Initial number of peaks: ', spim.label(peaks)[1])
peaks = trim_saddle_points(peaks=peaks, dt=dt, max_iters=500)
print('Peaks after trimming saddle points: ', spim.label(peaks)[1])
peaks = trim_nearby_peaks(peaks=peaks, dt=dt)
peaks, N = spim.label(peaks)
print('Peaks after trimming nearby peaks: ', N)
tup.peaks = peaks
if mask:
mask_solid = im > 0
else:
mask_solid = None
regions = watershed(image=-dt, markers=peaks, mask=mask_solid)
if randomize:
regions = randomize_colors(regions)
if return_all:
tup.regions = regions
return tup
else:
return regions | [
"def",
"snow_partitioning",
"(",
"im",
",",
"dt",
"=",
"None",
",",
"r_max",
"=",
"4",
",",
"sigma",
"=",
"0.4",
",",
"return_all",
"=",
"False",
",",
"mask",
"=",
"True",
",",
"randomize",
"=",
"True",
")",
":",
"tup",
"=",
"namedtuple",
"(",
"'re... | r"""
Partitions the void space into pore regions using a marker-based watershed
algorithm, with specially filtered peaks as markers.
The SNOW network extraction algorithm (Sub-Network of an Over-segmented
Watershed) was designed to handle to perculiarities of high porosity
materials, but it applies well to other materials as well.
Parameters
----------
im : array_like
A boolean image of the domain, with ``True`` indicating the pore space
and ``False`` elsewhere.
dt : array_like, optional
The distance transform of the pore space. This is done automatically
if not provided, but if the distance transform has already been
computed then supplying it can save some time.
r_max : int
The radius of the spherical structuring element to use in the Maximum
filter stage that is used to find peaks. The default is 4
sigma : float
The standard deviation of the Gaussian filter used in step 1. The
default is 0.4. If 0 is given then the filter is not applied, which is
useful if a distance transform is supplied as the ``im`` argument that
has already been processed.
return_all : boolean
If set to ``True`` a named tuple is returned containing the original
image, the distance transform, the filtered peaks, and the final
pore regions. The default is ``False``
mask : boolean
Apply a mask to the regions where the solid phase is. Default is
``True``
randomize : boolean
If ``True`` (default), then the region colors will be randomized before
returning. This is helpful for visualizing otherwise neighboring
regions have simlar coloring are are hard to distinguish.
Returns
-------
image : ND-array
An image the same shape as ``im`` with the void space partitioned into
pores using a marker based watershed with the peaks found by the
SNOW algorithm [1].
Notes
-----
If ``return_all`` is ``True`` then a **named tuple** is returned containing
all of the images used during the process. They can be access as
attriutes with the following names:
* ``im``: The binary image of the void space
* ``dt``: The distance transform of the image
* ``peaks``: The peaks of the distance transform after applying the
steps of the SNOW algorithm
* ``regions``: The void space partitioned into pores using a marker
based watershed with the peaks found by the SNOW algorithm
References
----------
[1] Gostick, J. "A versatile and efficient network extraction algorithm
using marker-based watershed segmenation". Physical Review E. (2017) | [
"r",
"Partitions",
"the",
"void",
"space",
"into",
"pore",
"regions",
"using",
"a",
"marker",
"-",
"based",
"watershed",
"algorithm",
"with",
"specially",
"filtered",
"peaks",
"as",
"markers",
"."
] | 1e13875b56787d8f5b7ffdabce8c4342c33ba9f8 | https://github.com/PMEAL/porespy/blob/1e13875b56787d8f5b7ffdabce8c4342c33ba9f8/porespy/filters/__funcs__.py#L83-L190 | train | 213,402 |
PMEAL/porespy | porespy/filters/__funcs__.py | snow_partitioning_n | def snow_partitioning_n(im, r_max=4, sigma=0.4, return_all=True,
mask=True, randomize=False, alias=None):
r"""
This function partitions an imaging oontain an arbitrary number of phases
into regions using a marker-based watershed segmentation. Its an extension
of snow_partitioning function with all phases partitioned together.
Parameters
----------
im : ND-array
Image of porous material where each phase is represented by unique
integer starting from 1 (0's are ignored).
r_max : scalar
The radius of the spherical structuring element to use in the Maximum
filter stage that is used to find peaks. The default is 4.
sigma : scalar
The standard deviation of the Gaussian filter used. The default is
0.4. If 0 is given then the filter is not applied, which is useful if a
distance transform is supplied as the ``im`` argument that has already
been processed.
return_all : boolean (default is False)
If set to ``True`` a named tuple is returned containing the original
image, the combined distance transform, list of each phase max label,
and the final combined regions of all phases.
mask : boolean (default is True)
Apply a mask to the regions which are not under concern.
randomize : boolean
If ``True`` (default), then the region colors will be randomized before
returning. This is helpful for visualizing otherwise neighboring
regions have similar coloring and are hard to distinguish.
alias : dict (Optional)
A dictionary that assigns unique image label to specific phases. For
example {1: 'Solid'} will show all structural properties associated
with label 1 as Solid phase properties. If ``None`` then default
labelling will be used i.e {1: 'Phase1',..}.
Returns
-------
An image the same shape as ``im`` with the all phases partitioned into
regions using a marker based watershed with the peaks found by the
SNOW algorithm [1]. If ``return_all`` is ``True`` then a **named tuple**
is returned with the following attribute:
* ``im`` : The actual image of the porous material
* ``dt`` : The combined distance transform of the image
* ``phase_max_label`` : The list of max label of each phase in order to
distinguish between each other
* ``regions`` : The partitioned regions of n phases using a marker
based watershed with the peaks found by the SNOW algorithm
References
----------
[1] Gostick, J. "A versatile and efficient network extraction algorithm
using marker-based watershed segmentation". Physical Review E. (2017)
[2] Khan, ZA et al. "Dual network extraction algorithm to investigate
multiple transport processes in porous materials: Image-based modeling
of pore and grain-scale processes". Computers in Chemical Engineering.
(2019)
See Also
----------
snow_partitioning
Notes
-----
In principle it is possible to perform a distance transform on each
phase separately, merge these into a single image, then apply the
watershed only once. This, however, has been found to create edge artifacts
between regions arising from the way watershed handles plateaus in the
distance transform. To overcome this, this function applies the watershed
to each of the distance transforms separately, then merges the segmented
regions back into a single image.
"""
# Get alias if provided by user
al = _create_alias_map(im=im, alias=alias)
# Perform snow on each phase and merge all segmentation and dt together
phases_num = sp.unique(im * 1)
phases_num = sp.trim_zeros(phases_num)
combined_dt = 0
combined_region = 0
num = [0]
for i in phases_num:
print('_' * 60)
if alias is None:
print('Processing Phase {}'.format(i))
else:
print('Processing Phase {}'.format(al[i]))
phase_snow = snow_partitioning(im == i,
dt=None, r_max=r_max, sigma=sigma,
return_all=return_all, mask=mask,
randomize=randomize)
if len(phases_num) == 1 and phases_num == 1:
combined_dt = phase_snow.dt
combined_region = phase_snow.regions
else:
combined_dt += phase_snow.dt
phase_snow.regions *= phase_snow.im
phase_snow.regions += num[i - 1]
phase_ws = phase_snow.regions * phase_snow.im
phase_ws[phase_ws == num[i - 1]] = 0
combined_region += phase_ws
num.append(sp.amax(combined_region))
if return_all:
tup = namedtuple('results', field_names=['im', 'dt', 'phase_max_label',
'regions'])
tup.im = im
tup.dt = combined_dt
tup.phase_max_label = num[1:]
tup.regions = combined_region
return tup
else:
return combined_region | python | def snow_partitioning_n(im, r_max=4, sigma=0.4, return_all=True,
mask=True, randomize=False, alias=None):
r"""
This function partitions an imaging oontain an arbitrary number of phases
into regions using a marker-based watershed segmentation. Its an extension
of snow_partitioning function with all phases partitioned together.
Parameters
----------
im : ND-array
Image of porous material where each phase is represented by unique
integer starting from 1 (0's are ignored).
r_max : scalar
The radius of the spherical structuring element to use in the Maximum
filter stage that is used to find peaks. The default is 4.
sigma : scalar
The standard deviation of the Gaussian filter used. The default is
0.4. If 0 is given then the filter is not applied, which is useful if a
distance transform is supplied as the ``im`` argument that has already
been processed.
return_all : boolean (default is False)
If set to ``True`` a named tuple is returned containing the original
image, the combined distance transform, list of each phase max label,
and the final combined regions of all phases.
mask : boolean (default is True)
Apply a mask to the regions which are not under concern.
randomize : boolean
If ``True`` (default), then the region colors will be randomized before
returning. This is helpful for visualizing otherwise neighboring
regions have similar coloring and are hard to distinguish.
alias : dict (Optional)
A dictionary that assigns unique image label to specific phases. For
example {1: 'Solid'} will show all structural properties associated
with label 1 as Solid phase properties. If ``None`` then default
labelling will be used i.e {1: 'Phase1',..}.
Returns
-------
An image the same shape as ``im`` with the all phases partitioned into
regions using a marker based watershed with the peaks found by the
SNOW algorithm [1]. If ``return_all`` is ``True`` then a **named tuple**
is returned with the following attribute:
* ``im`` : The actual image of the porous material
* ``dt`` : The combined distance transform of the image
* ``phase_max_label`` : The list of max label of each phase in order to
distinguish between each other
* ``regions`` : The partitioned regions of n phases using a marker
based watershed with the peaks found by the SNOW algorithm
References
----------
[1] Gostick, J. "A versatile and efficient network extraction algorithm
using marker-based watershed segmentation". Physical Review E. (2017)
[2] Khan, ZA et al. "Dual network extraction algorithm to investigate
multiple transport processes in porous materials: Image-based modeling
of pore and grain-scale processes". Computers in Chemical Engineering.
(2019)
See Also
----------
snow_partitioning
Notes
-----
In principle it is possible to perform a distance transform on each
phase separately, merge these into a single image, then apply the
watershed only once. This, however, has been found to create edge artifacts
between regions arising from the way watershed handles plateaus in the
distance transform. To overcome this, this function applies the watershed
to each of the distance transforms separately, then merges the segmented
regions back into a single image.
"""
# Get alias if provided by user
al = _create_alias_map(im=im, alias=alias)
# Perform snow on each phase and merge all segmentation and dt together
phases_num = sp.unique(im * 1)
phases_num = sp.trim_zeros(phases_num)
combined_dt = 0
combined_region = 0
num = [0]
for i in phases_num:
print('_' * 60)
if alias is None:
print('Processing Phase {}'.format(i))
else:
print('Processing Phase {}'.format(al[i]))
phase_snow = snow_partitioning(im == i,
dt=None, r_max=r_max, sigma=sigma,
return_all=return_all, mask=mask,
randomize=randomize)
if len(phases_num) == 1 and phases_num == 1:
combined_dt = phase_snow.dt
combined_region = phase_snow.regions
else:
combined_dt += phase_snow.dt
phase_snow.regions *= phase_snow.im
phase_snow.regions += num[i - 1]
phase_ws = phase_snow.regions * phase_snow.im
phase_ws[phase_ws == num[i - 1]] = 0
combined_region += phase_ws
num.append(sp.amax(combined_region))
if return_all:
tup = namedtuple('results', field_names=['im', 'dt', 'phase_max_label',
'regions'])
tup.im = im
tup.dt = combined_dt
tup.phase_max_label = num[1:]
tup.regions = combined_region
return tup
else:
return combined_region | [
"def",
"snow_partitioning_n",
"(",
"im",
",",
"r_max",
"=",
"4",
",",
"sigma",
"=",
"0.4",
",",
"return_all",
"=",
"True",
",",
"mask",
"=",
"True",
",",
"randomize",
"=",
"False",
",",
"alias",
"=",
"None",
")",
":",
"# Get alias if provided by user",
"... | r"""
This function partitions an imaging oontain an arbitrary number of phases
into regions using a marker-based watershed segmentation. Its an extension
of snow_partitioning function with all phases partitioned together.
Parameters
----------
im : ND-array
Image of porous material where each phase is represented by unique
integer starting from 1 (0's are ignored).
r_max : scalar
The radius of the spherical structuring element to use in the Maximum
filter stage that is used to find peaks. The default is 4.
sigma : scalar
The standard deviation of the Gaussian filter used. The default is
0.4. If 0 is given then the filter is not applied, which is useful if a
distance transform is supplied as the ``im`` argument that has already
been processed.
return_all : boolean (default is False)
If set to ``True`` a named tuple is returned containing the original
image, the combined distance transform, list of each phase max label,
and the final combined regions of all phases.
mask : boolean (default is True)
Apply a mask to the regions which are not under concern.
randomize : boolean
If ``True`` (default), then the region colors will be randomized before
returning. This is helpful for visualizing otherwise neighboring
regions have similar coloring and are hard to distinguish.
alias : dict (Optional)
A dictionary that assigns unique image label to specific phases. For
example {1: 'Solid'} will show all structural properties associated
with label 1 as Solid phase properties. If ``None`` then default
labelling will be used i.e {1: 'Phase1',..}.
Returns
-------
An image the same shape as ``im`` with the all phases partitioned into
regions using a marker based watershed with the peaks found by the
SNOW algorithm [1]. If ``return_all`` is ``True`` then a **named tuple**
is returned with the following attribute:
* ``im`` : The actual image of the porous material
* ``dt`` : The combined distance transform of the image
* ``phase_max_label`` : The list of max label of each phase in order to
distinguish between each other
* ``regions`` : The partitioned regions of n phases using a marker
based watershed with the peaks found by the SNOW algorithm
References
----------
[1] Gostick, J. "A versatile and efficient network extraction algorithm
using marker-based watershed segmentation". Physical Review E. (2017)
[2] Khan, ZA et al. "Dual network extraction algorithm to investigate
multiple transport processes in porous materials: Image-based modeling
of pore and grain-scale processes". Computers in Chemical Engineering.
(2019)
See Also
----------
snow_partitioning
Notes
-----
In principle it is possible to perform a distance transform on each
phase separately, merge these into a single image, then apply the
watershed only once. This, however, has been found to create edge artifacts
between regions arising from the way watershed handles plateaus in the
distance transform. To overcome this, this function applies the watershed
to each of the distance transforms separately, then merges the segmented
regions back into a single image. | [
"r",
"This",
"function",
"partitions",
"an",
"imaging",
"oontain",
"an",
"arbitrary",
"number",
"of",
"phases",
"into",
"regions",
"using",
"a",
"marker",
"-",
"based",
"watershed",
"segmentation",
".",
"Its",
"an",
"extension",
"of",
"snow_partitioning",
"funct... | 1e13875b56787d8f5b7ffdabce8c4342c33ba9f8 | https://github.com/PMEAL/porespy/blob/1e13875b56787d8f5b7ffdabce8c4342c33ba9f8/porespy/filters/__funcs__.py#L193-L306 | train | 213,403 |
def find_peaks(dt, r_max=4, footprint=None):
    r"""
    Returns all local maxima in the distance transform

    Parameters
    ----------
    dt : ND-array
        The distance transform of the pore space.  This may be calculated
        and filtered using any means desired.
    r_max : scalar
        The size of the structuring element used in the maximum filter.
        This controls the localness of any maxima.  The default is 4 voxels.
        Ignored when ``footprint`` is given as an ND-array.
    footprint : ND-array or callable
        Specifies the shape of the structuring element used to define the
        neighborhood when looking for peaks.  An ND-array is used directly;
        a callable is invoked as ``footprint(r_max)`` to generate the
        element (backwards compatible with passing ``disk``/``ball``).  If
        none is specified then a spherical shape is used (or circular in
        2D).

    Returns
    -------
    image : ND-array
        An array of booleans with ``True`` values at the location of any
        local maxima.

    Notes
    -----
    It is also possible to use the ``peak_local_max`` function from the
    ``skimage.feature`` module as follows:

    ``peaks = peak_local_max(image=dt, min_distance=r, exclude_border=0,
    indices=False)``

    This automatically uses a square structuring element which is
    significantly faster than using a circular or spherical element.
    """
    im = dt > 0
    if im.ndim != im.squeeze().ndim:
        warnings.warn('Input image contains a singleton axis:' + str(im.shape) +
                      ' Reduce dimensionality with np.squeeze(im) to avoid' +
                      ' unexpected behavior.')
    if footprint is None:
        # Default to a circular (2D) or spherical (3D) element of r_max
        if im.ndim == 2:
            footprint = disk(r_max)
        elif im.ndim == 3:
            footprint = ball(r_max)
        else:
            raise Exception("only 2-d and 3-d images are supported")
    elif callable(footprint):
        # Backwards compatible: callables are invoked with r_max
        footprint = footprint(r_max)
    # Raise solid voxels by 2 so peaks adjacent to solid are suppressed
    mx = spim.maximum_filter(dt + 2*(~im), footprint=footprint)
    peaks = (dt == mx)*im
    return peaks
r"""
Returns all local maxima in the distance transform
Parameters
----------
dt : ND-array
The distance transform of the pore space. This may be calculated and
filtered using any means desired.
r_max : scalar
The size of the structuring element used in the maximum filter. This
controls the localness of any maxima. The default is 4 voxels.
footprint : ND-array
Specifies the shape of the structuring element used to define the
neighborhood when looking for peaks. If none is specified then a
spherical shape is used (or circular in 2D).
Returns
-------
image : ND-array
An array of booleans with ``True`` values at the location of any
local maxima.
Notes
-----
It is also possible to use the ``peak_local_max`` function from the
``skimage.feature`` module as follows:
``peaks = peak_local_max(image=dt, min_distance=r, exclude_border=0,
indices=False)``
This automatically uses a square structuring element which is significantly
faster than using a circular or spherical element.
"""
im = dt > 0
if im.ndim != im.squeeze().ndim:
warnings.warn('Input image conains a singleton axis:' + str(im.shape) +
' Reduce dimensionality with np.squeeze(im) to avoid' +
' unexpected behavior.')
if footprint is None:
if im.ndim == 2:
footprint = disk
elif im.ndim == 3:
footprint = ball
else:
raise Exception("only 2-d and 3-d images are supported")
mx = spim.maximum_filter(dt + 2*(~im), footprint=footprint(r_max))
peaks = (dt == mx)*im
return peaks | [
"def",
"find_peaks",
"(",
"dt",
",",
"r_max",
"=",
"4",
",",
"footprint",
"=",
"None",
")",
":",
"im",
"=",
"dt",
">",
"0",
"if",
"im",
".",
"ndim",
"!=",
"im",
".",
"squeeze",
"(",
")",
".",
"ndim",
":",
"warnings",
".",
"warn",
"(",
"'Input i... | r"""
Returns all local maxima in the distance transform
Parameters
----------
dt : ND-array
The distance transform of the pore space. This may be calculated and
filtered using any means desired.
r_max : scalar
The size of the structuring element used in the maximum filter. This
controls the localness of any maxima. The default is 4 voxels.
footprint : ND-array
Specifies the shape of the structuring element used to define the
neighborhood when looking for peaks. If none is specified then a
spherical shape is used (or circular in 2D).
Returns
-------
image : ND-array
An array of booleans with ``True`` values at the location of any
local maxima.
Notes
-----
It is also possible to use the ``peak_local_max`` function from the
``skimage.feature`` module as follows:
``peaks = peak_local_max(image=dt, min_distance=r, exclude_border=0,
indices=False)``
This automatically uses a square structuring element which is significantly
faster than using a circular or spherical element. | [
"r",
"Returns",
"all",
"local",
"maxima",
"in",
"the",
"distance",
"transform"
] | 1e13875b56787d8f5b7ffdabce8c4342c33ba9f8 | https://github.com/PMEAL/porespy/blob/1e13875b56787d8f5b7ffdabce8c4342c33ba9f8/porespy/filters/__funcs__.py#L309-L359 | train | 213,404 |
def reduce_peaks(peaks):
    r"""
    Any peaks that are broad or elongated are replaced with a single voxel
    that is located at the center of mass of the original voxels.

    Parameters
    ----------
    peaks : ND-image
        An image containing ``True`` values indicating peaks in the
        distance transform

    Returns
    -------
    image : ND-array
        An array with the same number of isolated peaks as the original
        image, but fewer total voxels.

    Notes
    -----
    The center of mass of a group of voxels is used as the new single
    voxel, so if the group has an odd shape (like a horse shoe), the new
    voxel may *not* lie on top of the original set.
    """
    # Fully-connected (square/cubic) structuring element so diagonally
    # touching voxels count as one peak; equivalent to skimage's
    # square(3)/cube(3) without the extra dependency
    strel = sp.ones((3,)*peaks.ndim, dtype=bool)
    markers, N = spim.label(input=peaks, structure=strel)
    peaks_new = sp.zeros_like(peaks, dtype=bool)
    if N == 0:
        # No peaks present; previously indexing with an empty coordinate
        # array flooded the entire image with True
        return peaks_new
    inds = spim.center_of_mass(input=peaks, labels=markers,
                               index=sp.arange(1, N+1))
    inds = sp.floor(inds).astype(int)
    # Centroid may not lie on an original voxel, so build a fresh image
    peaks_new[tuple(inds.T)] = True
    return peaks_new
r"""
Any peaks that are broad or elongated are replaced with a single voxel
that is located at the center of mass of the original voxels.
Parameters
----------
peaks : ND-image
An image containing True values indicating peaks in the distance
transform
Returns
-------
image : ND-array
An array with the same number of isolated peaks as the original image,
but fewer total voxels.
Notes
-----
The center of mass of a group of voxels is used as the new single voxel, so
if the group has an odd shape (like a horse shoe), the new voxel may *not*
lie on top of the original set.
"""
if peaks.ndim == 2:
strel = square
else:
strel = cube
markers, N = spim.label(input=peaks, structure=strel(3))
inds = spim.measurements.center_of_mass(input=peaks,
labels=markers,
index=sp.arange(1, N+1))
inds = sp.floor(inds).astype(int)
# Centroid may not be on old pixel, so create a new peaks image
peaks_new = sp.zeros_like(peaks, dtype=bool)
peaks_new[tuple(inds.T)] = True
return peaks_new | [
"def",
"reduce_peaks",
"(",
"peaks",
")",
":",
"if",
"peaks",
".",
"ndim",
"==",
"2",
":",
"strel",
"=",
"square",
"else",
":",
"strel",
"=",
"cube",
"markers",
",",
"N",
"=",
"spim",
".",
"label",
"(",
"input",
"=",
"peaks",
",",
"structure",
"=",... | r"""
Any peaks that are broad or elongated are replaced with a single voxel
that is located at the center of mass of the original voxels.
Parameters
----------
peaks : ND-image
An image containing True values indicating peaks in the distance
transform
Returns
-------
image : ND-array
An array with the same number of isolated peaks as the original image,
but fewer total voxels.
Notes
-----
The center of mass of a group of voxels is used as the new single voxel, so
if the group has an odd shape (like a horse shoe), the new voxel may *not*
lie on top of the original set. | [
"r",
"Any",
"peaks",
"that",
"are",
"broad",
"or",
"elongated",
"are",
"replaced",
"with",
"a",
"single",
"voxel",
"that",
"is",
"located",
"at",
"the",
"center",
"of",
"mass",
"of",
"the",
"original",
"voxels",
"."
] | 1e13875b56787d8f5b7ffdabce8c4342c33ba9f8 | https://github.com/PMEAL/porespy/blob/1e13875b56787d8f5b7ffdabce8c4342c33ba9f8/porespy/filters/__funcs__.py#L362-L397 | train | 213,405 |
def trim_saddle_points(peaks, dt, max_iters=10):
    r"""
    Removes peaks that were mistakenly identified because they lied on a
    saddle or ridge in the distance transform that was not actually a true
    local peak.

    Parameters
    ----------
    peaks : ND-array
        A boolean image containing True values to mark peaks in the
        distance transform (``dt``)
    dt : ND-array
        The distance transform of the pore space for which the true peaks
        are sought.
    max_iters : int
        The maximum number of iterations to run while eroding the saddle
        points.  The default is 10, which is usually not reached; however,
        a warning is issued if the loop ends prior to removing all saddle
        points.

    Returns
    -------
    image : ND-array
        An image with fewer peaks than the input image

    References
    ----------
    [1] Gostick, J. "A versatile and efficient network extraction algorithm
    using marker-based watershed segmentation".  Physical Review E. (2017)
    """
    peaks = sp.copy(peaks)
    if dt.ndim == 2:
        from skimage.morphology import square as cube
    else:
        from skimage.morphology import cube
    labels, N = spim.label(peaks)
    slices = spim.find_objects(labels)
    # Track convergence across ALL regions; previously only the last
    # region's iteration count was checked, and ``iters`` was unbound
    # (NameError) when the image contained no peaks at all
    hit_max_iters = False
    for i in range(N):
        s = extend_slice(s=slices[i], shape=peaks.shape, pad=10)
        peaks_i = labels[s] == i+1
        dt_i = dt[s]
        im_i = dt_i > 0
        iters = 0
        peaks_dil = sp.copy(peaks_i)
        while iters < max_iters:
            iters += 1
            # Grow the peak, then keep only the voxels of dt that equal
            # the maximum under the grown region
            peaks_dil = spim.binary_dilation(input=peaks_dil,
                                             structure=cube(3))
            peaks_max = peaks_dil*sp.amax(dt_i*peaks_dil)
            peaks_extended = (peaks_max == dt_i)*im_i
            if sp.all(peaks_extended == peaks_i):
                break  # Found a true peak
            elif sp.sum(peaks_extended*peaks_i) == 0:
                peaks_i = False
                break  # Found a saddle point
        else:
            # While-loop exhausted without a verdict for this region
            hit_max_iters = True
        peaks[s] = peaks_i
    if hit_max_iters:
        # Original message concatenated 'consider' + 'running' with no space
        print('Maximum number of iterations reached, consider '
              + 'running again with a larger value of max_iters')
    return peaks
r"""
Removes peaks that were mistakenly identified because they lied on a
saddle or ridge in the distance transform that was not actually a true
local peak.
Parameters
----------
peaks : ND-array
A boolean image containing True values to mark peaks in the distance
transform (``dt``)
dt : ND-array
The distance transform of the pore space for which the true peaks are
sought.
max_iters : int
The maximum number of iterations to run while eroding the saddle
points. The default is 10, which is usually not reached; however,
a warning is issued if the loop ends prior to removing all saddle
points.
Returns
-------
image : ND-array
An image with fewer peaks than the input image
References
----------
[1] Gostick, J. "A versatile and efficient network extraction algorithm
using marker-based watershed segmenation". Physical Review E. (2017)
"""
peaks = sp.copy(peaks)
if dt.ndim == 2:
from skimage.morphology import square as cube
else:
from skimage.morphology import cube
labels, N = spim.label(peaks)
slices = spim.find_objects(labels)
for i in range(N):
s = extend_slice(s=slices[i], shape=peaks.shape, pad=10)
peaks_i = labels[s] == i+1
dt_i = dt[s]
im_i = dt_i > 0
iters = 0
peaks_dil = sp.copy(peaks_i)
while iters < max_iters:
iters += 1
peaks_dil = spim.binary_dilation(input=peaks_dil,
structure=cube(3))
peaks_max = peaks_dil*sp.amax(dt_i*peaks_dil)
peaks_extended = (peaks_max == dt_i)*im_i
if sp.all(peaks_extended == peaks_i):
break # Found a true peak
elif sp.sum(peaks_extended*peaks_i) == 0:
peaks_i = False
break # Found a saddle point
peaks[s] = peaks_i
if iters >= max_iters:
print('Maximum number of iterations reached, consider'
+ 'running again with a larger value of max_iters')
return peaks | [
"def",
"trim_saddle_points",
"(",
"peaks",
",",
"dt",
",",
"max_iters",
"=",
"10",
")",
":",
"peaks",
"=",
"sp",
".",
"copy",
"(",
"peaks",
")",
"if",
"dt",
".",
"ndim",
"==",
"2",
":",
"from",
"skimage",
".",
"morphology",
"import",
"square",
"as",
... | r"""
Removes peaks that were mistakenly identified because they lied on a
saddle or ridge in the distance transform that was not actually a true
local peak.
Parameters
----------
peaks : ND-array
A boolean image containing True values to mark peaks in the distance
transform (``dt``)
dt : ND-array
The distance transform of the pore space for which the true peaks are
sought.
max_iters : int
The maximum number of iterations to run while eroding the saddle
points. The default is 10, which is usually not reached; however,
a warning is issued if the loop ends prior to removing all saddle
points.
Returns
-------
image : ND-array
An image with fewer peaks than the input image
References
----------
[1] Gostick, J. "A versatile and efficient network extraction algorithm
using marker-based watershed segmenation". Physical Review E. (2017) | [
"r",
"Removes",
"peaks",
"that",
"were",
"mistakenly",
"identified",
"because",
"they",
"lied",
"on",
"a",
"saddle",
"or",
"ridge",
"in",
"the",
"distance",
"transform",
"that",
"was",
"not",
"actually",
"a",
"true",
"local",
"peak",
"."
] | 1e13875b56787d8f5b7ffdabce8c4342c33ba9f8 | https://github.com/PMEAL/porespy/blob/1e13875b56787d8f5b7ffdabce8c4342c33ba9f8/porespy/filters/__funcs__.py#L400-L462 | train | 213,406 |
def trim_nearby_peaks(peaks, dt):
    r"""
    Finds pairs of peaks that are nearer to each other than to the solid
    phase, and removes the peak that is closer to the solid.

    Parameters
    ----------
    peaks : ND-array
        A boolean image containing True values to mark peaks in the
        distance transform (``dt``)
    dt : ND-array
        The distance transform of the pore space for which the true peaks
        are sought.

    Returns
    -------
    image : ND-array
        An array the same size as ``peaks`` containing a subset of the
        peaks in the original image.

    Notes
    -----
    Each pair of peaks is considered simultaneously, so for a triplet of
    peaks each pair is considered.  This ensures that only the single peak
    that is furthest from the solid is kept.  No iteration is required.

    References
    ----------
    [1] Gostick, J. "A versatile and efficient network extraction algorithm
    using marker-based watershed segmentation".  Physical Review E. (2017)
    """
    peaks = sp.copy(peaks)
    # Fully-connected structuring element, equivalent to skimage's
    # square(3)/cube(3) without requiring skimage
    strel = sp.ones((3,)*dt.ndim, dtype=bool)
    peaks, N = spim.label(peaks, structure=strel)
    if N < 2:
        # Zero or one peak: nothing to compare (zero peaks previously
        # crashed on sp.vstack of an empty sequence)
        return peaks > 0
    crds = spim.center_of_mass(peaks, labels=peaks, index=sp.arange(1, N+1))
    crds = sp.vstack(crds).astype(int)  # Convert to numpy array of ints
    # Distance from each peak to its nearest neighboring peak
    tree = sptl.cKDTree(data=crds)
    temp = tree.query(x=crds, k=2)
    nearest_neighbor = temp[1][:, 1]
    dist_to_neighbor = temp[0][:, 1]
    del temp, tree  # Free-up memory
    dist_to_solid = dt[tuple(crds.T)]  # Distance to solid for each peak
    hits = sp.where(dist_to_neighbor < dist_to_solid)[0]
    # Drop the member of each offending pair that is closer to the solid
    drop_peaks = []
    for peak in hits:
        if dist_to_solid[peak] < dist_to_solid[nearest_neighbor[peak]]:
            drop_peaks.append(peak)
        else:
            drop_peaks.append(nearest_neighbor[peak])
    drop_peaks = sp.unique(drop_peaks)
    # Remove peaks by LABEL value rather than bounding-box slice, so a
    # surviving peak sharing the dropped peak's bounding box is not
    # accidentally erased as well
    for peak in drop_peaks:
        peaks[peaks == peak + 1] = 0
    return (peaks > 0)
r"""
Finds pairs of peaks that are nearer to each other than to the solid phase,
and removes the peak that is closer to the solid.
Parameters
----------
peaks : ND-array
A boolean image containing True values to mark peaks in the distance
transform (``dt``)
dt : ND-array
The distance transform of the pore space for which the true peaks are
sought.
Returns
-------
image : ND-array
An array the same size as ``peaks`` containing a subset of the peaks
in the original image.
Notes
-----
Each pair of peaks is considered simultaneously, so for a triplet of peaks
each pair is considered. This ensures that only the single peak that is
furthest from the solid is kept. No iteration is required.
References
----------
[1] Gostick, J. "A versatile and efficient network extraction algorithm
using marker-based watershed segmenation". Physical Review E. (2017)
"""
peaks = sp.copy(peaks)
if dt.ndim == 2:
from skimage.morphology import square as cube
else:
from skimage.morphology import cube
peaks, N = spim.label(peaks, structure=cube(3))
crds = spim.measurements.center_of_mass(peaks, labels=peaks,
index=sp.arange(1, N+1))
crds = sp.vstack(crds).astype(int) # Convert to numpy array of ints
# Get distance between each peak as a distance map
tree = sptl.cKDTree(data=crds)
temp = tree.query(x=crds, k=2)
nearest_neighbor = temp[1][:, 1]
dist_to_neighbor = temp[0][:, 1]
del temp, tree # Free-up memory
dist_to_solid = dt[tuple(crds.T)] # Get distance to solid for each peak
hits = sp.where(dist_to_neighbor < dist_to_solid)[0]
# Drop peak that is closer to the solid than it's neighbor
drop_peaks = []
for peak in hits:
if dist_to_solid[peak] < dist_to_solid[nearest_neighbor[peak]]:
drop_peaks.append(peak)
else:
drop_peaks.append(nearest_neighbor[peak])
drop_peaks = sp.unique(drop_peaks)
# Remove peaks from image
slices = spim.find_objects(input=peaks)
for s in drop_peaks:
peaks[slices[s]] = 0
return (peaks > 0) | [
"def",
"trim_nearby_peaks",
"(",
"peaks",
",",
"dt",
")",
":",
"peaks",
"=",
"sp",
".",
"copy",
"(",
"peaks",
")",
"if",
"dt",
".",
"ndim",
"==",
"2",
":",
"from",
"skimage",
".",
"morphology",
"import",
"square",
"as",
"cube",
"else",
":",
"from",
... | r"""
Finds pairs of peaks that are nearer to each other than to the solid phase,
and removes the peak that is closer to the solid.
Parameters
----------
peaks : ND-array
A boolean image containing True values to mark peaks in the distance
transform (``dt``)
dt : ND-array
The distance transform of the pore space for which the true peaks are
sought.
Returns
-------
image : ND-array
An array the same size as ``peaks`` containing a subset of the peaks
in the original image.
Notes
-----
Each pair of peaks is considered simultaneously, so for a triplet of peaks
each pair is considered. This ensures that only the single peak that is
furthest from the solid is kept. No iteration is required.
References
----------
[1] Gostick, J. "A versatile and efficient network extraction algorithm
using marker-based watershed segmenation". Physical Review E. (2017) | [
"r",
"Finds",
"pairs",
"of",
"peaks",
"that",
"are",
"nearer",
"to",
"each",
"other",
"than",
"to",
"the",
"solid",
"phase",
"and",
"removes",
"the",
"peak",
"that",
"is",
"closer",
"to",
"the",
"solid",
"."
] | 1e13875b56787d8f5b7ffdabce8c4342c33ba9f8 | https://github.com/PMEAL/porespy/blob/1e13875b56787d8f5b7ffdabce8c4342c33ba9f8/porespy/filters/__funcs__.py#L465-L526 | train | 213,407 |
def fill_blind_pores(im):
    r"""
    Fills all pores that are not connected to the edges of the image.

    Parameters
    ----------
    im : ND-array
        The image of the porous material

    Returns
    -------
    image : ND-array
        A version of ``im`` but with all the disconnected pores removed.

    See Also
    --------
    find_disconnected_voxels
    """
    result = sp.copy(im)
    # Pore voxels with no path to the image boundary are converted to solid
    blind = find_disconnected_voxels(result)
    result[blind] = False
    return result
r"""
Fills all pores that are not connected to the edges of the image.
Parameters
----------
im : ND-array
The image of the porous material
Returns
-------
image : ND-array
A version of ``im`` but with all the disconnected pores removed.
See Also
--------
find_disconnected_voxels
"""
im = sp.copy(im)
holes = find_disconnected_voxels(im)
im[holes] = False
return im | [
"def",
"fill_blind_pores",
"(",
"im",
")",
":",
"im",
"=",
"sp",
".",
"copy",
"(",
"im",
")",
"holes",
"=",
"find_disconnected_voxels",
"(",
"im",
")",
"im",
"[",
"holes",
"]",
"=",
"False",
"return",
"im"
] | r"""
Fills all pores that are not connected to the edges of the image.
Parameters
----------
im : ND-array
The image of the porous material
Returns
-------
image : ND-array
A version of ``im`` but with all the disconnected pores removed.
See Also
--------
find_disconnected_voxels | [
"r",
"Fills",
"all",
"pores",
"that",
"are",
"not",
"connected",
"to",
"the",
"edges",
"of",
"the",
"image",
"."
] | 1e13875b56787d8f5b7ffdabce8c4342c33ba9f8 | https://github.com/PMEAL/porespy/blob/1e13875b56787d8f5b7ffdabce8c4342c33ba9f8/porespy/filters/__funcs__.py#L579-L601 | train | 213,408 |
def trim_floating_solid(im):
    r"""
    Removes all solid that is not attached to the edges of the image.

    Parameters
    ----------
    im : ND-array
        The image of the porous material

    Returns
    -------
    image : ND-array
        A version of ``im`` but with all the disconnected solid removed.

    See Also
    --------
    find_disconnected_voxels
    """
    result = sp.copy(im)
    # Solid voxels (False) with no path to the image boundary become pore
    floating = find_disconnected_voxels(~result)
    result[floating] = True
    return result
r"""
Removes all solid that that is not attached to the edges of the image.
Parameters
----------
im : ND-array
The image of the porous material
Returns
-------
image : ND-array
A version of ``im`` but with all the disconnected solid removed.
See Also
--------
find_disconnected_voxels
"""
im = sp.copy(im)
holes = find_disconnected_voxels(~im)
im[holes] = True
return im | [
"def",
"trim_floating_solid",
"(",
"im",
")",
":",
"im",
"=",
"sp",
".",
"copy",
"(",
"im",
")",
"holes",
"=",
"find_disconnected_voxels",
"(",
"~",
"im",
")",
"im",
"[",
"holes",
"]",
"=",
"True",
"return",
"im"
] | r"""
Removes all solid that that is not attached to the edges of the image.
Parameters
----------
im : ND-array
The image of the porous material
Returns
-------
image : ND-array
A version of ``im`` but with all the disconnected solid removed.
See Also
--------
find_disconnected_voxels | [
"r",
"Removes",
"all",
"solid",
"that",
"that",
"is",
"not",
"attached",
"to",
"the",
"edges",
"of",
"the",
"image",
"."
] | 1e13875b56787d8f5b7ffdabce8c4342c33ba9f8 | https://github.com/PMEAL/porespy/blob/1e13875b56787d8f5b7ffdabce8c4342c33ba9f8/porespy/filters/__funcs__.py#L604-L626 | train | 213,409 |
def trim_nonpercolating_paths(im, inlet_axis=0, outlet_axis=0):
    r"""
    Removes all nonpercolating paths between specified edges

    This function is essential when performing transport simulations on an
    image, since image regions that do not span between the desired inlet
    and outlet do not contribute to the transport.

    Parameters
    ----------
    im : ND-array
        The image of the porous material with ``True`` values indicating
        the phase of interest
    inlet_axis : int
        Inlet axis of boundary condition. For three dimensional image the
        number ranges from 0 to 2. For two dimensional image the range is
        between 0 to 1.
    outlet_axis : int
        Outlet axis of boundary condition. For three dimensional image the
        number ranges from 0 to 2. For two dimensional image the range is
        between 0 to 1.

    Returns
    -------
    image : ND-array
        A copy of ``im`` with all the nonpercolating paths removed

    See Also
    --------
    find_disconnected_voxels
    trim_floating_solid
    trim_blind_pores
    """
    if im.ndim != im.squeeze().ndim:
        warnings.warn('Input image conains a singleton axis:' + str(im.shape) +
                      ' Reduce dimensionality with np.squeeze(im) to avoid' +
                      ' unexpected behavior.')
    # Invert so the phase of interest becomes False; trim_floating_solid
    # then fills that phase's clusters that are disconnected from the
    # image edges (see trim_floating_solid), so ``im`` now holds the
    # INVERTED, cleaned image
    im = trim_floating_solid(~im)
    # Label the connected clusters of the (re-inverted) phase of interest
    labels = spim.label(~im)[0]
    inlet = sp.zeros_like(im, dtype=int)
    outlet = sp.zeros_like(im, dtype=int)
    # Mark the inlet and outlet faces of the chosen axes with 1's
    if im.ndim == 3:
        if inlet_axis == 0:
            inlet[0, :, :] = 1
        elif inlet_axis == 1:
            inlet[:, 0, :] = 1
        elif inlet_axis == 2:
            inlet[:, :, 0] = 1
        if outlet_axis == 0:
            outlet[-1, :, :] = 1
        elif outlet_axis == 1:
            outlet[:, -1, :] = 1
        elif outlet_axis == 2:
            outlet[:, :, -1] = 1
    if im.ndim == 2:
        if inlet_axis == 0:
            inlet[0, :] = 1
        elif inlet_axis == 1:
            inlet[:, 0] = 1
        if outlet_axis == 0:
            outlet[-1, :] = 1
        elif outlet_axis == 1:
            outlet[:, -1] = 1
    # Label values present on the inlet face and on the outlet face; both
    # sets always include 0 from the unmarked voxels
    IN = sp.unique(labels*inlet)
    OUT = sp.unique(labels*outlet)
    # Keep labels NOT in the symmetric difference, i.e. clusters touching
    # both faces -- or neither face.  NOTE(review): clusters touching
    # neither inlet nor outlet face are retained by this expression;
    # presumably trim_floating_solid above already removed the isolated
    # ones, but clusters touching only a non-inlet/outlet edge would
    # survive -- confirm this is intended.
    new_im = sp.isin(labels, list(set(IN) ^ set(OUT)), invert=True)
    # Mark the removed clusters as solid in the inverted image, then
    # invert back so ``True`` is again the phase of interest
    im[new_im == 0] = True
    return ~im
r"""
Removes all nonpercolating paths between specified edges
This function is essential when performing transport simulations on an
image, since image regions that do not span between the desired inlet and
outlet do not contribute to the transport.
Parameters
----------
im : ND-array
The image of the porous material with ```True`` values indicating the
phase of interest
inlet_axis : int
Inlet axis of boundary condition. For three dimensional image the
number ranges from 0 to 2. For two dimensional image the range is
between 0 to 1.
outlet_axis : int
Outlet axis of boundary condition. For three dimensional image the
number ranges from 0 to 2. For two dimensional image the range is
between 0 to 1.
Returns
-------
image : ND-array
A copy of ``im`` with all the nonpercolating paths removed
See Also
--------
find_disconnected_voxels
trim_floating_solid
trim_blind_pores
"""
if im.ndim != im.squeeze().ndim:
warnings.warn('Input image conains a singleton axis:' + str(im.shape) +
' Reduce dimensionality with np.squeeze(im) to avoid' +
' unexpected behavior.')
im = trim_floating_solid(~im)
labels = spim.label(~im)[0]
inlet = sp.zeros_like(im, dtype=int)
outlet = sp.zeros_like(im, dtype=int)
if im.ndim == 3:
if inlet_axis == 0:
inlet[0, :, :] = 1
elif inlet_axis == 1:
inlet[:, 0, :] = 1
elif inlet_axis == 2:
inlet[:, :, 0] = 1
if outlet_axis == 0:
outlet[-1, :, :] = 1
elif outlet_axis == 1:
outlet[:, -1, :] = 1
elif outlet_axis == 2:
outlet[:, :, -1] = 1
if im.ndim == 2:
if inlet_axis == 0:
inlet[0, :] = 1
elif inlet_axis == 1:
inlet[:, 0] = 1
if outlet_axis == 0:
outlet[-1, :] = 1
elif outlet_axis == 1:
outlet[:, -1] = 1
IN = sp.unique(labels*inlet)
OUT = sp.unique(labels*outlet)
new_im = sp.isin(labels, list(set(IN) ^ set(OUT)), invert=True)
im[new_im == 0] = True
return ~im | [
"def",
"trim_nonpercolating_paths",
"(",
"im",
",",
"inlet_axis",
"=",
"0",
",",
"outlet_axis",
"=",
"0",
")",
":",
"if",
"im",
".",
"ndim",
"!=",
"im",
".",
"squeeze",
"(",
")",
".",
"ndim",
":",
"warnings",
".",
"warn",
"(",
"'Input image conains a sin... | r"""
Removes all nonpercolating paths between specified edges
This function is essential when performing transport simulations on an
image, since image regions that do not span between the desired inlet and
outlet do not contribute to the transport.
Parameters
----------
im : ND-array
The image of the porous material with ```True`` values indicating the
phase of interest
inlet_axis : int
Inlet axis of boundary condition. For three dimensional image the
number ranges from 0 to 2. For two dimensional image the range is
between 0 to 1.
outlet_axis : int
Outlet axis of boundary condition. For three dimensional image the
number ranges from 0 to 2. For two dimensional image the range is
between 0 to 1.
Returns
-------
image : ND-array
A copy of ``im`` with all the nonpercolating paths removed
See Also
--------
find_disconnected_voxels
trim_floating_solid
trim_blind_pores | [
"r",
"Removes",
"all",
"nonpercolating",
"paths",
"between",
"specified",
"edges"
] | 1e13875b56787d8f5b7ffdabce8c4342c33ba9f8 | https://github.com/PMEAL/porespy/blob/1e13875b56787d8f5b7ffdabce8c4342c33ba9f8/porespy/filters/__funcs__.py#L629-L702 | train | 213,410 |
def trim_extrema(im, h, mode='maxima'):
    r"""
    Trims local extrema in greyscale values by a specified amount.

    This essentially decapitates peaks and/or floods valleys.

    Parameters
    ----------
    im : ND-array
        The image whose extrema are to be removed
    h : float
        The height to remove from each peak or fill in each valley
    mode : string {'maxima' | 'minima' | 'extrema'}
        Specifies whether to remove maxima or minima or both

    Returns
    -------
    image : ND-array
        A copy of the input image with all the peaks and/or valleys
        removed.

    Notes
    -----
    This function is referred to as **imhmax** or **imhmin** in Matlab.
    """
    result = im
    # Two independent ``if`` blocks (not if/elif) so that mode='extrema'
    # applies BOTH operations; the original elif silently skipped the
    # minima step whenever the maxima branch had already matched.
    if mode in ['maxima', 'extrema']:
        result = reconstruction(seed=result - h, mask=result,
                                method='dilation')
    if mode in ['minima', 'extrema']:
        result = reconstruction(seed=result + h, mask=result,
                                method='erosion')
    return result
r"""
Trims local extrema in greyscale values by a specified amount.
This essentially decapitates peaks and/or floods valleys.
Parameters
----------
im : ND-array
The image whose extrema are to be removed
h : float
The height to remove from each peak or fill in each valley
mode : string {'maxima' | 'minima' | 'extrema'}
Specifies whether to remove maxima or minima or both
Returns
-------
image : ND-array
A copy of the input image with all the peaks and/or valleys removed.
Notes
-----
This function is referred to as **imhmax** or **imhmin** in Matlab.
"""
result = im
if mode in ['maxima', 'extrema']:
result = reconstruction(seed=im - h, mask=im, method='dilation')
elif mode in ['minima', 'extrema']:
result = reconstruction(seed=im + h, mask=im, method='erosion')
return result | [
"def",
"trim_extrema",
"(",
"im",
",",
"h",
",",
"mode",
"=",
"'maxima'",
")",
":",
"result",
"=",
"im",
"if",
"mode",
"in",
"[",
"'maxima'",
",",
"'extrema'",
"]",
":",
"result",
"=",
"reconstruction",
"(",
"seed",
"=",
"im",
"-",
"h",
",",
"mask"... | r"""
Trims local extrema in greyscale values by a specified amount.
This essentially decapitates peaks and/or floods valleys.
Parameters
----------
im : ND-array
The image whose extrema are to be removed
h : float
The height to remove from each peak or fill in each valley
mode : string {'maxima' | 'minima' | 'extrema'}
Specifies whether to remove maxima or minima or both
Returns
-------
image : ND-array
A copy of the input image with all the peaks and/or valleys removed.
Notes
-----
This function is referred to as **imhmax** or **imhmin** in Matlab. | [
"r",
"Trims",
"local",
"extrema",
"in",
"greyscale",
"values",
"by",
"a",
"specified",
"amount",
"."
] | 1e13875b56787d8f5b7ffdabce8c4342c33ba9f8 | https://github.com/PMEAL/porespy/blob/1e13875b56787d8f5b7ffdabce8c4342c33ba9f8/porespy/filters/__funcs__.py#L705-L737 | train | 213,411 |
def find_dt_artifacts(dt):
    r"""
    Finds points in a distance transform that are closer to wall than
    solid.

    These points could *potentially* be erroneously high since their
    distance values do not reflect the possibility that solid may have
    been present beyond the border of the image but lost by trimming.

    Parameters
    ----------
    dt : ND-array
        The distance transform of the phase of interest

    Returns
    -------
    image : ND-array
        An ND-array the same shape as ``dt`` with numerical values
        indicating the maximum amount of error in each voxel, found by
        subtracting the distance to the nearest image edge from the
        distance transform value.  Voxels with a value of zero have no
        error.
    """
    # Distance from every voxel to the nearest image border: axis-wise
    # minimum of linear distance transforms over an all-True image
    border_dist = sp.ones(shape=dt.shape)*sp.inf
    for axis in range(dt.ndim):
        axial = distance_transform_lin(sp.ones_like(border_dist, dtype=bool),
                                       axis=axis, mode='both')
        border_dist = sp.minimum(border_dist, axial)
    # Positive values flag voxels whose dt value may be overestimated
    return sp.clip(dt - border_dist, a_min=0, a_max=sp.inf)
r"""
Finds points in a distance transform that are closer to wall than solid.
These points could *potentially* be erroneously high since their distance
values do not reflect the possibility that solid may have been present
beyond the border of the image but lost by trimming.
Parameters
----------
dt : ND-array
The distance transform of the phase of interest
Returns
-------
image : ND-array
An ND-array the same shape as ``dt`` with numerical values indicating
the maximum amount of error in each volxel, which is found by
subtracting the distance to nearest edge of image from the distance
transform value. In other words, this is the error that would be found
if there were a solid voxel lurking just beyond the nearest edge of
the image. Obviously, voxels with a value of zero have no error.
"""
temp = sp.ones(shape=dt.shape)*sp.inf
for ax in range(dt.ndim):
dt_lin = distance_transform_lin(sp.ones_like(temp, dtype=bool),
axis=ax, mode='both')
temp = sp.minimum(temp, dt_lin)
result = sp.clip(dt - temp, a_min=0, a_max=sp.inf)
return result | [
"def",
"find_dt_artifacts",
"(",
"dt",
")",
":",
"temp",
"=",
"sp",
".",
"ones",
"(",
"shape",
"=",
"dt",
".",
"shape",
")",
"*",
"sp",
".",
"inf",
"for",
"ax",
"in",
"range",
"(",
"dt",
".",
"ndim",
")",
":",
"dt_lin",
"=",
"distance_transform_lin... | r"""
Finds points in a distance transform that are closer to wall than solid.
These points could *potentially* be erroneously high since their distance
values do not reflect the possibility that solid may have been present
beyond the border of the image but lost by trimming.
Parameters
----------
dt : ND-array
The distance transform of the phase of interest
Returns
-------
image : ND-array
An ND-array the same shape as ``dt`` with numerical values indicating
the maximum amount of error in each volxel, which is found by
subtracting the distance to nearest edge of image from the distance
transform value. In other words, this is the error that would be found
if there were a solid voxel lurking just beyond the nearest edge of
the image. Obviously, voxels with a value of zero have no error. | [
"r",
"Finds",
"points",
"in",
"a",
"distance",
"transform",
"that",
"are",
"closer",
"to",
"wall",
"than",
"solid",
"."
] | 1e13875b56787d8f5b7ffdabce8c4342c33ba9f8 | https://github.com/PMEAL/porespy/blob/1e13875b56787d8f5b7ffdabce8c4342c33ba9f8/porespy/filters/__funcs__.py#L805-L835 | train | 213,412 |
PMEAL/porespy | porespy/filters/__funcs__.py | region_size | def region_size(im):
r"""
Replace each voxel with size of region to which it belongs
Parameters
----------
im : ND-array
Either a boolean image wtih ``True`` indicating the features of
interest, in which case ``scipy.ndimage.label`` will be applied to
find regions, or a greyscale image with integer values indicating
regions.
Returns
-------
image : ND-array
A copy of ``im`` with each voxel value indicating the size of the
region to which it belongs. This is particularly useful for finding
chord sizes on the image produced by ``apply_chords``.
"""
if im.dtype == bool:
im = spim.label(im)[0]
counts = sp.bincount(im.flatten())
counts[0] = 0
chords = counts[im]
return chords | python | def region_size(im):
r"""
Replace each voxel with size of region to which it belongs
Parameters
----------
im : ND-array
Either a boolean image wtih ``True`` indicating the features of
interest, in which case ``scipy.ndimage.label`` will be applied to
find regions, or a greyscale image with integer values indicating
regions.
Returns
-------
image : ND-array
A copy of ``im`` with each voxel value indicating the size of the
region to which it belongs. This is particularly useful for finding
chord sizes on the image produced by ``apply_chords``.
"""
if im.dtype == bool:
im = spim.label(im)[0]
counts = sp.bincount(im.flatten())
counts[0] = 0
chords = counts[im]
return chords | [
"def",
"region_size",
"(",
"im",
")",
":",
"if",
"im",
".",
"dtype",
"==",
"bool",
":",
"im",
"=",
"spim",
".",
"label",
"(",
"im",
")",
"[",
"0",
"]",
"counts",
"=",
"sp",
".",
"bincount",
"(",
"im",
".",
"flatten",
"(",
")",
")",
"counts",
... | r"""
Replace each voxel with size of region to which it belongs
Parameters
----------
im : ND-array
Either a boolean image wtih ``True`` indicating the features of
interest, in which case ``scipy.ndimage.label`` will be applied to
find regions, or a greyscale image with integer values indicating
regions.
Returns
-------
image : ND-array
A copy of ``im`` with each voxel value indicating the size of the
region to which it belongs. This is particularly useful for finding
chord sizes on the image produced by ``apply_chords``. | [
"r",
"Replace",
"each",
"voxel",
"with",
"size",
"of",
"region",
"to",
"which",
"it",
"belongs"
] | 1e13875b56787d8f5b7ffdabce8c4342c33ba9f8 | https://github.com/PMEAL/porespy/blob/1e13875b56787d8f5b7ffdabce8c4342c33ba9f8/porespy/filters/__funcs__.py#L838-L862 | train | 213,413 |
PMEAL/porespy | porespy/filters/__funcs__.py | apply_chords | def apply_chords(im, spacing=1, axis=0, trim_edges=True, label=False):
r"""
Adds chords to the void space in the specified direction. The chords are
separated by 1 voxel plus the provided spacing.
Parameters
----------
im : ND-array
An image of the porous material with void marked as ``True``.
spacing : int
Separation between chords. The default is 1 voxel. This can be
decreased to 0, meaning that the chords all touch each other, which
automatically sets to the ``label`` argument to ``True``.
axis : int (default = 0)
The axis along which the chords are drawn.
trim_edges : bool (default = ``True``)
Whether or not to remove chords that touch the edges of the image.
These chords are artifically shortened, so skew the chord length
distribution.
label : bool (default is ``False``)
If ``True`` the chords in the returned image are each given a unique
label, such that all voxels lying on the same chord have the same
value. This is automatically set to ``True`` if spacing is 0, but is
``False`` otherwise.
Returns
-------
image : ND-array
A copy of ``im`` with non-zero values indicating the chords.
See Also
--------
apply_chords_3D
"""
if im.ndim != im.squeeze().ndim:
warnings.warn('Input image conains a singleton axis:' + str(im.shape) +
' Reduce dimensionality with np.squeeze(im) to avoid' +
' unexpected behavior.')
if spacing < 0:
raise Exception('Spacing cannot be less than 0')
if spacing == 0:
label = True
result = sp.zeros(im.shape, dtype=int) # Will receive chords at end
slxyz = [slice(None, None, spacing*(axis != i) + 1) for i in [0, 1, 2]]
slices = tuple(slxyz[:im.ndim])
s = [[0, 1, 0], [0, 1, 0], [0, 1, 0]] # Straight-line structuring element
if im.ndim == 3: # Make structuring element 3D if necessary
s = sp.pad(sp.atleast_3d(s), pad_width=((0, 0), (0, 0), (1, 1)),
mode='constant', constant_values=0)
im = im[slices]
s = sp.swapaxes(s, 0, axis)
chords = spim.label(im, structure=s)[0]
if trim_edges: # Label on border chords will be set to 0
chords = clear_border(chords)
result[slices] = chords # Place chords into empty image created at top
if label is False: # Remove label if not requested
result = result > 0
return result | python | def apply_chords(im, spacing=1, axis=0, trim_edges=True, label=False):
r"""
Adds chords to the void space in the specified direction. The chords are
separated by 1 voxel plus the provided spacing.
Parameters
----------
im : ND-array
An image of the porous material with void marked as ``True``.
spacing : int
Separation between chords. The default is 1 voxel. This can be
decreased to 0, meaning that the chords all touch each other, which
automatically sets to the ``label`` argument to ``True``.
axis : int (default = 0)
The axis along which the chords are drawn.
trim_edges : bool (default = ``True``)
Whether or not to remove chords that touch the edges of the image.
These chords are artifically shortened, so skew the chord length
distribution.
label : bool (default is ``False``)
If ``True`` the chords in the returned image are each given a unique
label, such that all voxels lying on the same chord have the same
value. This is automatically set to ``True`` if spacing is 0, but is
``False`` otherwise.
Returns
-------
image : ND-array
A copy of ``im`` with non-zero values indicating the chords.
See Also
--------
apply_chords_3D
"""
if im.ndim != im.squeeze().ndim:
warnings.warn('Input image conains a singleton axis:' + str(im.shape) +
' Reduce dimensionality with np.squeeze(im) to avoid' +
' unexpected behavior.')
if spacing < 0:
raise Exception('Spacing cannot be less than 0')
if spacing == 0:
label = True
result = sp.zeros(im.shape, dtype=int) # Will receive chords at end
slxyz = [slice(None, None, spacing*(axis != i) + 1) for i in [0, 1, 2]]
slices = tuple(slxyz[:im.ndim])
s = [[0, 1, 0], [0, 1, 0], [0, 1, 0]] # Straight-line structuring element
if im.ndim == 3: # Make structuring element 3D if necessary
s = sp.pad(sp.atleast_3d(s), pad_width=((0, 0), (0, 0), (1, 1)),
mode='constant', constant_values=0)
im = im[slices]
s = sp.swapaxes(s, 0, axis)
chords = spim.label(im, structure=s)[0]
if trim_edges: # Label on border chords will be set to 0
chords = clear_border(chords)
result[slices] = chords # Place chords into empty image created at top
if label is False: # Remove label if not requested
result = result > 0
return result | [
"def",
"apply_chords",
"(",
"im",
",",
"spacing",
"=",
"1",
",",
"axis",
"=",
"0",
",",
"trim_edges",
"=",
"True",
",",
"label",
"=",
"False",
")",
":",
"if",
"im",
".",
"ndim",
"!=",
"im",
".",
"squeeze",
"(",
")",
".",
"ndim",
":",
"warnings",
... | r"""
Adds chords to the void space in the specified direction. The chords are
separated by 1 voxel plus the provided spacing.
Parameters
----------
im : ND-array
An image of the porous material with void marked as ``True``.
spacing : int
Separation between chords. The default is 1 voxel. This can be
decreased to 0, meaning that the chords all touch each other, which
automatically sets to the ``label`` argument to ``True``.
axis : int (default = 0)
The axis along which the chords are drawn.
trim_edges : bool (default = ``True``)
Whether or not to remove chords that touch the edges of the image.
These chords are artifically shortened, so skew the chord length
distribution.
label : bool (default is ``False``)
If ``True`` the chords in the returned image are each given a unique
label, such that all voxels lying on the same chord have the same
value. This is automatically set to ``True`` if spacing is 0, but is
``False`` otherwise.
Returns
-------
image : ND-array
A copy of ``im`` with non-zero values indicating the chords.
See Also
--------
apply_chords_3D | [
"r",
"Adds",
"chords",
"to",
"the",
"void",
"space",
"in",
"the",
"specified",
"direction",
".",
"The",
"chords",
"are",
"separated",
"by",
"1",
"voxel",
"plus",
"the",
"provided",
"spacing",
"."
] | 1e13875b56787d8f5b7ffdabce8c4342c33ba9f8 | https://github.com/PMEAL/porespy/blob/1e13875b56787d8f5b7ffdabce8c4342c33ba9f8/porespy/filters/__funcs__.py#L865-L927 | train | 213,414 |
PMEAL/porespy | porespy/filters/__funcs__.py | apply_chords_3D | def apply_chords_3D(im, spacing=0, trim_edges=True):
r"""
Adds chords to the void space in all three principle directions. The
chords are seprated by 1 voxel plus the provided spacing. Chords in the X,
Y and Z directions are labelled 1, 2 and 3 resepctively.
Parameters
----------
im : ND-array
A 3D image of the porous material with void space marked as True.
spacing : int (default = 0)
Chords are automatically separed by 1 voxel on all sides, and this
argument increases the separation.
trim_edges : bool (default is ``True``)
Whether or not to remove chords that touch the edges of the image.
These chords are artifically shortened, so skew the chord length
distribution
Returns
-------
image : ND-array
A copy of ``im`` with values of 1 indicating x-direction chords,
2 indicating y-direction chords, and 3 indicating z-direction chords.
Notes
-----
The chords are separated by a spacing of at least 1 voxel so that tools
that search for connected components, such as ``scipy.ndimage.label`` can
detect individual chords.
See Also
--------
apply_chords
"""
if im.ndim != im.squeeze().ndim:
warnings.warn('Input image conains a singleton axis:' + str(im.shape) +
' Reduce dimensionality with np.squeeze(im) to avoid' +
' unexpected behavior.')
if im.ndim < 3:
raise Exception('Must be a 3D image to use this function')
if spacing < 0:
raise Exception('Spacing cannot be less than 0')
ch = sp.zeros_like(im, dtype=int)
ch[:, ::4+2*spacing, ::4+2*spacing] = 1 # X-direction
ch[::4+2*spacing, :, 2::4+2*spacing] = 2 # Y-direction
ch[2::4+2*spacing, 2::4+2*spacing, :] = 3 # Z-direction
chords = ch*im
if trim_edges:
temp = clear_border(spim.label(chords > 0)[0]) > 0
chords = temp*chords
return chords | python | def apply_chords_3D(im, spacing=0, trim_edges=True):
r"""
Adds chords to the void space in all three principle directions. The
chords are seprated by 1 voxel plus the provided spacing. Chords in the X,
Y and Z directions are labelled 1, 2 and 3 resepctively.
Parameters
----------
im : ND-array
A 3D image of the porous material with void space marked as True.
spacing : int (default = 0)
Chords are automatically separed by 1 voxel on all sides, and this
argument increases the separation.
trim_edges : bool (default is ``True``)
Whether or not to remove chords that touch the edges of the image.
These chords are artifically shortened, so skew the chord length
distribution
Returns
-------
image : ND-array
A copy of ``im`` with values of 1 indicating x-direction chords,
2 indicating y-direction chords, and 3 indicating z-direction chords.
Notes
-----
The chords are separated by a spacing of at least 1 voxel so that tools
that search for connected components, such as ``scipy.ndimage.label`` can
detect individual chords.
See Also
--------
apply_chords
"""
if im.ndim != im.squeeze().ndim:
warnings.warn('Input image conains a singleton axis:' + str(im.shape) +
' Reduce dimensionality with np.squeeze(im) to avoid' +
' unexpected behavior.')
if im.ndim < 3:
raise Exception('Must be a 3D image to use this function')
if spacing < 0:
raise Exception('Spacing cannot be less than 0')
ch = sp.zeros_like(im, dtype=int)
ch[:, ::4+2*spacing, ::4+2*spacing] = 1 # X-direction
ch[::4+2*spacing, :, 2::4+2*spacing] = 2 # Y-direction
ch[2::4+2*spacing, 2::4+2*spacing, :] = 3 # Z-direction
chords = ch*im
if trim_edges:
temp = clear_border(spim.label(chords > 0)[0]) > 0
chords = temp*chords
return chords | [
"def",
"apply_chords_3D",
"(",
"im",
",",
"spacing",
"=",
"0",
",",
"trim_edges",
"=",
"True",
")",
":",
"if",
"im",
".",
"ndim",
"!=",
"im",
".",
"squeeze",
"(",
")",
".",
"ndim",
":",
"warnings",
".",
"warn",
"(",
"'Input image conains a singleton axis... | r"""
Adds chords to the void space in all three principle directions. The
chords are seprated by 1 voxel plus the provided spacing. Chords in the X,
Y and Z directions are labelled 1, 2 and 3 resepctively.
Parameters
----------
im : ND-array
A 3D image of the porous material with void space marked as True.
spacing : int (default = 0)
Chords are automatically separed by 1 voxel on all sides, and this
argument increases the separation.
trim_edges : bool (default is ``True``)
Whether or not to remove chords that touch the edges of the image.
These chords are artifically shortened, so skew the chord length
distribution
Returns
-------
image : ND-array
A copy of ``im`` with values of 1 indicating x-direction chords,
2 indicating y-direction chords, and 3 indicating z-direction chords.
Notes
-----
The chords are separated by a spacing of at least 1 voxel so that tools
that search for connected components, such as ``scipy.ndimage.label`` can
detect individual chords.
See Also
--------
apply_chords | [
"r",
"Adds",
"chords",
"to",
"the",
"void",
"space",
"in",
"all",
"three",
"principle",
"directions",
".",
"The",
"chords",
"are",
"seprated",
"by",
"1",
"voxel",
"plus",
"the",
"provided",
"spacing",
".",
"Chords",
"in",
"the",
"X",
"Y",
"and",
"Z",
"... | 1e13875b56787d8f5b7ffdabce8c4342c33ba9f8 | https://github.com/PMEAL/porespy/blob/1e13875b56787d8f5b7ffdabce8c4342c33ba9f8/porespy/filters/__funcs__.py#L930-L983 | train | 213,415 |
PMEAL/porespy | porespy/filters/__funcs__.py | porosimetry | def porosimetry(im, sizes=25, inlets=None, access_limited=True,
mode='hybrid'):
r"""
Performs a porosimetry simulution on the image
Parameters
----------
im : ND-array
An ND image of the porous material containing True values in the
pore space.
sizes : array_like or scalar
The sizes to invade. If a list of values of provided they are used
directly. If a scalar is provided then that number of points spanning
the min and max of the distance transform are used.
inlets : ND-array, boolean
A boolean mask with True values indicating where the invasion
enters the image. By default all faces are considered inlets,
akin to a mercury porosimetry experiment. Users can also apply
solid boundaries to their image externally before passing it in,
allowing for complex inlets like circular openings, etc. This argument
is only used if ``access_limited`` is ``True``.
access_limited : Boolean
This flag indicates if the intrusion should only occur from the
surfaces (``access_limited`` is True, which is the default), or
if the invading phase should be allowed to appear in the core of
the image. The former simulates experimental tools like mercury
intrusion porosimetry, while the latter is useful for comparison
to gauge the extent of shielding effects in the sample.
mode : string
Controls with method is used to compute the result. Options are:
'hybrid' - (default) Performs a distance tranform of the void space,
thresholds to find voxels larger than ``sizes[i]``, trims the resulting
mask if ``access_limitations`` is ``True``, then dilates it using the
efficient fft-method to obtain the non-wetting fluid configuration.
'dt' - Same as 'hybrid', except uses a second distance transform,
relative to the thresholded mask, to find the invading fluid
configuration. The choice of 'dt' or 'hybrid' depends on speed, which
is system and installation specific.
'mio' - Using a single morphological image opening step to obtain the
invading fluid confirguration directly, *then* trims if
``access_limitations`` is ``True``. This method is not ideal and is
included mostly for comparison purposes. The morphological operations
are done using fft-based method implementations.
Returns
-------
image : ND-array
A copy of ``im`` with voxel values indicating the sphere radius at
which it becomes accessible from the inlets. This image can be used
to find invading fluid configurations as a function of applied
capillary pressure by applying a boolean comparison:
``inv_phase = im > r`` where ``r`` is the radius (in voxels) of the
invading sphere. Of course, ``r`` can be converted to capillary
pressure using your favorite model.
Notes
-----
There are many ways to perform this filter, and PoreSpy offer 3, which
users can choose between via the ``mode`` argument. These methods all
work in a similar way by finding which foreground voxels can accomodate
a sphere of a given radius, then repeating for smaller radii.
See Also
--------
fftmorphology
local_thickness
"""
if im.ndim != im.squeeze().ndim:
warnings.warn('Input image conains a singleton axis:' + str(im.shape) +
' Reduce dimensionality with np.squeeze(im) to avoid' +
' unexpected behavior.')
dt = spim.distance_transform_edt(im > 0)
if inlets is None:
inlets = get_border(im.shape, mode='faces')
if isinstance(sizes, int):
sizes = sp.logspace(start=sp.log10(sp.amax(dt)), stop=0, num=sizes)
else:
sizes = sp.unique(sizes)[-1::-1]
if im.ndim == 2:
strel = ps_disk
else:
strel = ps_ball
if mode == 'mio':
pw = int(sp.floor(dt.max()))
impad = sp.pad(im, mode='symmetric', pad_width=pw)
inletspad = sp.pad(inlets, mode='symmetric', pad_width=pw)
inlets = sp.where(inletspad)
# sizes = sp.unique(sp.around(sizes, decimals=0).astype(int))[-1::-1]
imresults = sp.zeros(sp.shape(impad))
for r in tqdm(sizes):
imtemp = fftmorphology(impad, strel(r), mode='erosion')
if access_limited:
imtemp = trim_disconnected_blobs(imtemp, inlets)
imtemp = fftmorphology(imtemp, strel(r), mode='dilation')
if sp.any(imtemp):
imresults[(imresults == 0)*imtemp] = r
imresults = extract_subsection(imresults, shape=im.shape)
elif mode == 'dt':
inlets = sp.where(inlets)
imresults = sp.zeros(sp.shape(im))
for r in tqdm(sizes):
imtemp = dt >= r
if access_limited:
imtemp = trim_disconnected_blobs(imtemp, inlets)
if sp.any(imtemp):
imtemp = spim.distance_transform_edt(~imtemp) < r
imresults[(imresults == 0)*imtemp] = r
elif mode == 'hybrid':
inlets = sp.where(inlets)
imresults = sp.zeros(sp.shape(im))
for r in tqdm(sizes):
imtemp = dt >= r
if access_limited:
imtemp = trim_disconnected_blobs(imtemp, inlets)
if sp.any(imtemp):
imtemp = fftconvolve(imtemp, strel(r), mode='same') > 0.0001
imresults[(imresults == 0)*imtemp] = r
else:
raise Exception('Unreckognized mode ' + mode)
return imresults | python | def porosimetry(im, sizes=25, inlets=None, access_limited=True,
mode='hybrid'):
r"""
Performs a porosimetry simulution on the image
Parameters
----------
im : ND-array
An ND image of the porous material containing True values in the
pore space.
sizes : array_like or scalar
The sizes to invade. If a list of values of provided they are used
directly. If a scalar is provided then that number of points spanning
the min and max of the distance transform are used.
inlets : ND-array, boolean
A boolean mask with True values indicating where the invasion
enters the image. By default all faces are considered inlets,
akin to a mercury porosimetry experiment. Users can also apply
solid boundaries to their image externally before passing it in,
allowing for complex inlets like circular openings, etc. This argument
is only used if ``access_limited`` is ``True``.
access_limited : Boolean
This flag indicates if the intrusion should only occur from the
surfaces (``access_limited`` is True, which is the default), or
if the invading phase should be allowed to appear in the core of
the image. The former simulates experimental tools like mercury
intrusion porosimetry, while the latter is useful for comparison
to gauge the extent of shielding effects in the sample.
mode : string
Controls with method is used to compute the result. Options are:
'hybrid' - (default) Performs a distance tranform of the void space,
thresholds to find voxels larger than ``sizes[i]``, trims the resulting
mask if ``access_limitations`` is ``True``, then dilates it using the
efficient fft-method to obtain the non-wetting fluid configuration.
'dt' - Same as 'hybrid', except uses a second distance transform,
relative to the thresholded mask, to find the invading fluid
configuration. The choice of 'dt' or 'hybrid' depends on speed, which
is system and installation specific.
'mio' - Using a single morphological image opening step to obtain the
invading fluid confirguration directly, *then* trims if
``access_limitations`` is ``True``. This method is not ideal and is
included mostly for comparison purposes. The morphological operations
are done using fft-based method implementations.
Returns
-------
image : ND-array
A copy of ``im`` with voxel values indicating the sphere radius at
which it becomes accessible from the inlets. This image can be used
to find invading fluid configurations as a function of applied
capillary pressure by applying a boolean comparison:
``inv_phase = im > r`` where ``r`` is the radius (in voxels) of the
invading sphere. Of course, ``r`` can be converted to capillary
pressure using your favorite model.
Notes
-----
There are many ways to perform this filter, and PoreSpy offer 3, which
users can choose between via the ``mode`` argument. These methods all
work in a similar way by finding which foreground voxels can accomodate
a sphere of a given radius, then repeating for smaller radii.
See Also
--------
fftmorphology
local_thickness
"""
if im.ndim != im.squeeze().ndim:
warnings.warn('Input image conains a singleton axis:' + str(im.shape) +
' Reduce dimensionality with np.squeeze(im) to avoid' +
' unexpected behavior.')
dt = spim.distance_transform_edt(im > 0)
if inlets is None:
inlets = get_border(im.shape, mode='faces')
if isinstance(sizes, int):
sizes = sp.logspace(start=sp.log10(sp.amax(dt)), stop=0, num=sizes)
else:
sizes = sp.unique(sizes)[-1::-1]
if im.ndim == 2:
strel = ps_disk
else:
strel = ps_ball
if mode == 'mio':
pw = int(sp.floor(dt.max()))
impad = sp.pad(im, mode='symmetric', pad_width=pw)
inletspad = sp.pad(inlets, mode='symmetric', pad_width=pw)
inlets = sp.where(inletspad)
# sizes = sp.unique(sp.around(sizes, decimals=0).astype(int))[-1::-1]
imresults = sp.zeros(sp.shape(impad))
for r in tqdm(sizes):
imtemp = fftmorphology(impad, strel(r), mode='erosion')
if access_limited:
imtemp = trim_disconnected_blobs(imtemp, inlets)
imtemp = fftmorphology(imtemp, strel(r), mode='dilation')
if sp.any(imtemp):
imresults[(imresults == 0)*imtemp] = r
imresults = extract_subsection(imresults, shape=im.shape)
elif mode == 'dt':
inlets = sp.where(inlets)
imresults = sp.zeros(sp.shape(im))
for r in tqdm(sizes):
imtemp = dt >= r
if access_limited:
imtemp = trim_disconnected_blobs(imtemp, inlets)
if sp.any(imtemp):
imtemp = spim.distance_transform_edt(~imtemp) < r
imresults[(imresults == 0)*imtemp] = r
elif mode == 'hybrid':
inlets = sp.where(inlets)
imresults = sp.zeros(sp.shape(im))
for r in tqdm(sizes):
imtemp = dt >= r
if access_limited:
imtemp = trim_disconnected_blobs(imtemp, inlets)
if sp.any(imtemp):
imtemp = fftconvolve(imtemp, strel(r), mode='same') > 0.0001
imresults[(imresults == 0)*imtemp] = r
else:
raise Exception('Unreckognized mode ' + mode)
return imresults | [
"def",
"porosimetry",
"(",
"im",
",",
"sizes",
"=",
"25",
",",
"inlets",
"=",
"None",
",",
"access_limited",
"=",
"True",
",",
"mode",
"=",
"'hybrid'",
")",
":",
"if",
"im",
".",
"ndim",
"!=",
"im",
".",
"squeeze",
"(",
")",
".",
"ndim",
":",
"wa... | r"""
Performs a porosimetry simulution on the image
Parameters
----------
im : ND-array
An ND image of the porous material containing True values in the
pore space.
sizes : array_like or scalar
The sizes to invade. If a list of values of provided they are used
directly. If a scalar is provided then that number of points spanning
the min and max of the distance transform are used.
inlets : ND-array, boolean
A boolean mask with True values indicating where the invasion
enters the image. By default all faces are considered inlets,
akin to a mercury porosimetry experiment. Users can also apply
solid boundaries to their image externally before passing it in,
allowing for complex inlets like circular openings, etc. This argument
is only used if ``access_limited`` is ``True``.
access_limited : Boolean
This flag indicates if the intrusion should only occur from the
surfaces (``access_limited`` is True, which is the default), or
if the invading phase should be allowed to appear in the core of
the image. The former simulates experimental tools like mercury
intrusion porosimetry, while the latter is useful for comparison
to gauge the extent of shielding effects in the sample.
mode : string
Controls with method is used to compute the result. Options are:
'hybrid' - (default) Performs a distance tranform of the void space,
thresholds to find voxels larger than ``sizes[i]``, trims the resulting
mask if ``access_limitations`` is ``True``, then dilates it using the
efficient fft-method to obtain the non-wetting fluid configuration.
'dt' - Same as 'hybrid', except uses a second distance transform,
relative to the thresholded mask, to find the invading fluid
configuration. The choice of 'dt' or 'hybrid' depends on speed, which
is system and installation specific.
'mio' - Using a single morphological image opening step to obtain the
invading fluid confirguration directly, *then* trims if
``access_limitations`` is ``True``. This method is not ideal and is
included mostly for comparison purposes. The morphological operations
are done using fft-based method implementations.
Returns
-------
image : ND-array
A copy of ``im`` with voxel values indicating the sphere radius at
which it becomes accessible from the inlets. This image can be used
to find invading fluid configurations as a function of applied
capillary pressure by applying a boolean comparison:
``inv_phase = im > r`` where ``r`` is the radius (in voxels) of the
invading sphere. Of course, ``r`` can be converted to capillary
pressure using your favorite model.
Notes
-----
There are many ways to perform this filter, and PoreSpy offer 3, which
users can choose between via the ``mode`` argument. These methods all
work in a similar way by finding which foreground voxels can accomodate
a sphere of a given radius, then repeating for smaller radii.
See Also
--------
fftmorphology
local_thickness | [
"r",
"Performs",
"a",
"porosimetry",
"simulution",
"on",
"the",
"image"
] | 1e13875b56787d8f5b7ffdabce8c4342c33ba9f8 | https://github.com/PMEAL/porespy/blob/1e13875b56787d8f5b7ffdabce8c4342c33ba9f8/porespy/filters/__funcs__.py#L1050-L1182 | train | 213,416 |
PMEAL/porespy | porespy/filters/__funcs__.py | trim_disconnected_blobs | def trim_disconnected_blobs(im, inlets):
r"""
Removes foreground voxels not connected to specified inlets
Parameters
----------
im : ND-array
The array to be trimmed
inlets : ND-array of tuple of indices
The locations of the inlets. Any voxels *not* connected directly to
the inlets will be trimmed
Returns
-------
image : ND-array
An array of the same shape as ``im``, but with all foreground
voxels not connected to the ``inlets`` removed.
"""
temp = sp.zeros_like(im)
temp[inlets] = True
labels, N = spim.label(im + temp)
im = im ^ (clear_border(labels=labels) > 0)
return im | python | def trim_disconnected_blobs(im, inlets):
r"""
Removes foreground voxels not connected to specified inlets
Parameters
----------
im : ND-array
The array to be trimmed
inlets : ND-array of tuple of indices
The locations of the inlets. Any voxels *not* connected directly to
the inlets will be trimmed
Returns
-------
image : ND-array
An array of the same shape as ``im``, but with all foreground
voxels not connected to the ``inlets`` removed.
"""
temp = sp.zeros_like(im)
temp[inlets] = True
labels, N = spim.label(im + temp)
im = im ^ (clear_border(labels=labels) > 0)
return im | [
"def",
"trim_disconnected_blobs",
"(",
"im",
",",
"inlets",
")",
":",
"temp",
"=",
"sp",
".",
"zeros_like",
"(",
"im",
")",
"temp",
"[",
"inlets",
"]",
"=",
"True",
"labels",
",",
"N",
"=",
"spim",
".",
"label",
"(",
"im",
"+",
"temp",
")",
"im",
... | r"""
Removes foreground voxels not connected to specified inlets
Parameters
----------
im : ND-array
The array to be trimmed
inlets : ND-array of tuple of indices
The locations of the inlets. Any voxels *not* connected directly to
the inlets will be trimmed
Returns
-------
image : ND-array
An array of the same shape as ``im``, but with all foreground
voxels not connected to the ``inlets`` removed. | [
"r",
"Removes",
"foreground",
"voxels",
"not",
"connected",
"to",
"specified",
"inlets"
] | 1e13875b56787d8f5b7ffdabce8c4342c33ba9f8 | https://github.com/PMEAL/porespy/blob/1e13875b56787d8f5b7ffdabce8c4342c33ba9f8/porespy/filters/__funcs__.py#L1185-L1207 | train | 213,417 |
PMEAL/porespy | porespy/filters/__funcs__.py | _make_stack | def _make_stack(im, include_diagonals=False):
r'''
Creates a stack of images with one extra dimension to the input image
with length equal to the number of borders to search + 1.
Image is rolled along the axial shifts so that the border pixel is
overlapping the original pixel. First image in stack is the original.
Stacking makes direct vectorized array comparisons possible.
'''
ndim = len(np.shape(im))
axial_shift = _get_axial_shifts(ndim, include_diagonals)
if ndim == 2:
stack = np.zeros([np.shape(im)[0],
np.shape(im)[1],
len(axial_shift)+1])
stack[:, :, 0] = im
for i in range(len(axial_shift)):
ax0, ax1 = axial_shift[i]
temp = np.roll(np.roll(im, ax0, 0), ax1, 1)
stack[:, :, i+1] = temp
return stack
elif ndim == 3:
stack = np.zeros([np.shape(im)[0],
np.shape(im)[1],
np.shape(im)[2],
len(axial_shift)+1])
stack[:, :, :, 0] = im
for i in range(len(axial_shift)):
ax0, ax1, ax2 = axial_shift[i]
temp = np.roll(np.roll(np.roll(im, ax0, 0), ax1, 1), ax2, 2)
stack[:, :, :, i+1] = temp
return stack | python | def _make_stack(im, include_diagonals=False):
r'''
Creates a stack of images with one extra dimension to the input image
with length equal to the number of borders to search + 1.
Image is rolled along the axial shifts so that the border pixel is
overlapping the original pixel. First image in stack is the original.
Stacking makes direct vectorized array comparisons possible.
'''
ndim = len(np.shape(im))
axial_shift = _get_axial_shifts(ndim, include_diagonals)
if ndim == 2:
stack = np.zeros([np.shape(im)[0],
np.shape(im)[1],
len(axial_shift)+1])
stack[:, :, 0] = im
for i in range(len(axial_shift)):
ax0, ax1 = axial_shift[i]
temp = np.roll(np.roll(im, ax0, 0), ax1, 1)
stack[:, :, i+1] = temp
return stack
elif ndim == 3:
stack = np.zeros([np.shape(im)[0],
np.shape(im)[1],
np.shape(im)[2],
len(axial_shift)+1])
stack[:, :, :, 0] = im
for i in range(len(axial_shift)):
ax0, ax1, ax2 = axial_shift[i]
temp = np.roll(np.roll(np.roll(im, ax0, 0), ax1, 1), ax2, 2)
stack[:, :, :, i+1] = temp
return stack | [
"def",
"_make_stack",
"(",
"im",
",",
"include_diagonals",
"=",
"False",
")",
":",
"ndim",
"=",
"len",
"(",
"np",
".",
"shape",
"(",
"im",
")",
")",
"axial_shift",
"=",
"_get_axial_shifts",
"(",
"ndim",
",",
"include_diagonals",
")",
"if",
"ndim",
"==",
... | r'''
Creates a stack of images with one extra dimension to the input image
with length equal to the number of borders to search + 1.
Image is rolled along the axial shifts so that the border pixel is
overlapping the original pixel. First image in stack is the original.
Stacking makes direct vectorized array comparisons possible. | [
"r",
"Creates",
"a",
"stack",
"of",
"images",
"with",
"one",
"extra",
"dimension",
"to",
"the",
"input",
"image",
"with",
"length",
"equal",
"to",
"the",
"number",
"of",
"borders",
"to",
"search",
"+",
"1",
".",
"Image",
"is",
"rolled",
"along",
"the",
... | 1e13875b56787d8f5b7ffdabce8c4342c33ba9f8 | https://github.com/PMEAL/porespy/blob/1e13875b56787d8f5b7ffdabce8c4342c33ba9f8/porespy/filters/__funcs__.py#L1238-L1268 | train | 213,418 |
PMEAL/porespy | porespy/networks/__funcs__.py | map_to_regions | def map_to_regions(regions, values):
r"""
Maps pore values from a network onto the image from which it was extracted
This function assumes that the pore numbering in the network has remained
unchanged from the region labels in the partitioned image.
Parameters
----------
regions : ND-array
An image of the pore space partitioned into regions and labeled
values : array_like
An array containing the numerical values to insert into each region.
The value at location *n* will be inserted into the image where
``regions`` is *n+1*. This mis-match is caused by the fact that 0's
in the ``regions`` image is assumed to be the backgroung phase, while
pore index 0 is valid.
Notes
-----
This function assumes that the array of pore values are indexed starting
at location 0, while in the region image 0's indicate background phase and
the region indexing starts at 1. That is, region 1 corresponds to pore 0.
"""
values = sp.array(values).flatten()
if sp.size(values) != regions.max() + 1:
raise Exception('Number of values does not match number of regions')
im = sp.zeros_like(regions)
im = values[regions]
return im | python | def map_to_regions(regions, values):
r"""
Maps pore values from a network onto the image from which it was extracted
This function assumes that the pore numbering in the network has remained
unchanged from the region labels in the partitioned image.
Parameters
----------
regions : ND-array
An image of the pore space partitioned into regions and labeled
values : array_like
An array containing the numerical values to insert into each region.
The value at location *n* will be inserted into the image where
``regions`` is *n+1*. This mis-match is caused by the fact that 0's
in the ``regions`` image is assumed to be the backgroung phase, while
pore index 0 is valid.
Notes
-----
This function assumes that the array of pore values are indexed starting
at location 0, while in the region image 0's indicate background phase and
the region indexing starts at 1. That is, region 1 corresponds to pore 0.
"""
values = sp.array(values).flatten()
if sp.size(values) != regions.max() + 1:
raise Exception('Number of values does not match number of regions')
im = sp.zeros_like(regions)
im = values[regions]
return im | [
"def",
"map_to_regions",
"(",
"regions",
",",
"values",
")",
":",
"values",
"=",
"sp",
".",
"array",
"(",
"values",
")",
".",
"flatten",
"(",
")",
"if",
"sp",
".",
"size",
"(",
"values",
")",
"!=",
"regions",
".",
"max",
"(",
")",
"+",
"1",
":",
... | r"""
Maps pore values from a network onto the image from which it was extracted
This function assumes that the pore numbering in the network has remained
unchanged from the region labels in the partitioned image.
Parameters
----------
regions : ND-array
An image of the pore space partitioned into regions and labeled
values : array_like
An array containing the numerical values to insert into each region.
The value at location *n* will be inserted into the image where
``regions`` is *n+1*. This mis-match is caused by the fact that 0's
in the ``regions`` image is assumed to be the backgroung phase, while
pore index 0 is valid.
Notes
-----
This function assumes that the array of pore values are indexed starting
at location 0, while in the region image 0's indicate background phase and
the region indexing starts at 1. That is, region 1 corresponds to pore 0. | [
"r",
"Maps",
"pore",
"values",
"from",
"a",
"network",
"onto",
"the",
"image",
"from",
"which",
"it",
"was",
"extracted"
] | 1e13875b56787d8f5b7ffdabce8c4342c33ba9f8 | https://github.com/PMEAL/porespy/blob/1e13875b56787d8f5b7ffdabce8c4342c33ba9f8/porespy/networks/__funcs__.py#L12-L43 | train | 213,419 |
PMEAL/porespy | porespy/networks/__funcs__.py | label_boundary_cells | def label_boundary_cells(network=None, boundary_faces=None):
r"""
Takes 2D or 3D network and assign labels to boundary pores
Parameters
----------
network : dictionary
A dictionary as produced by the SNOW network extraction algorithms
containing edge/vertex, site/bond, node/link information.
boundary_faces : list of strings
The user can choose ‘left’, ‘right’, ‘top’, ‘bottom’, ‘front’ and
‘back’ face labels to assign boundary nodes. If no label is
assigned then all six faces will be selected as boundary nodes
automatically which can be trimmed later on based on user requirements.
Returns
-------
The same dictionar s pass ing, but containing boundary nodes labels. For
example network['pore.left'], network['pore.right'], network['pore.top'],
network['pore.bottom'] etc.
Notes
-----
The dictionary names use the OpenPNM convention so it may be converted
directly to an OpenPNM network object using the ``update`` command.
"""
f = boundary_faces
if f is not None:
coords = network['pore.coords']
condition = coords[~network['pore.boundary']]
dic = {'left': 0, 'right': 0, 'front': 1, 'back': 1,
'top': 2, 'bottom': 2}
if all(coords[:, 2] == 0):
dic['top'] = 1
dic['bottom'] = 1
for i in f:
if i in ['left', 'front', 'bottom']:
network['pore.{}'.format(i)] = (coords[:, dic[i]] <
min(condition[:, dic[i]]))
elif i in ['right', 'back', 'top']:
network['pore.{}'.format(i)] = (coords[:, dic[i]] >
max(condition[:, dic[i]]))
return network | python | def label_boundary_cells(network=None, boundary_faces=None):
r"""
Takes 2D or 3D network and assign labels to boundary pores
Parameters
----------
network : dictionary
A dictionary as produced by the SNOW network extraction algorithms
containing edge/vertex, site/bond, node/link information.
boundary_faces : list of strings
The user can choose ‘left’, ‘right’, ‘top’, ‘bottom’, ‘front’ and
‘back’ face labels to assign boundary nodes. If no label is
assigned then all six faces will be selected as boundary nodes
automatically which can be trimmed later on based on user requirements.
Returns
-------
The same dictionar s pass ing, but containing boundary nodes labels. For
example network['pore.left'], network['pore.right'], network['pore.top'],
network['pore.bottom'] etc.
Notes
-----
The dictionary names use the OpenPNM convention so it may be converted
directly to an OpenPNM network object using the ``update`` command.
"""
f = boundary_faces
if f is not None:
coords = network['pore.coords']
condition = coords[~network['pore.boundary']]
dic = {'left': 0, 'right': 0, 'front': 1, 'back': 1,
'top': 2, 'bottom': 2}
if all(coords[:, 2] == 0):
dic['top'] = 1
dic['bottom'] = 1
for i in f:
if i in ['left', 'front', 'bottom']:
network['pore.{}'.format(i)] = (coords[:, dic[i]] <
min(condition[:, dic[i]]))
elif i in ['right', 'back', 'top']:
network['pore.{}'.format(i)] = (coords[:, dic[i]] >
max(condition[:, dic[i]]))
return network | [
"def",
"label_boundary_cells",
"(",
"network",
"=",
"None",
",",
"boundary_faces",
"=",
"None",
")",
":",
"f",
"=",
"boundary_faces",
"if",
"f",
"is",
"not",
"None",
":",
"coords",
"=",
"network",
"[",
"'pore.coords'",
"]",
"condition",
"=",
"coords",
"[",... | r"""
Takes 2D or 3D network and assign labels to boundary pores
Parameters
----------
network : dictionary
A dictionary as produced by the SNOW network extraction algorithms
containing edge/vertex, site/bond, node/link information.
boundary_faces : list of strings
The user can choose ‘left’, ‘right’, ‘top’, ‘bottom’, ‘front’ and
‘back’ face labels to assign boundary nodes. If no label is
assigned then all six faces will be selected as boundary nodes
automatically which can be trimmed later on based on user requirements.
Returns
-------
The same dictionar s pass ing, but containing boundary nodes labels. For
example network['pore.left'], network['pore.right'], network['pore.top'],
network['pore.bottom'] etc.
Notes
-----
The dictionary names use the OpenPNM convention so it may be converted
directly to an OpenPNM network object using the ``update`` command. | [
"r",
"Takes",
"2D",
"or",
"3D",
"network",
"and",
"assign",
"labels",
"to",
"boundary",
"pores"
] | 1e13875b56787d8f5b7ffdabce8c4342c33ba9f8 | https://github.com/PMEAL/porespy/blob/1e13875b56787d8f5b7ffdabce8c4342c33ba9f8/porespy/networks/__funcs__.py#L444-L489 | train | 213,420 |
PMEAL/porespy | porespy/generators/__imgen__.py | insert_shape | def insert_shape(im, element, center=None, corner=None, value=1,
mode='overwrite'):
r"""
Inserts sub-image into a larger image at the specified location.
If the inserted image extends beyond the boundaries of the image it will
be cropped accordingly.
Parameters
----------
im : ND-array
The image into which the sub-image will be inserted
element : ND-array
The sub-image to insert
center : tuple
Coordinates indicating the position in the main image where the
inserted imaged will be centered. If ``center`` is given then
``corner`` cannot be specified. Note that ``center`` can only be
used if all dimensions of ``element`` are odd, otherwise the meaning
of center is not defined.
corner : tuple
Coordinates indicating the position in the main image where the
lower corner (i.e. [0, 0, 0]) of the inserted image should be anchored.
If ``corner`` is given then ``corner`` cannot be specified.
value : scalar
A scalar value to apply to the sub-image. The default is 1.
mode : string
If 'overwrite' (default) the inserted image replaces the values in the
main image. If 'overlay' the inserted image is added to the main
image. In both cases the inserted image is multiplied by ``value``
first.
Returns
-------
im : ND-array
A copy of ``im`` with the supplied element inserted.
"""
im = im.copy()
if im.ndim != element.ndim:
raise Exception('Image shape ' + str(im.shape)
+ ' and element shape ' + str(element.shape)
+ ' do not match')
s_im = []
s_el = []
if (center is not None) and (corner is None):
for dim in range(im.ndim):
r, d = sp.divmod(element.shape[dim], 2)
if d == 0:
raise Exception('Cannot specify center point when element ' +
'has one or more even dimension')
lower_im = sp.amax((center[dim] - r, 0))
upper_im = sp.amin((center[dim] + r + 1, im.shape[dim]))
s_im.append(slice(lower_im, upper_im))
lower_el = sp.amax((lower_im - center[dim] + r, 0))
upper_el = sp.amin((upper_im - center[dim] + r,
element.shape[dim]))
s_el.append(slice(lower_el, upper_el))
elif (corner is not None) and (center is None):
for dim in range(im.ndim):
L = int(element.shape[dim])
lower_im = sp.amax((corner[dim], 0))
upper_im = sp.amin((corner[dim] + L, im.shape[dim]))
s_im.append(slice(lower_im, upper_im))
lower_el = sp.amax((lower_im - corner[dim], 0))
upper_el = sp.amin((upper_im - corner[dim],
element.shape[dim]))
s_el.append(slice(min(lower_el, upper_el), upper_el))
else:
raise Exception('Cannot specify both corner and center')
if mode == 'overlay':
im[tuple(s_im)] = im[tuple(s_im)] + element[tuple(s_el)]*value
elif mode == 'overwrite':
im[tuple(s_im)] = element[tuple(s_el)]*value
else:
raise Exception('Invalid mode ' + mode)
return im | python | def insert_shape(im, element, center=None, corner=None, value=1,
mode='overwrite'):
r"""
Inserts sub-image into a larger image at the specified location.
If the inserted image extends beyond the boundaries of the image it will
be cropped accordingly.
Parameters
----------
im : ND-array
The image into which the sub-image will be inserted
element : ND-array
The sub-image to insert
center : tuple
Coordinates indicating the position in the main image where the
inserted imaged will be centered. If ``center`` is given then
``corner`` cannot be specified. Note that ``center`` can only be
used if all dimensions of ``element`` are odd, otherwise the meaning
of center is not defined.
corner : tuple
Coordinates indicating the position in the main image where the
lower corner (i.e. [0, 0, 0]) of the inserted image should be anchored.
If ``corner`` is given then ``corner`` cannot be specified.
value : scalar
A scalar value to apply to the sub-image. The default is 1.
mode : string
If 'overwrite' (default) the inserted image replaces the values in the
main image. If 'overlay' the inserted image is added to the main
image. In both cases the inserted image is multiplied by ``value``
first.
Returns
-------
im : ND-array
A copy of ``im`` with the supplied element inserted.
"""
im = im.copy()
if im.ndim != element.ndim:
raise Exception('Image shape ' + str(im.shape)
+ ' and element shape ' + str(element.shape)
+ ' do not match')
s_im = []
s_el = []
if (center is not None) and (corner is None):
for dim in range(im.ndim):
r, d = sp.divmod(element.shape[dim], 2)
if d == 0:
raise Exception('Cannot specify center point when element ' +
'has one or more even dimension')
lower_im = sp.amax((center[dim] - r, 0))
upper_im = sp.amin((center[dim] + r + 1, im.shape[dim]))
s_im.append(slice(lower_im, upper_im))
lower_el = sp.amax((lower_im - center[dim] + r, 0))
upper_el = sp.amin((upper_im - center[dim] + r,
element.shape[dim]))
s_el.append(slice(lower_el, upper_el))
elif (corner is not None) and (center is None):
for dim in range(im.ndim):
L = int(element.shape[dim])
lower_im = sp.amax((corner[dim], 0))
upper_im = sp.amin((corner[dim] + L, im.shape[dim]))
s_im.append(slice(lower_im, upper_im))
lower_el = sp.amax((lower_im - corner[dim], 0))
upper_el = sp.amin((upper_im - corner[dim],
element.shape[dim]))
s_el.append(slice(min(lower_el, upper_el), upper_el))
else:
raise Exception('Cannot specify both corner and center')
if mode == 'overlay':
im[tuple(s_im)] = im[tuple(s_im)] + element[tuple(s_el)]*value
elif mode == 'overwrite':
im[tuple(s_im)] = element[tuple(s_el)]*value
else:
raise Exception('Invalid mode ' + mode)
return im | [
"def",
"insert_shape",
"(",
"im",
",",
"element",
",",
"center",
"=",
"None",
",",
"corner",
"=",
"None",
",",
"value",
"=",
"1",
",",
"mode",
"=",
"'overwrite'",
")",
":",
"im",
"=",
"im",
".",
"copy",
"(",
")",
"if",
"im",
".",
"ndim",
"!=",
... | r"""
Inserts sub-image into a larger image at the specified location.
If the inserted image extends beyond the boundaries of the image it will
be cropped accordingly.
Parameters
----------
im : ND-array
The image into which the sub-image will be inserted
element : ND-array
The sub-image to insert
center : tuple
Coordinates indicating the position in the main image where the
inserted imaged will be centered. If ``center`` is given then
``corner`` cannot be specified. Note that ``center`` can only be
used if all dimensions of ``element`` are odd, otherwise the meaning
of center is not defined.
corner : tuple
Coordinates indicating the position in the main image where the
lower corner (i.e. [0, 0, 0]) of the inserted image should be anchored.
If ``corner`` is given then ``corner`` cannot be specified.
value : scalar
A scalar value to apply to the sub-image. The default is 1.
mode : string
If 'overwrite' (default) the inserted image replaces the values in the
main image. If 'overlay' the inserted image is added to the main
image. In both cases the inserted image is multiplied by ``value``
first.
Returns
-------
im : ND-array
A copy of ``im`` with the supplied element inserted. | [
"r",
"Inserts",
"sub",
"-",
"image",
"into",
"a",
"larger",
"image",
"at",
"the",
"specified",
"location",
"."
] | 1e13875b56787d8f5b7ffdabce8c4342c33ba9f8 | https://github.com/PMEAL/porespy/blob/1e13875b56787d8f5b7ffdabce8c4342c33ba9f8/porespy/generators/__imgen__.py#L10-L87 | train | 213,421 |
PMEAL/porespy | porespy/generators/__imgen__.py | bundle_of_tubes | def bundle_of_tubes(shape: List[int], spacing: int):
r"""
Create a 3D image of a bundle of tubes, in the form of a rectangular
plate with randomly sized holes through it.
Parameters
----------
shape : list
The size the image, with the 3rd dimension indicating the plate
thickness. If the 3rd dimension is not given then a thickness of
1 voxel is assumed.
spacing : scalar
The center to center distance of the holes. The hole sizes will be
randomly distributed between this values down to 3 voxels.
Returns
-------
image : ND-array
A boolean array with ``True`` values denoting the pore space
"""
shape = sp.array(shape)
if sp.size(shape) == 1:
shape = sp.full((3, ), int(shape))
if sp.size(shape) == 2:
shape = sp.hstack((shape, [1]))
temp = sp.zeros(shape=shape[:2])
Xi = sp.ceil(sp.linspace(spacing/2,
shape[0]-(spacing/2)-1,
int(shape[0]/spacing)))
Xi = sp.array(Xi, dtype=int)
Yi = sp.ceil(sp.linspace(spacing/2,
shape[1]-(spacing/2)-1,
int(shape[1]/spacing)))
Yi = sp.array(Yi, dtype=int)
temp[tuple(sp.meshgrid(Xi, Yi))] = 1
inds = sp.where(temp)
for i in range(len(inds[0])):
r = sp.random.randint(1, (spacing/2))
try:
s1 = slice(inds[0][i]-r, inds[0][i]+r+1)
s2 = slice(inds[1][i]-r, inds[1][i]+r+1)
temp[s1, s2] = ps_disk(r)
except ValueError:
odd_shape = sp.shape(temp[s1, s2])
temp[s1, s2] = ps_disk(r)[:odd_shape[0], :odd_shape[1]]
im = sp.broadcast_to(array=sp.atleast_3d(temp), shape=shape)
return im | python | def bundle_of_tubes(shape: List[int], spacing: int):
r"""
Create a 3D image of a bundle of tubes, in the form of a rectangular
plate with randomly sized holes through it.
Parameters
----------
shape : list
The size the image, with the 3rd dimension indicating the plate
thickness. If the 3rd dimension is not given then a thickness of
1 voxel is assumed.
spacing : scalar
The center to center distance of the holes. The hole sizes will be
randomly distributed between this values down to 3 voxels.
Returns
-------
image : ND-array
A boolean array with ``True`` values denoting the pore space
"""
shape = sp.array(shape)
if sp.size(shape) == 1:
shape = sp.full((3, ), int(shape))
if sp.size(shape) == 2:
shape = sp.hstack((shape, [1]))
temp = sp.zeros(shape=shape[:2])
Xi = sp.ceil(sp.linspace(spacing/2,
shape[0]-(spacing/2)-1,
int(shape[0]/spacing)))
Xi = sp.array(Xi, dtype=int)
Yi = sp.ceil(sp.linspace(spacing/2,
shape[1]-(spacing/2)-1,
int(shape[1]/spacing)))
Yi = sp.array(Yi, dtype=int)
temp[tuple(sp.meshgrid(Xi, Yi))] = 1
inds = sp.where(temp)
for i in range(len(inds[0])):
r = sp.random.randint(1, (spacing/2))
try:
s1 = slice(inds[0][i]-r, inds[0][i]+r+1)
s2 = slice(inds[1][i]-r, inds[1][i]+r+1)
temp[s1, s2] = ps_disk(r)
except ValueError:
odd_shape = sp.shape(temp[s1, s2])
temp[s1, s2] = ps_disk(r)[:odd_shape[0], :odd_shape[1]]
im = sp.broadcast_to(array=sp.atleast_3d(temp), shape=shape)
return im | [
"def",
"bundle_of_tubes",
"(",
"shape",
":",
"List",
"[",
"int",
"]",
",",
"spacing",
":",
"int",
")",
":",
"shape",
"=",
"sp",
".",
"array",
"(",
"shape",
")",
"if",
"sp",
".",
"size",
"(",
"shape",
")",
"==",
"1",
":",
"shape",
"=",
"sp",
"."... | r"""
Create a 3D image of a bundle of tubes, in the form of a rectangular
plate with randomly sized holes through it.
Parameters
----------
shape : list
The size the image, with the 3rd dimension indicating the plate
thickness. If the 3rd dimension is not given then a thickness of
1 voxel is assumed.
spacing : scalar
The center to center distance of the holes. The hole sizes will be
randomly distributed between this values down to 3 voxels.
Returns
-------
image : ND-array
A boolean array with ``True`` values denoting the pore space | [
"r",
"Create",
"a",
"3D",
"image",
"of",
"a",
"bundle",
"of",
"tubes",
"in",
"the",
"form",
"of",
"a",
"rectangular",
"plate",
"with",
"randomly",
"sized",
"holes",
"through",
"it",
"."
] | 1e13875b56787d8f5b7ffdabce8c4342c33ba9f8 | https://github.com/PMEAL/porespy/blob/1e13875b56787d8f5b7ffdabce8c4342c33ba9f8/porespy/generators/__imgen__.py#L195-L242 | train | 213,422 |
PMEAL/porespy | porespy/generators/__imgen__.py | polydisperse_spheres | def polydisperse_spheres(shape: List[int], porosity: float, dist,
nbins: int = 5, r_min: int = 5):
r"""
Create an image of randomly place, overlapping spheres with a distribution
of radii.
Parameters
----------
shape : list
The size of the image to generate in [Nx, Ny, Nz] where Ni is the
number of voxels in each direction. If shape is only 2D, then an
image of polydisperse disks is returns
porosity : scalar
The porosity of the image, defined as the number of void voxels
divided by the number of voxels in the image. The specified value
is only matched approximately, so it's suggested to check this value
after the image is generated.
dist : scipy.stats distribution object
This should be an initialized distribution chosen from the large number
of options in the ``scipy.stats`` submodule. For instance, a normal
distribution with a mean of 20 and a standard deviation of 10 can be
obtained with ``dist = scipy.stats.norm(loc=20, scale=10)``
nbins : scalar
The number of discrete sphere sizes that will be used to generate the
image. This function generates ``nbins`` images of monodisperse
spheres that span 0.05 and 0.95 of the possible values produced by the
provided distribution, then overlays them to get polydispersivity.
Returns
-------
image : ND-array
A boolean array with ``True`` values denoting the pore space
"""
shape = sp.array(shape)
if sp.size(shape) == 1:
shape = sp.full((3, ), int(shape))
Rs = dist.interval(sp.linspace(0.05, 0.95, nbins))
Rs = sp.vstack(Rs).T
Rs = (Rs[:-1] + Rs[1:])/2
Rs = sp.clip(Rs.flatten(), a_min=r_min, a_max=None)
phi_desired = 1 - (1 - porosity)/(len(Rs))
im = sp.ones(shape, dtype=bool)
for r in Rs:
phi_im = im.sum() / sp.prod(shape)
phi_corrected = 1 - (1 - phi_desired) / phi_im
temp = overlapping_spheres(shape=shape, radius=r, porosity=phi_corrected)
im = im * temp
return im | python | def polydisperse_spheres(shape: List[int], porosity: float, dist,
nbins: int = 5, r_min: int = 5):
r"""
Create an image of randomly place, overlapping spheres with a distribution
of radii.
Parameters
----------
shape : list
The size of the image to generate in [Nx, Ny, Nz] where Ni is the
number of voxels in each direction. If shape is only 2D, then an
image of polydisperse disks is returns
porosity : scalar
The porosity of the image, defined as the number of void voxels
divided by the number of voxels in the image. The specified value
is only matched approximately, so it's suggested to check this value
after the image is generated.
dist : scipy.stats distribution object
This should be an initialized distribution chosen from the large number
of options in the ``scipy.stats`` submodule. For instance, a normal
distribution with a mean of 20 and a standard deviation of 10 can be
obtained with ``dist = scipy.stats.norm(loc=20, scale=10)``
nbins : scalar
The number of discrete sphere sizes that will be used to generate the
image. This function generates ``nbins`` images of monodisperse
spheres that span 0.05 and 0.95 of the possible values produced by the
provided distribution, then overlays them to get polydispersivity.
Returns
-------
image : ND-array
A boolean array with ``True`` values denoting the pore space
"""
shape = sp.array(shape)
if sp.size(shape) == 1:
shape = sp.full((3, ), int(shape))
Rs = dist.interval(sp.linspace(0.05, 0.95, nbins))
Rs = sp.vstack(Rs).T
Rs = (Rs[:-1] + Rs[1:])/2
Rs = sp.clip(Rs.flatten(), a_min=r_min, a_max=None)
phi_desired = 1 - (1 - porosity)/(len(Rs))
im = sp.ones(shape, dtype=bool)
for r in Rs:
phi_im = im.sum() / sp.prod(shape)
phi_corrected = 1 - (1 - phi_desired) / phi_im
temp = overlapping_spheres(shape=shape, radius=r, porosity=phi_corrected)
im = im * temp
return im | [
"def",
"polydisperse_spheres",
"(",
"shape",
":",
"List",
"[",
"int",
"]",
",",
"porosity",
":",
"float",
",",
"dist",
",",
"nbins",
":",
"int",
"=",
"5",
",",
"r_min",
":",
"int",
"=",
"5",
")",
":",
"shape",
"=",
"sp",
".",
"array",
"(",
"shape... | r"""
Create an image of randomly place, overlapping spheres with a distribution
of radii.
Parameters
----------
shape : list
The size of the image to generate in [Nx, Ny, Nz] where Ni is the
number of voxels in each direction. If shape is only 2D, then an
image of polydisperse disks is returns
porosity : scalar
The porosity of the image, defined as the number of void voxels
divided by the number of voxels in the image. The specified value
is only matched approximately, so it's suggested to check this value
after the image is generated.
dist : scipy.stats distribution object
This should be an initialized distribution chosen from the large number
of options in the ``scipy.stats`` submodule. For instance, a normal
distribution with a mean of 20 and a standard deviation of 10 can be
obtained with ``dist = scipy.stats.norm(loc=20, scale=10)``
nbins : scalar
The number of discrete sphere sizes that will be used to generate the
image. This function generates ``nbins`` images of monodisperse
spheres that span 0.05 and 0.95 of the possible values produced by the
provided distribution, then overlays them to get polydispersivity.
Returns
-------
image : ND-array
A boolean array with ``True`` values denoting the pore space | [
"r",
"Create",
"an",
"image",
"of",
"randomly",
"place",
"overlapping",
"spheres",
"with",
"a",
"distribution",
"of",
"radii",
"."
] | 1e13875b56787d8f5b7ffdabce8c4342c33ba9f8 | https://github.com/PMEAL/porespy/blob/1e13875b56787d8f5b7ffdabce8c4342c33ba9f8/porespy/generators/__imgen__.py#L245-L295 | train | 213,423 |
PMEAL/porespy | porespy/generators/__imgen__.py | _get_Voronoi_edges | def _get_Voronoi_edges(vor):
r"""
Given a Voronoi object as produced by the scipy.spatial.Voronoi class,
this function calculates the start and end points of eeach edge in the
Voronoi diagram, in terms of the vertex indices used by the received
Voronoi object.
Parameters
----------
vor : scipy.spatial.Voronoi object
Returns
-------
A 2-by-N array of vertex indices, indicating the start and end points of
each vertex in the Voronoi diagram. These vertex indices can be used to
index straight into the ``vor.vertices`` array to get spatial positions.
"""
edges = [[], []]
for facet in vor.ridge_vertices:
# Create a closed cycle of vertices that define the facet
edges[0].extend(facet[:-1]+[facet[-1]])
edges[1].extend(facet[1:]+[facet[0]])
edges = sp.vstack(edges).T # Convert to scipy-friendly format
mask = sp.any(edges == -1, axis=1) # Identify edges at infinity
edges = edges[~mask] # Remove edges at infinity
edges = sp.sort(edges, axis=1) # Move all points to upper triangle
# Remove duplicate pairs
edges = edges[:, 0] + 1j*edges[:, 1] # Convert to imaginary
edges = sp.unique(edges) # Remove duplicates
edges = sp.vstack((sp.real(edges), sp.imag(edges))).T # Back to real
edges = sp.array(edges, dtype=int)
return edges | python | def _get_Voronoi_edges(vor):
r"""
Given a Voronoi object as produced by the scipy.spatial.Voronoi class,
this function calculates the start and end points of eeach edge in the
Voronoi diagram, in terms of the vertex indices used by the received
Voronoi object.
Parameters
----------
vor : scipy.spatial.Voronoi object
Returns
-------
A 2-by-N array of vertex indices, indicating the start and end points of
each vertex in the Voronoi diagram. These vertex indices can be used to
index straight into the ``vor.vertices`` array to get spatial positions.
"""
edges = [[], []]
for facet in vor.ridge_vertices:
# Create a closed cycle of vertices that define the facet
edges[0].extend(facet[:-1]+[facet[-1]])
edges[1].extend(facet[1:]+[facet[0]])
edges = sp.vstack(edges).T # Convert to scipy-friendly format
mask = sp.any(edges == -1, axis=1) # Identify edges at infinity
edges = edges[~mask] # Remove edges at infinity
edges = sp.sort(edges, axis=1) # Move all points to upper triangle
# Remove duplicate pairs
edges = edges[:, 0] + 1j*edges[:, 1] # Convert to imaginary
edges = sp.unique(edges) # Remove duplicates
edges = sp.vstack((sp.real(edges), sp.imag(edges))).T # Back to real
edges = sp.array(edges, dtype=int)
return edges | [
"def",
"_get_Voronoi_edges",
"(",
"vor",
")",
":",
"edges",
"=",
"[",
"[",
"]",
",",
"[",
"]",
"]",
"for",
"facet",
"in",
"vor",
".",
"ridge_vertices",
":",
"# Create a closed cycle of vertices that define the facet",
"edges",
"[",
"0",
"]",
".",
"extend",
"... | r"""
Given a Voronoi object as produced by the scipy.spatial.Voronoi class,
this function calculates the start and end points of eeach edge in the
Voronoi diagram, in terms of the vertex indices used by the received
Voronoi object.
Parameters
----------
vor : scipy.spatial.Voronoi object
Returns
-------
A 2-by-N array of vertex indices, indicating the start and end points of
each vertex in the Voronoi diagram. These vertex indices can be used to
index straight into the ``vor.vertices`` array to get spatial positions. | [
"r",
"Given",
"a",
"Voronoi",
"object",
"as",
"produced",
"by",
"the",
"scipy",
".",
"spatial",
".",
"Voronoi",
"class",
"this",
"function",
"calculates",
"the",
"start",
"and",
"end",
"points",
"of",
"eeach",
"edge",
"in",
"the",
"Voronoi",
"diagram",
"in... | 1e13875b56787d8f5b7ffdabce8c4342c33ba9f8 | https://github.com/PMEAL/porespy/blob/1e13875b56787d8f5b7ffdabce8c4342c33ba9f8/porespy/generators/__imgen__.py#L358-L389 | train | 213,424 |
PMEAL/porespy | porespy/generators/__imgen__.py | overlapping_spheres | def overlapping_spheres(shape: List[int], radius: int, porosity: float,
iter_max: int = 10, tol: float = 0.01):
r"""
Generate a packing of overlapping mono-disperse spheres
Parameters
----------
shape : list
The size of the image to generate in [Nx, Ny, Nz] where Ni is the
number of voxels in the i-th direction.
radius : scalar
The radius of spheres in the packing.
porosity : scalar
The porosity of the final image, accurate to the given tolerance.
iter_max : int
Maximum number of iterations for the iterative algorithm that improves
the porosity of the final image to match the given value.
tol : float
Tolerance for porosity of the final image compared to the given value.
Returns
-------
image : ND-array
A boolean array with ``True`` values denoting the pore space
Notes
-----
This method can also be used to generate a dispersion of hollows by
treating ``porosity`` as solid volume fraction and inverting the
returned image.
"""
shape = sp.array(shape)
if sp.size(shape) == 1:
shape = sp.full((3, ), int(shape))
ndim = (shape != 1).sum()
s_vol = ps_disk(radius).sum() if ndim == 2 else ps_ball(radius).sum()
bulk_vol = sp.prod(shape)
N = int(sp.ceil((1 - porosity)*bulk_vol/s_vol))
im = sp.random.random(size=shape)
# Helper functions for calculating porosity: phi = g(f(N))
f = lambda N: spim.distance_transform_edt(im > N/bulk_vol) < radius
g = lambda im: 1 - im.sum() / sp.prod(shape)
# # Newton's method for getting image porosity match the given
# w = 1.0 # Damping factor
# dN = 5 if ndim == 2 else 25 # Perturbation
# for i in range(iter_max):
# err = g(f(N)) - porosity
# d_err = (g(f(N+dN)) - g(f(N))) / dN
# if d_err == 0:
# break
# if abs(err) <= tol:
# break
# N2 = N - int(err/d_err) # xnew = xold - f/df
# N = w * N2 + (1-w) * N
# Bisection search: N is always undershoot (bc. of overlaps)
N_low, N_high = N, 4*N
for i in range(iter_max):
N = sp.mean([N_high, N_low], dtype=int)
err = g(f(N)) - porosity
if err > 0:
N_low = N
else:
N_high = N
if abs(err) <= tol:
break
return ~f(N) | python | def overlapping_spheres(shape: List[int], radius: int, porosity: float,
iter_max: int = 10, tol: float = 0.01):
r"""
Generate a packing of overlapping mono-disperse spheres
Parameters
----------
shape : list
The size of the image to generate in [Nx, Ny, Nz] where Ni is the
number of voxels in the i-th direction.
radius : scalar
The radius of spheres in the packing.
porosity : scalar
The porosity of the final image, accurate to the given tolerance.
iter_max : int
Maximum number of iterations for the iterative algorithm that improves
the porosity of the final image to match the given value.
tol : float
Tolerance for porosity of the final image compared to the given value.
Returns
-------
image : ND-array
A boolean array with ``True`` values denoting the pore space
Notes
-----
This method can also be used to generate a dispersion of hollows by
treating ``porosity`` as solid volume fraction and inverting the
returned image.
"""
shape = sp.array(shape)
if sp.size(shape) == 1:
shape = sp.full((3, ), int(shape))
ndim = (shape != 1).sum()
s_vol = ps_disk(radius).sum() if ndim == 2 else ps_ball(radius).sum()
bulk_vol = sp.prod(shape)
N = int(sp.ceil((1 - porosity)*bulk_vol/s_vol))
im = sp.random.random(size=shape)
# Helper functions for calculating porosity: phi = g(f(N))
f = lambda N: spim.distance_transform_edt(im > N/bulk_vol) < radius
g = lambda im: 1 - im.sum() / sp.prod(shape)
# # Newton's method for getting image porosity match the given
# w = 1.0 # Damping factor
# dN = 5 if ndim == 2 else 25 # Perturbation
# for i in range(iter_max):
# err = g(f(N)) - porosity
# d_err = (g(f(N+dN)) - g(f(N))) / dN
# if d_err == 0:
# break
# if abs(err) <= tol:
# break
# N2 = N - int(err/d_err) # xnew = xold - f/df
# N = w * N2 + (1-w) * N
# Bisection search: N is always undershoot (bc. of overlaps)
N_low, N_high = N, 4*N
for i in range(iter_max):
N = sp.mean([N_high, N_low], dtype=int)
err = g(f(N)) - porosity
if err > 0:
N_low = N
else:
N_high = N
if abs(err) <= tol:
break
return ~f(N) | [
"def",
"overlapping_spheres",
"(",
"shape",
":",
"List",
"[",
"int",
"]",
",",
"radius",
":",
"int",
",",
"porosity",
":",
"float",
",",
"iter_max",
":",
"int",
"=",
"10",
",",
"tol",
":",
"float",
"=",
"0.01",
")",
":",
"shape",
"=",
"sp",
".",
... | r"""
Generate a packing of overlapping mono-disperse spheres
Parameters
----------
shape : list
The size of the image to generate in [Nx, Ny, Nz] where Ni is the
number of voxels in the i-th direction.
radius : scalar
The radius of spheres in the packing.
porosity : scalar
The porosity of the final image, accurate to the given tolerance.
iter_max : int
Maximum number of iterations for the iterative algorithm that improves
the porosity of the final image to match the given value.
tol : float
Tolerance for porosity of the final image compared to the given value.
Returns
-------
image : ND-array
A boolean array with ``True`` values denoting the pore space
Notes
-----
This method can also be used to generate a dispersion of hollows by
treating ``porosity`` as solid volume fraction and inverting the
returned image. | [
"r",
"Generate",
"a",
"packing",
"of",
"overlapping",
"mono",
"-",
"disperse",
"spheres"
] | 1e13875b56787d8f5b7ffdabce8c4342c33ba9f8 | https://github.com/PMEAL/porespy/blob/1e13875b56787d8f5b7ffdabce8c4342c33ba9f8/porespy/generators/__imgen__.py#L499-L574 | train | 213,425 |
PMEAL/porespy | porespy/generators/__imgen__.py | generate_noise | def generate_noise(shape: List[int], porosity=None, octaves: int = 3,
frequency: int = 32, mode: str = 'simplex'):
r"""
Generate a field of spatially correlated random noise using the Perlin
noise algorithm, or the updated Simplex noise algorithm.
Parameters
----------
shape : array_like
The size of the image to generate in [Nx, Ny, Nz] where N is the
number of voxels.
porosity : float
If specified, this will threshold the image to the specified value
prior to returning. If no value is given (the default), then the
scalar noise field is returned.
octaves : int
Controls the *texture* of the noise, with higher octaves giving more
complex features over larger length scales.
frequency : array_like
Controls the relative sizes of the features, with higher frequencies
giving larger features. A scalar value will apply the same frequency
in all directions, given an isotropic field; a vector value will
apply the specified values along each axis to create anisotropy.
mode : string
Which noise algorithm to use, either ``'simplex'`` (default) or
``'perlin'``.
Returns
-------
image : ND-array
If porosity is given, then a boolean array with ``True`` values
denoting the pore space is returned. If not, then normally
distributed and spatially correlated randomly noise is returned.
Notes
-----
This method depends the a package called 'noise' which must be
compiled. It is included in the Anaconda distribution, or a platform
specific binary can be downloaded.
See Also
--------
porespy.tools.norm_to_uniform
"""
try:
import noise
except ModuleNotFoundError:
raise Exception("The noise package must be installed")
shape = sp.array(shape)
if sp.size(shape) == 1:
Lx, Ly, Lz = sp.full((3, ), int(shape))
elif len(shape) == 2:
Lx, Ly = shape
Lz = 1
elif len(shape) == 3:
Lx, Ly, Lz = shape
if mode == 'simplex':
f = noise.snoise3
else:
f = noise.pnoise3
frequency = sp.atleast_1d(frequency)
if frequency.size == 1:
freq = sp.full(shape=[3, ], fill_value=frequency[0])
elif frequency.size == 2:
freq = sp.concatenate((frequency, [1]))
else:
freq = sp.array(frequency)
im = sp.zeros(shape=[Lx, Ly, Lz], dtype=float)
for x in range(Lx):
for y in range(Ly):
for z in range(Lz):
im[x, y, z] = f(x=x/freq[0], y=y/freq[1], z=z/freq[2],
octaves=octaves)
im = im.squeeze()
if porosity:
im = norm_to_uniform(im, scale=[0, 1])
im = im < porosity
return im | python | def generate_noise(shape: List[int], porosity=None, octaves: int = 3,
frequency: int = 32, mode: str = 'simplex'):
r"""
Generate a field of spatially correlated random noise using the Perlin
noise algorithm, or the updated Simplex noise algorithm.
Parameters
----------
shape : array_like
The size of the image to generate in [Nx, Ny, Nz] where N is the
number of voxels.
porosity : float
If specified, this will threshold the image to the specified value
prior to returning. If no value is given (the default), then the
scalar noise field is returned.
octaves : int
Controls the *texture* of the noise, with higher octaves giving more
complex features over larger length scales.
frequency : array_like
Controls the relative sizes of the features, with higher frequencies
giving larger features. A scalar value will apply the same frequency
in all directions, given an isotropic field; a vector value will
apply the specified values along each axis to create anisotropy.
mode : string
Which noise algorithm to use, either ``'simplex'`` (default) or
``'perlin'``.
Returns
-------
image : ND-array
If porosity is given, then a boolean array with ``True`` values
denoting the pore space is returned. If not, then normally
distributed and spatially correlated randomly noise is returned.
Notes
-----
This method depends the a package called 'noise' which must be
compiled. It is included in the Anaconda distribution, or a platform
specific binary can be downloaded.
See Also
--------
porespy.tools.norm_to_uniform
"""
try:
import noise
except ModuleNotFoundError:
raise Exception("The noise package must be installed")
shape = sp.array(shape)
if sp.size(shape) == 1:
Lx, Ly, Lz = sp.full((3, ), int(shape))
elif len(shape) == 2:
Lx, Ly = shape
Lz = 1
elif len(shape) == 3:
Lx, Ly, Lz = shape
if mode == 'simplex':
f = noise.snoise3
else:
f = noise.pnoise3
frequency = sp.atleast_1d(frequency)
if frequency.size == 1:
freq = sp.full(shape=[3, ], fill_value=frequency[0])
elif frequency.size == 2:
freq = sp.concatenate((frequency, [1]))
else:
freq = sp.array(frequency)
im = sp.zeros(shape=[Lx, Ly, Lz], dtype=float)
for x in range(Lx):
for y in range(Ly):
for z in range(Lz):
im[x, y, z] = f(x=x/freq[0], y=y/freq[1], z=z/freq[2],
octaves=octaves)
im = im.squeeze()
if porosity:
im = norm_to_uniform(im, scale=[0, 1])
im = im < porosity
return im | [
"def",
"generate_noise",
"(",
"shape",
":",
"List",
"[",
"int",
"]",
",",
"porosity",
"=",
"None",
",",
"octaves",
":",
"int",
"=",
"3",
",",
"frequency",
":",
"int",
"=",
"32",
",",
"mode",
":",
"str",
"=",
"'simplex'",
")",
":",
"try",
":",
"im... | r"""
Generate a field of spatially correlated random noise using the Perlin
noise algorithm, or the updated Simplex noise algorithm.
Parameters
----------
shape : array_like
The size of the image to generate in [Nx, Ny, Nz] where N is the
number of voxels.
porosity : float
If specified, this will threshold the image to the specified value
prior to returning. If no value is given (the default), then the
scalar noise field is returned.
octaves : int
Controls the *texture* of the noise, with higher octaves giving more
complex features over larger length scales.
frequency : array_like
Controls the relative sizes of the features, with higher frequencies
giving larger features. A scalar value will apply the same frequency
in all directions, given an isotropic field; a vector value will
apply the specified values along each axis to create anisotropy.
mode : string
Which noise algorithm to use, either ``'simplex'`` (default) or
``'perlin'``.
Returns
-------
image : ND-array
If porosity is given, then a boolean array with ``True`` values
denoting the pore space is returned. If not, then normally
distributed and spatially correlated randomly noise is returned.
Notes
-----
This method depends the a package called 'noise' which must be
compiled. It is included in the Anaconda distribution, or a platform
specific binary can be downloaded.
See Also
--------
porespy.tools.norm_to_uniform | [
"r",
"Generate",
"a",
"field",
"of",
"spatially",
"correlated",
"random",
"noise",
"using",
"the",
"Perlin",
"noise",
"algorithm",
"or",
"the",
"updated",
"Simplex",
"noise",
"algorithm",
"."
] | 1e13875b56787d8f5b7ffdabce8c4342c33ba9f8 | https://github.com/PMEAL/porespy/blob/1e13875b56787d8f5b7ffdabce8c4342c33ba9f8/porespy/generators/__imgen__.py#L577-L659 | train | 213,426 |
PMEAL/porespy | porespy/generators/__imgen__.py | blobs | def blobs(shape: List[int], porosity: float = 0.5, blobiness: int = 1):
"""
Generates an image containing amorphous blobs
Parameters
----------
shape : list
The size of the image to generate in [Nx, Ny, Nz] where N is the
number of voxels
porosity : float
If specified, this will threshold the image to the specified value
prior to returning. If ``None`` is specified, then the scalar noise
field is converted to a uniform distribution and returned without
thresholding.
blobiness : int or list of ints(default = 1)
Controls the morphology of the blobs. A higher number results in
a larger number of small blobs. If a list is supplied then the blobs
are anisotropic.
Returns
-------
image : ND-array
A boolean array with ``True`` values denoting the pore space
See Also
--------
norm_to_uniform
"""
blobiness = sp.array(blobiness)
shape = sp.array(shape)
if sp.size(shape) == 1:
shape = sp.full((3, ), int(shape))
sigma = sp.mean(shape)/(40*blobiness)
im = sp.random.random(shape)
im = spim.gaussian_filter(im, sigma=sigma)
im = norm_to_uniform(im, scale=[0, 1])
if porosity:
im = im < porosity
return im | python | def blobs(shape: List[int], porosity: float = 0.5, blobiness: int = 1):
"""
Generates an image containing amorphous blobs
Parameters
----------
shape : list
The size of the image to generate in [Nx, Ny, Nz] where N is the
number of voxels
porosity : float
If specified, this will threshold the image to the specified value
prior to returning. If ``None`` is specified, then the scalar noise
field is converted to a uniform distribution and returned without
thresholding.
blobiness : int or list of ints(default = 1)
Controls the morphology of the blobs. A higher number results in
a larger number of small blobs. If a list is supplied then the blobs
are anisotropic.
Returns
-------
image : ND-array
A boolean array with ``True`` values denoting the pore space
See Also
--------
norm_to_uniform
"""
blobiness = sp.array(blobiness)
shape = sp.array(shape)
if sp.size(shape) == 1:
shape = sp.full((3, ), int(shape))
sigma = sp.mean(shape)/(40*blobiness)
im = sp.random.random(shape)
im = spim.gaussian_filter(im, sigma=sigma)
im = norm_to_uniform(im, scale=[0, 1])
if porosity:
im = im < porosity
return im | [
"def",
"blobs",
"(",
"shape",
":",
"List",
"[",
"int",
"]",
",",
"porosity",
":",
"float",
"=",
"0.5",
",",
"blobiness",
":",
"int",
"=",
"1",
")",
":",
"blobiness",
"=",
"sp",
".",
"array",
"(",
"blobiness",
")",
"shape",
"=",
"sp",
".",
"array"... | Generates an image containing amorphous blobs
Parameters
----------
shape : list
The size of the image to generate in [Nx, Ny, Nz] where N is the
number of voxels
porosity : float
If specified, this will threshold the image to the specified value
prior to returning. If ``None`` is specified, then the scalar noise
field is converted to a uniform distribution and returned without
thresholding.
blobiness : int or list of ints(default = 1)
Controls the morphology of the blobs. A higher number results in
a larger number of small blobs. If a list is supplied then the blobs
are anisotropic.
Returns
-------
image : ND-array
A boolean array with ``True`` values denoting the pore space
See Also
--------
norm_to_uniform | [
"Generates",
"an",
"image",
"containing",
"amorphous",
"blobs"
] | 1e13875b56787d8f5b7ffdabce8c4342c33ba9f8 | https://github.com/PMEAL/porespy/blob/1e13875b56787d8f5b7ffdabce8c4342c33ba9f8/porespy/generators/__imgen__.py#L662-L703 | train | 213,427 |
PMEAL/porespy | porespy/generators/__imgen__.py | cylinders | def cylinders(shape: List[int], radius: int, ncylinders: int,
phi_max: float = 0, theta_max: float = 90):
r"""
Generates a binary image of overlapping cylinders. This is a good
approximation of a fibrous mat.
Parameters
----------
shape : list
The size of the image to generate in [Nx, Ny, Nz] where N is the
number of voxels. 2D images are not permitted.
radius : scalar
The radius of the cylinders in voxels
ncylinders : scalar
The number of cylinders to add to the domain. Adjust this value to
control the final porosity, which is not easily specified since
cylinders overlap and intersect different fractions of the domain.
theta_max : scalar
A value between 0 and 90 that controls the amount of rotation *in the*
XY plane, with 0 meaning all fibers point in the X-direction, and
90 meaning they are randomly rotated about the Z axis by as much
as +/- 90 degrees.
phi_max : scalar
A value between 0 and 90 that controls the amount that the fibers
lie *out of* the XY plane, with 0 meaning all fibers lie in the XY
plane, and 90 meaning that fibers are randomly oriented out of the
plane by as much as +/- 90 degrees.
Returns
-------
image : ND-array
A boolean array with ``True`` values denoting the pore space
"""
shape = sp.array(shape)
if sp.size(shape) == 1:
shape = sp.full((3, ), int(shape))
elif sp.size(shape) == 2:
raise Exception("2D cylinders don't make sense")
R = sp.sqrt(sp.sum(sp.square(shape))).astype(int)
im = sp.zeros(shape)
# Adjust max angles to be between 0 and 90
if (phi_max > 90) or (phi_max < 0):
raise Exception('phi_max must be betwen 0 and 90')
if (theta_max > 90) or (theta_max < 0):
raise Exception('theta_max must be betwen 0 and 90')
n = 0
while n < ncylinders:
# Choose a random starting point in domain
x = sp.rand(3)*shape
# Chose a random phi and theta within given ranges
phi = (sp.pi/2 - sp.pi*sp.rand())*phi_max/90
theta = (sp.pi/2 - sp.pi*sp.rand())*theta_max/90
X0 = R*sp.array([sp.cos(phi)*sp.cos(theta),
sp.cos(phi)*sp.sin(theta),
sp.sin(phi)])
[X0, X1] = [x + X0, x - X0]
crds = line_segment(X0, X1)
lower = ~sp.any(sp.vstack(crds).T < [0, 0, 0], axis=1)
upper = ~sp.any(sp.vstack(crds).T >= shape, axis=1)
valid = upper*lower
if sp.any(valid):
im[crds[0][valid], crds[1][valid], crds[2][valid]] = 1
n += 1
im = sp.array(im, dtype=bool)
dt = spim.distance_transform_edt(~im) < radius
return ~dt | python | def cylinders(shape: List[int], radius: int, ncylinders: int,
phi_max: float = 0, theta_max: float = 90):
r"""
Generates a binary image of overlapping cylinders. This is a good
approximation of a fibrous mat.
Parameters
----------
shape : list
The size of the image to generate in [Nx, Ny, Nz] where N is the
number of voxels. 2D images are not permitted.
radius : scalar
The radius of the cylinders in voxels
ncylinders : scalar
The number of cylinders to add to the domain. Adjust this value to
control the final porosity, which is not easily specified since
cylinders overlap and intersect different fractions of the domain.
theta_max : scalar
A value between 0 and 90 that controls the amount of rotation *in the*
XY plane, with 0 meaning all fibers point in the X-direction, and
90 meaning they are randomly rotated about the Z axis by as much
as +/- 90 degrees.
phi_max : scalar
A value between 0 and 90 that controls the amount that the fibers
lie *out of* the XY plane, with 0 meaning all fibers lie in the XY
plane, and 90 meaning that fibers are randomly oriented out of the
plane by as much as +/- 90 degrees.
Returns
-------
image : ND-array
A boolean array with ``True`` values denoting the pore space
"""
shape = sp.array(shape)
if sp.size(shape) == 1:
shape = sp.full((3, ), int(shape))
elif sp.size(shape) == 2:
raise Exception("2D cylinders don't make sense")
R = sp.sqrt(sp.sum(sp.square(shape))).astype(int)
im = sp.zeros(shape)
# Adjust max angles to be between 0 and 90
if (phi_max > 90) or (phi_max < 0):
raise Exception('phi_max must be betwen 0 and 90')
if (theta_max > 90) or (theta_max < 0):
raise Exception('theta_max must be betwen 0 and 90')
n = 0
while n < ncylinders:
# Choose a random starting point in domain
x = sp.rand(3)*shape
# Chose a random phi and theta within given ranges
phi = (sp.pi/2 - sp.pi*sp.rand())*phi_max/90
theta = (sp.pi/2 - sp.pi*sp.rand())*theta_max/90
X0 = R*sp.array([sp.cos(phi)*sp.cos(theta),
sp.cos(phi)*sp.sin(theta),
sp.sin(phi)])
[X0, X1] = [x + X0, x - X0]
crds = line_segment(X0, X1)
lower = ~sp.any(sp.vstack(crds).T < [0, 0, 0], axis=1)
upper = ~sp.any(sp.vstack(crds).T >= shape, axis=1)
valid = upper*lower
if sp.any(valid):
im[crds[0][valid], crds[1][valid], crds[2][valid]] = 1
n += 1
im = sp.array(im, dtype=bool)
dt = spim.distance_transform_edt(~im) < radius
return ~dt | [
"def",
"cylinders",
"(",
"shape",
":",
"List",
"[",
"int",
"]",
",",
"radius",
":",
"int",
",",
"ncylinders",
":",
"int",
",",
"phi_max",
":",
"float",
"=",
"0",
",",
"theta_max",
":",
"float",
"=",
"90",
")",
":",
"shape",
"=",
"sp",
".",
"array... | r"""
Generates a binary image of overlapping cylinders. This is a good
approximation of a fibrous mat.
Parameters
----------
shape : list
The size of the image to generate in [Nx, Ny, Nz] where N is the
number of voxels. 2D images are not permitted.
radius : scalar
The radius of the cylinders in voxels
ncylinders : scalar
The number of cylinders to add to the domain. Adjust this value to
control the final porosity, which is not easily specified since
cylinders overlap and intersect different fractions of the domain.
theta_max : scalar
A value between 0 and 90 that controls the amount of rotation *in the*
XY plane, with 0 meaning all fibers point in the X-direction, and
90 meaning they are randomly rotated about the Z axis by as much
as +/- 90 degrees.
phi_max : scalar
A value between 0 and 90 that controls the amount that the fibers
lie *out of* the XY plane, with 0 meaning all fibers lie in the XY
plane, and 90 meaning that fibers are randomly oriented out of the
plane by as much as +/- 90 degrees.
Returns
-------
image : ND-array
A boolean array with ``True`` values denoting the pore space | [
"r",
"Generates",
"a",
"binary",
"image",
"of",
"overlapping",
"cylinders",
".",
"This",
"is",
"a",
"good",
"approximation",
"of",
"a",
"fibrous",
"mat",
"."
] | 1e13875b56787d8f5b7ffdabce8c4342c33ba9f8 | https://github.com/PMEAL/porespy/blob/1e13875b56787d8f5b7ffdabce8c4342c33ba9f8/porespy/generators/__imgen__.py#L706-L771 | train | 213,428 |
PMEAL/porespy | porespy/generators/__imgen__.py | line_segment | def line_segment(X0, X1):
r"""
Calculate the voxel coordinates of a straight line between the two given
end points
Parameters
----------
X0 and X1 : array_like
The [x, y] or [x, y, z] coordinates of the start and end points of
the line.
Returns
-------
coords : list of lists
A list of lists containing the X, Y, and Z coordinates of all voxels
that should be drawn between the start and end points to create a solid
line.
"""
X0 = sp.around(X0).astype(int)
X1 = sp.around(X1).astype(int)
if len(X0) == 3:
L = sp.amax(sp.absolute([[X1[0]-X0[0]], [X1[1]-X0[1]], [X1[2]-X0[2]]])) + 1
x = sp.rint(sp.linspace(X0[0], X1[0], L)).astype(int)
y = sp.rint(sp.linspace(X0[1], X1[1], L)).astype(int)
z = sp.rint(sp.linspace(X0[2], X1[2], L)).astype(int)
return [x, y, z]
else:
L = sp.amax(sp.absolute([[X1[0]-X0[0]], [X1[1]-X0[1]]])) + 1
x = sp.rint(sp.linspace(X0[0], X1[0], L)).astype(int)
y = sp.rint(sp.linspace(X0[1], X1[1], L)).astype(int)
return [x, y] | python | def line_segment(X0, X1):
r"""
Calculate the voxel coordinates of a straight line between the two given
end points
Parameters
----------
X0 and X1 : array_like
The [x, y] or [x, y, z] coordinates of the start and end points of
the line.
Returns
-------
coords : list of lists
A list of lists containing the X, Y, and Z coordinates of all voxels
that should be drawn between the start and end points to create a solid
line.
"""
X0 = sp.around(X0).astype(int)
X1 = sp.around(X1).astype(int)
if len(X0) == 3:
L = sp.amax(sp.absolute([[X1[0]-X0[0]], [X1[1]-X0[1]], [X1[2]-X0[2]]])) + 1
x = sp.rint(sp.linspace(X0[0], X1[0], L)).astype(int)
y = sp.rint(sp.linspace(X0[1], X1[1], L)).astype(int)
z = sp.rint(sp.linspace(X0[2], X1[2], L)).astype(int)
return [x, y, z]
else:
L = sp.amax(sp.absolute([[X1[0]-X0[0]], [X1[1]-X0[1]]])) + 1
x = sp.rint(sp.linspace(X0[0], X1[0], L)).astype(int)
y = sp.rint(sp.linspace(X0[1], X1[1], L)).astype(int)
return [x, y] | [
"def",
"line_segment",
"(",
"X0",
",",
"X1",
")",
":",
"X0",
"=",
"sp",
".",
"around",
"(",
"X0",
")",
".",
"astype",
"(",
"int",
")",
"X1",
"=",
"sp",
".",
"around",
"(",
"X1",
")",
".",
"astype",
"(",
"int",
")",
"if",
"len",
"(",
"X0",
"... | r"""
Calculate the voxel coordinates of a straight line between the two given
end points
Parameters
----------
X0 and X1 : array_like
The [x, y] or [x, y, z] coordinates of the start and end points of
the line.
Returns
-------
coords : list of lists
A list of lists containing the X, Y, and Z coordinates of all voxels
that should be drawn between the start and end points to create a solid
line. | [
"r",
"Calculate",
"the",
"voxel",
"coordinates",
"of",
"a",
"straight",
"line",
"between",
"the",
"two",
"given",
"end",
"points"
] | 1e13875b56787d8f5b7ffdabce8c4342c33ba9f8 | https://github.com/PMEAL/porespy/blob/1e13875b56787d8f5b7ffdabce8c4342c33ba9f8/porespy/generators/__imgen__.py#L774-L804 | train | 213,429 |
PMEAL/porespy | porespy/generators/__imgen__.py | _remove_edge | def _remove_edge(im, r):
r'''
Fill in the edges of the input image.
Used by RSA to ensure that no elements are placed too close to the edge.
'''
edge = sp.ones_like(im)
if len(im.shape) == 2:
sx, sy = im.shape
edge[r:sx-r, r:sy-r] = im[r:sx-r, r:sy-r]
else:
sx, sy, sz = im.shape
edge[r:sx-r, r:sy-r, r:sz-r] = im[r:sx-r, r:sy-r, r:sz-r]
return edge | python | def _remove_edge(im, r):
r'''
Fill in the edges of the input image.
Used by RSA to ensure that no elements are placed too close to the edge.
'''
edge = sp.ones_like(im)
if len(im.shape) == 2:
sx, sy = im.shape
edge[r:sx-r, r:sy-r] = im[r:sx-r, r:sy-r]
else:
sx, sy, sz = im.shape
edge[r:sx-r, r:sy-r, r:sz-r] = im[r:sx-r, r:sy-r, r:sz-r]
return edge | [
"def",
"_remove_edge",
"(",
"im",
",",
"r",
")",
":",
"edge",
"=",
"sp",
".",
"ones_like",
"(",
"im",
")",
"if",
"len",
"(",
"im",
".",
"shape",
")",
"==",
"2",
":",
"sx",
",",
"sy",
"=",
"im",
".",
"shape",
"edge",
"[",
"r",
":",
"sx",
"-"... | r'''
Fill in the edges of the input image.
Used by RSA to ensure that no elements are placed too close to the edge. | [
"r",
"Fill",
"in",
"the",
"edges",
"of",
"the",
"input",
"image",
".",
"Used",
"by",
"RSA",
"to",
"ensure",
"that",
"no",
"elements",
"are",
"placed",
"too",
"close",
"to",
"the",
"edge",
"."
] | 1e13875b56787d8f5b7ffdabce8c4342c33ba9f8 | https://github.com/PMEAL/porespy/blob/1e13875b56787d8f5b7ffdabce8c4342c33ba9f8/porespy/generators/__imgen__.py#L878-L890 | train | 213,430 |
PMEAL/porespy | porespy/tools/__funcs__.py | align_image_with_openpnm | def align_image_with_openpnm(im):
r"""
Rotates an image to agree with the coordinates used in OpenPNM. It is
unclear why they are not in agreement to start with. This is necessary
for overlaying the image and the network in Paraview.
Parameters
----------
im : ND-array
The image to be rotated. Can be the Boolean image of the pore space or
any other image of interest.
Returns
-------
image : ND-array
Returns a copy of ``im`` rotated accordingly.
"""
if im.ndim != im.squeeze().ndim:
warnings.warn('Input image conains a singleton axis:' + str(im.shape) +
' Reduce dimensionality with np.squeeze(im) to avoid' +
' unexpected behavior.')
im = sp.copy(im)
if im.ndim == 2:
im = (sp.swapaxes(im, 1, 0))
im = im[-1::-1, :]
elif im.ndim == 3:
im = (sp.swapaxes(im, 2, 0))
im = im[:, -1::-1, :]
return im | python | def align_image_with_openpnm(im):
r"""
Rotates an image to agree with the coordinates used in OpenPNM. It is
unclear why they are not in agreement to start with. This is necessary
for overlaying the image and the network in Paraview.
Parameters
----------
im : ND-array
The image to be rotated. Can be the Boolean image of the pore space or
any other image of interest.
Returns
-------
image : ND-array
Returns a copy of ``im`` rotated accordingly.
"""
if im.ndim != im.squeeze().ndim:
warnings.warn('Input image conains a singleton axis:' + str(im.shape) +
' Reduce dimensionality with np.squeeze(im) to avoid' +
' unexpected behavior.')
im = sp.copy(im)
if im.ndim == 2:
im = (sp.swapaxes(im, 1, 0))
im = im[-1::-1, :]
elif im.ndim == 3:
im = (sp.swapaxes(im, 2, 0))
im = im[:, -1::-1, :]
return im | [
"def",
"align_image_with_openpnm",
"(",
"im",
")",
":",
"if",
"im",
".",
"ndim",
"!=",
"im",
".",
"squeeze",
"(",
")",
".",
"ndim",
":",
"warnings",
".",
"warn",
"(",
"'Input image conains a singleton axis:'",
"+",
"str",
"(",
"im",
".",
"shape",
")",
"+... | r"""
Rotates an image to agree with the coordinates used in OpenPNM. It is
unclear why they are not in agreement to start with. This is necessary
for overlaying the image and the network in Paraview.
Parameters
----------
im : ND-array
The image to be rotated. Can be the Boolean image of the pore space or
any other image of interest.
Returns
-------
image : ND-array
Returns a copy of ``im`` rotated accordingly. | [
"r",
"Rotates",
"an",
"image",
"to",
"agree",
"with",
"the",
"coordinates",
"used",
"in",
"OpenPNM",
".",
"It",
"is",
"unclear",
"why",
"they",
"are",
"not",
"in",
"agreement",
"to",
"start",
"with",
".",
"This",
"is",
"necessary",
"for",
"overlaying",
"... | 1e13875b56787d8f5b7ffdabce8c4342c33ba9f8 | https://github.com/PMEAL/porespy/blob/1e13875b56787d8f5b7ffdabce8c4342c33ba9f8/porespy/tools/__funcs__.py#L11-L39 | train | 213,431 |
PMEAL/porespy | porespy/tools/__funcs__.py | fftmorphology | def fftmorphology(im, strel, mode='opening'):
r"""
Perform morphological operations on binary images using fft approach for
improved performance
Parameters
----------
im : nd-array
The binary image on which to perform the morphological operation
strel : nd-array
The structuring element to use. Must have the same dims as ``im``.
mode : string
The type of operation to perform. Options are 'dilation', 'erosion',
'opening' and 'closing'.
Returns
-------
image : ND-array
A copy of the image with the specified moropholgical operation applied
using the fft-based methods available in scipy.fftconvolve.
Notes
-----
This function uses ``scipy.signal.fftconvolve`` which *can* be more than
10x faster than the standard binary morphology operation in
``scipy.ndimage``. This speed up may not always be realized, depending
on the scipy distribution used.
Examples
--------
>>> import porespy as ps
>>> from numpy import array_equal
>>> import scipy.ndimage as spim
>>> from skimage.morphology import disk
>>> im = ps.generators.blobs(shape=[100, 100], porosity=0.8)
Check that erosion, dilation, opening, and closing are all the same as
the ``scipy.ndimage`` functions:
>>> result = ps.filters.fftmorphology(im, strel=disk(5), mode='erosion')
>>> temp = spim.binary_erosion(im, structure=disk(5))
>>> array_equal(result, temp)
True
>>> result = ps.filters.fftmorphology(im, strel=disk(5), mode='dilation')
>>> temp = spim.binary_dilation(im, structure=disk(5))
>>> array_equal(result, temp)
True
>>> result = ps.filters.fftmorphology(im, strel=disk(5), mode='opening')
>>> temp = spim.binary_opening(im, structure=disk(5))
>>> array_equal(result, temp)
True
>>> result = ps.filters.fftmorphology(im, strel=disk(5), mode='closing')
>>> temp = spim.binary_closing(im, structure=disk(5))
>>> array_equal(result, temp)
True
"""
def erode(im, strel):
t = fftconvolve(im, strel, mode='same') > (strel.sum() - 0.1)
return t
def dilate(im, strel):
t = fftconvolve(im, strel, mode='same') > 0.1
return t
if im.ndim != im.squeeze().ndim:
warnings.warn('Input image conains a singleton axis:' + str(im.shape) +
' Reduce dimensionality with np.squeeze(im) to avoid' +
' unexpected behavior.')
# Perform erosion and dilation
# The array must be padded with 0's so it works correctly at edges
temp = sp.pad(array=im, pad_width=1, mode='constant', constant_values=0)
if mode.startswith('ero'):
temp = erode(temp, strel)
if mode.startswith('dila'):
temp = dilate(temp, strel)
# Remove padding from resulting image
if im.ndim == 2:
result = temp[1:-1, 1:-1]
elif im.ndim == 3:
result = temp[1:-1, 1:-1, 1:-1]
# Perform opening and closing
if mode.startswith('open'):
temp = fftmorphology(im=im, strel=strel, mode='erosion')
result = fftmorphology(im=temp, strel=strel, mode='dilation')
if mode.startswith('clos'):
temp = fftmorphology(im=im, strel=strel, mode='dilation')
result = fftmorphology(im=temp, strel=strel, mode='erosion')
return result | python | def fftmorphology(im, strel, mode='opening'):
r"""
Perform morphological operations on binary images using fft approach for
improved performance
Parameters
----------
im : nd-array
The binary image on which to perform the morphological operation
strel : nd-array
The structuring element to use. Must have the same dims as ``im``.
mode : string
The type of operation to perform. Options are 'dilation', 'erosion',
'opening' and 'closing'.
Returns
-------
image : ND-array
A copy of the image with the specified moropholgical operation applied
using the fft-based methods available in scipy.fftconvolve.
Notes
-----
This function uses ``scipy.signal.fftconvolve`` which *can* be more than
10x faster than the standard binary morphology operation in
``scipy.ndimage``. This speed up may not always be realized, depending
on the scipy distribution used.
Examples
--------
>>> import porespy as ps
>>> from numpy import array_equal
>>> import scipy.ndimage as spim
>>> from skimage.morphology import disk
>>> im = ps.generators.blobs(shape=[100, 100], porosity=0.8)
Check that erosion, dilation, opening, and closing are all the same as
the ``scipy.ndimage`` functions:
>>> result = ps.filters.fftmorphology(im, strel=disk(5), mode='erosion')
>>> temp = spim.binary_erosion(im, structure=disk(5))
>>> array_equal(result, temp)
True
>>> result = ps.filters.fftmorphology(im, strel=disk(5), mode='dilation')
>>> temp = spim.binary_dilation(im, structure=disk(5))
>>> array_equal(result, temp)
True
>>> result = ps.filters.fftmorphology(im, strel=disk(5), mode='opening')
>>> temp = spim.binary_opening(im, structure=disk(5))
>>> array_equal(result, temp)
True
>>> result = ps.filters.fftmorphology(im, strel=disk(5), mode='closing')
>>> temp = spim.binary_closing(im, structure=disk(5))
>>> array_equal(result, temp)
True
"""
def erode(im, strel):
t = fftconvolve(im, strel, mode='same') > (strel.sum() - 0.1)
return t
def dilate(im, strel):
t = fftconvolve(im, strel, mode='same') > 0.1
return t
if im.ndim != im.squeeze().ndim:
warnings.warn('Input image conains a singleton axis:' + str(im.shape) +
' Reduce dimensionality with np.squeeze(im) to avoid' +
' unexpected behavior.')
# Perform erosion and dilation
# The array must be padded with 0's so it works correctly at edges
temp = sp.pad(array=im, pad_width=1, mode='constant', constant_values=0)
if mode.startswith('ero'):
temp = erode(temp, strel)
if mode.startswith('dila'):
temp = dilate(temp, strel)
# Remove padding from resulting image
if im.ndim == 2:
result = temp[1:-1, 1:-1]
elif im.ndim == 3:
result = temp[1:-1, 1:-1, 1:-1]
# Perform opening and closing
if mode.startswith('open'):
temp = fftmorphology(im=im, strel=strel, mode='erosion')
result = fftmorphology(im=temp, strel=strel, mode='dilation')
if mode.startswith('clos'):
temp = fftmorphology(im=im, strel=strel, mode='dilation')
result = fftmorphology(im=temp, strel=strel, mode='erosion')
return result | [
"def",
"fftmorphology",
"(",
"im",
",",
"strel",
",",
"mode",
"=",
"'opening'",
")",
":",
"def",
"erode",
"(",
"im",
",",
"strel",
")",
":",
"t",
"=",
"fftconvolve",
"(",
"im",
",",
"strel",
",",
"mode",
"=",
"'same'",
")",
">",
"(",
"strel",
"."... | r"""
Perform morphological operations on binary images using fft approach for
improved performance
Parameters
----------
im : nd-array
The binary image on which to perform the morphological operation
strel : nd-array
The structuring element to use. Must have the same dims as ``im``.
mode : string
The type of operation to perform. Options are 'dilation', 'erosion',
'opening' and 'closing'.
Returns
-------
image : ND-array
A copy of the image with the specified moropholgical operation applied
using the fft-based methods available in scipy.fftconvolve.
Notes
-----
This function uses ``scipy.signal.fftconvolve`` which *can* be more than
10x faster than the standard binary morphology operation in
``scipy.ndimage``. This speed up may not always be realized, depending
on the scipy distribution used.
Examples
--------
>>> import porespy as ps
>>> from numpy import array_equal
>>> import scipy.ndimage as spim
>>> from skimage.morphology import disk
>>> im = ps.generators.blobs(shape=[100, 100], porosity=0.8)
Check that erosion, dilation, opening, and closing are all the same as
the ``scipy.ndimage`` functions:
>>> result = ps.filters.fftmorphology(im, strel=disk(5), mode='erosion')
>>> temp = spim.binary_erosion(im, structure=disk(5))
>>> array_equal(result, temp)
True
>>> result = ps.filters.fftmorphology(im, strel=disk(5), mode='dilation')
>>> temp = spim.binary_dilation(im, structure=disk(5))
>>> array_equal(result, temp)
True
>>> result = ps.filters.fftmorphology(im, strel=disk(5), mode='opening')
>>> temp = spim.binary_opening(im, structure=disk(5))
>>> array_equal(result, temp)
True
>>> result = ps.filters.fftmorphology(im, strel=disk(5), mode='closing')
>>> temp = spim.binary_closing(im, structure=disk(5))
>>> array_equal(result, temp)
True | [
"r",
"Perform",
"morphological",
"operations",
"on",
"binary",
"images",
"using",
"fft",
"approach",
"for",
"improved",
"performance"
] | 1e13875b56787d8f5b7ffdabce8c4342c33ba9f8 | https://github.com/PMEAL/porespy/blob/1e13875b56787d8f5b7ffdabce8c4342c33ba9f8/porespy/tools/__funcs__.py#L42-L140 | train | 213,432 |
PMEAL/porespy | porespy/tools/__funcs__.py | subdivide | def subdivide(im, divs=2):
r"""
Returns slices into an image describing the specified number of sub-arrays.
This function is useful for performing operations on smaller images for
memory or speed. Note that for most typical operations this will NOT work,
since the image borders would cause artifacts (e.g. ``distance_transform``)
Parameters
----------
im : ND-array
The image of the porous media
divs : scalar or array_like
The number of sub-divisions to create in each axis of the image. If a
scalar is given it is assumed this value applies in all dimensions.
Returns
-------
slices : 1D-array
A 1-D array containing slice objects for indexing into ``im`` that
extract the sub-divided arrays.
Notes
-----
This method uses the
`array_split package <https://github.com/array-split/array_split>`_ which
offers the same functionality as the ``split`` method of Numpy's ND-array,
but supports the splitting multidimensional arrays in all dimensions.
Examples
--------
>>> import porespy as ps
>>> import matplotlib.pyplot as plt
>>> im = ps.generators.blobs(shape=[200, 200])
>>> s = ps.tools.subdivide(im, divs=[2, 2])
``s`` contains an array with the shape given by ``divs``. To access the
first and last quadrants of ``im`` use:
>>> print(im[tuple(s[0, 0])].shape)
(100, 100)
>>> print(im[tuple(s[1, 1])].shape)
(100, 100)
It can be easier to index the array with the slices by applying ``flatten``
first:
>>> s_flat = s.flatten()
>>> for i in s_flat:
... print(im[i].shape)
(100, 100)
(100, 100)
(100, 100)
(100, 100)
"""
# Expand scalar divs
if isinstance(divs, int):
divs = [divs for i in range(im.ndim)]
s = shape_split(im.shape, axis=divs)
return s | python | def subdivide(im, divs=2):
r"""
Returns slices into an image describing the specified number of sub-arrays.
This function is useful for performing operations on smaller images for
memory or speed. Note that for most typical operations this will NOT work,
since the image borders would cause artifacts (e.g. ``distance_transform``)
Parameters
----------
im : ND-array
The image of the porous media
divs : scalar or array_like
The number of sub-divisions to create in each axis of the image. If a
scalar is given it is assumed this value applies in all dimensions.
Returns
-------
slices : 1D-array
A 1-D array containing slice objects for indexing into ``im`` that
extract the sub-divided arrays.
Notes
-----
This method uses the
`array_split package <https://github.com/array-split/array_split>`_ which
offers the same functionality as the ``split`` method of Numpy's ND-array,
but supports the splitting multidimensional arrays in all dimensions.
Examples
--------
>>> import porespy as ps
>>> import matplotlib.pyplot as plt
>>> im = ps.generators.blobs(shape=[200, 200])
>>> s = ps.tools.subdivide(im, divs=[2, 2])
``s`` contains an array with the shape given by ``divs``. To access the
first and last quadrants of ``im`` use:
>>> print(im[tuple(s[0, 0])].shape)
(100, 100)
>>> print(im[tuple(s[1, 1])].shape)
(100, 100)
It can be easier to index the array with the slices by applying ``flatten``
first:
>>> s_flat = s.flatten()
>>> for i in s_flat:
... print(im[i].shape)
(100, 100)
(100, 100)
(100, 100)
(100, 100)
"""
# Expand scalar divs
if isinstance(divs, int):
divs = [divs for i in range(im.ndim)]
s = shape_split(im.shape, axis=divs)
return s | [
"def",
"subdivide",
"(",
"im",
",",
"divs",
"=",
"2",
")",
":",
"# Expand scalar divs",
"if",
"isinstance",
"(",
"divs",
",",
"int",
")",
":",
"divs",
"=",
"[",
"divs",
"for",
"i",
"in",
"range",
"(",
"im",
".",
"ndim",
")",
"]",
"s",
"=",
"shape... | r"""
Returns slices into an image describing the specified number of sub-arrays.
This function is useful for performing operations on smaller images for
memory or speed. Note that for most typical operations this will NOT work,
since the image borders would cause artifacts (e.g. ``distance_transform``)
Parameters
----------
im : ND-array
The image of the porous media
divs : scalar or array_like
The number of sub-divisions to create in each axis of the image. If a
scalar is given it is assumed this value applies in all dimensions.
Returns
-------
slices : 1D-array
A 1-D array containing slice objects for indexing into ``im`` that
extract the sub-divided arrays.
Notes
-----
This method uses the
`array_split package <https://github.com/array-split/array_split>`_ which
offers the same functionality as the ``split`` method of Numpy's ND-array,
but supports the splitting multidimensional arrays in all dimensions.
Examples
--------
>>> import porespy as ps
>>> import matplotlib.pyplot as plt
>>> im = ps.generators.blobs(shape=[200, 200])
>>> s = ps.tools.subdivide(im, divs=[2, 2])
``s`` contains an array with the shape given by ``divs``. To access the
first and last quadrants of ``im`` use:
>>> print(im[tuple(s[0, 0])].shape)
(100, 100)
>>> print(im[tuple(s[1, 1])].shape)
(100, 100)
It can be easier to index the array with the slices by applying ``flatten``
first:
>>> s_flat = s.flatten()
>>> for i in s_flat:
... print(im[i].shape)
(100, 100)
(100, 100)
(100, 100)
(100, 100) | [
"r",
"Returns",
"slices",
"into",
"an",
"image",
"describing",
"the",
"specified",
"number",
"of",
"sub",
"-",
"arrays",
"."
] | 1e13875b56787d8f5b7ffdabce8c4342c33ba9f8 | https://github.com/PMEAL/porespy/blob/1e13875b56787d8f5b7ffdabce8c4342c33ba9f8/porespy/tools/__funcs__.py#L143-L201 | train | 213,433 |
PMEAL/porespy | porespy/tools/__funcs__.py | bbox_to_slices | def bbox_to_slices(bbox):
r"""
Given a tuple containing bounding box coordinates, return a tuple of slice
objects.
A bounding box in the form of a straight list is returned by several
functions in skimage, but these cannot be used to direct index into an
image. This function returns a tuples of slices can be, such as:
``im[bbox_to_slices([xmin, ymin, xmax, ymax])]``.
Parameters
----------
bbox : tuple of ints
The bounding box indices in the form (``xmin``, ``ymin``, ``zmin``,
``xmax``, ``ymax``, ``zmax``). For a 2D image, simply omit the
``zmin`` and ``zmax`` entries.
Returns
-------
slices : tuple
A tuple of slice objects that can be used to directly index into a
larger image.
"""
if len(bbox) == 4:
ret = (slice(bbox[0], bbox[2]),
slice(bbox[1], bbox[3]))
else:
ret = (slice(bbox[0], bbox[3]),
slice(bbox[1], bbox[4]),
slice(bbox[2], bbox[5]))
return ret | python | def bbox_to_slices(bbox):
r"""
Given a tuple containing bounding box coordinates, return a tuple of slice
objects.
A bounding box in the form of a straight list is returned by several
functions in skimage, but these cannot be used to direct index into an
image. This function returns a tuples of slices can be, such as:
``im[bbox_to_slices([xmin, ymin, xmax, ymax])]``.
Parameters
----------
bbox : tuple of ints
The bounding box indices in the form (``xmin``, ``ymin``, ``zmin``,
``xmax``, ``ymax``, ``zmax``). For a 2D image, simply omit the
``zmin`` and ``zmax`` entries.
Returns
-------
slices : tuple
A tuple of slice objects that can be used to directly index into a
larger image.
"""
if len(bbox) == 4:
ret = (slice(bbox[0], bbox[2]),
slice(bbox[1], bbox[3]))
else:
ret = (slice(bbox[0], bbox[3]),
slice(bbox[1], bbox[4]),
slice(bbox[2], bbox[5]))
return ret | [
"def",
"bbox_to_slices",
"(",
"bbox",
")",
":",
"if",
"len",
"(",
"bbox",
")",
"==",
"4",
":",
"ret",
"=",
"(",
"slice",
"(",
"bbox",
"[",
"0",
"]",
",",
"bbox",
"[",
"2",
"]",
")",
",",
"slice",
"(",
"bbox",
"[",
"1",
"]",
",",
"bbox",
"["... | r"""
Given a tuple containing bounding box coordinates, return a tuple of slice
objects.
A bounding box in the form of a straight list is returned by several
functions in skimage, but these cannot be used to direct index into an
image. This function returns a tuples of slices can be, such as:
``im[bbox_to_slices([xmin, ymin, xmax, ymax])]``.
Parameters
----------
bbox : tuple of ints
The bounding box indices in the form (``xmin``, ``ymin``, ``zmin``,
``xmax``, ``ymax``, ``zmax``). For a 2D image, simply omit the
``zmin`` and ``zmax`` entries.
Returns
-------
slices : tuple
A tuple of slice objects that can be used to directly index into a
larger image. | [
"r",
"Given",
"a",
"tuple",
"containing",
"bounding",
"box",
"coordinates",
"return",
"a",
"tuple",
"of",
"slice",
"objects",
"."
] | 1e13875b56787d8f5b7ffdabce8c4342c33ba9f8 | https://github.com/PMEAL/porespy/blob/1e13875b56787d8f5b7ffdabce8c4342c33ba9f8/porespy/tools/__funcs__.py#L204-L234 | train | 213,434 |
PMEAL/porespy | porespy/tools/__funcs__.py | get_slice | def get_slice(im, center, size, pad=0):
r"""
Given a ``center`` location and ``radius`` of a feature, returns the slice
object into the ``im`` that bounds the feature but does not extend beyond
the image boundaries.
Parameters
----------
im : ND-image
The image of the porous media
center : array_like
The coordinates of the center of the feature of interest
size : array_like or scalar
The size of the feature in each direction. If a scalar is supplied,
this implies the same size in all directions.
pad : scalar or array_like
The amount to pad onto each side of the slice. The default is 0. A
scalar value will increase the slice size equally in all directions,
while an array the same shape as ``im.shape`` can be passed to pad
a specified amount in each direction.
Returns
-------
slices : list
A list of slice objects, each indexing into one dimension of the image.
"""
p = sp.ones(shape=im.ndim, dtype=int) * sp.array(pad)
s = sp.ones(shape=im.ndim, dtype=int) * sp.array(size)
slc = []
for dim in range(im.ndim):
lower_im = sp.amax((center[dim] - s[dim] - p[dim], 0))
upper_im = sp.amin((center[dim] + s[dim] + 1 + p[dim], im.shape[dim]))
slc.append(slice(lower_im, upper_im))
return slc | python | def get_slice(im, center, size, pad=0):
r"""
Given a ``center`` location and ``radius`` of a feature, returns the slice
object into the ``im`` that bounds the feature but does not extend beyond
the image boundaries.
Parameters
----------
im : ND-image
The image of the porous media
center : array_like
The coordinates of the center of the feature of interest
size : array_like or scalar
The size of the feature in each direction. If a scalar is supplied,
this implies the same size in all directions.
pad : scalar or array_like
The amount to pad onto each side of the slice. The default is 0. A
scalar value will increase the slice size equally in all directions,
while an array the same shape as ``im.shape`` can be passed to pad
a specified amount in each direction.
Returns
-------
slices : list
A list of slice objects, each indexing into one dimension of the image.
"""
p = sp.ones(shape=im.ndim, dtype=int) * sp.array(pad)
s = sp.ones(shape=im.ndim, dtype=int) * sp.array(size)
slc = []
for dim in range(im.ndim):
lower_im = sp.amax((center[dim] - s[dim] - p[dim], 0))
upper_im = sp.amin((center[dim] + s[dim] + 1 + p[dim], im.shape[dim]))
slc.append(slice(lower_im, upper_im))
return slc | [
"def",
"get_slice",
"(",
"im",
",",
"center",
",",
"size",
",",
"pad",
"=",
"0",
")",
":",
"p",
"=",
"sp",
".",
"ones",
"(",
"shape",
"=",
"im",
".",
"ndim",
",",
"dtype",
"=",
"int",
")",
"*",
"sp",
".",
"array",
"(",
"pad",
")",
"s",
"=",... | r"""
Given a ``center`` location and ``radius`` of a feature, returns the slice
object into the ``im`` that bounds the feature but does not extend beyond
the image boundaries.
Parameters
----------
im : ND-image
The image of the porous media
center : array_like
The coordinates of the center of the feature of interest
size : array_like or scalar
The size of the feature in each direction. If a scalar is supplied,
this implies the same size in all directions.
pad : scalar or array_like
The amount to pad onto each side of the slice. The default is 0. A
scalar value will increase the slice size equally in all directions,
while an array the same shape as ``im.shape`` can be passed to pad
a specified amount in each direction.
Returns
-------
slices : list
A list of slice objects, each indexing into one dimension of the image. | [
"r",
"Given",
"a",
"center",
"location",
"and",
"radius",
"of",
"a",
"feature",
"returns",
"the",
"slice",
"object",
"into",
"the",
"im",
"that",
"bounds",
"the",
"feature",
"but",
"does",
"not",
"extend",
"beyond",
"the",
"image",
"boundaries",
"."
] | 1e13875b56787d8f5b7ffdabce8c4342c33ba9f8 | https://github.com/PMEAL/porespy/blob/1e13875b56787d8f5b7ffdabce8c4342c33ba9f8/porespy/tools/__funcs__.py#L237-L273 | train | 213,435 |
PMEAL/porespy | porespy/tools/__funcs__.py | find_outer_region | def find_outer_region(im, r=0):
r"""
Finds regions of the image that are outside of the solid matrix.
This function uses the rolling ball method to define where the outer region
ends and the void space begins.
This function is particularly useful for samples that do not fill the
entire rectangular image, such as cylindrical cores or samples with non-
parallel faces.
Parameters
----------
im : ND-array
Image of the porous material with 1's for void and 0's for solid
r : scalar
The radius of the rolling ball to use. If not specified then a value
is calculated as twice maximum of the distance transform. The image
size is padded by this amount in all directions, so the image can
become quite large and unwieldy if too large a value is given.
Returns
-------
image : ND-array
A boolean mask the same shape as ``im``, containing True in all voxels
identified as *outside* the sample.
"""
if r == 0:
dt = spim.distance_transform_edt(input=im)
r = int(sp.amax(dt)) * 2
im_padded = sp.pad(array=im, pad_width=r, mode='constant',
constant_values=True)
dt = spim.distance_transform_edt(input=im_padded)
seeds = (dt >= r) + get_border(shape=im_padded.shape)
# Remove seeds not connected to edges
labels = spim.label(seeds)[0]
mask = labels == 1 # Assume label of 1 on edges, assured by adding border
dt = spim.distance_transform_edt(~mask)
outer_region = dt < r
outer_region = extract_subsection(im=outer_region, shape=im.shape)
return outer_region | python | def find_outer_region(im, r=0):
r"""
Finds regions of the image that are outside of the solid matrix.
This function uses the rolling ball method to define where the outer region
ends and the void space begins.
This function is particularly useful for samples that do not fill the
entire rectangular image, such as cylindrical cores or samples with non-
parallel faces.
Parameters
----------
im : ND-array
Image of the porous material with 1's for void and 0's for solid
r : scalar
The radius of the rolling ball to use. If not specified then a value
is calculated as twice maximum of the distance transform. The image
size is padded by this amount in all directions, so the image can
become quite large and unwieldy if too large a value is given.
Returns
-------
image : ND-array
A boolean mask the same shape as ``im``, containing True in all voxels
identified as *outside* the sample.
"""
if r == 0:
dt = spim.distance_transform_edt(input=im)
r = int(sp.amax(dt)) * 2
im_padded = sp.pad(array=im, pad_width=r, mode='constant',
constant_values=True)
dt = spim.distance_transform_edt(input=im_padded)
seeds = (dt >= r) + get_border(shape=im_padded.shape)
# Remove seeds not connected to edges
labels = spim.label(seeds)[0]
mask = labels == 1 # Assume label of 1 on edges, assured by adding border
dt = spim.distance_transform_edt(~mask)
outer_region = dt < r
outer_region = extract_subsection(im=outer_region, shape=im.shape)
return outer_region | [
"def",
"find_outer_region",
"(",
"im",
",",
"r",
"=",
"0",
")",
":",
"if",
"r",
"==",
"0",
":",
"dt",
"=",
"spim",
".",
"distance_transform_edt",
"(",
"input",
"=",
"im",
")",
"r",
"=",
"int",
"(",
"sp",
".",
"amax",
"(",
"dt",
")",
")",
"*",
... | r"""
Finds regions of the image that are outside of the solid matrix.
This function uses the rolling ball method to define where the outer region
ends and the void space begins.
This function is particularly useful for samples that do not fill the
entire rectangular image, such as cylindrical cores or samples with non-
parallel faces.
Parameters
----------
im : ND-array
Image of the porous material with 1's for void and 0's for solid
r : scalar
The radius of the rolling ball to use. If not specified then a value
is calculated as twice maximum of the distance transform. The image
size is padded by this amount in all directions, so the image can
become quite large and unwieldy if too large a value is given.
Returns
-------
image : ND-array
A boolean mask the same shape as ``im``, containing True in all voxels
identified as *outside* the sample. | [
"r",
"Finds",
"regions",
"of",
"the",
"image",
"that",
"are",
"outside",
"of",
"the",
"solid",
"matrix",
"."
] | 1e13875b56787d8f5b7ffdabce8c4342c33ba9f8 | https://github.com/PMEAL/porespy/blob/1e13875b56787d8f5b7ffdabce8c4342c33ba9f8/porespy/tools/__funcs__.py#L276-L318 | train | 213,436 |
PMEAL/porespy | porespy/tools/__funcs__.py | extract_cylinder | def extract_cylinder(im, r=None, axis=0):
r"""
Returns a cylindrical section of the image of specified radius.
This is useful for making square images look like cylindrical cores such
as those obtained from X-ray tomography.
Parameters
----------
im : ND-array
The image of the porous material. Can be any data type.
r : scalr
The radius of the cylinder to extract. If ``None`` is given then the
default is the largest cylinder that can fit inside the specified
plane.
axis : scalar
The axis along with the cylinder will be oriented.
Returns
-------
image : ND-array
A copy of ``im`` with values outside the cylindrical area set to 0 or
``False``.
"""
if r is None:
a = list(im.shape)
a.pop(axis)
r = sp.floor(sp.amin(a) / 2)
dim = [range(int(-s / 2), int(s / 2) + s % 2) for s in im.shape]
inds = sp.meshgrid(*dim, indexing='ij')
inds[axis] = inds[axis] * 0
d = sp.sqrt(sp.sum(sp.square(inds), axis=0))
mask = d < r
im_temp = im*mask
return im_temp | python | def extract_cylinder(im, r=None, axis=0):
r"""
Returns a cylindrical section of the image of specified radius.
This is useful for making square images look like cylindrical cores such
as those obtained from X-ray tomography.
Parameters
----------
im : ND-array
The image of the porous material. Can be any data type.
r : scalr
The radius of the cylinder to extract. If ``None`` is given then the
default is the largest cylinder that can fit inside the specified
plane.
axis : scalar
The axis along with the cylinder will be oriented.
Returns
-------
image : ND-array
A copy of ``im`` with values outside the cylindrical area set to 0 or
``False``.
"""
if r is None:
a = list(im.shape)
a.pop(axis)
r = sp.floor(sp.amin(a) / 2)
dim = [range(int(-s / 2), int(s / 2) + s % 2) for s in im.shape]
inds = sp.meshgrid(*dim, indexing='ij')
inds[axis] = inds[axis] * 0
d = sp.sqrt(sp.sum(sp.square(inds), axis=0))
mask = d < r
im_temp = im*mask
return im_temp | [
"def",
"extract_cylinder",
"(",
"im",
",",
"r",
"=",
"None",
",",
"axis",
"=",
"0",
")",
":",
"if",
"r",
"is",
"None",
":",
"a",
"=",
"list",
"(",
"im",
".",
"shape",
")",
"a",
".",
"pop",
"(",
"axis",
")",
"r",
"=",
"sp",
".",
"floor",
"("... | r"""
Returns a cylindrical section of the image of specified radius.
This is useful for making square images look like cylindrical cores such
as those obtained from X-ray tomography.
Parameters
----------
im : ND-array
The image of the porous material. Can be any data type.
r : scalr
The radius of the cylinder to extract. If ``None`` is given then the
default is the largest cylinder that can fit inside the specified
plane.
axis : scalar
The axis along with the cylinder will be oriented.
Returns
-------
image : ND-array
A copy of ``im`` with values outside the cylindrical area set to 0 or
``False``. | [
"r",
"Returns",
"a",
"cylindrical",
"section",
"of",
"the",
"image",
"of",
"specified",
"radius",
"."
] | 1e13875b56787d8f5b7ffdabce8c4342c33ba9f8 | https://github.com/PMEAL/porespy/blob/1e13875b56787d8f5b7ffdabce8c4342c33ba9f8/porespy/tools/__funcs__.py#L321-L358 | train | 213,437 |
PMEAL/porespy | porespy/tools/__funcs__.py | extract_subsection | def extract_subsection(im, shape):
r"""
Extracts the middle section of a image
Parameters
----------
im : ND-array
Image from which to extract the subsection
shape : array_like
Can either specify the size of the extracted section or the fractional
size of the image to extact.
Returns
-------
image : ND-array
An ND-array of size given by the ``shape`` argument, taken from the
center of the image.
Examples
--------
>>> import scipy as sp
>>> from porespy.tools import extract_subsection
>>> im = sp.array([[1, 1, 1, 1], [1, 2, 2, 2], [1, 2, 3, 3], [1, 2, 3, 4]])
>>> print(im)
[[1 1 1 1]
[1 2 2 2]
[1 2 3 3]
[1 2 3 4]]
>>> im = extract_subsection(im=im, shape=[2, 2])
>>> print(im)
[[2 2]
[2 3]]
"""
# Check if shape was given as a fraction
shape = sp.array(shape)
if shape[0] < 1:
shape = sp.array(im.shape) * shape
center = sp.array(im.shape) / 2
s_im = []
for dim in range(im.ndim):
r = shape[dim] / 2
lower_im = sp.amax((center[dim] - r, 0))
upper_im = sp.amin((center[dim] + r, im.shape[dim]))
s_im.append(slice(int(lower_im), int(upper_im)))
return im[tuple(s_im)] | python | def extract_subsection(im, shape):
r"""
Extracts the middle section of a image
Parameters
----------
im : ND-array
Image from which to extract the subsection
shape : array_like
Can either specify the size of the extracted section or the fractional
size of the image to extact.
Returns
-------
image : ND-array
An ND-array of size given by the ``shape`` argument, taken from the
center of the image.
Examples
--------
>>> import scipy as sp
>>> from porespy.tools import extract_subsection
>>> im = sp.array([[1, 1, 1, 1], [1, 2, 2, 2], [1, 2, 3, 3], [1, 2, 3, 4]])
>>> print(im)
[[1 1 1 1]
[1 2 2 2]
[1 2 3 3]
[1 2 3 4]]
>>> im = extract_subsection(im=im, shape=[2, 2])
>>> print(im)
[[2 2]
[2 3]]
"""
# Check if shape was given as a fraction
shape = sp.array(shape)
if shape[0] < 1:
shape = sp.array(im.shape) * shape
center = sp.array(im.shape) / 2
s_im = []
for dim in range(im.ndim):
r = shape[dim] / 2
lower_im = sp.amax((center[dim] - r, 0))
upper_im = sp.amin((center[dim] + r, im.shape[dim]))
s_im.append(slice(int(lower_im), int(upper_im)))
return im[tuple(s_im)] | [
"def",
"extract_subsection",
"(",
"im",
",",
"shape",
")",
":",
"# Check if shape was given as a fraction",
"shape",
"=",
"sp",
".",
"array",
"(",
"shape",
")",
"if",
"shape",
"[",
"0",
"]",
"<",
"1",
":",
"shape",
"=",
"sp",
".",
"array",
"(",
"im",
"... | r"""
Extracts the middle section of a image
Parameters
----------
im : ND-array
Image from which to extract the subsection
shape : array_like
Can either specify the size of the extracted section or the fractional
size of the image to extact.
Returns
-------
image : ND-array
An ND-array of size given by the ``shape`` argument, taken from the
center of the image.
Examples
--------
>>> import scipy as sp
>>> from porespy.tools import extract_subsection
>>> im = sp.array([[1, 1, 1, 1], [1, 2, 2, 2], [1, 2, 3, 3], [1, 2, 3, 4]])
>>> print(im)
[[1 1 1 1]
[1 2 2 2]
[1 2 3 3]
[1 2 3 4]]
>>> im = extract_subsection(im=im, shape=[2, 2])
>>> print(im)
[[2 2]
[2 3]] | [
"r",
"Extracts",
"the",
"middle",
"section",
"of",
"a",
"image"
] | 1e13875b56787d8f5b7ffdabce8c4342c33ba9f8 | https://github.com/PMEAL/porespy/blob/1e13875b56787d8f5b7ffdabce8c4342c33ba9f8/porespy/tools/__funcs__.py#L361-L407 | train | 213,438 |
PMEAL/porespy | porespy/tools/__funcs__.py | get_planes | def get_planes(im, squeeze=True):
r"""
Extracts three planar images from the volumetric image, one for each
principle axis. The planes are taken from the middle of the domain.
Parameters
----------
im : ND-array
The volumetric image from which the 3 planar images are to be obtained
squeeze : boolean, optional
If True (default) the returned images are 2D (i.e. squeezed). If
False, the images are 1 element deep along the axis where the slice
was obtained.
Returns
-------
planes : list
A list of 2D-images
"""
x, y, z = (sp.array(im.shape) / 2).astype(int)
planes = [im[x, :, :], im[:, y, :], im[:, :, z]]
if not squeeze:
imx = planes[0]
planes[0] = sp.reshape(imx, [1, imx.shape[0], imx.shape[1]])
imy = planes[1]
planes[1] = sp.reshape(imy, [imy.shape[0], 1, imy.shape[1]])
imz = planes[2]
planes[2] = sp.reshape(imz, [imz.shape[0], imz.shape[1], 1])
return planes | python | def get_planes(im, squeeze=True):
r"""
Extracts three planar images from the volumetric image, one for each
principle axis. The planes are taken from the middle of the domain.
Parameters
----------
im : ND-array
The volumetric image from which the 3 planar images are to be obtained
squeeze : boolean, optional
If True (default) the returned images are 2D (i.e. squeezed). If
False, the images are 1 element deep along the axis where the slice
was obtained.
Returns
-------
planes : list
A list of 2D-images
"""
x, y, z = (sp.array(im.shape) / 2).astype(int)
planes = [im[x, :, :], im[:, y, :], im[:, :, z]]
if not squeeze:
imx = planes[0]
planes[0] = sp.reshape(imx, [1, imx.shape[0], imx.shape[1]])
imy = planes[1]
planes[1] = sp.reshape(imy, [imy.shape[0], 1, imy.shape[1]])
imz = planes[2]
planes[2] = sp.reshape(imz, [imz.shape[0], imz.shape[1], 1])
return planes | [
"def",
"get_planes",
"(",
"im",
",",
"squeeze",
"=",
"True",
")",
":",
"x",
",",
"y",
",",
"z",
"=",
"(",
"sp",
".",
"array",
"(",
"im",
".",
"shape",
")",
"/",
"2",
")",
".",
"astype",
"(",
"int",
")",
"planes",
"=",
"[",
"im",
"[",
"x",
... | r"""
Extracts three planar images from the volumetric image, one for each
principle axis. The planes are taken from the middle of the domain.
Parameters
----------
im : ND-array
The volumetric image from which the 3 planar images are to be obtained
squeeze : boolean, optional
If True (default) the returned images are 2D (i.e. squeezed). If
False, the images are 1 element deep along the axis where the slice
was obtained.
Returns
-------
planes : list
A list of 2D-images | [
"r",
"Extracts",
"three",
"planar",
"images",
"from",
"the",
"volumetric",
"image",
"one",
"for",
"each",
"principle",
"axis",
".",
"The",
"planes",
"are",
"taken",
"from",
"the",
"middle",
"of",
"the",
"domain",
"."
] | 1e13875b56787d8f5b7ffdabce8c4342c33ba9f8 | https://github.com/PMEAL/porespy/blob/1e13875b56787d8f5b7ffdabce8c4342c33ba9f8/porespy/tools/__funcs__.py#L410-L439 | train | 213,439 |
PMEAL/porespy | porespy/tools/__funcs__.py | extend_slice | def extend_slice(s, shape, pad=1):
r"""
Adjust slice indices to include additional voxles around the slice.
This function does bounds checking to ensure the indices don't extend
outside the image.
Parameters
----------
s : list of slice objects
A list (or tuple) of N slice objects, where N is the number of
dimensions in the image.
shape : array_like
The shape of the image into which the slice objects apply. This is
used to check the bounds to prevent indexing beyond the image.
pad : int
The number of voxels to expand in each direction.
Returns
-------
slices : list of slice objects
A list slice of objects with the start and stop attributes respectively
incremented and decremented by 1, without extending beyond the image
boundaries.
Examples
--------
>>> from scipy.ndimage import label, find_objects
>>> from porespy.tools import extend_slice
>>> im = sp.array([[1, 0, 0], [1, 0, 0], [0, 0, 1]])
>>> labels = label(im)[0]
>>> s = find_objects(labels)
Using the slices returned by ``find_objects``, set the first label to 3
>>> labels[s[0]] = 3
>>> print(labels)
[[3 0 0]
[3 0 0]
[0 0 2]]
Next extend the slice, and use it to set the values to 4
>>> s_ext = extend_slice(s[0], shape=im.shape, pad=1)
>>> labels[s_ext] = 4
>>> print(labels)
[[4 4 0]
[4 4 0]
[4 4 2]]
As can be seen by the location of the 4s, the slice was extended by 1, and
also handled the extension beyond the boundary correctly.
"""
pad = int(pad)
a = []
for i, dim in zip(s, shape):
start = 0
stop = dim
if i.start - pad >= 0:
start = i.start - pad
if i.stop + pad < dim:
stop = i.stop + pad
a.append(slice(start, stop, None))
return tuple(a) | python | def extend_slice(s, shape, pad=1):
r"""
Adjust slice indices to include additional voxles around the slice.
This function does bounds checking to ensure the indices don't extend
outside the image.
Parameters
----------
s : list of slice objects
A list (or tuple) of N slice objects, where N is the number of
dimensions in the image.
shape : array_like
The shape of the image into which the slice objects apply. This is
used to check the bounds to prevent indexing beyond the image.
pad : int
The number of voxels to expand in each direction.
Returns
-------
slices : list of slice objects
A list slice of objects with the start and stop attributes respectively
incremented and decremented by 1, without extending beyond the image
boundaries.
Examples
--------
>>> from scipy.ndimage import label, find_objects
>>> from porespy.tools import extend_slice
>>> im = sp.array([[1, 0, 0], [1, 0, 0], [0, 0, 1]])
>>> labels = label(im)[0]
>>> s = find_objects(labels)
Using the slices returned by ``find_objects``, set the first label to 3
>>> labels[s[0]] = 3
>>> print(labels)
[[3 0 0]
[3 0 0]
[0 0 2]]
Next extend the slice, and use it to set the values to 4
>>> s_ext = extend_slice(s[0], shape=im.shape, pad=1)
>>> labels[s_ext] = 4
>>> print(labels)
[[4 4 0]
[4 4 0]
[4 4 2]]
As can be seen by the location of the 4s, the slice was extended by 1, and
also handled the extension beyond the boundary correctly.
"""
pad = int(pad)
a = []
for i, dim in zip(s, shape):
start = 0
stop = dim
if i.start - pad >= 0:
start = i.start - pad
if i.stop + pad < dim:
stop = i.stop + pad
a.append(slice(start, stop, None))
return tuple(a) | [
"def",
"extend_slice",
"(",
"s",
",",
"shape",
",",
"pad",
"=",
"1",
")",
":",
"pad",
"=",
"int",
"(",
"pad",
")",
"a",
"=",
"[",
"]",
"for",
"i",
",",
"dim",
"in",
"zip",
"(",
"s",
",",
"shape",
")",
":",
"start",
"=",
"0",
"stop",
"=",
... | r"""
Adjust slice indices to include additional voxles around the slice.
This function does bounds checking to ensure the indices don't extend
outside the image.
Parameters
----------
s : list of slice objects
A list (or tuple) of N slice objects, where N is the number of
dimensions in the image.
shape : array_like
The shape of the image into which the slice objects apply. This is
used to check the bounds to prevent indexing beyond the image.
pad : int
The number of voxels to expand in each direction.
Returns
-------
slices : list of slice objects
A list slice of objects with the start and stop attributes respectively
incremented and decremented by 1, without extending beyond the image
boundaries.
Examples
--------
>>> from scipy.ndimage import label, find_objects
>>> from porespy.tools import extend_slice
>>> im = sp.array([[1, 0, 0], [1, 0, 0], [0, 0, 1]])
>>> labels = label(im)[0]
>>> s = find_objects(labels)
Using the slices returned by ``find_objects``, set the first label to 3
>>> labels[s[0]] = 3
>>> print(labels)
[[3 0 0]
[3 0 0]
[0 0 2]]
Next extend the slice, and use it to set the values to 4
>>> s_ext = extend_slice(s[0], shape=im.shape, pad=1)
>>> labels[s_ext] = 4
>>> print(labels)
[[4 4 0]
[4 4 0]
[4 4 2]]
As can be seen by the location of the 4s, the slice was extended by 1, and
also handled the extension beyond the boundary correctly. | [
"r",
"Adjust",
"slice",
"indices",
"to",
"include",
"additional",
"voxles",
"around",
"the",
"slice",
"."
] | 1e13875b56787d8f5b7ffdabce8c4342c33ba9f8 | https://github.com/PMEAL/porespy/blob/1e13875b56787d8f5b7ffdabce8c4342c33ba9f8/porespy/tools/__funcs__.py#L442-L507 | train | 213,440 |
PMEAL/porespy | porespy/tools/__funcs__.py | randomize_colors | def randomize_colors(im, keep_vals=[0]):
r'''
Takes a greyscale image and randomly shuffles the greyscale values, so that
all voxels labeled X will be labelled Y, and all voxels labeled Y will be
labeled Z, where X, Y, Z and so on are randomly selected from the values
in the input image.
This function is useful for improving the visibility of images with
neighboring regions that are only incrementally different from each other,
such as that returned by `scipy.ndimage.label`.
Parameters
----------
im : array_like
An ND image of greyscale values.
keep_vals : array_like
Indicate which voxel values should NOT be altered. The default is
`[0]` which is useful for leaving the background of the image
untouched.
Returns
-------
image : ND-array
An image the same size and type as ``im`` but with the greyscale values
reassigned. The unique values in both the input and output images will
be identical.
Notes
-----
If the greyscale values in the input image are not contiguous then the
neither will they be in the output.
Examples
--------
>>> import porespy as ps
>>> import scipy as sp
>>> sp.random.seed(0)
>>> im = sp.random.randint(low=0, high=5, size=[4, 4])
>>> print(im)
[[4 0 3 3]
[3 1 3 2]
[4 0 0 4]
[2 1 0 1]]
>>> im_rand = ps.tools.randomize_colors(im)
>>> print(im_rand)
[[2 0 4 4]
[4 1 4 3]
[2 0 0 2]
[3 1 0 1]]
As can be seen, the 2's have become 3, 3's have become 4, and 4's have
become 2. 1's remained 1 by random accident. 0's remain zeros by default,
but this can be controlled using the `keep_vals` argument.
'''
im_flat = im.flatten()
keep_vals = sp.array(keep_vals)
swap_vals = ~sp.in1d(im_flat, keep_vals)
im_vals = sp.unique(im_flat[swap_vals])
new_vals = sp.random.permutation(im_vals)
im_map = sp.zeros(shape=[sp.amax(im_vals) + 1, ], dtype=int)
im_map[im_vals] = new_vals
im_new = im_map[im_flat]
im_new = sp.reshape(im_new, newshape=sp.shape(im))
return im_new | python | def randomize_colors(im, keep_vals=[0]):
r'''
Takes a greyscale image and randomly shuffles the greyscale values, so that
all voxels labeled X will be labelled Y, and all voxels labeled Y will be
labeled Z, where X, Y, Z and so on are randomly selected from the values
in the input image.
This function is useful for improving the visibility of images with
neighboring regions that are only incrementally different from each other,
such as that returned by `scipy.ndimage.label`.
Parameters
----------
im : array_like
An ND image of greyscale values.
keep_vals : array_like
Indicate which voxel values should NOT be altered. The default is
`[0]` which is useful for leaving the background of the image
untouched.
Returns
-------
image : ND-array
An image the same size and type as ``im`` but with the greyscale values
reassigned. The unique values in both the input and output images will
be identical.
Notes
-----
If the greyscale values in the input image are not contiguous then the
neither will they be in the output.
Examples
--------
>>> import porespy as ps
>>> import scipy as sp
>>> sp.random.seed(0)
>>> im = sp.random.randint(low=0, high=5, size=[4, 4])
>>> print(im)
[[4 0 3 3]
[3 1 3 2]
[4 0 0 4]
[2 1 0 1]]
>>> im_rand = ps.tools.randomize_colors(im)
>>> print(im_rand)
[[2 0 4 4]
[4 1 4 3]
[2 0 0 2]
[3 1 0 1]]
As can be seen, the 2's have become 3, 3's have become 4, and 4's have
become 2. 1's remained 1 by random accident. 0's remain zeros by default,
but this can be controlled using the `keep_vals` argument.
'''
im_flat = im.flatten()
keep_vals = sp.array(keep_vals)
swap_vals = ~sp.in1d(im_flat, keep_vals)
im_vals = sp.unique(im_flat[swap_vals])
new_vals = sp.random.permutation(im_vals)
im_map = sp.zeros(shape=[sp.amax(im_vals) + 1, ], dtype=int)
im_map[im_vals] = new_vals
im_new = im_map[im_flat]
im_new = sp.reshape(im_new, newshape=sp.shape(im))
return im_new | [
"def",
"randomize_colors",
"(",
"im",
",",
"keep_vals",
"=",
"[",
"0",
"]",
")",
":",
"im_flat",
"=",
"im",
".",
"flatten",
"(",
")",
"keep_vals",
"=",
"sp",
".",
"array",
"(",
"keep_vals",
")",
"swap_vals",
"=",
"~",
"sp",
".",
"in1d",
"(",
"im_fl... | r'''
Takes a greyscale image and randomly shuffles the greyscale values, so that
all voxels labeled X will be labelled Y, and all voxels labeled Y will be
labeled Z, where X, Y, Z and so on are randomly selected from the values
in the input image.
This function is useful for improving the visibility of images with
neighboring regions that are only incrementally different from each other,
such as that returned by `scipy.ndimage.label`.
Parameters
----------
im : array_like
An ND image of greyscale values.
keep_vals : array_like
Indicate which voxel values should NOT be altered. The default is
`[0]` which is useful for leaving the background of the image
untouched.
Returns
-------
image : ND-array
An image the same size and type as ``im`` but with the greyscale values
reassigned. The unique values in both the input and output images will
be identical.
Notes
-----
If the greyscale values in the input image are not contiguous then the
neither will they be in the output.
Examples
--------
>>> import porespy as ps
>>> import scipy as sp
>>> sp.random.seed(0)
>>> im = sp.random.randint(low=0, high=5, size=[4, 4])
>>> print(im)
[[4 0 3 3]
[3 1 3 2]
[4 0 0 4]
[2 1 0 1]]
>>> im_rand = ps.tools.randomize_colors(im)
>>> print(im_rand)
[[2 0 4 4]
[4 1 4 3]
[2 0 0 2]
[3 1 0 1]]
As can be seen, the 2's have become 3, 3's have become 4, and 4's have
become 2. 1's remained 1 by random accident. 0's remain zeros by default,
but this can be controlled using the `keep_vals` argument. | [
"r",
"Takes",
"a",
"greyscale",
"image",
"and",
"randomly",
"shuffles",
"the",
"greyscale",
"values",
"so",
"that",
"all",
"voxels",
"labeled",
"X",
"will",
"be",
"labelled",
"Y",
"and",
"all",
"voxels",
"labeled",
"Y",
"will",
"be",
"labeled",
"Z",
"where... | 1e13875b56787d8f5b7ffdabce8c4342c33ba9f8 | https://github.com/PMEAL/porespy/blob/1e13875b56787d8f5b7ffdabce8c4342c33ba9f8/porespy/tools/__funcs__.py#L510-L575 | train | 213,441 |
PMEAL/porespy | porespy/tools/__funcs__.py | make_contiguous | def make_contiguous(im, keep_zeros=True):
r"""
Take an image with arbitrary greyscale values and adjust them to ensure
all values fall in a contiguous range starting at 0.
This function will handle negative numbers such that most negative number
will become 0, *unless* ``keep_zeros`` is ``True`` in which case it will
become 1, and all 0's in the original image remain 0.
Parameters
----------
im : array_like
An ND array containing greyscale values
keep_zeros : Boolean
If ``True`` (default) then 0 values remain 0, regardless of how the
other numbers are adjusted. This is mostly relevant when the array
contains negative numbers, and means that -1 will become +1, while
0 values remain 0.
Returns
-------
image : ND-array
An ND-array the same size as ``im`` but with all values in contiguous
orders.
Example
-------
>>> import porespy as ps
>>> import scipy as sp
>>> im = sp.array([[0, 2, 9], [6, 8, 3]])
>>> im = ps.tools.make_contiguous(im)
>>> print(im)
[[0 1 5]
[3 4 2]]
"""
im = sp.copy(im)
if keep_zeros:
mask = (im == 0)
im[mask] = im.min() - 1
im = im - im.min()
im_flat = im.flatten()
im_vals = sp.unique(im_flat)
im_map = sp.zeros(shape=sp.amax(im_flat) + 1)
im_map[im_vals] = sp.arange(0, sp.size(sp.unique(im_flat)))
im_new = im_map[im_flat]
im_new = sp.reshape(im_new, newshape=sp.shape(im))
im_new = sp.array(im_new, dtype=im_flat.dtype)
return im_new | python | def make_contiguous(im, keep_zeros=True):
r"""
Take an image with arbitrary greyscale values and adjust them to ensure
all values fall in a contiguous range starting at 0.
This function will handle negative numbers such that most negative number
will become 0, *unless* ``keep_zeros`` is ``True`` in which case it will
become 1, and all 0's in the original image remain 0.
Parameters
----------
im : array_like
An ND array containing greyscale values
keep_zeros : Boolean
If ``True`` (default) then 0 values remain 0, regardless of how the
other numbers are adjusted. This is mostly relevant when the array
contains negative numbers, and means that -1 will become +1, while
0 values remain 0.
Returns
-------
image : ND-array
An ND-array the same size as ``im`` but with all values in contiguous
orders.
Example
-------
>>> import porespy as ps
>>> import scipy as sp
>>> im = sp.array([[0, 2, 9], [6, 8, 3]])
>>> im = ps.tools.make_contiguous(im)
>>> print(im)
[[0 1 5]
[3 4 2]]
"""
im = sp.copy(im)
if keep_zeros:
mask = (im == 0)
im[mask] = im.min() - 1
im = im - im.min()
im_flat = im.flatten()
im_vals = sp.unique(im_flat)
im_map = sp.zeros(shape=sp.amax(im_flat) + 1)
im_map[im_vals] = sp.arange(0, sp.size(sp.unique(im_flat)))
im_new = im_map[im_flat]
im_new = sp.reshape(im_new, newshape=sp.shape(im))
im_new = sp.array(im_new, dtype=im_flat.dtype)
return im_new | [
"def",
"make_contiguous",
"(",
"im",
",",
"keep_zeros",
"=",
"True",
")",
":",
"im",
"=",
"sp",
".",
"copy",
"(",
"im",
")",
"if",
"keep_zeros",
":",
"mask",
"=",
"(",
"im",
"==",
"0",
")",
"im",
"[",
"mask",
"]",
"=",
"im",
".",
"min",
"(",
... | r"""
Take an image with arbitrary greyscale values and adjust them to ensure
all values fall in a contiguous range starting at 0.
This function will handle negative numbers such that most negative number
will become 0, *unless* ``keep_zeros`` is ``True`` in which case it will
become 1, and all 0's in the original image remain 0.
Parameters
----------
im : array_like
An ND array containing greyscale values
keep_zeros : Boolean
If ``True`` (default) then 0 values remain 0, regardless of how the
other numbers are adjusted. This is mostly relevant when the array
contains negative numbers, and means that -1 will become +1, while
0 values remain 0.
Returns
-------
image : ND-array
An ND-array the same size as ``im`` but with all values in contiguous
orders.
Example
-------
>>> import porespy as ps
>>> import scipy as sp
>>> im = sp.array([[0, 2, 9], [6, 8, 3]])
>>> im = ps.tools.make_contiguous(im)
>>> print(im)
[[0 1 5]
[3 4 2]] | [
"r",
"Take",
"an",
"image",
"with",
"arbitrary",
"greyscale",
"values",
"and",
"adjust",
"them",
"to",
"ensure",
"all",
"values",
"fall",
"in",
"a",
"contiguous",
"range",
"starting",
"at",
"0",
"."
] | 1e13875b56787d8f5b7ffdabce8c4342c33ba9f8 | https://github.com/PMEAL/porespy/blob/1e13875b56787d8f5b7ffdabce8c4342c33ba9f8/porespy/tools/__funcs__.py#L578-L627 | train | 213,442 |
PMEAL/porespy | porespy/tools/__funcs__.py | get_border | def get_border(shape, thickness=1, mode='edges', return_indices=False):
r"""
Creates an array of specified size with corners, edges or faces labelled as
True. This can be used as mask to manipulate values laying on the
perimeter of an image.
Parameters
----------
shape : array_like
The shape of the array to return. Can be either 2D or 3D.
thickness : scalar (default is 1)
The number of pixels/voxels to place along perimeter.
mode : string
The type of border to create. Options are 'faces', 'edges' (default)
and 'corners'. In 2D 'faces' and 'edges' give the same result.
return_indices : boolean
If ``False`` (default) an image is returned with the border voxels set
to ``True``. If ``True``, then a tuple with the x, y, z (if ``im`` is
3D) indices is returned. This tuple can be used directly to index into
the image, such as ``im[tup] = 2``.
Returns
-------
image : ND-array
An ND-array of specified shape with ``True`` values at the perimeter
and ``False`` elsewhere
Notes
-----
TODO: This function uses brute force to create an image then fill the
edges using location-based logic, and if the user requests
``return_indices`` it finds them using ``np.where``. Since these arrays
are cubic it should be possible to use more elegant and efficient
index-based logic to find the indices, then use them to fill an empty
image with ``True`` using these indices.
Examples
--------
>>> import porespy as ps
>>> import scipy as sp
>>> mask = ps.tools.get_border(shape=[3, 3], mode='corners')
>>> print(mask)
[[ True False True]
[False False False]
[ True False True]]
>>> mask = ps.tools.get_border(shape=[3, 3], mode='edges')
>>> print(mask)
[[ True True True]
[ True False True]
[ True True True]]
"""
ndims = len(shape)
t = thickness
border = sp.ones(shape, dtype=bool)
if mode == 'faces':
if ndims == 2:
border[t:-t, t:-t] = False
if ndims == 3:
border[t:-t, t:-t, t:-t] = False
elif mode == 'edges':
if ndims == 2:
border[t:-t, t:-t] = False
if ndims == 3:
border[0::, t:-t, t:-t] = False
border[t:-t, 0::, t:-t] = False
border[t:-t, t:-t, 0::] = False
elif mode == 'corners':
if ndims == 2:
border[t:-t, 0::] = False
border[0::, t:-t] = False
if ndims == 3:
border[t:-t, 0::, 0::] = False
border[0::, t:-t, 0::] = False
border[0::, 0::, t:-t] = False
if return_indices:
border = sp.where(border)
return border | python | def get_border(shape, thickness=1, mode='edges', return_indices=False):
r"""
Creates an array of specified size with corners, edges or faces labelled as
True. This can be used as mask to manipulate values laying on the
perimeter of an image.
Parameters
----------
shape : array_like
The shape of the array to return. Can be either 2D or 3D.
thickness : scalar (default is 1)
The number of pixels/voxels to place along perimeter.
mode : string
The type of border to create. Options are 'faces', 'edges' (default)
and 'corners'. In 2D 'faces' and 'edges' give the same result.
return_indices : boolean
If ``False`` (default) an image is returned with the border voxels set
to ``True``. If ``True``, then a tuple with the x, y, z (if ``im`` is
3D) indices is returned. This tuple can be used directly to index into
the image, such as ``im[tup] = 2``.
Returns
-------
image : ND-array
An ND-array of specified shape with ``True`` values at the perimeter
and ``False`` elsewhere
Notes
-----
TODO: This function uses brute force to create an image then fill the
edges using location-based logic, and if the user requests
``return_indices`` it finds them using ``np.where``. Since these arrays
are cubic it should be possible to use more elegant and efficient
index-based logic to find the indices, then use them to fill an empty
image with ``True`` using these indices.
Examples
--------
>>> import porespy as ps
>>> import scipy as sp
>>> mask = ps.tools.get_border(shape=[3, 3], mode='corners')
>>> print(mask)
[[ True False True]
[False False False]
[ True False True]]
>>> mask = ps.tools.get_border(shape=[3, 3], mode='edges')
>>> print(mask)
[[ True True True]
[ True False True]
[ True True True]]
"""
ndims = len(shape)
t = thickness
border = sp.ones(shape, dtype=bool)
if mode == 'faces':
if ndims == 2:
border[t:-t, t:-t] = False
if ndims == 3:
border[t:-t, t:-t, t:-t] = False
elif mode == 'edges':
if ndims == 2:
border[t:-t, t:-t] = False
if ndims == 3:
border[0::, t:-t, t:-t] = False
border[t:-t, 0::, t:-t] = False
border[t:-t, t:-t, 0::] = False
elif mode == 'corners':
if ndims == 2:
border[t:-t, 0::] = False
border[0::, t:-t] = False
if ndims == 3:
border[t:-t, 0::, 0::] = False
border[0::, t:-t, 0::] = False
border[0::, 0::, t:-t] = False
if return_indices:
border = sp.where(border)
return border | [
"def",
"get_border",
"(",
"shape",
",",
"thickness",
"=",
"1",
",",
"mode",
"=",
"'edges'",
",",
"return_indices",
"=",
"False",
")",
":",
"ndims",
"=",
"len",
"(",
"shape",
")",
"t",
"=",
"thickness",
"border",
"=",
"sp",
".",
"ones",
"(",
"shape",
... | r"""
Creates an array of specified size with corners, edges or faces labelled as
True. This can be used as mask to manipulate values laying on the
perimeter of an image.
Parameters
----------
shape : array_like
The shape of the array to return. Can be either 2D or 3D.
thickness : scalar (default is 1)
The number of pixels/voxels to place along perimeter.
mode : string
The type of border to create. Options are 'faces', 'edges' (default)
and 'corners'. In 2D 'faces' and 'edges' give the same result.
return_indices : boolean
If ``False`` (default) an image is returned with the border voxels set
to ``True``. If ``True``, then a tuple with the x, y, z (if ``im`` is
3D) indices is returned. This tuple can be used directly to index into
the image, such as ``im[tup] = 2``.
Returns
-------
image : ND-array
An ND-array of specified shape with ``True`` values at the perimeter
and ``False`` elsewhere
Notes
-----
TODO: This function uses brute force to create an image then fill the
edges using location-based logic, and if the user requests
``return_indices`` it finds them using ``np.where``. Since these arrays
are cubic it should be possible to use more elegant and efficient
index-based logic to find the indices, then use them to fill an empty
image with ``True`` using these indices.
Examples
--------
>>> import porespy as ps
>>> import scipy as sp
>>> mask = ps.tools.get_border(shape=[3, 3], mode='corners')
>>> print(mask)
[[ True False True]
[False False False]
[ True False True]]
>>> mask = ps.tools.get_border(shape=[3, 3], mode='edges')
>>> print(mask)
[[ True True True]
[ True False True]
[ True True True]] | [
"r",
"Creates",
"an",
"array",
"of",
"specified",
"size",
"with",
"corners",
"edges",
"or",
"faces",
"labelled",
"as",
"True",
".",
"This",
"can",
"be",
"used",
"as",
"mask",
"to",
"manipulate",
"values",
"laying",
"on",
"the",
"perimeter",
"of",
"an",
"... | 1e13875b56787d8f5b7ffdabce8c4342c33ba9f8 | https://github.com/PMEAL/porespy/blob/1e13875b56787d8f5b7ffdabce8c4342c33ba9f8/porespy/tools/__funcs__.py#L630-L706 | train | 213,443 |
PMEAL/porespy | porespy/tools/__funcs__.py | in_hull | def in_hull(points, hull):
"""
Test if a list of coordinates are inside a given convex hull
Parameters
----------
points : array_like (N x ndims)
The spatial coordinates of the points to check
hull : scipy.spatial.ConvexHull object **OR** array_like
Can be either a convex hull object as returned by
``scipy.spatial.ConvexHull`` or simply the coordinates of the points
that define the convex hull.
Returns
-------
result : 1D-array
A 1D-array Boolean array of length *N* indicating whether or not the
given points in ``points`` lies within the provided ``hull``.
"""
from scipy.spatial import Delaunay, ConvexHull
if isinstance(hull, ConvexHull):
hull = hull.points
hull = Delaunay(hull)
return hull.find_simplex(points) >= 0 | python | def in_hull(points, hull):
"""
Test if a list of coordinates are inside a given convex hull
Parameters
----------
points : array_like (N x ndims)
The spatial coordinates of the points to check
hull : scipy.spatial.ConvexHull object **OR** array_like
Can be either a convex hull object as returned by
``scipy.spatial.ConvexHull`` or simply the coordinates of the points
that define the convex hull.
Returns
-------
result : 1D-array
A 1D-array Boolean array of length *N* indicating whether or not the
given points in ``points`` lies within the provided ``hull``.
"""
from scipy.spatial import Delaunay, ConvexHull
if isinstance(hull, ConvexHull):
hull = hull.points
hull = Delaunay(hull)
return hull.find_simplex(points) >= 0 | [
"def",
"in_hull",
"(",
"points",
",",
"hull",
")",
":",
"from",
"scipy",
".",
"spatial",
"import",
"Delaunay",
",",
"ConvexHull",
"if",
"isinstance",
"(",
"hull",
",",
"ConvexHull",
")",
":",
"hull",
"=",
"hull",
".",
"points",
"hull",
"=",
"Delaunay",
... | Test if a list of coordinates are inside a given convex hull
Parameters
----------
points : array_like (N x ndims)
The spatial coordinates of the points to check
hull : scipy.spatial.ConvexHull object **OR** array_like
Can be either a convex hull object as returned by
``scipy.spatial.ConvexHull`` or simply the coordinates of the points
that define the convex hull.
Returns
-------
result : 1D-array
A 1D-array Boolean array of length *N* indicating whether or not the
given points in ``points`` lies within the provided ``hull``. | [
"Test",
"if",
"a",
"list",
"of",
"coordinates",
"are",
"inside",
"a",
"given",
"convex",
"hull"
] | 1e13875b56787d8f5b7ffdabce8c4342c33ba9f8 | https://github.com/PMEAL/porespy/blob/1e13875b56787d8f5b7ffdabce8c4342c33ba9f8/porespy/tools/__funcs__.py#L709-L734 | train | 213,444 |
PMEAL/porespy | porespy/tools/__funcs__.py | functions_to_table | def functions_to_table(mod, colwidth=[27, 48]):
r"""
Given a module of functions, returns a ReST formatted text string that
outputs a table when printed.
Parameters
----------
mod : module
The module containing the functions to be included in the table, such
as 'porespy.filters'.
colwidths : list of ints
The width of the first and second columns. Note that because of the
vertical lines separating columns and define the edges of the table,
the total table width will be 3 characters wider than the total sum
of the specified column widths.
"""
temp = mod.__dir__()
funcs = [i for i in temp if not i[0].startswith('_')]
funcs.sort()
row = '+' + '-'*colwidth[0] + '+' + '-'*colwidth[1] + '+'
fmt = '{0:1s} {1:' + str(colwidth[0]-2) + 's} {2:1s} {3:' \
+ str(colwidth[1]-2) + 's} {4:1s}'
lines = []
lines.append(row)
lines.append(fmt.format('|', 'Method', '|', 'Description', '|'))
lines.append(row.replace('-', '='))
for i, item in enumerate(funcs):
try:
s = getattr(mod, item).__doc__.strip()
end = s.find('\n')
if end > colwidth[1] - 2:
s = s[:colwidth[1] - 5] + '...'
lines.append(fmt.format('|', item, '|', s[:end], '|'))
lines.append(row)
except AttributeError:
pass
s = '\n'.join(lines)
return s | python | def functions_to_table(mod, colwidth=[27, 48]):
r"""
Given a module of functions, returns a ReST formatted text string that
outputs a table when printed.
Parameters
----------
mod : module
The module containing the functions to be included in the table, such
as 'porespy.filters'.
colwidths : list of ints
The width of the first and second columns. Note that because of the
vertical lines separating columns and define the edges of the table,
the total table width will be 3 characters wider than the total sum
of the specified column widths.
"""
temp = mod.__dir__()
funcs = [i for i in temp if not i[0].startswith('_')]
funcs.sort()
row = '+' + '-'*colwidth[0] + '+' + '-'*colwidth[1] + '+'
fmt = '{0:1s} {1:' + str(colwidth[0]-2) + 's} {2:1s} {3:' \
+ str(colwidth[1]-2) + 's} {4:1s}'
lines = []
lines.append(row)
lines.append(fmt.format('|', 'Method', '|', 'Description', '|'))
lines.append(row.replace('-', '='))
for i, item in enumerate(funcs):
try:
s = getattr(mod, item).__doc__.strip()
end = s.find('\n')
if end > colwidth[1] - 2:
s = s[:colwidth[1] - 5] + '...'
lines.append(fmt.format('|', item, '|', s[:end], '|'))
lines.append(row)
except AttributeError:
pass
s = '\n'.join(lines)
return s | [
"def",
"functions_to_table",
"(",
"mod",
",",
"colwidth",
"=",
"[",
"27",
",",
"48",
"]",
")",
":",
"temp",
"=",
"mod",
".",
"__dir__",
"(",
")",
"funcs",
"=",
"[",
"i",
"for",
"i",
"in",
"temp",
"if",
"not",
"i",
"[",
"0",
"]",
".",
"startswit... | r"""
Given a module of functions, returns a ReST formatted text string that
outputs a table when printed.
Parameters
----------
mod : module
The module containing the functions to be included in the table, such
as 'porespy.filters'.
colwidths : list of ints
The width of the first and second columns. Note that because of the
vertical lines separating columns and define the edges of the table,
the total table width will be 3 characters wider than the total sum
of the specified column widths. | [
"r",
"Given",
"a",
"module",
"of",
"functions",
"returns",
"a",
"ReST",
"formatted",
"text",
"string",
"that",
"outputs",
"a",
"table",
"when",
"printed",
"."
] | 1e13875b56787d8f5b7ffdabce8c4342c33ba9f8 | https://github.com/PMEAL/porespy/blob/1e13875b56787d8f5b7ffdabce8c4342c33ba9f8/porespy/tools/__funcs__.py#L769-L807 | train | 213,445 |
PMEAL/porespy | porespy/tools/__funcs__.py | mesh_region | def mesh_region(region: bool, strel=None):
r"""
Creates a tri-mesh of the provided region using the marching cubes
algorithm
Parameters
----------
im : ND-array
A boolean image with ``True`` values indicating the region of interest
strel : ND-array
The structuring element to use when blurring the region. The blur is
perfomed using a simple convolution filter. The point is to create a
greyscale region to allow the marching cubes algorithm some freedom
to conform the mesh to the surface. As the size of ``strel`` increases
the region will become increasingly blurred and inaccurate. The default
is a spherical element with a radius of 1.
Returns
-------
mesh : tuple
A named-tuple containing ``faces``, ``verts``, ``norm``, and ``val``
as returned by ``scikit-image.measure.marching_cubes`` function.
"""
im = region
if im.ndim != im.squeeze().ndim:
warnings.warn('Input image conains a singleton axis:' + str(im.shape) +
' Reduce dimensionality with np.squeeze(im) to avoid' +
' unexpected behavior.')
if strel is None:
if region.ndim == 3:
strel = ball(1)
if region.ndim == 2:
strel = disk(1)
pad_width = sp.amax(strel.shape)
if im.ndim == 3:
padded_mask = sp.pad(im, pad_width=pad_width, mode='constant')
padded_mask = spim.convolve(padded_mask * 1.0,
weights=strel) / sp.sum(strel)
else:
padded_mask = sp.reshape(im, (1,) + im.shape)
padded_mask = sp.pad(padded_mask, pad_width=pad_width, mode='constant')
verts, faces, norm, val = marching_cubes_lewiner(padded_mask)
result = namedtuple('mesh', ('verts', 'faces', 'norm', 'val'))
result.verts = verts - pad_width
result.faces = faces
result.norm = norm
result.val = val
return result | python | def mesh_region(region: bool, strel=None):
r"""
Creates a tri-mesh of the provided region using the marching cubes
algorithm
Parameters
----------
im : ND-array
A boolean image with ``True`` values indicating the region of interest
strel : ND-array
The structuring element to use when blurring the region. The blur is
perfomed using a simple convolution filter. The point is to create a
greyscale region to allow the marching cubes algorithm some freedom
to conform the mesh to the surface. As the size of ``strel`` increases
the region will become increasingly blurred and inaccurate. The default
is a spherical element with a radius of 1.
Returns
-------
mesh : tuple
A named-tuple containing ``faces``, ``verts``, ``norm``, and ``val``
as returned by ``scikit-image.measure.marching_cubes`` function.
"""
im = region
if im.ndim != im.squeeze().ndim:
warnings.warn('Input image conains a singleton axis:' + str(im.shape) +
' Reduce dimensionality with np.squeeze(im) to avoid' +
' unexpected behavior.')
if strel is None:
if region.ndim == 3:
strel = ball(1)
if region.ndim == 2:
strel = disk(1)
pad_width = sp.amax(strel.shape)
if im.ndim == 3:
padded_mask = sp.pad(im, pad_width=pad_width, mode='constant')
padded_mask = spim.convolve(padded_mask * 1.0,
weights=strel) / sp.sum(strel)
else:
padded_mask = sp.reshape(im, (1,) + im.shape)
padded_mask = sp.pad(padded_mask, pad_width=pad_width, mode='constant')
verts, faces, norm, val = marching_cubes_lewiner(padded_mask)
result = namedtuple('mesh', ('verts', 'faces', 'norm', 'val'))
result.verts = verts - pad_width
result.faces = faces
result.norm = norm
result.val = val
return result | [
"def",
"mesh_region",
"(",
"region",
":",
"bool",
",",
"strel",
"=",
"None",
")",
":",
"im",
"=",
"region",
"if",
"im",
".",
"ndim",
"!=",
"im",
".",
"squeeze",
"(",
")",
".",
"ndim",
":",
"warnings",
".",
"warn",
"(",
"'Input image conains a singleton... | r"""
Creates a tri-mesh of the provided region using the marching cubes
algorithm
Parameters
----------
im : ND-array
A boolean image with ``True`` values indicating the region of interest
strel : ND-array
The structuring element to use when blurring the region. The blur is
perfomed using a simple convolution filter. The point is to create a
greyscale region to allow the marching cubes algorithm some freedom
to conform the mesh to the surface. As the size of ``strel`` increases
the region will become increasingly blurred and inaccurate. The default
is a spherical element with a radius of 1.
Returns
-------
mesh : tuple
A named-tuple containing ``faces``, ``verts``, ``norm``, and ``val``
as returned by ``scikit-image.measure.marching_cubes`` function. | [
"r",
"Creates",
"a",
"tri",
"-",
"mesh",
"of",
"the",
"provided",
"region",
"using",
"the",
"marching",
"cubes",
"algorithm"
] | 1e13875b56787d8f5b7ffdabce8c4342c33ba9f8 | https://github.com/PMEAL/porespy/blob/1e13875b56787d8f5b7ffdabce8c4342c33ba9f8/porespy/tools/__funcs__.py#L810-L859 | train | 213,446 |
PMEAL/porespy | porespy/tools/__funcs__.py | ps_disk | def ps_disk(radius):
r"""
Creates circular disk structuring element for morphological operations
Parameters
----------
radius : float or int
The desired radius of the structuring element
Returns
-------
strel : 2D-array
A 2D numpy bool array of the structring element
"""
rad = int(sp.ceil(radius))
other = sp.ones((2 * rad + 1, 2 * rad + 1), dtype=bool)
other[rad, rad] = False
disk = spim.distance_transform_edt(other) < radius
return disk | python | def ps_disk(radius):
r"""
Creates circular disk structuring element for morphological operations
Parameters
----------
radius : float or int
The desired radius of the structuring element
Returns
-------
strel : 2D-array
A 2D numpy bool array of the structring element
"""
rad = int(sp.ceil(radius))
other = sp.ones((2 * rad + 1, 2 * rad + 1), dtype=bool)
other[rad, rad] = False
disk = spim.distance_transform_edt(other) < radius
return disk | [
"def",
"ps_disk",
"(",
"radius",
")",
":",
"rad",
"=",
"int",
"(",
"sp",
".",
"ceil",
"(",
"radius",
")",
")",
"other",
"=",
"sp",
".",
"ones",
"(",
"(",
"2",
"*",
"rad",
"+",
"1",
",",
"2",
"*",
"rad",
"+",
"1",
")",
",",
"dtype",
"=",
"... | r"""
Creates circular disk structuring element for morphological operations
Parameters
----------
radius : float or int
The desired radius of the structuring element
Returns
-------
strel : 2D-array
A 2D numpy bool array of the structring element | [
"r",
"Creates",
"circular",
"disk",
"structuring",
"element",
"for",
"morphological",
"operations"
] | 1e13875b56787d8f5b7ffdabce8c4342c33ba9f8 | https://github.com/PMEAL/porespy/blob/1e13875b56787d8f5b7ffdabce8c4342c33ba9f8/porespy/tools/__funcs__.py#L862-L880 | train | 213,447 |
PMEAL/porespy | porespy/tools/__funcs__.py | ps_ball | def ps_ball(radius):
r"""
Creates spherical ball structuring element for morphological operations
Parameters
----------
radius : float or int
The desired radius of the structuring element
Returns
-------
strel : 3D-array
A 3D numpy array of the structuring element
"""
rad = int(sp.ceil(radius))
other = sp.ones((2 * rad + 1, 2 * rad + 1, 2 * rad + 1), dtype=bool)
other[rad, rad, rad] = False
ball = spim.distance_transform_edt(other) < radius
return ball | python | def ps_ball(radius):
r"""
Creates spherical ball structuring element for morphological operations
Parameters
----------
radius : float or int
The desired radius of the structuring element
Returns
-------
strel : 3D-array
A 3D numpy array of the structuring element
"""
rad = int(sp.ceil(radius))
other = sp.ones((2 * rad + 1, 2 * rad + 1, 2 * rad + 1), dtype=bool)
other[rad, rad, rad] = False
ball = spim.distance_transform_edt(other) < radius
return ball | [
"def",
"ps_ball",
"(",
"radius",
")",
":",
"rad",
"=",
"int",
"(",
"sp",
".",
"ceil",
"(",
"radius",
")",
")",
"other",
"=",
"sp",
".",
"ones",
"(",
"(",
"2",
"*",
"rad",
"+",
"1",
",",
"2",
"*",
"rad",
"+",
"1",
",",
"2",
"*",
"rad",
"+"... | r"""
Creates spherical ball structuring element for morphological operations
Parameters
----------
radius : float or int
The desired radius of the structuring element
Returns
-------
strel : 3D-array
A 3D numpy array of the structuring element | [
"r",
"Creates",
"spherical",
"ball",
"structuring",
"element",
"for",
"morphological",
"operations"
] | 1e13875b56787d8f5b7ffdabce8c4342c33ba9f8 | https://github.com/PMEAL/porespy/blob/1e13875b56787d8f5b7ffdabce8c4342c33ba9f8/porespy/tools/__funcs__.py#L883-L901 | train | 213,448 |
PMEAL/porespy | porespy/tools/__funcs__.py | overlay | def overlay(im1, im2, c):
r"""
Overlays ``im2`` onto ``im1``, given voxel coords of center of ``im2``
in ``im1``.
Parameters
----------
im1 : ND-array
Original voxelated image
im2 : ND-array
Template voxelated image
c : array_like
[x, y, z] coordinates in ``im1`` where ``im2`` will be centered
Returns
-------
image : ND-array
A modified version of ``im1``, with ``im2`` overlaid at the specified
location
"""
shape = im2.shape
for ni in shape:
if ni % 2 == 0:
raise Exception("Structuring element must be odd-voxeled...")
nx, ny, nz = [(ni - 1) // 2 for ni in shape]
cx, cy, cz = c
im1[cx-nx:cx+nx+1, cy-ny:cy+ny+1, cz-nz:cz+nz+1] += im2
return im1 | python | def overlay(im1, im2, c):
r"""
Overlays ``im2`` onto ``im1``, given voxel coords of center of ``im2``
in ``im1``.
Parameters
----------
im1 : ND-array
Original voxelated image
im2 : ND-array
Template voxelated image
c : array_like
[x, y, z] coordinates in ``im1`` where ``im2`` will be centered
Returns
-------
image : ND-array
A modified version of ``im1``, with ``im2`` overlaid at the specified
location
"""
shape = im2.shape
for ni in shape:
if ni % 2 == 0:
raise Exception("Structuring element must be odd-voxeled...")
nx, ny, nz = [(ni - 1) // 2 for ni in shape]
cx, cy, cz = c
im1[cx-nx:cx+nx+1, cy-ny:cy+ny+1, cz-nz:cz+nz+1] += im2
return im1 | [
"def",
"overlay",
"(",
"im1",
",",
"im2",
",",
"c",
")",
":",
"shape",
"=",
"im2",
".",
"shape",
"for",
"ni",
"in",
"shape",
":",
"if",
"ni",
"%",
"2",
"==",
"0",
":",
"raise",
"Exception",
"(",
"\"Structuring element must be odd-voxeled...\"",
")",
"n... | r"""
Overlays ``im2`` onto ``im1``, given voxel coords of center of ``im2``
in ``im1``.
Parameters
----------
im1 : ND-array
Original voxelated image
im2 : ND-array
Template voxelated image
c : array_like
[x, y, z] coordinates in ``im1`` where ``im2`` will be centered
Returns
-------
image : ND-array
A modified version of ``im1``, with ``im2`` overlaid at the specified
location | [
"r",
"Overlays",
"im2",
"onto",
"im1",
"given",
"voxel",
"coords",
"of",
"center",
"of",
"im2",
"in",
"im1",
"."
] | 1e13875b56787d8f5b7ffdabce8c4342c33ba9f8 | https://github.com/PMEAL/porespy/blob/1e13875b56787d8f5b7ffdabce8c4342c33ba9f8/porespy/tools/__funcs__.py#L904-L935 | train | 213,449 |
PMEAL/porespy | porespy/tools/__funcs__.py | insert_sphere | def insert_sphere(im, c, r):
r"""
Inserts a sphere of a specified radius into a given image
Parameters
----------
im : array_like
Image into which the sphere should be inserted
c : array_like
The [x, y, z] coordinate indicating the center of the sphere
r : int
The radius of sphere to insert
Returns
-------
image : ND-array
The original image with a sphere inerted at the specified location
"""
c = sp.array(c, dtype=int)
if c.size != im.ndim:
raise Exception('Coordinates do not match dimensionality of image')
bbox = []
[bbox.append(sp.clip(c[i] - r, 0, im.shape[i])) for i in range(im.ndim)]
[bbox.append(sp.clip(c[i] + r, 0, im.shape[i])) for i in range(im.ndim)]
bbox = sp.ravel(bbox)
s = bbox_to_slices(bbox)
temp = im[s]
blank = sp.ones_like(temp)
blank[tuple(c - bbox[0:im.ndim])] = 0
blank = spim.distance_transform_edt(blank) < r
im[s] = blank
return im | python | def insert_sphere(im, c, r):
r"""
Inserts a sphere of a specified radius into a given image
Parameters
----------
im : array_like
Image into which the sphere should be inserted
c : array_like
The [x, y, z] coordinate indicating the center of the sphere
r : int
The radius of sphere to insert
Returns
-------
image : ND-array
The original image with a sphere inerted at the specified location
"""
c = sp.array(c, dtype=int)
if c.size != im.ndim:
raise Exception('Coordinates do not match dimensionality of image')
bbox = []
[bbox.append(sp.clip(c[i] - r, 0, im.shape[i])) for i in range(im.ndim)]
[bbox.append(sp.clip(c[i] + r, 0, im.shape[i])) for i in range(im.ndim)]
bbox = sp.ravel(bbox)
s = bbox_to_slices(bbox)
temp = im[s]
blank = sp.ones_like(temp)
blank[tuple(c - bbox[0:im.ndim])] = 0
blank = spim.distance_transform_edt(blank) < r
im[s] = blank
return im | [
"def",
"insert_sphere",
"(",
"im",
",",
"c",
",",
"r",
")",
":",
"c",
"=",
"sp",
".",
"array",
"(",
"c",
",",
"dtype",
"=",
"int",
")",
"if",
"c",
".",
"size",
"!=",
"im",
".",
"ndim",
":",
"raise",
"Exception",
"(",
"'Coordinates do not match dime... | r"""
Inserts a sphere of a specified radius into a given image
Parameters
----------
im : array_like
Image into which the sphere should be inserted
c : array_like
The [x, y, z] coordinate indicating the center of the sphere
r : int
The radius of sphere to insert
Returns
-------
image : ND-array
The original image with a sphere inerted at the specified location | [
"r",
"Inserts",
"a",
"sphere",
"of",
"a",
"specified",
"radius",
"into",
"a",
"given",
"image"
] | 1e13875b56787d8f5b7ffdabce8c4342c33ba9f8 | https://github.com/PMEAL/porespy/blob/1e13875b56787d8f5b7ffdabce8c4342c33ba9f8/porespy/tools/__funcs__.py#L938-L970 | train | 213,450 |
PMEAL/porespy | porespy/tools/__funcs__.py | insert_cylinder | def insert_cylinder(im, xyz0, xyz1, r):
r"""
Inserts a cylinder of given radius onto a given image
Parameters
----------
im : array_like
Original voxelated image
xyz0, xyz1 : 3-by-1 array_like
Voxel coordinates of the two end points of the cylinder
r : int
Radius of the cylinder
Returns
-------
im : ND-array
Original voxelated image overlayed with the cylinder
Notes
-----
This function is only implemented for 3D images
"""
if im.ndim != 3:
raise Exception('This function is only implemented for 3D images')
# Converting coordinates to numpy array
xyz0, xyz1 = [sp.array(xyz).astype(int) for xyz in (xyz0, xyz1)]
r = int(r)
L = sp.absolute(xyz0 - xyz1).max() + 1
xyz_line = [sp.linspace(xyz0[i], xyz1[i], L).astype(int) for i in range(3)]
xyz_min = sp.amin(xyz_line, axis=1) - r
xyz_max = sp.amax(xyz_line, axis=1) + r
shape_template = xyz_max - xyz_min + 1
template = sp.zeros(shape=shape_template)
# Shortcut for orthogonal cylinders
if (xyz0 == xyz1).sum() == 2:
unique_dim = [xyz0[i] != xyz1[i] for i in range(3)].index(True)
shape_template[unique_dim] = 1
template_2D = disk(radius=r).reshape(shape_template)
template = sp.repeat(template_2D, repeats=L, axis=unique_dim)
xyz_min[unique_dim] += r
xyz_max[unique_dim] += -r
else:
xyz_line_in_template_coords = [xyz_line[i] - xyz_min[i] for i in range(3)]
template[tuple(xyz_line_in_template_coords)] = 1
template = spim.distance_transform_edt(template == 0) <= r
im[xyz_min[0]:xyz_max[0]+1,
xyz_min[1]:xyz_max[1]+1,
xyz_min[2]:xyz_max[2]+1] += template
return im | python | def insert_cylinder(im, xyz0, xyz1, r):
r"""
Inserts a cylinder of given radius onto a given image
Parameters
----------
im : array_like
Original voxelated image
xyz0, xyz1 : 3-by-1 array_like
Voxel coordinates of the two end points of the cylinder
r : int
Radius of the cylinder
Returns
-------
im : ND-array
Original voxelated image overlayed with the cylinder
Notes
-----
This function is only implemented for 3D images
"""
if im.ndim != 3:
raise Exception('This function is only implemented for 3D images')
# Converting coordinates to numpy array
xyz0, xyz1 = [sp.array(xyz).astype(int) for xyz in (xyz0, xyz1)]
r = int(r)
L = sp.absolute(xyz0 - xyz1).max() + 1
xyz_line = [sp.linspace(xyz0[i], xyz1[i], L).astype(int) for i in range(3)]
xyz_min = sp.amin(xyz_line, axis=1) - r
xyz_max = sp.amax(xyz_line, axis=1) + r
shape_template = xyz_max - xyz_min + 1
template = sp.zeros(shape=shape_template)
# Shortcut for orthogonal cylinders
if (xyz0 == xyz1).sum() == 2:
unique_dim = [xyz0[i] != xyz1[i] for i in range(3)].index(True)
shape_template[unique_dim] = 1
template_2D = disk(radius=r).reshape(shape_template)
template = sp.repeat(template_2D, repeats=L, axis=unique_dim)
xyz_min[unique_dim] += r
xyz_max[unique_dim] += -r
else:
xyz_line_in_template_coords = [xyz_line[i] - xyz_min[i] for i in range(3)]
template[tuple(xyz_line_in_template_coords)] = 1
template = spim.distance_transform_edt(template == 0) <= r
im[xyz_min[0]:xyz_max[0]+1,
xyz_min[1]:xyz_max[1]+1,
xyz_min[2]:xyz_max[2]+1] += template
return im | [
"def",
"insert_cylinder",
"(",
"im",
",",
"xyz0",
",",
"xyz1",
",",
"r",
")",
":",
"if",
"im",
".",
"ndim",
"!=",
"3",
":",
"raise",
"Exception",
"(",
"'This function is only implemented for 3D images'",
")",
"# Converting coordinates to numpy array",
"xyz0",
",",... | r"""
Inserts a cylinder of given radius onto a given image
Parameters
----------
im : array_like
Original voxelated image
xyz0, xyz1 : 3-by-1 array_like
Voxel coordinates of the two end points of the cylinder
r : int
Radius of the cylinder
Returns
-------
im : ND-array
Original voxelated image overlayed with the cylinder
Notes
-----
This function is only implemented for 3D images | [
"r",
"Inserts",
"a",
"cylinder",
"of",
"given",
"radius",
"onto",
"a",
"given",
"image"
] | 1e13875b56787d8f5b7ffdabce8c4342c33ba9f8 | https://github.com/PMEAL/porespy/blob/1e13875b56787d8f5b7ffdabce8c4342c33ba9f8/porespy/tools/__funcs__.py#L973-L1026 | train | 213,451 |
PMEAL/porespy | porespy/tools/__funcs__.py | pad_faces | def pad_faces(im, faces):
r"""
Pads the input image at specified faces. This shape of image is
same as the output image of add_boundary_regions function.
Parameters
----------
im : ND_array
The image that needs to be padded
faces : list of strings
Labels indicating where image needs to be padded. Given a 3D image
of shape ``[x, y, z] = [i, j, k]``, the following conventions are used
to indicate along which axis the padding should be applied:
* 'left' -> ``x = 0``
* 'right' -> ``x = i``
* 'front' -> ``y = 0``
* 'back' -> ``y = j``
* 'bottom' -> ``z = 0``
* 'top' -> ``z = k``
Returns
-------
A image padded at specified face(s)
See also
--------
add_boundary_regions
"""
if im.ndim != im.squeeze().ndim:
warnings.warn('Input image conains a singleton axis:' + str(im.shape) +
' Reduce dimensionality with np.squeeze(im) to avoid' +
' unexpected behavior.')
f = faces
if f is not None:
if im.ndim == 2:
faces = [(int('left' in f) * 3, int('right' in f) * 3),
(int(('front') in f) * 3 or int(('bottom') in f) * 3,
int(('back') in f) * 3 or int(('top') in f) * 3)]
if im.ndim == 3:
faces = [(int('left' in f) * 3, int('right' in f) * 3),
(int('front' in f) * 3, int('back' in f) * 3),
(int('top' in f) * 3, int('bottom' in f) * 3)]
im = sp.pad(im, pad_width=faces, mode='edge')
else:
im = im
return im | python | def pad_faces(im, faces):
r"""
Pads the input image at specified faces. This shape of image is
same as the output image of add_boundary_regions function.
Parameters
----------
im : ND_array
The image that needs to be padded
faces : list of strings
Labels indicating where image needs to be padded. Given a 3D image
of shape ``[x, y, z] = [i, j, k]``, the following conventions are used
to indicate along which axis the padding should be applied:
* 'left' -> ``x = 0``
* 'right' -> ``x = i``
* 'front' -> ``y = 0``
* 'back' -> ``y = j``
* 'bottom' -> ``z = 0``
* 'top' -> ``z = k``
Returns
-------
A image padded at specified face(s)
See also
--------
add_boundary_regions
"""
if im.ndim != im.squeeze().ndim:
warnings.warn('Input image conains a singleton axis:' + str(im.shape) +
' Reduce dimensionality with np.squeeze(im) to avoid' +
' unexpected behavior.')
f = faces
if f is not None:
if im.ndim == 2:
faces = [(int('left' in f) * 3, int('right' in f) * 3),
(int(('front') in f) * 3 or int(('bottom') in f) * 3,
int(('back') in f) * 3 or int(('top') in f) * 3)]
if im.ndim == 3:
faces = [(int('left' in f) * 3, int('right' in f) * 3),
(int('front' in f) * 3, int('back' in f) * 3),
(int('top' in f) * 3, int('bottom' in f) * 3)]
im = sp.pad(im, pad_width=faces, mode='edge')
else:
im = im
return im | [
"def",
"pad_faces",
"(",
"im",
",",
"faces",
")",
":",
"if",
"im",
".",
"ndim",
"!=",
"im",
".",
"squeeze",
"(",
")",
".",
"ndim",
":",
"warnings",
".",
"warn",
"(",
"'Input image conains a singleton axis:'",
"+",
"str",
"(",
"im",
".",
"shape",
")",
... | r"""
Pads the input image at specified faces. This shape of image is
same as the output image of add_boundary_regions function.
Parameters
----------
im : ND_array
The image that needs to be padded
faces : list of strings
Labels indicating where image needs to be padded. Given a 3D image
of shape ``[x, y, z] = [i, j, k]``, the following conventions are used
to indicate along which axis the padding should be applied:
* 'left' -> ``x = 0``
* 'right' -> ``x = i``
* 'front' -> ``y = 0``
* 'back' -> ``y = j``
* 'bottom' -> ``z = 0``
* 'top' -> ``z = k``
Returns
-------
A image padded at specified face(s)
See also
--------
add_boundary_regions | [
"r",
"Pads",
"the",
"input",
"image",
"at",
"specified",
"faces",
".",
"This",
"shape",
"of",
"image",
"is",
"same",
"as",
"the",
"output",
"image",
"of",
"add_boundary_regions",
"function",
"."
] | 1e13875b56787d8f5b7ffdabce8c4342c33ba9f8 | https://github.com/PMEAL/porespy/blob/1e13875b56787d8f5b7ffdabce8c4342c33ba9f8/porespy/tools/__funcs__.py#L1029-L1077 | train | 213,452 |
PMEAL/porespy | porespy/tools/__funcs__.py | _create_alias_map | def _create_alias_map(im, alias=None):
r"""
Creates an alias mapping between phases in original image and identifyable
names. This mapping is used during network extraction to label
interconnection between and properties of each phase.
Parameters
----------
im : ND-array
Image of porous material where each phase is represented by unique
integer. Phase integer should start from 1. Boolean image will extract
only one network labeled with True's only.
alias : dict (Optional)
A dictionary that assigns unique image label to specific phase.
For example {1: 'Solid'} will show all structural properties associated
with label 1 as Solid phase properties.
If ``None`` then default labelling will be used i.e {1: 'Phase1',..}.
Returns
-------
A dictionary with numerical phase labels as key, and readable phase names
as valuies. If no alias is provided then default labelling is used
i.e {1: 'Phase1',..}
"""
# -------------------------------------------------------------------------
# Get alias if provided by user
phases_num = sp.unique(im * 1)
phases_num = sp.trim_zeros(phases_num)
al = {}
for values in phases_num:
al[values] = 'phase{}'.format(values)
if alias is not None:
alias_sort = dict(sorted(alias.items()))
phase_labels = sp.array([*alias_sort])
al = alias
if set(phase_labels) != set(phases_num):
raise Exception('Alias labels does not match with image labels '
'please provide correct image labels')
return al | python | def _create_alias_map(im, alias=None):
r"""
Creates an alias mapping between phases in original image and identifyable
names. This mapping is used during network extraction to label
interconnection between and properties of each phase.
Parameters
----------
im : ND-array
Image of porous material where each phase is represented by unique
integer. Phase integer should start from 1. Boolean image will extract
only one network labeled with True's only.
alias : dict (Optional)
A dictionary that assigns unique image label to specific phase.
For example {1: 'Solid'} will show all structural properties associated
with label 1 as Solid phase properties.
If ``None`` then default labelling will be used i.e {1: 'Phase1',..}.
Returns
-------
A dictionary with numerical phase labels as key, and readable phase names
as valuies. If no alias is provided then default labelling is used
i.e {1: 'Phase1',..}
"""
# -------------------------------------------------------------------------
# Get alias if provided by user
phases_num = sp.unique(im * 1)
phases_num = sp.trim_zeros(phases_num)
al = {}
for values in phases_num:
al[values] = 'phase{}'.format(values)
if alias is not None:
alias_sort = dict(sorted(alias.items()))
phase_labels = sp.array([*alias_sort])
al = alias
if set(phase_labels) != set(phases_num):
raise Exception('Alias labels does not match with image labels '
'please provide correct image labels')
return al | [
"def",
"_create_alias_map",
"(",
"im",
",",
"alias",
"=",
"None",
")",
":",
"# -------------------------------------------------------------------------",
"# Get alias if provided by user",
"phases_num",
"=",
"sp",
".",
"unique",
"(",
"im",
"*",
"1",
")",
"phases_num",
... | r"""
Creates an alias mapping between phases in original image and identifyable
names. This mapping is used during network extraction to label
interconnection between and properties of each phase.
Parameters
----------
im : ND-array
Image of porous material where each phase is represented by unique
integer. Phase integer should start from 1. Boolean image will extract
only one network labeled with True's only.
alias : dict (Optional)
A dictionary that assigns unique image label to specific phase.
For example {1: 'Solid'} will show all structural properties associated
with label 1 as Solid phase properties.
If ``None`` then default labelling will be used i.e {1: 'Phase1',..}.
Returns
-------
A dictionary with numerical phase labels as key, and readable phase names
as valuies. If no alias is provided then default labelling is used
i.e {1: 'Phase1',..} | [
"r",
"Creates",
"an",
"alias",
"mapping",
"between",
"phases",
"in",
"original",
"image",
"and",
"identifyable",
"names",
".",
"This",
"mapping",
"is",
"used",
"during",
"network",
"extraction",
"to",
"label",
"interconnection",
"between",
"and",
"properties",
"... | 1e13875b56787d8f5b7ffdabce8c4342c33ba9f8 | https://github.com/PMEAL/porespy/blob/1e13875b56787d8f5b7ffdabce8c4342c33ba9f8/porespy/tools/__funcs__.py#L1080-L1119 | train | 213,453 |
frictionlessdata/datapackage-pipelines | datapackage_pipelines/utilities/dirtools.py | _filehash | def _filehash(filepath, blocksize=4096):
""" Return the hash object for the file `filepath', processing the file
by chunk of `blocksize'.
:type filepath: str
:param filepath: Path to file
:type blocksize: int
:param blocksize: Size of the chunk when processing the file
"""
sha = hashlib.sha256()
with open(filepath, 'rb') as fp:
while 1:
data = fp.read(blocksize)
if data:
sha.update(data)
else:
break
return sha | python | def _filehash(filepath, blocksize=4096):
""" Return the hash object for the file `filepath', processing the file
by chunk of `blocksize'.
:type filepath: str
:param filepath: Path to file
:type blocksize: int
:param blocksize: Size of the chunk when processing the file
"""
sha = hashlib.sha256()
with open(filepath, 'rb') as fp:
while 1:
data = fp.read(blocksize)
if data:
sha.update(data)
else:
break
return sha | [
"def",
"_filehash",
"(",
"filepath",
",",
"blocksize",
"=",
"4096",
")",
":",
"sha",
"=",
"hashlib",
".",
"sha256",
"(",
")",
"with",
"open",
"(",
"filepath",
",",
"'rb'",
")",
"as",
"fp",
":",
"while",
"1",
":",
"data",
"=",
"fp",
".",
"read",
"... | Return the hash object for the file `filepath', processing the file
by chunk of `blocksize'.
:type filepath: str
:param filepath: Path to file
:type blocksize: int
:param blocksize: Size of the chunk when processing the file | [
"Return",
"the",
"hash",
"object",
"for",
"the",
"file",
"filepath",
"processing",
"the",
"file",
"by",
"chunk",
"of",
"blocksize",
"."
] | 3a34bbdf042d13c3bec5eef46ff360ee41403874 | https://github.com/frictionlessdata/datapackage-pipelines/blob/3a34bbdf042d13c3bec5eef46ff360ee41403874/datapackage_pipelines/utilities/dirtools.py#L34-L53 | train | 213,454 |
frictionlessdata/datapackage-pipelines | datapackage_pipelines/utilities/dirtools.py | File.compress_to | def compress_to(self, archive_path=None):
""" Compress the directory with gzip using tarlib.
:type archive_path: str
:param archive_path: Path to the archive, if None, a tempfile is created
"""
if archive_path is None:
archive = tempfile.NamedTemporaryFile(delete=False)
tar_args = ()
tar_kwargs = {'fileobj': archive}
_return = archive.name
else:
tar_args = (archive_path)
tar_kwargs = {}
_return = archive_path
tar_kwargs.update({'mode': 'w:gz'})
with closing(tarfile.open(*tar_args, **tar_kwargs)) as tar:
tar.add(self.path, arcname=self.file)
return _return | python | def compress_to(self, archive_path=None):
""" Compress the directory with gzip using tarlib.
:type archive_path: str
:param archive_path: Path to the archive, if None, a tempfile is created
"""
if archive_path is None:
archive = tempfile.NamedTemporaryFile(delete=False)
tar_args = ()
tar_kwargs = {'fileobj': archive}
_return = archive.name
else:
tar_args = (archive_path)
tar_kwargs = {}
_return = archive_path
tar_kwargs.update({'mode': 'w:gz'})
with closing(tarfile.open(*tar_args, **tar_kwargs)) as tar:
tar.add(self.path, arcname=self.file)
return _return | [
"def",
"compress_to",
"(",
"self",
",",
"archive_path",
"=",
"None",
")",
":",
"if",
"archive_path",
"is",
"None",
":",
"archive",
"=",
"tempfile",
".",
"NamedTemporaryFile",
"(",
"delete",
"=",
"False",
")",
"tar_args",
"=",
"(",
")",
"tar_kwargs",
"=",
... | Compress the directory with gzip using tarlib.
:type archive_path: str
:param archive_path: Path to the archive, if None, a tempfile is created | [
"Compress",
"the",
"directory",
"with",
"gzip",
"using",
"tarlib",
"."
] | 3a34bbdf042d13c3bec5eef46ff360ee41403874 | https://github.com/frictionlessdata/datapackage-pipelines/blob/3a34bbdf042d13c3bec5eef46ff360ee41403874/datapackage_pipelines/utilities/dirtools.py#L84-L104 | train | 213,455 |
frictionlessdata/datapackage-pipelines | datapackage_pipelines/utilities/dirtools.py | Dir.iterfiles | def iterfiles(self, pattern=None, abspath=False):
""" Generator for all the files not excluded recursively.
Return relative path.
:type pattern: str
:param pattern: Unix style (glob like/gitignore like) pattern
"""
if pattern is not None:
globster = Globster([pattern])
for root, dirs, files in self.walk():
for f in files:
if pattern is None or (pattern is not None and globster.match(f)):
if abspath:
yield os.path.join(root, f)
else:
yield self.relpath(os.path.join(root, f)) | python | def iterfiles(self, pattern=None, abspath=False):
""" Generator for all the files not excluded recursively.
Return relative path.
:type pattern: str
:param pattern: Unix style (glob like/gitignore like) pattern
"""
if pattern is not None:
globster = Globster([pattern])
for root, dirs, files in self.walk():
for f in files:
if pattern is None or (pattern is not None and globster.match(f)):
if abspath:
yield os.path.join(root, f)
else:
yield self.relpath(os.path.join(root, f)) | [
"def",
"iterfiles",
"(",
"self",
",",
"pattern",
"=",
"None",
",",
"abspath",
"=",
"False",
")",
":",
"if",
"pattern",
"is",
"not",
"None",
":",
"globster",
"=",
"Globster",
"(",
"[",
"pattern",
"]",
")",
"for",
"root",
",",
"dirs",
",",
"files",
"... | Generator for all the files not excluded recursively.
Return relative path.
:type pattern: str
:param pattern: Unix style (glob like/gitignore like) pattern | [
"Generator",
"for",
"all",
"the",
"files",
"not",
"excluded",
"recursively",
"."
] | 3a34bbdf042d13c3bec5eef46ff360ee41403874 | https://github.com/frictionlessdata/datapackage-pipelines/blob/3a34bbdf042d13c3bec5eef46ff360ee41403874/datapackage_pipelines/utilities/dirtools.py#L155-L172 | train | 213,456 |
frictionlessdata/datapackage-pipelines | datapackage_pipelines/utilities/dirtools.py | Dir.size | def size(self):
""" Return directory size in bytes.
:rtype: int
:return: Total directory size in bytes.
"""
dir_size = 0
for f in self.iterfiles(abspath=True):
dir_size += os.path.getsize(f)
return dir_size | python | def size(self):
""" Return directory size in bytes.
:rtype: int
:return: Total directory size in bytes.
"""
dir_size = 0
for f in self.iterfiles(abspath=True):
dir_size += os.path.getsize(f)
return dir_size | [
"def",
"size",
"(",
"self",
")",
":",
"dir_size",
"=",
"0",
"for",
"f",
"in",
"self",
".",
"iterfiles",
"(",
"abspath",
"=",
"True",
")",
":",
"dir_size",
"+=",
"os",
".",
"path",
".",
"getsize",
"(",
"f",
")",
"return",
"dir_size"
] | Return directory size in bytes.
:rtype: int
:return: Total directory size in bytes. | [
"Return",
"directory",
"size",
"in",
"bytes",
"."
] | 3a34bbdf042d13c3bec5eef46ff360ee41403874 | https://github.com/frictionlessdata/datapackage-pipelines/blob/3a34bbdf042d13c3bec5eef46ff360ee41403874/datapackage_pipelines/utilities/dirtools.py#L227-L236 | train | 213,457 |
frictionlessdata/datapackage-pipelines | datapackage_pipelines/utilities/dirtools.py | Dir.is_excluded | def is_excluded(self, path):
""" Return True if `path' should be excluded
given patterns in the `exclude_file'. """
match = self.globster.match(self.relpath(path))
if match:
log.debug("{0} matched {1} for exclusion".format(path, match))
return True
return False | python | def is_excluded(self, path):
""" Return True if `path' should be excluded
given patterns in the `exclude_file'. """
match = self.globster.match(self.relpath(path))
if match:
log.debug("{0} matched {1} for exclusion".format(path, match))
return True
return False | [
"def",
"is_excluded",
"(",
"self",
",",
"path",
")",
":",
"match",
"=",
"self",
".",
"globster",
".",
"match",
"(",
"self",
".",
"relpath",
"(",
"path",
")",
")",
"if",
"match",
":",
"log",
".",
"debug",
"(",
"\"{0} matched {1} for exclusion\"",
".",
"... | Return True if `path' should be excluded
given patterns in the `exclude_file'. | [
"Return",
"True",
"if",
"path",
"should",
"be",
"excluded",
"given",
"patterns",
"in",
"the",
"exclude_file",
"."
] | 3a34bbdf042d13c3bec5eef46ff360ee41403874 | https://github.com/frictionlessdata/datapackage-pipelines/blob/3a34bbdf042d13c3bec5eef46ff360ee41403874/datapackage_pipelines/utilities/dirtools.py#L238-L245 | train | 213,458 |
frictionlessdata/datapackage-pipelines | datapackage_pipelines/utilities/dirtools.py | Dir.find_projects | def find_projects(self, file_identifier=".project"):
""" Search all directory recursively for subdirs
with `file_identifier' in it.
:type file_identifier: str
:param file_identifier: File identier, .project by default.
:rtype: list
:return: The list of subdirs with a `file_identifier' in it.
"""
projects = []
for d in self.subdirs():
project_file = os.path.join(self.directory, d, file_identifier)
if os.path.isfile(project_file):
projects.append(d)
return projects | python | def find_projects(self, file_identifier=".project"):
""" Search all directory recursively for subdirs
with `file_identifier' in it.
:type file_identifier: str
:param file_identifier: File identier, .project by default.
:rtype: list
:return: The list of subdirs with a `file_identifier' in it.
"""
projects = []
for d in self.subdirs():
project_file = os.path.join(self.directory, d, file_identifier)
if os.path.isfile(project_file):
projects.append(d)
return projects | [
"def",
"find_projects",
"(",
"self",
",",
"file_identifier",
"=",
"\".project\"",
")",
":",
"projects",
"=",
"[",
"]",
"for",
"d",
"in",
"self",
".",
"subdirs",
"(",
")",
":",
"project_file",
"=",
"os",
".",
"path",
".",
"join",
"(",
"self",
".",
"di... | Search all directory recursively for subdirs
with `file_identifier' in it.
:type file_identifier: str
:param file_identifier: File identier, .project by default.
:rtype: list
:return: The list of subdirs with a `file_identifier' in it. | [
"Search",
"all",
"directory",
"recursively",
"for",
"subdirs",
"with",
"file_identifier",
"in",
"it",
"."
] | 3a34bbdf042d13c3bec5eef46ff360ee41403874 | https://github.com/frictionlessdata/datapackage-pipelines/blob/3a34bbdf042d13c3bec5eef46ff360ee41403874/datapackage_pipelines/utilities/dirtools.py#L269-L285 | train | 213,459 |
frictionlessdata/datapackage-pipelines | datapackage_pipelines/utilities/dirtools.py | Dir.relpath | def relpath(self, path):
""" Return a relative filepath to path from Dir path. """
return os.path.relpath(path, start=self.path) | python | def relpath(self, path):
""" Return a relative filepath to path from Dir path. """
return os.path.relpath(path, start=self.path) | [
"def",
"relpath",
"(",
"self",
",",
"path",
")",
":",
"return",
"os",
".",
"path",
".",
"relpath",
"(",
"path",
",",
"start",
"=",
"self",
".",
"path",
")"
] | Return a relative filepath to path from Dir path. | [
"Return",
"a",
"relative",
"filepath",
"to",
"path",
"from",
"Dir",
"path",
"."
] | 3a34bbdf042d13c3bec5eef46ff360ee41403874 | https://github.com/frictionlessdata/datapackage-pipelines/blob/3a34bbdf042d13c3bec5eef46ff360ee41403874/datapackage_pipelines/utilities/dirtools.py#L287-L289 | train | 213,460 |
frictionlessdata/datapackage-pipelines | datapackage_pipelines/utilities/dirtools.py | DirState.compute_state | def compute_state(self):
""" Generate the index. """
data = {}
data['directory'] = self._dir.path
data['files'] = list(self._dir.files())
data['subdirs'] = list(self._dir.subdirs())
data['index'] = self.index()
return data | python | def compute_state(self):
""" Generate the index. """
data = {}
data['directory'] = self._dir.path
data['files'] = list(self._dir.files())
data['subdirs'] = list(self._dir.subdirs())
data['index'] = self.index()
return data | [
"def",
"compute_state",
"(",
"self",
")",
":",
"data",
"=",
"{",
"}",
"data",
"[",
"'directory'",
"]",
"=",
"self",
".",
"_dir",
".",
"path",
"data",
"[",
"'files'",
"]",
"=",
"list",
"(",
"self",
".",
"_dir",
".",
"files",
"(",
")",
")",
"data",... | Generate the index. | [
"Generate",
"the",
"index",
"."
] | 3a34bbdf042d13c3bec5eef46ff360ee41403874 | https://github.com/frictionlessdata/datapackage-pipelines/blob/3a34bbdf042d13c3bec5eef46ff360ee41403874/datapackage_pipelines/utilities/dirtools.py#L321-L328 | train | 213,461 |
frictionlessdata/datapackage-pipelines | datapackage_pipelines/web/server.py | basic_auth_required | def basic_auth_required(view_func):
"""
A decorator that can be used to protect specific views with HTTP basic
access authentication. Conditional on having BASIC_AUTH_USERNAME and
BASIC_AUTH_PASSWORD set as env vars.
"""
@wraps(view_func)
def wrapper(*args, **kwargs):
if app.config.get('BASIC_AUTH_ACTIVE', False):
if basic_auth.authenticate():
return view_func(*args, **kwargs)
else:
return basic_auth.challenge()
else:
return view_func(*args, **kwargs)
return wrapper | python | def basic_auth_required(view_func):
"""
A decorator that can be used to protect specific views with HTTP basic
access authentication. Conditional on having BASIC_AUTH_USERNAME and
BASIC_AUTH_PASSWORD set as env vars.
"""
@wraps(view_func)
def wrapper(*args, **kwargs):
if app.config.get('BASIC_AUTH_ACTIVE', False):
if basic_auth.authenticate():
return view_func(*args, **kwargs)
else:
return basic_auth.challenge()
else:
return view_func(*args, **kwargs)
return wrapper | [
"def",
"basic_auth_required",
"(",
"view_func",
")",
":",
"@",
"wraps",
"(",
"view_func",
")",
"def",
"wrapper",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"app",
".",
"config",
".",
"get",
"(",
"'BASIC_AUTH_ACTIVE'",
",",
"False",
")",... | A decorator that can be used to protect specific views with HTTP basic
access authentication. Conditional on having BASIC_AUTH_USERNAME and
BASIC_AUTH_PASSWORD set as env vars. | [
"A",
"decorator",
"that",
"can",
"be",
"used",
"to",
"protect",
"specific",
"views",
"with",
"HTTP",
"basic",
"access",
"authentication",
".",
"Conditional",
"on",
"having",
"BASIC_AUTH_USERNAME",
"and",
"BASIC_AUTH_PASSWORD",
"set",
"as",
"env",
"vars",
"."
] | 3a34bbdf042d13c3bec5eef46ff360ee41403874 | https://github.com/frictionlessdata/datapackage-pipelines/blob/3a34bbdf042d13c3bec5eef46ff360ee41403874/datapackage_pipelines/web/server.py#L87-L102 | train | 213,462 |
frictionlessdata/datapackage-pipelines | datapackage_pipelines/web/server.py | badge | def badge(pipeline_id):
'''An individual pipeline status'''
if not pipeline_id.startswith('./'):
pipeline_id = './' + pipeline_id
pipeline_status = status.get(pipeline_id)
status_color = 'lightgray'
if pipeline_status.pipeline_details:
status_text = pipeline_status.state().lower()
last_execution = pipeline_status.get_last_execution()
success = last_execution.success if last_execution else None
if success is True:
stats = last_execution.stats if last_execution else None
record_count = stats.get('count_of_rows')
if record_count is not None:
status_text += ' (%d records)' % record_count
status_color = 'brightgreen'
elif success is False:
status_color = 'red'
else:
status_text = "not found"
return _make_badge_response('pipeline', status_text, status_color) | python | def badge(pipeline_id):
'''An individual pipeline status'''
if not pipeline_id.startswith('./'):
pipeline_id = './' + pipeline_id
pipeline_status = status.get(pipeline_id)
status_color = 'lightgray'
if pipeline_status.pipeline_details:
status_text = pipeline_status.state().lower()
last_execution = pipeline_status.get_last_execution()
success = last_execution.success if last_execution else None
if success is True:
stats = last_execution.stats if last_execution else None
record_count = stats.get('count_of_rows')
if record_count is not None:
status_text += ' (%d records)' % record_count
status_color = 'brightgreen'
elif success is False:
status_color = 'red'
else:
status_text = "not found"
return _make_badge_response('pipeline', status_text, status_color) | [
"def",
"badge",
"(",
"pipeline_id",
")",
":",
"if",
"not",
"pipeline_id",
".",
"startswith",
"(",
"'./'",
")",
":",
"pipeline_id",
"=",
"'./'",
"+",
"pipeline_id",
"pipeline_status",
"=",
"status",
".",
"get",
"(",
"pipeline_id",
")",
"status_color",
"=",
... | An individual pipeline status | [
"An",
"individual",
"pipeline",
"status"
] | 3a34bbdf042d13c3bec5eef46ff360ee41403874 | https://github.com/frictionlessdata/datapackage-pipelines/blob/3a34bbdf042d13c3bec5eef46ff360ee41403874/datapackage_pipelines/web/server.py#L267-L288 | train | 213,463 |
frictionlessdata/datapackage-pipelines | datapackage_pipelines/web/server.py | badge_collection | def badge_collection(pipeline_path):
'''Status badge for a collection of pipelines.'''
all_pipeline_ids = sorted(status.all_pipeline_ids())
if not pipeline_path.startswith('./'):
pipeline_path = './' + pipeline_path
# Filter pipeline ids to only include those that start with pipeline_path.
path_pipeline_ids = \
[p for p in all_pipeline_ids if p.startswith(pipeline_path)]
statuses = []
for pipeline_id in path_pipeline_ids:
pipeline_status = status.get(pipeline_id)
if pipeline_status is None:
abort(404)
status_text = pipeline_status.state().lower()
statuses.append(status_text)
status_color = 'lightgray'
status_counter = Counter(statuses)
if status_counter:
if len(status_counter) == 1 and status_counter['succeeded'] > 0:
status_color = 'brightgreen'
elif status_counter['failed'] > 0:
status_color = 'red'
elif status_counter['failed'] == 0:
status_color = 'yellow'
status_text = \
', '.join(['{} {}'.format(v, k)
for k, v in status_counter.items()])
else:
status_text = "not found"
return _make_badge_response('pipelines', status_text, status_color) | python | def badge_collection(pipeline_path):
'''Status badge for a collection of pipelines.'''
all_pipeline_ids = sorted(status.all_pipeline_ids())
if not pipeline_path.startswith('./'):
pipeline_path = './' + pipeline_path
# Filter pipeline ids to only include those that start with pipeline_path.
path_pipeline_ids = \
[p for p in all_pipeline_ids if p.startswith(pipeline_path)]
statuses = []
for pipeline_id in path_pipeline_ids:
pipeline_status = status.get(pipeline_id)
if pipeline_status is None:
abort(404)
status_text = pipeline_status.state().lower()
statuses.append(status_text)
status_color = 'lightgray'
status_counter = Counter(statuses)
if status_counter:
if len(status_counter) == 1 and status_counter['succeeded'] > 0:
status_color = 'brightgreen'
elif status_counter['failed'] > 0:
status_color = 'red'
elif status_counter['failed'] == 0:
status_color = 'yellow'
status_text = \
', '.join(['{} {}'.format(v, k)
for k, v in status_counter.items()])
else:
status_text = "not found"
return _make_badge_response('pipelines', status_text, status_color) | [
"def",
"badge_collection",
"(",
"pipeline_path",
")",
":",
"all_pipeline_ids",
"=",
"sorted",
"(",
"status",
".",
"all_pipeline_ids",
"(",
")",
")",
"if",
"not",
"pipeline_path",
".",
"startswith",
"(",
"'./'",
")",
":",
"pipeline_path",
"=",
"'./'",
"+",
"p... | Status badge for a collection of pipelines. | [
"Status",
"badge",
"for",
"a",
"collection",
"of",
"pipelines",
"."
] | 3a34bbdf042d13c3bec5eef46ff360ee41403874 | https://github.com/frictionlessdata/datapackage-pipelines/blob/3a34bbdf042d13c3bec5eef46ff360ee41403874/datapackage_pipelines/web/server.py#L292-L326 | train | 213,464 |
frictionlessdata/datapackage-pipelines | datapackage_pipelines/manager/runner.py | run_pipelines | def run_pipelines(pipeline_id_pattern,
root_dir,
use_cache=True,
dirty=False,
force=False,
concurrency=1,
verbose_logs=True,
progress_cb=None,
slave=False):
"""Run a pipeline by pipeline-id.
pipeline-id supports the '%' wildcard for any-suffix matching.
Use 'all' or '%' for running all pipelines"""
with concurrent.futures.ThreadPoolExecutor(max_workers=concurrency,
thread_name_prefix='T') as executor:
try:
results = []
pending_futures = set()
done_futures = set()
finished_futures = []
progress_thread = None
progress_queue = None
status_manager = status_mgr(root_dir)
if progress_cb is not None:
progress_queue = Queue()
progress_thread = threading.Thread(target=progress_report_handler, args=(progress_cb, progress_queue))
progress_thread.start()
all_specs = specs_to_execute(pipeline_id_pattern, root_dir, status_manager, force, dirty, results)
while True:
done = None
if len(done_futures) > 0:
done = done_futures.pop()
finished_futures.append(done)
done = done.result()[0]
try:
spec = all_specs.send(done)
except StopIteration:
spec = None
if spec is None:
# Wait for all runners to idle...
if len(done_futures) == 0:
if len(pending_futures) > 0:
done_futures, pending_futures = \
concurrent.futures.wait(pending_futures,
return_when=concurrent.futures.FIRST_COMPLETED)
continue
else:
break
else:
continue
if len(spec.validation_errors) > 0:
results.append(
ExecutionResult(spec.pipeline_id,
False,
{},
['init'] + list(map(str, spec.validation_errors)))
)
continue
if slave:
ps = status_manager.get(spec.pipeline_id)
ps.init(spec.pipeline_details,
spec.source_details,
spec.validation_errors,
spec.cache_hash)
eid = gen_execution_id()
if ps.queue_execution(eid, 'manual'):
success, stats, errors = \
execute_pipeline(spec, eid,
use_cache=use_cache)
results.append(ExecutionResult(
spec.pipeline_id,
success,
stats,
errors
))
else:
results.append(
ExecutionResult(spec.pipeline_id,
False,
None,
['Already Running'])
)
else:
f = executor.submit(remote_execute_pipeline,
spec,
root_dir,
use_cache,
verbose_logs,
progress_queue)
pending_futures.add(f)
for f in finished_futures:
ret = f.result()
results.append(ExecutionResult(*ret))
except KeyboardInterrupt:
pass
finally:
if slave:
finalize()
if progress_thread is not None:
progress_queue.put(None)
progress_thread.join()
return results | python | def run_pipelines(pipeline_id_pattern,
root_dir,
use_cache=True,
dirty=False,
force=False,
concurrency=1,
verbose_logs=True,
progress_cb=None,
slave=False):
"""Run a pipeline by pipeline-id.
pipeline-id supports the '%' wildcard for any-suffix matching.
Use 'all' or '%' for running all pipelines"""
with concurrent.futures.ThreadPoolExecutor(max_workers=concurrency,
thread_name_prefix='T') as executor:
try:
results = []
pending_futures = set()
done_futures = set()
finished_futures = []
progress_thread = None
progress_queue = None
status_manager = status_mgr(root_dir)
if progress_cb is not None:
progress_queue = Queue()
progress_thread = threading.Thread(target=progress_report_handler, args=(progress_cb, progress_queue))
progress_thread.start()
all_specs = specs_to_execute(pipeline_id_pattern, root_dir, status_manager, force, dirty, results)
while True:
done = None
if len(done_futures) > 0:
done = done_futures.pop()
finished_futures.append(done)
done = done.result()[0]
try:
spec = all_specs.send(done)
except StopIteration:
spec = None
if spec is None:
# Wait for all runners to idle...
if len(done_futures) == 0:
if len(pending_futures) > 0:
done_futures, pending_futures = \
concurrent.futures.wait(pending_futures,
return_when=concurrent.futures.FIRST_COMPLETED)
continue
else:
break
else:
continue
if len(spec.validation_errors) > 0:
results.append(
ExecutionResult(spec.pipeline_id,
False,
{},
['init'] + list(map(str, spec.validation_errors)))
)
continue
if slave:
ps = status_manager.get(spec.pipeline_id)
ps.init(spec.pipeline_details,
spec.source_details,
spec.validation_errors,
spec.cache_hash)
eid = gen_execution_id()
if ps.queue_execution(eid, 'manual'):
success, stats, errors = \
execute_pipeline(spec, eid,
use_cache=use_cache)
results.append(ExecutionResult(
spec.pipeline_id,
success,
stats,
errors
))
else:
results.append(
ExecutionResult(spec.pipeline_id,
False,
None,
['Already Running'])
)
else:
f = executor.submit(remote_execute_pipeline,
spec,
root_dir,
use_cache,
verbose_logs,
progress_queue)
pending_futures.add(f)
for f in finished_futures:
ret = f.result()
results.append(ExecutionResult(*ret))
except KeyboardInterrupt:
pass
finally:
if slave:
finalize()
if progress_thread is not None:
progress_queue.put(None)
progress_thread.join()
return results | [
"def",
"run_pipelines",
"(",
"pipeline_id_pattern",
",",
"root_dir",
",",
"use_cache",
"=",
"True",
",",
"dirty",
"=",
"False",
",",
"force",
"=",
"False",
",",
"concurrency",
"=",
"1",
",",
"verbose_logs",
"=",
"True",
",",
"progress_cb",
"=",
"None",
","... | Run a pipeline by pipeline-id.
pipeline-id supports the '%' wildcard for any-suffix matching.
Use 'all' or '%' for running all pipelines | [
"Run",
"a",
"pipeline",
"by",
"pipeline",
"-",
"id",
".",
"pipeline",
"-",
"id",
"supports",
"the",
"%",
"wildcard",
"for",
"any",
"-",
"suffix",
"matching",
".",
"Use",
"all",
"or",
"%",
"for",
"running",
"all",
"pipelines"
] | 3a34bbdf042d13c3bec5eef46ff360ee41403874 | https://github.com/frictionlessdata/datapackage-pipelines/blob/3a34bbdf042d13c3bec5eef46ff360ee41403874/datapackage_pipelines/manager/runner.py#L161-L274 | train | 213,465 |
pyecore/pyecore | pyecore/ordered_set_patch.py | insert | def insert(self, index, key):
"""Adds an element at a dedicated position in an OrderedSet.
This implementation is meant for the OrderedSet from the ordered_set
package only.
"""
if key in self.map:
return
# compute the right index
size = len(self.items)
if index < 0:
index = size + index if size + index > 0 else 0
else:
index = index if index < size else size
# insert the value
self.items.insert(index, key)
for k, v in self.map.items():
if v >= index:
self.map[k] = v + 1
self.map[key] = index | python | def insert(self, index, key):
"""Adds an element at a dedicated position in an OrderedSet.
This implementation is meant for the OrderedSet from the ordered_set
package only.
"""
if key in self.map:
return
# compute the right index
size = len(self.items)
if index < 0:
index = size + index if size + index > 0 else 0
else:
index = index if index < size else size
# insert the value
self.items.insert(index, key)
for k, v in self.map.items():
if v >= index:
self.map[k] = v + 1
self.map[key] = index | [
"def",
"insert",
"(",
"self",
",",
"index",
",",
"key",
")",
":",
"if",
"key",
"in",
"self",
".",
"map",
":",
"return",
"# compute the right index",
"size",
"=",
"len",
"(",
"self",
".",
"items",
")",
"if",
"index",
"<",
"0",
":",
"index",
"=",
"si... | Adds an element at a dedicated position in an OrderedSet.
This implementation is meant for the OrderedSet from the ordered_set
package only. | [
"Adds",
"an",
"element",
"at",
"a",
"dedicated",
"position",
"in",
"an",
"OrderedSet",
"."
] | 22b67ad8799594f8f44fd8bee497583d4f12ed63 | https://github.com/pyecore/pyecore/blob/22b67ad8799594f8f44fd8bee497583d4f12ed63/pyecore/ordered_set_patch.py#L8-L27 | train | 213,466 |
pyecore/pyecore | pyecore/ordered_set_patch.py | pop | def pop(self, index=None):
"""Removes an element at the tail of the OrderedSet or at a dedicated
position.
This implementation is meant for the OrderedSet from the ordered_set
package only.
"""
if not self.items:
raise KeyError('Set is empty')
def remove_index(i):
elem = self.items[i]
del self.items[i]
del self.map[elem]
return elem
if index is None:
elem = remove_index(-1)
else:
size = len(self.items)
if index < 0:
index = size + index
if index < 0:
raise IndexError('assignement index out of range')
elif index >= size:
raise IndexError('assignement index out of range')
elem = remove_index(index)
for k, v in self.map.items():
if v >= index and v > 0:
self.map[k] = v - 1
return elem | python | def pop(self, index=None):
"""Removes an element at the tail of the OrderedSet or at a dedicated
position.
This implementation is meant for the OrderedSet from the ordered_set
package only.
"""
if not self.items:
raise KeyError('Set is empty')
def remove_index(i):
elem = self.items[i]
del self.items[i]
del self.map[elem]
return elem
if index is None:
elem = remove_index(-1)
else:
size = len(self.items)
if index < 0:
index = size + index
if index < 0:
raise IndexError('assignement index out of range')
elif index >= size:
raise IndexError('assignement index out of range')
elem = remove_index(index)
for k, v in self.map.items():
if v >= index and v > 0:
self.map[k] = v - 1
return elem | [
"def",
"pop",
"(",
"self",
",",
"index",
"=",
"None",
")",
":",
"if",
"not",
"self",
".",
"items",
":",
"raise",
"KeyError",
"(",
"'Set is empty'",
")",
"def",
"remove_index",
"(",
"i",
")",
":",
"elem",
"=",
"self",
".",
"items",
"[",
"i",
"]",
... | Removes an element at the tail of the OrderedSet or at a dedicated
position.
This implementation is meant for the OrderedSet from the ordered_set
package only. | [
"Removes",
"an",
"element",
"at",
"the",
"tail",
"of",
"the",
"OrderedSet",
"or",
"at",
"a",
"dedicated",
"position",
"."
] | 22b67ad8799594f8f44fd8bee497583d4f12ed63 | https://github.com/pyecore/pyecore/blob/22b67ad8799594f8f44fd8bee497583d4f12ed63/pyecore/ordered_set_patch.py#L30-L59 | train | 213,467 |
pyecore/pyecore | pyecore/ecore.py | EMetaclass | def EMetaclass(cls):
"""Class decorator for creating PyEcore metaclass."""
superclass = cls.__bases__
if not issubclass(cls, EObject):
sclasslist = list(superclass)
if object in superclass:
index = sclasslist.index(object)
sclasslist.insert(index, EObject)
sclasslist.remove(object)
else:
sclasslist.insert(0, EObject)
superclass = tuple(sclasslist)
orig_vars = cls.__dict__.copy()
slots = orig_vars.get('__slots__')
if slots is not None:
if isinstance(slots, str):
slots = [slots]
for slots_var in slots:
orig_vars.pop(slots_var)
orig_vars.pop('__dict__', None)
orig_vars.pop('__weakref__', None)
return MetaEClass(cls.__name__, superclass, orig_vars) | python | def EMetaclass(cls):
"""Class decorator for creating PyEcore metaclass."""
superclass = cls.__bases__
if not issubclass(cls, EObject):
sclasslist = list(superclass)
if object in superclass:
index = sclasslist.index(object)
sclasslist.insert(index, EObject)
sclasslist.remove(object)
else:
sclasslist.insert(0, EObject)
superclass = tuple(sclasslist)
orig_vars = cls.__dict__.copy()
slots = orig_vars.get('__slots__')
if slots is not None:
if isinstance(slots, str):
slots = [slots]
for slots_var in slots:
orig_vars.pop(slots_var)
orig_vars.pop('__dict__', None)
orig_vars.pop('__weakref__', None)
return MetaEClass(cls.__name__, superclass, orig_vars) | [
"def",
"EMetaclass",
"(",
"cls",
")",
":",
"superclass",
"=",
"cls",
".",
"__bases__",
"if",
"not",
"issubclass",
"(",
"cls",
",",
"EObject",
")",
":",
"sclasslist",
"=",
"list",
"(",
"superclass",
")",
"if",
"object",
"in",
"superclass",
":",
"index",
... | Class decorator for creating PyEcore metaclass. | [
"Class",
"decorator",
"for",
"creating",
"PyEcore",
"metaclass",
"."
] | 22b67ad8799594f8f44fd8bee497583d4f12ed63 | https://github.com/pyecore/pyecore/blob/22b67ad8799594f8f44fd8bee497583d4f12ed63/pyecore/ecore.py#L873-L894 | train | 213,468 |
pyecore/pyecore | pyecore/ecore.py | EModelElement.getEAnnotation | def getEAnnotation(self, source):
"""Return the annotation with a matching source attribute."""
for annotation in self.eAnnotations:
if annotation.source == source:
return annotation
return None | python | def getEAnnotation(self, source):
"""Return the annotation with a matching source attribute."""
for annotation in self.eAnnotations:
if annotation.source == source:
return annotation
return None | [
"def",
"getEAnnotation",
"(",
"self",
",",
"source",
")",
":",
"for",
"annotation",
"in",
"self",
".",
"eAnnotations",
":",
"if",
"annotation",
".",
"source",
"==",
"source",
":",
"return",
"annotation",
"return",
"None"
] | Return the annotation with a matching source attribute. | [
"Return",
"the",
"annotation",
"with",
"a",
"matching",
"source",
"attribute",
"."
] | 22b67ad8799594f8f44fd8bee497583d4f12ed63 | https://github.com/pyecore/pyecore/blob/22b67ad8799594f8f44fd8bee497583d4f12ed63/pyecore/ecore.py#L300-L305 | train | 213,469 |
bitlabstudio/django-calendarium | calendarium/templatetags/calendarium_tags.py | get_week_URL | def get_week_URL(date, day=0):
"""
Returns the week view URL for a given date.
:param date: A date instance.
:param day: Day number in a month.
"""
if day < 1:
day = 1
date = datetime(year=date.year, month=date.month, day=day, tzinfo=utc)
return reverse('calendar_week', kwargs={'year': date.isocalendar()[0],
'week': date.isocalendar()[1]}) | python | def get_week_URL(date, day=0):
"""
Returns the week view URL for a given date.
:param date: A date instance.
:param day: Day number in a month.
"""
if day < 1:
day = 1
date = datetime(year=date.year, month=date.month, day=day, tzinfo=utc)
return reverse('calendar_week', kwargs={'year': date.isocalendar()[0],
'week': date.isocalendar()[1]}) | [
"def",
"get_week_URL",
"(",
"date",
",",
"day",
"=",
"0",
")",
":",
"if",
"day",
"<",
"1",
":",
"day",
"=",
"1",
"date",
"=",
"datetime",
"(",
"year",
"=",
"date",
".",
"year",
",",
"month",
"=",
"date",
".",
"month",
",",
"day",
"=",
"day",
... | Returns the week view URL for a given date.
:param date: A date instance.
:param day: Day number in a month. | [
"Returns",
"the",
"week",
"view",
"URL",
"for",
"a",
"given",
"date",
"."
] | cabe69eff965dff80893012fb4dfe724e995807a | https://github.com/bitlabstudio/django-calendarium/blob/cabe69eff965dff80893012fb4dfe724e995807a/calendarium/templatetags/calendarium_tags.py#L12-L24 | train | 213,470 |
bitlabstudio/django-calendarium | calendarium/utils.py | monday_of_week | def monday_of_week(year, week):
"""
Returns a datetime for the monday of the given week of the given year.
"""
str_time = time.strptime('{0} {1} 1'.format(year, week), '%Y %W %w')
date = timezone.datetime(year=str_time.tm_year, month=str_time.tm_mon,
day=str_time.tm_mday, tzinfo=timezone.utc)
if timezone.datetime(year, 1, 4).isoweekday() > 4:
# ISO 8601 where week 1 is the first week that has at least 4 days in
# the current year
date -= timezone.timedelta(days=7)
return date | python | def monday_of_week(year, week):
"""
Returns a datetime for the monday of the given week of the given year.
"""
str_time = time.strptime('{0} {1} 1'.format(year, week), '%Y %W %w')
date = timezone.datetime(year=str_time.tm_year, month=str_time.tm_mon,
day=str_time.tm_mday, tzinfo=timezone.utc)
if timezone.datetime(year, 1, 4).isoweekday() > 4:
# ISO 8601 where week 1 is the first week that has at least 4 days in
# the current year
date -= timezone.timedelta(days=7)
return date | [
"def",
"monday_of_week",
"(",
"year",
",",
"week",
")",
":",
"str_time",
"=",
"time",
".",
"strptime",
"(",
"'{0} {1} 1'",
".",
"format",
"(",
"year",
",",
"week",
")",
",",
"'%Y %W %w'",
")",
"date",
"=",
"timezone",
".",
"datetime",
"(",
"year",
"=",... | Returns a datetime for the monday of the given week of the given year. | [
"Returns",
"a",
"datetime",
"for",
"the",
"monday",
"of",
"the",
"given",
"week",
"of",
"the",
"given",
"year",
"."
] | cabe69eff965dff80893012fb4dfe724e995807a | https://github.com/bitlabstudio/django-calendarium/blob/cabe69eff965dff80893012fb4dfe724e995807a/calendarium/utils.py#L28-L40 | train | 213,471 |
bitlabstudio/django-calendarium | calendarium/utils.py | OccurrenceReplacer.get_occurrence | def get_occurrence(self, occ):
"""
Return a persisted occurrences matching the occ and remove it from
lookup since it has already been matched
"""
return self.lookup.pop(
(occ.event, occ.original_start, occ.original_end),
occ) | python | def get_occurrence(self, occ):
"""
Return a persisted occurrences matching the occ and remove it from
lookup since it has already been matched
"""
return self.lookup.pop(
(occ.event, occ.original_start, occ.original_end),
occ) | [
"def",
"get_occurrence",
"(",
"self",
",",
"occ",
")",
":",
"return",
"self",
".",
"lookup",
".",
"pop",
"(",
"(",
"occ",
".",
"event",
",",
"occ",
".",
"original_start",
",",
"occ",
".",
"original_end",
")",
",",
"occ",
")"
] | Return a persisted occurrences matching the occ and remove it from
lookup since it has already been matched | [
"Return",
"a",
"persisted",
"occurrences",
"matching",
"the",
"occ",
"and",
"remove",
"it",
"from",
"lookup",
"since",
"it",
"has",
"already",
"been",
"matched"
] | cabe69eff965dff80893012fb4dfe724e995807a | https://github.com/bitlabstudio/django-calendarium/blob/cabe69eff965dff80893012fb4dfe724e995807a/calendarium/utils.py#L57-L64 | train | 213,472 |
InQuest/python-sandboxapi | sandboxapi/__init__.py | SandboxAPI._request | def _request(self, uri, method='GET', params=None, files=None, headers=None, auth=None):
"""Robustness wrapper. Tries up to 3 times to dance with the Sandbox API.
:type uri: str
:param uri: URI to append to base_url.
:type params: dict
:param params: Optional parameters for API.
:type files: dict
:param files: Optional dictionary of files for multipart post.
:type headers: dict
:param headers: Optional headers to send to the API.
:type auth: dict
:param auth: Optional authentication object to send to the API.
:rtype: requests.response.
:return: Response object.
:raises SandboxError: If all attempts failed.
"""
# make up to three attempts to dance with the API, use a jittered
# exponential back-off delay
for i in range(3):
try:
full_url = '{b}{u}'.format(b=self.api_url, u=uri)
response = None
if method == 'POST':
response = requests.post(full_url, data=params, files=files, headers=headers,
verify=self.verify_ssl, auth=auth, proxies=self.proxies)
else:
response = requests.get(full_url, params=params, headers=headers,
verify=self.verify_ssl, auth=auth, proxies=self.proxies)
# if the status code is 503, is no longer available.
if response.status_code >= 500:
# server error
self.server_available = False
raise SandboxError("server returned {c} status code on {u}, assuming unavailable...".format(
c=response.status_code, u=response.url))
else:
return response
# 0.4, 1.6, 6.4, 25.6, ...
except requests.exceptions.RequestException:
time.sleep(random.uniform(0, 4 ** i * 100 / 1000.0))
# if we couldn't reach the API, we assume that the box is down and lower availability flag.
self.server_available = False
# raise an exception.
msg = "exceeded 3 attempts with sandbox API: {u}, p:{p}, f:{f}".format(u=full_url,
p=params, f=files)
try:
msg += "\n" + response.content.decode('utf-8')
except AttributeError:
pass
raise SandboxError(msg) | python | def _request(self, uri, method='GET', params=None, files=None, headers=None, auth=None):
"""Robustness wrapper. Tries up to 3 times to dance with the Sandbox API.
:type uri: str
:param uri: URI to append to base_url.
:type params: dict
:param params: Optional parameters for API.
:type files: dict
:param files: Optional dictionary of files for multipart post.
:type headers: dict
:param headers: Optional headers to send to the API.
:type auth: dict
:param auth: Optional authentication object to send to the API.
:rtype: requests.response.
:return: Response object.
:raises SandboxError: If all attempts failed.
"""
# make up to three attempts to dance with the API, use a jittered
# exponential back-off delay
for i in range(3):
try:
full_url = '{b}{u}'.format(b=self.api_url, u=uri)
response = None
if method == 'POST':
response = requests.post(full_url, data=params, files=files, headers=headers,
verify=self.verify_ssl, auth=auth, proxies=self.proxies)
else:
response = requests.get(full_url, params=params, headers=headers,
verify=self.verify_ssl, auth=auth, proxies=self.proxies)
# if the status code is 503, is no longer available.
if response.status_code >= 500:
# server error
self.server_available = False
raise SandboxError("server returned {c} status code on {u}, assuming unavailable...".format(
c=response.status_code, u=response.url))
else:
return response
# 0.4, 1.6, 6.4, 25.6, ...
except requests.exceptions.RequestException:
time.sleep(random.uniform(0, 4 ** i * 100 / 1000.0))
# if we couldn't reach the API, we assume that the box is down and lower availability flag.
self.server_available = False
# raise an exception.
msg = "exceeded 3 attempts with sandbox API: {u}, p:{p}, f:{f}".format(u=full_url,
p=params, f=files)
try:
msg += "\n" + response.content.decode('utf-8')
except AttributeError:
pass
raise SandboxError(msg) | [
"def",
"_request",
"(",
"self",
",",
"uri",
",",
"method",
"=",
"'GET'",
",",
"params",
"=",
"None",
",",
"files",
"=",
"None",
",",
"headers",
"=",
"None",
",",
"auth",
"=",
"None",
")",
":",
"# make up to three attempts to dance with the API, use a jittered"... | Robustness wrapper. Tries up to 3 times to dance with the Sandbox API.
:type uri: str
:param uri: URI to append to base_url.
:type params: dict
:param params: Optional parameters for API.
:type files: dict
:param files: Optional dictionary of files for multipart post.
:type headers: dict
:param headers: Optional headers to send to the API.
:type auth: dict
:param auth: Optional authentication object to send to the API.
:rtype: requests.response.
:return: Response object.
:raises SandboxError: If all attempts failed. | [
"Robustness",
"wrapper",
".",
"Tries",
"up",
"to",
"3",
"times",
"to",
"dance",
"with",
"the",
"Sandbox",
"API",
"."
] | 9bad73f453e25d7d23e7b4b1ae927f44a35a5bc3 | https://github.com/InQuest/python-sandboxapi/blob/9bad73f453e25d7d23e7b4b1ae927f44a35a5bc3/sandboxapi/__init__.py#L47-L105 | train | 213,473 |
InQuest/python-sandboxapi | sandboxapi/cuckoo.py | CuckooAPI.analyses | def analyses(self):
"""Retrieve a list of analyzed samples.
:rtype: list
:return: List of objects referencing each analyzed file.
"""
response = self._request("tasks/list")
return json.loads(response.content.decode('utf-8'))['tasks'] | python | def analyses(self):
"""Retrieve a list of analyzed samples.
:rtype: list
:return: List of objects referencing each analyzed file.
"""
response = self._request("tasks/list")
return json.loads(response.content.decode('utf-8'))['tasks'] | [
"def",
"analyses",
"(",
"self",
")",
":",
"response",
"=",
"self",
".",
"_request",
"(",
"\"tasks/list\"",
")",
"return",
"json",
".",
"loads",
"(",
"response",
".",
"content",
".",
"decode",
"(",
"'utf-8'",
")",
")",
"[",
"'tasks'",
"]"
] | Retrieve a list of analyzed samples.
:rtype: list
:return: List of objects referencing each analyzed file. | [
"Retrieve",
"a",
"list",
"of",
"analyzed",
"samples",
"."
] | 9bad73f453e25d7d23e7b4b1ae927f44a35a5bc3 | https://github.com/InQuest/python-sandboxapi/blob/9bad73f453e25d7d23e7b4b1ae927f44a35a5bc3/sandboxapi/cuckoo.py#L40-L48 | train | 213,474 |
InQuest/python-sandboxapi | sandboxapi/cuckoo.py | CuckooAPI.check | def check(self, item_id):
"""Check if an analysis is complete
:type item_id: int
:param item_id: task_id to check.
:rtype: bool
:return: Boolean indicating if a report is done or not.
"""
response = self._request("tasks/view/{id}".format(id=item_id))
if response.status_code == 404:
# probably an unknown task id
return False
try:
content = json.loads(response.content.decode('utf-8'))
status = content['task']["status"]
if status == 'completed' or status == "reported":
return True
except ValueError as e:
raise sandboxapi.SandboxError(e)
return False | python | def check(self, item_id):
"""Check if an analysis is complete
:type item_id: int
:param item_id: task_id to check.
:rtype: bool
:return: Boolean indicating if a report is done or not.
"""
response = self._request("tasks/view/{id}".format(id=item_id))
if response.status_code == 404:
# probably an unknown task id
return False
try:
content = json.loads(response.content.decode('utf-8'))
status = content['task']["status"]
if status == 'completed' or status == "reported":
return True
except ValueError as e:
raise sandboxapi.SandboxError(e)
return False | [
"def",
"check",
"(",
"self",
",",
"item_id",
")",
":",
"response",
"=",
"self",
".",
"_request",
"(",
"\"tasks/view/{id}\"",
".",
"format",
"(",
"id",
"=",
"item_id",
")",
")",
"if",
"response",
".",
"status_code",
"==",
"404",
":",
"# probably an unknown ... | Check if an analysis is complete
:type item_id: int
:param item_id: task_id to check.
:rtype: bool
:return: Boolean indicating if a report is done or not. | [
"Check",
"if",
"an",
"analysis",
"is",
"complete"
] | 9bad73f453e25d7d23e7b4b1ae927f44a35a5bc3 | https://github.com/InQuest/python-sandboxapi/blob/9bad73f453e25d7d23e7b4b1ae927f44a35a5bc3/sandboxapi/cuckoo.py#L75-L99 | train | 213,475 |
InQuest/python-sandboxapi | sandboxapi/cuckoo.py | CuckooAPI.delete | def delete(self, item_id):
"""Delete the reports associated with the given item_id.
:type item_id: int
:param item_id: Report ID to delete.
:rtype: bool
:return: True on success, False otherwise.
"""
try:
response = self._request("tasks/delete/{id}".format(id=item_id))
if response.status_code == 200:
return True
except sandboxapi.SandboxError:
pass
return False | python | def delete(self, item_id):
"""Delete the reports associated with the given item_id.
:type item_id: int
:param item_id: Report ID to delete.
:rtype: bool
:return: True on success, False otherwise.
"""
try:
response = self._request("tasks/delete/{id}".format(id=item_id))
if response.status_code == 200:
return True
except sandboxapi.SandboxError:
pass
return False | [
"def",
"delete",
"(",
"self",
",",
"item_id",
")",
":",
"try",
":",
"response",
"=",
"self",
".",
"_request",
"(",
"\"tasks/delete/{id}\"",
".",
"format",
"(",
"id",
"=",
"item_id",
")",
")",
"if",
"response",
".",
"status_code",
"==",
"200",
":",
"ret... | Delete the reports associated with the given item_id.
:type item_id: int
:param item_id: Report ID to delete.
:rtype: bool
:return: True on success, False otherwise. | [
"Delete",
"the",
"reports",
"associated",
"with",
"the",
"given",
"item_id",
"."
] | 9bad73f453e25d7d23e7b4b1ae927f44a35a5bc3 | https://github.com/InQuest/python-sandboxapi/blob/9bad73f453e25d7d23e7b4b1ae927f44a35a5bc3/sandboxapi/cuckoo.py#L101-L119 | train | 213,476 |
InQuest/python-sandboxapi | sandboxapi/cuckoo.py | CuckooAPI.is_available | def is_available(self):
"""Determine if the Cuckoo Sandbox API servers are alive or in maintenance mode.
:rtype: bool
:return: True if service is available, False otherwise.
"""
# if the availability flag is raised, return True immediately.
# NOTE: subsequent API failures will lower this flag. we do this here
# to ensure we don't keep hitting Cuckoo with requests while
# availability is there.
if self.server_available:
return True
# otherwise, we have to check with the cloud.
else:
try:
response = self._request("cuckoo/status")
# we've got cuckoo.
if response.status_code == 200:
self.server_available = True
return True
except sandboxapi.SandboxError:
pass
self.server_available = False
return False | python | def is_available(self):
"""Determine if the Cuckoo Sandbox API servers are alive or in maintenance mode.
:rtype: bool
:return: True if service is available, False otherwise.
"""
# if the availability flag is raised, return True immediately.
# NOTE: subsequent API failures will lower this flag. we do this here
# to ensure we don't keep hitting Cuckoo with requests while
# availability is there.
if self.server_available:
return True
# otherwise, we have to check with the cloud.
else:
try:
response = self._request("cuckoo/status")
# we've got cuckoo.
if response.status_code == 200:
self.server_available = True
return True
except sandboxapi.SandboxError:
pass
self.server_available = False
return False | [
"def",
"is_available",
"(",
"self",
")",
":",
"# if the availability flag is raised, return True immediately.",
"# NOTE: subsequent API failures will lower this flag. we do this here",
"# to ensure we don't keep hitting Cuckoo with requests while",
"# availability is there.",
"if",
"self",
"... | Determine if the Cuckoo Sandbox API servers are alive or in maintenance mode.
:rtype: bool
:return: True if service is available, False otherwise. | [
"Determine",
"if",
"the",
"Cuckoo",
"Sandbox",
"API",
"servers",
"are",
"alive",
"or",
"in",
"maintenance",
"mode",
"."
] | 9bad73f453e25d7d23e7b4b1ae927f44a35a5bc3 | https://github.com/InQuest/python-sandboxapi/blob/9bad73f453e25d7d23e7b4b1ae927f44a35a5bc3/sandboxapi/cuckoo.py#L121-L148 | train | 213,477 |
InQuest/python-sandboxapi | sandboxapi/cuckoo.py | CuckooAPI.queue_size | def queue_size(self):
"""Determine Cuckoo sandbox queue length
There isn't a built in way to do this like with Joe
:rtype: int
:return: Number of submissions in sandbox queue.
"""
response = self._request("tasks/list")
tasks = json.loads(response.content.decode('utf-8'))["tasks"]
return len([t for t in tasks if t['status'] == 'pending']) | python | def queue_size(self):
"""Determine Cuckoo sandbox queue length
There isn't a built in way to do this like with Joe
:rtype: int
:return: Number of submissions in sandbox queue.
"""
response = self._request("tasks/list")
tasks = json.loads(response.content.decode('utf-8'))["tasks"]
return len([t for t in tasks if t['status'] == 'pending']) | [
"def",
"queue_size",
"(",
"self",
")",
":",
"response",
"=",
"self",
".",
"_request",
"(",
"\"tasks/list\"",
")",
"tasks",
"=",
"json",
".",
"loads",
"(",
"response",
".",
"content",
".",
"decode",
"(",
"'utf-8'",
")",
")",
"[",
"\"tasks\"",
"]",
"retu... | Determine Cuckoo sandbox queue length
There isn't a built in way to do this like with Joe
:rtype: int
:return: Number of submissions in sandbox queue. | [
"Determine",
"Cuckoo",
"sandbox",
"queue",
"length"
] | 9bad73f453e25d7d23e7b4b1ae927f44a35a5bc3 | https://github.com/InQuest/python-sandboxapi/blob/9bad73f453e25d7d23e7b4b1ae927f44a35a5bc3/sandboxapi/cuckoo.py#L150-L161 | train | 213,478 |
InQuest/python-sandboxapi | sandboxapi/joe.py | JoeAPI.is_available | def is_available(self):
"""Determine if the Joe Sandbox API server is alive.
:rtype: bool
:return: True if service is available, False otherwise.
"""
# if the availability flag is raised, return True immediately.
# NOTE: subsequent API failures will lower this flag. we do this here
# to ensure we don't keep hitting Joe with requests while availability
# is there.
if self.server_available:
return True
# otherwise, we have to check with the cloud.
else:
try:
self.server_available = self.jbx.server_online()
return self.server_available
except jbxapi.JoeException:
pass
self.server_available = False
return False | python | def is_available(self):
"""Determine if the Joe Sandbox API server is alive.
:rtype: bool
:return: True if service is available, False otherwise.
"""
# if the availability flag is raised, return True immediately.
# NOTE: subsequent API failures will lower this flag. we do this here
# to ensure we don't keep hitting Joe with requests while availability
# is there.
if self.server_available:
return True
# otherwise, we have to check with the cloud.
else:
try:
self.server_available = self.jbx.server_online()
return self.server_available
except jbxapi.JoeException:
pass
self.server_available = False
return False | [
"def",
"is_available",
"(",
"self",
")",
":",
"# if the availability flag is raised, return True immediately.",
"# NOTE: subsequent API failures will lower this flag. we do this here",
"# to ensure we don't keep hitting Joe with requests while availability",
"# is there.",
"if",
"self",
".",... | Determine if the Joe Sandbox API server is alive.
:rtype: bool
:return: True if service is available, False otherwise. | [
"Determine",
"if",
"the",
"Joe",
"Sandbox",
"API",
"server",
"is",
"alive",
"."
] | 9bad73f453e25d7d23e7b4b1ae927f44a35a5bc3 | https://github.com/InQuest/python-sandboxapi/blob/9bad73f453e25d7d23e7b4b1ae927f44a35a5bc3/sandboxapi/joe.py#L53-L76 | train | 213,479 |
vstinner/perf | perf/_cpu_utils.py | get_isolated_cpus | def get_isolated_cpus():
"""Get the list of isolated CPUs.
Return a sorted list of CPU identifiers, or return None if no CPU is
isolated.
"""
# The cpu/isolated sysfs was added in Linux 4.2
# (commit 59f30abe94bff50636c8cad45207a01fdcb2ee49)
path = sysfs_path('devices/system/cpu/isolated')
isolated = read_first_line(path)
if isolated:
return parse_cpu_list(isolated)
cmdline = read_first_line(proc_path('cmdline'))
if cmdline:
match = re.search(r'\bisolcpus=([^ ]+)', cmdline)
if match:
isolated = match.group(1)
return parse_cpu_list(isolated)
return None | python | def get_isolated_cpus():
"""Get the list of isolated CPUs.
Return a sorted list of CPU identifiers, or return None if no CPU is
isolated.
"""
# The cpu/isolated sysfs was added in Linux 4.2
# (commit 59f30abe94bff50636c8cad45207a01fdcb2ee49)
path = sysfs_path('devices/system/cpu/isolated')
isolated = read_first_line(path)
if isolated:
return parse_cpu_list(isolated)
cmdline = read_first_line(proc_path('cmdline'))
if cmdline:
match = re.search(r'\bisolcpus=([^ ]+)', cmdline)
if match:
isolated = match.group(1)
return parse_cpu_list(isolated)
return None | [
"def",
"get_isolated_cpus",
"(",
")",
":",
"# The cpu/isolated sysfs was added in Linux 4.2",
"# (commit 59f30abe94bff50636c8cad45207a01fdcb2ee49)",
"path",
"=",
"sysfs_path",
"(",
"'devices/system/cpu/isolated'",
")",
"isolated",
"=",
"read_first_line",
"(",
"path",
")",
"if",... | Get the list of isolated CPUs.
Return a sorted list of CPU identifiers, or return None if no CPU is
isolated. | [
"Get",
"the",
"list",
"of",
"isolated",
"CPUs",
"."
] | cf096c0c0c955d0aa1c893847fa6393ba4922ada | https://github.com/vstinner/perf/blob/cf096c0c0c955d0aa1c893847fa6393ba4922ada/perf/_cpu_utils.py#L127-L147 | train | 213,480 |
vstinner/perf | perf/__main__.py | Benchmarks.has_same_unique_benchmark | def has_same_unique_benchmark(self):
"True if all suites have one benchmark with the same name"
if any(len(suite) > 1 for suite in self.suites):
return False
names = self.suites[0].get_benchmark_names()
return all(suite.get_benchmark_names() == names
for suite in self.suites[1:]) | python | def has_same_unique_benchmark(self):
"True if all suites have one benchmark with the same name"
if any(len(suite) > 1 for suite in self.suites):
return False
names = self.suites[0].get_benchmark_names()
return all(suite.get_benchmark_names() == names
for suite in self.suites[1:]) | [
"def",
"has_same_unique_benchmark",
"(",
"self",
")",
":",
"if",
"any",
"(",
"len",
"(",
"suite",
")",
">",
"1",
"for",
"suite",
"in",
"self",
".",
"suites",
")",
":",
"return",
"False",
"names",
"=",
"self",
".",
"suites",
"[",
"0",
"]",
".",
"get... | True if all suites have one benchmark with the same name | [
"True",
"if",
"all",
"suites",
"have",
"one",
"benchmark",
"with",
"the",
"same",
"name"
] | cf096c0c0c955d0aa1c893847fa6393ba4922ada | https://github.com/vstinner/perf/blob/cf096c0c0c955d0aa1c893847fa6393ba4922ada/perf/__main__.py#L257-L263 | train | 213,481 |
mdgart/sentrylogs | sentrylogs/helpers.py | send_message | def send_message(message, params, site, logger):
"""Send a message to the Sentry server"""
client.capture(
'Message',
message=message,
params=tuple(params),
data={
'site': site,
'logger': logger,
},
) | python | def send_message(message, params, site, logger):
"""Send a message to the Sentry server"""
client.capture(
'Message',
message=message,
params=tuple(params),
data={
'site': site,
'logger': logger,
},
) | [
"def",
"send_message",
"(",
"message",
",",
"params",
",",
"site",
",",
"logger",
")",
":",
"client",
".",
"capture",
"(",
"'Message'",
",",
"message",
"=",
"message",
",",
"params",
"=",
"tuple",
"(",
"params",
")",
",",
"data",
"=",
"{",
"'site'",
... | Send a message to the Sentry server | [
"Send",
"a",
"message",
"to",
"the",
"Sentry",
"server"
] | 1bff3f2c8e37265430269cdf1ed8f860ce2dd72a | https://github.com/mdgart/sentrylogs/blob/1bff3f2c8e37265430269cdf1ed8f860ce2dd72a/sentrylogs/helpers.py#L7-L17 | train | 213,482 |
mdgart/sentrylogs | sentrylogs/bin/sentrylogs.py | get_command_line_args | def get_command_line_args():
"""CLI command line arguments handling"""
parser = argparse.ArgumentParser(description='Send logs to Django Sentry.')
parser.add_argument('--sentryconfig', '-c', default=None,
help='A configuration file (.ini, .yaml) of some '
'Sentry integration to extract the Sentry DSN from')
parser.add_argument('--sentrydsn', '-s', default="",
help='The Sentry DSN string (overrides -c)')
parser.add_argument('--daemonize', '-d', default=False,
action='store_const', const=True,
help='Run this script in background')
parser.add_argument('--follow', '-f', default="all",
help='Which logs to follow, default ALL')
parser.add_argument('--nginxerrorpath', '-n', default=None,
help='Nginx error log path')
return parser.parse_args() | python | def get_command_line_args():
"""CLI command line arguments handling"""
parser = argparse.ArgumentParser(description='Send logs to Django Sentry.')
parser.add_argument('--sentryconfig', '-c', default=None,
help='A configuration file (.ini, .yaml) of some '
'Sentry integration to extract the Sentry DSN from')
parser.add_argument('--sentrydsn', '-s', default="",
help='The Sentry DSN string (overrides -c)')
parser.add_argument('--daemonize', '-d', default=False,
action='store_const', const=True,
help='Run this script in background')
parser.add_argument('--follow', '-f', default="all",
help='Which logs to follow, default ALL')
parser.add_argument('--nginxerrorpath', '-n', default=None,
help='Nginx error log path')
return parser.parse_args() | [
"def",
"get_command_line_args",
"(",
")",
":",
"parser",
"=",
"argparse",
".",
"ArgumentParser",
"(",
"description",
"=",
"'Send logs to Django Sentry.'",
")",
"parser",
".",
"add_argument",
"(",
"'--sentryconfig'",
",",
"'-c'",
",",
"default",
"=",
"None",
",",
... | CLI command line arguments handling | [
"CLI",
"command",
"line",
"arguments",
"handling"
] | 1bff3f2c8e37265430269cdf1ed8f860ce2dd72a | https://github.com/mdgart/sentrylogs/blob/1bff3f2c8e37265430269cdf1ed8f860ce2dd72a/sentrylogs/bin/sentrylogs.py#L17-L34 | train | 213,483 |
mdgart/sentrylogs | sentrylogs/bin/sentrylogs.py | process_arguments | def process_arguments(args):
"""Deal with arguments passed on the command line"""
if args.sentryconfig:
print('Parsing DSN from %s' % args.sentryconfig)
os.environ['SENTRY_DSN'] = parse_sentry_configuration(args.sentryconfig)
if args.sentrydsn:
print('Using the DSN %s' % args.sentrydsn)
os.environ['SENTRY_DSN'] = args.sentrydsn
if args.nginxerrorpath:
print('Using the Nginx error log path %s' % args.nginxerrorpath)
os.environ['NGINX_ERROR_PATH'] = args.nginxerrorpath
from ..conf import settings # noqa; pylint: disable=unused-variable
if args.daemonize:
print('Running process in background')
from ..daemonize import create_daemon
create_daemon() | python | def process_arguments(args):
"""Deal with arguments passed on the command line"""
if args.sentryconfig:
print('Parsing DSN from %s' % args.sentryconfig)
os.environ['SENTRY_DSN'] = parse_sentry_configuration(args.sentryconfig)
if args.sentrydsn:
print('Using the DSN %s' % args.sentrydsn)
os.environ['SENTRY_DSN'] = args.sentrydsn
if args.nginxerrorpath:
print('Using the Nginx error log path %s' % args.nginxerrorpath)
os.environ['NGINX_ERROR_PATH'] = args.nginxerrorpath
from ..conf import settings # noqa; pylint: disable=unused-variable
if args.daemonize:
print('Running process in background')
from ..daemonize import create_daemon
create_daemon() | [
"def",
"process_arguments",
"(",
"args",
")",
":",
"if",
"args",
".",
"sentryconfig",
":",
"print",
"(",
"'Parsing DSN from %s'",
"%",
"args",
".",
"sentryconfig",
")",
"os",
".",
"environ",
"[",
"'SENTRY_DSN'",
"]",
"=",
"parse_sentry_configuration",
"(",
"ar... | Deal with arguments passed on the command line | [
"Deal",
"with",
"arguments",
"passed",
"on",
"the",
"command",
"line"
] | 1bff3f2c8e37265430269cdf1ed8f860ce2dd72a | https://github.com/mdgart/sentrylogs/blob/1bff3f2c8e37265430269cdf1ed8f860ce2dd72a/sentrylogs/bin/sentrylogs.py#L37-L56 | train | 213,484 |
mdgart/sentrylogs | sentrylogs/bin/sentrylogs.py | parse_sentry_configuration | def parse_sentry_configuration(filename):
"""Parse Sentry DSN out of an application or Sentry configuration file"""
filetype = os.path.splitext(filename)[-1][1:].lower()
if filetype == 'ini': # Pyramid, Pylons
config = ConfigParser()
config.read(filename)
ini_key = 'dsn'
ini_sections = ['sentry', 'filter:raven']
for section in ini_sections:
if section in config:
print('- Using value from [{section}]:[{key}]'
.format(section=section, key=ini_key))
try:
return config[section][ini_key]
except KeyError:
print('- Warning: Key "{key}" not found in section '
'[{section}]'.format(section=section, key=ini_key))
raise SystemExit('No DSN found in {file}. Tried sections [{sec_list}]'
.format(
file=filename,
sec_list='], ['.join(ini_sections),
))
elif filetype == 'py': # Django, Flask, Bottle, ...
raise SystemExit('Parsing configuration from pure Python (Django,'
'Flask, Bottle, etc.) not implemented yet.')
else:
raise SystemExit('Configuration file type not supported for parsing: '
'%s' % filetype) | python | def parse_sentry_configuration(filename):
"""Parse Sentry DSN out of an application or Sentry configuration file"""
filetype = os.path.splitext(filename)[-1][1:].lower()
if filetype == 'ini': # Pyramid, Pylons
config = ConfigParser()
config.read(filename)
ini_key = 'dsn'
ini_sections = ['sentry', 'filter:raven']
for section in ini_sections:
if section in config:
print('- Using value from [{section}]:[{key}]'
.format(section=section, key=ini_key))
try:
return config[section][ini_key]
except KeyError:
print('- Warning: Key "{key}" not found in section '
'[{section}]'.format(section=section, key=ini_key))
raise SystemExit('No DSN found in {file}. Tried sections [{sec_list}]'
.format(
file=filename,
sec_list='], ['.join(ini_sections),
))
elif filetype == 'py': # Django, Flask, Bottle, ...
raise SystemExit('Parsing configuration from pure Python (Django,'
'Flask, Bottle, etc.) not implemented yet.')
else:
raise SystemExit('Configuration file type not supported for parsing: '
'%s' % filetype) | [
"def",
"parse_sentry_configuration",
"(",
"filename",
")",
":",
"filetype",
"=",
"os",
".",
"path",
".",
"splitext",
"(",
"filename",
")",
"[",
"-",
"1",
"]",
"[",
"1",
":",
"]",
".",
"lower",
"(",
")",
"if",
"filetype",
"==",
"'ini'",
":",
"# Pyrami... | Parse Sentry DSN out of an application or Sentry configuration file | [
"Parse",
"Sentry",
"DSN",
"out",
"of",
"an",
"application",
"or",
"Sentry",
"configuration",
"file"
] | 1bff3f2c8e37265430269cdf1ed8f860ce2dd72a | https://github.com/mdgart/sentrylogs/blob/1bff3f2c8e37265430269cdf1ed8f860ce2dd72a/sentrylogs/bin/sentrylogs.py#L59-L88 | train | 213,485 |
mdgart/sentrylogs | setup.py | read_file | def read_file(filename):
"""Read the contents of a file located relative to setup.py"""
with open(join(abspath(dirname(__file__)), filename)) as file:
return file.read() | python | def read_file(filename):
"""Read the contents of a file located relative to setup.py"""
with open(join(abspath(dirname(__file__)), filename)) as file:
return file.read() | [
"def",
"read_file",
"(",
"filename",
")",
":",
"with",
"open",
"(",
"join",
"(",
"abspath",
"(",
"dirname",
"(",
"__file__",
")",
")",
",",
"filename",
")",
")",
"as",
"file",
":",
"return",
"file",
".",
"read",
"(",
")"
] | Read the contents of a file located relative to setup.py | [
"Read",
"the",
"contents",
"of",
"a",
"file",
"located",
"relative",
"to",
"setup",
".",
"py"
] | 1bff3f2c8e37265430269cdf1ed8f860ce2dd72a | https://github.com/mdgart/sentrylogs/blob/1bff3f2c8e37265430269cdf1ed8f860ce2dd72a/setup.py#L69-L72 | train | 213,486 |
mdgart/sentrylogs | sentrylogs/parsers/nginx.py | Nginx.parse | def parse(self, line):
"""Parse a line of the Nginx error log"""
csv_list = line.split(",")
date_time_message = csv_list.pop(0).split(" ", 2)
otherinfo = dict()
for item in csv_list:
key_value_pair = item.split(":", 1)
key = key_value_pair[0].strip()
if len(key_value_pair) > 1:
value = key_value_pair[1].strip()
if not value:
value = "-"
else:
value = "-"
otherinfo[key] = value
self.message = '%s\n' \
'Date: %s\n' \
'Time: %s\n' \
'Request: %s\n' \
'Referrer: %s\n' \
'Server: %s\n' \
'Client: %s\n' \
'Host: %s\n' \
'Upstream: %s\n'
self.params = [
date_time_message[2],
date_time_message[0],
date_time_message[1],
otherinfo.get("request", "-"),
otherinfo.get("referrer", "-"),
otherinfo.get("server", "-"),
otherinfo.get("client", "-"),
otherinfo.get("host", "-"),
otherinfo.get("upstream", "-"),
]
self.site = otherinfo.get("referrer", "-") | python | def parse(self, line):
"""Parse a line of the Nginx error log"""
csv_list = line.split(",")
date_time_message = csv_list.pop(0).split(" ", 2)
otherinfo = dict()
for item in csv_list:
key_value_pair = item.split(":", 1)
key = key_value_pair[0].strip()
if len(key_value_pair) > 1:
value = key_value_pair[1].strip()
if not value:
value = "-"
else:
value = "-"
otherinfo[key] = value
self.message = '%s\n' \
'Date: %s\n' \
'Time: %s\n' \
'Request: %s\n' \
'Referrer: %s\n' \
'Server: %s\n' \
'Client: %s\n' \
'Host: %s\n' \
'Upstream: %s\n'
self.params = [
date_time_message[2],
date_time_message[0],
date_time_message[1],
otherinfo.get("request", "-"),
otherinfo.get("referrer", "-"),
otherinfo.get("server", "-"),
otherinfo.get("client", "-"),
otherinfo.get("host", "-"),
otherinfo.get("upstream", "-"),
]
self.site = otherinfo.get("referrer", "-") | [
"def",
"parse",
"(",
"self",
",",
"line",
")",
":",
"csv_list",
"=",
"line",
".",
"split",
"(",
"\",\"",
")",
"date_time_message",
"=",
"csv_list",
".",
"pop",
"(",
"0",
")",
".",
"split",
"(",
"\" \"",
",",
"2",
")",
"otherinfo",
"=",
"dict",
"(",... | Parse a line of the Nginx error log | [
"Parse",
"a",
"line",
"of",
"the",
"Nginx",
"error",
"log"
] | 1bff3f2c8e37265430269cdf1ed8f860ce2dd72a | https://github.com/mdgart/sentrylogs/blob/1bff3f2c8e37265430269cdf1ed8f860ce2dd72a/sentrylogs/parsers/nginx.py#L24-L63 | train | 213,487 |
okpy/ok-client | client/utils/guidance.py | Guidance.validate_json | def validate_json(self):
""" Ensure that the checksum matches. """
if not hasattr(self, 'guidance_json'):
return False
checksum = self.guidance_json.get('checksum')
contents = self.guidance_json.get('db')
hash_key = ("{}{}".format(json.dumps(contents, sort_keys=True),
self.assignment.endpoint).encode())
digest = hashlib.md5(hash_key).hexdigest()
if not checksum:
log.warning("Checksum on guidance not found. Invalidating file")
return False
if digest != checksum:
log.warning("Checksum %s did not match actual digest %s", checksum, digest)
return False
return True | python | def validate_json(self):
""" Ensure that the checksum matches. """
if not hasattr(self, 'guidance_json'):
return False
checksum = self.guidance_json.get('checksum')
contents = self.guidance_json.get('db')
hash_key = ("{}{}".format(json.dumps(contents, sort_keys=True),
self.assignment.endpoint).encode())
digest = hashlib.md5(hash_key).hexdigest()
if not checksum:
log.warning("Checksum on guidance not found. Invalidating file")
return False
if digest != checksum:
log.warning("Checksum %s did not match actual digest %s", checksum, digest)
return False
return True | [
"def",
"validate_json",
"(",
"self",
")",
":",
"if",
"not",
"hasattr",
"(",
"self",
",",
"'guidance_json'",
")",
":",
"return",
"False",
"checksum",
"=",
"self",
".",
"guidance_json",
".",
"get",
"(",
"'checksum'",
")",
"contents",
"=",
"self",
".",
"gui... | Ensure that the checksum matches. | [
"Ensure",
"that",
"the",
"checksum",
"matches",
"."
] | 517f57dd76284af40ba9766e42d9222b644afd9c | https://github.com/okpy/ok-client/blob/517f57dd76284af40ba9766e42d9222b644afd9c/client/utils/guidance.py#L107-L126 | train | 213,488 |
okpy/ok-client | client/utils/guidance.py | Guidance.set_tg | def set_tg(self):
""" Try to grab the treatment group number for the student.
If there is no treatment group number available, request it
from the server.
"""
# Checks to see the student currently has a treatment group number.
if not os.path.isfile(self.current_working_dir + LOCAL_TG_FILE):
cur_email = self.assignment.get_student_email()
log.info("Current email is %s", cur_email)
if not cur_email:
self.tg_id = -1
return EMPTY_MISUCOUNT_TGID_PRNTEDMSG
tg_url = ("{}{}/{}{}"
.format(TGSERVER, cur_email, self.assignment_name,
TG_SERVER_ENDING))
try:
log.info("Accessing treatment server at %s", tg_url)
data = requests.get(tg_url, timeout=1).json()
except IOError:
data = {"tg": -1}
log.warning("Failed to communicate to server", exc_info=True)
if data.get("tg") is None:
log.warning("Server returned back a bad treatment group ID.")
data = {"tg": -1}
with open(self.current_working_dir + LOCAL_TG_FILE, "w") as fd:
fd.write(str(data["tg"]))
tg_file = open(self.current_working_dir + LOCAL_TG_FILE, 'r')
self.tg_id = int(tg_file.read()) | python | def set_tg(self):
""" Try to grab the treatment group number for the student.
If there is no treatment group number available, request it
from the server.
"""
# Checks to see the student currently has a treatment group number.
if not os.path.isfile(self.current_working_dir + LOCAL_TG_FILE):
cur_email = self.assignment.get_student_email()
log.info("Current email is %s", cur_email)
if not cur_email:
self.tg_id = -1
return EMPTY_MISUCOUNT_TGID_PRNTEDMSG
tg_url = ("{}{}/{}{}"
.format(TGSERVER, cur_email, self.assignment_name,
TG_SERVER_ENDING))
try:
log.info("Accessing treatment server at %s", tg_url)
data = requests.get(tg_url, timeout=1).json()
except IOError:
data = {"tg": -1}
log.warning("Failed to communicate to server", exc_info=True)
if data.get("tg") is None:
log.warning("Server returned back a bad treatment group ID.")
data = {"tg": -1}
with open(self.current_working_dir + LOCAL_TG_FILE, "w") as fd:
fd.write(str(data["tg"]))
tg_file = open(self.current_working_dir + LOCAL_TG_FILE, 'r')
self.tg_id = int(tg_file.read()) | [
"def",
"set_tg",
"(",
"self",
")",
":",
"# Checks to see the student currently has a treatment group number.",
"if",
"not",
"os",
".",
"path",
".",
"isfile",
"(",
"self",
".",
"current_working_dir",
"+",
"LOCAL_TG_FILE",
")",
":",
"cur_email",
"=",
"self",
".",
"a... | Try to grab the treatment group number for the student.
If there is no treatment group number available, request it
from the server. | [
"Try",
"to",
"grab",
"the",
"treatment",
"group",
"number",
"for",
"the",
"student",
".",
"If",
"there",
"is",
"no",
"treatment",
"group",
"number",
"available",
"request",
"it",
"from",
"the",
"server",
"."
] | 517f57dd76284af40ba9766e42d9222b644afd9c | https://github.com/okpy/ok-client/blob/517f57dd76284af40ba9766e42d9222b644afd9c/client/utils/guidance.py#L345-L376 | train | 213,489 |
okpy/ok-client | client/utils/guidance.py | Guidance.prompt_with_prob | def prompt_with_prob(self, orig_response=None, prob=None):
"""Ask for rationale with a specific level of probability. """
# Disable opt-out.
# if self.assignment.cmd_args.no_experiments:
# log.info("Skipping prompt due to --no-experiments")
# return "Skipped due to --no-experiments"
if self.load_error:
return 'Failed to read guidance config file'
if hasattr(self.assignment, 'is_test'):
log.info("Skipping prompt due to test mode")
return "Test response"
if prob is None:
prob = self.prompt_probability
if random.random() > prob:
log.info("Did not prompt for rationale: Insufficient Probability")
return "Did not prompt for rationale"
with format.block(style="-"):
rationale = prompt.explanation_msg(EXPLANTION_PROMPT,
short_msg=CONFIRM_BLANK_EXPLANATION)
if prob is None:
# Reduce future prompt likelihood
self.prompt_probability = 0
if orig_response:
print('Thanks! Your original response was: {}'.format('\n'.join(orig_response)))
return rationale | python | def prompt_with_prob(self, orig_response=None, prob=None):
"""Ask for rationale with a specific level of probability. """
# Disable opt-out.
# if self.assignment.cmd_args.no_experiments:
# log.info("Skipping prompt due to --no-experiments")
# return "Skipped due to --no-experiments"
if self.load_error:
return 'Failed to read guidance config file'
if hasattr(self.assignment, 'is_test'):
log.info("Skipping prompt due to test mode")
return "Test response"
if prob is None:
prob = self.prompt_probability
if random.random() > prob:
log.info("Did not prompt for rationale: Insufficient Probability")
return "Did not prompt for rationale"
with format.block(style="-"):
rationale = prompt.explanation_msg(EXPLANTION_PROMPT,
short_msg=CONFIRM_BLANK_EXPLANATION)
if prob is None:
# Reduce future prompt likelihood
self.prompt_probability = 0
if orig_response:
print('Thanks! Your original response was: {}'.format('\n'.join(orig_response)))
return rationale | [
"def",
"prompt_with_prob",
"(",
"self",
",",
"orig_response",
"=",
"None",
",",
"prob",
"=",
"None",
")",
":",
"# Disable opt-out.",
"# if self.assignment.cmd_args.no_experiments:",
"# log.info(\"Skipping prompt due to --no-experiments\")",
"# return \"Skipped due to --no-e... | Ask for rationale with a specific level of probability. | [
"Ask",
"for",
"rationale",
"with",
"a",
"specific",
"level",
"of",
"probability",
"."
] | 517f57dd76284af40ba9766e42d9222b644afd9c | https://github.com/okpy/ok-client/blob/517f57dd76284af40ba9766e42d9222b644afd9c/client/utils/guidance.py#L378-L406 | train | 213,490 |
okpy/ok-client | client/__main__.py | patch_requests | def patch_requests():
""" Customize the cacerts.pem file that requests uses.
Automatically updates the cert file if the contents are different.
"""
config.create_config_directory()
ca_certs_file = config.CERT_FILE
ca_certs_contents = requests.__loader__.get_data('requests/cacert.pem')
should_write_certs = True
if os.path.isfile(ca_certs_file):
with open(ca_certs_file, 'rb') as f:
existing_certs = f.read()
if existing_certs != ca_certs_contents:
should_write_certs = True
print("Updating local SSL certificates")
else:
should_write_certs = False
if should_write_certs:
with open(ca_certs_file, 'wb') as f:
f.write(ca_certs_contents)
os.environ['REQUESTS_CA_BUNDLE'] = ca_certs_file | python | def patch_requests():
""" Customize the cacerts.pem file that requests uses.
Automatically updates the cert file if the contents are different.
"""
config.create_config_directory()
ca_certs_file = config.CERT_FILE
ca_certs_contents = requests.__loader__.get_data('requests/cacert.pem')
should_write_certs = True
if os.path.isfile(ca_certs_file):
with open(ca_certs_file, 'rb') as f:
existing_certs = f.read()
if existing_certs != ca_certs_contents:
should_write_certs = True
print("Updating local SSL certificates")
else:
should_write_certs = False
if should_write_certs:
with open(ca_certs_file, 'wb') as f:
f.write(ca_certs_contents)
os.environ['REQUESTS_CA_BUNDLE'] = ca_certs_file | [
"def",
"patch_requests",
"(",
")",
":",
"config",
".",
"create_config_directory",
"(",
")",
"ca_certs_file",
"=",
"config",
".",
"CERT_FILE",
"ca_certs_contents",
"=",
"requests",
".",
"__loader__",
".",
"get_data",
"(",
"'requests/cacert.pem'",
")",
"should_write_c... | Customize the cacerts.pem file that requests uses.
Automatically updates the cert file if the contents are different. | [
"Customize",
"the",
"cacerts",
".",
"pem",
"file",
"that",
"requests",
"uses",
".",
"Automatically",
"updates",
"the",
"cert",
"file",
"if",
"the",
"contents",
"are",
"different",
"."
] | 517f57dd76284af40ba9766e42d9222b644afd9c | https://github.com/okpy/ok-client/blob/517f57dd76284af40ba9766e42d9222b644afd9c/client/__main__.py#L18-L41 | train | 213,491 |
okpy/ok-client | client/protocols/file_contents.py | FileContentsProtocol.run | def run(self, messages):
"""Find all source files and return their complete contents.
Source files are considered to be files listed self.assignment.src.
If a certain source filepath is not a valid file (e.g. does not exist
or is not a file), then the contents associated with that filepath will
be an empty string.
RETURNS:
dict; a mapping of source filepath -> contents as strings.
"""
files = {}
# TODO(albert): move this to AnalyticsProtocol
if self.args.submit:
files['submit'] = True
for file in self.assignment.src:
if not self.is_file(file):
# TODO(albert): add an error message
contents = ''
log.warning('File {} does not exist'.format(file))
else:
contents = self.read_file(file)
log.info('Loaded contents of {} to send to server'.format(file))
files[file] = contents
messages['file_contents'] = files | python | def run(self, messages):
"""Find all source files and return their complete contents.
Source files are considered to be files listed self.assignment.src.
If a certain source filepath is not a valid file (e.g. does not exist
or is not a file), then the contents associated with that filepath will
be an empty string.
RETURNS:
dict; a mapping of source filepath -> contents as strings.
"""
files = {}
# TODO(albert): move this to AnalyticsProtocol
if self.args.submit:
files['submit'] = True
for file in self.assignment.src:
if not self.is_file(file):
# TODO(albert): add an error message
contents = ''
log.warning('File {} does not exist'.format(file))
else:
contents = self.read_file(file)
log.info('Loaded contents of {} to send to server'.format(file))
files[file] = contents
messages['file_contents'] = files | [
"def",
"run",
"(",
"self",
",",
"messages",
")",
":",
"files",
"=",
"{",
"}",
"# TODO(albert): move this to AnalyticsProtocol",
"if",
"self",
".",
"args",
".",
"submit",
":",
"files",
"[",
"'submit'",
"]",
"=",
"True",
"for",
"file",
"in",
"self",
".",
"... | Find all source files and return their complete contents.
Source files are considered to be files listed self.assignment.src.
If a certain source filepath is not a valid file (e.g. does not exist
or is not a file), then the contents associated with that filepath will
be an empty string.
RETURNS:
dict; a mapping of source filepath -> contents as strings. | [
"Find",
"all",
"source",
"files",
"and",
"return",
"their",
"complete",
"contents",
"."
] | 517f57dd76284af40ba9766e42d9222b644afd9c | https://github.com/okpy/ok-client/blob/517f57dd76284af40ba9766e42d9222b644afd9c/client/protocols/file_contents.py#L10-L35 | train | 213,492 |
okpy/ok-client | client/protocols/lock.py | LockProtocol.run | def run(self, messages):
"""Responsible for locking each test."""
if not self.args.lock:
return
format.print_line('~')
print('Locking tests')
print()
for test in self.assignment.test_map.values():
log.info('Locking {}'.format(test.name))
test.lock(self._hash_fn) | python | def run(self, messages):
"""Responsible for locking each test."""
if not self.args.lock:
return
format.print_line('~')
print('Locking tests')
print()
for test in self.assignment.test_map.values():
log.info('Locking {}'.format(test.name))
test.lock(self._hash_fn) | [
"def",
"run",
"(",
"self",
",",
"messages",
")",
":",
"if",
"not",
"self",
".",
"args",
".",
"lock",
":",
"return",
"format",
".",
"print_line",
"(",
"'~'",
")",
"print",
"(",
"'Locking tests'",
")",
"print",
"(",
")",
"for",
"test",
"in",
"self",
... | Responsible for locking each test. | [
"Responsible",
"for",
"locking",
"each",
"test",
"."
] | 517f57dd76284af40ba9766e42d9222b644afd9c | https://github.com/okpy/ok-client/blob/517f57dd76284af40ba9766e42d9222b644afd9c/client/protocols/lock.py#L13-L24 | train | 213,493 |
okpy/ok-client | client/utils/auth.py | pick_free_port | def pick_free_port(hostname=REDIRECT_HOST, port=0):
""" Try to bind a port. Default=0 selects a free port. """
import socket
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
try:
s.bind((hostname, port)) # port=0 finds an open port
except OSError as e:
log.warning("Could not bind to %s:%s %s", hostname, port, e)
if port == 0:
print('Unable to find an open port for authentication.')
raise AuthenticationException(e)
else:
return pick_free_port(hostname, 0)
addr, port = s.getsockname()
s.close()
return port | python | def pick_free_port(hostname=REDIRECT_HOST, port=0):
""" Try to bind a port. Default=0 selects a free port. """
import socket
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
try:
s.bind((hostname, port)) # port=0 finds an open port
except OSError as e:
log.warning("Could not bind to %s:%s %s", hostname, port, e)
if port == 0:
print('Unable to find an open port for authentication.')
raise AuthenticationException(e)
else:
return pick_free_port(hostname, 0)
addr, port = s.getsockname()
s.close()
return port | [
"def",
"pick_free_port",
"(",
"hostname",
"=",
"REDIRECT_HOST",
",",
"port",
"=",
"0",
")",
":",
"import",
"socket",
"s",
"=",
"socket",
".",
"socket",
"(",
"socket",
".",
"AF_INET",
",",
"socket",
".",
"SOCK_STREAM",
")",
"try",
":",
"s",
".",
"bind",... | Try to bind a port. Default=0 selects a free port. | [
"Try",
"to",
"bind",
"a",
"port",
".",
"Default",
"=",
"0",
"selects",
"a",
"free",
"port",
"."
] | 517f57dd76284af40ba9766e42d9222b644afd9c | https://github.com/okpy/ok-client/blob/517f57dd76284af40ba9766e42d9222b644afd9c/client/utils/auth.py#L72-L87 | train | 213,494 |
okpy/ok-client | client/utils/auth.py | make_token_post | def make_token_post(server, data):
"""Try getting an access token from the server. If successful, returns the
JSON response. If unsuccessful, raises an OAuthException.
"""
try:
response = requests.post(server + TOKEN_ENDPOINT, data=data, timeout=TIMEOUT)
body = response.json()
except Exception as e:
log.warning('Other error when exchanging code', exc_info=True)
raise OAuthException(
error='Authentication Failed',
error_description=str(e))
if 'error' in body:
log.error(body)
raise OAuthException(
error=body.get('error', 'Unknown Error'),
error_description = body.get('error_description', ''))
return body | python | def make_token_post(server, data):
"""Try getting an access token from the server. If successful, returns the
JSON response. If unsuccessful, raises an OAuthException.
"""
try:
response = requests.post(server + TOKEN_ENDPOINT, data=data, timeout=TIMEOUT)
body = response.json()
except Exception as e:
log.warning('Other error when exchanging code', exc_info=True)
raise OAuthException(
error='Authentication Failed',
error_description=str(e))
if 'error' in body:
log.error(body)
raise OAuthException(
error=body.get('error', 'Unknown Error'),
error_description = body.get('error_description', ''))
return body | [
"def",
"make_token_post",
"(",
"server",
",",
"data",
")",
":",
"try",
":",
"response",
"=",
"requests",
".",
"post",
"(",
"server",
"+",
"TOKEN_ENDPOINT",
",",
"data",
"=",
"data",
",",
"timeout",
"=",
"TIMEOUT",
")",
"body",
"=",
"response",
".",
"js... | Try getting an access token from the server. If successful, returns the
JSON response. If unsuccessful, raises an OAuthException. | [
"Try",
"getting",
"an",
"access",
"token",
"from",
"the",
"server",
".",
"If",
"successful",
"returns",
"the",
"JSON",
"response",
".",
"If",
"unsuccessful",
"raises",
"an",
"OAuthException",
"."
] | 517f57dd76284af40ba9766e42d9222b644afd9c | https://github.com/okpy/ok-client/blob/517f57dd76284af40ba9766e42d9222b644afd9c/client/utils/auth.py#L89-L106 | train | 213,495 |
okpy/ok-client | client/utils/auth.py | authenticate | def authenticate(cmd_args, endpoint='', force=False):
"""Returns an OAuth token that can be passed to the server for
identification. If FORCE is False, it will attempt to use a cached token
or refresh the OAuth token.
"""
server = server_url(cmd_args)
network.check_ssl()
access_token = None
try:
assert not force
access_token = refresh_local_token(server)
except Exception:
print('Performing authentication')
access_token = perform_oauth(get_code, cmd_args, endpoint)
email = display_student_email(cmd_args, access_token)
if not email:
log.warning('Could not get login email. Try logging in again.')
log.debug('Authenticated with access token={}'.format(access_token))
return access_token | python | def authenticate(cmd_args, endpoint='', force=False):
"""Returns an OAuth token that can be passed to the server for
identification. If FORCE is False, it will attempt to use a cached token
or refresh the OAuth token.
"""
server = server_url(cmd_args)
network.check_ssl()
access_token = None
try:
assert not force
access_token = refresh_local_token(server)
except Exception:
print('Performing authentication')
access_token = perform_oauth(get_code, cmd_args, endpoint)
email = display_student_email(cmd_args, access_token)
if not email:
log.warning('Could not get login email. Try logging in again.')
log.debug('Authenticated with access token={}'.format(access_token))
return access_token | [
"def",
"authenticate",
"(",
"cmd_args",
",",
"endpoint",
"=",
"''",
",",
"force",
"=",
"False",
")",
":",
"server",
"=",
"server_url",
"(",
"cmd_args",
")",
"network",
".",
"check_ssl",
"(",
")",
"access_token",
"=",
"None",
"try",
":",
"assert",
"not",
... | Returns an OAuth token that can be passed to the server for
identification. If FORCE is False, it will attempt to use a cached token
or refresh the OAuth token. | [
"Returns",
"an",
"OAuth",
"token",
"that",
"can",
"be",
"passed",
"to",
"the",
"server",
"for",
"identification",
".",
"If",
"FORCE",
"is",
"False",
"it",
"will",
"attempt",
"to",
"use",
"a",
"cached",
"token",
"or",
"refresh",
"the",
"OAuth",
"token",
"... | 517f57dd76284af40ba9766e42d9222b644afd9c | https://github.com/okpy/ok-client/blob/517f57dd76284af40ba9766e42d9222b644afd9c/client/utils/auth.py#L187-L208 | train | 213,496 |
okpy/ok-client | client/utils/auth.py | notebook_authenticate | def notebook_authenticate(cmd_args, force=False, silent=True):
""" Similiar to authenticate but prints student emails after
all calls and uses a different way to get codes. If SILENT is True,
it will suppress the error message and redirect to FORCE=True
"""
server = server_url(cmd_args)
network.check_ssl()
access_token = None
if not force:
try:
access_token = refresh_local_token(server)
except OAuthException as e:
# Account for Invalid Grant Error During make_token_post
if not silent:
raise e
return notebook_authenticate(cmd_args, force=True, silent=False)
if not access_token:
access_token = perform_oauth(
get_code_via_terminal,
cmd_args,
copy_msg=NOTEBOOK_COPY_MESSAGE,
paste_msg=NOTEBOOK_PASTE_MESSAGE)
# Always display email
email = display_student_email(cmd_args, access_token)
if email is None and not force:
return notebook_authenticate(cmd_args, force=True) # Token has expired
elif email is None:
# Did not get a valid token even after a fresh login
log.warning('Could not get login email. You may have been logged out. '
' Try logging in again.')
return access_token | python | def notebook_authenticate(cmd_args, force=False, silent=True):
""" Similiar to authenticate but prints student emails after
all calls and uses a different way to get codes. If SILENT is True,
it will suppress the error message and redirect to FORCE=True
"""
server = server_url(cmd_args)
network.check_ssl()
access_token = None
if not force:
try:
access_token = refresh_local_token(server)
except OAuthException as e:
# Account for Invalid Grant Error During make_token_post
if not silent:
raise e
return notebook_authenticate(cmd_args, force=True, silent=False)
if not access_token:
access_token = perform_oauth(
get_code_via_terminal,
cmd_args,
copy_msg=NOTEBOOK_COPY_MESSAGE,
paste_msg=NOTEBOOK_PASTE_MESSAGE)
# Always display email
email = display_student_email(cmd_args, access_token)
if email is None and not force:
return notebook_authenticate(cmd_args, force=True) # Token has expired
elif email is None:
# Did not get a valid token even after a fresh login
log.warning('Could not get login email. You may have been logged out. '
' Try logging in again.')
return access_token | [
"def",
"notebook_authenticate",
"(",
"cmd_args",
",",
"force",
"=",
"False",
",",
"silent",
"=",
"True",
")",
":",
"server",
"=",
"server_url",
"(",
"cmd_args",
")",
"network",
".",
"check_ssl",
"(",
")",
"access_token",
"=",
"None",
"if",
"not",
"force",
... | Similiar to authenticate but prints student emails after
all calls and uses a different way to get codes. If SILENT is True,
it will suppress the error message and redirect to FORCE=True | [
"Similiar",
"to",
"authenticate",
"but",
"prints",
"student",
"emails",
"after",
"all",
"calls",
"and",
"uses",
"a",
"different",
"way",
"to",
"get",
"codes",
".",
"If",
"SILENT",
"is",
"True",
"it",
"will",
"suppress",
"the",
"error",
"message",
"and",
"r... | 517f57dd76284af40ba9766e42d9222b644afd9c | https://github.com/okpy/ok-client/blob/517f57dd76284af40ba9766e42d9222b644afd9c/client/utils/auth.py#L210-L242 | train | 213,497 |
okpy/ok-client | client/utils/auth.py | get_student_email | def get_student_email(cmd_args, endpoint=''):
"""Attempts to get the student's email. Returns the email, or None."""
log.info("Attempting to get student email")
if cmd_args.local:
return None
access_token = authenticate(cmd_args, endpoint=endpoint, force=False)
if not access_token:
return None
try:
return get_info(cmd_args, access_token)['email']
except IOError as e:
return None | python | def get_student_email(cmd_args, endpoint=''):
"""Attempts to get the student's email. Returns the email, or None."""
log.info("Attempting to get student email")
if cmd_args.local:
return None
access_token = authenticate(cmd_args, endpoint=endpoint, force=False)
if not access_token:
return None
try:
return get_info(cmd_args, access_token)['email']
except IOError as e:
return None | [
"def",
"get_student_email",
"(",
"cmd_args",
",",
"endpoint",
"=",
"''",
")",
":",
"log",
".",
"info",
"(",
"\"Attempting to get student email\"",
")",
"if",
"cmd_args",
".",
"local",
":",
"return",
"None",
"access_token",
"=",
"authenticate",
"(",
"cmd_args",
... | Attempts to get the student's email. Returns the email, or None. | [
"Attempts",
"to",
"get",
"the",
"student",
"s",
"email",
".",
"Returns",
"the",
"email",
"or",
"None",
"."
] | 517f57dd76284af40ba9766e42d9222b644afd9c | https://github.com/okpy/ok-client/blob/517f57dd76284af40ba9766e42d9222b644afd9c/client/utils/auth.py#L365-L376 | train | 213,498 |
okpy/ok-client | client/utils/auth.py | get_identifier | def get_identifier(cmd_args, endpoint=''):
""" Obtain anonmyzied identifier."""
student_email = get_student_email(cmd_args, endpoint)
if not student_email:
return "Unknown"
return hashlib.md5(student_email.encode()).hexdigest() | python | def get_identifier(cmd_args, endpoint=''):
""" Obtain anonmyzied identifier."""
student_email = get_student_email(cmd_args, endpoint)
if not student_email:
return "Unknown"
return hashlib.md5(student_email.encode()).hexdigest() | [
"def",
"get_identifier",
"(",
"cmd_args",
",",
"endpoint",
"=",
"''",
")",
":",
"student_email",
"=",
"get_student_email",
"(",
"cmd_args",
",",
"endpoint",
")",
"if",
"not",
"student_email",
":",
"return",
"\"Unknown\"",
"return",
"hashlib",
".",
"md5",
"(",
... | Obtain anonmyzied identifier. | [
"Obtain",
"anonmyzied",
"identifier",
"."
] | 517f57dd76284af40ba9766e42d9222b644afd9c | https://github.com/okpy/ok-client/blob/517f57dd76284af40ba9766e42d9222b644afd9c/client/utils/auth.py#L378-L383 | train | 213,499 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.