repo stringlengths 7 55 | path stringlengths 4 127 | func_name stringlengths 1 88 | original_string stringlengths 75 19.8k | language stringclasses 1 value | code stringlengths 75 19.8k | code_tokens listlengths 20 707 | docstring stringlengths 3 17.3k | docstring_tokens listlengths 3 222 | sha stringlengths 40 40 | url stringlengths 87 242 | partition stringclasses 1 value | idx int64 0 252k |
|---|---|---|---|---|---|---|---|---|---|---|---|---|
erdc/RAPIDpy | RAPIDpy/gis/taudem.py | TauDEM.rasterToPolygon | def rasterToPolygon(raster_file, polygon_file):
"""
Converts watershed raster to polygon and then dissolves it.
It dissolves features based on the LINKNO attribute.
"""
log("Process: Raster to Polygon ...")
time_start = datetime.utcnow()
temp_polygon_file = \
"{0}_temp.shp".format(
os.path.splitext(os.path.basename(polygon_file))[0])
GDALGrid(raster_file).to_polygon(out_shapefile=temp_polygon_file,
fieldname="LINKNO",
self_mask=True)
log("Time to convert to polygon: {0}"
.format(datetime.utcnow()-time_start))
log("Dissolving ...")
time_start_dissolve = datetime.utcnow()
ogr_polygin_shapefile = ogr.Open(temp_polygon_file)
ogr_polygon_shapefile_lyr = ogr_polygin_shapefile.GetLayer()
number_of_features = ogr_polygon_shapefile_lyr.GetFeatureCount()
polygon_rivid_list = np.zeros(number_of_features, dtype=np.int32)
for feature_idx, catchment_feature in \
enumerate(ogr_polygon_shapefile_lyr):
polygon_rivid_list[feature_idx] = \
catchment_feature.GetField('LINKNO')
shp_drv = ogr.GetDriverByName('ESRI Shapefile')
# Remove output shapefile if it already exists
if os.path.exists(polygon_file):
shp_drv.DeleteDataSource(polygon_file)
dissolve_shapefile = shp_drv.CreateDataSource(polygon_file)
dissolve_layer = \
dissolve_shapefile.CreateLayer(
'',
ogr_polygon_shapefile_lyr.GetSpatialRef(),
ogr.wkbPolygon)
dissolve_layer.CreateField(ogr.FieldDefn('LINKNO', ogr.OFTInteger))
dissolve_layer_defn = dissolve_layer.GetLayerDefn()
for unique_rivid in np.unique(polygon_rivid_list):
# get indices where it is in the polygon
feature_indices = np.where(polygon_rivid_list == unique_rivid)[0]
new_feat = ogr.Feature(dissolve_layer_defn)
new_feat.SetField('LINKNO', int(unique_rivid))
if len(feature_indices) == 1:
# write feature to file
feature = \
ogr_polygon_shapefile_lyr.GetFeature(feature_indices[0])
new_feat.SetGeometry(feature.GetGeometryRef())
else:
# dissolve
dissolve_poly_list = []
for feature_index in feature_indices:
feature = \
ogr_polygon_shapefile_lyr.GetFeature(feature_index)
feat_geom = feature.GetGeometryRef()
dissolve_poly_list.append(
shapely_loads(feat_geom.ExportToWkb()))
dissolve_polygon = cascaded_union(dissolve_poly_list)
new_feat.SetGeometry(
ogr.CreateGeometryFromWkb(dissolve_polygon.wkb))
dissolve_layer.CreateFeature(new_feat)
# clean up
shp_drv.DeleteDataSource(temp_polygon_file)
log("Time to dissolve: {0}".format(datetime.utcnow() -
time_start_dissolve))
log("Total time to convert: {0}".format(datetime.utcnow() -
time_start)) | python | def rasterToPolygon(raster_file, polygon_file):
"""
Converts watershed raster to polygon and then dissolves it.
It dissolves features based on the LINKNO attribute.
"""
log("Process: Raster to Polygon ...")
time_start = datetime.utcnow()
temp_polygon_file = \
"{0}_temp.shp".format(
os.path.splitext(os.path.basename(polygon_file))[0])
GDALGrid(raster_file).to_polygon(out_shapefile=temp_polygon_file,
fieldname="LINKNO",
self_mask=True)
log("Time to convert to polygon: {0}"
.format(datetime.utcnow()-time_start))
log("Dissolving ...")
time_start_dissolve = datetime.utcnow()
ogr_polygin_shapefile = ogr.Open(temp_polygon_file)
ogr_polygon_shapefile_lyr = ogr_polygin_shapefile.GetLayer()
number_of_features = ogr_polygon_shapefile_lyr.GetFeatureCount()
polygon_rivid_list = np.zeros(number_of_features, dtype=np.int32)
for feature_idx, catchment_feature in \
enumerate(ogr_polygon_shapefile_lyr):
polygon_rivid_list[feature_idx] = \
catchment_feature.GetField('LINKNO')
shp_drv = ogr.GetDriverByName('ESRI Shapefile')
# Remove output shapefile if it already exists
if os.path.exists(polygon_file):
shp_drv.DeleteDataSource(polygon_file)
dissolve_shapefile = shp_drv.CreateDataSource(polygon_file)
dissolve_layer = \
dissolve_shapefile.CreateLayer(
'',
ogr_polygon_shapefile_lyr.GetSpatialRef(),
ogr.wkbPolygon)
dissolve_layer.CreateField(ogr.FieldDefn('LINKNO', ogr.OFTInteger))
dissolve_layer_defn = dissolve_layer.GetLayerDefn()
for unique_rivid in np.unique(polygon_rivid_list):
# get indices where it is in the polygon
feature_indices = np.where(polygon_rivid_list == unique_rivid)[0]
new_feat = ogr.Feature(dissolve_layer_defn)
new_feat.SetField('LINKNO', int(unique_rivid))
if len(feature_indices) == 1:
# write feature to file
feature = \
ogr_polygon_shapefile_lyr.GetFeature(feature_indices[0])
new_feat.SetGeometry(feature.GetGeometryRef())
else:
# dissolve
dissolve_poly_list = []
for feature_index in feature_indices:
feature = \
ogr_polygon_shapefile_lyr.GetFeature(feature_index)
feat_geom = feature.GetGeometryRef()
dissolve_poly_list.append(
shapely_loads(feat_geom.ExportToWkb()))
dissolve_polygon = cascaded_union(dissolve_poly_list)
new_feat.SetGeometry(
ogr.CreateGeometryFromWkb(dissolve_polygon.wkb))
dissolve_layer.CreateFeature(new_feat)
# clean up
shp_drv.DeleteDataSource(temp_polygon_file)
log("Time to dissolve: {0}".format(datetime.utcnow() -
time_start_dissolve))
log("Total time to convert: {0}".format(datetime.utcnow() -
time_start)) | [
"def",
"rasterToPolygon",
"(",
"raster_file",
",",
"polygon_file",
")",
":",
"log",
"(",
"\"Process: Raster to Polygon ...\"",
")",
"time_start",
"=",
"datetime",
".",
"utcnow",
"(",
")",
"temp_polygon_file",
"=",
"\"{0}_temp.shp\"",
".",
"format",
"(",
"os",
".",... | Converts watershed raster to polygon and then dissolves it.
It dissolves features based on the LINKNO attribute. | [
"Converts",
"watershed",
"raster",
"to",
"polygon",
"and",
"then",
"dissolves",
"it",
".",
"It",
"dissolves",
"features",
"based",
"on",
"the",
"LINKNO",
"attribute",
"."
] | 50e14e130554b254a00ff23b226cd7e4c6cfe91a | https://github.com/erdc/RAPIDpy/blob/50e14e130554b254a00ff23b226cd7e4c6cfe91a/RAPIDpy/gis/taudem.py#L453-L525 | train | 33,300 |
erdc/RAPIDpy | RAPIDpy/gis/taudem.py | TauDEM.pitRemove | def pitRemove(self,
elevation_grid,
pit_filled_elevation_grid,
input_depression_mask_grid=None,
consider4way=False,
):
"""
Remove low spots from DEM.
"""
log("PROCESS: PitRemove")
self.pit_filled_elevation_grid = pit_filled_elevation_grid
# Construct the taudem command line.
cmd = [os.path.join(self.taudem_exe_path, 'pitremove'),
'-z', elevation_grid,
'-fel', self.pit_filled_elevation_grid,
]
if input_depression_mask_grid:
cmd += ['-depmask', input_depression_mask_grid]
if consider4way:
cmd += ['-4way']
self._run_mpi_cmd(cmd)
# create projection file
self._add_prj_file(elevation_grid,
self.pit_filled_elevation_grid) | python | def pitRemove(self,
elevation_grid,
pit_filled_elevation_grid,
input_depression_mask_grid=None,
consider4way=False,
):
"""
Remove low spots from DEM.
"""
log("PROCESS: PitRemove")
self.pit_filled_elevation_grid = pit_filled_elevation_grid
# Construct the taudem command line.
cmd = [os.path.join(self.taudem_exe_path, 'pitremove'),
'-z', elevation_grid,
'-fel', self.pit_filled_elevation_grid,
]
if input_depression_mask_grid:
cmd += ['-depmask', input_depression_mask_grid]
if consider4way:
cmd += ['-4way']
self._run_mpi_cmd(cmd)
# create projection file
self._add_prj_file(elevation_grid,
self.pit_filled_elevation_grid) | [
"def",
"pitRemove",
"(",
"self",
",",
"elevation_grid",
",",
"pit_filled_elevation_grid",
",",
"input_depression_mask_grid",
"=",
"None",
",",
"consider4way",
"=",
"False",
",",
")",
":",
"log",
"(",
"\"PROCESS: PitRemove\"",
")",
"self",
".",
"pit_filled_elevation_... | Remove low spots from DEM. | [
"Remove",
"low",
"spots",
"from",
"DEM",
"."
] | 50e14e130554b254a00ff23b226cd7e4c6cfe91a | https://github.com/erdc/RAPIDpy/blob/50e14e130554b254a00ff23b226cd7e4c6cfe91a/RAPIDpy/gis/taudem.py#L592-L619 | train | 33,301 |
erdc/RAPIDpy | RAPIDpy/gis/taudem.py | TauDEM.dinfFlowDirection | def dinfFlowDirection(self,
flow_dir_grid,
slope_grid,
pit_filled_elevation_grid=None):
"""
Calculates flow direction with Dinf method
"""
log("PROCESS: DinfFlowDirection")
if pit_filled_elevation_grid:
self.pit_filled_elevation_grid = pit_filled_elevation_grid
# Construct the taudem command line.
cmd = [os.path.join(self.taudem_exe_path, 'dinfflowdir'),
'-fel', self.pit_filled_elevation_grid,
'-ang', flow_dir_grid,
'-slp', slope_grid,
]
self._run_mpi_cmd(cmd)
# create projection files
self._add_prj_file(self.pit_filled_elevation_grid,
flow_dir_grid)
self._add_prj_file(self.pit_filled_elevation_grid,
slope_grid) | python | def dinfFlowDirection(self,
flow_dir_grid,
slope_grid,
pit_filled_elevation_grid=None):
"""
Calculates flow direction with Dinf method
"""
log("PROCESS: DinfFlowDirection")
if pit_filled_elevation_grid:
self.pit_filled_elevation_grid = pit_filled_elevation_grid
# Construct the taudem command line.
cmd = [os.path.join(self.taudem_exe_path, 'dinfflowdir'),
'-fel', self.pit_filled_elevation_grid,
'-ang', flow_dir_grid,
'-slp', slope_grid,
]
self._run_mpi_cmd(cmd)
# create projection files
self._add_prj_file(self.pit_filled_elevation_grid,
flow_dir_grid)
self._add_prj_file(self.pit_filled_elevation_grid,
slope_grid) | [
"def",
"dinfFlowDirection",
"(",
"self",
",",
"flow_dir_grid",
",",
"slope_grid",
",",
"pit_filled_elevation_grid",
"=",
"None",
")",
":",
"log",
"(",
"\"PROCESS: DinfFlowDirection\"",
")",
"if",
"pit_filled_elevation_grid",
":",
"self",
".",
"pit_filled_elevation_grid"... | Calculates flow direction with Dinf method | [
"Calculates",
"flow",
"direction",
"with",
"Dinf",
"method"
] | 50e14e130554b254a00ff23b226cd7e4c6cfe91a | https://github.com/erdc/RAPIDpy/blob/50e14e130554b254a00ff23b226cd7e4c6cfe91a/RAPIDpy/gis/taudem.py#L621-L645 | train | 33,302 |
erdc/RAPIDpy | RAPIDpy/gis/taudem.py | TauDEM.dinfContributingArea | def dinfContributingArea(self,
contributing_area_grid,
flow_dir_grid,
outlet_shapefile=None,
weight_grid=None,
edge_contamination=False,
):
"""
Calculates contributing area with Dinf method.
"""
log("PROCESS: DinfContributingArea")
# Construct the taudem command line.
cmd = [os.path.join(self.taudem_exe_path, 'areadinf'),
'-ang', flow_dir_grid,
'-sca', contributing_area_grid,
]
if outlet_shapefile:
cmd += ['-o', outlet_shapefile]
if weight_grid:
cmd += ['-wg', weight_grid]
if not edge_contamination:
cmd = cmd + ['-nc']
self._run_mpi_cmd(cmd)
# create projection file
self._add_prj_file(flow_dir_grid,
contributing_area_grid) | python | def dinfContributingArea(self,
contributing_area_grid,
flow_dir_grid,
outlet_shapefile=None,
weight_grid=None,
edge_contamination=False,
):
"""
Calculates contributing area with Dinf method.
"""
log("PROCESS: DinfContributingArea")
# Construct the taudem command line.
cmd = [os.path.join(self.taudem_exe_path, 'areadinf'),
'-ang', flow_dir_grid,
'-sca', contributing_area_grid,
]
if outlet_shapefile:
cmd += ['-o', outlet_shapefile]
if weight_grid:
cmd += ['-wg', weight_grid]
if not edge_contamination:
cmd = cmd + ['-nc']
self._run_mpi_cmd(cmd)
# create projection file
self._add_prj_file(flow_dir_grid,
contributing_area_grid) | [
"def",
"dinfContributingArea",
"(",
"self",
",",
"contributing_area_grid",
",",
"flow_dir_grid",
",",
"outlet_shapefile",
"=",
"None",
",",
"weight_grid",
"=",
"None",
",",
"edge_contamination",
"=",
"False",
",",
")",
":",
"log",
"(",
"\"PROCESS: DinfContributingAr... | Calculates contributing area with Dinf method. | [
"Calculates",
"contributing",
"area",
"with",
"Dinf",
"method",
"."
] | 50e14e130554b254a00ff23b226cd7e4c6cfe91a | https://github.com/erdc/RAPIDpy/blob/50e14e130554b254a00ff23b226cd7e4c6cfe91a/RAPIDpy/gis/taudem.py#L675-L704 | train | 33,303 |
erdc/RAPIDpy | RAPIDpy/gis/taudem.py | TauDEM.streamDefByThreshold | def streamDefByThreshold(self,
stream_raster_grid,
threshold,
contributing_area_grid,
mask_grid=None,
):
"""
Calculates the stream definition by threshold.
"""
log("PROCESS: StreamDefByThreshold")
self.stream_raster_grid = stream_raster_grid
# Construct the taudem command line.
cmd = [os.path.join(self.taudem_exe_path, 'threshold'),
'-ssa', contributing_area_grid,
'-src', self.stream_raster_grid,
'-thresh', str(threshold),
]
if mask_grid:
cmd += ['-mask', mask_grid]
self._run_mpi_cmd(cmd)
# create projection file
self._add_prj_file(contributing_area_grid,
self.stream_raster_grid) | python | def streamDefByThreshold(self,
stream_raster_grid,
threshold,
contributing_area_grid,
mask_grid=None,
):
"""
Calculates the stream definition by threshold.
"""
log("PROCESS: StreamDefByThreshold")
self.stream_raster_grid = stream_raster_grid
# Construct the taudem command line.
cmd = [os.path.join(self.taudem_exe_path, 'threshold'),
'-ssa', contributing_area_grid,
'-src', self.stream_raster_grid,
'-thresh', str(threshold),
]
if mask_grid:
cmd += ['-mask', mask_grid]
self._run_mpi_cmd(cmd)
# create projection file
self._add_prj_file(contributing_area_grid,
self.stream_raster_grid) | [
"def",
"streamDefByThreshold",
"(",
"self",
",",
"stream_raster_grid",
",",
"threshold",
",",
"contributing_area_grid",
",",
"mask_grid",
"=",
"None",
",",
")",
":",
"log",
"(",
"\"PROCESS: StreamDefByThreshold\"",
")",
"self",
".",
"stream_raster_grid",
"=",
"strea... | Calculates the stream definition by threshold. | [
"Calculates",
"the",
"stream",
"definition",
"by",
"threshold",
"."
] | 50e14e130554b254a00ff23b226cd7e4c6cfe91a | https://github.com/erdc/RAPIDpy/blob/50e14e130554b254a00ff23b226cd7e4c6cfe91a/RAPIDpy/gis/taudem.py#L739-L765 | train | 33,304 |
erdc/RAPIDpy | RAPIDpy/gis/taudem.py | TauDEM.streamReachAndWatershed | def streamReachAndWatershed(self,
delineate,
out_stream_order_grid,
out_network_connectivity_tree,
out_network_coordinates,
out_stream_reach_file,
out_watershed_grid,
pit_filled_elevation_grid=None,
flow_dir_grid=None,
contributing_area_grid=None,
stream_raster_grid=None,
outlet_shapefile=None
):
"""
Creates vector network and shapefile from stream raster grid
"""
log("PROCESS: StreamReachAndWatershed")
if pit_filled_elevation_grid:
self.pit_filled_elevation_grid = pit_filled_elevation_grid
if flow_dir_grid:
self.flow_dir_grid = flow_dir_grid
if contributing_area_grid:
self.contributing_area_grid = contributing_area_grid
if stream_raster_grid:
self.stream_raster_grid = stream_raster_grid
# Construct the taudem command line.
cmd = [os.path.join(self.taudem_exe_path, 'streamnet'),
'-fel', self.pit_filled_elevation_grid,
'-p', self.flow_dir_grid,
'-ad8', self.contributing_area_grid,
'-src', self.stream_raster_grid,
'-ord', out_stream_order_grid,
'-tree', out_network_connectivity_tree,
'-coord', out_network_coordinates,
'-net', out_stream_reach_file,
'-w', out_watershed_grid,
]
if outlet_shapefile:
cmd += ['-o', outlet_shapefile]
if delineate:
cmd += ['-sw']
self._run_mpi_cmd(cmd)
# create projection file
self._add_prj_file(self.pit_filled_elevation_grid,
out_stream_order_grid)
self._add_prj_file(self.pit_filled_elevation_grid,
out_stream_reach_file)
self._add_prj_file(self.pit_filled_elevation_grid,
out_watershed_grid) | python | def streamReachAndWatershed(self,
delineate,
out_stream_order_grid,
out_network_connectivity_tree,
out_network_coordinates,
out_stream_reach_file,
out_watershed_grid,
pit_filled_elevation_grid=None,
flow_dir_grid=None,
contributing_area_grid=None,
stream_raster_grid=None,
outlet_shapefile=None
):
"""
Creates vector network and shapefile from stream raster grid
"""
log("PROCESS: StreamReachAndWatershed")
if pit_filled_elevation_grid:
self.pit_filled_elevation_grid = pit_filled_elevation_grid
if flow_dir_grid:
self.flow_dir_grid = flow_dir_grid
if contributing_area_grid:
self.contributing_area_grid = contributing_area_grid
if stream_raster_grid:
self.stream_raster_grid = stream_raster_grid
# Construct the taudem command line.
cmd = [os.path.join(self.taudem_exe_path, 'streamnet'),
'-fel', self.pit_filled_elevation_grid,
'-p', self.flow_dir_grid,
'-ad8', self.contributing_area_grid,
'-src', self.stream_raster_grid,
'-ord', out_stream_order_grid,
'-tree', out_network_connectivity_tree,
'-coord', out_network_coordinates,
'-net', out_stream_reach_file,
'-w', out_watershed_grid,
]
if outlet_shapefile:
cmd += ['-o', outlet_shapefile]
if delineate:
cmd += ['-sw']
self._run_mpi_cmd(cmd)
# create projection file
self._add_prj_file(self.pit_filled_elevation_grid,
out_stream_order_grid)
self._add_prj_file(self.pit_filled_elevation_grid,
out_stream_reach_file)
self._add_prj_file(self.pit_filled_elevation_grid,
out_watershed_grid) | [
"def",
"streamReachAndWatershed",
"(",
"self",
",",
"delineate",
",",
"out_stream_order_grid",
",",
"out_network_connectivity_tree",
",",
"out_network_coordinates",
",",
"out_stream_reach_file",
",",
"out_watershed_grid",
",",
"pit_filled_elevation_grid",
"=",
"None",
",",
... | Creates vector network and shapefile from stream raster grid | [
"Creates",
"vector",
"network",
"and",
"shapefile",
"from",
"stream",
"raster",
"grid"
] | 50e14e130554b254a00ff23b226cd7e4c6cfe91a | https://github.com/erdc/RAPIDpy/blob/50e14e130554b254a00ff23b226cd7e4c6cfe91a/RAPIDpy/gis/taudem.py#L767-L819 | train | 33,305 |
erdc/RAPIDpy | RAPIDpy/gis/taudem.py | TauDEM.demToStreamNetwork | def demToStreamNetwork(self,
output_directory,
raw_elevation_dem="",
pit_filled_elevation_grid="",
flow_dir_grid_d8="",
contributing_area_grid_d8="",
flow_dir_grid_dinf="",
contributing_area_grid_dinf="",
use_dinf=False,
threshold=1000,
delineate=False):
"""
This function will run all of the TauDEM processes to generate
a stream network from an elevation raster.
.. note:: For information about the stream reach and watershed process:
http://hydrology.usu.edu/taudem/taudem5/help53/StreamReachAndWatershed.html
.. note:: For information about the *threshold* parameter, see:
http://hydrology.usu.edu/taudem/taudem5/help53/StreamDefinitionByThreshold.html
Parameters
----------
output_directory: str
Path to output generated files to.
raw_elevation_dem: str, optional
Path to original elevation DEM file. Required if
*pit_filled_elevation_grid* is not used.
pit_filled_elevation_grid: str, optional
Path to pit filled elevation DEM file. Required if
*raw_elevation_dem* is not used.
flow_dir_grid_d8: str, optional
Path to flow direction grid generated using TauDEM's D8 method.
contributing_area_grid_d8: str, optional
Path to contributing area grid generated using TauDEM's D8 method.
flow_dir_grid_dinf: str, optional
Path to flow direction grid generated using TauDEM's
D-Infinity method (EXPERIMENTAL).
contributing_area_grid_dinf: str, optional
Path to contributing area grid generated using TauDEM's
D-Infinity method (EXPERIMENTAL).
use_dinf: bool, optional
Use the D-Infinity method to get stream definition (EXPERIMENTAL).
threshold: int, optional
The stream threshold or maximum number of upstream grid cells.
See above note.
delineate: bool, optional
If True, this will use the delineate option for theis method
using TauDEM. Default is False.
Here is an example of how to use this:
.. code:: python
from RAPIDpy.gis.taudem import TauDEM
elevation_dem = '/path/to/dem.tif'
output_directory = '/path/to/output/files'
td = TauDEM("/path/to/scripts/TauDEM")
td.demToStreamNetwork(output_directory,
elevation_dem,
threshold=1000)
"""
time_start = datetime.utcnow()
# FILL PITS IF NEEDED
self.pit_filled_elevation_grid = pit_filled_elevation_grid
if not pit_filled_elevation_grid:
pit_filled_elevation_grid = \
os.path.join(output_directory, 'pit_filled_elevation_grid.tif')
self.pitRemove(raw_elevation_dem,
pit_filled_elevation_grid)
# GENERATE D8 RASTERS
self.flow_dir_grid = flow_dir_grid_d8
if not flow_dir_grid_d8:
flow_dir_grid_d8 = \
os.path.join(output_directory, 'flow_dir_grid_d8.tif')
slope_grid_d8 = os.path.join(output_directory, 'slope_grid_d8.tif')
self.d8FlowDirection(flow_dir_grid_d8,
slope_grid_d8)
self.contributing_area_grid = contributing_area_grid_d8
if not contributing_area_grid_d8:
contributing_area_grid_d8 = \
os.path.join(output_directory, 'contributing_area_grid_d8.tif')
self.d8ContributingArea(contributing_area_grid_d8)
stream_raster_grid = \
os.path.join(output_directory, 'stream_raster_grid.tif')
if use_dinf:
log("USING DINF METHOD TO GET STREAM DEFINITION ...")
if not flow_dir_grid_dinf:
flow_dir_grid_dinf = \
os.path.join(output_directory, 'flow_dir_grid_dinf.tif')
slope_grid_dinf = \
os.path.join(output_directory, 'slope_grid_dinf.tif')
self.dinfFlowDirection(flow_dir_grid_dinf,
slope_grid_dinf)
if not contributing_area_grid_dinf:
contributing_area_grid_dinf = \
os.path.join(output_directory,
'contributing_area_grid_dinf.tif')
self.dinfContributingArea(contributing_area_grid_dinf,
flow_dir_grid_dinf)
self.streamDefByThreshold(stream_raster_grid,
threshold,
contributing_area_grid_dinf)
else:
log("USING D8 METHOD TO GET STREAM DEFINITION ...")
self.streamDefByThreshold(stream_raster_grid,
threshold,
contributing_area_grid_d8)
# GENERATE STREAM NETWORK
out_stream_order_grid = \
os.path.join(output_directory, 'stream_order_grid.tif')
out_network_connectivity_tree = \
os.path.join(output_directory, 'network_connectivity_tree.txt')
out_network_coordinates = \
os.path.join(output_directory, 'network_coordinates.txt')
out_stream_reach_file = \
os.path.join(output_directory, 'stream_reach_file.shp')
out_watershed_grid = \
os.path.join(output_directory, 'watershed_grid.tif')
self.streamReachAndWatershed(delineate,
out_stream_order_grid,
out_network_connectivity_tree,
out_network_coordinates,
out_stream_reach_file,
out_watershed_grid)
# convert watersed grid to shapefile
out_watershed_shapefile = \
os.path.join(output_directory, 'watershed_shapefile.shp')
self.rasterToPolygon(out_watershed_grid, out_watershed_shapefile)
log("Total time to complete: {0}".format(datetime.utcnow() -
time_start)) | python | def demToStreamNetwork(self,
output_directory,
raw_elevation_dem="",
pit_filled_elevation_grid="",
flow_dir_grid_d8="",
contributing_area_grid_d8="",
flow_dir_grid_dinf="",
contributing_area_grid_dinf="",
use_dinf=False,
threshold=1000,
delineate=False):
"""
This function will run all of the TauDEM processes to generate
a stream network from an elevation raster.
.. note:: For information about the stream reach and watershed process:
http://hydrology.usu.edu/taudem/taudem5/help53/StreamReachAndWatershed.html
.. note:: For information about the *threshold* parameter, see:
http://hydrology.usu.edu/taudem/taudem5/help53/StreamDefinitionByThreshold.html
Parameters
----------
output_directory: str
Path to output generated files to.
raw_elevation_dem: str, optional
Path to original elevation DEM file. Required if
*pit_filled_elevation_grid* is not used.
pit_filled_elevation_grid: str, optional
Path to pit filled elevation DEM file. Required if
*raw_elevation_dem* is not used.
flow_dir_grid_d8: str, optional
Path to flow direction grid generated using TauDEM's D8 method.
contributing_area_grid_d8: str, optional
Path to contributing area grid generated using TauDEM's D8 method.
flow_dir_grid_dinf: str, optional
Path to flow direction grid generated using TauDEM's
D-Infinity method (EXPERIMENTAL).
contributing_area_grid_dinf: str, optional
Path to contributing area grid generated using TauDEM's
D-Infinity method (EXPERIMENTAL).
use_dinf: bool, optional
Use the D-Infinity method to get stream definition (EXPERIMENTAL).
threshold: int, optional
The stream threshold or maximum number of upstream grid cells.
See above note.
delineate: bool, optional
If True, this will use the delineate option for theis method
using TauDEM. Default is False.
Here is an example of how to use this:
.. code:: python
from RAPIDpy.gis.taudem import TauDEM
elevation_dem = '/path/to/dem.tif'
output_directory = '/path/to/output/files'
td = TauDEM("/path/to/scripts/TauDEM")
td.demToStreamNetwork(output_directory,
elevation_dem,
threshold=1000)
"""
time_start = datetime.utcnow()
# FILL PITS IF NEEDED
self.pit_filled_elevation_grid = pit_filled_elevation_grid
if not pit_filled_elevation_grid:
pit_filled_elevation_grid = \
os.path.join(output_directory, 'pit_filled_elevation_grid.tif')
self.pitRemove(raw_elevation_dem,
pit_filled_elevation_grid)
# GENERATE D8 RASTERS
self.flow_dir_grid = flow_dir_grid_d8
if not flow_dir_grid_d8:
flow_dir_grid_d8 = \
os.path.join(output_directory, 'flow_dir_grid_d8.tif')
slope_grid_d8 = os.path.join(output_directory, 'slope_grid_d8.tif')
self.d8FlowDirection(flow_dir_grid_d8,
slope_grid_d8)
self.contributing_area_grid = contributing_area_grid_d8
if not contributing_area_grid_d8:
contributing_area_grid_d8 = \
os.path.join(output_directory, 'contributing_area_grid_d8.tif')
self.d8ContributingArea(contributing_area_grid_d8)
stream_raster_grid = \
os.path.join(output_directory, 'stream_raster_grid.tif')
if use_dinf:
log("USING DINF METHOD TO GET STREAM DEFINITION ...")
if not flow_dir_grid_dinf:
flow_dir_grid_dinf = \
os.path.join(output_directory, 'flow_dir_grid_dinf.tif')
slope_grid_dinf = \
os.path.join(output_directory, 'slope_grid_dinf.tif')
self.dinfFlowDirection(flow_dir_grid_dinf,
slope_grid_dinf)
if not contributing_area_grid_dinf:
contributing_area_grid_dinf = \
os.path.join(output_directory,
'contributing_area_grid_dinf.tif')
self.dinfContributingArea(contributing_area_grid_dinf,
flow_dir_grid_dinf)
self.streamDefByThreshold(stream_raster_grid,
threshold,
contributing_area_grid_dinf)
else:
log("USING D8 METHOD TO GET STREAM DEFINITION ...")
self.streamDefByThreshold(stream_raster_grid,
threshold,
contributing_area_grid_d8)
# GENERATE STREAM NETWORK
out_stream_order_grid = \
os.path.join(output_directory, 'stream_order_grid.tif')
out_network_connectivity_tree = \
os.path.join(output_directory, 'network_connectivity_tree.txt')
out_network_coordinates = \
os.path.join(output_directory, 'network_coordinates.txt')
out_stream_reach_file = \
os.path.join(output_directory, 'stream_reach_file.shp')
out_watershed_grid = \
os.path.join(output_directory, 'watershed_grid.tif')
self.streamReachAndWatershed(delineate,
out_stream_order_grid,
out_network_connectivity_tree,
out_network_coordinates,
out_stream_reach_file,
out_watershed_grid)
# convert watersed grid to shapefile
out_watershed_shapefile = \
os.path.join(output_directory, 'watershed_shapefile.shp')
self.rasterToPolygon(out_watershed_grid, out_watershed_shapefile)
log("Total time to complete: {0}".format(datetime.utcnow() -
time_start)) | [
"def",
"demToStreamNetwork",
"(",
"self",
",",
"output_directory",
",",
"raw_elevation_dem",
"=",
"\"\"",
",",
"pit_filled_elevation_grid",
"=",
"\"\"",
",",
"flow_dir_grid_d8",
"=",
"\"\"",
",",
"contributing_area_grid_d8",
"=",
"\"\"",
",",
"flow_dir_grid_dinf",
"="... | This function will run all of the TauDEM processes to generate
a stream network from an elevation raster.
.. note:: For information about the stream reach and watershed process:
http://hydrology.usu.edu/taudem/taudem5/help53/StreamReachAndWatershed.html
.. note:: For information about the *threshold* parameter, see:
http://hydrology.usu.edu/taudem/taudem5/help53/StreamDefinitionByThreshold.html
Parameters
----------
output_directory: str
Path to output generated files to.
raw_elevation_dem: str, optional
Path to original elevation DEM file. Required if
*pit_filled_elevation_grid* is not used.
pit_filled_elevation_grid: str, optional
Path to pit filled elevation DEM file. Required if
*raw_elevation_dem* is not used.
flow_dir_grid_d8: str, optional
Path to flow direction grid generated using TauDEM's D8 method.
contributing_area_grid_d8: str, optional
Path to contributing area grid generated using TauDEM's D8 method.
flow_dir_grid_dinf: str, optional
Path to flow direction grid generated using TauDEM's
D-Infinity method (EXPERIMENTAL).
contributing_area_grid_dinf: str, optional
Path to contributing area grid generated using TauDEM's
D-Infinity method (EXPERIMENTAL).
use_dinf: bool, optional
Use the D-Infinity method to get stream definition (EXPERIMENTAL).
threshold: int, optional
The stream threshold or maximum number of upstream grid cells.
See above note.
delineate: bool, optional
If True, this will use the delineate option for theis method
using TauDEM. Default is False.
Here is an example of how to use this:
.. code:: python
from RAPIDpy.gis.taudem import TauDEM
elevation_dem = '/path/to/dem.tif'
output_directory = '/path/to/output/files'
td = TauDEM("/path/to/scripts/TauDEM")
td.demToStreamNetwork(output_directory,
elevation_dem,
threshold=1000) | [
"This",
"function",
"will",
"run",
"all",
"of",
"the",
"TauDEM",
"processes",
"to",
"generate",
"a",
"stream",
"network",
"from",
"an",
"elevation",
"raster",
"."
] | 50e14e130554b254a00ff23b226cd7e4c6cfe91a | https://github.com/erdc/RAPIDpy/blob/50e14e130554b254a00ff23b226cd7e4c6cfe91a/RAPIDpy/gis/taudem.py#L821-L963 | train | 33,306 |
def build_iiif_file_storage_path(url_path, ik_image, iiif_storage):
    """
    Return the file storage path for a given IIIF Image API URL path.

    NOTE: the returned path embeds the given ``Image`` instance's ID (already
    part of the URL path) to keep the path unique and identifiable, plus its
    modified timestamp, which acts as a primitive cache-buster for when the
    image changes after conversions already exist.

    TODO: Ideally we should use a hash or timestamp of the Image's actual
    image data, not of the whole instance, which could change while the
    image itself stays the same.
    """
    path = url_path[1:]  # strip the leading slash
    # Drop a redundant 'iiif/' prefix if present; it is re-added at the end.
    if path.startswith('iiif/'):
        path = path[len('iiif/'):]
    # Inject the image's modified timestamp immediately after the image ID
    # segment as a primitive cache-busting token.
    modified_ts = str(calendar.timegm(ik_image.date_modified.timetuple()))
    segments = path.split('/')
    path = '/'.join([segments[0], modified_ts] + segments[1:])
    # Keep some kind of separator visible in the final file name: '/' and
    # ',' would otherwise be purged and could produce ambiguous, clashing
    # names, e.g. /3/100,100,200,200/... => iiif3100100200200
    path = path.replace('/', '-').replace(',', '-')
    # Let the storage engine sanitise the name for its backend.
    path = iiif_storage.get_valid_name(path)
    # Namespace converted images under 'iiif/' unless the storage location
    # already does so, to avoid dumping files into a shared location.
    if iiif_storage.location != 'iiif':
        path = 'iiif/' + path
    return path
"""
Return the file storage path for a given IIIF Image API URL path.
NOTE: The returned file storage path includes the given ``Image``
instance's ID to ensure the path is unique and identifiable, and its
modified timestamp to act as a primitive cache-busting mechanism for
when the image is changed but there are pre-existing image conversions.
TODO: Ideally we should use a hash or timestamp for Image's actual
image data changes, not the whole instance which could change but
have same image.
"""
storage_path = url_path[1:] # Stip leading slash
# Strip redundant 'iiif-' prefix if present (re-added below)
if storage_path.startswith('iiif/'):
storage_path = storage_path[5:]
# Add Image's modified timestamp to storage path as a primitive
# cache-busting mechanism.
ik_image_ts = str(calendar.timegm(ik_image.date_modified.timetuple()))
splits = storage_path.split('/')
storage_path = '/'.join(
[splits[0]] + # Image ID
[ik_image_ts] + # Image instance modified timestamp
splits[1:] # Remainder of storage path
)
# Replace '/' & ',' with '-' to keep separators of some kind in
# storage file name, otherwise the characters get purged and
# produce storage names with potentially ambiguous and clashing
# values e.g. /3/100,100,200,200/... => iiif3100100200200
storage_path = storage_path.replace('/', '-').replace(',', '-')
# Convert URL path format to a valid file name for a given storage engine
storage_path = iiif_storage.get_valid_name(storage_path)
# Add path prefix to storage path to avoid dumping image files
# into the location of a storage location that might be used for many
# purposes.
if iiif_storage.location != 'iiif':
storage_path = 'iiif/' + storage_path
return storage_path | [
"def",
"build_iiif_file_storage_path",
"(",
"url_path",
",",
"ik_image",
",",
"iiif_storage",
")",
":",
"storage_path",
"=",
"url_path",
"[",
"1",
":",
"]",
"# Stip leading slash",
"# Strip redundant 'iiif-' prefix if present (re-added below)",
"if",
"storage_path",
".",
... | Return the file storage path for a given IIIF Image API URL path.
NOTE: The returned file storage path includes the given ``Image``
instance's ID to ensure the path is unique and identifiable, and its
modified timestamp to act as a primitive cache-busting mechanism for
when the image is changed but there are pre-existing image conversions.
TODO: Ideally we should use a hash or timestamp for Image's actual
image data changes, not the whole instance which could change but
have same image. | [
"Return",
"the",
"file",
"storage",
"path",
"for",
"a",
"given",
"IIIF",
"Image",
"API",
"URL",
"path",
"."
] | c507ea5b1864303732c53ad7c5800571fca5fa94 | https://github.com/ic-labs/django-icekit/blob/c507ea5b1864303732c53ad7c5800571fca5fa94/icekit/plugins/iiif/utils.py#L277-L321 | train | 33,307 |
def get_roles(self):
    """Return the m2m relations connecting me to works"""
    ids_of_my_works = self.get_works().values_list('id', flat=True)
    role_qs = self.works.through.objects.filter(
        creator=self.get_draft(),
        work_id__in=ids_of_my_works,
    )
    return role_qs.select_related('role')
"""Return the m2m relations connecting me to works"""
work_ids = self.get_works().values_list('id', flat=True)
return self.works.through.objects.filter(
creator=self.get_draft(),
work_id__in=work_ids,
).select_related('role') | [
"def",
"get_roles",
"(",
"self",
")",
":",
"work_ids",
"=",
"self",
".",
"get_works",
"(",
")",
".",
"values_list",
"(",
"'id'",
",",
"flat",
"=",
"True",
")",
"return",
"self",
".",
"works",
".",
"through",
".",
"objects",
".",
"filter",
"(",
"creat... | Return the m2m relations connecting me to works | [
"Return",
"the",
"m2m",
"relations",
"connecting",
"me",
"to",
"works"
] | c507ea5b1864303732c53ad7c5800571fca5fa94 | https://github.com/ic-labs/django-icekit/blob/c507ea5b1864303732c53ad7c5800571fca5fa94/glamkit_collections/contrib/work_creator/models.py#L271-L277 | train | 33,308 |
def derive_and_set_slug(self, set_name_sort=True, set_slug=True):
    """
    Derive the `slug` field from `title` unless a slug was set explicitly.

    This method is called during `save()`. `title` is the primary required
    name field and must be non-empty by the time this runs.
    """
    title = self.title
    # A missing title is a programming error at save time, so fail loudly.
    if is_empty(title):
        raise ValueError(
            u"%s.title cannot be empty at save" % type(self).__name__)
    # Only fill in `slug` when requested and it was not already provided.
    if set_slug and is_empty(self.slug):
        self.slug = slugify(title)
"""
Derive `slug` field from `title` unless it is set in its own right.
This method is called during `save()`
"""
# `title` is the primary required name field. It must be set.
if is_empty(self.title):
raise ValueError(
u"%s.title cannot be empty at save" % type(self).__name__)
# if empty, `slug` is set to slugified `title`
if set_slug and is_empty(self.slug):
self.slug = slugify(self.title) | [
"def",
"derive_and_set_slug",
"(",
"self",
",",
"set_name_sort",
"=",
"True",
",",
"set_slug",
"=",
"True",
")",
":",
"# `title` is the primary required name field. It must be set.",
"if",
"is_empty",
"(",
"self",
".",
"title",
")",
":",
"raise",
"ValueError",
"(",
... | Derive `slug` field from `title` unless it is set in its own right.
This method is called during `save()` | [
"Derive",
"slug",
"field",
"from",
"title",
"unless",
"it",
"is",
"set",
"in",
"its",
"own",
"right",
"."
] | c507ea5b1864303732c53ad7c5800571fca5fa94 | https://github.com/ic-labs/django-icekit/blob/c507ea5b1864303732c53ad7c5800571fca5fa94/glamkit_collections/contrib/work_creator/models.py#L414-L426 | train | 33,309 |
def get_roles(self):
    """
    Return the m2m relations connecting me to creators.

    There's some publishing-related complexity here. The role relations
    (self.creators.through) connect to draft objects, which then need to
    be modified to point to visible() objects.
    """
    ids_of_my_creators = self.get_creators().values_list('id', flat=True)
    role_qs = self.creators.through.objects.filter(
        work=self.get_draft(),
        creator_id__in=ids_of_my_creators,
    )
    return role_qs.select_related('role')
"""
Return the m2m relations connecting me to creators.
There's some publishing-related complexity here. The role relations
(self.creators.through) connect to draft objects, which then need to
be modified to point to visible() objects.
"""
creator_ids = self.get_creators().values_list('id', flat=True)
return self.creators.through.objects.filter(
work=self.get_draft(),
creator_id__in=creator_ids,
).select_related('role') | [
"def",
"get_roles",
"(",
"self",
")",
":",
"creator_ids",
"=",
"self",
".",
"get_creators",
"(",
")",
".",
"values_list",
"(",
"'id'",
",",
"flat",
"=",
"True",
")",
"return",
"self",
".",
"creators",
".",
"through",
".",
"objects",
".",
"filter",
"(",... | Return the m2m relations connecting me to creators.
There's some publishing-related complexity here. The role relations
(self.creators.through) connect to draft objects, which then need to
be modified to point to visible() objects. | [
"Return",
"the",
"m2m",
"relations",
"connecting",
"me",
"to",
"creators",
"."
] | c507ea5b1864303732c53ad7c5800571fca5fa94 | https://github.com/ic-labs/django-icekit/blob/c507ea5b1864303732c53ad7c5800571fca5fa94/glamkit_collections/contrib/work_creator/models.py#L467-L482 | train | 33,310 |
def get_urls(self):
    """
    Add calendar and calendar-data URLs ahead of the default admin URLs.
    """
    from django.conf.urls import patterns, url
    default_urls = super(EventAdmin, self).get_urls()
    calendar_urls = patterns(
        '',
        url(r'^calendar/$',
            self.admin_site.admin_view(self.calendar),
            name='icekit_events_eventbase_calendar'),
        url(r'^calendar_data/$',
            self.admin_site.admin_view(self.calendar_data),
            name='icekit_events_eventbase_calendar_data'),
    )
    # Custom URLs go first so they are matched before the catch-all
    # object-detail patterns in the default admin URLs.
    return calendar_urls + default_urls
"""
Add a calendar URL.
"""
from django.conf.urls import patterns, url
urls = super(EventAdmin, self).get_urls()
my_urls = patterns(
'',
url(
r'^calendar/$',
self.admin_site.admin_view(self.calendar),
name='icekit_events_eventbase_calendar'
),
url(
r'^calendar_data/$',
self.admin_site.admin_view(self.calendar_data),
name='icekit_events_eventbase_calendar_data'
),
)
return my_urls + urls | [
"def",
"get_urls",
"(",
"self",
")",
":",
"from",
"django",
".",
"conf",
".",
"urls",
"import",
"patterns",
",",
"url",
"urls",
"=",
"super",
"(",
"EventAdmin",
",",
"self",
")",
".",
"get_urls",
"(",
")",
"my_urls",
"=",
"patterns",
"(",
"''",
",",
... | Add a calendar URL. | [
"Add",
"a",
"calendar",
"URL",
"."
] | c507ea5b1864303732c53ad7c5800571fca5fa94 | https://github.com/ic-labs/django-icekit/blob/c507ea5b1864303732c53ad7c5800571fca5fa94/icekit_events/admin.py#L224-L243 | train | 33,311 |
def calendar(self, request):
    """
    Render the admin calendar page, intended to be loaded in an iframe.
    """
    # '_popup' is set by the admin when the page is opened as a popup.
    is_popup = bool(int(request.GET.get('_popup', 0)))
    return TemplateResponse(
        request,
        'admin/icekit_events/eventbase/calendar.html',
        {'is_popup': is_popup},
    )
"""
Return a calendar page to be loaded in an iframe.
"""
context = {
'is_popup': bool(int(request.GET.get('_popup', 0))),
}
return TemplateResponse(
request, 'admin/icekit_events/eventbase/calendar.html', context) | [
"def",
"calendar",
"(",
"self",
",",
"request",
")",
":",
"context",
"=",
"{",
"'is_popup'",
":",
"bool",
"(",
"int",
"(",
"request",
".",
"GET",
".",
"get",
"(",
"'_popup'",
",",
"0",
")",
")",
")",
",",
"}",
"return",
"TemplateResponse",
"(",
"re... | Return a calendar page to be loaded in an iframe. | [
"Return",
"a",
"calendar",
"page",
"to",
"be",
"loaded",
"in",
"an",
"iframe",
"."
] | c507ea5b1864303732c53ad7c5800571fca5fa94 | https://github.com/ic-labs/django-icekit/blob/c507ea5b1864303732c53ad7c5800571fca5fa94/icekit_events/admin.py#L245-L253 | train | 33,312 |
def calendar_data(self, request):
    """
    Return occurrence data in JSON format for the calendar's AJAX requests.

    Honours the admin changelist filters, so the calendar shows the same
    events as the (filtered) change list.
    """
    # Work on a mutable copy so query parameters can be popped below.
    request.GET = request.GET.copy()
    if 'timezone' in request.GET:
        tz = djtz.get(request.GET.pop('timezone'))
    else:
        tz = get_current_timezone()

    def parse_bound(param):
        # Aware datetime for `param`, or None when absent or unparseable.
        if param not in request.GET:
            return None
        parsed = self._parse_dt_from_request(request, param)
        return djtz.localize(parsed, tz) if parsed else None

    start = parse_bound('start')
    end = parse_bound('end')
    # Apply the same filtering the admin changelist would apply.
    changelist = ChangeList(
        request, self.model, self.list_display, self.list_display_links,
        self.list_filter, self.date_hierarchy, self.search_fields,
        self.list_select_related, self.list_per_page,
        self.list_max_show_all, self.list_editable, self)
    event_ids = changelist.get_queryset(request).values_list('id', flat=True)
    occurrences = models.Occurrence.objects.filter(
        event__id__in=event_ids).overlapping(start, end)
    payload = [self._calendar_json_for_occurrence(occurrence)
               for occurrence in occurrences.all()]
    return HttpResponse(
        content=json.dumps(payload, cls=DjangoJSONEncoder),
        content_type='application/json')
"""
Return event data in JSON format for AJAX requests, or a calendar page
to be loaded in an iframe.
"""
# mutable copy
request.GET = request.GET.copy()
if 'timezone' in request.GET:
tz = djtz.get(request.GET.pop('timezone'))
else:
tz = get_current_timezone()
if 'start' in request.GET:
start_dt = self._parse_dt_from_request(request, 'start')
if start_dt:
start = djtz.localize(start_dt, tz)
else:
start = None
else:
start = None
if 'end' in request.GET:
end_dt = self._parse_dt_from_request(request, 'end')
if end_dt:
end = djtz.localize(end_dt, tz)
else:
end = None
else:
end = None
# filter the qs like the changelist filters
cl = ChangeList(request, self.model, self.list_display,
self.list_display_links, self.list_filter,
self.date_hierarchy, self.search_fields,
self.list_select_related, self.list_per_page,
self.list_max_show_all, self.list_editable, self)
filtered_event_ids = cl.get_queryset(request).values_list('id', flat=True)
all_occurrences = models.Occurrence.objects.filter(event__id__in=filtered_event_ids).overlapping(start, end)
data = []
for occurrence in all_occurrences.all():
data.append(self._calendar_json_for_occurrence(occurrence))
data = json.dumps(data, cls=DjangoJSONEncoder)
return HttpResponse(content=data, content_type='application/json') | [
"def",
"calendar_data",
"(",
"self",
",",
"request",
")",
":",
"# mutable copy",
"request",
".",
"GET",
"=",
"request",
".",
"GET",
".",
"copy",
"(",
")",
"if",
"'timezone'",
"in",
"request",
".",
"GET",
":",
"tz",
"=",
"djtz",
".",
"get",
"(",
"requ... | Return event data in JSON format for AJAX requests, or a calendar page
to be loaded in an iframe. | [
"Return",
"event",
"data",
"in",
"JSON",
"format",
"for",
"AJAX",
"requests",
"or",
"a",
"calendar",
"page",
"to",
"be",
"loaded",
"in",
"an",
"iframe",
"."
] | c507ea5b1864303732c53ad7c5800571fca5fa94 | https://github.com/ic-labs/django-icekit/blob/c507ea5b1864303732c53ad7c5800571fca5fa94/icekit_events/admin.py#L255-L300 | train | 33,313 |
def _calendar_json_for_occurrence(self, occurrence):
    """
    Build the fullcalendar JSON dict for a single Occurrence.
    """
    if occurrence.is_all_day:
        start = occurrence.start
        # fullcalendar treats `end` as exclusive, per
        # http://fullcalendar.io/docs/event_data/Event_Object/, so add one
        # day so the end date is included on the calendar.
        end = occurrence.start + timedelta(days=1)
    else:
        start = djtz.localize(occurrence.start)
        end = djtz.localize(occurrence.end)
    title = occurrence.event.title
    # Surface the cancellation reason in the title, when there is one.
    if occurrence.is_cancelled and occurrence.cancel_reason:
        title = u"{0} [{1}]".format(title, occurrence.cancel_reason)
    primary_type = occurrence.event.primary_type
    color = primary_type.color if primary_type else "#cccccc"
    # Events that contain other events are rendered as all-day entries.
    all_day = (occurrence.is_all_day or
               occurrence.event.contained_events.exists())
    return {
        'title': title,
        'allDay': all_day,
        'start': start,
        'end': end,
        'url': reverse('admin:icekit_events_eventbase_change',
                       args=[occurrence.event.pk]),
        'className': self._calendar_classes_for_occurrence(occurrence),
        'backgroundColor': color,
    }
"""
Return JSON for a single Occurrence
"""
# Slugify the plugin's verbose name for use as a class name.
if occurrence.is_all_day:
start = occurrence.start
# `end` is exclusive according to the doc in
# http://fullcalendar.io/docs/event_data/Event_Object/, so
# we need to add 1 day to ``end`` to have the end date
# included in the calendar.
end = occurrence.start + timedelta(days=1)
else:
start = djtz.localize(occurrence.start)
end = djtz.localize(occurrence.end)
if occurrence.is_cancelled and occurrence.cancel_reason:
title = u"{0} [{1}]".format(
occurrence.event.title, occurrence.cancel_reason)
else:
title = occurrence.event.title
if occurrence.event.primary_type:
color = occurrence.event.primary_type.color
else:
color = "#cccccc"
return {
'title': title,
'allDay': occurrence.is_all_day or occurrence.event.contained_events.exists(),
'start': start,
'end': end,
'url': reverse('admin:icekit_events_eventbase_change',
args=[occurrence.event.pk]),
'className': self._calendar_classes_for_occurrence(occurrence),
'backgroundColor': color,
} | [
"def",
"_calendar_json_for_occurrence",
"(",
"self",
",",
"occurrence",
")",
":",
"# Slugify the plugin's verbose name for use as a class name.",
"if",
"occurrence",
".",
"is_all_day",
":",
"start",
"=",
"occurrence",
".",
"start",
"# `end` is exclusive according to the doc in"... | Return JSON for a single Occurrence | [
"Return",
"JSON",
"for",
"a",
"single",
"Occurrence"
] | c507ea5b1864303732c53ad7c5800571fca5fa94 | https://github.com/ic-labs/django-icekit/blob/c507ea5b1864303732c53ad7c5800571fca5fa94/icekit_events/admin.py#L313-L347 | train | 33,314 |
def _calendar_classes_for_occurrence(self, occurrence):
    """
    Return the CSS classes used for an occurrence in admin calendar JSON.
    """
    event = occurrence.event
    # Base class derived from the polymorphic event type's name.
    classes = [slugify(event.polymorphic_ctype.name)]
    if occurrence.is_all_day:
        classes.append('is-all-day')
    if occurrence.is_protected_from_regeneration:
        classes.append('is-user-modified')
    if occurrence.is_cancelled:
        classes.append('is-cancelled')
    # Events excluded from the calendar are flagged so they can be styled
    # as hidden.
    if not event.show_in_calendar:
        classes.append('do-not-show-in-calendar')
    # Namespace every class with "fcc-" (full calendar class).
    return ['fcc-%s' % cls for cls in classes]
"""
Return css classes to be used in admin calendar JSON
"""
classes = [slugify(occurrence.event.polymorphic_ctype.name)]
# Add a class name for the type of event.
if occurrence.is_all_day:
classes.append('is-all-day')
if occurrence.is_protected_from_regeneration:
classes.append('is-user-modified')
if occurrence.is_cancelled:
classes.append('is-cancelled')
# if an event isn't published or does not have show_in_calendar ticked,
# indicate that it is hidden
if not occurrence.event.show_in_calendar:
classes.append('do-not-show-in-calendar')
# Prefix class names with "fcc-" (full calendar class).
classes = ['fcc-%s' % class_ for class_ in classes]
return classes | [
"def",
"_calendar_classes_for_occurrence",
"(",
"self",
",",
"occurrence",
")",
":",
"classes",
"=",
"[",
"slugify",
"(",
"occurrence",
".",
"event",
".",
"polymorphic_ctype",
".",
"name",
")",
"]",
"# Add a class name for the type of event.",
"if",
"occurrence",
".... | Return css classes to be used in admin calendar JSON | [
"Return",
"css",
"classes",
"to",
"be",
"used",
"in",
"admin",
"calendar",
"JSON"
] | c507ea5b1864303732c53ad7c5800571fca5fa94 | https://github.com/ic-labs/django-icekit/blob/c507ea5b1864303732c53ad7c5800571fca5fa94/icekit_events/admin.py#L349-L371 | train | 33,315 |
def get_urls(self):
    """
    Add a recurrence-rule preview URL ahead of the default admin URLs.
    """
    from django.conf.urls import patterns, url
    default_urls = super(RecurrenceRuleAdmin, self).get_urls()
    preview_urls = patterns(
        '',
        url(r'^preview/$',
            self.admin_site.admin_view(self.preview),
            name='icekit_events_recurrencerule_preview'),
    )
    # Custom URL goes first so it is matched before the default patterns.
    return preview_urls + default_urls
"""
Add a preview URL.
"""
from django.conf.urls import patterns, url
urls = super(RecurrenceRuleAdmin, self).get_urls()
my_urls = patterns(
'',
url(
r'^preview/$',
self.admin_site.admin_view(self.preview),
name='icekit_events_recurrencerule_preview'
),
)
return my_urls + urls | [
"def",
"get_urls",
"(",
"self",
")",
":",
"from",
"django",
".",
"conf",
".",
"urls",
"import",
"patterns",
",",
"url",
"urls",
"=",
"super",
"(",
"RecurrenceRuleAdmin",
",",
"self",
")",
".",
"get_urls",
"(",
")",
"my_urls",
"=",
"patterns",
"(",
"''"... | Add a preview URL. | [
"Add",
"a",
"preview",
"URL",
"."
] | c507ea5b1864303732c53ad7c5800571fca5fa94 | https://github.com/ic-labs/django-icekit/blob/c507ea5b1864303732c53ad7c5800571fca5fa94/icekit_events/admin.py#L391-L405 | train | 33,316 |
def preview(self, request):
    """
    Return upcoming occurrences in JSON, up to the configured limit.
    """
    recurrence_rule = request.POST.get('recurrence_rule')
    limit = int(request.POST.get('limit', 10))
    try:
        rule_set = rrule.rrulestr(
            recurrence_rule, dtstart=djtz.now(), forceset=True)
    except ValueError as e:
        # The rule string was invalid; report the parser's message.
        data = {'error': six.text_type(e)}
    else:
        data = {'occurrences': rule_set[:limit]}
    return JsonResponse(data)
"""
Return a occurrences in JSON format up until the configured limit.
"""
recurrence_rule = request.POST.get('recurrence_rule')
limit = int(request.POST.get('limit', 10))
try:
rruleset = rrule.rrulestr(
recurrence_rule, dtstart=djtz.now(), forceset=True)
except ValueError as e:
data = {
'error': six.text_type(e),
}
else:
data = {
'occurrences': rruleset[:limit]
}
return JsonResponse(data) | [
"def",
"preview",
"(",
"self",
",",
"request",
")",
":",
"recurrence_rule",
"=",
"request",
".",
"POST",
".",
"get",
"(",
"'recurrence_rule'",
")",
"limit",
"=",
"int",
"(",
"request",
".",
"POST",
".",
"get",
"(",
"'limit'",
",",
"10",
")",
")",
"tr... | Return a occurrences in JSON format up until the configured limit. | [
"Return",
"a",
"occurrences",
"in",
"JSON",
"format",
"up",
"until",
"the",
"configured",
"limit",
"."
] | c507ea5b1864303732c53ad7c5800571fca5fa94 | https://github.com/ic-labs/django-icekit/blob/c507ea5b1864303732c53ad7c5800571fca5fa94/icekit_events/admin.py#L408-L425 | train | 33,317 |
def parse_primary_types(self, request):
    """
    Return primary event types that occurrences must belong to, or `None`
    if there is no constraint on primary type.
    """
    slugs = request.GET.getlist('primary_types')
    if slugs:
        # BUGFIX: use `filter`, not `get`. Multiple slugs can be supplied
        # via the query string, and `get(slug__in=...)` raises
        # MultipleObjectsReturned as soon as more than one type matches.
        # This also makes the method consistent with
        # `parse_secondary_types`, which already uses `filter`.
        return EventType.objects.filter(slug__in=slugs)
    return None
"""
Return primary event types that occurrences must belong to, or `None`
if there is no constraint on primary type.
"""
if request.GET.getlist('primary_types'):
return EventType.objects.get(
slug__in=request.GET.getlist('primary_types'))
return None | [
"def",
"parse_primary_types",
"(",
"self",
",",
"request",
")",
":",
"if",
"request",
".",
"GET",
".",
"getlist",
"(",
"'primary_types'",
")",
":",
"return",
"EventType",
".",
"objects",
".",
"get",
"(",
"slug__in",
"=",
"request",
".",
"GET",
".",
"getl... | Return primary event types that occurrences must belong to, or `None`
if there is no constraint on primary type. | [
"Return",
"primary",
"event",
"types",
"that",
"occurrences",
"must",
"belong",
"to",
"or",
"None",
"if",
"there",
"is",
"no",
"constraint",
"on",
"primary",
"type",
"."
] | c507ea5b1864303732c53ad7c5800571fca5fa94 | https://github.com/ic-labs/django-icekit/blob/c507ea5b1864303732c53ad7c5800571fca5fa94/icekit_events/page_types/advancedeventlisting/models.py#L101-L109 | train | 33,318 |
def parse_secondary_types(self, request):
    """
    Return secondary event types that occurrences must belong to, or `None`
    if there is no constraint on secondary type.
    """
    slugs = request.GET.getlist('secondary_types')
    # No slugs supplied means no constraint at all, not an empty queryset.
    return EventType.objects.filter(slug__in=slugs) if slugs else None
"""
Return secondary event types that occurrences must belong to, or `None`
if there is no constraint on secondary type.
"""
if request.GET.getlist('secondary_types'):
return EventType.objects.filter(
slug__in=request.GET.getlist('secondary_types'))
return None | [
"def",
"parse_secondary_types",
"(",
"self",
",",
"request",
")",
":",
"if",
"request",
".",
"GET",
".",
"getlist",
"(",
"'secondary_types'",
")",
":",
"return",
"EventType",
".",
"objects",
".",
"filter",
"(",
"slug__in",
"=",
"request",
".",
"GET",
".",
... | Return secondary event types that occurrences must belong to, or `None`
if there is no constraint on secondary type. | [
"Return",
"secondary",
"event",
"types",
"that",
"occurrences",
"must",
"belong",
"to",
"or",
"None",
"if",
"there",
"is",
"no",
"constraint",
"on",
"secondary",
"type",
"."
] | c507ea5b1864303732c53ad7c5800571fca5fa94 | https://github.com/ic-labs/django-icekit/blob/c507ea5b1864303732c53ad7c5800571fca5fa94/icekit_events/page_types/advancedeventlisting/models.py#L111-L119 | train | 33,319 |
def get_users(self, email):
    """
    Make sure users are staff users.

    Additionally to the other PasswordResetForm conditions ensure
    that the user is a staff user before sending them a password
    reset email.

    :param email: Textual email address.
    :return: List of users.
    """
    parent = super(PasswordResetForm, self)
    # Django >= 1.8 exposes get_users(); narrow its results to active staff.
    if hasattr(parent, 'get_users'):
        return (user for user in parent.get_users(email)
                if user.is_staff and user.is_active)
    # Django < 1.8: replicate the lookup manually.
    candidates = get_user_model()._default_manager.filter(
        email__iexact=email, is_active=True)
    return (user for user in candidates
            if user.has_usable_password() and user.is_staff and user.is_active)
"""
Make sure users are staff users.
Additionally to the other PasswordResetForm conditions ensure
that the user is a staff user before sending them a password
reset email.
:param email: Textual email address.
:return: List of users.
"""
# Django 1.8 supports this feature.
if hasattr(super(PasswordResetForm, self), 'get_users'):
return (
u for u in super(PasswordResetForm, self).get_users(email)
if u.is_staff and u.is_active
)
# Django Django < 1.8 support we can do this manually.
active_users = get_user_model()._default_manager.filter(email__iexact=email, is_active=True)
return (u for u in active_users if u.has_usable_password() and u.is_staff and u.is_active) | [
"def",
"get_users",
"(",
"self",
",",
"email",
")",
":",
"# Django 1.8 supports this feature.",
"if",
"hasattr",
"(",
"super",
"(",
"PasswordResetForm",
",",
"self",
")",
",",
"'get_users'",
")",
":",
"return",
"(",
"u",
"for",
"u",
"in",
"super",
"(",
"Pa... | Make sure users are staff users.
Additionally to the other PasswordResetForm conditions ensure
that the user is a staff user before sending them a password
reset email.
:param email: Textual email address.
:return: List of users. | [
"Make",
"sure",
"users",
"are",
"staff",
"users",
"."
] | c507ea5b1864303732c53ad7c5800571fca5fa94 | https://github.com/ic-labs/django-icekit/blob/c507ea5b1864303732c53ad7c5800571fca5fa94/icekit/admin_tools/forms.py#L12-L32 | train | 33,320 |
def send_email_notifications_for_workflow_state_change(
        sender, instance, *args, **kwargs):
    """
    Send email notifications for save events on ``WorkflowState`` based on
    settings in this module's `appsettings`.

    For each configured notification target the recipient is either the
    target's explicit ``to`` address(es) or the workflow state's assigned
    user; targets with neither are skipped.
    """
    # Short-circuit processing if we have no notification targets
    if not appsettings.WORKFLOW_EMAIL_NOTIFICATION_TARGETS:
        return
    obj = instance.content_object
    # Look up admin URL for object, getting the admin site domain from
    # ``settings.SITE_URL`` if available, otherwise punting to the first
    # available ``Site`` domain.
    obj_ct = ContentType.objects.get_for_model(obj)
    obj_app_name = '_'.join(obj_ct.natural_key())
    admin_path = reverse(
        'admin:%s_change' % obj_app_name, args=(obj.pk,))
    # BUGFIX: compute the ``Site`` fallback lazily. The old code passed
    # ``Site.objects.all()[0].domain`` as a ``getattr`` default, which is
    # evaluated eagerly and hit the database even when ``SITE_URL`` was
    # configured.
    admin_domain = getattr(settings, 'SITE_URL', None)
    if not admin_domain:
        admin_domain = Site.objects.all()[0].domain
    if '://' not in admin_domain:
        # TODO Detect when we should use 'https' instead
        admin_domain = 'http://' + admin_domain
    admin_url = ''.join([admin_domain, admin_path])
    # Send an email notification to each target configured in settings
    for target_settings in appsettings.WORKFLOW_EMAIL_NOTIFICATION_TARGETS:
        # Send email to specific address if configured...
        if 'to' in target_settings:
            to_addresses = target_settings['to']
        # ... otherwise to the assigned user if any...
        elif instance.assigned_to:
            to_addresses = instance.assigned_to.email
        # ... otherwise skip, since we have no TO addressee
        else:
            continue
        if not isinstance(to_addresses, list):
            to_addresses = [to_addresses]
        from_address = target_settings.get(
            'from', appsettings.WORKFLOW_EMAIL_NOTIFICATION_DEFAULT_FROM)
        subject_template = target_settings.get(
            'subject_template',
            appsettings.WORKFLOW_EMAIL_NOTIFICATION_SUBJECT_TEMPLATE)
        message_template = target_settings.get(
            'message_template',
            appsettings.WORKFLOW_EMAIL_NOTIFICATION_MESSAGE_TEMPLATE)
        # Optionally map assigned user to some other identifier, such as
        # a Slack username etc.
        assigned_to_mapper = target_settings.get(
            'map_assigned_to_fn', lambda x: x)
        # Render subject and message templates
        template_context = Context({
            'state': instance,
            'object': obj,
            'admin_url': admin_url,
            'assigned_to': assigned_to_mapper(instance.assigned_to),
        })
        subject = Template(subject_template).render(template_context)
        message = Template(message_template).render(template_context)
        send_mail(subject, message, from_address, to_addresses)
sender, instance, *args, **kwargs
):
"""
Send email notifications for save events on ``WorkflowState`` based on
settings in this module's `appsettings`.
"""
# Short-circuit processing if we have no notification targets
if not appsettings.WORKFLOW_EMAIL_NOTIFICATION_TARGETS:
return
obj = instance.content_object
# Look up admin URL for object, getting the admin site domain from
# ``settings.SITE_URL` if available otherwise punt to the first available
# ``Site`` domain.
obj_ct = ContentType.objects.get_for_model(obj)
obj_app_name = '_'.join(obj_ct.natural_key())
admin_path = reverse(
'admin:%s_change' % obj_app_name, args=(obj.pk,))
admin_domain = getattr(
settings,
'SITE_URL',
Site.objects.all()[0].domain)
if '://' not in admin_domain:
# TODO Detect when we should use 'https' instead
admin_domain = 'http://' + admin_domain
admin_url = ''.join([admin_domain, admin_path])
# Send an email notification to each target configured in settings
for target_settings in appsettings.WORKFLOW_EMAIL_NOTIFICATION_TARGETS:
# Send email to specific address if configured...
if 'to' in target_settings:
to_addresses = target_settings['to']
# ... otherwise to the assigned user if any...
elif instance.assigned_to:
to_addresses = instance.assigned_to.email
# ... otherwise skip, since we have no TO addressee
else:
continue
if not isinstance(to_addresses, list):
to_addresses = [to_addresses]
from_address = target_settings.get(
'from', appsettings.WORKFLOW_EMAIL_NOTIFICATION_DEFAULT_FROM)
subject_template = target_settings.get(
'subject_template',
appsettings.WORKFLOW_EMAIL_NOTIFICATION_SUBJECT_TEMPLATE)
message_template = target_settings.get(
'message_template',
appsettings.WORKFLOW_EMAIL_NOTIFICATION_MESSAGE_TEMPLATE)
# Optionally map assigned user to some other identifier, such as
# a Slack username etc.
assigned_to_mapper = target_settings.get(
'map_assigned_to_fn', lambda x: x)
# Render subject and message templates
template_context = Context({
'state': instance,
'object': obj,
'admin_url': admin_url,
'assigned_to': assigned_to_mapper(instance.assigned_to),
})
subject = Template(subject_template).render(template_context)
message = Template(message_template).render(template_context)
send_mail(subject, message, from_address, to_addresses) | [
"def",
"send_email_notifications_for_workflow_state_change",
"(",
"sender",
",",
"instance",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"# Short-circuit processing if we have no notification targets",
"if",
"not",
"appsettings",
".",
"WORKFLOW_EMAIL_NOTIFICATION_TAR... | Send email notifications for save events on ``WorkflowState`` based on
settings in this module's `appsettings`. | [
"Send",
"email",
"notifications",
"for",
"save",
"events",
"on",
"WorkflowState",
"based",
"on",
"settings",
"in",
"this",
"module",
"s",
"appsettings",
"."
] | c507ea5b1864303732c53ad7c5800571fca5fa94 | https://github.com/ic-labs/django-icekit/blob/c507ea5b1864303732c53ad7c5800571fca5fa94/icekit/workflow/models.py#L86-L155 | train | 33,321 |
ic-labs/django-icekit | icekit/plugins/iiif/views.py | _get_image_or_404 | def _get_image_or_404(identifier, load_image=False):
"""
Return image matching `identifier`.
The `identifier` is expected to be a raw image ID for now, but may be
more complex later.
"""
try:
image_id = int(identifier)
except ValueError:
raise Http404()
ik_image = get_object_or_404(ICEkitImage, id=image_id)
if not load_image:
return ik_image, None
####################################################################
# Image-loading incantation cribbed from easythumbnail's `pil_image`
image = Image.open(BytesIO(ik_image.image.read()))
# Fully load the image now to catch any problems with the image contents.
try:
# An "Image file truncated" exception can occur for some images that
# are still mostly valid -- we'll swallow the exception.
image.load()
except IOError:
pass
# Try a second time to catch any other potential exceptions.
image.load()
if True: # Support EXIF orientation data
image = et_utils.exif_orientation(image)
####################################################################
return ik_image, image | python | def _get_image_or_404(identifier, load_image=False):
"""
Return image matching `identifier`.
The `identifier` is expected to be a raw image ID for now, but may be
more complex later.
"""
try:
image_id = int(identifier)
except ValueError:
raise Http404()
ik_image = get_object_or_404(ICEkitImage, id=image_id)
if not load_image:
return ik_image, None
####################################################################
# Image-loading incantation cribbed from easythumbnail's `pil_image`
image = Image.open(BytesIO(ik_image.image.read()))
# Fully load the image now to catch any problems with the image contents.
try:
# An "Image file truncated" exception can occur for some images that
# are still mostly valid -- we'll swallow the exception.
image.load()
except IOError:
pass
# Try a second time to catch any other potential exceptions.
image.load()
if True: # Support EXIF orientation data
image = et_utils.exif_orientation(image)
####################################################################
return ik_image, image | [
"def",
"_get_image_or_404",
"(",
"identifier",
",",
"load_image",
"=",
"False",
")",
":",
"try",
":",
"image_id",
"=",
"int",
"(",
"identifier",
")",
"except",
"ValueError",
":",
"raise",
"Http404",
"(",
")",
"ik_image",
"=",
"get_object_or_404",
"(",
"ICEki... | Return image matching `identifier`.
The `identifier` is expected to be a raw image ID for now, but may be
more complex later. | [
"Return",
"image",
"matching",
"identifier",
"."
] | c507ea5b1864303732c53ad7c5800571fca5fa94 | https://github.com/ic-labs/django-icekit/blob/c507ea5b1864303732c53ad7c5800571fca5fa94/icekit/plugins/iiif/views.py#L40-L72 | train | 33,322 |
ic-labs/django-icekit | icekit/plugins/iiif/views.py | iiif_image_api | def iiif_image_api(request, identifier_param, region_param, size_param,
rotation_param, quality_param, format_param):
""" Image repurposing endpoint for IIIF Image API 2.1 """
ik_image, image = _get_image_or_404(identifier_param, load_image=True)
is_transparent = et_utils.is_transparent(image)
is_grayscale = image.mode in ('L', 'LA')
# Map format names used for IIIF URL path extension to proper name
format_mapping = {
'jpg': 'jpeg',
'tif': 'tiff',
}
try:
# Parse region
x, y, r_width, r_height = parse_region(
region_param, image.width, image.height)
# Parse size
s_width, s_height = parse_size(size_param, r_width, r_height)
# Parse rotation
is_mirrored, rotation_degrees = \
parse_rotation(rotation_param, s_width, s_height)
# Parse quality
quality = parse_quality(quality_param)
# Parse format
# TODO Add support for unsupported formats (see `parse_format`)
image_format = os.path.splitext(ik_image.image.name)[1][1:].lower()
output_format = parse_format(format_param, image_format)
corrected_format = format_mapping.get(output_format, output_format)
# Redirect to canonical URL if appropriate, per
# http://iiif.io/api/image/2.1/#canonical-uri-syntax
canonical_path = make_canonical_path(
identifier_param, image.width, image.height,
(x, y, r_width, r_height), # Region
(s_width, s_height), # Size
(is_mirrored, rotation_degrees), # Rotation
quality,
output_format
)
if request.path != canonical_path:
return HttpResponseRedirect(canonical_path)
# Determine storage file name for item
if iiif_storage:
storage_path = build_iiif_file_storage_path(
canonical_path, ik_image, iiif_storage)
else:
storage_path = None
# Load pre-generated image from storage if one exists and is up-to-date
# with the original image (per timestampt info embedded in the storage
# path)
# TODO The exists lookup is slow for S3 storage, cache metadata?
# TODO Detect when original image would be unchanged & use it directly?
if (
storage_path and
iiif_storage.exists(storage_path)
):
if is_remote_storage(iiif_storage, storage_path):
return HttpResponseRedirect(iiif_storage.url(storage_path))
else:
return FileResponse(
iiif_storage.open(storage_path),
content_type='image/%s' % corrected_format,
)
##################
# Generate image #
##################
# Apply region
if x or y or r_width != image.width or r_height != image.height:
box = (x, y, x + r_width, y + r_height)
image = image.crop(box)
# Apply size
if s_width != r_width or s_height != r_height:
size = (s_width, s_height)
image = image.resize(size)
# TODO Apply rotation
# Apply quality
# Much of this is cribbed from easythumbnails' `colorspace` processor
# TODO Replace with glamkit-imagetools' sRGB colour space converter?
if quality in ('default', 'color') and not is_grayscale:
if is_transparent:
new_mode = 'RGBA'
else:
new_mode = 'RGB'
elif is_grayscale or quality == 'gray':
if is_transparent:
new_mode = 'LA'
else:
new_mode = 'L'
if new_mode != image.mode:
image = image.convert(new_mode)
# Apply format and "save"
result_image = BytesIO()
image.save(result_image, format=corrected_format)
# Save generated image to storage if possible
if storage_path:
iiif_storage.save(storage_path, result_image)
if iiif_storage and is_remote_storage(iiif_storage, storage_path):
return HttpResponseRedirect(iiif_storage.url(storage_path))
else:
result_image.seek(0) # Reset image file in case it's just created
return FileResponse(
result_image.read(),
content_type='image/%s' % corrected_format,
)
# Handle error conditions per iiif.io/api/image/2.1/#server-responses
except ClientError, ex:
return HttpResponseBadRequest(ex.message) # 400 response
except UnsupportedError, ex:
return HttpResponseNotImplemented(ex.message) | python | def iiif_image_api(request, identifier_param, region_param, size_param,
rotation_param, quality_param, format_param):
""" Image repurposing endpoint for IIIF Image API 2.1 """
ik_image, image = _get_image_or_404(identifier_param, load_image=True)
is_transparent = et_utils.is_transparent(image)
is_grayscale = image.mode in ('L', 'LA')
# Map format names used for IIIF URL path extension to proper name
format_mapping = {
'jpg': 'jpeg',
'tif': 'tiff',
}
try:
# Parse region
x, y, r_width, r_height = parse_region(
region_param, image.width, image.height)
# Parse size
s_width, s_height = parse_size(size_param, r_width, r_height)
# Parse rotation
is_mirrored, rotation_degrees = \
parse_rotation(rotation_param, s_width, s_height)
# Parse quality
quality = parse_quality(quality_param)
# Parse format
# TODO Add support for unsupported formats (see `parse_format`)
image_format = os.path.splitext(ik_image.image.name)[1][1:].lower()
output_format = parse_format(format_param, image_format)
corrected_format = format_mapping.get(output_format, output_format)
# Redirect to canonical URL if appropriate, per
# http://iiif.io/api/image/2.1/#canonical-uri-syntax
canonical_path = make_canonical_path(
identifier_param, image.width, image.height,
(x, y, r_width, r_height), # Region
(s_width, s_height), # Size
(is_mirrored, rotation_degrees), # Rotation
quality,
output_format
)
if request.path != canonical_path:
return HttpResponseRedirect(canonical_path)
# Determine storage file name for item
if iiif_storage:
storage_path = build_iiif_file_storage_path(
canonical_path, ik_image, iiif_storage)
else:
storage_path = None
# Load pre-generated image from storage if one exists and is up-to-date
# with the original image (per timestampt info embedded in the storage
# path)
# TODO The exists lookup is slow for S3 storage, cache metadata?
# TODO Detect when original image would be unchanged & use it directly?
if (
storage_path and
iiif_storage.exists(storage_path)
):
if is_remote_storage(iiif_storage, storage_path):
return HttpResponseRedirect(iiif_storage.url(storage_path))
else:
return FileResponse(
iiif_storage.open(storage_path),
content_type='image/%s' % corrected_format,
)
##################
# Generate image #
##################
# Apply region
if x or y or r_width != image.width or r_height != image.height:
box = (x, y, x + r_width, y + r_height)
image = image.crop(box)
# Apply size
if s_width != r_width or s_height != r_height:
size = (s_width, s_height)
image = image.resize(size)
# TODO Apply rotation
# Apply quality
# Much of this is cribbed from easythumbnails' `colorspace` processor
# TODO Replace with glamkit-imagetools' sRGB colour space converter?
if quality in ('default', 'color') and not is_grayscale:
if is_transparent:
new_mode = 'RGBA'
else:
new_mode = 'RGB'
elif is_grayscale or quality == 'gray':
if is_transparent:
new_mode = 'LA'
else:
new_mode = 'L'
if new_mode != image.mode:
image = image.convert(new_mode)
# Apply format and "save"
result_image = BytesIO()
image.save(result_image, format=corrected_format)
# Save generated image to storage if possible
if storage_path:
iiif_storage.save(storage_path, result_image)
if iiif_storage and is_remote_storage(iiif_storage, storage_path):
return HttpResponseRedirect(iiif_storage.url(storage_path))
else:
result_image.seek(0) # Reset image file in case it's just created
return FileResponse(
result_image.read(),
content_type='image/%s' % corrected_format,
)
# Handle error conditions per iiif.io/api/image/2.1/#server-responses
except ClientError, ex:
return HttpResponseBadRequest(ex.message) # 400 response
except UnsupportedError, ex:
return HttpResponseNotImplemented(ex.message) | [
"def",
"iiif_image_api",
"(",
"request",
",",
"identifier_param",
",",
"region_param",
",",
"size_param",
",",
"rotation_param",
",",
"quality_param",
",",
"format_param",
")",
":",
"ik_image",
",",
"image",
"=",
"_get_image_or_404",
"(",
"identifier_param",
",",
... | Image repurposing endpoint for IIIF Image API 2.1 | [
"Image",
"repurposing",
"endpoint",
"for",
"IIIF",
"Image",
"API",
"2",
".",
"1"
] | c507ea5b1864303732c53ad7c5800571fca5fa94 | https://github.com/ic-labs/django-icekit/blob/c507ea5b1864303732c53ad7c5800571fca5fa94/icekit/plugins/iiif/views.py#L116-L240 | train | 33,323 |
ic-labs/django-icekit | icekit/plugins/instagram_embed/forms.py | InstagramEmbedAdminForm.clean_url | def clean_url(self):
"""
Make sure the URL provided matches the instagram URL format.
"""
url = self.cleaned_data['url']
if url:
pattern = re.compile(r'https?://(www\.)?instagr(\.am|am\.com)/p/\S+')
if not pattern.match(url):
raise forms.ValidationError('Please provide a valid instagram link.')
return url | python | def clean_url(self):
"""
Make sure the URL provided matches the instagram URL format.
"""
url = self.cleaned_data['url']
if url:
pattern = re.compile(r'https?://(www\.)?instagr(\.am|am\.com)/p/\S+')
if not pattern.match(url):
raise forms.ValidationError('Please provide a valid instagram link.')
return url | [
"def",
"clean_url",
"(",
"self",
")",
":",
"url",
"=",
"self",
".",
"cleaned_data",
"[",
"'url'",
"]",
"if",
"url",
":",
"pattern",
"=",
"re",
".",
"compile",
"(",
"r'https?://(www\\.)?instagr(\\.am|am\\.com)/p/\\S+'",
")",
"if",
"not",
"pattern",
".",
"matc... | Make sure the URL provided matches the instagram URL format. | [
"Make",
"sure",
"the",
"URL",
"provided",
"matches",
"the",
"instagram",
"URL",
"format",
"."
] | c507ea5b1864303732c53ad7c5800571fca5fa94 | https://github.com/ic-labs/django-icekit/blob/c507ea5b1864303732c53ad7c5800571fca5fa94/icekit/plugins/instagram_embed/forms.py#L8-L19 | train | 33,324 |
ic-labs/django-icekit | icekit/api/base_views.py | RedirectViewset.retrieve | def retrieve(self, request, *args, **kwargs):
"""
If the URL slug doesn't match an object, try slugifying the URL param
and searching alt_url for that.
If found, redirect to the canonical URL.
If still not found, raise 404.
"""
try:
instance = self.get_object()
serializer = self.get_serializer(instance)
return Response(serializer.data)
except Http404:
pass
lookup_url_kwarg = self.lookup_url_kwarg or self.lookup_field
slug = self.kwargs[lookup_url_kwarg]
# fall back to accession_temp_fallback
try:
tmp = get_object_or_404(
self.queryset, accession_temp_fallback=slug)
return HttpResponseRedirect(
reverse(self.redirect_view_name, (tmp.slug, ))
)
except (Http404, FieldError):
pass
# fall back to embark ID
try:
embark = get_object_or_404(self.queryset, id=int(slug))
return HttpResponseRedirect(
reverse(self.redirect_view_name, (embark.slug, ))
)
except (Http404, ValueError):
pass
# fall back to alt slug
try:
from glamkit_collections.utils import alt_slugify
alt_slug = alt_slugify(slug)
alt = get_object_or_404(self.queryset, alt_slug=alt_slug)
return HttpResponseRedirect(
reverse(self.redirect_view_name, (alt.slug, ))
)
except Http404:
pass
# well, we tried
raise Http404 | python | def retrieve(self, request, *args, **kwargs):
"""
If the URL slug doesn't match an object, try slugifying the URL param
and searching alt_url for that.
If found, redirect to the canonical URL.
If still not found, raise 404.
"""
try:
instance = self.get_object()
serializer = self.get_serializer(instance)
return Response(serializer.data)
except Http404:
pass
lookup_url_kwarg = self.lookup_url_kwarg or self.lookup_field
slug = self.kwargs[lookup_url_kwarg]
# fall back to accession_temp_fallback
try:
tmp = get_object_or_404(
self.queryset, accession_temp_fallback=slug)
return HttpResponseRedirect(
reverse(self.redirect_view_name, (tmp.slug, ))
)
except (Http404, FieldError):
pass
# fall back to embark ID
try:
embark = get_object_or_404(self.queryset, id=int(slug))
return HttpResponseRedirect(
reverse(self.redirect_view_name, (embark.slug, ))
)
except (Http404, ValueError):
pass
# fall back to alt slug
try:
from glamkit_collections.utils import alt_slugify
alt_slug = alt_slugify(slug)
alt = get_object_or_404(self.queryset, alt_slug=alt_slug)
return HttpResponseRedirect(
reverse(self.redirect_view_name, (alt.slug, ))
)
except Http404:
pass
# well, we tried
raise Http404 | [
"def",
"retrieve",
"(",
"self",
",",
"request",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"try",
":",
"instance",
"=",
"self",
".",
"get_object",
"(",
")",
"serializer",
"=",
"self",
".",
"get_serializer",
"(",
"instance",
")",
"return",
"... | If the URL slug doesn't match an object, try slugifying the URL param
and searching alt_url for that.
If found, redirect to the canonical URL.
If still not found, raise 404. | [
"If",
"the",
"URL",
"slug",
"doesn",
"t",
"match",
"an",
"object",
"try",
"slugifying",
"the",
"URL",
"param",
"and",
"searching",
"alt_url",
"for",
"that",
".",
"If",
"found",
"redirect",
"to",
"the",
"canonical",
"URL",
".",
"If",
"still",
"not",
"foun... | c507ea5b1864303732c53ad7c5800571fca5fa94 | https://github.com/ic-labs/django-icekit/blob/c507ea5b1864303732c53ad7c5800571fca5fa94/icekit/api/base_views.py#L22-L71 | train | 33,325 |
ic-labs/django-icekit | icekit_events/sample_data/migrations/0001_initial.py | forwards | def forwards(apps, schema_editor):
"""
Create sample events.
"""
starts = timeutils.round_datetime(
when=timezone.now(),
precision=timedelta(days=1),
rounding=timeutils.ROUND_DOWN)
ends = starts + appsettings.DEFAULT_ENDS_DELTA
recurrence_rules = dict(
RecurrenceRule.objects.values_list('description', 'recurrence_rule'))
daily = recurrence_rules['Daily']
weekdays = recurrence_rules['Daily, Weekdays']
weekends = recurrence_rules['Daily, Weekends']
weekly = recurrence_rules['Weekly']
monthly = recurrence_rules['Monthly']
yearly = recurrence_rules['Yearly']
daily_event = G(
EventBase,
title='Daily Event',
starts=starts + timedelta(hours=9),
ends=ends + timedelta(hours=9),
recurrence_rule=daily,
)
weekday_event = G(
EventBase,
title='Weekday Event',
starts=starts + timedelta(hours=11),
ends=ends + timedelta(hours=11),
recurrence_rule=weekdays,
)
weekend_event = G(
EventBase,
title='Weekend Event',
starts=starts + timedelta(hours=13),
ends=ends + timedelta(hours=13),
recurrence_rule=weekends,
)
weekly_event = G(
EventBase,
title='Weekly Event',
starts=starts + timedelta(hours=15),
ends=ends + timedelta(hours=15),
recurrence_rule=weekly,
)
monthly_event = G(
EventBase,
title='Monthly Event',
starts=starts + timedelta(hours=17),
ends=ends + timedelta(hours=17),
recurrence_rule=monthly,
)
yearly_event = G(
EventBase,
title='Yearly Event',
starts=starts + timedelta(hours=19),
ends=ends + timedelta(hours=19),
recurrence_rule=yearly,
) | python | def forwards(apps, schema_editor):
"""
Create sample events.
"""
starts = timeutils.round_datetime(
when=timezone.now(),
precision=timedelta(days=1),
rounding=timeutils.ROUND_DOWN)
ends = starts + appsettings.DEFAULT_ENDS_DELTA
recurrence_rules = dict(
RecurrenceRule.objects.values_list('description', 'recurrence_rule'))
daily = recurrence_rules['Daily']
weekdays = recurrence_rules['Daily, Weekdays']
weekends = recurrence_rules['Daily, Weekends']
weekly = recurrence_rules['Weekly']
monthly = recurrence_rules['Monthly']
yearly = recurrence_rules['Yearly']
daily_event = G(
EventBase,
title='Daily Event',
starts=starts + timedelta(hours=9),
ends=ends + timedelta(hours=9),
recurrence_rule=daily,
)
weekday_event = G(
EventBase,
title='Weekday Event',
starts=starts + timedelta(hours=11),
ends=ends + timedelta(hours=11),
recurrence_rule=weekdays,
)
weekend_event = G(
EventBase,
title='Weekend Event',
starts=starts + timedelta(hours=13),
ends=ends + timedelta(hours=13),
recurrence_rule=weekends,
)
weekly_event = G(
EventBase,
title='Weekly Event',
starts=starts + timedelta(hours=15),
ends=ends + timedelta(hours=15),
recurrence_rule=weekly,
)
monthly_event = G(
EventBase,
title='Monthly Event',
starts=starts + timedelta(hours=17),
ends=ends + timedelta(hours=17),
recurrence_rule=monthly,
)
yearly_event = G(
EventBase,
title='Yearly Event',
starts=starts + timedelta(hours=19),
ends=ends + timedelta(hours=19),
recurrence_rule=yearly,
) | [
"def",
"forwards",
"(",
"apps",
",",
"schema_editor",
")",
":",
"starts",
"=",
"timeutils",
".",
"round_datetime",
"(",
"when",
"=",
"timezone",
".",
"now",
"(",
")",
",",
"precision",
"=",
"timedelta",
"(",
"days",
"=",
"1",
")",
",",
"rounding",
"=",... | Create sample events. | [
"Create",
"sample",
"events",
"."
] | c507ea5b1864303732c53ad7c5800571fca5fa94 | https://github.com/ic-labs/django-icekit/blob/c507ea5b1864303732c53ad7c5800571fca5fa94/icekit_events/sample_data/migrations/0001_initial.py#L15-L80 | train | 33,326 |
ic-labs/django-icekit | icekit_events/sample_data/migrations/0001_initial.py | backwards | def backwards(apps, schema_editor):
"""
Delete sample events, including derivative repeat and variation events.
"""
titles = [
'Daily Event',
'Weekday Event',
'Weekend Event',
'Weekly Event',
'Monthly Event',
'Yearly Event',
]
samples = EventBase.objects.filter(title__in=titles)
samples.delete() | python | def backwards(apps, schema_editor):
"""
Delete sample events, including derivative repeat and variation events.
"""
titles = [
'Daily Event',
'Weekday Event',
'Weekend Event',
'Weekly Event',
'Monthly Event',
'Yearly Event',
]
samples = EventBase.objects.filter(title__in=titles)
samples.delete() | [
"def",
"backwards",
"(",
"apps",
",",
"schema_editor",
")",
":",
"titles",
"=",
"[",
"'Daily Event'",
",",
"'Weekday Event'",
",",
"'Weekend Event'",
",",
"'Weekly Event'",
",",
"'Monthly Event'",
",",
"'Yearly Event'",
",",
"]",
"samples",
"=",
"EventBase",
"."... | Delete sample events, including derivative repeat and variation events. | [
"Delete",
"sample",
"events",
"including",
"derivative",
"repeat",
"and",
"variation",
"events",
"."
] | c507ea5b1864303732c53ad7c5800571fca5fa94 | https://github.com/ic-labs/django-icekit/blob/c507ea5b1864303732c53ad7c5800571fca5fa94/icekit_events/sample_data/migrations/0001_initial.py#L83-L96 | train | 33,327 |
ic-labs/django-icekit | icekit/api/management/commands/dump_collection_api.py | dump_viewset | def dump_viewset(viewset_class, root_folder, folder_fn=lambda i: ".", sample_size=None):
"""
Dump the contents of a rest-api queryset to a folder structure.
:param viewset_class: A rest-api viewset to iterate through
:param root_folder: The root folder to write results to.
:param folder_fn: A function to generate a subfolder name for the instance.
:param sample_size: Number of items to process, for test purposes.
:return:
"""
if os.path.exists(root_folder):
shutil.rmtree(root_folder)
os.makedirs(root_folder)
vs = viewset_class()
vs.request = rf.get('')
serializer_class = vs.get_serializer_class()
serializer = serializer_class(context={'request': vs.request, 'format': 'json', 'view': vs})
renderer = PrettyJSONRenderer()
bar = progressbar.ProgressBar()
for instance in bar(vs.get_queryset()[:sample_size]):
dct = serializer.to_representation(instance)
content = renderer.render(dct)
folder = os.path.join(root_folder, folder_fn(instance))
if not os.path.exists(folder):
os.makedirs(folder)
filename = "%s.json" % instance.slug
f = file(os.path.join(folder, filename), 'w')
f.write(content)
f.close() | python | def dump_viewset(viewset_class, root_folder, folder_fn=lambda i: ".", sample_size=None):
"""
Dump the contents of a rest-api queryset to a folder structure.
:param viewset_class: A rest-api viewset to iterate through
:param root_folder: The root folder to write results to.
:param folder_fn: A function to generate a subfolder name for the instance.
:param sample_size: Number of items to process, for test purposes.
:return:
"""
if os.path.exists(root_folder):
shutil.rmtree(root_folder)
os.makedirs(root_folder)
vs = viewset_class()
vs.request = rf.get('')
serializer_class = vs.get_serializer_class()
serializer = serializer_class(context={'request': vs.request, 'format': 'json', 'view': vs})
renderer = PrettyJSONRenderer()
bar = progressbar.ProgressBar()
for instance in bar(vs.get_queryset()[:sample_size]):
dct = serializer.to_representation(instance)
content = renderer.render(dct)
folder = os.path.join(root_folder, folder_fn(instance))
if not os.path.exists(folder):
os.makedirs(folder)
filename = "%s.json" % instance.slug
f = file(os.path.join(folder, filename), 'w')
f.write(content)
f.close() | [
"def",
"dump_viewset",
"(",
"viewset_class",
",",
"root_folder",
",",
"folder_fn",
"=",
"lambda",
"i",
":",
"\".\"",
",",
"sample_size",
"=",
"None",
")",
":",
"if",
"os",
".",
"path",
".",
"exists",
"(",
"root_folder",
")",
":",
"shutil",
".",
"rmtree",... | Dump the contents of a rest-api queryset to a folder structure.
:param viewset_class: A rest-api viewset to iterate through
:param root_folder: The root folder to write results to.
:param folder_fn: A function to generate a subfolder name for the instance.
:param sample_size: Number of items to process, for test purposes.
:return: | [
"Dump",
"the",
"contents",
"of",
"a",
"rest",
"-",
"api",
"queryset",
"to",
"a",
"folder",
"structure",
"."
] | c507ea5b1864303732c53ad7c5800571fca5fa94 | https://github.com/ic-labs/django-icekit/blob/c507ea5b1864303732c53ad7c5800571fca5fa94/icekit/api/management/commands/dump_collection_api.py#L28-L61 | train | 33,328 |
ic-labs/django-icekit | icekit/api/management/commands/dump_collection_api.py | concatenate_json | def concatenate_json(source_folder, destination_file):
"""
Concatenate all the json files in a folder to one big JSON file.
"""
matches = []
for root, dirnames, filenames in os.walk(source_folder):
for filename in fnmatch.filter(filenames, '*.json'):
matches.append(os.path.join(root, filename))
with open(destination_file, "wb") as f:
f.write("[\n")
for m in matches[:-1]:
f.write(open(m, "rb").read())
f.write(",\n")
f.write(open(matches[-1], "rb").read())
f.write("\n]") | python | def concatenate_json(source_folder, destination_file):
"""
Concatenate all the json files in a folder to one big JSON file.
"""
matches = []
for root, dirnames, filenames in os.walk(source_folder):
for filename in fnmatch.filter(filenames, '*.json'):
matches.append(os.path.join(root, filename))
with open(destination_file, "wb") as f:
f.write("[\n")
for m in matches[:-1]:
f.write(open(m, "rb").read())
f.write(",\n")
f.write(open(matches[-1], "rb").read())
f.write("\n]") | [
"def",
"concatenate_json",
"(",
"source_folder",
",",
"destination_file",
")",
":",
"matches",
"=",
"[",
"]",
"for",
"root",
",",
"dirnames",
",",
"filenames",
"in",
"os",
".",
"walk",
"(",
"source_folder",
")",
":",
"for",
"filename",
"in",
"fnmatch",
"."... | Concatenate all the json files in a folder to one big JSON file. | [
"Concatenate",
"all",
"the",
"json",
"files",
"in",
"a",
"folder",
"to",
"one",
"big",
"JSON",
"file",
"."
] | c507ea5b1864303732c53ad7c5800571fca5fa94 | https://github.com/ic-labs/django-icekit/blob/c507ea5b1864303732c53ad7c5800571fca5fa94/icekit/api/management/commands/dump_collection_api.py#L63-L78 | train | 33,329 |
ic-labs/django-icekit | icekit/content_collections/abstract_models.py | TitleSlugMixin.validate_unique_slug | def validate_unique_slug(self):
"""
Ensure slug is unique for this model. This check is aware of publishing
but is otherwise fairly basic and will need to be customised for
situations where models with slugs are not in a flat hierarchy etc.
"""
clashes_qs = type(self).objects.filter(slug=self.slug)
if self.pk:
clashes_qs = clashes_qs.exclude(pk=self.pk)
if isinstance(self, PublishingModel):
clashes_qs = clashes_qs.filter(
publishing_is_draft=self.publishing_is_draft)
if clashes_qs:
raise ValidationError(
"Slug '%s' clashes with other items: %s"
% (self.slug, clashes_qs)) | python | def validate_unique_slug(self):
"""
Ensure slug is unique for this model. This check is aware of publishing
but is otherwise fairly basic and will need to be customised for
situations where models with slugs are not in a flat hierarchy etc.
"""
clashes_qs = type(self).objects.filter(slug=self.slug)
if self.pk:
clashes_qs = clashes_qs.exclude(pk=self.pk)
if isinstance(self, PublishingModel):
clashes_qs = clashes_qs.filter(
publishing_is_draft=self.publishing_is_draft)
if clashes_qs:
raise ValidationError(
"Slug '%s' clashes with other items: %s"
% (self.slug, clashes_qs)) | [
"def",
"validate_unique_slug",
"(",
"self",
")",
":",
"clashes_qs",
"=",
"type",
"(",
"self",
")",
".",
"objects",
".",
"filter",
"(",
"slug",
"=",
"self",
".",
"slug",
")",
"if",
"self",
".",
"pk",
":",
"clashes_qs",
"=",
"clashes_qs",
".",
"exclude",... | Ensure slug is unique for this model. This check is aware of publishing
but is otherwise fairly basic and will need to be customised for
situations where models with slugs are not in a flat hierarchy etc. | [
"Ensure",
"slug",
"is",
"unique",
"for",
"this",
"model",
".",
"This",
"check",
"is",
"aware",
"of",
"publishing",
"but",
"is",
"otherwise",
"fairly",
"basic",
"and",
"will",
"need",
"to",
"be",
"customised",
"for",
"situations",
"where",
"models",
"with",
... | c507ea5b1864303732c53ad7c5800571fca5fa94 | https://github.com/ic-labs/django-icekit/blob/c507ea5b1864303732c53ad7c5800571fca5fa94/icekit/content_collections/abstract_models.py#L31-L46 | train | 33,330 |
ic-labs/django-icekit | icekit/content_collections/abstract_models.py | AbstractCollectedContent.get_absolute_url | def get_absolute_url(self):
"""
The majority of the time, the URL is the parent's URL plus the slug.
If not, override this function.
"""
# Get the appropriate draft or published parent object for the current
# item, otherwise we risk mixing the draft parent URL with a published
# item's URL etc.
if self.is_draft:
visible_parent = self.parent.get_draft()
else:
# Fallback to draft URL for unpublished parent.
visible_parent = self.parent.get_published() or \
self.parent.get_draft()
# Appending "/" to avoid a) django redirect and b) incorrect edit slug
# for the admin.
return urljoin(visible_parent.get_absolute_url(), self.slug + "/") | python | def get_absolute_url(self):
"""
The majority of the time, the URL is the parent's URL plus the slug.
If not, override this function.
"""
# Get the appropriate draft or published parent object for the current
# item, otherwise we risk mixing the draft parent URL with a published
# item's URL etc.
if self.is_draft:
visible_parent = self.parent.get_draft()
else:
# Fallback to draft URL for unpublished parent.
visible_parent = self.parent.get_published() or \
self.parent.get_draft()
# Appending "/" to avoid a) django redirect and b) incorrect edit slug
# for the admin.
return urljoin(visible_parent.get_absolute_url(), self.slug + "/") | [
"def",
"get_absolute_url",
"(",
"self",
")",
":",
"# Get the appropriate draft or published parent object for the current",
"# item, otherwise we risk mixing the draft parent URL with a published",
"# item's URL etc.",
"if",
"self",
".",
"is_draft",
":",
"visible_parent",
"=",
"self"... | The majority of the time, the URL is the parent's URL plus the slug.
If not, override this function. | [
"The",
"majority",
"of",
"the",
"time",
"the",
"URL",
"is",
"the",
"parent",
"s",
"URL",
"plus",
"the",
"slug",
".",
"If",
"not",
"override",
"this",
"function",
"."
] | c507ea5b1864303732c53ad7c5800571fca5fa94 | https://github.com/ic-labs/django-icekit/blob/c507ea5b1864303732c53ad7c5800571fca5fa94/icekit/content_collections/abstract_models.py#L140-L157 | train | 33,331 |
ic-labs/django-icekit | icekit/content_collections/abstract_models.py | AbstractCollectedContent.get_response | def get_response(self, request, parent, *args, **kwargs):
"""
Render this collected content to a response.
:param request: the request
:param parent: the parent collection
:param args:
:param kwargs:
:return:
"""
context = {
'page': self,
}
try:
return TemplateResponse(
request,
self.get_layout_template_name(),
context
)
except AttributeError:
raise AttributeError("You need to define "
"`get_layout_template_name()` on your `%s` model, "
"or override `get_response()`" % type(self).__name__) | python | def get_response(self, request, parent, *args, **kwargs):
"""
Render this collected content to a response.
:param request: the request
:param parent: the parent collection
:param args:
:param kwargs:
:return:
"""
context = {
'page': self,
}
try:
return TemplateResponse(
request,
self.get_layout_template_name(),
context
)
except AttributeError:
raise AttributeError("You need to define "
"`get_layout_template_name()` on your `%s` model, "
"or override `get_response()`" % type(self).__name__) | [
"def",
"get_response",
"(",
"self",
",",
"request",
",",
"parent",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"context",
"=",
"{",
"'page'",
":",
"self",
",",
"}",
"try",
":",
"return",
"TemplateResponse",
"(",
"request",
",",
"self",
".",
... | Render this collected content to a response.
:param request: the request
:param parent: the parent collection
:param args:
:param kwargs:
:return: | [
"Render",
"this",
"collected",
"content",
"to",
"a",
"response",
"."
] | c507ea5b1864303732c53ad7c5800571fca5fa94 | https://github.com/ic-labs/django-icekit/blob/c507ea5b1864303732c53ad7c5800571fca5fa94/icekit/content_collections/abstract_models.py#L159-L181 | train | 33,332 |
ic-labs/django-icekit | icekit_events/migrations/0029_fix_malformed_rrules_20170830_1101.py | forwards_migration | def forwards_migration(apps, schema_editor):
"""
"0002_recurrence_rules" added malformed recurrence with trailing
semi-colons. While the JS parser on the front-end handles them,
the python parser will crash
"""
RecurrenceRule = apps.get_model('icekit_events', 'RecurrenceRule')
for rrule in RecurrenceRule.objects.all():
if ';\n' in rrule.recurrence_rule:
parts = rrule.recurrence_rule.split(';\n')
rrule.recurrence_rule = '\n'.join(parts)
rrule.save() | python | def forwards_migration(apps, schema_editor):
"""
"0002_recurrence_rules" added malformed recurrence with trailing
semi-colons. While the JS parser on the front-end handles them,
the python parser will crash
"""
RecurrenceRule = apps.get_model('icekit_events', 'RecurrenceRule')
for rrule in RecurrenceRule.objects.all():
if ';\n' in rrule.recurrence_rule:
parts = rrule.recurrence_rule.split(';\n')
rrule.recurrence_rule = '\n'.join(parts)
rrule.save() | [
"def",
"forwards_migration",
"(",
"apps",
",",
"schema_editor",
")",
":",
"RecurrenceRule",
"=",
"apps",
".",
"get_model",
"(",
"'icekit_events'",
",",
"'RecurrenceRule'",
")",
"for",
"rrule",
"in",
"RecurrenceRule",
".",
"objects",
".",
"all",
"(",
")",
":",
... | "0002_recurrence_rules" added malformed recurrence with trailing
semi-colons. While the JS parser on the front-end handles them,
the python parser will crash | [
"0002_recurrence_rules",
"added",
"malformed",
"recurrence",
"with",
"trailing",
"semi",
"-",
"colons",
".",
"While",
"the",
"JS",
"parser",
"on",
"the",
"front",
"-",
"end",
"handles",
"them",
"the",
"python",
"parser",
"will",
"crash"
] | c507ea5b1864303732c53ad7c5800571fca5fa94 | https://github.com/ic-labs/django-icekit/blob/c507ea5b1864303732c53ad7c5800571fca5fa94/icekit_events/migrations/0029_fix_malformed_rrules_20170830_1101.py#L7-L18 | train | 33,333 |
ic-labs/django-icekit | icekit/response_pages/views.py | get_response_page | def get_response_page(request, return_type, template_location, response_page_type):
"""
Helper function to get an appropriate response page if it exists.
This function is not designed to be used directly as a view. It is
a helper function which can be called to check if a ResponsePage
exists for a ResponsePage type (which is also active).
:param request:
:param return_type:
:param template_location:
:param response_page_type:
:return:
"""
try:
page = models.ResponsePage.objects.get(
is_active=True,
type=response_page_type,
)
template = loader.get_template(template_location)
content_type = None
body = template.render(
RequestContext(request, {'request_path': request.path, 'page': page, })
)
return return_type(body, content_type=content_type)
except models.ResponsePage.DoesNotExist:
return None | python | def get_response_page(request, return_type, template_location, response_page_type):
"""
Helper function to get an appropriate response page if it exists.
This function is not designed to be used directly as a view. It is
a helper function which can be called to check if a ResponsePage
exists for a ResponsePage type (which is also active).
:param request:
:param return_type:
:param template_location:
:param response_page_type:
:return:
"""
try:
page = models.ResponsePage.objects.get(
is_active=True,
type=response_page_type,
)
template = loader.get_template(template_location)
content_type = None
body = template.render(
RequestContext(request, {'request_path': request.path, 'page': page, })
)
return return_type(body, content_type=content_type)
except models.ResponsePage.DoesNotExist:
return None | [
"def",
"get_response_page",
"(",
"request",
",",
"return_type",
",",
"template_location",
",",
"response_page_type",
")",
":",
"try",
":",
"page",
"=",
"models",
".",
"ResponsePage",
".",
"objects",
".",
"get",
"(",
"is_active",
"=",
"True",
",",
"type",
"="... | Helper function to get an appropriate response page if it exists.
This function is not designed to be used directly as a view. It is
a helper function which can be called to check if a ResponsePage
exists for a ResponsePage type (which is also active).
:param request:
:param return_type:
:param template_location:
:param response_page_type:
:return: | [
"Helper",
"function",
"to",
"get",
"an",
"appropriate",
"response",
"page",
"if",
"it",
"exists",
"."
] | c507ea5b1864303732c53ad7c5800571fca5fa94 | https://github.com/ic-labs/django-icekit/blob/c507ea5b1864303732c53ad7c5800571fca5fa94/icekit/response_pages/views.py#L13-L40 | train | 33,334 |
ic-labs/django-icekit | icekit/response_pages/views.py | server_error | def server_error(request, template_name='500.html'):
"""
Custom 500 error handler.
The exception clause is so broad to capture any 500 errors that
may have been generated from getting the response page e.g. if the
database was down. If they were not handled they would cause a 500
themselves and form an infinite loop.
If no ResponsePage exists for with type ``RESPONSE_HTTP500`` then
the default template render view will be used.
Templates: :template:`500.html`
Context:
page
A ResponsePage with type ``RESPONSE_HTTP500`` if it exists.
"""
try:
rendered_page = get_response_page(
request,
http.HttpResponseServerError,
'icekit/response_pages/500.html',
abstract_models.RESPONSE_HTTP500
)
if rendered_page is not None:
return rendered_page
except Exception:
pass
return defaults.server_error(request, template_name) | python | def server_error(request, template_name='500.html'):
"""
Custom 500 error handler.
The exception clause is so broad to capture any 500 errors that
may have been generated from getting the response page e.g. if the
database was down. If they were not handled they would cause a 500
themselves and form an infinite loop.
If no ResponsePage exists for with type ``RESPONSE_HTTP500`` then
the default template render view will be used.
Templates: :template:`500.html`
Context:
page
A ResponsePage with type ``RESPONSE_HTTP500`` if it exists.
"""
try:
rendered_page = get_response_page(
request,
http.HttpResponseServerError,
'icekit/response_pages/500.html',
abstract_models.RESPONSE_HTTP500
)
if rendered_page is not None:
return rendered_page
except Exception:
pass
return defaults.server_error(request, template_name) | [
"def",
"server_error",
"(",
"request",
",",
"template_name",
"=",
"'500.html'",
")",
":",
"try",
":",
"rendered_page",
"=",
"get_response_page",
"(",
"request",
",",
"http",
".",
"HttpResponseServerError",
",",
"'icekit/response_pages/500.html'",
",",
"abstract_models... | Custom 500 error handler.
The exception clause is so broad to capture any 500 errors that
may have been generated from getting the response page e.g. if the
database was down. If they were not handled they would cause a 500
themselves and form an infinite loop.
If no ResponsePage exists for with type ``RESPONSE_HTTP500`` then
the default template render view will be used.
Templates: :template:`500.html`
Context:
page
A ResponsePage with type ``RESPONSE_HTTP500`` if it exists. | [
"Custom",
"500",
"error",
"handler",
"."
] | c507ea5b1864303732c53ad7c5800571fca5fa94 | https://github.com/ic-labs/django-icekit/blob/c507ea5b1864303732c53ad7c5800571fca5fa94/icekit/response_pages/views.py#L73-L102 | train | 33,335 |
ic-labs/django-icekit | icekit_events/utils/permissions.py | allowed_to_preview | def allowed_to_preview(user):
"""
Is the user allowed to view the preview?
Users are only allowed to view the preview if they are authenticated, active and staff.
:param user: A User object instance.
:return: Boolean.
"""
if (
user.is_authenticated and
user.is_active and
user.is_staff
):
return True
return False | python | def allowed_to_preview(user):
"""
Is the user allowed to view the preview?
Users are only allowed to view the preview if they are authenticated, active and staff.
:param user: A User object instance.
:return: Boolean.
"""
if (
user.is_authenticated and
user.is_active and
user.is_staff
):
return True
return False | [
"def",
"allowed_to_preview",
"(",
"user",
")",
":",
"if",
"(",
"user",
".",
"is_authenticated",
"and",
"user",
".",
"is_active",
"and",
"user",
".",
"is_staff",
")",
":",
"return",
"True",
"return",
"False"
] | Is the user allowed to view the preview?
Users are only allowed to view the preview if they are authenticated, active and staff.
:param user: A User object instance.
:return: Boolean. | [
"Is",
"the",
"user",
"allowed",
"to",
"view",
"the",
"preview?"
] | c507ea5b1864303732c53ad7c5800571fca5fa94 | https://github.com/ic-labs/django-icekit/blob/c507ea5b1864303732c53ad7c5800571fca5fa94/icekit_events/utils/permissions.py#L1-L16 | train | 33,336 |
ic-labs/django-icekit | icekit/admin_tools/polymorphic.py | ChildModelPluginPolymorphicParentModelAdmin.get_child_type_choices | def get_child_type_choices(self, request, action):
"""
Override choice labels with ``verbose_name`` from plugins and sort.
"""
# Get choices from the super class to check permissions.
choices = super(ChildModelPluginPolymorphicParentModelAdmin, self) \
.get_child_type_choices(request, action)
# Update label with verbose name from plugins.
plugins = self.child_model_plugin_class.get_plugins()
labels = {}
sort_priorities = {}
if plugins:
for plugin in plugins:
pk = plugin.content_type.pk
labels[pk] = capfirst(plugin.verbose_name)
sort_priorities[pk] = getattr(plugin, 'sort_priority', labels[pk])
choices = [(ctype, labels[ctype]) for ctype, _ in choices]
return sorted(choices,
cmp=lambda a, b: cmp(
sort_priorities[a[0]],
sort_priorities[b[0]]
)
)
return choices | python | def get_child_type_choices(self, request, action):
"""
Override choice labels with ``verbose_name`` from plugins and sort.
"""
# Get choices from the super class to check permissions.
choices = super(ChildModelPluginPolymorphicParentModelAdmin, self) \
.get_child_type_choices(request, action)
# Update label with verbose name from plugins.
plugins = self.child_model_plugin_class.get_plugins()
labels = {}
sort_priorities = {}
if plugins:
for plugin in plugins:
pk = plugin.content_type.pk
labels[pk] = capfirst(plugin.verbose_name)
sort_priorities[pk] = getattr(plugin, 'sort_priority', labels[pk])
choices = [(ctype, labels[ctype]) for ctype, _ in choices]
return sorted(choices,
cmp=lambda a, b: cmp(
sort_priorities[a[0]],
sort_priorities[b[0]]
)
)
return choices | [
"def",
"get_child_type_choices",
"(",
"self",
",",
"request",
",",
"action",
")",
":",
"# Get choices from the super class to check permissions.",
"choices",
"=",
"super",
"(",
"ChildModelPluginPolymorphicParentModelAdmin",
",",
"self",
")",
".",
"get_child_type_choices",
"... | Override choice labels with ``verbose_name`` from plugins and sort. | [
"Override",
"choice",
"labels",
"with",
"verbose_name",
"from",
"plugins",
"and",
"sort",
"."
] | c507ea5b1864303732c53ad7c5800571fca5fa94 | https://github.com/ic-labs/django-icekit/blob/c507ea5b1864303732c53ad7c5800571fca5fa94/icekit/admin_tools/polymorphic.py#L44-L70 | train | 33,337 |
ic-labs/django-icekit | icekit/admin_tools/polymorphic.py | ChildModelPluginPolymorphicParentModelAdmin.get_child_models | def get_child_models(self):
"""
Get child models from registered plugins. Fallback to the child model
admin and its base model if no plugins are registered.
"""
child_models = []
for plugin in self.child_model_plugin_class.get_plugins():
child_models.append((plugin.model, plugin.model_admin))
if not child_models:
child_models.append((
self.child_model_admin.base_model,
self.child_model_admin,
))
return child_models | python | def get_child_models(self):
"""
Get child models from registered plugins. Fallback to the child model
admin and its base model if no plugins are registered.
"""
child_models = []
for plugin in self.child_model_plugin_class.get_plugins():
child_models.append((plugin.model, plugin.model_admin))
if not child_models:
child_models.append((
self.child_model_admin.base_model,
self.child_model_admin,
))
return child_models | [
"def",
"get_child_models",
"(",
"self",
")",
":",
"child_models",
"=",
"[",
"]",
"for",
"plugin",
"in",
"self",
".",
"child_model_plugin_class",
".",
"get_plugins",
"(",
")",
":",
"child_models",
".",
"append",
"(",
"(",
"plugin",
".",
"model",
",",
"plugi... | Get child models from registered plugins. Fallback to the child model
admin and its base model if no plugins are registered. | [
"Get",
"child",
"models",
"from",
"registered",
"plugins",
".",
"Fallback",
"to",
"the",
"child",
"model",
"admin",
"and",
"its",
"base",
"model",
"if",
"no",
"plugins",
"are",
"registered",
"."
] | c507ea5b1864303732c53ad7c5800571fca5fa94 | https://github.com/ic-labs/django-icekit/blob/c507ea5b1864303732c53ad7c5800571fca5fa94/icekit/admin_tools/polymorphic.py#L72-L85 | train | 33,338 |
ic-labs/django-icekit | icekit/admin_tools/polymorphic.py | PolymorphicAdminRawIdFix._get_child_admin_site | def _get_child_admin_site(self, rel):
"""
Returns the separate AdminSite instance that django-polymorphic
maintains for child models.
This admin site needs to be passed to the widget so that it passes the
check of whether the field is pointing to a model that's registered
in the admin.
The hackiness of this implementation reflects the hackiness of the way
django-polymorphic does things.
"""
if rel.to not in self.admin_site._registry:
# Go through the objects the model inherits from and find one
# that's registered in the main admin and has a reference to the
# child admin site in it attributes.
for parent in rel.to.mro():
if parent in self.admin_site._registry \
and hasattr(self.admin_site._registry[parent], '_child_admin_site'):
return self.admin_site._registry[parent]._child_admin_site
return self.admin_site | python | def _get_child_admin_site(self, rel):
"""
Returns the separate AdminSite instance that django-polymorphic
maintains for child models.
This admin site needs to be passed to the widget so that it passes the
check of whether the field is pointing to a model that's registered
in the admin.
The hackiness of this implementation reflects the hackiness of the way
django-polymorphic does things.
"""
if rel.to not in self.admin_site._registry:
# Go through the objects the model inherits from and find one
# that's registered in the main admin and has a reference to the
# child admin site in it attributes.
for parent in rel.to.mro():
if parent in self.admin_site._registry \
and hasattr(self.admin_site._registry[parent], '_child_admin_site'):
return self.admin_site._registry[parent]._child_admin_site
return self.admin_site | [
"def",
"_get_child_admin_site",
"(",
"self",
",",
"rel",
")",
":",
"if",
"rel",
".",
"to",
"not",
"in",
"self",
".",
"admin_site",
".",
"_registry",
":",
"# Go through the objects the model inherits from and find one",
"# that's registered in the main admin and has a refere... | Returns the separate AdminSite instance that django-polymorphic
maintains for child models.
This admin site needs to be passed to the widget so that it passes the
check of whether the field is pointing to a model that's registered
in the admin.
The hackiness of this implementation reflects the hackiness of the way
django-polymorphic does things. | [
"Returns",
"the",
"separate",
"AdminSite",
"instance",
"that",
"django",
"-",
"polymorphic",
"maintains",
"for",
"child",
"models",
"."
] | c507ea5b1864303732c53ad7c5800571fca5fa94 | https://github.com/ic-labs/django-icekit/blob/c507ea5b1864303732c53ad7c5800571fca5fa94/icekit/admin_tools/polymorphic.py#L97-L117 | train | 33,339 |
ic-labs/django-icekit | icekit/admin_tools/polymorphic.py | PolymorphicAdminRawIdFix.formfield_for_foreignkey | def formfield_for_foreignkey(self, db_field, request=None, **kwargs):
"""
Replicates the logic in ModelAdmin.forfield_for_foreignkey, replacing
the widget with the patched one above, initialising it with the child
admin site.
"""
db = kwargs.get('using')
if db_field.name in self.raw_id_fields:
kwargs['widget'] = PolymorphicForeignKeyRawIdWidget(
db_field.rel,
admin_site=self._get_child_admin_site(db_field.rel),
using=db
)
if 'queryset' not in kwargs:
queryset = self.get_field_queryset(db, db_field, request)
if queryset is not None:
kwargs['queryset'] = queryset
return db_field.formfield(**kwargs)
return super(PolymorphicAdminRawIdFix, self).formfield_for_foreignkey(
db_field, request=request, **kwargs) | python | def formfield_for_foreignkey(self, db_field, request=None, **kwargs):
"""
Replicates the logic in ModelAdmin.forfield_for_foreignkey, replacing
the widget with the patched one above, initialising it with the child
admin site.
"""
db = kwargs.get('using')
if db_field.name in self.raw_id_fields:
kwargs['widget'] = PolymorphicForeignKeyRawIdWidget(
db_field.rel,
admin_site=self._get_child_admin_site(db_field.rel),
using=db
)
if 'queryset' not in kwargs:
queryset = self.get_field_queryset(db, db_field, request)
if queryset is not None:
kwargs['queryset'] = queryset
return db_field.formfield(**kwargs)
return super(PolymorphicAdminRawIdFix, self).formfield_for_foreignkey(
db_field, request=request, **kwargs) | [
"def",
"formfield_for_foreignkey",
"(",
"self",
",",
"db_field",
",",
"request",
"=",
"None",
",",
"*",
"*",
"kwargs",
")",
":",
"db",
"=",
"kwargs",
".",
"get",
"(",
"'using'",
")",
"if",
"db_field",
".",
"name",
"in",
"self",
".",
"raw_id_fields",
":... | Replicates the logic in ModelAdmin.forfield_for_foreignkey, replacing
the widget with the patched one above, initialising it with the child
admin site. | [
"Replicates",
"the",
"logic",
"in",
"ModelAdmin",
".",
"forfield_for_foreignkey",
"replacing",
"the",
"widget",
"with",
"the",
"patched",
"one",
"above",
"initialising",
"it",
"with",
"the",
"child",
"admin",
"site",
"."
] | c507ea5b1864303732c53ad7c5800571fca5fa94 | https://github.com/ic-labs/django-icekit/blob/c507ea5b1864303732c53ad7c5800571fca5fa94/icekit/admin_tools/polymorphic.py#L119-L138 | train | 33,340 |
ic-labs/django-icekit | glamkit_collections/utils.py | wikipedia_slugify | def wikipedia_slugify(value, do_unidecode=False):
"""
Converts to ASCII via unidecode.
Converts spaces to underscore.
Removes characters that
aren't alphanumerics, underscores, or hyphens.
Preserve case.
Also strips leading and trailing whitespace.
"""
if do_unidecode:
value = unidecode(value)
value = value.strip()
return mark_safe(re.sub('[\s/#\?:@]+', '_', value)) | python | def wikipedia_slugify(value, do_unidecode=False):
"""
Converts to ASCII via unidecode.
Converts spaces to underscore.
Removes characters that
aren't alphanumerics, underscores, or hyphens.
Preserve case.
Also strips leading and trailing whitespace.
"""
if do_unidecode:
value = unidecode(value)
value = value.strip()
return mark_safe(re.sub('[\s/#\?:@]+', '_', value)) | [
"def",
"wikipedia_slugify",
"(",
"value",
",",
"do_unidecode",
"=",
"False",
")",
":",
"if",
"do_unidecode",
":",
"value",
"=",
"unidecode",
"(",
"value",
")",
"value",
"=",
"value",
".",
"strip",
"(",
")",
"return",
"mark_safe",
"(",
"re",
".",
"sub",
... | Converts to ASCII via unidecode.
Converts spaces to underscore.
Removes characters that
aren't alphanumerics, underscores, or hyphens.
Preserve case.
Also strips leading and trailing whitespace. | [
"Converts",
"to",
"ASCII",
"via",
"unidecode",
".",
"Converts",
"spaces",
"to",
"underscore",
"."
] | c507ea5b1864303732c53ad7c5800571fca5fa94 | https://github.com/ic-labs/django-icekit/blob/c507ea5b1864303732c53ad7c5800571fca5fa94/glamkit_collections/utils.py#L13-L28 | train | 33,341 |
ic-labs/django-icekit | glamkit_collections/utils.py | ensure_unique | def ensure_unique(qs, field_name, value, exclude_id=None):
"""
Makes sure that `value` is unique on model.fieldname. And nonempty.
"""
orig = value
if not value:
value = "None"
for x in itertools.count(1):
if not qs.exclude(id=exclude_id).filter(**{field_name: value}).exists():
break
if orig:
value = '%s-%d' % (orig, x)
else:
value = '%d' % x
return value | python | def ensure_unique(qs, field_name, value, exclude_id=None):
"""
Makes sure that `value` is unique on model.fieldname. And nonempty.
"""
orig = value
if not value:
value = "None"
for x in itertools.count(1):
if not qs.exclude(id=exclude_id).filter(**{field_name: value}).exists():
break
if orig:
value = '%s-%d' % (orig, x)
else:
value = '%d' % x
return value | [
"def",
"ensure_unique",
"(",
"qs",
",",
"field_name",
",",
"value",
",",
"exclude_id",
"=",
"None",
")",
":",
"orig",
"=",
"value",
"if",
"not",
"value",
":",
"value",
"=",
"\"None\"",
"for",
"x",
"in",
"itertools",
".",
"count",
"(",
"1",
")",
":",
... | Makes sure that `value` is unique on model.fieldname. And nonempty. | [
"Makes",
"sure",
"that",
"value",
"is",
"unique",
"on",
"model",
".",
"fieldname",
".",
"And",
"nonempty",
"."
] | c507ea5b1864303732c53ad7c5800571fca5fa94 | https://github.com/ic-labs/django-icekit/blob/c507ea5b1864303732c53ad7c5800571fca5fa94/glamkit_collections/utils.py#L40-L55 | train | 33,342 |
ic-labs/django-icekit | glamkit_collections/utils.py | fix_line_breaks | def fix_line_breaks(s):
"""
Convert \r\n and \r to \n chars. Strip any leading or trailing whitespace
on each line. Remove blank lines.
"""
l = s.splitlines()
x = [i.strip() for i in l]
x = [i for i in x if i] # remove blank lines
return "\n".join(x) | python | def fix_line_breaks(s):
"""
Convert \r\n and \r to \n chars. Strip any leading or trailing whitespace
on each line. Remove blank lines.
"""
l = s.splitlines()
x = [i.strip() for i in l]
x = [i for i in x if i] # remove blank lines
return "\n".join(x) | [
"def",
"fix_line_breaks",
"(",
"s",
")",
":",
"l",
"=",
"s",
".",
"splitlines",
"(",
")",
"x",
"=",
"[",
"i",
".",
"strip",
"(",
")",
"for",
"i",
"in",
"l",
"]",
"x",
"=",
"[",
"i",
"for",
"i",
"in",
"x",
"if",
"i",
"]",
"# remove blank lines... | Convert \r\n and \r to \n chars. Strip any leading or trailing whitespace
on each line. Remove blank lines. | [
"Convert",
"\\",
"r",
"\\",
"n",
"and",
"\\",
"r",
"to",
"\\",
"n",
"chars",
".",
"Strip",
"any",
"leading",
"or",
"trailing",
"whitespace",
"on",
"each",
"line",
".",
"Remove",
"blank",
"lines",
"."
] | c507ea5b1864303732c53ad7c5800571fca5fa94 | https://github.com/ic-labs/django-icekit/blob/c507ea5b1864303732c53ad7c5800571fca5fa94/glamkit_collections/utils.py#L69-L77 | train | 33,343 |
ic-labs/django-icekit | glamkit_collections/management/commands/analyze_json.py | update_json_analysis | def update_json_analysis(analysis, j):
"""
Step through the items in a piece of json, and update an analysis dict
with the values found.
"""
def _analyze_list(l, parent=""):
for v in l:
if isinstance(v, (dict, CaseInsensitiveDict)):
_analyze_json(v, parent=parent)
elif isinstance(v, list):
_analyze_list(v, parent=parent+"[]")
else:
analysis[parent].add(v)
def _analyze_json(d, parent=""):
for k, v in d.iteritems():
if parent:
path = ".".join([parent, k])
else:
path = k
if isinstance(v, (dict, CaseInsensitiveDict)):
_analyze_json(v, parent=path)
elif isinstance(v, list):
_analyze_list(v, parent=path+"[]")
else:
analysis[path].add(v)
if isinstance(j, list):
_analyze_list(j)
if isinstance(j, (dict, CaseInsensitiveDict)):
_analyze_json(j) | python | def update_json_analysis(analysis, j):
"""
Step through the items in a piece of json, and update an analysis dict
with the values found.
"""
def _analyze_list(l, parent=""):
for v in l:
if isinstance(v, (dict, CaseInsensitiveDict)):
_analyze_json(v, parent=parent)
elif isinstance(v, list):
_analyze_list(v, parent=parent+"[]")
else:
analysis[parent].add(v)
def _analyze_json(d, parent=""):
for k, v in d.iteritems():
if parent:
path = ".".join([parent, k])
else:
path = k
if isinstance(v, (dict, CaseInsensitiveDict)):
_analyze_json(v, parent=path)
elif isinstance(v, list):
_analyze_list(v, parent=path+"[]")
else:
analysis[path].add(v)
if isinstance(j, list):
_analyze_list(j)
if isinstance(j, (dict, CaseInsensitiveDict)):
_analyze_json(j) | [
"def",
"update_json_analysis",
"(",
"analysis",
",",
"j",
")",
":",
"def",
"_analyze_list",
"(",
"l",
",",
"parent",
"=",
"\"\"",
")",
":",
"for",
"v",
"in",
"l",
":",
"if",
"isinstance",
"(",
"v",
",",
"(",
"dict",
",",
"CaseInsensitiveDict",
")",
"... | Step through the items in a piece of json, and update an analysis dict
with the values found. | [
"Step",
"through",
"the",
"items",
"in",
"a",
"piece",
"of",
"json",
"and",
"update",
"an",
"analysis",
"dict",
"with",
"the",
"values",
"found",
"."
] | c507ea5b1864303732c53ad7c5800571fca5fa94 | https://github.com/ic-labs/django-icekit/blob/c507ea5b1864303732c53ad7c5800571fca5fa94/glamkit_collections/management/commands/analyze_json.py#L8-L39 | train | 33,344 |
ic-labs/django-icekit | icekit/mixins.py | LayoutFieldMixin.add_missing_placeholders | def add_missing_placeholders(self):
"""
Add missing placeholders from templates. Return `True` if any missing
placeholders were created.
"""
content_type = ContentType.objects.get_for_model(self)
result = False
if self.layout:
for data in self.layout.get_placeholder_data():
placeholder, created = Placeholder.objects.update_or_create(
parent_type=content_type,
parent_id=self.pk,
slot=data.slot,
defaults=dict(
role=data.role,
title=data.title,
))
result = result or created
return result | python | def add_missing_placeholders(self):
"""
Add missing placeholders from templates. Return `True` if any missing
placeholders were created.
"""
content_type = ContentType.objects.get_for_model(self)
result = False
if self.layout:
for data in self.layout.get_placeholder_data():
placeholder, created = Placeholder.objects.update_or_create(
parent_type=content_type,
parent_id=self.pk,
slot=data.slot,
defaults=dict(
role=data.role,
title=data.title,
))
result = result or created
return result | [
"def",
"add_missing_placeholders",
"(",
"self",
")",
":",
"content_type",
"=",
"ContentType",
".",
"objects",
".",
"get_for_model",
"(",
"self",
")",
"result",
"=",
"False",
"if",
"self",
".",
"layout",
":",
"for",
"data",
"in",
"self",
".",
"layout",
".",... | Add missing placeholders from templates. Return `True` if any missing
placeholders were created. | [
"Add",
"missing",
"placeholders",
"from",
"templates",
".",
"Return",
"True",
"if",
"any",
"missing",
"placeholders",
"were",
"created",
"."
] | c507ea5b1864303732c53ad7c5800571fca5fa94 | https://github.com/ic-labs/django-icekit/blob/c507ea5b1864303732c53ad7c5800571fca5fa94/icekit/mixins.py#L55-L73 | train | 33,345 |
ic-labs/django-icekit | icekit/mixins.py | ListableMixin.get_og_title | def get_og_title(self):
"""
return meta_title if exists otherwise fall back to title
"""
if hasattr(self, 'meta_title') and self.meta_title:
return self.meta_title
return self.get_title() | python | def get_og_title(self):
"""
return meta_title if exists otherwise fall back to title
"""
if hasattr(self, 'meta_title') and self.meta_title:
return self.meta_title
return self.get_title() | [
"def",
"get_og_title",
"(",
"self",
")",
":",
"if",
"hasattr",
"(",
"self",
",",
"'meta_title'",
")",
"and",
"self",
".",
"meta_title",
":",
"return",
"self",
".",
"meta_title",
"return",
"self",
".",
"get_title",
"(",
")"
] | return meta_title if exists otherwise fall back to title | [
"return",
"meta_title",
"if",
"exists",
"otherwise",
"fall",
"back",
"to",
"title"
] | c507ea5b1864303732c53ad7c5800571fca5fa94 | https://github.com/ic-labs/django-icekit/blob/c507ea5b1864303732c53ad7c5800571fca5fa94/icekit/mixins.py#L213-L219 | train | 33,346 |
ic-labs/django-icekit | icekit/mixins.py | GoogleMapMixin.get_map_data | def get_map_data(self):
"""
Returns a serializable data set describing the map location
"""
return {
'containerSelector': '#' + self.get_map_element_id(),
'center': self.map_center_description,
'marker': self.map_marker_description or self.map_center_description,
'zoom': self.map_zoom,
'href': self.get_map_href(),
'key': getattr(settings, 'GOOGLE_MAPS_API_KEY', ''),
# Python's line-splitting is more cross-OS compatible, so we feed
# a pre-built array to the front-end
'description': [
line for line in self.map_description.splitlines() if line
],
} | python | def get_map_data(self):
"""
Returns a serializable data set describing the map location
"""
return {
'containerSelector': '#' + self.get_map_element_id(),
'center': self.map_center_description,
'marker': self.map_marker_description or self.map_center_description,
'zoom': self.map_zoom,
'href': self.get_map_href(),
'key': getattr(settings, 'GOOGLE_MAPS_API_KEY', ''),
# Python's line-splitting is more cross-OS compatible, so we feed
# a pre-built array to the front-end
'description': [
line for line in self.map_description.splitlines() if line
],
} | [
"def",
"get_map_data",
"(",
"self",
")",
":",
"return",
"{",
"'containerSelector'",
":",
"'#'",
"+",
"self",
".",
"get_map_element_id",
"(",
")",
",",
"'center'",
":",
"self",
".",
"map_center_description",
",",
"'marker'",
":",
"self",
".",
"map_marker_descri... | Returns a serializable data set describing the map location | [
"Returns",
"a",
"serializable",
"data",
"set",
"describing",
"the",
"map",
"location"
] | c507ea5b1864303732c53ad7c5800571fca5fa94 | https://github.com/ic-labs/django-icekit/blob/c507ea5b1864303732c53ad7c5800571fca5fa94/icekit/mixins.py#L434-L451 | train | 33,347 |
ic-labs/django-icekit | icekit/mixins.py | GoogleMapMixin.get_map_href | def get_map_href(self):
"""
Returns a link to an external view of the map
"""
if self.map_center_lat and self.map_center_long:
params = {
'll': '%s,%s' % (self.map_center_lat, self.map_center_long)
}
else:
params = {'q': self.map_center_description}
return self.GOOGLE_MAPS_HREF_ROOT + urllib.urlencode(params) | python | def get_map_href(self):
"""
Returns a link to an external view of the map
"""
if self.map_center_lat and self.map_center_long:
params = {
'll': '%s,%s' % (self.map_center_lat, self.map_center_long)
}
else:
params = {'q': self.map_center_description}
return self.GOOGLE_MAPS_HREF_ROOT + urllib.urlencode(params) | [
"def",
"get_map_href",
"(",
"self",
")",
":",
"if",
"self",
".",
"map_center_lat",
"and",
"self",
".",
"map_center_long",
":",
"params",
"=",
"{",
"'ll'",
":",
"'%s,%s'",
"%",
"(",
"self",
".",
"map_center_lat",
",",
"self",
".",
"map_center_long",
")",
... | Returns a link to an external view of the map | [
"Returns",
"a",
"link",
"to",
"an",
"external",
"view",
"of",
"the",
"map"
] | c507ea5b1864303732c53ad7c5800571fca5fa94 | https://github.com/ic-labs/django-icekit/blob/c507ea5b1864303732c53ad7c5800571fca5fa94/icekit/mixins.py#L453-L464 | train | 33,348 |
ic-labs/django-icekit | icekit_events/utils/timeutils.py | round_datetime | def round_datetime(when=None, precision=60, rounding=ROUND_NEAREST):
"""
Round a datetime object to a time that matches the given precision.
when (datetime), default now
The datetime object to be rounded.
precision (int, timedelta, str), default 60
The number of seconds, weekday (MON, TUE, WED, etc.) or timedelta
object to which the datetime object should be rounded.
rounding (str), default ROUND_NEAREST
The rounding method to use (ROUND_DOWN, ROUND_NEAREST, ROUND_UP).
"""
when = when or djtz.now()
weekday = WEEKDAYS.get(precision, WEEKDAYS['MON'])
if precision in WEEKDAYS:
precision = int(timedelta(days=7).total_seconds())
elif isinstance(precision, timedelta):
precision = int(precision.total_seconds())
# Get delta between the beginning of time and the given datetime object.
# If precision is a weekday, the beginning of time must be that same day.
when_min = when.min + timedelta(days=weekday)
if djtz.is_aware(when):
# It doesn't seem to be possible to localise `datetime.min` without
# raising `OverflowError`, so create a timezone aware object manually.
when_min = datetime(tzinfo=when.tzinfo, *when_min.timetuple()[:3])
delta = when - when_min
remainder = int(delta.total_seconds()) % precision
# First round down and strip microseconds.
when -= timedelta(seconds=remainder, microseconds=when.microsecond)
# Then add precision to round up.
if rounding == ROUND_UP or (
rounding == ROUND_NEAREST and remainder >= precision / 2):
when += timedelta(seconds=precision)
return when | python | def round_datetime(when=None, precision=60, rounding=ROUND_NEAREST):
"""
Round a datetime object to a time that matches the given precision.
when (datetime), default now
The datetime object to be rounded.
precision (int, timedelta, str), default 60
The number of seconds, weekday (MON, TUE, WED, etc.) or timedelta
object to which the datetime object should be rounded.
rounding (str), default ROUND_NEAREST
The rounding method to use (ROUND_DOWN, ROUND_NEAREST, ROUND_UP).
"""
when = when or djtz.now()
weekday = WEEKDAYS.get(precision, WEEKDAYS['MON'])
if precision in WEEKDAYS:
precision = int(timedelta(days=7).total_seconds())
elif isinstance(precision, timedelta):
precision = int(precision.total_seconds())
# Get delta between the beginning of time and the given datetime object.
# If precision is a weekday, the beginning of time must be that same day.
when_min = when.min + timedelta(days=weekday)
if djtz.is_aware(when):
# It doesn't seem to be possible to localise `datetime.min` without
# raising `OverflowError`, so create a timezone aware object manually.
when_min = datetime(tzinfo=when.tzinfo, *when_min.timetuple()[:3])
delta = when - when_min
remainder = int(delta.total_seconds()) % precision
# First round down and strip microseconds.
when -= timedelta(seconds=remainder, microseconds=when.microsecond)
# Then add precision to round up.
if rounding == ROUND_UP or (
rounding == ROUND_NEAREST and remainder >= precision / 2):
when += timedelta(seconds=precision)
return when | [
"def",
"round_datetime",
"(",
"when",
"=",
"None",
",",
"precision",
"=",
"60",
",",
"rounding",
"=",
"ROUND_NEAREST",
")",
":",
"when",
"=",
"when",
"or",
"djtz",
".",
"now",
"(",
")",
"weekday",
"=",
"WEEKDAYS",
".",
"get",
"(",
"precision",
",",
"... | Round a datetime object to a time that matches the given precision.
when (datetime), default now
The datetime object to be rounded.
precision (int, timedelta, str), default 60
The number of seconds, weekday (MON, TUE, WED, etc.) or timedelta
object to which the datetime object should be rounded.
rounding (str), default ROUND_NEAREST
The rounding method to use (ROUND_DOWN, ROUND_NEAREST, ROUND_UP). | [
"Round",
"a",
"datetime",
"object",
"to",
"a",
"time",
"that",
"matches",
"the",
"given",
"precision",
"."
] | c507ea5b1864303732c53ad7c5800571fca5fa94 | https://github.com/ic-labs/django-icekit/blob/c507ea5b1864303732c53ad7c5800571fca5fa94/icekit_events/utils/timeutils.py#L31-L67 | train | 33,349 |
ic-labs/django-icekit | icekit_events/utils/timeutils.py | coerce_dt_awareness | def coerce_dt_awareness(date_or_datetime, tz=None, t=None):
"""
Coerce the given `datetime` or `date` object into a timezone-aware or
timezone-naive `datetime` result, depending on which is appropriate for
the project's settings.
"""
if isinstance(date_or_datetime, datetime):
dt = date_or_datetime
else:
dt = datetime.combine(date_or_datetime, t or time.min)
is_project_tz_aware = settings.USE_TZ
if is_project_tz_aware:
return coerce_aware(dt, tz)
elif not is_project_tz_aware:
return coerce_naive(dt, tz)
# No changes necessary
return dt | python | def coerce_dt_awareness(date_or_datetime, tz=None, t=None):
"""
Coerce the given `datetime` or `date` object into a timezone-aware or
timezone-naive `datetime` result, depending on which is appropriate for
the project's settings.
"""
if isinstance(date_or_datetime, datetime):
dt = date_or_datetime
else:
dt = datetime.combine(date_or_datetime, t or time.min)
is_project_tz_aware = settings.USE_TZ
if is_project_tz_aware:
return coerce_aware(dt, tz)
elif not is_project_tz_aware:
return coerce_naive(dt, tz)
# No changes necessary
return dt | [
"def",
"coerce_dt_awareness",
"(",
"date_or_datetime",
",",
"tz",
"=",
"None",
",",
"t",
"=",
"None",
")",
":",
"if",
"isinstance",
"(",
"date_or_datetime",
",",
"datetime",
")",
":",
"dt",
"=",
"date_or_datetime",
"else",
":",
"dt",
"=",
"datetime",
".",
... | Coerce the given `datetime` or `date` object into a timezone-aware or
timezone-naive `datetime` result, depending on which is appropriate for
the project's settings. | [
"Coerce",
"the",
"given",
"datetime",
"or",
"date",
"object",
"into",
"a",
"timezone",
"-",
"aware",
"or",
"timezone",
"-",
"naive",
"datetime",
"result",
"depending",
"on",
"which",
"is",
"appropriate",
"for",
"the",
"project",
"s",
"settings",
"."
] | c507ea5b1864303732c53ad7c5800571fca5fa94 | https://github.com/ic-labs/django-icekit/blob/c507ea5b1864303732c53ad7c5800571fca5fa94/icekit_events/utils/timeutils.py#L80-L96 | train | 33,350 |
ic-labs/django-icekit | icekit/publishing/utils.py | get_draft_secret_key | def get_draft_secret_key():
"""
Return the secret key used to generate draft mode HMACs. It will be
randomly generated on first access. Existing draft URLs can be invalidated
by deleting or updating the ``DRAFT_SECRET_KEY`` setting.
"""
# TODO: Per URL secret keys, so we can invalidate draft URLs for individual
# pages. For example, on publish.
draft_secret_key, created = Text.objects.get_or_create(
name='DRAFT_SECRET_KEY',
defaults=dict(
value=get_random_string(50),
))
return draft_secret_key.value | python | def get_draft_secret_key():
"""
Return the secret key used to generate draft mode HMACs. It will be
randomly generated on first access. Existing draft URLs can be invalidated
by deleting or updating the ``DRAFT_SECRET_KEY`` setting.
"""
# TODO: Per URL secret keys, so we can invalidate draft URLs for individual
# pages. For example, on publish.
draft_secret_key, created = Text.objects.get_or_create(
name='DRAFT_SECRET_KEY',
defaults=dict(
value=get_random_string(50),
))
return draft_secret_key.value | [
"def",
"get_draft_secret_key",
"(",
")",
":",
"# TODO: Per URL secret keys, so we can invalidate draft URLs for individual",
"# pages. For example, on publish.",
"draft_secret_key",
",",
"created",
"=",
"Text",
".",
"objects",
".",
"get_or_create",
"(",
"name",
"=",
"'DRA... | Return the secret key used to generate draft mode HMACs. It will be
randomly generated on first access. Existing draft URLs can be invalidated
by deleting or updating the ``DRAFT_SECRET_KEY`` setting. | [
"Return",
"the",
"secret",
"key",
"used",
"to",
"generate",
"draft",
"mode",
"HMACs",
".",
"It",
"will",
"be",
"randomly",
"generated",
"on",
"first",
"access",
".",
"Existing",
"draft",
"URLs",
"can",
"be",
"invalidated",
"by",
"deleting",
"or",
"updating",... | c507ea5b1864303732c53ad7c5800571fca5fa94 | https://github.com/ic-labs/django-icekit/blob/c507ea5b1864303732c53ad7c5800571fca5fa94/icekit/publishing/utils.py#L49-L62 | train | 33,351 |
ic-labs/django-icekit | icekit/publishing/utils.py | get_draft_url | def get_draft_url(url):
"""
Return the given URL with a draft mode HMAC in its querystring.
"""
if verify_draft_url(url):
# Nothing to do. Already a valid draft URL.
return url
# Parse querystring and add draft mode HMAC.
url = urlparse.urlparse(url)
salt = get_random_string(5)
# QueryDict requires a bytestring as its first argument
query = QueryDict(force_bytes(url.query), mutable=True)
query['preview'] = '%s:%s' % (salt, get_draft_hmac(salt, url.path))
# Reconstruct URL.
parts = list(url)
parts[4] = query.urlencode(safe=':')
return urlparse.urlunparse(parts) | python | def get_draft_url(url):
"""
Return the given URL with a draft mode HMAC in its querystring.
"""
if verify_draft_url(url):
# Nothing to do. Already a valid draft URL.
return url
# Parse querystring and add draft mode HMAC.
url = urlparse.urlparse(url)
salt = get_random_string(5)
# QueryDict requires a bytestring as its first argument
query = QueryDict(force_bytes(url.query), mutable=True)
query['preview'] = '%s:%s' % (salt, get_draft_hmac(salt, url.path))
# Reconstruct URL.
parts = list(url)
parts[4] = query.urlencode(safe=':')
return urlparse.urlunparse(parts) | [
"def",
"get_draft_url",
"(",
"url",
")",
":",
"if",
"verify_draft_url",
"(",
"url",
")",
":",
"# Nothing to do. Already a valid draft URL.",
"return",
"url",
"# Parse querystring and add draft mode HMAC.",
"url",
"=",
"urlparse",
".",
"urlparse",
"(",
"url",
")",
"sal... | Return the given URL with a draft mode HMAC in its querystring. | [
"Return",
"the",
"given",
"URL",
"with",
"a",
"draft",
"mode",
"HMAC",
"in",
"its",
"querystring",
"."
] | c507ea5b1864303732c53ad7c5800571fca5fa94 | https://github.com/ic-labs/django-icekit/blob/c507ea5b1864303732c53ad7c5800571fca5fa94/icekit/publishing/utils.py#L65-L81 | train | 33,352 |
ic-labs/django-icekit | icekit/publishing/utils.py | verify_draft_url | def verify_draft_url(url):
"""
Return ``True`` if the given URL has a valid draft mode HMAC in its
querystring.
"""
url = urlparse.urlparse(url)
# QueryDict requires a bytestring as its first argument
query = QueryDict(force_bytes(url.query))
# TODO Support legacy 'edit' param name for now
preview_hmac = query.get('preview') or query.get('edit')
if preview_hmac:
salt, hmac = preview_hmac.split(':')
return hmac == get_draft_hmac(salt, url.path)
return False | python | def verify_draft_url(url):
"""
Return ``True`` if the given URL has a valid draft mode HMAC in its
querystring.
"""
url = urlparse.urlparse(url)
# QueryDict requires a bytestring as its first argument
query = QueryDict(force_bytes(url.query))
# TODO Support legacy 'edit' param name for now
preview_hmac = query.get('preview') or query.get('edit')
if preview_hmac:
salt, hmac = preview_hmac.split(':')
return hmac == get_draft_hmac(salt, url.path)
return False | [
"def",
"verify_draft_url",
"(",
"url",
")",
":",
"url",
"=",
"urlparse",
".",
"urlparse",
"(",
"url",
")",
"# QueryDict requires a bytestring as its first argument",
"query",
"=",
"QueryDict",
"(",
"force_bytes",
"(",
"url",
".",
"query",
")",
")",
"# TODO Support... | Return ``True`` if the given URL has a valid draft mode HMAC in its
querystring. | [
"Return",
"True",
"if",
"the",
"given",
"URL",
"has",
"a",
"valid",
"draft",
"mode",
"HMAC",
"in",
"its",
"querystring",
"."
] | c507ea5b1864303732c53ad7c5800571fca5fa94 | https://github.com/ic-labs/django-icekit/blob/c507ea5b1864303732c53ad7c5800571fca5fa94/icekit/publishing/utils.py#L84-L97 | train | 33,353 |
ic-labs/django-icekit | icekit/plugins/descriptors.py | contribute_to_class | def contribute_to_class(model_class, name='slots', descriptor=None):
"""
Function that adds a description to a model Class.
:param model_class: The model class the descriptor is to be added
to.
:param name: The attribute name the descriptor will be assigned to.
:param descriptor: The descriptor instance to be used. If none is
specified it will default to
``icekit.plugins.descriptors.PlaceholderDescriptor``.
:return: True
"""
rel_obj = descriptor or PlaceholderDescriptor()
rel_obj.contribute_to_class(model_class, name)
setattr(model_class, name, rel_obj)
return True | python | def contribute_to_class(model_class, name='slots', descriptor=None):
"""
Function that adds a description to a model Class.
:param model_class: The model class the descriptor is to be added
to.
:param name: The attribute name the descriptor will be assigned to.
:param descriptor: The descriptor instance to be used. If none is
specified it will default to
``icekit.plugins.descriptors.PlaceholderDescriptor``.
:return: True
"""
rel_obj = descriptor or PlaceholderDescriptor()
rel_obj.contribute_to_class(model_class, name)
setattr(model_class, name, rel_obj)
return True | [
"def",
"contribute_to_class",
"(",
"model_class",
",",
"name",
"=",
"'slots'",
",",
"descriptor",
"=",
"None",
")",
":",
"rel_obj",
"=",
"descriptor",
"or",
"PlaceholderDescriptor",
"(",
")",
"rel_obj",
".",
"contribute_to_class",
"(",
"model_class",
",",
"name"... | Function that adds a description to a model Class.
:param model_class: The model class the descriptor is to be added
to.
:param name: The attribute name the descriptor will be assigned to.
:param descriptor: The descriptor instance to be used. If none is
specified it will default to
``icekit.plugins.descriptors.PlaceholderDescriptor``.
:return: True | [
"Function",
"that",
"adds",
"a",
"description",
"to",
"a",
"model",
"Class",
"."
] | c507ea5b1864303732c53ad7c5800571fca5fa94 | https://github.com/ic-labs/django-icekit/blob/c507ea5b1864303732c53ad7c5800571fca5fa94/icekit/plugins/descriptors.py#L100-L115 | train | 33,354 |
ic-labs/django-icekit | icekit/plugins/descriptors.py | PlaceholderDescriptor.create_placeholder_access_object | def create_placeholder_access_object(self, instance):
"""
Created objects with placeholder slots as properties.
Each placeholder created for an object will be added to a
`PlaceHolderAccess` object as a set property.
"""
related_model = self.related_model
def get_related_model_objects(name):
"""
Obtains the related model objects based upon the slot name.
:param name: The slot name in string form.
:returns; Related model contents if they exist or it will
raise a `DoesNotExist` exception.
"""
# Parent type, parent id and slot are set to be unique on the default
# related model and therefore treated as such here.
return related_model.objects.get(
parent_type=ContentType.objects.get_for_model(type(instance)),
parent_id=instance.id,
slot=name,
).get_content_items()
class PlaceholderAccess(object):
def __getattribute__(self, name):
"""
Allow placeholder contents to be accessed via slot
name on the descriptor object.
For example if the slot was named `main` and you had
a descriptor named `slots` on an object named `page`
you would call it by `page.slots.main`.
If a slot name is used that does not exist an
`AttributeError` will be raised.
If you get this error for a slot that should exist, you may
need to run `manage.py add_missing_placeholders`.
"""
try:
return get_related_model_objects(name)
except related_model.DoesNotExist:
return super(PlaceholderAccess, self).__getattribute__(name)
def __getitem__(self, item):
"""
Allow placeholder contents to be accessed via slot
name one the descriptor object via a dictionary
lookup.
For example if the slot was named `main` and you had
a descriptor named `slots` on an object named `page`
you would call it by `page.slots['main']`.
If a slot name is used that does not exist a
`KeyError` will be raised.
"""
try:
return get_related_model_objects(item)
except related_model.DoesNotExist:
raise KeyError
return PlaceholderAccess() | python | def create_placeholder_access_object(self, instance):
"""
Created objects with placeholder slots as properties.
Each placeholder created for an object will be added to a
`PlaceHolderAccess` object as a set property.
"""
related_model = self.related_model
def get_related_model_objects(name):
"""
Obtains the related model objects based upon the slot name.
:param name: The slot name in string form.
:returns; Related model contents if they exist or it will
raise a `DoesNotExist` exception.
"""
# Parent type, parent id and slot are set to be unique on the default
# related model and therefore treated as such here.
return related_model.objects.get(
parent_type=ContentType.objects.get_for_model(type(instance)),
parent_id=instance.id,
slot=name,
).get_content_items()
class PlaceholderAccess(object):
def __getattribute__(self, name):
"""
Allow placeholder contents to be accessed via slot
name on the descriptor object.
For example if the slot was named `main` and you had
a descriptor named `slots` on an object named `page`
you would call it by `page.slots.main`.
If a slot name is used that does not exist an
`AttributeError` will be raised.
If you get this error for a slot that should exist, you may
need to run `manage.py add_missing_placeholders`.
"""
try:
return get_related_model_objects(name)
except related_model.DoesNotExist:
return super(PlaceholderAccess, self).__getattribute__(name)
def __getitem__(self, item):
"""
Allow placeholder contents to be accessed via slot
name one the descriptor object via a dictionary
lookup.
For example if the slot was named `main` and you had
a descriptor named `slots` on an object named `page`
you would call it by `page.slots['main']`.
If a slot name is used that does not exist a
`KeyError` will be raised.
"""
try:
return get_related_model_objects(item)
except related_model.DoesNotExist:
raise KeyError
return PlaceholderAccess() | [
"def",
"create_placeholder_access_object",
"(",
"self",
",",
"instance",
")",
":",
"related_model",
"=",
"self",
".",
"related_model",
"def",
"get_related_model_objects",
"(",
"name",
")",
":",
"\"\"\"\n Obtains the related model objects based upon the slot name.\n\n... | Created objects with placeholder slots as properties.
Each placeholder created for an object will be added to a
`PlaceHolderAccess` object as a set property. | [
"Created",
"objects",
"with",
"placeholder",
"slots",
"as",
"properties",
"."
] | c507ea5b1864303732c53ad7c5800571fca5fa94 | https://github.com/ic-labs/django-icekit/blob/c507ea5b1864303732c53ad7c5800571fca5fa94/icekit/plugins/descriptors.py#L33-L97 | train | 33,355 |
ic-labs/django-icekit | icekit/workflow/admin.py | get_users_for_assigned_to | def get_users_for_assigned_to():
""" Return a list of users who can be assigned to workflow states """
User = get_user_model()
return User.objects.filter(is_active=True, is_staff=True) | python | def get_users_for_assigned_to():
""" Return a list of users who can be assigned to workflow states """
User = get_user_model()
return User.objects.filter(is_active=True, is_staff=True) | [
"def",
"get_users_for_assigned_to",
"(",
")",
":",
"User",
"=",
"get_user_model",
"(",
")",
"return",
"User",
".",
"objects",
".",
"filter",
"(",
"is_active",
"=",
"True",
",",
"is_staff",
"=",
"True",
")"
] | Return a list of users who can be assigned to workflow states | [
"Return",
"a",
"list",
"of",
"users",
"who",
"can",
"be",
"assigned",
"to",
"workflow",
"states"
] | c507ea5b1864303732c53ad7c5800571fca5fa94 | https://github.com/ic-labs/django-icekit/blob/c507ea5b1864303732c53ad7c5800571fca5fa94/icekit/workflow/admin.py#L11-L14 | train | 33,356 |
ic-labs/django-icekit | icekit/workflow/admin.py | WorkflowMixinAdmin._get_obj_ct | def _get_obj_ct(self, obj):
""" Look up and return object's content type and cache for reuse """
if not hasattr(obj, '_wfct'):
# Use polymorpic content type if available
if hasattr(obj, 'polymorphic_ctype'):
obj._wfct = obj.polymorphic_ctype
else:
obj._wfct = ContentType.objects.get_for_model(obj)
return obj._wfct | python | def _get_obj_ct(self, obj):
""" Look up and return object's content type and cache for reuse """
if not hasattr(obj, '_wfct'):
# Use polymorpic content type if available
if hasattr(obj, 'polymorphic_ctype'):
obj._wfct = obj.polymorphic_ctype
else:
obj._wfct = ContentType.objects.get_for_model(obj)
return obj._wfct | [
"def",
"_get_obj_ct",
"(",
"self",
",",
"obj",
")",
":",
"if",
"not",
"hasattr",
"(",
"obj",
",",
"'_wfct'",
")",
":",
"# Use polymorpic content type if available",
"if",
"hasattr",
"(",
"obj",
",",
"'polymorphic_ctype'",
")",
":",
"obj",
".",
"_wfct",
"=",
... | Look up and return object's content type and cache for reuse | [
"Look",
"up",
"and",
"return",
"object",
"s",
"content",
"type",
"and",
"cache",
"for",
"reuse"
] | c507ea5b1864303732c53ad7c5800571fca5fa94 | https://github.com/ic-labs/django-icekit/blob/c507ea5b1864303732c53ad7c5800571fca5fa94/icekit/workflow/admin.py#L116-L124 | train | 33,357 |
ic-labs/django-icekit | icekit/workflow/admin.py | WorkflowMixinAdmin.workflow_states_column | def workflow_states_column(self, obj):
""" Return text description of workflow states assigned to object """
workflow_states = models.WorkflowState.objects.filter(
content_type=self._get_obj_ct(obj),
object_id=obj.pk,
)
return ', '.join([unicode(wfs) for wfs in workflow_states]) | python | def workflow_states_column(self, obj):
""" Return text description of workflow states assigned to object """
workflow_states = models.WorkflowState.objects.filter(
content_type=self._get_obj_ct(obj),
object_id=obj.pk,
)
return ', '.join([unicode(wfs) for wfs in workflow_states]) | [
"def",
"workflow_states_column",
"(",
"self",
",",
"obj",
")",
":",
"workflow_states",
"=",
"models",
".",
"WorkflowState",
".",
"objects",
".",
"filter",
"(",
"content_type",
"=",
"self",
".",
"_get_obj_ct",
"(",
"obj",
")",
",",
"object_id",
"=",
"obj",
... | Return text description of workflow states assigned to object | [
"Return",
"text",
"description",
"of",
"workflow",
"states",
"assigned",
"to",
"object"
] | c507ea5b1864303732c53ad7c5800571fca5fa94 | https://github.com/ic-labs/django-icekit/blob/c507ea5b1864303732c53ad7c5800571fca5fa94/icekit/workflow/admin.py#L126-L132 | train | 33,358 |
ic-labs/django-icekit | icekit/workflow/admin.py | WorkflowMixinAdmin.created_by_column | def created_by_column(self, obj):
""" Return user who first created an item in Django admin """
try:
first_addition_logentry = admin.models.LogEntry.objects.filter(
object_id=obj.pk,
content_type_id=self._get_obj_ct(obj).pk,
action_flag=admin.models.ADDITION,
).get()
return first_addition_logentry.user
except admin.models.LogEntry.DoesNotExist:
return None | python | def created_by_column(self, obj):
""" Return user who first created an item in Django admin """
try:
first_addition_logentry = admin.models.LogEntry.objects.filter(
object_id=obj.pk,
content_type_id=self._get_obj_ct(obj).pk,
action_flag=admin.models.ADDITION,
).get()
return first_addition_logentry.user
except admin.models.LogEntry.DoesNotExist:
return None | [
"def",
"created_by_column",
"(",
"self",
",",
"obj",
")",
":",
"try",
":",
"first_addition_logentry",
"=",
"admin",
".",
"models",
".",
"LogEntry",
".",
"objects",
".",
"filter",
"(",
"object_id",
"=",
"obj",
".",
"pk",
",",
"content_type_id",
"=",
"self",... | Return user who first created an item in Django admin | [
"Return",
"user",
"who",
"first",
"created",
"an",
"item",
"in",
"Django",
"admin"
] | c507ea5b1864303732c53ad7c5800571fca5fa94 | https://github.com/ic-labs/django-icekit/blob/c507ea5b1864303732c53ad7c5800571fca5fa94/icekit/workflow/admin.py#L135-L145 | train | 33,359 |
ic-labs/django-icekit | icekit/utils/fluent_contents.py | create_content_instance | def create_content_instance(content_plugin_class, page, placeholder_name='main', **kwargs):
"""
Creates a content instance from a content plugin class.
:param content_plugin_class: The class of the content plugin.
:param page: The fluent_page instance to create the content
instance one.
:param placeholder_name: The placeholder name defined in the
template. [DEFAULT: main]
:param kwargs: Additional keyword arguments to be used in the
content instance creation.
:return: The content instance created.
"""
# Get the placeholders that are currently available for the slot.
placeholders = page.get_placeholder_by_slot(placeholder_name)
# If a placeholder exists for the placeholder_name use the first one provided otherwise create
# a new placeholder instance.
if placeholders.exists():
placeholder = placeholders[0]
else:
placeholder = page.create_placeholder(placeholder_name)
# Obtain the content type for the page instance class.
ct = ContentType.objects.get_for_model(type(page))
# Create the actual plugin instance.
try:
content_instance = content_plugin_class.objects.create(
parent_type=ct,
parent_id=page.id,
placeholder=placeholder,
**kwargs
)
except TypeError:
raise Exception(
'Could not create content item instance, ensure you '
'have all required field values for the Model.'
)
return content_instance | python | def create_content_instance(content_plugin_class, page, placeholder_name='main', **kwargs):
"""
Creates a content instance from a content plugin class.
:param content_plugin_class: The class of the content plugin.
:param page: The fluent_page instance to create the content
instance one.
:param placeholder_name: The placeholder name defined in the
template. [DEFAULT: main]
:param kwargs: Additional keyword arguments to be used in the
content instance creation.
:return: The content instance created.
"""
# Get the placeholders that are currently available for the slot.
placeholders = page.get_placeholder_by_slot(placeholder_name)
# If a placeholder exists for the placeholder_name use the first one provided otherwise create
# a new placeholder instance.
if placeholders.exists():
placeholder = placeholders[0]
else:
placeholder = page.create_placeholder(placeholder_name)
# Obtain the content type for the page instance class.
ct = ContentType.objects.get_for_model(type(page))
# Create the actual plugin instance.
try:
content_instance = content_plugin_class.objects.create(
parent_type=ct,
parent_id=page.id,
placeholder=placeholder,
**kwargs
)
except TypeError:
raise Exception(
'Could not create content item instance, ensure you '
'have all required field values for the Model.'
)
return content_instance | [
"def",
"create_content_instance",
"(",
"content_plugin_class",
",",
"page",
",",
"placeholder_name",
"=",
"'main'",
",",
"*",
"*",
"kwargs",
")",
":",
"# Get the placeholders that are currently available for the slot.",
"placeholders",
"=",
"page",
".",
"get_placeholder_by_... | Creates a content instance from a content plugin class.
:param content_plugin_class: The class of the content plugin.
:param page: The fluent_page instance to create the content
instance one.
:param placeholder_name: The placeholder name defined in the
template. [DEFAULT: main]
:param kwargs: Additional keyword arguments to be used in the
content instance creation.
:return: The content instance created. | [
"Creates",
"a",
"content",
"instance",
"from",
"a",
"content",
"plugin",
"class",
"."
] | c507ea5b1864303732c53ad7c5800571fca5fa94 | https://github.com/ic-labs/django-icekit/blob/c507ea5b1864303732c53ad7c5800571fca5fa94/icekit/utils/fluent_contents.py#L7-L46 | train | 33,360 |
peterbe/hashin | hashin.py | expand_python_version | def expand_python_version(version):
"""
Expand Python versions to all identifiers used on PyPI.
>>> expand_python_version('3.5')
['3.5', 'py3', 'py2.py3', 'cp35']
"""
if not re.match(r"^\d\.\d$", version):
return [version]
major, minor = version.split(".")
patterns = [
"{major}.{minor}",
"cp{major}{minor}",
"py{major}",
"py{major}.{minor}",
"py{major}{minor}",
"source",
"py2.py3",
]
return set(pattern.format(major=major, minor=minor) for pattern in patterns) | python | def expand_python_version(version):
"""
Expand Python versions to all identifiers used on PyPI.
>>> expand_python_version('3.5')
['3.5', 'py3', 'py2.py3', 'cp35']
"""
if not re.match(r"^\d\.\d$", version):
return [version]
major, minor = version.split(".")
patterns = [
"{major}.{minor}",
"cp{major}{minor}",
"py{major}",
"py{major}.{minor}",
"py{major}{minor}",
"source",
"py2.py3",
]
return set(pattern.format(major=major, minor=minor) for pattern in patterns) | [
"def",
"expand_python_version",
"(",
"version",
")",
":",
"if",
"not",
"re",
".",
"match",
"(",
"r\"^\\d\\.\\d$\"",
",",
"version",
")",
":",
"return",
"[",
"version",
"]",
"major",
",",
"minor",
"=",
"version",
".",
"split",
"(",
"\".\"",
")",
"patterns... | Expand Python versions to all identifiers used on PyPI.
>>> expand_python_version('3.5')
['3.5', 'py3', 'py2.py3', 'cp35'] | [
"Expand",
"Python",
"versions",
"to",
"all",
"identifiers",
"used",
"on",
"PyPI",
"."
] | c3efcba050961257622c475114c087efe675c58a | https://github.com/peterbe/hashin/blob/c3efcba050961257622c475114c087efe675c58a/hashin.py#L464-L484 | train | 33,361 |
peterbe/hashin | hashin.py | get_package_hashes | def get_package_hashes(
package,
version=None,
algorithm=DEFAULT_ALGORITHM,
python_versions=(),
verbose=False,
include_prereleases=False,
lookup_memory=None,
index_url=DEFAULT_INDEX_URL,
):
"""
Gets the hashes for the given package.
>>> get_package_hashes('hashin')
{
'package': 'hashin',
'version': '0.10',
'hashes': [
{
'url': 'https://pypi.org/packages/[...]',
'hash': '45d1c5d2237a3b4f78b4198709fb2ecf[...]'
},
{
'url': 'https://pypi.org/packages/[...]',
'hash': '0d63bf4c115154781846ecf573049324[...]'
},
{
'url': 'https://pypi.org/packages/[...]',
'hash': 'c32e6d9fb09dc36ab9222c4606a1f43a[...]'
}
]
}
"""
if lookup_memory is not None and package in lookup_memory:
data = lookup_memory[package]
else:
data = get_package_data(package, index_url, verbose)
if not version:
version = get_latest_version(data, include_prereleases)
assert version
if verbose:
_verbose("Latest version for {0} is {1}".format(package, version))
# Independent of how you like to case type it, pick the correct
# name from the PyPI index.
package = data["info"]["name"]
try:
releases = data["releases"][version]
except KeyError:
raise PackageError("No data found for version {0}".format(version))
if python_versions:
releases = filter_releases(releases, python_versions)
if not releases:
if python_versions:
raise PackageError(
"No releases could be found for "
"{0} matching Python versions {1}".format(version, python_versions)
)
else:
raise PackageError("No releases could be found for {0}".format(version))
hashes = list(
get_releases_hashes(releases=releases, algorithm=algorithm, verbose=verbose)
)
return {"package": package, "version": version, "hashes": hashes} | python | def get_package_hashes(
package,
version=None,
algorithm=DEFAULT_ALGORITHM,
python_versions=(),
verbose=False,
include_prereleases=False,
lookup_memory=None,
index_url=DEFAULT_INDEX_URL,
):
"""
Gets the hashes for the given package.
>>> get_package_hashes('hashin')
{
'package': 'hashin',
'version': '0.10',
'hashes': [
{
'url': 'https://pypi.org/packages/[...]',
'hash': '45d1c5d2237a3b4f78b4198709fb2ecf[...]'
},
{
'url': 'https://pypi.org/packages/[...]',
'hash': '0d63bf4c115154781846ecf573049324[...]'
},
{
'url': 'https://pypi.org/packages/[...]',
'hash': 'c32e6d9fb09dc36ab9222c4606a1f43a[...]'
}
]
}
"""
if lookup_memory is not None and package in lookup_memory:
data = lookup_memory[package]
else:
data = get_package_data(package, index_url, verbose)
if not version:
version = get_latest_version(data, include_prereleases)
assert version
if verbose:
_verbose("Latest version for {0} is {1}".format(package, version))
# Independent of how you like to case type it, pick the correct
# name from the PyPI index.
package = data["info"]["name"]
try:
releases = data["releases"][version]
except KeyError:
raise PackageError("No data found for version {0}".format(version))
if python_versions:
releases = filter_releases(releases, python_versions)
if not releases:
if python_versions:
raise PackageError(
"No releases could be found for "
"{0} matching Python versions {1}".format(version, python_versions)
)
else:
raise PackageError("No releases could be found for {0}".format(version))
hashes = list(
get_releases_hashes(releases=releases, algorithm=algorithm, verbose=verbose)
)
return {"package": package, "version": version, "hashes": hashes} | [
"def",
"get_package_hashes",
"(",
"package",
",",
"version",
"=",
"None",
",",
"algorithm",
"=",
"DEFAULT_ALGORITHM",
",",
"python_versions",
"=",
"(",
")",
",",
"verbose",
"=",
"False",
",",
"include_prereleases",
"=",
"False",
",",
"lookup_memory",
"=",
"Non... | Gets the hashes for the given package.
>>> get_package_hashes('hashin')
{
'package': 'hashin',
'version': '0.10',
'hashes': [
{
'url': 'https://pypi.org/packages/[...]',
'hash': '45d1c5d2237a3b4f78b4198709fb2ecf[...]'
},
{
'url': 'https://pypi.org/packages/[...]',
'hash': '0d63bf4c115154781846ecf573049324[...]'
},
{
'url': 'https://pypi.org/packages/[...]',
'hash': 'c32e6d9fb09dc36ab9222c4606a1f43a[...]'
}
]
} | [
"Gets",
"the",
"hashes",
"for",
"the",
"given",
"package",
"."
] | c3efcba050961257622c475114c087efe675c58a | https://github.com/peterbe/hashin/blob/c3efcba050961257622c475114c087efe675c58a/hashin.py#L621-L688 | train | 33,362 |
ic-labs/django-icekit | icekit/fields.py | TemplateNameField.formfield | def formfield(self, **kwargs):
"""
Get choices from plugins, if necessary.
"""
if self.plugin_class:
self._choices = self.plugin_class.get_all_choices(field=self)
return super(TemplateNameField, self).formfield(**kwargs) | python | def formfield(self, **kwargs):
"""
Get choices from plugins, if necessary.
"""
if self.plugin_class:
self._choices = self.plugin_class.get_all_choices(field=self)
return super(TemplateNameField, self).formfield(**kwargs) | [
"def",
"formfield",
"(",
"self",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"self",
".",
"plugin_class",
":",
"self",
".",
"_choices",
"=",
"self",
".",
"plugin_class",
".",
"get_all_choices",
"(",
"field",
"=",
"self",
")",
"return",
"super",
"(",
"Temp... | Get choices from plugins, if necessary. | [
"Get",
"choices",
"from",
"plugins",
"if",
"necessary",
"."
] | c507ea5b1864303732c53ad7c5800571fca5fa94 | https://github.com/ic-labs/django-icekit/blob/c507ea5b1864303732c53ad7c5800571fca5fa94/icekit/fields.py#L39-L45 | train | 33,363 |
ic-labs/django-icekit | icekit/fields.py | ICEkitURLField.register_model_once | def register_model_once(cls, ModelClass, **kwargs):
"""
Tweaked version of `AnyUrlField.register_model` that only registers the
given model after checking that it is not already registered.
"""
if cls._static_registry.get_for_model(ModelClass) is None:
logger.warn("Model is already registered with {0}: '{1}'"
.format(cls, ModelClass))
else:
cls.register_model.register(ModelClass, **kwargs) | python | def register_model_once(cls, ModelClass, **kwargs):
"""
Tweaked version of `AnyUrlField.register_model` that only registers the
given model after checking that it is not already registered.
"""
if cls._static_registry.get_for_model(ModelClass) is None:
logger.warn("Model is already registered with {0}: '{1}'"
.format(cls, ModelClass))
else:
cls.register_model.register(ModelClass, **kwargs) | [
"def",
"register_model_once",
"(",
"cls",
",",
"ModelClass",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"cls",
".",
"_static_registry",
".",
"get_for_model",
"(",
"ModelClass",
")",
"is",
"None",
":",
"logger",
".",
"warn",
"(",
"\"Model is already registered wi... | Tweaked version of `AnyUrlField.register_model` that only registers the
given model after checking that it is not already registered. | [
"Tweaked",
"version",
"of",
"AnyUrlField",
".",
"register_model",
"that",
"only",
"registers",
"the",
"given",
"model",
"after",
"checking",
"that",
"it",
"is",
"not",
"already",
"registered",
"."
] | c507ea5b1864303732c53ad7c5800571fca5fa94 | https://github.com/ic-labs/django-icekit/blob/c507ea5b1864303732c53ad7c5800571fca5fa94/icekit/fields.py#L54-L63 | train | 33,364 |
ic-labs/django-icekit | icekit/admin_tools/templatetags/dashboard_tags.py | filter_featured_apps | def filter_featured_apps(admin_apps, request):
"""
Given a list of apps return a set of pseudo-apps considered featured.
Apps are considered featured if the are defined in the settings
property called `DASHBOARD_FEATURED_APPS` which contains a list of the apps
that are considered to be featured.
:param admin_apps: A list of apps.
:param request: Django request.
:return: Subset of app like objects that are listed in
the settings `DASHBOARD_FEATURED_APPS` setting.
"""
featured_apps = []
# Build the featured apps list based upon settings.
for orig_app_spec in appsettings.DASHBOARD_FEATURED_APPS:
# make a copy that we can write to, to fix deprecations without
# changing settings
app_spec = orig_app_spec.copy()
if "verbose_name" in app_spec:
warnings.warn(
"DASHBOARD_FEATURED_APPS[]['verbose_name'] = '%s' is deprecated. "
"Use 'name' instead)" % app_spec['verbose_name'],
DeprecationWarning, stacklevel=2
)
app_spec['name'] = app_spec['verbose_name']
if hasattr(app_spec['models'], 'items'):
warnings.warn(
"DASHBOARD_FEATURED_APPS[]['models'] for '%s' should now be a "
"list of tuples, not a dict." % app_spec['name'],
DeprecationWarning, stacklevel=2
)
app_spec['models'] = app_spec['models'].items()
# lookup the models from the names
app_spec['models'] = _build_app_models(
request, admin_apps, app_spec['models']
)
# Only add the panel if at least one model is listed.
if app_spec['models']:
featured_apps.append(app_spec)
return featured_apps | python | def filter_featured_apps(admin_apps, request):
"""
Given a list of apps return a set of pseudo-apps considered featured.
Apps are considered featured if the are defined in the settings
property called `DASHBOARD_FEATURED_APPS` which contains a list of the apps
that are considered to be featured.
:param admin_apps: A list of apps.
:param request: Django request.
:return: Subset of app like objects that are listed in
the settings `DASHBOARD_FEATURED_APPS` setting.
"""
featured_apps = []
# Build the featured apps list based upon settings.
for orig_app_spec in appsettings.DASHBOARD_FEATURED_APPS:
# make a copy that we can write to, to fix deprecations without
# changing settings
app_spec = orig_app_spec.copy()
if "verbose_name" in app_spec:
warnings.warn(
"DASHBOARD_FEATURED_APPS[]['verbose_name'] = '%s' is deprecated. "
"Use 'name' instead)" % app_spec['verbose_name'],
DeprecationWarning, stacklevel=2
)
app_spec['name'] = app_spec['verbose_name']
if hasattr(app_spec['models'], 'items'):
warnings.warn(
"DASHBOARD_FEATURED_APPS[]['models'] for '%s' should now be a "
"list of tuples, not a dict." % app_spec['name'],
DeprecationWarning, stacklevel=2
)
app_spec['models'] = app_spec['models'].items()
# lookup the models from the names
app_spec['models'] = _build_app_models(
request, admin_apps, app_spec['models']
)
# Only add the panel if at least one model is listed.
if app_spec['models']:
featured_apps.append(app_spec)
return featured_apps | [
"def",
"filter_featured_apps",
"(",
"admin_apps",
",",
"request",
")",
":",
"featured_apps",
"=",
"[",
"]",
"# Build the featured apps list based upon settings.",
"for",
"orig_app_spec",
"in",
"appsettings",
".",
"DASHBOARD_FEATURED_APPS",
":",
"# make a copy that we can writ... | Given a list of apps return a set of pseudo-apps considered featured.
Apps are considered featured if the are defined in the settings
property called `DASHBOARD_FEATURED_APPS` which contains a list of the apps
that are considered to be featured.
:param admin_apps: A list of apps.
:param request: Django request.
:return: Subset of app like objects that are listed in
the settings `DASHBOARD_FEATURED_APPS` setting. | [
"Given",
"a",
"list",
"of",
"apps",
"return",
"a",
"set",
"of",
"pseudo",
"-",
"apps",
"considered",
"featured",
"."
] | c507ea5b1864303732c53ad7c5800571fca5fa94 | https://github.com/ic-labs/django-icekit/blob/c507ea5b1864303732c53ad7c5800571fca5fa94/icekit/admin_tools/templatetags/dashboard_tags.py#L63-L109 | train | 33,365 |
ic-labs/django-icekit | icekit/admin_tools/templatetags/dashboard_tags.py | _remove_app_models | def _remove_app_models(all_apps, models_to_remove):
"""
Remove the model specs in models_to_remove from the models specs in the
apps in all_apps. If an app has no models left, don't include it in the
output.
This has the side-effect that the app view e.g. /admin/app/ may not be
accessible from the dashboard, only the breadcrumbs.
"""
filtered_apps = []
for app in all_apps:
models = [x for x in app['models'] if x not in models_to_remove]
if models:
app['models'] = models
filtered_apps.append(app)
return filtered_apps | python | def _remove_app_models(all_apps, models_to_remove):
"""
Remove the model specs in models_to_remove from the models specs in the
apps in all_apps. If an app has no models left, don't include it in the
output.
This has the side-effect that the app view e.g. /admin/app/ may not be
accessible from the dashboard, only the breadcrumbs.
"""
filtered_apps = []
for app in all_apps:
models = [x for x in app['models'] if x not in models_to_remove]
if models:
app['models'] = models
filtered_apps.append(app)
return filtered_apps | [
"def",
"_remove_app_models",
"(",
"all_apps",
",",
"models_to_remove",
")",
":",
"filtered_apps",
"=",
"[",
"]",
"for",
"app",
"in",
"all_apps",
":",
"models",
"=",
"[",
"x",
"for",
"x",
"in",
"app",
"[",
"'models'",
"]",
"if",
"x",
"not",
"in",
"model... | Remove the model specs in models_to_remove from the models specs in the
apps in all_apps. If an app has no models left, don't include it in the
output.
This has the side-effect that the app view e.g. /admin/app/ may not be
accessible from the dashboard, only the breadcrumbs. | [
"Remove",
"the",
"model",
"specs",
"in",
"models_to_remove",
"from",
"the",
"models",
"specs",
"in",
"the",
"apps",
"in",
"all_apps",
".",
"If",
"an",
"app",
"has",
"no",
"models",
"left",
"don",
"t",
"include",
"it",
"in",
"the",
"output",
"."
] | c507ea5b1864303732c53ad7c5800571fca5fa94 | https://github.com/ic-labs/django-icekit/blob/c507ea5b1864303732c53ad7c5800571fca5fa94/icekit/admin_tools/templatetags/dashboard_tags.py#L112-L130 | train | 33,366 |
ic-labs/django-icekit | icekit/admin_tools/templatetags/dashboard_tags.py | filter_sorted_apps | def filter_sorted_apps(admin_apps, request):
"""
Filter admin_apps to show the ones in ``DASHBOARD_SORTED_APPS`` first,
and remove them from the subsequent listings.
"""
sorted_apps = []
for orig_app_spec in appsettings.DASHBOARD_SORTED_APPS:
# make a copy that we can write to, to fix deprecations without
# changing settings
app_spec = orig_app_spec.copy()
# lookup the models from the names
app_spec['models'] = _build_app_models(
request, admin_apps, app_spec['models'], ensure_all_models=True
)
# Only add the panel if at least one model is listed.
if app_spec['models']:
sorted_apps.append(app_spec)
used_models = []
for app in sorted_apps:
used_models += app['models']
sorted_apps += _remove_app_models(admin_apps, used_models)
return sorted_apps | python | def filter_sorted_apps(admin_apps, request):
"""
Filter admin_apps to show the ones in ``DASHBOARD_SORTED_APPS`` first,
and remove them from the subsequent listings.
"""
sorted_apps = []
for orig_app_spec in appsettings.DASHBOARD_SORTED_APPS:
# make a copy that we can write to, to fix deprecations without
# changing settings
app_spec = orig_app_spec.copy()
# lookup the models from the names
app_spec['models'] = _build_app_models(
request, admin_apps, app_spec['models'], ensure_all_models=True
)
# Only add the panel if at least one model is listed.
if app_spec['models']:
sorted_apps.append(app_spec)
used_models = []
for app in sorted_apps:
used_models += app['models']
sorted_apps += _remove_app_models(admin_apps, used_models)
return sorted_apps | [
"def",
"filter_sorted_apps",
"(",
"admin_apps",
",",
"request",
")",
":",
"sorted_apps",
"=",
"[",
"]",
"for",
"orig_app_spec",
"in",
"appsettings",
".",
"DASHBOARD_SORTED_APPS",
":",
"# make a copy that we can write to, to fix deprecations without",
"# changing settings",
... | Filter admin_apps to show the ones in ``DASHBOARD_SORTED_APPS`` first,
and remove them from the subsequent listings. | [
"Filter",
"admin_apps",
"to",
"show",
"the",
"ones",
"in",
"DASHBOARD_SORTED_APPS",
"first",
"and",
"remove",
"them",
"from",
"the",
"subsequent",
"listings",
"."
] | c507ea5b1864303732c53ad7c5800571fca5fa94 | https://github.com/ic-labs/django-icekit/blob/c507ea5b1864303732c53ad7c5800571fca5fa94/icekit/admin_tools/templatetags/dashboard_tags.py#L134-L160 | train | 33,367 |
ic-labs/django-icekit | icekit/admin_tools/mixins.py | FluentLayoutsMixin.formfield_for_foreignkey | def formfield_for_foreignkey(self, db_field, *args, **kwargs):
"""
Update queryset for ``layout`` field.
"""
formfield = super(FluentLayoutsMixin, self).formfield_for_foreignkey(
db_field, *args, **kwargs)
if db_field.name == 'layout':
formfield.queryset = formfield.queryset.for_model(self.model)
return formfield | python | def formfield_for_foreignkey(self, db_field, *args, **kwargs):
"""
Update queryset for ``layout`` field.
"""
formfield = super(FluentLayoutsMixin, self).formfield_for_foreignkey(
db_field, *args, **kwargs)
if db_field.name == 'layout':
formfield.queryset = formfield.queryset.for_model(self.model)
return formfield | [
"def",
"formfield_for_foreignkey",
"(",
"self",
",",
"db_field",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"formfield",
"=",
"super",
"(",
"FluentLayoutsMixin",
",",
"self",
")",
".",
"formfield_for_foreignkey",
"(",
"db_field",
",",
"*",
"args",
... | Update queryset for ``layout`` field. | [
"Update",
"queryset",
"for",
"layout",
"field",
"."
] | c507ea5b1864303732c53ad7c5800571fca5fa94 | https://github.com/ic-labs/django-icekit/blob/c507ea5b1864303732c53ad7c5800571fca5fa94/icekit/admin_tools/mixins.py#L36-L44 | train | 33,368 |
ic-labs/django-icekit | icekit/admin_tools/mixins.py | FluentLayoutsMixin.get_placeholder_data | def get_placeholder_data(self, request, obj):
"""
Get placeholder data from layout.
"""
if not obj or not getattr(obj, 'layout', None):
data = [PlaceholderData(slot='main', role='m', title='Main')]
else:
data = obj.layout.get_placeholder_data()
return data | python | def get_placeholder_data(self, request, obj):
"""
Get placeholder data from layout.
"""
if not obj or not getattr(obj, 'layout', None):
data = [PlaceholderData(slot='main', role='m', title='Main')]
else:
data = obj.layout.get_placeholder_data()
return data | [
"def",
"get_placeholder_data",
"(",
"self",
",",
"request",
",",
"obj",
")",
":",
"if",
"not",
"obj",
"or",
"not",
"getattr",
"(",
"obj",
",",
"'layout'",
",",
"None",
")",
":",
"data",
"=",
"[",
"PlaceholderData",
"(",
"slot",
"=",
"'main'",
",",
"r... | Get placeholder data from layout. | [
"Get",
"placeholder",
"data",
"from",
"layout",
"."
] | c507ea5b1864303732c53ad7c5800571fca5fa94 | https://github.com/ic-labs/django-icekit/blob/c507ea5b1864303732c53ad7c5800571fca5fa94/icekit/admin_tools/mixins.py#L46-L54 | train | 33,369 |
ic-labs/django-icekit | icekit/admin_tools/mixins.py | ThumbnailAdminMixin.get_thumbnail_source | def get_thumbnail_source(self, obj):
"""
Obtains the source image field for the thumbnail.
:param obj: An object with a thumbnail_field defined.
:return: Image field for thumbnail or None if not found.
"""
if hasattr(self, 'thumbnail_field') and self.thumbnail_field:
return resolve(obj, self.thumbnail_field)
# try get_list_image, from ListableMixin
if hasattr(obj, 'get_list_image'):
return resolve(obj, "get_list_image")
logger.warning('ThumbnailAdminMixin.thumbnail_field unspecified')
return None | python | def get_thumbnail_source(self, obj):
"""
Obtains the source image field for the thumbnail.
:param obj: An object with a thumbnail_field defined.
:return: Image field for thumbnail or None if not found.
"""
if hasattr(self, 'thumbnail_field') and self.thumbnail_field:
return resolve(obj, self.thumbnail_field)
# try get_list_image, from ListableMixin
if hasattr(obj, 'get_list_image'):
return resolve(obj, "get_list_image")
logger.warning('ThumbnailAdminMixin.thumbnail_field unspecified')
return None | [
"def",
"get_thumbnail_source",
"(",
"self",
",",
"obj",
")",
":",
"if",
"hasattr",
"(",
"self",
",",
"'thumbnail_field'",
")",
"and",
"self",
".",
"thumbnail_field",
":",
"return",
"resolve",
"(",
"obj",
",",
"self",
".",
"thumbnail_field",
")",
"# try get_l... | Obtains the source image field for the thumbnail.
:param obj: An object with a thumbnail_field defined.
:return: Image field for thumbnail or None if not found. | [
"Obtains",
"the",
"source",
"image",
"field",
"for",
"the",
"thumbnail",
"."
] | c507ea5b1864303732c53ad7c5800571fca5fa94 | https://github.com/ic-labs/django-icekit/blob/c507ea5b1864303732c53ad7c5800571fca5fa94/icekit/admin_tools/mixins.py#L134-L149 | train | 33,370 |
ic-labs/django-icekit | icekit/admin_tools/mixins.py | ThumbnailAdminMixin.preview | def preview(self, obj, request=None):
"""
Generate the HTML to display for the image.
:param obj: An object with a thumbnail_field defined.
:return: HTML for image display.
"""
source = self.get_thumbnail_source(obj)
if source:
try:
from easy_thumbnails.files import get_thumbnailer
except ImportError:
logger.warning(
_(
'`easy_thumbnails` is not installed and required for '
'icekit.admin_tools.mixins.ThumbnailAdminMixin'
)
)
return ''
try:
thumbnailer = get_thumbnailer(source)
thumbnail = thumbnailer.get_thumbnail(self.thumbnail_options)
return '<img class="thumbnail" src="{0}" />'.format(
thumbnail.url)
except Exception as ex:
logger.warning(
_(u'`easy_thumbnails` failed to generate a thumbnail image'
u' for {0}'.format(source)))
if self.thumbnail_show_exceptions:
return 'Thumbnail exception: {0}'.format(ex)
return '' | python | def preview(self, obj, request=None):
"""
Generate the HTML to display for the image.
:param obj: An object with a thumbnail_field defined.
:return: HTML for image display.
"""
source = self.get_thumbnail_source(obj)
if source:
try:
from easy_thumbnails.files import get_thumbnailer
except ImportError:
logger.warning(
_(
'`easy_thumbnails` is not installed and required for '
'icekit.admin_tools.mixins.ThumbnailAdminMixin'
)
)
return ''
try:
thumbnailer = get_thumbnailer(source)
thumbnail = thumbnailer.get_thumbnail(self.thumbnail_options)
return '<img class="thumbnail" src="{0}" />'.format(
thumbnail.url)
except Exception as ex:
logger.warning(
_(u'`easy_thumbnails` failed to generate a thumbnail image'
u' for {0}'.format(source)))
if self.thumbnail_show_exceptions:
return 'Thumbnail exception: {0}'.format(ex)
return '' | [
"def",
"preview",
"(",
"self",
",",
"obj",
",",
"request",
"=",
"None",
")",
":",
"source",
"=",
"self",
".",
"get_thumbnail_source",
"(",
"obj",
")",
"if",
"source",
":",
"try",
":",
"from",
"easy_thumbnails",
".",
"files",
"import",
"get_thumbnailer",
... | Generate the HTML to display for the image.
:param obj: An object with a thumbnail_field defined.
:return: HTML for image display. | [
"Generate",
"the",
"HTML",
"to",
"display",
"for",
"the",
"image",
"."
] | c507ea5b1864303732c53ad7c5800571fca5fa94 | https://github.com/ic-labs/django-icekit/blob/c507ea5b1864303732c53ad7c5800571fca5fa94/icekit/admin_tools/mixins.py#L151-L181 | train | 33,371 |
ic-labs/django-icekit | icekit/tasks.py | one_instance | def one_instance(function=None, key='', timeout=DEFAULT_ONE_INSTANCE_TIMEOUT):
"""
Decorator to enforce only one Celery task execution at a time when multiple
workers are available.
See http://loose-bits.com/2010/10/distributed-task-locking-in-celery.html
"""
def _dec(run_func):
def _caller(*args, **kwargs):
ret_value = None
have_lock = False
# Use Redis AOF for persistent lock.
if REDIS_CLIENT.config_get('appendonly').values()[0] == 'no':
REDIS_CLIENT.config_set('appendonly', 'yes')
lock = REDIS_CLIENT.lock(key, timeout=timeout)
try:
logger.debug(
'%s: trying to acquire lock (PID %d).'
% (key, os.getpid()))
have_lock = lock.acquire(blocking=False)
if have_lock:
logger.debug(
'%s: acquired lock (PID %d).' % (key, os.getpid()))
ret_value = run_func(*args, **kwargs)
else:
logger.error(
'%s: did not acquire lock (PID %d).'
% (key, os.getpid()))
finally:
if have_lock:
lock.release()
logger.debug(
'%s: released lock (PID %d).' % (key, os.getpid()))
return ret_value
return _caller
return _dec(function) if function is not None else _dec | python | def one_instance(function=None, key='', timeout=DEFAULT_ONE_INSTANCE_TIMEOUT):
"""
Decorator to enforce only one Celery task execution at a time when multiple
workers are available.
See http://loose-bits.com/2010/10/distributed-task-locking-in-celery.html
"""
def _dec(run_func):
def _caller(*args, **kwargs):
ret_value = None
have_lock = False
# Use Redis AOF for persistent lock.
if REDIS_CLIENT.config_get('appendonly').values()[0] == 'no':
REDIS_CLIENT.config_set('appendonly', 'yes')
lock = REDIS_CLIENT.lock(key, timeout=timeout)
try:
logger.debug(
'%s: trying to acquire lock (PID %d).'
% (key, os.getpid()))
have_lock = lock.acquire(blocking=False)
if have_lock:
logger.debug(
'%s: acquired lock (PID %d).' % (key, os.getpid()))
ret_value = run_func(*args, **kwargs)
else:
logger.error(
'%s: did not acquire lock (PID %d).'
% (key, os.getpid()))
finally:
if have_lock:
lock.release()
logger.debug(
'%s: released lock (PID %d).' % (key, os.getpid()))
return ret_value
return _caller
return _dec(function) if function is not None else _dec | [
"def",
"one_instance",
"(",
"function",
"=",
"None",
",",
"key",
"=",
"''",
",",
"timeout",
"=",
"DEFAULT_ONE_INSTANCE_TIMEOUT",
")",
":",
"def",
"_dec",
"(",
"run_func",
")",
":",
"def",
"_caller",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
... | Decorator to enforce only one Celery task execution at a time when multiple
workers are available.
See http://loose-bits.com/2010/10/distributed-task-locking-in-celery.html | [
"Decorator",
"to",
"enforce",
"only",
"one",
"Celery",
"task",
"execution",
"at",
"a",
"time",
"when",
"multiple",
"workers",
"are",
"available",
"."
] | c507ea5b1864303732c53ad7c5800571fca5fa94 | https://github.com/ic-labs/django-icekit/blob/c507ea5b1864303732c53ad7c5800571fca5fa94/icekit/tasks.py#L28-L63 | train | 33,372 |
ic-labs/django-icekit | icekit/search_indexes.py | PageIndex.index_queryset | def index_queryset(self, using=None):
"""
Index current language translation of published objects.
TODO: Find a way to index all translations of the given model, not just
the current site language's translation.
"""
translation.activate(settings.LANGUAGE_CODE)
return self.get_model().objects.filter(status=UrlNode.PUBLISHED).select_related() | python | def index_queryset(self, using=None):
"""
Index current language translation of published objects.
TODO: Find a way to index all translations of the given model, not just
the current site language's translation.
"""
translation.activate(settings.LANGUAGE_CODE)
return self.get_model().objects.filter(status=UrlNode.PUBLISHED).select_related() | [
"def",
"index_queryset",
"(",
"self",
",",
"using",
"=",
"None",
")",
":",
"translation",
".",
"activate",
"(",
"settings",
".",
"LANGUAGE_CODE",
")",
"return",
"self",
".",
"get_model",
"(",
")",
".",
"objects",
".",
"filter",
"(",
"status",
"=",
"UrlNo... | Index current language translation of published objects.
TODO: Find a way to index all translations of the given model, not just
the current site language's translation. | [
"Index",
"current",
"language",
"translation",
"of",
"published",
"objects",
"."
] | c507ea5b1864303732c53ad7c5800571fca5fa94 | https://github.com/ic-labs/django-icekit/blob/c507ea5b1864303732c53ad7c5800571fca5fa94/icekit/search_indexes.py#L15-L23 | train | 33,373 |
ic-labs/django-icekit | icekit/content_collections/page_type_plugins.py | ListingPagePlugin.get_context | def get_context(self, request, page, **kwargs):
""" Include in context items to be visible on listing page """
context = super(ListingPagePlugin, self).get_context(
request, page, **kwargs)
context['items_to_list'] = page.get_items_to_list(request)
return context | python | def get_context(self, request, page, **kwargs):
""" Include in context items to be visible on listing page """
context = super(ListingPagePlugin, self).get_context(
request, page, **kwargs)
context['items_to_list'] = page.get_items_to_list(request)
return context | [
"def",
"get_context",
"(",
"self",
",",
"request",
",",
"page",
",",
"*",
"*",
"kwargs",
")",
":",
"context",
"=",
"super",
"(",
"ListingPagePlugin",
",",
"self",
")",
".",
"get_context",
"(",
"request",
",",
"page",
",",
"*",
"*",
"kwargs",
")",
"co... | Include in context items to be visible on listing page | [
"Include",
"in",
"context",
"items",
"to",
"be",
"visible",
"on",
"listing",
"page"
] | c507ea5b1864303732c53ad7c5800571fca5fa94 | https://github.com/ic-labs/django-icekit/blob/c507ea5b1864303732c53ad7c5800571fca5fa94/icekit/content_collections/page_type_plugins.py#L13-L18 | train | 33,374 |
ic-labs/django-icekit | icekit/content_collections/page_type_plugins.py | ListingPagePlugin.get_view_response | def get_view_response(self, request, page, view_func, view_args, view_kwargs):
"""
Render the custom view that was exposed by the extra plugin URL patterns.
This gives the ability to add extra middleware logic.
"""
return view_func(request, page, *view_args, **view_kwargs) | python | def get_view_response(self, request, page, view_func, view_args, view_kwargs):
"""
Render the custom view that was exposed by the extra plugin URL patterns.
This gives the ability to add extra middleware logic.
"""
return view_func(request, page, *view_args, **view_kwargs) | [
"def",
"get_view_response",
"(",
"self",
",",
"request",
",",
"page",
",",
"view_func",
",",
"view_args",
",",
"view_kwargs",
")",
":",
"return",
"view_func",
"(",
"request",
",",
"page",
",",
"*",
"view_args",
",",
"*",
"*",
"view_kwargs",
")"
] | Render the custom view that was exposed by the extra plugin URL patterns.
This gives the ability to add extra middleware logic. | [
"Render",
"the",
"custom",
"view",
"that",
"was",
"exposed",
"by",
"the",
"extra",
"plugin",
"URL",
"patterns",
".",
"This",
"gives",
"the",
"ability",
"to",
"add",
"extra",
"middleware",
"logic",
"."
] | c507ea5b1864303732c53ad7c5800571fca5fa94 | https://github.com/ic-labs/django-icekit/blob/c507ea5b1864303732c53ad7c5800571fca5fa94/icekit/content_collections/page_type_plugins.py#L20-L25 | train | 33,375 |
ic-labs/django-icekit | icekit/publishing/admin.py | PublishingAdminForm.clean | def clean(self):
"""
Additional clean data checks for path and keys.
These are not cleaned in their respective methods e.g.
`clean_slug` as they depend upon other field data.
:return: Cleaned data.
"""
data = super(PublishingAdminForm, self).clean()
cleaned_data = self.cleaned_data
instance = self.instance
# work out which fields are unique_together
unique_fields_set = instance.get_unique_together()
if not unique_fields_set:
return data
for unique_fields in unique_fields_set:
unique_filter = {}
for unique_field in unique_fields:
field = instance.get_field(unique_field)
# Get value from the form or the model
if field.editable and unique_field in cleaned_data:
unique_filter[unique_field] = cleaned_data[unique_field]
else:
unique_filter[unique_field] = \
getattr(instance, unique_field)
# try to find if any models already exist in the db; I find all
# models and then exclude those matching the current model.
existing_instances = type(instance).objects \
.filter(**unique_filter) \
.exclude(pk=instance.pk)
if instance.publishing_linked:
existing_instances = existing_instances.exclude(
pk=instance.publishing_linked.pk)
if existing_instances:
for unique_field in unique_fields:
self._errors[unique_field] = self.error_class(
[_('This value must be unique.')])
return data | python | def clean(self):
"""
Additional clean data checks for path and keys.
These are not cleaned in their respective methods e.g.
`clean_slug` as they depend upon other field data.
:return: Cleaned data.
"""
data = super(PublishingAdminForm, self).clean()
cleaned_data = self.cleaned_data
instance = self.instance
# work out which fields are unique_together
unique_fields_set = instance.get_unique_together()
if not unique_fields_set:
return data
for unique_fields in unique_fields_set:
unique_filter = {}
for unique_field in unique_fields:
field = instance.get_field(unique_field)
# Get value from the form or the model
if field.editable and unique_field in cleaned_data:
unique_filter[unique_field] = cleaned_data[unique_field]
else:
unique_filter[unique_field] = \
getattr(instance, unique_field)
# try to find if any models already exist in the db; I find all
# models and then exclude those matching the current model.
existing_instances = type(instance).objects \
.filter(**unique_filter) \
.exclude(pk=instance.pk)
if instance.publishing_linked:
existing_instances = existing_instances.exclude(
pk=instance.publishing_linked.pk)
if existing_instances:
for unique_field in unique_fields:
self._errors[unique_field] = self.error_class(
[_('This value must be unique.')])
return data | [
"def",
"clean",
"(",
"self",
")",
":",
"data",
"=",
"super",
"(",
"PublishingAdminForm",
",",
"self",
")",
".",
"clean",
"(",
")",
"cleaned_data",
"=",
"self",
".",
"cleaned_data",
"instance",
"=",
"self",
".",
"instance",
"# work out which fields are unique_t... | Additional clean data checks for path and keys.
These are not cleaned in their respective methods e.g.
`clean_slug` as they depend upon other field data.
:return: Cleaned data. | [
"Additional",
"clean",
"data",
"checks",
"for",
"path",
"and",
"keys",
"."
] | c507ea5b1864303732c53ad7c5800571fca5fa94 | https://github.com/ic-labs/django-icekit/blob/c507ea5b1864303732c53ad7c5800571fca5fa94/icekit/publishing/admin.py#L189-L235 | train | 33,376 |
ic-labs/django-icekit | icekit/publishing/admin.py | _PublishingHelpersMixin.has_publish_permission | def has_publish_permission(self, request, obj=None):
"""
Determines if the user has permissions to publish.
:param request: Django request object.
:param obj: The object to determine if the user has
permissions to publish.
:return: Boolean.
"""
# If auto-publishing is enabled, no user has "permission" to publish
# because it happens automatically
if is_automatic_publishing_enabled(self.model):
return False
user_obj = request.user
if not user_obj.is_active:
return False
if user_obj.is_superuser:
return True
# Normal user with `can_publish` permission can always publish
if user_obj.has_perm('%s.can_publish' % self.opts.app_label):
return True
# Normal user with `can_republish` permission can only publish if the
# item is already published.
if user_obj.has_perm('%s.can_republish' % self.opts.app_label) and \
obj and getattr(obj, 'has_been_published', False):
return True
# User does not meet any publishing permisison requirements; reject!
return False | python | def has_publish_permission(self, request, obj=None):
"""
Determines if the user has permissions to publish.
:param request: Django request object.
:param obj: The object to determine if the user has
permissions to publish.
:return: Boolean.
"""
# If auto-publishing is enabled, no user has "permission" to publish
# because it happens automatically
if is_automatic_publishing_enabled(self.model):
return False
user_obj = request.user
if not user_obj.is_active:
return False
if user_obj.is_superuser:
return True
# Normal user with `can_publish` permission can always publish
if user_obj.has_perm('%s.can_publish' % self.opts.app_label):
return True
# Normal user with `can_republish` permission can only publish if the
# item is already published.
if user_obj.has_perm('%s.can_republish' % self.opts.app_label) and \
obj and getattr(obj, 'has_been_published', False):
return True
# User does not meet any publishing permisison requirements; reject!
return False | [
"def",
"has_publish_permission",
"(",
"self",
",",
"request",
",",
"obj",
"=",
"None",
")",
":",
"# If auto-publishing is enabled, no user has \"permission\" to publish",
"# because it happens automatically",
"if",
"is_automatic_publishing_enabled",
"(",
"self",
".",
"model",
... | Determines if the user has permissions to publish.
:param request: Django request object.
:param obj: The object to determine if the user has
permissions to publish.
:return: Boolean. | [
"Determines",
"if",
"the",
"user",
"has",
"permissions",
"to",
"publish",
"."
] | c507ea5b1864303732c53ad7c5800571fca5fa94 | https://github.com/ic-labs/django-icekit/blob/c507ea5b1864303732c53ad7c5800571fca5fa94/icekit/publishing/admin.py#L283-L310 | train | 33,377 |
ic-labs/django-icekit | icekit/publishing/admin.py | _PublishingHelpersMixin.has_preview_permission | def has_preview_permission(self, request, obj=None):
"""
Return `True` if the user has permissions to preview a publishable
item.
NOTE: this method does not actually change who can or cannot preview
any particular item, just whether to show the preview link. The real
dcision is made by a combination of:
- `PublishingMiddleware` which chooses who can view draft content
- the view code for a particular item, which may or may not render
draft content for a specific user.
:param request: Django request object.
:param obj: The object the user would preview, if permitted.
:return: Boolean.
"""
# User who can publish always has preview permission.
if self.has_publish_permission(request, obj=obj):
return True
user_obj = request.user
if not user_obj.is_active:
return False
if user_obj.is_staff:
return True
return False | python | def has_preview_permission(self, request, obj=None):
"""
Return `True` if the user has permissions to preview a publishable
item.
NOTE: this method does not actually change who can or cannot preview
any particular item, just whether to show the preview link. The real
dcision is made by a combination of:
- `PublishingMiddleware` which chooses who can view draft content
- the view code for a particular item, which may or may not render
draft content for a specific user.
:param request: Django request object.
:param obj: The object the user would preview, if permitted.
:return: Boolean.
"""
# User who can publish always has preview permission.
if self.has_publish_permission(request, obj=obj):
return True
user_obj = request.user
if not user_obj.is_active:
return False
if user_obj.is_staff:
return True
return False | [
"def",
"has_preview_permission",
"(",
"self",
",",
"request",
",",
"obj",
"=",
"None",
")",
":",
"# User who can publish always has preview permission.",
"if",
"self",
".",
"has_publish_permission",
"(",
"request",
",",
"obj",
"=",
"obj",
")",
":",
"return",
"True... | Return `True` if the user has permissions to preview a publishable
item.
NOTE: this method does not actually change who can or cannot preview
any particular item, just whether to show the preview link. The real
dcision is made by a combination of:
- `PublishingMiddleware` which chooses who can view draft content
- the view code for a particular item, which may or may not render
draft content for a specific user.
:param request: Django request object.
:param obj: The object the user would preview, if permitted.
:return: Boolean. | [
"Return",
"True",
"if",
"the",
"user",
"has",
"permissions",
"to",
"preview",
"a",
"publishable",
"item",
"."
] | c507ea5b1864303732c53ad7c5800571fca5fa94 | https://github.com/ic-labs/django-icekit/blob/c507ea5b1864303732c53ad7c5800571fca5fa94/icekit/publishing/admin.py#L312-L337 | train | 33,378 |
ic-labs/django-icekit | icekit/publishing/admin.py | _PublishingHelpersMixin.publishing_column | def publishing_column(self, obj):
"""
Render publishing-related status icons and view links for display in
the admin.
"""
# TODO Hack to convert polymorphic objects to real instances
if hasattr(obj, 'get_real_instance'):
obj = obj.get_real_instance()
try:
object_url = obj.get_absolute_url()
except (NoReverseMatch, AttributeError):
object_url = ''
template_name = 'admin/publishing/_change_list_publishing_column.html'
t = loader.get_template(template_name)
c = Context({
'object': obj,
'object_url': object_url,
'has_publish_permission':
self.has_publish_permission(self.request, obj),
'has_preview_permission':
self.has_preview_permission(self.request, obj),
})
try:
if isinstance(obj, PublishingModel):
c['publish_url'] = reverse(
self.publish_reverse(type(obj)), args=(obj.pk, ))
c['unpublish_url'] = reverse(
self.unpublish_reverse(type(obj)), args=(obj.pk, ))
except NoReverseMatch:
pass
return t.render(c) | python | def publishing_column(self, obj):
"""
Render publishing-related status icons and view links for display in
the admin.
"""
# TODO Hack to convert polymorphic objects to real instances
if hasattr(obj, 'get_real_instance'):
obj = obj.get_real_instance()
try:
object_url = obj.get_absolute_url()
except (NoReverseMatch, AttributeError):
object_url = ''
template_name = 'admin/publishing/_change_list_publishing_column.html'
t = loader.get_template(template_name)
c = Context({
'object': obj,
'object_url': object_url,
'has_publish_permission':
self.has_publish_permission(self.request, obj),
'has_preview_permission':
self.has_preview_permission(self.request, obj),
})
try:
if isinstance(obj, PublishingModel):
c['publish_url'] = reverse(
self.publish_reverse(type(obj)), args=(obj.pk, ))
c['unpublish_url'] = reverse(
self.unpublish_reverse(type(obj)), args=(obj.pk, ))
except NoReverseMatch:
pass
return t.render(c) | [
"def",
"publishing_column",
"(",
"self",
",",
"obj",
")",
":",
"# TODO Hack to convert polymorphic objects to real instances",
"if",
"hasattr",
"(",
"obj",
",",
"'get_real_instance'",
")",
":",
"obj",
"=",
"obj",
".",
"get_real_instance",
"(",
")",
"try",
":",
"ob... | Render publishing-related status icons and view links for display in
the admin. | [
"Render",
"publishing",
"-",
"related",
"status",
"icons",
"and",
"view",
"links",
"for",
"display",
"in",
"the",
"admin",
"."
] | c507ea5b1864303732c53ad7c5800571fca5fa94 | https://github.com/ic-labs/django-icekit/blob/c507ea5b1864303732c53ad7c5800571fca5fa94/icekit/publishing/admin.py#L339-L371 | train | 33,379 |
ic-labs/django-icekit | icekit/publishing/admin.py | _PublishingHelpersMixin.publish | def publish(self, request, qs):
""" Publish bulk action """
# Convert polymorphic queryset instances to real ones if/when necessary
try:
qs = self.model.objects.get_real_instances(qs)
except AttributeError:
pass
for q in qs:
if self.has_publish_permission(request, q):
q.publish() | python | def publish(self, request, qs):
""" Publish bulk action """
# Convert polymorphic queryset instances to real ones if/when necessary
try:
qs = self.model.objects.get_real_instances(qs)
except AttributeError:
pass
for q in qs:
if self.has_publish_permission(request, q):
q.publish() | [
"def",
"publish",
"(",
"self",
",",
"request",
",",
"qs",
")",
":",
"# Convert polymorphic queryset instances to real ones if/when necessary",
"try",
":",
"qs",
"=",
"self",
".",
"model",
".",
"objects",
".",
"get_real_instances",
"(",
"qs",
")",
"except",
"Attrib... | Publish bulk action | [
"Publish",
"bulk",
"action"
] | c507ea5b1864303732c53ad7c5800571fca5fa94 | https://github.com/ic-labs/django-icekit/blob/c507ea5b1864303732c53ad7c5800571fca5fa94/icekit/publishing/admin.py#L375-L384 | train | 33,380 |
ic-labs/django-icekit | icekit/publishing/admin.py | _PublishingHelpersMixin.unpublish | def unpublish(self, request, qs):
""" Unpublish bulk action """
# Convert polymorphic queryset instances to real ones if/when necessary
try:
qs = self.model.objects.get_real_instances(qs)
except AttributeError:
pass
for q in qs:
q.unpublish() | python | def unpublish(self, request, qs):
""" Unpublish bulk action """
# Convert polymorphic queryset instances to real ones if/when necessary
try:
qs = self.model.objects.get_real_instances(qs)
except AttributeError:
pass
for q in qs:
q.unpublish() | [
"def",
"unpublish",
"(",
"self",
",",
"request",
",",
"qs",
")",
":",
"# Convert polymorphic queryset instances to real ones if/when necessary",
"try",
":",
"qs",
"=",
"self",
".",
"model",
".",
"objects",
".",
"get_real_instances",
"(",
"qs",
")",
"except",
"Attr... | Unpublish bulk action | [
"Unpublish",
"bulk",
"action"
] | c507ea5b1864303732c53ad7c5800571fca5fa94 | https://github.com/ic-labs/django-icekit/blob/c507ea5b1864303732c53ad7c5800571fca5fa94/icekit/publishing/admin.py#L386-L394 | train | 33,381 |
ic-labs/django-icekit | icekit/publishing/admin.py | PublishingAdmin.find_first_available_template | def find_first_available_template(self, template_name_list):
"""
Given a list of template names, find the first one that actually exists
and is available.
"""
if isinstance(template_name_list, six.string_types):
return template_name_list
else:
# Take advantage of fluent_pages' internal implementation
return _select_template_name(template_name_list) | python | def find_first_available_template(self, template_name_list):
"""
Given a list of template names, find the first one that actually exists
and is available.
"""
if isinstance(template_name_list, six.string_types):
return template_name_list
else:
# Take advantage of fluent_pages' internal implementation
return _select_template_name(template_name_list) | [
"def",
"find_first_available_template",
"(",
"self",
",",
"template_name_list",
")",
":",
"if",
"isinstance",
"(",
"template_name_list",
",",
"six",
".",
"string_types",
")",
":",
"return",
"template_name_list",
"else",
":",
"# Take advantage of fluent_pages' internal imp... | Given a list of template names, find the first one that actually exists
and is available. | [
"Given",
"a",
"list",
"of",
"template",
"names",
"find",
"the",
"first",
"one",
"that",
"actually",
"exists",
"and",
"is",
"available",
"."
] | c507ea5b1864303732c53ad7c5800571fca5fa94 | https://github.com/ic-labs/django-icekit/blob/c507ea5b1864303732c53ad7c5800571fca5fa94/icekit/publishing/admin.py#L436-L445 | train | 33,382 |
ic-labs/django-icekit | icekit/publishing/admin.py | PublishingAdmin.get_queryset | def get_queryset(self, request):
"""
The queryset to use for the administration list page.
:param request: Django request object.
:return: QuerySet.
"""
# TODO Can we remove this hack?
self.request = request
# Obtain the full queryset defined on the registered model.
qs = self.model.objects
# Determine if a specific language should be used and filter by it if
# required.
try:
qs_language = self.get_queryset_language(request)
if qs_language:
qs = qs.language(qs_language)
except AttributeError: # this isn't translatable
pass
# Use all draft object versions in the admin.
qs = self.publishing_admin_filter_for_drafts(qs)
# If ordering has been specified in the admin definition order by it.
ordering = getattr(self, 'ordering', None) or ()
if ordering:
qs = qs.order_by(*ordering)
# Return the filtered queryset.
return qs | python | def get_queryset(self, request):
"""
The queryset to use for the administration list page.
:param request: Django request object.
:return: QuerySet.
"""
# TODO Can we remove this hack?
self.request = request
# Obtain the full queryset defined on the registered model.
qs = self.model.objects
# Determine if a specific language should be used and filter by it if
# required.
try:
qs_language = self.get_queryset_language(request)
if qs_language:
qs = qs.language(qs_language)
except AttributeError: # this isn't translatable
pass
# Use all draft object versions in the admin.
qs = self.publishing_admin_filter_for_drafts(qs)
# If ordering has been specified in the admin definition order by it.
ordering = getattr(self, 'ordering', None) or ()
if ordering:
qs = qs.order_by(*ordering)
# Return the filtered queryset.
return qs | [
"def",
"get_queryset",
"(",
"self",
",",
"request",
")",
":",
"# TODO Can we remove this hack?",
"self",
".",
"request",
"=",
"request",
"# Obtain the full queryset defined on the registered model.",
"qs",
"=",
"self",
".",
"model",
".",
"objects",
"# Determine if a speci... | The queryset to use for the administration list page.
:param request: Django request object.
:return: QuerySet. | [
"The",
"queryset",
"to",
"use",
"for",
"the",
"administration",
"list",
"page",
"."
] | c507ea5b1864303732c53ad7c5800571fca5fa94 | https://github.com/ic-labs/django-icekit/blob/c507ea5b1864303732c53ad7c5800571fca5fa94/icekit/publishing/admin.py#L455-L486 | train | 33,383 |
ic-labs/django-icekit | icekit/publishing/admin.py | PublishingAdmin.save_related | def save_related(self, request, form, *args, **kwargs):
"""
Send the signal `publishing_post_save_related` when a draft copy is
saved and all its relationships have also been created.
"""
result = super(PublishingAdmin, self) \
.save_related(request, form, *args, **kwargs)
# Send signal that draft has been saved and all relationships created
if form.instance:
publishing_signals.publishing_post_save_related.send(
sender=type(self), instance=form.instance)
return result | python | def save_related(self, request, form, *args, **kwargs):
"""
Send the signal `publishing_post_save_related` when a draft copy is
saved and all its relationships have also been created.
"""
result = super(PublishingAdmin, self) \
.save_related(request, form, *args, **kwargs)
# Send signal that draft has been saved and all relationships created
if form.instance:
publishing_signals.publishing_post_save_related.send(
sender=type(self), instance=form.instance)
return result | [
"def",
"save_related",
"(",
"self",
",",
"request",
",",
"form",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"result",
"=",
"super",
"(",
"PublishingAdmin",
",",
"self",
")",
".",
"save_related",
"(",
"request",
",",
"form",
",",
"*",
"args"... | Send the signal `publishing_post_save_related` when a draft copy is
saved and all its relationships have also been created. | [
"Send",
"the",
"signal",
"publishing_post_save_related",
"when",
"a",
"draft",
"copy",
"is",
"saved",
"and",
"all",
"its",
"relationships",
"have",
"also",
"been",
"created",
"."
] | c507ea5b1864303732c53ad7c5800571fca5fa94 | https://github.com/ic-labs/django-icekit/blob/c507ea5b1864303732c53ad7c5800571fca5fa94/icekit/publishing/admin.py#L577-L588 | train | 33,384 |
ic-labs/django-icekit | icekit/publishing/admin.py | PublishingAdmin.render_change_form | def render_change_form(self, request, context, add=False, change=False,
form_url='', obj=None):
"""
Provides the context and rendering for the admin change form.
:param request: Django request object.
:param context: The context dictionary to be passed to the template.
:param add: Should the add form be rendered? Boolean input.
:param change: Should the change for be rendered? Boolean input.
:param form_url: The URL to use for the form submit action.
:param obj: An object the render change form is for.
"""
obj = context.get('original', None)
if obj:
context['object'] = obj
context['has_been_published'] = obj.has_been_published
context['is_dirty'] = obj.is_dirty
context['has_preview_permission'] = \
self.has_preview_permission(request, obj)
if not self.has_publish_permission(request, obj):
context['has_publish_permission'] = False
else:
context['has_publish_permission'] = True
try:
object_url = obj.get_absolute_url()
except (NoReverseMatch, AttributeError):
object_url = ''
# If the user has publishing permission and if there are
# changes which are to be published show the publish button
# with relevant URL.
publish_btn = None
if obj.is_dirty:
publish_btn = reverse(
self.publish_reverse(type(obj)), args=(obj.pk, ))
# If the user has publishing permission, a draft object and
# a published object show the unpublish button with relevant
# URL.
unpublish_btn = None
if obj.is_draft and obj.publishing_linked:
unpublish_btn = reverse(
self.unpublish_reverse(type(obj)), args=(obj.pk, ))
# If the user has publishing permission, the object has draft
# changes and a published version show a revert button to
# change back to the published information.
revert_btn = None
if obj.is_dirty and obj.publishing_linked:
revert_btn = reverse(self.revert_reverse, args=(obj.pk, ))
context.update({
'object_url': object_url,
'publish_btn': publish_btn,
'unpublish_btn': unpublish_btn,
'revert_btn': revert_btn,
})
# Make the original non-publishing template available to the publishing
# change form template, so it knows what base it extends.
# The `original_change_form_template` context variable does the same
# job for reversion's versioning change form template.
context.update({
'non_publishing_change_form_template':
self.non_publishing_change_form_template,
'original_change_form_template':
self.non_publishing_change_form_template,
})
# Hook to permit subclasses to modify context in change form
try:
self.update_context_for_render_change_form(context, obj)
except AttributeError:
pass
# Override this admin's change form template to point to the publishing
# admin page template, but only for long enough to render the change
# form.
if hasattr(type(self).change_form_template, '__get__') \
and isinstance(self.change_form_template, (list, tuple)):
# Special handling for class that provide multiple templates via,
# a `change_form_template` get-only property instead of the usual
# plain class attribute. In this case, find the best available
# template preferring publishing-specific templates and apply it
# via a context variable that should be respected by Fluent's base
# templates, which we are most likely using at this point.
opts = self.model._meta
app_label = opts.app_label
extra_change_form_templates = [
"admin/%s/%s/publishing_change_form.html" % (
app_label, opts.model_name),
"admin/%s/publishing_change_form.html" % app_label,
"admin/publishing/publishing_change_form.html"
]
context['default_change_form_template'] = \
self.find_first_available_template(
extra_change_form_templates + self.change_form_template)
# Directly overriding the `change_form_template` attr here is
# particularly messy since it can be a property getter, not
# a normal attr, and any normal way of overriding or updating the
# value will not work.
try:
self._change_form_template_getter = \
type(self).change_form_template.__get__
type(self).change_form_template = \
context['default_change_form_template']
has_change_form_attr_getter = True
except AttributeError:
# AttributeError presumably from `.__get__` hack above, in
# which case we don't need to worry about getters and can just
# do things the usual way
self.change_form_template = \
context['default_change_form_template']
has_change_form_attr_getter = False
else:
self.change_form_template = \
"admin/publishing/publishing_change_form.html"
has_change_form_attr_getter = False
response = super(PublishingAdmin, self).render_change_form(
request, context,
add=add, change=change, form_url=form_url, obj=obj)
# Reset change form template
if has_change_form_attr_getter:
type(self).change_form_template = \
self._change_form_template_getter
else:
self.change_form_template = \
self.non_publishing_change_form_template
return response | python | def render_change_form(self, request, context, add=False, change=False,
form_url='', obj=None):
"""
Provides the context and rendering for the admin change form.
:param request: Django request object.
:param context: The context dictionary to be passed to the template.
:param add: Should the add form be rendered? Boolean input.
:param change: Should the change for be rendered? Boolean input.
:param form_url: The URL to use for the form submit action.
:param obj: An object the render change form is for.
"""
obj = context.get('original', None)
if obj:
context['object'] = obj
context['has_been_published'] = obj.has_been_published
context['is_dirty'] = obj.is_dirty
context['has_preview_permission'] = \
self.has_preview_permission(request, obj)
if not self.has_publish_permission(request, obj):
context['has_publish_permission'] = False
else:
context['has_publish_permission'] = True
try:
object_url = obj.get_absolute_url()
except (NoReverseMatch, AttributeError):
object_url = ''
# If the user has publishing permission and if there are
# changes which are to be published show the publish button
# with relevant URL.
publish_btn = None
if obj.is_dirty:
publish_btn = reverse(
self.publish_reverse(type(obj)), args=(obj.pk, ))
# If the user has publishing permission, a draft object and
# a published object show the unpublish button with relevant
# URL.
unpublish_btn = None
if obj.is_draft and obj.publishing_linked:
unpublish_btn = reverse(
self.unpublish_reverse(type(obj)), args=(obj.pk, ))
# If the user has publishing permission, the object has draft
# changes and a published version show a revert button to
# change back to the published information.
revert_btn = None
if obj.is_dirty and obj.publishing_linked:
revert_btn = reverse(self.revert_reverse, args=(obj.pk, ))
context.update({
'object_url': object_url,
'publish_btn': publish_btn,
'unpublish_btn': unpublish_btn,
'revert_btn': revert_btn,
})
# Make the original non-publishing template available to the publishing
# change form template, so it knows what base it extends.
# The `original_change_form_template` context variable does the same
# job for reversion's versioning change form template.
context.update({
'non_publishing_change_form_template':
self.non_publishing_change_form_template,
'original_change_form_template':
self.non_publishing_change_form_template,
})
# Hook to permit subclasses to modify context in change form
try:
self.update_context_for_render_change_form(context, obj)
except AttributeError:
pass
# Override this admin's change form template to point to the publishing
# admin page template, but only for long enough to render the change
# form.
if hasattr(type(self).change_form_template, '__get__') \
and isinstance(self.change_form_template, (list, tuple)):
# Special handling for class that provide multiple templates via,
# a `change_form_template` get-only property instead of the usual
# plain class attribute. In this case, find the best available
# template preferring publishing-specific templates and apply it
# via a context variable that should be respected by Fluent's base
# templates, which we are most likely using at this point.
opts = self.model._meta
app_label = opts.app_label
extra_change_form_templates = [
"admin/%s/%s/publishing_change_form.html" % (
app_label, opts.model_name),
"admin/%s/publishing_change_form.html" % app_label,
"admin/publishing/publishing_change_form.html"
]
context['default_change_form_template'] = \
self.find_first_available_template(
extra_change_form_templates + self.change_form_template)
# Directly overriding the `change_form_template` attr here is
# particularly messy since it can be a property getter, not
# a normal attr, and any normal way of overriding or updating the
# value will not work.
try:
self._change_form_template_getter = \
type(self).change_form_template.__get__
type(self).change_form_template = \
context['default_change_form_template']
has_change_form_attr_getter = True
except AttributeError:
# AttributeError presumably from `.__get__` hack above, in
# which case we don't need to worry about getters and can just
# do things the usual way
self.change_form_template = \
context['default_change_form_template']
has_change_form_attr_getter = False
else:
self.change_form_template = \
"admin/publishing/publishing_change_form.html"
has_change_form_attr_getter = False
response = super(PublishingAdmin, self).render_change_form(
request, context,
add=add, change=change, form_url=form_url, obj=obj)
# Reset change form template
if has_change_form_attr_getter:
type(self).change_form_template = \
self._change_form_template_getter
else:
self.change_form_template = \
self.non_publishing_change_form_template
return response | [
"def",
"render_change_form",
"(",
"self",
",",
"request",
",",
"context",
",",
"add",
"=",
"False",
",",
"change",
"=",
"False",
",",
"form_url",
"=",
"''",
",",
"obj",
"=",
"None",
")",
":",
"obj",
"=",
"context",
".",
"get",
"(",
"'original'",
",",... | Provides the context and rendering for the admin change form.
:param request: Django request object.
:param context: The context dictionary to be passed to the template.
:param add: Should the add form be rendered? Boolean input.
:param change: Should the change for be rendered? Boolean input.
:param form_url: The URL to use for the form submit action.
:param obj: An object the render change form is for. | [
"Provides",
"the",
"context",
"and",
"rendering",
"for",
"the",
"admin",
"change",
"form",
"."
] | c507ea5b1864303732c53ad7c5800571fca5fa94 | https://github.com/ic-labs/django-icekit/blob/c507ea5b1864303732c53ad7c5800571fca5fa94/icekit/publishing/admin.py#L590-L724 | train | 33,385 |
ic-labs/django-icekit | icekit/publishing/admin.py | PublishingFluentPagesParentAdminMixin.get_queryset | def get_queryset(self, request):
"""
Show only DRAFT Fluent page items in admin.
NOTE: We rely on the `UrlNode.status` to recognise DRAFT versus
PUBLISHED objects, since there the top-level `UrlNode` model and
queryset don't know about ICEKit publishing.
"""
self.request = request
qs = super(PublishingFluentPagesParentAdminMixin, self) \
.get_queryset(request)
qs = qs.filter(status=UrlNode.DRAFT)
return qs | python | def get_queryset(self, request):
"""
Show only DRAFT Fluent page items in admin.
NOTE: We rely on the `UrlNode.status` to recognise DRAFT versus
PUBLISHED objects, since there the top-level `UrlNode` model and
queryset don't know about ICEKit publishing.
"""
self.request = request
qs = super(PublishingFluentPagesParentAdminMixin, self) \
.get_queryset(request)
qs = qs.filter(status=UrlNode.DRAFT)
return qs | [
"def",
"get_queryset",
"(",
"self",
",",
"request",
")",
":",
"self",
".",
"request",
"=",
"request",
"qs",
"=",
"super",
"(",
"PublishingFluentPagesParentAdminMixin",
",",
"self",
")",
".",
"get_queryset",
"(",
"request",
")",
"qs",
"=",
"qs",
".",
"filte... | Show only DRAFT Fluent page items in admin.
NOTE: We rely on the `UrlNode.status` to recognise DRAFT versus
PUBLISHED objects, since there the top-level `UrlNode` model and
queryset don't know about ICEKit publishing. | [
"Show",
"only",
"DRAFT",
"Fluent",
"page",
"items",
"in",
"admin",
"."
] | c507ea5b1864303732c53ad7c5800571fca5fa94 | https://github.com/ic-labs/django-icekit/blob/c507ea5b1864303732c53ad7c5800571fca5fa94/icekit/publishing/admin.py#L729-L742 | train | 33,386 |
ic-labs/django-icekit | icekit/management/commands/import_site_map.py | Command.find_existing_page | def find_existing_page(self, titles_hierarchy):
"""
Find and return existing page matching the given titles hierarchy
"""
# Step backwards through import doc's titles hierarchy with a parent
# count which is the offset from the current level to each ancestor
titles_filters = {'publishing_is_draft': True}
for parent_count, ancestor_title \
in enumerate(titles_hierarchy[::-1]):
# Convert import doc's ancestor title and parent count into
# a filter query of the form:
# translations__title=<entry (ancestor 0) title>
# parent__translations__title=<ancestor 1 title>
# parent__parent__translations__title=<ancestor 2 title>
# parent__parent__parent__translations__title=<ancestor 3 title>
parent_path = '__'.join(['parent'] * parent_count)
filter_name = '%s%stranslations__title' % (
parent_path, parent_path and '__' or '')
titles_filters[filter_name] = ancestor_title
# Return a corresponding page if one exists
return self.page_model.objects \
.filter(**titles_filters) \
.first() | python | def find_existing_page(self, titles_hierarchy):
"""
Find and return existing page matching the given titles hierarchy
"""
# Step backwards through import doc's titles hierarchy with a parent
# count which is the offset from the current level to each ancestor
titles_filters = {'publishing_is_draft': True}
for parent_count, ancestor_title \
in enumerate(titles_hierarchy[::-1]):
# Convert import doc's ancestor title and parent count into
# a filter query of the form:
# translations__title=<entry (ancestor 0) title>
# parent__translations__title=<ancestor 1 title>
# parent__parent__translations__title=<ancestor 2 title>
# parent__parent__parent__translations__title=<ancestor 3 title>
parent_path = '__'.join(['parent'] * parent_count)
filter_name = '%s%stranslations__title' % (
parent_path, parent_path and '__' or '')
titles_filters[filter_name] = ancestor_title
# Return a corresponding page if one exists
return self.page_model.objects \
.filter(**titles_filters) \
.first() | [
"def",
"find_existing_page",
"(",
"self",
",",
"titles_hierarchy",
")",
":",
"# Step backwards through import doc's titles hierarchy with a parent",
"# count which is the offset from the current level to each ancestor",
"titles_filters",
"=",
"{",
"'publishing_is_draft'",
":",
"True",
... | Find and return existing page matching the given titles hierarchy | [
"Find",
"and",
"return",
"existing",
"page",
"matching",
"the",
"given",
"titles",
"hierarchy"
] | c507ea5b1864303732c53ad7c5800571fca5fa94 | https://github.com/ic-labs/django-icekit/blob/c507ea5b1864303732c53ad7c5800571fca5fa94/icekit/management/commands/import_site_map.py#L49-L71 | train | 33,387 |
ic-labs/django-icekit | icekit/utils/search/facets.py | Facet.apply_request_and_page_to_values | def apply_request_and_page_to_values(self, request, page=None):
"""
Use the request and page config to figure out which values are active
"""
value_is_set = False
for value in self._values:
value.apply_request_and_page(request, page) | python | def apply_request_and_page_to_values(self, request, page=None):
"""
Use the request and page config to figure out which values are active
"""
value_is_set = False
for value in self._values:
value.apply_request_and_page(request, page) | [
"def",
"apply_request_and_page_to_values",
"(",
"self",
",",
"request",
",",
"page",
"=",
"None",
")",
":",
"value_is_set",
"=",
"False",
"for",
"value",
"in",
"self",
".",
"_values",
":",
"value",
".",
"apply_request_and_page",
"(",
"request",
",",
"page",
... | Use the request and page config to figure out which values are active | [
"Use",
"the",
"request",
"and",
"page",
"config",
"to",
"figure",
"out",
"which",
"values",
"are",
"active"
] | c507ea5b1864303732c53ad7c5800571fca5fa94 | https://github.com/ic-labs/django-icekit/blob/c507ea5b1864303732c53ad7c5800571fca5fa94/icekit/utils/search/facets.py#L81-L87 | train | 33,388 |
ic-labs/django-icekit | icekit/utils/search/facets.py | Facet.get_applicable_values | def get_applicable_values(self):
"""Return selected values that will affect the search result"""
return [v for v in self._values if v.is_active and not v.is_all_results] | python | def get_applicable_values(self):
"""Return selected values that will affect the search result"""
return [v for v in self._values if v.is_active and not v.is_all_results] | [
"def",
"get_applicable_values",
"(",
"self",
")",
":",
"return",
"[",
"v",
"for",
"v",
"in",
"self",
".",
"_values",
"if",
"v",
".",
"is_active",
"and",
"not",
"v",
".",
"is_all_results",
"]"
] | Return selected values that will affect the search result | [
"Return",
"selected",
"values",
"that",
"will",
"affect",
"the",
"search",
"result"
] | c507ea5b1864303732c53ad7c5800571fca5fa94 | https://github.com/ic-labs/django-icekit/blob/c507ea5b1864303732c53ad7c5800571fca5fa94/icekit/utils/search/facets.py#L118-L120 | train | 33,389 |
ic-labs/django-icekit | icekit/utils/search/facets.py | Facet.get_value | def get_value(self):
"""Returns the label of the first value"""
try:
return self.get_applicable_values()[0]
except IndexError:
if not self.select_many and self.get_values():
return self.get_values()[0] | python | def get_value(self):
"""Returns the label of the first value"""
try:
return self.get_applicable_values()[0]
except IndexError:
if not self.select_many and self.get_values():
return self.get_values()[0] | [
"def",
"get_value",
"(",
"self",
")",
":",
"try",
":",
"return",
"self",
".",
"get_applicable_values",
"(",
")",
"[",
"0",
"]",
"except",
"IndexError",
":",
"if",
"not",
"self",
".",
"select_many",
"and",
"self",
".",
"get_values",
"(",
")",
":",
"retu... | Returns the label of the first value | [
"Returns",
"the",
"label",
"of",
"the",
"first",
"value"
] | c507ea5b1864303732c53ad7c5800571fca5fa94 | https://github.com/ic-labs/django-icekit/blob/c507ea5b1864303732c53ad7c5800571fca5fa94/icekit/utils/search/facets.py#L122-L128 | train | 33,390 |
ic-labs/django-icekit | icekit/utils/search/facets.py | Facet.is_default | def is_default(self):
"""Return True if no active values, or if the active value is the default"""
if not self.get_applicable_values():
return True
if self.get_value().is_default:
return True
return False | python | def is_default(self):
"""Return True if no active values, or if the active value is the default"""
if not self.get_applicable_values():
return True
if self.get_value().is_default:
return True
return False | [
"def",
"is_default",
"(",
"self",
")",
":",
"if",
"not",
"self",
".",
"get_applicable_values",
"(",
")",
":",
"return",
"True",
"if",
"self",
".",
"get_value",
"(",
")",
".",
"is_default",
":",
"return",
"True",
"return",
"False"
] | Return True if no active values, or if the active value is the default | [
"Return",
"True",
"if",
"no",
"active",
"values",
"or",
"if",
"the",
"active",
"value",
"is",
"the",
"default"
] | c507ea5b1864303732c53ad7c5800571fca5fa94 | https://github.com/ic-labs/django-icekit/blob/c507ea5b1864303732c53ad7c5800571fca5fa94/icekit/utils/search/facets.py#L131-L139 | train | 33,391 |
ic-labs/django-icekit | icekit/abstract_models.py | AbstractBaseModel.save | def save(self, *args, **kwargs):
"""
Update ``self.modified``.
"""
self.modified = timezone.now()
super(AbstractBaseModel, self).save(*args, **kwargs) | python | def save(self, *args, **kwargs):
"""
Update ``self.modified``.
"""
self.modified = timezone.now()
super(AbstractBaseModel, self).save(*args, **kwargs) | [
"def",
"save",
"(",
"self",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"self",
".",
"modified",
"=",
"timezone",
".",
"now",
"(",
")",
"super",
"(",
"AbstractBaseModel",
",",
"self",
")",
".",
"save",
"(",
"*",
"args",
",",
"*",
"*",
... | Update ``self.modified``. | [
"Update",
"self",
".",
"modified",
"."
] | c507ea5b1864303732c53ad7c5800571fca5fa94 | https://github.com/ic-labs/django-icekit/blob/c507ea5b1864303732c53ad7c5800571fca5fa94/icekit/abstract_models.py#L32-L37 | train | 33,392 |
ic-labs/django-icekit | icekit/publishing/models.py | publishing_set_update_time | def publishing_set_update_time(sender, instance, **kwargs):
""" Update the time modified before saving a publishable object. """
if hasattr(instance, 'publishing_linked'):
# Hack to avoid updating `publishing_modified_at` field when a draft
# publishable item is saved as part of a `publish` operation. This
# ensures that the `publishing_published_at` timestamp is later than
# the `publishing_modified_at` timestamp when we publish, which is
# vital for us to correctly detect whether a draft is "dirty".
if getattr(instance, '_skip_update_publishing_modified_at', False):
# Reset flag, in case instance is re-used (e.g. in tests)
instance._skip_update_publishing_modified_at = False
return
instance.publishing_modified_at = timezone.now() | python | def publishing_set_update_time(sender, instance, **kwargs):
""" Update the time modified before saving a publishable object. """
if hasattr(instance, 'publishing_linked'):
# Hack to avoid updating `publishing_modified_at` field when a draft
# publishable item is saved as part of a `publish` operation. This
# ensures that the `publishing_published_at` timestamp is later than
# the `publishing_modified_at` timestamp when we publish, which is
# vital for us to correctly detect whether a draft is "dirty".
if getattr(instance, '_skip_update_publishing_modified_at', False):
# Reset flag, in case instance is re-used (e.g. in tests)
instance._skip_update_publishing_modified_at = False
return
instance.publishing_modified_at = timezone.now() | [
"def",
"publishing_set_update_time",
"(",
"sender",
",",
"instance",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"hasattr",
"(",
"instance",
",",
"'publishing_linked'",
")",
":",
"# Hack to avoid updating `publishing_modified_at` field when a draft",
"# publishable item is sav... | Update the time modified before saving a publishable object. | [
"Update",
"the",
"time",
"modified",
"before",
"saving",
"a",
"publishable",
"object",
"."
] | c507ea5b1864303732c53ad7c5800571fca5fa94 | https://github.com/ic-labs/django-icekit/blob/c507ea5b1864303732c53ad7c5800571fca5fa94/icekit/publishing/models.py#L724-L736 | train | 33,393 |
ic-labs/django-icekit | icekit/publishing/models.py | handle_publishable_m2m_changed | def handle_publishable_m2m_changed(
sender, instance, action, reverse, model, pk_set, **kwargs):
"""
Cache related published objects in `pre_clear` so they can be restored in
`post_clear`.
"""
# Do nothing if the target model is not publishable.
if not issubclass(model, PublishingModel):
return
# Get the right `ManyRelatedManager`. Iterate M2Ms and compare `sender`
# (the through model), in case there are multiple M2Ms to the same model.
if reverse:
for rel_obj in instance._meta.get_all_related_many_to_many_objects():
if rel_obj.field.rel.through == sender:
m2m = getattr(instance, rel_obj.get_accessor_name())
break
else:
for field in instance._meta.many_to_many:
if field.rel.through == sender:
m2m = getattr(instance, field.attname)
break
# Cache published PKs on the instance.
if action == 'pre_clear':
instance._published_m2m_cache = set(
m2m.filter(publishing_is_draft=False).values_list('pk', flat=True))
# Add published PKs from the cache.
if action == 'post_clear':
m2m.add(*instance._published_m2m_cache)
del instance._published_m2m_cache | python | def handle_publishable_m2m_changed(
sender, instance, action, reverse, model, pk_set, **kwargs):
"""
Cache related published objects in `pre_clear` so they can be restored in
`post_clear`.
"""
# Do nothing if the target model is not publishable.
if not issubclass(model, PublishingModel):
return
# Get the right `ManyRelatedManager`. Iterate M2Ms and compare `sender`
# (the through model), in case there are multiple M2Ms to the same model.
if reverse:
for rel_obj in instance._meta.get_all_related_many_to_many_objects():
if rel_obj.field.rel.through == sender:
m2m = getattr(instance, rel_obj.get_accessor_name())
break
else:
for field in instance._meta.many_to_many:
if field.rel.through == sender:
m2m = getattr(instance, field.attname)
break
# Cache published PKs on the instance.
if action == 'pre_clear':
instance._published_m2m_cache = set(
m2m.filter(publishing_is_draft=False).values_list('pk', flat=True))
# Add published PKs from the cache.
if action == 'post_clear':
m2m.add(*instance._published_m2m_cache)
del instance._published_m2m_cache | [
"def",
"handle_publishable_m2m_changed",
"(",
"sender",
",",
"instance",
",",
"action",
",",
"reverse",
",",
"model",
",",
"pk_set",
",",
"*",
"*",
"kwargs",
")",
":",
"# Do nothing if the target model is not publishable.",
"if",
"not",
"issubclass",
"(",
"model",
... | Cache related published objects in `pre_clear` so they can be restored in
`post_clear`. | [
"Cache",
"related",
"published",
"objects",
"in",
"pre_clear",
"so",
"they",
"can",
"be",
"restored",
"in",
"post_clear",
"."
] | c507ea5b1864303732c53ad7c5800571fca5fa94 | https://github.com/ic-labs/django-icekit/blob/c507ea5b1864303732c53ad7c5800571fca5fa94/icekit/publishing/models.py#L740-L768 | train | 33,394 |
ic-labs/django-icekit | icekit/publishing/models.py | sync_mptt_tree_fields_from_draft_to_published_post_save | def sync_mptt_tree_fields_from_draft_to_published_post_save(
sender, instance, **kwargs):
"""
Post save trigger to immediately sync MPTT tree structure field changes
made to draft copies to their corresponding published copy.
"""
mptt_opts = getattr(instance, '_mptt_meta', None)
published_copy = getattr(instance, 'publishing_linked', None)
if mptt_opts and published_copy:
sync_mptt_tree_fields_from_draft_to_published(instance) | python | def sync_mptt_tree_fields_from_draft_to_published_post_save(
sender, instance, **kwargs):
"""
Post save trigger to immediately sync MPTT tree structure field changes
made to draft copies to their corresponding published copy.
"""
mptt_opts = getattr(instance, '_mptt_meta', None)
published_copy = getattr(instance, 'publishing_linked', None)
if mptt_opts and published_copy:
sync_mptt_tree_fields_from_draft_to_published(instance) | [
"def",
"sync_mptt_tree_fields_from_draft_to_published_post_save",
"(",
"sender",
",",
"instance",
",",
"*",
"*",
"kwargs",
")",
":",
"mptt_opts",
"=",
"getattr",
"(",
"instance",
",",
"'_mptt_meta'",
",",
"None",
")",
"published_copy",
"=",
"getattr",
"(",
"instan... | Post save trigger to immediately sync MPTT tree structure field changes
made to draft copies to their corresponding published copy. | [
"Post",
"save",
"trigger",
"to",
"immediately",
"sync",
"MPTT",
"tree",
"structure",
"field",
"changes",
"made",
"to",
"draft",
"copies",
"to",
"their",
"corresponding",
"published",
"copy",
"."
] | c507ea5b1864303732c53ad7c5800571fca5fa94 | https://github.com/ic-labs/django-icekit/blob/c507ea5b1864303732c53ad7c5800571fca5fa94/icekit/publishing/models.py#L780-L789 | train | 33,395 |
ic-labs/django-icekit | icekit/publishing/models.py | sync_mptt_tree_fields_from_draft_to_published | def sync_mptt_tree_fields_from_draft_to_published(
draft_copy, dry_run=False, force_update_cached_urls=False):
"""
Sync tree structure changes from a draft publishable object to its
published copy, and updates the published copy's Fluent cached URLs when
necessary. Or simulates doing this if ``dry_run`` is ``True``.
Syncs both actual structural changes (i.e. different parent) and MPTT's
fields which are a cached representation (and may or may not be correct).
"""
mptt_opts = getattr(draft_copy, '_mptt_meta', None)
published_copy = getattr(draft_copy, 'publishing_linked', None)
if not mptt_opts or not published_copy:
return {}
# Identify changed values and prepare dict of changes to apply to DB
parent_changed = draft_copy.parent != published_copy.parent
update_kwargs = {
mptt_opts.parent_attr: draft_copy._mpttfield('parent'),
mptt_opts.tree_id_attr: draft_copy._mpttfield('tree_id'),
mptt_opts.left_attr: draft_copy._mpttfield('left'),
mptt_opts.right_attr: draft_copy._mpttfield('right'),
mptt_opts.level_attr: draft_copy._mpttfield('level'),
}
# Strip out DB update entries for unchanged or invalid tree fields
update_kwargs = dict(
(field, value) for field, value in update_kwargs.items()
if getattr(draft_copy, field) != getattr(published_copy, field)
# Only parent may be None, never set tree_id/left/right/level to None
and not (field != 'parent' and value is None)
)
change_report = []
for field, new_value in update_kwargs.items():
old_value = getattr(published_copy, field)
change_report.append((draft_copy, field, old_value, new_value))
# Forcibly update MPTT field values via UPDATE commands instead of normal
# model attr changes, which MPTT ignores when you `save`
if update_kwargs and not dry_run:
type(published_copy).objects.filter(pk=published_copy.pk).update(
**update_kwargs)
# If real tree structure (not just MPTT fields) has changed we must
# regenerate the cached URLs for published copy translations.
if parent_changed or force_update_cached_urls:
# Make our local published obj aware of DB change made by `update`
published_copy.parent = draft_copy.parent
# Regenerate the cached URLs for published copy translations.
change_report += \
update_fluent_cached_urls(published_copy, dry_run=dry_run)
return change_report | python | def sync_mptt_tree_fields_from_draft_to_published(
draft_copy, dry_run=False, force_update_cached_urls=False):
"""
Sync tree structure changes from a draft publishable object to its
published copy, and updates the published copy's Fluent cached URLs when
necessary. Or simulates doing this if ``dry_run`` is ``True``.
Syncs both actual structural changes (i.e. different parent) and MPTT's
fields which are a cached representation (and may or may not be correct).
"""
mptt_opts = getattr(draft_copy, '_mptt_meta', None)
published_copy = getattr(draft_copy, 'publishing_linked', None)
if not mptt_opts or not published_copy:
return {}
# Identify changed values and prepare dict of changes to apply to DB
parent_changed = draft_copy.parent != published_copy.parent
update_kwargs = {
mptt_opts.parent_attr: draft_copy._mpttfield('parent'),
mptt_opts.tree_id_attr: draft_copy._mpttfield('tree_id'),
mptt_opts.left_attr: draft_copy._mpttfield('left'),
mptt_opts.right_attr: draft_copy._mpttfield('right'),
mptt_opts.level_attr: draft_copy._mpttfield('level'),
}
# Strip out DB update entries for unchanged or invalid tree fields
update_kwargs = dict(
(field, value) for field, value in update_kwargs.items()
if getattr(draft_copy, field) != getattr(published_copy, field)
# Only parent may be None, never set tree_id/left/right/level to None
and not (field != 'parent' and value is None)
)
change_report = []
for field, new_value in update_kwargs.items():
old_value = getattr(published_copy, field)
change_report.append((draft_copy, field, old_value, new_value))
# Forcibly update MPTT field values via UPDATE commands instead of normal
# model attr changes, which MPTT ignores when you `save`
if update_kwargs and not dry_run:
type(published_copy).objects.filter(pk=published_copy.pk).update(
**update_kwargs)
# If real tree structure (not just MPTT fields) has changed we must
# regenerate the cached URLs for published copy translations.
if parent_changed or force_update_cached_urls:
# Make our local published obj aware of DB change made by `update`
published_copy.parent = draft_copy.parent
# Regenerate the cached URLs for published copy translations.
change_report += \
update_fluent_cached_urls(published_copy, dry_run=dry_run)
return change_report | [
"def",
"sync_mptt_tree_fields_from_draft_to_published",
"(",
"draft_copy",
",",
"dry_run",
"=",
"False",
",",
"force_update_cached_urls",
"=",
"False",
")",
":",
"mptt_opts",
"=",
"getattr",
"(",
"draft_copy",
",",
"'_mptt_meta'",
",",
"None",
")",
"published_copy",
... | Sync tree structure changes from a draft publishable object to its
published copy, and updates the published copy's Fluent cached URLs when
necessary. Or simulates doing this if ``dry_run`` is ``True``.
Syncs both actual structural changes (i.e. different parent) and MPTT's
fields which are a cached representation (and may or may not be correct). | [
"Sync",
"tree",
"structure",
"changes",
"from",
"a",
"draft",
"publishable",
"object",
"to",
"its",
"published",
"copy",
"and",
"updates",
"the",
"published",
"copy",
"s",
"Fluent",
"cached",
"URLs",
"when",
"necessary",
".",
"Or",
"simulates",
"doing",
"this"... | c507ea5b1864303732c53ad7c5800571fca5fa94 | https://github.com/ic-labs/django-icekit/blob/c507ea5b1864303732c53ad7c5800571fca5fa94/icekit/publishing/models.py#L792-L843 | train | 33,396 |
ic-labs/django-icekit | icekit/publishing/models.py | create_can_publish_and_can_republish_permissions | def create_can_publish_and_can_republish_permissions(sender, **kwargs):
"""
Add `can_publish` and `ca_nrepublish` permissions for each publishable
model in the system.
"""
for model in sender.get_models():
if not issubclass(model, PublishingModel):
continue
content_type = ContentType.objects.get_for_model(model)
permission, created = Permission.objects.get_or_create(
content_type=content_type, codename='can_publish',
defaults=dict(name='Can Publish %s' % model.__name__))
permission, created = Permission.objects.get_or_create(
content_type=content_type, codename='can_republish',
defaults=dict(name='Can Republish %s' % model.__name__)) | python | def create_can_publish_and_can_republish_permissions(sender, **kwargs):
"""
Add `can_publish` and `ca_nrepublish` permissions for each publishable
model in the system.
"""
for model in sender.get_models():
if not issubclass(model, PublishingModel):
continue
content_type = ContentType.objects.get_for_model(model)
permission, created = Permission.objects.get_or_create(
content_type=content_type, codename='can_publish',
defaults=dict(name='Can Publish %s' % model.__name__))
permission, created = Permission.objects.get_or_create(
content_type=content_type, codename='can_republish',
defaults=dict(name='Can Republish %s' % model.__name__)) | [
"def",
"create_can_publish_and_can_republish_permissions",
"(",
"sender",
",",
"*",
"*",
"kwargs",
")",
":",
"for",
"model",
"in",
"sender",
".",
"get_models",
"(",
")",
":",
"if",
"not",
"issubclass",
"(",
"model",
",",
"PublishingModel",
")",
":",
"continue"... | Add `can_publish` and `ca_nrepublish` permissions for each publishable
model in the system. | [
"Add",
"can_publish",
"and",
"ca_nrepublish",
"permissions",
"for",
"each",
"publishable",
"model",
"in",
"the",
"system",
"."
] | c507ea5b1864303732c53ad7c5800571fca5fa94 | https://github.com/ic-labs/django-icekit/blob/c507ea5b1864303732c53ad7c5800571fca5fa94/icekit/publishing/models.py#L896-L910 | train | 33,397 |
ic-labs/django-icekit | icekit/publishing/models.py | maybe_automatically_publish_drafts_on_save | def maybe_automatically_publish_drafts_on_save(sender, instance, **kwargs):
"""
If automatic publishing is enabled, immediately publish a draft copy after
it has been saved.
"""
# Skip processing if auto-publishing is not enabled
if not is_automatic_publishing_enabled(sender):
return
# Skip missing or unpublishable instances
if not instance or not hasattr(instance, 'publishing_linked'):
return
# Ignore saves of published copies
if instance.is_published:
return
# Ignore saves of already-published draft copies
if not instance.is_dirty:
return
# Immediately publish saved draft copy
instance.publish() | python | def maybe_automatically_publish_drafts_on_save(sender, instance, **kwargs):
"""
If automatic publishing is enabled, immediately publish a draft copy after
it has been saved.
"""
# Skip processing if auto-publishing is not enabled
if not is_automatic_publishing_enabled(sender):
return
# Skip missing or unpublishable instances
if not instance or not hasattr(instance, 'publishing_linked'):
return
# Ignore saves of published copies
if instance.is_published:
return
# Ignore saves of already-published draft copies
if not instance.is_dirty:
return
# Immediately publish saved draft copy
instance.publish() | [
"def",
"maybe_automatically_publish_drafts_on_save",
"(",
"sender",
",",
"instance",
",",
"*",
"*",
"kwargs",
")",
":",
"# Skip processing if auto-publishing is not enabled",
"if",
"not",
"is_automatic_publishing_enabled",
"(",
"sender",
")",
":",
"return",
"# Skip missing ... | If automatic publishing is enabled, immediately publish a draft copy after
it has been saved. | [
"If",
"automatic",
"publishing",
"is",
"enabled",
"immediately",
"publish",
"a",
"draft",
"copy",
"after",
"it",
"has",
"been",
"saved",
"."
] | c507ea5b1864303732c53ad7c5800571fca5fa94 | https://github.com/ic-labs/django-icekit/blob/c507ea5b1864303732c53ad7c5800571fca5fa94/icekit/publishing/models.py#L914-L932 | train | 33,398 |
ic-labs/django-icekit | icekit/publishing/models.py | PublishingModel.has_been_published | def has_been_published(self):
"""
Return True if the item is either published itself, or has an
associated published copy.
This is in contrast to ``is_published`` which only returns True if
the specific object is a published copy, and will return False for
a draft object that has an associated published copy.
"""
if self.is_published:
return True
elif self.is_draft:
return self.publishing_linked_id is not None
raise ValueError( # pragma: no cover
"Publishable object %r is neither draft nor published" % self) | python | def has_been_published(self):
"""
Return True if the item is either published itself, or has an
associated published copy.
This is in contrast to ``is_published`` which only returns True if
the specific object is a published copy, and will return False for
a draft object that has an associated published copy.
"""
if self.is_published:
return True
elif self.is_draft:
return self.publishing_linked_id is not None
raise ValueError( # pragma: no cover
"Publishable object %r is neither draft nor published" % self) | [
"def",
"has_been_published",
"(",
"self",
")",
":",
"if",
"self",
".",
"is_published",
":",
"return",
"True",
"elif",
"self",
".",
"is_draft",
":",
"return",
"self",
".",
"publishing_linked_id",
"is",
"not",
"None",
"raise",
"ValueError",
"(",
"# pragma: no co... | Return True if the item is either published itself, or has an
associated published copy.
This is in contrast to ``is_published`` which only returns True if
the specific object is a published copy, and will return False for
a draft object that has an associated published copy. | [
"Return",
"True",
"if",
"the",
"item",
"is",
"either",
"published",
"itself",
"or",
"has",
"an",
"associated",
"published",
"copy",
"."
] | c507ea5b1864303732c53ad7c5800571fca5fa94 | https://github.com/ic-labs/django-icekit/blob/c507ea5b1864303732c53ad7c5800571fca5fa94/icekit/publishing/models.py#L127-L141 | train | 33,399 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.