repo stringlengths 7 90 | file_url stringlengths 81 315 | file_path stringlengths 4 228 | content stringlengths 0 32.8k | language stringclasses 1
value | license stringclasses 7
values | commit_sha stringlengths 40 40 | retrieved_at stringdate 2026-01-04 14:38:15 2026-01-05 02:33:18 | truncated bool 2
classes |
|---|---|---|---|---|---|---|---|---|
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/django/contrib/gis/db/backends/__init__.py | django/contrib/gis/db/backends/__init__.py | python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false | |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/django/contrib/gis/db/backends/oracle/operations.py | django/contrib/gis/db/backends/oracle/operations.py | """
This module contains the spatial lookup types, and the `get_geo_where_clause`
routine for Oracle Spatial.
Please note that WKT support is broken on the XE version, and thus
this backend will not work on such platforms. Specifically, XE lacks
support for an internal JVM, and Java libraries are required to use
the WKT constructors.
"""
import re
from django.contrib.gis.db import models
from django.contrib.gis.db.backends.base.operations import BaseSpatialOperations
from django.contrib.gis.db.backends.oracle.adapter import OracleSpatialAdapter
from django.contrib.gis.db.backends.utils import SpatialOperator
from django.contrib.gis.geos.geometry import GEOSGeometry, GEOSGeometryBase
from django.contrib.gis.geos.prototypes.io import wkb_r
from django.contrib.gis.measure import Distance
from django.db.backends.oracle.operations import DatabaseOperations
from django.utils.functional import cached_property
DEFAULT_TOLERANCE = "0.05"
class SDOOperator(SpatialOperator):
sql_template = "%(func)s(%(lhs)s, %(rhs)s) = 'TRUE'"
class SDODWithin(SpatialOperator):
sql_template = "SDO_WITHIN_DISTANCE(%(lhs)s, %(rhs)s, %%s) = 'TRUE'"
class SDODisjoint(SpatialOperator):
sql_template = (
"SDO_GEOM.RELATE(%%(lhs)s, 'DISJOINT', %%(rhs)s, %s) = 'DISJOINT'"
% DEFAULT_TOLERANCE
)
class SDORelate(SpatialOperator):
sql_template = "SDO_RELATE(%(lhs)s, %(rhs)s, 'mask=%(mask)s') = 'TRUE'"
def check_relate_argument(self, arg):
masks = (
"TOUCH|OVERLAPBDYDISJOINT|OVERLAPBDYINTERSECT|EQUAL|INSIDE|COVEREDBY|"
"CONTAINS|COVERS|ANYINTERACT|ON"
)
mask_regex = re.compile(r"^(%s)(\+(%s))*$" % (masks, masks), re.I)
if not isinstance(arg, str) or not mask_regex.match(arg):
raise ValueError('Invalid SDO_RELATE mask: "%s"' % arg)
def as_sql(self, connection, lookup, template_params, sql_params):
template_params["mask"] = sql_params[-1]
return super().as_sql(connection, lookup, template_params, sql_params[:-1])
class OracleOperations(BaseSpatialOperations, DatabaseOperations):
name = "oracle"
oracle = True
disallowed_aggregates = (models.Collect, models.Extent3D, models.MakeLine)
Adapter = OracleSpatialAdapter
extent = "SDO_AGGR_MBR"
unionagg = "SDO_AGGR_UNION"
from_text = "SDO_GEOMETRY"
function_names = {
"Area": "SDO_GEOM.SDO_AREA",
"AsGeoJSON": "SDO_UTIL.TO_GEOJSON",
"AsWKB": "SDO_UTIL.TO_WKBGEOMETRY",
"AsWKT": "SDO_UTIL.TO_WKTGEOMETRY",
"BoundingCircle": "SDO_GEOM.SDO_MBC",
"Centroid": "SDO_GEOM.SDO_CENTROID",
"Difference": "SDO_GEOM.SDO_DIFFERENCE",
"Distance": "SDO_GEOM.SDO_DISTANCE",
"Envelope": "SDO_GEOM_MBR",
"FromWKB": "SDO_UTIL.FROM_WKBGEOMETRY",
"FromWKT": "SDO_UTIL.FROM_WKTGEOMETRY",
"Intersection": "SDO_GEOM.SDO_INTERSECTION",
"IsValid": "SDO_GEOM.VALIDATE_GEOMETRY_WITH_CONTEXT",
"Length": "SDO_GEOM.SDO_LENGTH",
"NumGeometries": "SDO_UTIL.GETNUMELEM",
"NumPoints": "SDO_UTIL.GETNUMVERTICES",
"Perimeter": "SDO_GEOM.SDO_LENGTH",
"PointOnSurface": "SDO_GEOM.SDO_POINTONSURFACE",
"Reverse": "SDO_UTIL.REVERSE_LINESTRING",
"SymDifference": "SDO_GEOM.SDO_XOR",
"Transform": "SDO_CS.TRANSFORM",
"Union": "SDO_GEOM.SDO_UNION",
}
# We want to get SDO Geometries as WKT because it is much easier to
# instantiate GEOS proxies from WKT than SDO_GEOMETRY(...) strings.
# However, this adversely affects performance (i.e., Java is called
# to convert to WKT on every query). If someone wishes to write a
# SDO_GEOMETRY(...) parser in Python, let me know =)
select = "SDO_UTIL.TO_WKBGEOMETRY(%s)"
gis_operators = {
"contains": SDOOperator(func="SDO_CONTAINS"),
"coveredby": SDOOperator(func="SDO_COVEREDBY"),
"covers": SDOOperator(func="SDO_COVERS"),
"disjoint": SDODisjoint(),
"intersects": SDOOperator(
func="SDO_OVERLAPBDYINTERSECT"
), # TODO: Is this really the same as ST_Intersects()?
"equals": SDOOperator(func="SDO_EQUAL"),
"exact": SDOOperator(func="SDO_EQUAL"),
"overlaps": SDOOperator(func="SDO_OVERLAPS"),
"same_as": SDOOperator(func="SDO_EQUAL"),
# Oracle uses a different syntax, e.g., 'mask=inside+touch'
"relate": SDORelate(),
"touches": SDOOperator(func="SDO_TOUCH"),
"within": SDOOperator(func="SDO_INSIDE"),
"dwithin": SDODWithin(),
}
@cached_property
def unsupported_functions(self):
unsupported = {
"AsKML",
"AsSVG",
"Azimuth",
"ClosestPoint",
"ForcePolygonCW",
"GeoHash",
"GeometryDistance",
"IsEmpty",
"LineLocatePoint",
"MakeValid",
"MemSize",
"NumDimensions",
"Rotate",
"Scale",
"SnapToGrid",
"Translate",
}
if self.connection.oracle_version < (23,):
unsupported.add("GeometryType")
return unsupported
def geo_quote_name(self, name):
return super().geo_quote_name(name).upper()
def convert_extent(self, clob):
if clob:
# Generally, Oracle returns a polygon for the extent -- however,
# it can return a single point if there's only one Point in the
# table.
ext_geom = GEOSGeometry(memoryview(clob.read()))
gtype = str(ext_geom.geom_type)
if gtype == "Polygon":
# Construct the 4-tuple from the coordinates in the polygon.
shell = ext_geom.shell
ll, ur = shell[0][:2], shell[2][:2]
elif gtype == "Point":
ll = ext_geom.coords[:2]
ur = ll
else:
raise Exception(
"Unexpected geometry type returned for extent: %s" % gtype
)
xmin, ymin = ll
xmax, ymax = ur
return (xmin, ymin, xmax, ymax)
else:
return None
def geo_db_type(self, f):
"""
Return the geometry database type for Oracle. Unlike other spatial
backends, no stored procedure is necessary and it's the same for all
geometry types.
"""
return "MDSYS.SDO_GEOMETRY"
def get_distance(self, f, value, lookup_type):
"""
Return the distance parameters given the value and the lookup type.
On Oracle, geometry columns with a geodetic coordinate system behave
implicitly like a geography column, and thus meters will be used as
the distance parameter on them.
"""
if not value:
return []
value = value[0]
if isinstance(value, Distance):
if f.geodetic(self.connection):
dist_param = value.m
else:
dist_param = getattr(
value, Distance.unit_attname(f.units_name(self.connection))
)
else:
dist_param = value
# dwithin lookups on Oracle require a special string parameter
# that starts with "distance=".
if lookup_type == "dwithin":
dist_param = "distance=%s" % dist_param
return [dist_param]
def get_geom_placeholder(self, f, value, compiler):
if value is None:
return "NULL"
return super().get_geom_placeholder(f, value, compiler)
def spatial_aggregate_name(self, agg_name):
"""
Return the spatial aggregate SQL name.
"""
agg_name = "unionagg" if agg_name.lower() == "union" else agg_name.lower()
return getattr(self, agg_name)
# Routines for getting the OGC-compliant models.
def geometry_columns(self):
from django.contrib.gis.db.backends.oracle.models import OracleGeometryColumns
return OracleGeometryColumns
def spatial_ref_sys(self):
from django.contrib.gis.db.backends.oracle.models import OracleSpatialRefSys
return OracleSpatialRefSys
def modify_insert_params(self, placeholder, params):
"""Drop out insert parameters for NULL placeholder. Needed for Oracle
Spatial backend due to #10888.
"""
if placeholder == "NULL":
return []
return super().modify_insert_params(placeholder, params)
def get_geometry_converter(self, expression):
read = wkb_r().read
srid = expression.output_field.srid
if srid == -1:
srid = None
geom_class = expression.output_field.geom_class
def converter(value, expression, connection):
if value is not None:
geom = GEOSGeometryBase(read(memoryview(value.read())), geom_class)
if srid:
geom.srid = srid
return geom
return converter
def get_area_att_for_field(self, field):
return "sq_m"
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/django/contrib/gis/db/backends/oracle/schema.py | django/contrib/gis/db/backends/oracle/schema.py | from django.contrib.gis.db.models import GeometryField
from django.db.backends.oracle.schema import DatabaseSchemaEditor
from django.db.backends.utils import strip_quotes, truncate_name
class OracleGISSchemaEditor(DatabaseSchemaEditor):
sql_add_geometry_metadata = """
INSERT INTO USER_SDO_GEOM_METADATA
("TABLE_NAME", "COLUMN_NAME", "DIMINFO", "SRID")
VALUES (
%(table)s,
%(column)s,
MDSYS.SDO_DIM_ARRAY(
MDSYS.SDO_DIM_ELEMENT('LONG', %(dim0)s, %(dim2)s, %(tolerance)s),
MDSYS.SDO_DIM_ELEMENT('LAT', %(dim1)s, %(dim3)s, %(tolerance)s)
),
%(srid)s
)"""
sql_add_spatial_index = (
"CREATE INDEX %(index)s ON %(table)s(%(column)s) "
"INDEXTYPE IS MDSYS.SPATIAL_INDEX"
)
sql_clear_geometry_table_metadata = (
"DELETE FROM USER_SDO_GEOM_METADATA WHERE TABLE_NAME = %(table)s"
)
sql_clear_geometry_field_metadata = (
"DELETE FROM USER_SDO_GEOM_METADATA WHERE TABLE_NAME = %(table)s "
"AND COLUMN_NAME = %(column)s"
)
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.geometry_sql = []
def geo_quote_name(self, name):
return self.connection.ops.geo_quote_name(name)
def quote_value(self, value):
if isinstance(value, self.connection.ops.Adapter):
return super().quote_value(str(value))
return super().quote_value(value)
def _field_indexes_sql(self, model, field):
if isinstance(field, GeometryField) and field.spatial_index:
return [self._create_spatial_index_sql(model, field)]
return super()._field_indexes_sql(model, field)
def column_sql(self, model, field, include_default=False):
column_sql = super().column_sql(model, field, include_default)
if isinstance(field, GeometryField):
self.geometry_sql.append(
self.sql_add_geometry_metadata
% {
"table": self.geo_quote_name(model._meta.db_table),
"column": self.geo_quote_name(field.column),
"dim0": field._extent[0],
"dim1": field._extent[1],
"dim2": field._extent[2],
"dim3": field._extent[3],
"tolerance": field._tolerance,
"srid": field.srid,
}
)
return column_sql
def create_model(self, model):
super().create_model(model)
self.run_geometry_sql()
def delete_model(self, model):
super().delete_model(model)
self.execute(
self.sql_clear_geometry_table_metadata
% {
"table": self.geo_quote_name(model._meta.db_table),
}
)
def add_field(self, model, field):
super().add_field(model, field)
self.run_geometry_sql()
def remove_field(self, model, field):
if isinstance(field, GeometryField):
self.execute(
self.sql_clear_geometry_field_metadata
% {
"table": self.geo_quote_name(model._meta.db_table),
"column": self.geo_quote_name(field.column),
}
)
if field.spatial_index:
self.execute(self._delete_spatial_index_sql(model, field))
super().remove_field(model, field)
def run_geometry_sql(self):
for sql in self.geometry_sql:
self.execute(sql)
self.geometry_sql = []
def _alter_field(
self,
model,
old_field,
new_field,
old_type,
new_type,
old_db_params,
new_db_params,
strict=False,
):
super()._alter_field(
model,
old_field,
new_field,
old_type,
new_type,
old_db_params,
new_db_params,
strict=strict,
)
old_field_spatial_index = (
isinstance(old_field, GeometryField) and old_field.spatial_index
)
new_field_spatial_index = (
isinstance(new_field, GeometryField) and new_field.spatial_index
)
if not old_field_spatial_index and new_field_spatial_index:
self.execute(self._create_spatial_index_sql(model, new_field))
elif old_field_spatial_index and not new_field_spatial_index:
self.execute(self._delete_spatial_index_sql(model, old_field))
def _create_spatial_index_name(self, model, field):
# Oracle doesn't allow object names > 30 characters. Use this scheme
# instead of self._create_index_name() for backwards compatibility.
return truncate_name(
"%s_%s_id" % (strip_quotes(model._meta.db_table), field.column), 30
)
def _create_spatial_index_sql(self, model, field):
index_name = self._create_spatial_index_name(model, field)
return self.sql_add_spatial_index % {
"index": self.quote_name(index_name),
"table": self.quote_name(model._meta.db_table),
"column": self.quote_name(field.column),
}
def _delete_spatial_index_sql(self, model, field):
index_name = self._create_spatial_index_name(model, field)
return self._delete_index_sql(model, index_name)
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/django/contrib/gis/db/backends/oracle/models.py | django/contrib/gis/db/backends/oracle/models.py | """
The GeometryColumns and SpatialRefSys models for the Oracle spatial
backend.
It should be noted that Oracle Spatial does not have database tables
named according to the OGC standard, so the closest analogs are used.
For example, the `USER_SDO_GEOM_METADATA` is used for the GeometryColumns
model and the `SDO_COORD_REF_SYS` is used for the SpatialRefSys model.
"""
from django.contrib.gis.db import models
from django.contrib.gis.db.backends.base.models import SpatialRefSysMixin
class OracleGeometryColumns(models.Model):
"Maps to the Oracle USER_SDO_GEOM_METADATA table."
table_name = models.CharField(max_length=32)
column_name = models.CharField(max_length=1024)
srid = models.IntegerField(primary_key=True)
# TODO: Add support for `diminfo` column (type MDSYS.SDO_DIM_ARRAY).
class Meta:
app_label = "gis"
db_table = "USER_SDO_GEOM_METADATA"
managed = False
def __str__(self):
return "%s - %s (SRID: %s)" % (self.table_name, self.column_name, self.srid)
@classmethod
def table_name_col(cls):
"""
Return the name of the metadata column used to store the feature table
name.
"""
return "table_name"
@classmethod
def geom_col_name(cls):
"""
Return the name of the metadata column used to store the feature
geometry column.
"""
return "column_name"
class OracleSpatialRefSys(models.Model, SpatialRefSysMixin):
"Maps to the Oracle MDSYS.CS_SRS table."
cs_name = models.CharField(max_length=68)
srid = models.IntegerField(primary_key=True)
auth_srid = models.IntegerField()
auth_name = models.CharField(max_length=256)
wktext = models.CharField(max_length=2046)
# Optional geometry representing the bounds of this coordinate
# system. By default, all are NULL in the table.
cs_bounds = models.PolygonField(null=True)
class Meta:
app_label = "gis"
db_table = "CS_SRS"
managed = False
@property
def wkt(self):
return self.wktext
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/django/contrib/gis/db/backends/oracle/introspection.py | django/contrib/gis/db/backends/oracle/introspection.py | import oracledb
from django.db.backends.oracle.introspection import DatabaseIntrospection
from django.utils.functional import cached_property
class OracleIntrospection(DatabaseIntrospection):
# Associating any OBJECTVAR instances with GeometryField. This won't work
# right on Oracle objects that aren't MDSYS.SDO_GEOMETRY, but it is the
# only object type supported within Django anyways.
@cached_property
def data_types_reverse(self):
return {
**super().data_types_reverse,
oracledb.DB_TYPE_OBJECT: "GeometryField",
}
def get_geometry_type(self, table_name, description):
with self.connection.cursor() as cursor:
# Querying USER_SDO_GEOM_METADATA to get the SRID and dimension
# information.
try:
cursor.execute(
'SELECT "DIMINFO", "SRID" FROM "USER_SDO_GEOM_METADATA" '
'WHERE "TABLE_NAME"=%s AND "COLUMN_NAME"=%s',
(table_name.upper(), description.name.upper()),
)
row = cursor.fetchone()
except Exception as exc:
raise Exception(
"Could not find entry in USER_SDO_GEOM_METADATA "
'corresponding to "%s"."%s"' % (table_name, description.name)
) from exc
# TODO: Research way to find a more specific geometry field type
# for the column's contents.
field_type = "GeometryField"
# Getting the field parameters.
field_params = {}
dim, srid = row
if srid != 4326:
field_params["srid"] = srid
# Size of object array (SDO_DIM_ARRAY) is number of dimensions.
dim = dim.size()
if dim != 2:
field_params["dim"] = dim
return field_type, field_params
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/django/contrib/gis/db/backends/oracle/__init__.py | django/contrib/gis/db/backends/oracle/__init__.py | python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false | |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/django/contrib/gis/db/backends/oracle/base.py | django/contrib/gis/db/backends/oracle/base.py | from django.db.backends.oracle.base import DatabaseWrapper as OracleDatabaseWrapper
from .features import DatabaseFeatures
from .introspection import OracleIntrospection
from .operations import OracleOperations
from .schema import OracleGISSchemaEditor
class DatabaseWrapper(OracleDatabaseWrapper):
SchemaEditorClass = OracleGISSchemaEditor
# Classes instantiated in __init__().
features_class = DatabaseFeatures
introspection_class = OracleIntrospection
ops_class = OracleOperations
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/django/contrib/gis/db/backends/oracle/adapter.py | django/contrib/gis/db/backends/oracle/adapter.py | import oracledb
from django.contrib.gis.db.backends.base.adapter import WKTAdapter
from django.contrib.gis.geos import GeometryCollection, Polygon
class OracleSpatialAdapter(WKTAdapter):
input_size = oracledb.CLOB
def __init__(self, geom):
"""
Oracle requires that polygon rings are in proper orientation. This
affects spatial operations and an invalid orientation may cause
failures. Correct orientations are:
* Outer ring - counter clockwise
* Inner ring(s) - clockwise
"""
if isinstance(geom, Polygon):
if self._polygon_must_be_fixed(geom):
geom = self._fix_polygon(geom)
elif isinstance(geom, GeometryCollection):
if any(
isinstance(g, Polygon) and self._polygon_must_be_fixed(g) for g in geom
):
geom = self._fix_geometry_collection(geom)
self.wkt = geom.wkt
self.srid = geom.srid
@staticmethod
def _polygon_must_be_fixed(poly):
return not poly.empty and (
not poly.exterior_ring.is_counterclockwise
or any(x.is_counterclockwise for x in poly)
)
@classmethod
def _fix_polygon(cls, poly, clone=True):
"""Fix single polygon orientation as described in __init__()."""
if clone:
poly = poly.clone()
if not poly.exterior_ring.is_counterclockwise:
poly.exterior_ring = list(reversed(poly.exterior_ring))
for i in range(1, len(poly)):
if poly[i].is_counterclockwise:
poly[i] = list(reversed(poly[i]))
return poly
@classmethod
def _fix_geometry_collection(cls, coll):
"""
Fix polygon orientations in geometry collections as described in
__init__().
"""
coll = coll.clone()
for i, geom in enumerate(coll):
if isinstance(geom, Polygon):
coll[i] = cls._fix_polygon(geom, clone=False)
return coll
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/django/contrib/gis/db/backends/oracle/features.py | django/contrib/gis/db/backends/oracle/features.py | from django.contrib.gis.db.backends.base.features import BaseSpatialFeatures
from django.db.backends.oracle.features import (
DatabaseFeatures as OracleDatabaseFeatures,
)
from django.utils.functional import cached_property
class DatabaseFeatures(BaseSpatialFeatures, OracleDatabaseFeatures):
supports_add_srs_entry = False
supports_geometry_field_introspection = False
supports_geometry_field_unique_index = False
supports_perimeter_geodetic = True
supports_dwithin_distance_expr = False
supports_tolerance_parameter = True
unsupported_geojson_options = {"bbox", "crs", "precision"}
@cached_property
def django_test_skips(self):
skips = super().django_test_skips
skips.update(
{
"Oracle doesn't support spatial operators in constraints.": {
"gis_tests.gis_migrations.test_operations.OperationTests."
"test_add_check_constraint",
},
}
)
return skips
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/django/contrib/gis/db/backends/postgis/operations.py | django/contrib/gis/db/backends/postgis/operations.py | import re
from django.conf import settings
from django.contrib.gis.db.backends.base.operations import BaseSpatialOperations
from django.contrib.gis.db.backends.utils import SpatialOperator
from django.contrib.gis.db.models import GeometryField, RasterField
from django.contrib.gis.gdal import GDALRaster
from django.contrib.gis.geos.geometry import GEOSGeometryBase
from django.contrib.gis.geos.prototypes.io import wkb_r
from django.contrib.gis.measure import Distance
from django.core.exceptions import ImproperlyConfigured
from django.db import NotSupportedError, ProgrammingError
from django.db.backends.postgresql.operations import DatabaseOperations
from django.db.backends.postgresql.psycopg_any import is_psycopg3
from django.db.models import Func, Value
from django.utils.functional import cached_property
from django.utils.version import get_version_tuple
from .adapter import PostGISAdapter
from .models import PostGISGeometryColumns, PostGISSpatialRefSys
from .pgraster import from_pgraster
# Identifier to mark raster lookups as bilateral.
BILATERAL = "bilateral"
class PostGISOperator(SpatialOperator):
def __init__(self, geography=False, raster=False, **kwargs):
# Only a subset of the operators and functions are available for the
# geography type. Lookups that don't support geography will be cast to
# geometry.
self.geography = geography
# Only a subset of the operators and functions are available for the
# raster type. Lookups that don't support raster will be converted to
# polygons. If the raster argument is set to BILATERAL, then the
# operator cannot handle mixed geom-raster lookups.
self.raster = raster
super().__init__(**kwargs)
def as_sql(self, connection, lookup, template_params, *args):
template_params = self.check_raster(lookup, template_params)
template_params = self.check_geography(lookup, template_params)
return super().as_sql(connection, lookup, template_params, *args)
def check_raster(self, lookup, template_params):
spheroid = lookup.rhs_params and lookup.rhs_params[-1] == "spheroid"
# Check which input is a raster.
lhs_is_raster = lookup.lhs.field.geom_type == "RASTER"
rhs_is_raster = isinstance(lookup.rhs, GDALRaster)
# Look for band indices and inject them if provided.
if lookup.band_lhs is not None and lhs_is_raster:
if not self.func:
raise ValueError(
"Band indices are not allowed for this operator, it works on bbox "
"only."
)
template_params["lhs"] = "%s, %s" % (
template_params["lhs"],
lookup.band_lhs,
)
if lookup.band_rhs is not None and rhs_is_raster:
if not self.func:
raise ValueError(
"Band indices are not allowed for this operator, it works on bbox "
"only."
)
template_params["rhs"] = "%s, %s" % (
template_params["rhs"],
lookup.band_rhs,
)
# Convert rasters to polygons if necessary.
if not self.raster or spheroid:
# Operators without raster support.
if lhs_is_raster:
template_params["lhs"] = "ST_Polygon(%s)" % template_params["lhs"]
if rhs_is_raster:
template_params["rhs"] = "ST_Polygon(%s)" % template_params["rhs"]
elif self.raster == BILATERAL:
# Operators with raster support but don't support mixed (rast-geom)
# lookups.
if lhs_is_raster and not rhs_is_raster:
template_params["lhs"] = "ST_Polygon(%s)" % template_params["lhs"]
elif rhs_is_raster and not lhs_is_raster:
template_params["rhs"] = "ST_Polygon(%s)" % template_params["rhs"]
return template_params
def check_geography(self, lookup, template_params):
"""Convert geography fields to geometry types, if necessary."""
if lookup.lhs.output_field.geography and not self.geography:
template_params["lhs"] += "::geometry"
return template_params
class ST_Polygon(Func):
function = "ST_Polygon"
def __init__(self, expr):
super().__init__(expr)
expr = self.source_expressions[0]
if isinstance(expr, Value) and not expr._output_field_or_none:
self.source_expressions[0] = Value(
expr.value, output_field=RasterField(srid=expr.value.srid)
)
@cached_property
def output_field(self):
return GeometryField(srid=self.source_expressions[0].field.srid)
class PostGISOperations(BaseSpatialOperations, DatabaseOperations):
name = "postgis"
postgis = True
geom_func_prefix = "ST_"
Adapter = PostGISAdapter
collect = geom_func_prefix + "Collect"
extent = geom_func_prefix + "Extent"
extent3d = geom_func_prefix + "3DExtent"
length3d = geom_func_prefix + "3DLength"
makeline = geom_func_prefix + "MakeLine"
perimeter3d = geom_func_prefix + "3DPerimeter"
unionagg = geom_func_prefix + "Union"
gis_operators = {
"bbcontains": PostGISOperator(op="~", raster=True),
"bboverlaps": PostGISOperator(op="&&", geography=True, raster=True),
"contained": PostGISOperator(op="@", raster=True),
"overlaps_left": PostGISOperator(op="&<", raster=BILATERAL),
"overlaps_right": PostGISOperator(op="&>", raster=BILATERAL),
"overlaps_below": PostGISOperator(op="&<|"),
"overlaps_above": PostGISOperator(op="|&>"),
"left": PostGISOperator(op="<<"),
"right": PostGISOperator(op=">>"),
"strictly_below": PostGISOperator(op="<<|"),
"strictly_above": PostGISOperator(op="|>>"),
"same_as": PostGISOperator(op="~=", raster=BILATERAL),
"exact": PostGISOperator(op="~=", raster=BILATERAL), # alias of same_as
"contains": PostGISOperator(func="ST_Contains", raster=BILATERAL),
"contains_properly": PostGISOperator(
func="ST_ContainsProperly", raster=BILATERAL
),
"coveredby": PostGISOperator(
func="ST_CoveredBy", geography=True, raster=BILATERAL
),
"covers": PostGISOperator(func="ST_Covers", geography=True, raster=BILATERAL),
"crosses": PostGISOperator(func="ST_Crosses"),
"disjoint": PostGISOperator(func="ST_Disjoint", raster=BILATERAL),
"equals": PostGISOperator(func="ST_Equals"),
"intersects": PostGISOperator(
func="ST_Intersects", geography=True, raster=BILATERAL
),
"overlaps": PostGISOperator(func="ST_Overlaps", raster=BILATERAL),
"relate": PostGISOperator(func="ST_Relate"),
"touches": PostGISOperator(func="ST_Touches", raster=BILATERAL),
"within": PostGISOperator(func="ST_Within", raster=BILATERAL),
"dwithin": PostGISOperator(func="ST_DWithin", geography=True, raster=BILATERAL),
}
unsupported_functions = set()
select = "%s" if is_psycopg3 else "%s::bytea"
select_extent = None
@cached_property
def function_names(self):
function_names = {
"AsWKB": "ST_AsBinary",
"AsWKT": "ST_AsText",
"BoundingCircle": "ST_MinimumBoundingCircle",
"FromWKB": "ST_GeomFromWKB",
"FromWKT": "ST_GeomFromText",
"NumDimensions": "ST_NDims",
"NumPoints": "ST_NPoints",
"GeometryType": "GeometryType",
}
return function_names
@cached_property
def spatial_version(self):
"""Determine the version of the PostGIS library."""
# Trying to get the PostGIS version because the function
# signatures will depend on the version used. The cost
# here is a database query to determine the version, which
# can be mitigated by setting `POSTGIS_VERSION` with a 3-tuple
# comprising user-supplied values for the major, minor, and
# subminor revision of PostGIS.
if hasattr(settings, "POSTGIS_VERSION"):
version = settings.POSTGIS_VERSION
else:
# Run a basic query to check the status of the connection so we're
# sure we only raise the error below if the problem comes from
# PostGIS and not from PostgreSQL itself (see #24862).
self._get_postgis_func("version")
try:
vtup = self.postgis_version_tuple()
except ProgrammingError:
raise ImproperlyConfigured(
'Cannot determine PostGIS version for database "%s" '
'using command "SELECT postgis_lib_version()". '
"GeoDjango requires at least PostGIS version 3.2. "
"Was the database created from a spatial database "
"template?" % self.connection.settings_dict["NAME"]
)
version = vtup[1:]
return version
def convert_extent(self, box):
"""
Return a 4-tuple extent for the `Extent` aggregate by converting
the bounding box text returned by PostGIS (`box` argument), for
example: "BOX(-90.0 30.0, -85.0 40.0)".
"""
if box is None:
return None
ll, ur = box[4:-1].split(",")
xmin, ymin = map(float, ll.split())
xmax, ymax = map(float, ur.split())
return (xmin, ymin, xmax, ymax)
def convert_extent3d(self, box3d):
"""
Return a 6-tuple extent for the `Extent3D` aggregate by converting
the 3d bounding-box text returned by PostGIS (`box3d` argument), for
example: "BOX3D(-90.0 30.0 1, -85.0 40.0 2)".
"""
if box3d is None:
return None
ll, ur = box3d[6:-1].split(",")
xmin, ymin, zmin = map(float, ll.split())
xmax, ymax, zmax = map(float, ur.split())
return (xmin, ymin, zmin, xmax, ymax, zmax)
def geo_db_type(self, f):
"""
Return the database field type for the given spatial field.
"""
if f.geom_type == "RASTER":
return "raster"
# Type-based geometries.
# TODO: Support 'M' extension.
if f.dim == 3:
geom_type = f.geom_type + "Z"
else:
geom_type = f.geom_type
if f.geography:
if f.srid != 4326:
raise NotSupportedError(
"PostGIS only supports geography columns with an SRID of 4326."
)
return "geography(%s,%d)" % (geom_type, f.srid)
else:
return "geometry(%s,%d)" % (geom_type, f.srid)
def get_distance(self, f, dist_val, lookup_type):
"""
Retrieve the distance parameters for the given geometry field,
distance lookup value, and the distance lookup type.
This is the most complex implementation of the spatial backends due to
what is supported on geodetic geometry columns vs. what's available on
projected geometry columns. In addition, it has to take into account
the geography column type.
"""
# Getting the distance parameter
value = dist_val[0]
# Shorthand boolean flags.
geodetic = f.geodetic(self.connection)
geography = f.geography
if isinstance(value, Distance):
if geography:
dist_param = value.m
elif geodetic:
if lookup_type == "dwithin":
raise ValueError(
"Only numeric values of degree units are "
"allowed on geographic DWithin queries."
)
dist_param = value.m
else:
dist_param = getattr(
value, Distance.unit_attname(f.units_name(self.connection))
)
else:
# Assuming the distance is in the units of the field.
dist_param = value
return [dist_param]
def get_geom_placeholder(self, f, value, compiler):
"""
Provide a proper substitution value for Geometries or rasters that are
not in the SRID of the field. Specifically, this routine will
substitute in the ST_Transform() function call.
"""
transform_func = self.spatial_function_name("Transform")
if hasattr(value, "as_sql"):
if value.field.srid == f.srid:
placeholder = "%s"
else:
placeholder = "%s(%%s, %s)" % (transform_func, f.srid)
return placeholder
# Get the srid for this object
if value is None:
value_srid = None
else:
value_srid = value.srid
# Adding Transform() to the SQL placeholder if the value srid
# is not equal to the field srid.
if value_srid is None or value_srid == f.srid:
placeholder = "%s"
else:
placeholder = "%s(%%s, %s)" % (transform_func, f.srid)
return placeholder
def _get_postgis_func(self, func):
"""
Helper routine for calling PostGIS functions and returning their
result.
"""
# Close out the connection. See #9437.
with self.connection.temporary_connection() as cursor:
cursor.execute("SELECT %s()" % func)
return cursor.fetchone()[0]
def postgis_geos_version(self):
"Return the version of the GEOS library used with PostGIS."
return self._get_postgis_func("postgis_geos_version")
def postgis_lib_version(self):
"""
Return the version number of the PostGIS library used with PostgreSQL.
"""
return self._get_postgis_func("postgis_lib_version")
def postgis_proj_version(self):
"""Return the version of the PROJ library used with PostGIS."""
return self._get_postgis_func("postgis_proj_version")
def postgis_version(self):
"Return PostGIS version number and compile-time options."
return self._get_postgis_func("postgis_version")
def postgis_full_version(self):
"Return PostGIS version number and compile-time options."
return self._get_postgis_func("postgis_full_version")
def postgis_version_tuple(self):
"""
Return the PostGIS version as a tuple (version string, major,
minor, subminor).
"""
version = self.postgis_lib_version()
return (version, *get_version_tuple(version))
def proj_version_tuple(self):
"""
Return the version of PROJ used by PostGIS as a tuple of the
major, minor, and subminor release numbers.
"""
proj_regex = re.compile(r"(\d+)\.(\d+)\.(\d+)")
proj_ver_str = self.postgis_proj_version()
m = proj_regex.search(proj_ver_str)
if m:
return tuple(map(int, m.groups()))
else:
raise Exception("Could not determine PROJ version from PostGIS.")
def spatial_aggregate_name(self, agg_name):
if agg_name == "Extent3D":
return self.extent3d
else:
return self.geom_func_prefix + agg_name
# Routines for getting the OGC-compliant models.
def geometry_columns(self):
return PostGISGeometryColumns
def spatial_ref_sys(self):
return PostGISSpatialRefSys
def parse_raster(self, value):
"""Convert a PostGIS HEX String into a dict readable by GDALRaster."""
return from_pgraster(value)
def distance_expr_for_lookup(self, lhs, rhs, **kwargs):
return super().distance_expr_for_lookup(
self._normalize_distance_lookup_arg(lhs),
self._normalize_distance_lookup_arg(rhs),
**kwargs,
)
@staticmethod
def _normalize_distance_lookup_arg(arg):
is_raster = (
arg.field.geom_type == "RASTER"
if hasattr(arg, "field")
else isinstance(arg, GDALRaster)
)
return ST_Polygon(arg) if is_raster else arg
def get_geometry_converter(self, expression):
read = wkb_r().read
geom_class = expression.output_field.geom_class
def converter(value, expression, connection):
if isinstance(value, str): # Coming from hex strings.
value = value.encode("ascii")
return None if value is None else GEOSGeometryBase(read(value), geom_class)
return converter
def get_area_att_for_field(self, field):
return "sq_m"
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/django/contrib/gis/db/backends/postgis/pgraster.py | django/contrib/gis/db/backends/postgis/pgraster.py | import struct
from django.core.exceptions import ValidationError
from .const import (
BANDTYPE_FLAG_HASNODATA,
BANDTYPE_PIXTYPE_MASK,
GDAL_TO_POSTGIS,
GDAL_TO_STRUCT,
POSTGIS_HEADER_STRUCTURE,
POSTGIS_TO_GDAL,
STRUCT_SIZE,
)
def pack(structure, data):
"""
Pack data into hex string with little endian format.
"""
return struct.pack("<" + structure, *data)
def unpack(structure, data):
"""
Unpack little endian hexlified binary string into a list.
"""
return struct.unpack("<" + structure, bytes.fromhex(data))
def chunk(data, index):
"""
Split a string into two parts at the input index.
"""
return data[:index], data[index:]
def from_pgraster(data):
"""
Convert a PostGIS HEX String into a dictionary.
"""
if data is None:
return
# Split raster header from data
header, data = chunk(data, 122)
header = unpack(POSTGIS_HEADER_STRUCTURE, header)
# Parse band data
bands = []
pixeltypes = []
while data:
# Get pixel type for this band
pixeltype_with_flags, data = chunk(data, 2)
pixeltype_with_flags = unpack("B", pixeltype_with_flags)[0]
pixeltype = pixeltype_with_flags & BANDTYPE_PIXTYPE_MASK
# Convert datatype from PostGIS to GDAL & get pack type and size
pixeltype = POSTGIS_TO_GDAL[pixeltype]
pack_type = GDAL_TO_STRUCT[pixeltype]
pack_size = 2 * STRUCT_SIZE[pack_type]
# Parse band nodata value. The nodata value is part of the
# PGRaster string even if the nodata flag is True, so it always
# has to be chunked off the data string.
nodata, data = chunk(data, pack_size)
nodata = unpack(pack_type, nodata)[0]
# Chunk and unpack band data (pack size times nr of pixels)
band, data = chunk(data, pack_size * header[10] * header[11])
band_result = {"data": bytes.fromhex(band)}
# Set the nodata value if the nodata flag is set.
if pixeltype_with_flags & BANDTYPE_FLAG_HASNODATA:
band_result["nodata_value"] = nodata
# Append band data to band list
bands.append(band_result)
# Store pixeltype of this band in pixeltypes array
pixeltypes.append(pixeltype)
# Check that all bands have the same pixeltype.
# This is required by GDAL. PostGIS rasters could have different pixeltypes
# for bands of the same raster.
if len(set(pixeltypes)) != 1:
raise ValidationError("Band pixeltypes are not all equal.")
return {
"srid": int(header[9]),
"width": header[10],
"height": header[11],
"datatype": pixeltypes[0],
"origin": (header[5], header[6]),
"scale": (header[3], header[4]),
"skew": (header[7], header[8]),
"bands": bands,
}
def to_pgraster(rast):
"""
Convert a GDALRaster into PostGIS Raster format.
"""
# Prepare the raster header data as a tuple. The first two numbers are
# the endianness and the PostGIS Raster Version, both are fixed by
# PostGIS at the moment.
rasterheader = (
1,
0,
len(rast.bands),
rast.scale.x,
rast.scale.y,
rast.origin.x,
rast.origin.y,
rast.skew.x,
rast.skew.y,
rast.srs.srid,
rast.width,
rast.height,
)
# Pack raster header.
result = pack(POSTGIS_HEADER_STRUCTURE, rasterheader)
for band in rast.bands:
# The PostGIS raster band header has exactly two elements, a 8BUI byte
# and the nodata value.
#
# The 8BUI stores both the PostGIS pixel data type and a nodata flag.
# It is composed as the datatype with BANDTYPE_FLAG_HASNODATA (1 << 6)
# for existing nodata values:
# 8BUI_VALUE = PG_PIXEL_TYPE (0-11) | BANDTYPE_FLAG_HASNODATA
#
# For example, if the byte value is 71, then the datatype is
# 71 & ~BANDTYPE_FLAG_HASNODATA = 7 (32BSI)
# and the nodata value is True.
structure = "B" + GDAL_TO_STRUCT[band.datatype()]
# Get band pixel type in PostGIS notation
pixeltype = GDAL_TO_POSTGIS[band.datatype()]
# Set the nodata flag
if band.nodata_value is not None:
pixeltype |= BANDTYPE_FLAG_HASNODATA
# Pack band header
bandheader = pack(structure, (pixeltype, band.nodata_value or 0))
# Add packed header and band data to result
result += bandheader + band.data(as_memoryview=True)
return result
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/django/contrib/gis/db/backends/postgis/schema.py | django/contrib/gis/db/backends/postgis/schema.py | from django.contrib.gis.db.models import GeometryField
from django.db.backends.postgresql.schema import DatabaseSchemaEditor
from django.db.models.expressions import Col, Func
class PostGISSchemaEditor(DatabaseSchemaEditor):
geom_index_type = "GIST"
geom_index_ops_nd = "GIST_GEOMETRY_OPS_ND"
rast_index_template = "ST_ConvexHull(%(expressions)s)"
sql_alter_column_to_3d = (
"ALTER COLUMN %(column)s TYPE %(type)s USING ST_Force3D(%(column)s)::%(type)s"
)
sql_alter_column_to_2d = (
"ALTER COLUMN %(column)s TYPE %(type)s USING ST_Force2D(%(column)s)::%(type)s"
)
def geo_quote_name(self, name):
return self.connection.ops.geo_quote_name(name)
def _field_should_be_indexed(self, model, field):
if getattr(field, "spatial_index", False):
return True
return super()._field_should_be_indexed(model, field)
def _create_index_sql(self, model, *, fields=None, **kwargs):
if fields is None or len(fields) != 1 or not hasattr(fields[0], "geodetic"):
return super()._create_index_sql(model, fields=fields, **kwargs)
return self._create_spatial_index_sql(model, fields[0], **kwargs)
def _alter_column_type_sql(
self, table, old_field, new_field, new_type, old_collation, new_collation
):
"""
Special case when dimension changed.
"""
if not hasattr(old_field, "dim") or not hasattr(new_field, "dim"):
return super()._alter_column_type_sql(
table, old_field, new_field, new_type, old_collation, new_collation
)
if old_field.dim == 2 and new_field.dim == 3:
sql_alter = self.sql_alter_column_to_3d
elif old_field.dim == 3 and new_field.dim == 2:
sql_alter = self.sql_alter_column_to_2d
else:
sql_alter = self.sql_alter_column_type
return (
(
sql_alter
% {
"column": self.quote_name(new_field.column),
"type": new_type,
"collation": "",
},
[],
),
[],
)
def _alter_field(
self,
model,
old_field,
new_field,
old_type,
new_type,
old_db_params,
new_db_params,
strict=False,
):
super()._alter_field(
model,
old_field,
new_field,
old_type,
new_type,
old_db_params,
new_db_params,
strict=strict,
)
old_field_spatial_index = (
isinstance(old_field, GeometryField) and old_field.spatial_index
)
new_field_spatial_index = (
isinstance(new_field, GeometryField) and new_field.spatial_index
)
if not old_field_spatial_index and new_field_spatial_index:
self.execute(self._create_spatial_index_sql(model, new_field))
elif old_field_spatial_index and not new_field_spatial_index:
self.execute(self._delete_spatial_index_sql(model, old_field))
def _create_spatial_index_name(self, model, field):
return self._create_index_name(model._meta.db_table, [field.column], "_id")
def _create_spatial_index_sql(self, model, field, **kwargs):
expressions = None
opclasses = None
fields = [field]
if field.geom_type == "RASTER":
# For raster fields, wrap index creation SQL statement with
# ST_ConvexHull. Indexes on raster columns are based on the convex
# hull of the raster.
expressions = Func(Col(None, field), template=self.rast_index_template)
fields = None
elif field.dim > 2 and not field.geography:
# Use "nd" ops which are fast on multidimensional cases
opclasses = [self.geom_index_ops_nd]
if not (name := kwargs.get("name")):
name = self._create_spatial_index_name(model, field)
return super()._create_index_sql(
model,
fields=fields,
name=name,
using=" USING %s" % self.geom_index_type,
opclasses=opclasses,
expressions=expressions,
)
def _delete_spatial_index_sql(self, model, field):
index_name = self._create_spatial_index_name(model, field)
return self._delete_index_sql(model, index_name)
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/django/contrib/gis/db/backends/postgis/models.py | django/contrib/gis/db/backends/postgis/models.py | """
The GeometryColumns and SpatialRefSys models for the PostGIS backend.
"""
from django.contrib.gis.db.backends.base.models import SpatialRefSysMixin
from django.db import models
class PostGISGeometryColumns(models.Model):
"""
The 'geometry_columns' view from PostGIS. See the PostGIS
documentation at Ch. 4.3.2.
"""
f_table_catalog = models.CharField(max_length=256)
f_table_schema = models.CharField(max_length=256)
f_table_name = models.CharField(max_length=256)
f_geometry_column = models.CharField(max_length=256)
coord_dimension = models.IntegerField()
srid = models.IntegerField(primary_key=True)
type = models.CharField(max_length=30)
class Meta:
app_label = "gis"
db_table = "geometry_columns"
managed = False
def __str__(self):
return "%s.%s - %dD %s field (SRID: %d)" % (
self.f_table_name,
self.f_geometry_column,
self.coord_dimension,
self.type,
self.srid,
)
@classmethod
def table_name_col(cls):
"""
Return the name of the metadata column used to store the feature table
name.
"""
return "f_table_name"
@classmethod
def geom_col_name(cls):
"""
Return the name of the metadata column used to store the feature
geometry column.
"""
return "f_geometry_column"
class PostGISSpatialRefSys(models.Model, SpatialRefSysMixin):
"""
The 'spatial_ref_sys' table from PostGIS. See the PostGIS
documentation at Ch. 4.2.1.
"""
srid = models.IntegerField(primary_key=True)
auth_name = models.CharField(max_length=256)
auth_srid = models.IntegerField()
srtext = models.CharField(max_length=2048)
proj4text = models.CharField(max_length=2048)
class Meta:
app_label = "gis"
db_table = "spatial_ref_sys"
managed = False
@property
def wkt(self):
return self.srtext
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/django/contrib/gis/db/backends/postgis/const.py | django/contrib/gis/db/backends/postgis/const.py | """
PostGIS to GDAL conversion constant definitions
"""
# Lookup to convert pixel type values from GDAL to PostGIS
GDAL_TO_POSTGIS = [None, 4, 6, 5, 8, 7, 10, 11, None, None, None, None]
# Lookup to convert pixel type values from PostGIS to GDAL
POSTGIS_TO_GDAL = [1, 1, 1, 3, 1, 3, 2, 5, 4, None, 6, 7, None, None]
# Struct pack structure for raster header, the raster header has the
# following structure:
#
# Endianness, PostGIS raster version, number of bands, scale, origin,
# skew, srid, width, and height.
#
# Scale, origin, and skew have x and y values. PostGIS currently uses
# a fixed endianness (1) and there is only one version (0).
POSTGIS_HEADER_STRUCTURE = "B H H d d d d d d i H H"
# Lookup values to convert GDAL pixel types to struct characters. This is
# used to pack and unpack the pixel values of PostGIS raster bands.
GDAL_TO_STRUCT = [
None,
"B",
"H",
"h",
"L",
"l",
"f",
"d",
None,
None,
None,
None,
]
# Size of the packed value in bytes for different numerical types.
# This is needed to cut chunks of band data out of PostGIS raster strings
# when decomposing them into GDALRasters.
# See https://docs.python.org/library/struct.html#format-characters
STRUCT_SIZE = {
"b": 1, # Signed char
"B": 1, # Unsigned char
"?": 1, # _Bool
"h": 2, # Short
"H": 2, # Unsigned short
"i": 4, # Integer
"I": 4, # Unsigned Integer
"l": 4, # Long
"L": 4, # Unsigned Long
"f": 4, # Float
"d": 8, # Double
}
# Pixel type specifies type of pixel values in a band. Storage flag specifies
# whether the band data is stored as part of the datum or is to be found on the
# server's filesystem. There are currently 11 supported pixel value types, so 4
# bits are enough to account for all. Reserve the upper 4 bits for generic
# flags. See
# https://trac.osgeo.org/postgis/wiki/WKTRaster/RFC/RFC1_V0SerialFormat#Pixeltypeandstorageflag
BANDTYPE_PIXTYPE_MASK = 0x0F
BANDTYPE_FLAG_HASNODATA = 1 << 6
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/django/contrib/gis/db/backends/postgis/introspection.py | django/contrib/gis/db/backends/postgis/introspection.py | from django.contrib.gis.gdal import OGRGeomType
from django.db.backends.postgresql.introspection import DatabaseIntrospection
class PostGISIntrospection(DatabaseIntrospection):
postgis_oid_lookup = {} # Populated when introspection is performed.
ignored_tables = [
*DatabaseIntrospection.ignored_tables,
"geography_columns",
"geometry_columns",
"raster_columns",
"spatial_ref_sys",
"raster_overviews",
]
def get_field_type(self, data_type, description):
if not self.postgis_oid_lookup:
# Query PostgreSQL's pg_type table to determine the OID integers
# for the PostGIS data types used in reverse lookup (the integers
# may be different across versions). To prevent unnecessary
# requests upon connection initialization, the `data_types_reverse`
# dictionary isn't updated until introspection is performed here.
with self.connection.cursor() as cursor:
cursor.execute(
"SELECT oid, typname "
"FROM pg_type "
"WHERE typname IN ('geometry', 'geography')"
)
self.postgis_oid_lookup = dict(cursor.fetchall())
self.data_types_reverse.update(
(oid, "GeometryField") for oid in self.postgis_oid_lookup
)
return super().get_field_type(data_type, description)
def get_geometry_type(self, table_name, description):
"""
The geometry type OID used by PostGIS does not indicate the particular
type of field that a geometry column is (e.g., whether it's a
PointField or a PolygonField). Thus, this routine queries the PostGIS
metadata tables to determine the geometry type.
"""
with self.connection.cursor() as cursor:
cursor.execute(
"""
SELECT t.coord_dimension, t.srid, t.type FROM (
SELECT * FROM geometry_columns
UNION ALL
SELECT * FROM geography_columns
) AS t WHERE t.f_table_name = %s AND t.f_geometry_column = %s
""",
(table_name, description.name),
)
row = cursor.fetchone()
if not row:
raise Exception(
'Could not find a geometry or geography column for "%s"."%s"'
% (table_name, description.name)
)
dim, srid, field_type = row
# OGRGeomType does not require GDAL and makes it easy to convert
# from OGC geom type name to Django field.
field_type = OGRGeomType(field_type).django
# Getting any GeometryField keyword arguments that are not the
# default.
field_params = {}
if self.postgis_oid_lookup.get(description.type_code) == "geography":
field_params["geography"] = True
if srid != 4326:
field_params["srid"] = srid
if dim != 2:
field_params["dim"] = dim
return field_type, field_params
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/django/contrib/gis/db/backends/postgis/__init__.py | django/contrib/gis/db/backends/postgis/__init__.py | python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false | |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/django/contrib/gis/db/backends/postgis/base.py | django/contrib/gis/db/backends/postgis/base.py | from functools import lru_cache
from django.db.backends.base.base import NO_DB_ALIAS
from django.db.backends.postgresql.base import DatabaseWrapper as PsycopgDatabaseWrapper
from django.db.backends.postgresql.features import (
DatabaseFeatures as PsycopgDatabaseFeatures,
)
from django.db.backends.postgresql.introspection import (
DatabaseIntrospection as PsycopgDatabaseIntrospection,
)
from django.db.backends.postgresql.operations import (
DatabaseOperations as PsycopgDatabaseOperations,
)
from django.db.backends.postgresql.psycopg_any import is_psycopg3
from .adapter import PostGISAdapter
from .features import DatabaseFeatures
from .introspection import PostGISIntrospection
from .operations import PostGISOperations
from .schema import PostGISSchemaEditor
if is_psycopg3:
from psycopg.adapt import Dumper
from psycopg.pq import Format
from psycopg.types import TypeInfo
from psycopg.types.string import TextBinaryLoader, TextLoader
class GeometryType:
pass
class GeographyType:
pass
class RasterType:
pass
class BaseTextDumper(Dumper):
def dump(self, obj):
# Return bytes as hex for text formatting
return obj.ewkb.hex().encode()
class BaseBinaryDumper(Dumper):
format = Format.BINARY
def dump(self, obj):
return obj.ewkb
@lru_cache
def postgis_adapters(geo_oid, geog_oid, raster_oid):
class BaseDumper(Dumper):
def __init_subclass__(cls, base_dumper):
super().__init_subclass__()
cls.GeometryDumper = type(
"GeometryDumper", (base_dumper,), {"oid": geo_oid}
)
cls.GeographyDumper = type(
"GeographyDumper", (base_dumper,), {"oid": geog_oid}
)
cls.RasterDumper = type(
"RasterDumper", (BaseTextDumper,), {"oid": raster_oid}
)
def get_key(self, obj, format):
if obj.is_geometry:
return GeographyType if obj.geography else GeometryType
else:
return RasterType
def upgrade(self, obj, format):
if obj.is_geometry:
if obj.geography:
return self.GeographyDumper(GeographyType)
else:
return self.GeometryDumper(GeometryType)
else:
return self.RasterDumper(RasterType)
def dump(self, obj):
raise NotImplementedError
class PostGISTextDumper(BaseDumper, base_dumper=BaseTextDumper):
pass
class PostGISBinaryDumper(BaseDumper, base_dumper=BaseBinaryDumper):
format = Format.BINARY
return PostGISTextDumper, PostGISBinaryDumper
class DatabaseWrapper(PsycopgDatabaseWrapper):
SchemaEditorClass = PostGISSchemaEditor
features_class = DatabaseFeatures
ops_class = PostGISOperations
introspection_class = PostGISIntrospection
_type_infos = {
"geometry": {},
"geography": {},
"raster": {},
}
def __init__(self, *args, **kwargs):
if kwargs.get("alias", "") == NO_DB_ALIAS:
# Don't initialize PostGIS-specific stuff for non-db connections.
self.features_class = PsycopgDatabaseFeatures
self.ops_class = PsycopgDatabaseOperations
self.introspection_class = PsycopgDatabaseIntrospection
super().__init__(*args, **kwargs)
def prepare_database(self):
super().prepare_database()
# Check that postgis extension is installed.
with self.cursor() as cursor:
cursor.execute("SELECT 1 FROM pg_extension WHERE extname = %s", ["postgis"])
if bool(cursor.fetchone()):
return
cursor.execute("CREATE EXTENSION IF NOT EXISTS postgis")
if is_psycopg3:
# Ensure adapters are registers if PostGIS is used within this
# connection.
self.register_geometry_adapters(self.connection, True)
def get_new_connection(self, conn_params):
connection = super().get_new_connection(conn_params)
if is_psycopg3:
self.register_geometry_adapters(connection)
return connection
if is_psycopg3:
def _register_type(self, pg_connection, typename):
registry = self._type_infos[typename]
try:
info = registry[self.alias]
except KeyError:
info = TypeInfo.fetch(pg_connection, typename)
registry[self.alias] = info
if info: # Can be None if the type does not exist (yet).
info.register(pg_connection)
pg_connection.adapters.register_loader(info.oid, TextLoader)
pg_connection.adapters.register_loader(info.oid, TextBinaryLoader)
return info.oid if info else None
def register_geometry_adapters(self, pg_connection, clear_caches=False):
if clear_caches:
for typename in self._type_infos:
self._type_infos[typename].pop(self.alias, None)
geo_oid = self._register_type(pg_connection, "geometry")
geog_oid = self._register_type(pg_connection, "geography")
raster_oid = self._register_type(pg_connection, "raster")
PostGISTextDumper, PostGISBinaryDumper = postgis_adapters(
geo_oid, geog_oid, raster_oid
)
pg_connection.adapters.register_dumper(PostGISAdapter, PostGISTextDumper)
pg_connection.adapters.register_dumper(PostGISAdapter, PostGISBinaryDumper)
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/django/contrib/gis/db/backends/postgis/adapter.py | django/contrib/gis/db/backends/postgis/adapter.py | """
This object provides quoting for GEOS geometries into PostgreSQL/PostGIS.
"""
from django.contrib.gis.db.backends.postgis.pgraster import to_pgraster
from django.contrib.gis.geos import GEOSGeometry
from django.db.backends.postgresql.psycopg_any import sql
class PostGISAdapter:
def __init__(self, obj, geography=False):
"""
Initialize on the spatial object.
"""
self.is_geometry = isinstance(obj, (GEOSGeometry, PostGISAdapter))
# Getting the WKB (in string form, to allow easy pickling of
# the adaptor) and the SRID from the geometry or raster.
if self.is_geometry:
self.ewkb = bytes(obj.ewkb)
else:
self.ewkb = to_pgraster(obj)
self.srid = obj.srid
self.geography = geography
def __conform__(self, proto):
"""Does the given protocol conform to what Psycopg2 expects?"""
from psycopg2.extensions import ISQLQuote
if proto == ISQLQuote:
return self
else:
raise Exception(
"Error implementing psycopg2 protocol. Is psycopg2 installed?"
)
def __eq__(self, other):
return isinstance(other, PostGISAdapter) and self.ewkb == other.ewkb
def __hash__(self):
return hash(self.ewkb)
def __str__(self):
return self.getquoted().decode()
@classmethod
def _fix_polygon(cls, poly):
return poly
def getquoted(self):
"""
Return a properly quoted string for use in PostgreSQL/PostGIS.
"""
if self.is_geometry:
# Psycopg will figure out whether to use E'\\000' or '\000'.
return b"%s(%s)" % (
b"ST_GeogFromWKB" if self.geography else b"ST_GeomFromEWKB",
sql.quote(self.ewkb).encode(),
)
else:
# For rasters, add explicit type cast to WKB string.
return b"'%s'::raster" % self.ewkb.hex().encode()
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/django/contrib/gis/db/backends/postgis/features.py | django/contrib/gis/db/backends/postgis/features.py | from django.contrib.gis.db.backends.base.features import BaseSpatialFeatures
from django.db.backends.postgresql.features import (
DatabaseFeatures as PsycopgDatabaseFeatures,
)
class DatabaseFeatures(BaseSpatialFeatures, PsycopgDatabaseFeatures):
supports_geography = True
supports_3d_storage = True
supports_3d_functions = True
supports_raster = True
supports_empty_geometries = True
empty_intersection_returns_none = False
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/django/contrib/gis/db/backends/spatialite/client.py | django/contrib/gis/db/backends/spatialite/client.py | from django.db.backends.sqlite3.client import DatabaseClient
class SpatiaLiteClient(DatabaseClient):
executable_name = "spatialite"
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/django/contrib/gis/db/backends/spatialite/operations.py | django/contrib/gis/db/backends/spatialite/operations.py | """
SQL functions reference lists:
https://www.gaia-gis.it/gaia-sins/spatialite-sql-4.3.0.html
"""
from django.contrib.gis.db import models
from django.contrib.gis.db.backends.base.operations import BaseSpatialOperations
from django.contrib.gis.db.backends.spatialite.adapter import SpatiaLiteAdapter
from django.contrib.gis.db.backends.utils import SpatialOperator
from django.contrib.gis.geos.geometry import GEOSGeometry, GEOSGeometryBase
from django.contrib.gis.geos.prototypes.io import wkb_r
from django.contrib.gis.measure import Distance
from django.core.exceptions import ImproperlyConfigured
from django.db.backends.sqlite3.operations import DatabaseOperations
from django.utils.functional import cached_property
from django.utils.version import get_version_tuple
class SpatialiteNullCheckOperator(SpatialOperator):
def as_sql(self, connection, lookup, template_params, sql_params):
sql, params = super().as_sql(connection, lookup, template_params, sql_params)
return "%s > 0" % sql, params
class SpatiaLiteOperations(BaseSpatialOperations, DatabaseOperations):
name = "spatialite"
spatialite = True
Adapter = SpatiaLiteAdapter
collect = "Collect"
extent = "Extent"
makeline = "MakeLine"
unionagg = "GUnion"
from_text = "GeomFromText"
gis_operators = {
# Binary predicates
"equals": SpatialiteNullCheckOperator(func="Equals"),
"disjoint": SpatialiteNullCheckOperator(func="Disjoint"),
"touches": SpatialiteNullCheckOperator(func="Touches"),
"crosses": SpatialiteNullCheckOperator(func="Crosses"),
"within": SpatialiteNullCheckOperator(func="Within"),
"overlaps": SpatialiteNullCheckOperator(func="Overlaps"),
"contains": SpatialiteNullCheckOperator(func="Contains"),
"intersects": SpatialiteNullCheckOperator(func="Intersects"),
"relate": SpatialiteNullCheckOperator(func="Relate"),
"coveredby": SpatialiteNullCheckOperator(func="CoveredBy"),
"covers": SpatialiteNullCheckOperator(func="Covers"),
# Returns true if B's bounding box completely contains A's bounding
# box.
"contained": SpatialOperator(func="MbrWithin"),
# Returns true if A's bounding box completely contains B's bounding
# box.
"bbcontains": SpatialOperator(func="MbrContains"),
# Returns true if A's bounding box overlaps B's bounding box.
"bboverlaps": SpatialOperator(func="MbrOverlaps"),
# These are implemented here as synonyms for Equals
"same_as": SpatialiteNullCheckOperator(func="Equals"),
"exact": SpatialiteNullCheckOperator(func="Equals"),
# Distance predicates
"dwithin": SpatialOperator(func="PtDistWithin"),
}
disallowed_aggregates = (models.Extent3D,)
select = "CAST (AsEWKB(%s) AS BLOB)"
function_names = {
"AsWKB": "St_AsBinary",
"BoundingCircle": "GEOSMinimumBoundingCircle",
"ForcePolygonCW": "ST_ForceLHR",
"FromWKB": "ST_GeomFromWKB",
"FromWKT": "ST_GeomFromText",
"IsEmpty": "ST_IsEmpty",
"Length": "ST_Length",
"LineLocatePoint": "ST_Line_Locate_Point",
"NumDimensions": "ST_NDims",
"NumPoints": "ST_NPoints",
"Reverse": "ST_Reverse",
"Scale": "ScaleCoords",
"Translate": "ST_Translate",
"Union": "ST_Union",
}
@cached_property
def unsupported_functions(self):
unsupported = {"GeometryDistance", "MemSize", "Rotate"}
if not self.geom_lib_version():
unsupported |= {"Azimuth", "GeoHash", "MakeValid"}
if self.spatial_version < (5, 1):
unsupported |= {"BoundingCircle"}
return unsupported
@cached_property
def spatial_version(self):
"""Determine the version of the SpatiaLite library."""
try:
version = self.spatialite_version_tuple()[1:]
except Exception as exc:
raise ImproperlyConfigured(
'Cannot determine the SpatiaLite version for the "%s" database. '
"Was the SpatiaLite initialization SQL loaded on this database?"
% (self.connection.settings_dict["NAME"],)
) from exc
if version < (4, 3, 0):
raise ImproperlyConfigured("GeoDjango supports SpatiaLite 4.3.0 and above.")
return version
def convert_extent(self, box):
"""
Convert the polygon data received from SpatiaLite to min/max values.
"""
if box is None:
return None
shell = GEOSGeometry(box).shell
xmin, ymin = shell[0][:2]
xmax, ymax = shell[2][:2]
return (xmin, ymin, xmax, ymax)
def geo_db_type(self, f):
"""
Return None because geometry columns are added via the
`AddGeometryColumn` stored procedure on SpatiaLite.
"""
return None
def get_distance(self, f, value, lookup_type):
"""
Return the distance parameters for the given geometry field,
lookup value, and lookup type.
"""
if not value:
return []
value = value[0]
if isinstance(value, Distance):
if f.geodetic(self.connection):
if lookup_type == "dwithin":
raise ValueError(
"Only numeric values of degree units are allowed on "
"geographic DWithin queries."
)
dist_param = value.m
else:
dist_param = getattr(
value, Distance.unit_attname(f.units_name(self.connection))
)
else:
dist_param = value
return [dist_param]
def _get_spatialite_func(self, func):
"""
Helper routine for calling SpatiaLite functions and returning
their result.
Any error occurring in this method should be handled by the caller.
"""
cursor = self.connection._cursor()
try:
cursor.execute("SELECT %s" % func)
row = cursor.fetchone()
finally:
cursor.close()
return row[0]
def geos_version(self):
"Return the version of GEOS used by SpatiaLite as a string."
return self._get_spatialite_func("geos_version()")
def proj_version(self):
"""Return the version of the PROJ library used by SpatiaLite."""
return self._get_spatialite_func("proj4_version()")
def lwgeom_version(self):
"""Return the version of LWGEOM library used by SpatiaLite."""
return self._get_spatialite_func("lwgeom_version()")
def rttopo_version(self):
"""Return the version of RTTOPO library used by SpatiaLite."""
return self._get_spatialite_func("rttopo_version()")
def geom_lib_version(self):
"""
Return the version of the version-dependant geom library used by
SpatiaLite.
"""
if self.spatial_version >= (5,):
return self.rttopo_version()
else:
return self.lwgeom_version()
def spatialite_version(self):
"Return the SpatiaLite library version as a string."
return self._get_spatialite_func("spatialite_version()")
def spatialite_version_tuple(self):
"""
Return the SpatiaLite version as a tuple (version string, major,
minor, subminor).
"""
version = self.spatialite_version()
return (version, *get_version_tuple(version))
def spatial_aggregate_name(self, agg_name):
"""
Return the spatial aggregate SQL template and function for the
given Aggregate instance.
"""
agg_name = "unionagg" if agg_name.lower() == "union" else agg_name.lower()
return getattr(self, agg_name)
# Routines for getting the OGC-compliant models.
def geometry_columns(self):
from django.contrib.gis.db.backends.spatialite.models import (
SpatialiteGeometryColumns,
)
return SpatialiteGeometryColumns
def spatial_ref_sys(self):
from django.contrib.gis.db.backends.spatialite.models import (
SpatialiteSpatialRefSys,
)
return SpatialiteSpatialRefSys
def get_geometry_converter(self, expression):
geom_class = expression.output_field.geom_class
read = wkb_r().read
def converter(value, expression, connection):
return None if value is None else GEOSGeometryBase(read(value), geom_class)
return converter
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/django/contrib/gis/db/backends/spatialite/schema.py | django/contrib/gis/db/backends/spatialite/schema.py | from django.db import DatabaseError
from django.db.backends.sqlite3.schema import DatabaseSchemaEditor
class SpatialiteSchemaEditor(DatabaseSchemaEditor):
sql_add_geometry_column = (
"SELECT AddGeometryColumn(%(table)s, %(column)s, %(srid)s, "
"%(geom_type)s, %(dim)s, %(null)s)"
)
sql_add_spatial_index = "SELECT CreateSpatialIndex(%(table)s, %(column)s)"
sql_drop_spatial_index = "DROP TABLE idx_%(table)s_%(column)s"
sql_recover_geometry_metadata = (
"SELECT RecoverGeometryColumn(%(table)s, %(column)s, %(srid)s, "
"%(geom_type)s, %(dim)s)"
)
sql_remove_geometry_metadata = "SELECT DiscardGeometryColumn(%(table)s, %(column)s)"
sql_discard_geometry_columns = (
"DELETE FROM %(geom_table)s WHERE f_table_name = %(table)s"
)
sql_update_geometry_columns = (
"UPDATE %(geom_table)s SET f_table_name = %(new_table)s "
"WHERE f_table_name = %(old_table)s"
)
geometry_tables = [
"geometry_columns",
"geometry_columns_auth",
"geometry_columns_time",
"geometry_columns_statistics",
]
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.geometry_sql = []
def geo_quote_name(self, name):
return self.connection.ops.geo_quote_name(name)
def column_sql(self, model, field, include_default=False):
from django.contrib.gis.db.models import GeometryField
if not isinstance(field, GeometryField):
return super().column_sql(model, field, include_default)
# Geometry columns are created by the `AddGeometryColumn` function
self.geometry_sql.append(
self.sql_add_geometry_column
% {
"table": self.geo_quote_name(model._meta.db_table),
"column": self.geo_quote_name(field.column),
"srid": field.srid,
"geom_type": self.geo_quote_name(field.geom_type),
"dim": field.dim,
"null": int(not field.null),
}
)
if field.spatial_index:
self.geometry_sql.append(
self.sql_add_spatial_index
% {
"table": self.quote_name(model._meta.db_table),
"column": self.quote_name(field.column),
}
)
return None, None
def remove_geometry_metadata(self, model, field):
self.execute(
self.sql_remove_geometry_metadata
% {
"table": self.quote_name(model._meta.db_table),
"column": self.quote_name(field.column),
}
)
self.execute(
self.sql_drop_spatial_index
% {
"table": model._meta.db_table,
"column": field.column,
}
)
def create_model(self, model):
super().create_model(model)
# Create geometry columns
for sql in self.geometry_sql:
self.execute(sql)
self.geometry_sql = []
def delete_model(self, model, **kwargs):
from django.contrib.gis.db.models import GeometryField
# Drop spatial metadata (dropping the table does not automatically
# remove them)
for field in model._meta.local_fields:
if isinstance(field, GeometryField):
self.remove_geometry_metadata(model, field)
# Make sure all geom stuff is gone
for geom_table in self.geometry_tables:
try:
self.execute(
self.sql_discard_geometry_columns
% {
"geom_table": geom_table,
"table": self.quote_name(model._meta.db_table),
}
)
except DatabaseError:
pass
super().delete_model(model, **kwargs)
def add_field(self, model, field):
from django.contrib.gis.db.models import GeometryField
if isinstance(field, GeometryField):
# Populate self.geometry_sql
self.column_sql(model, field)
for sql in self.geometry_sql:
self.execute(sql)
self.geometry_sql = []
else:
super().add_field(model, field)
def remove_field(self, model, field):
from django.contrib.gis.db.models import GeometryField
# NOTE: If the field is a geometry field, the table is just recreated,
# the parent's remove_field can't be used cause it will skip the
# recreation if the field does not have a database type. Geometry
# fields do not have a db type cause they are added and removed via
# stored procedures.
if isinstance(field, GeometryField):
self._remake_table(model, delete_field=field)
else:
super().remove_field(model, field)
def alter_db_table(self, model, old_db_table, new_db_table):
from django.contrib.gis.db.models import GeometryField
if old_db_table == new_db_table or (
self.connection.features.ignores_table_name_case
and old_db_table.lower() == new_db_table.lower()
):
return
# Remove geometry-ness from temp table
for field in model._meta.local_fields:
if isinstance(field, GeometryField):
self.execute(
self.sql_remove_geometry_metadata
% {
"table": self.quote_name(old_db_table),
"column": self.quote_name(field.column),
}
)
# Alter table
super().alter_db_table(model, old_db_table, new_db_table)
# Repoint any straggler names
for geom_table in self.geometry_tables:
try:
self.execute(
self.sql_update_geometry_columns
% {
"geom_table": geom_table,
"old_table": self.quote_name(old_db_table),
"new_table": self.quote_name(new_db_table),
}
)
except DatabaseError:
pass
# Re-add geometry-ness and rename spatial index tables
for field in model._meta.local_fields:
if isinstance(field, GeometryField):
self.execute(
self.sql_recover_geometry_metadata
% {
"table": self.geo_quote_name(new_db_table),
"column": self.geo_quote_name(field.column),
"srid": field.srid,
"geom_type": self.geo_quote_name(field.geom_type),
"dim": field.dim,
}
)
if getattr(field, "spatial_index", False):
self.execute(
self.sql_rename_table
% {
"old_table": self.quote_name(
"idx_%s_%s" % (old_db_table, field.column)
),
"new_table": self.quote_name(
"idx_%s_%s" % (new_db_table, field.column)
),
}
)
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/django/contrib/gis/db/backends/spatialite/models.py | django/contrib/gis/db/backends/spatialite/models.py | """
The GeometryColumns and SpatialRefSys models for the SpatiaLite backend.
"""
from django.contrib.gis.db.backends.base.models import SpatialRefSysMixin
from django.db import models
class SpatialiteGeometryColumns(models.Model):
"""
The 'geometry_columns' table from SpatiaLite.
"""
f_table_name = models.CharField(max_length=256)
f_geometry_column = models.CharField(max_length=256)
coord_dimension = models.IntegerField()
srid = models.IntegerField(primary_key=True)
spatial_index_enabled = models.IntegerField()
type = models.IntegerField(db_column="geometry_type")
class Meta:
app_label = "gis"
db_table = "geometry_columns"
managed = False
def __str__(self):
return "%s.%s - %dD %s field (SRID: %d)" % (
self.f_table_name,
self.f_geometry_column,
self.coord_dimension,
self.type,
self.srid,
)
@classmethod
def table_name_col(cls):
"""
Return the name of the metadata column used to store the feature table
name.
"""
return "f_table_name"
@classmethod
def geom_col_name(cls):
"""
Return the name of the metadata column used to store the feature
geometry column.
"""
return "f_geometry_column"
class SpatialiteSpatialRefSys(models.Model, SpatialRefSysMixin):
"""
The 'spatial_ref_sys' table from SpatiaLite.
"""
srid = models.IntegerField(primary_key=True)
auth_name = models.CharField(max_length=256)
auth_srid = models.IntegerField()
ref_sys_name = models.CharField(max_length=256)
proj4text = models.CharField(max_length=2048)
srtext = models.CharField(max_length=2048)
class Meta:
app_label = "gis"
db_table = "spatial_ref_sys"
managed = False
@property
def wkt(self):
return self.srtext
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/django/contrib/gis/db/backends/spatialite/introspection.py | django/contrib/gis/db/backends/spatialite/introspection.py | from django.contrib.gis.gdal import OGRGeomType
from django.db.backends.sqlite3.introspection import (
DatabaseIntrospection,
FlexibleFieldLookupDict,
)
class GeoFlexibleFieldLookupDict(FlexibleFieldLookupDict):
"""
Subclass that includes updates the `base_data_types_reverse` dict
for geometry field types.
"""
base_data_types_reverse = {
**FlexibleFieldLookupDict.base_data_types_reverse,
"point": "GeometryField",
"linestring": "GeometryField",
"polygon": "GeometryField",
"multipoint": "GeometryField",
"multilinestring": "GeometryField",
"multipolygon": "GeometryField",
"geometrycollection": "GeometryField",
}
class SpatiaLiteIntrospection(DatabaseIntrospection):
data_types_reverse = GeoFlexibleFieldLookupDict()
def get_geometry_type(self, table_name, description):
with self.connection.cursor() as cursor:
# Querying the `geometry_columns` table to get additional metadata.
cursor.execute(
"SELECT coord_dimension, srid, geometry_type "
"FROM geometry_columns "
"WHERE f_table_name=%s AND f_geometry_column=%s",
(table_name, description.name),
)
row = cursor.fetchone()
if not row:
raise Exception(
'Could not find a geometry column for "%s"."%s"'
% (table_name, description.name)
)
# OGRGeomType does not require GDAL and makes it easy to convert
# from OGC geom type name to Django field.
ogr_type = row[2]
if isinstance(ogr_type, int) and ogr_type > 1000:
# SpatiaLite uses SFSQL 1.2 offsets 1000 (Z), 2000 (M), and
# 3000 (ZM) to indicate the presence of higher dimensional
# coordinates (M not yet supported by Django).
ogr_type = ogr_type % 1000 + OGRGeomType.wkb25bit
field_type = OGRGeomType(ogr_type).django
# Getting any GeometryField keyword arguments that are not the
# default.
dim = row[0]
srid = row[1]
field_params = {}
if srid != 4326:
field_params["srid"] = srid
if (isinstance(dim, str) and "Z" in dim) or dim == 3:
field_params["dim"] = 3
return field_type, field_params
def get_constraints(self, cursor, table_name):
constraints = super().get_constraints(cursor, table_name)
cursor.execute(
"SELECT f_geometry_column "
"FROM geometry_columns "
"WHERE f_table_name=%s AND spatial_index_enabled=1",
(table_name,),
)
for row in cursor.fetchall():
constraints["%s__spatial__index" % row[0]] = {
"columns": [row[0]],
"primary_key": False,
"unique": False,
"foreign_key": None,
"check": False,
"index": True,
}
return constraints
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/django/contrib/gis/db/backends/spatialite/__init__.py | django/contrib/gis/db/backends/spatialite/__init__.py | python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false | |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/django/contrib/gis/db/backends/spatialite/base.py | django/contrib/gis/db/backends/spatialite/base.py | from ctypes.util import find_library
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.db.backends.sqlite3.base import DatabaseWrapper as SQLiteDatabaseWrapper
from .client import SpatiaLiteClient
from .features import DatabaseFeatures
from .introspection import SpatiaLiteIntrospection
from .operations import SpatiaLiteOperations
from .schema import SpatialiteSchemaEditor
class DatabaseWrapper(SQLiteDatabaseWrapper):
SchemaEditorClass = SpatialiteSchemaEditor
# Classes instantiated in __init__().
client_class = SpatiaLiteClient
features_class = DatabaseFeatures
introspection_class = SpatiaLiteIntrospection
ops_class = SpatiaLiteOperations
def __init__(self, *args, **kwargs):
# Trying to find the location of the SpatiaLite library.
# Here we are figuring out the path to the SpatiaLite library
# (`libspatialite`). If it's not in the system library path (e.g., it
# cannot be found by `ctypes.util.find_library`), then it may be set
# manually in the settings via the `SPATIALITE_LIBRARY_PATH` setting.
self.lib_spatialite_paths = [
name
for name in [
getattr(settings, "SPATIALITE_LIBRARY_PATH", None),
"mod_spatialite.so",
"mod_spatialite",
find_library("spatialite"),
]
if name is not None
]
super().__init__(*args, **kwargs)
def get_new_connection(self, conn_params):
conn = super().get_new_connection(conn_params)
# Enabling extension loading on the SQLite connection.
try:
conn.enable_load_extension(True)
except AttributeError:
raise ImproperlyConfigured(
"SpatiaLite requires SQLite to be configured to allow "
"extension loading."
)
# Load the SpatiaLite library extension on the connection.
for path in self.lib_spatialite_paths:
try:
conn.load_extension(path)
except Exception:
if getattr(settings, "SPATIALITE_LIBRARY_PATH", None):
raise ImproperlyConfigured(
"Unable to load the SpatiaLite library extension "
"as specified in your SPATIALITE_LIBRARY_PATH setting."
)
continue
else:
break
else:
raise ImproperlyConfigured(
"Unable to load the SpatiaLite library extension. "
"Library names tried: %s" % ", ".join(self.lib_spatialite_paths)
)
return conn
def prepare_database(self):
super().prepare_database()
# Check if spatial metadata have been initialized in the database
with self.cursor() as cursor:
cursor.execute("PRAGMA table_info(geometry_columns);")
if cursor.fetchall() == []:
if self.ops.spatial_version < (5,):
cursor.execute("SELECT InitSpatialMetaData(1)")
else:
cursor.execute("SELECT InitSpatialMetaDataFull(1)")
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/django/contrib/gis/db/backends/spatialite/adapter.py | django/contrib/gis/db/backends/spatialite/adapter.py | from django.contrib.gis.db.backends.base.adapter import WKTAdapter
from django.db.backends.sqlite3.base import Database
class SpatiaLiteAdapter(WKTAdapter):
"SQLite adapter for geometry objects."
def __conform__(self, protocol):
if protocol is Database.PrepareProtocol:
return str(self)
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/django/contrib/gis/db/backends/spatialite/features.py | django/contrib/gis/db/backends/spatialite/features.py | from django.contrib.gis.db.backends.base.features import BaseSpatialFeatures
from django.db.backends.sqlite3.features import (
DatabaseFeatures as SQLiteDatabaseFeatures,
)
from django.utils.functional import cached_property
class DatabaseFeatures(BaseSpatialFeatures, SQLiteDatabaseFeatures):
can_alter_geometry_field = False # Not implemented
supports_3d_storage = True
@cached_property
def supports_area_geodetic(self):
return bool(self.connection.ops.geom_lib_version())
@cached_property
def django_test_skips(self):
skips = super().django_test_skips
skips.update(
{
"SpatiaLite doesn't support distance lookups with Distance objects.": {
"gis_tests.geogapp.tests.GeographyTest.test02_distance_lookup",
},
}
)
return skips
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/django/contrib/gis/db/backends/base/operations.py | django/contrib/gis/db/backends/base/operations.py | from django.contrib.gis.db.models import GeometryField
from django.contrib.gis.db.models.functions import Distance
from django.contrib.gis.measure import Area as AreaMeasure
from django.contrib.gis.measure import Distance as DistanceMeasure
from django.db import NotSupportedError
from django.utils.functional import cached_property
class BaseSpatialOperations:
# Quick booleans for the type of this spatial backend, and
# an attribute for the spatial database version tuple (if applicable)
postgis = False
spatialite = False
mariadb = False
mysql = False
oracle = False
spatial_version = None
# How the geometry column should be selected.
select = "%s"
@cached_property
def select_extent(self):
return self.select
# Aggregates
disallowed_aggregates = ()
geom_func_prefix = ""
# Mapping between Django function names and backend names, when names do
# not match; used in spatial_function_name().
function_names = {}
# Set of known unsupported functions of the backend
unsupported_functions = {
"Area",
"AsGeoJSON",
"AsGML",
"AsKML",
"AsSVG",
"AsWKB",
"AsWKT",
"Azimuth",
"BoundingCircle",
"Centroid",
"ClosestPoint",
"Difference",
"Distance",
"DistanceSpheroid",
"Envelope",
"ForcePolygonCW",
"FromWKB",
"FromWKT",
"GeoHash",
"GeometryDistance",
"GeometryType",
"Intersection",
"IsEmpty",
"IsValid",
"Length",
"LineLocatePoint",
"MakeValid",
"MemSize",
"NumDimensions",
"NumGeometries",
"NumPoints",
"Perimeter",
"PointOnSurface",
"Reverse",
"Rotate",
"Scale",
"SnapToGrid",
"SymDifference",
"Transform",
"Translate",
"Union",
}
# Constructors
from_text = False
# Default conversion functions for aggregates; will be overridden if
# implemented for the spatial backend.
def convert_extent(self, box, srid):
raise NotImplementedError(
"Aggregate extent not implemented for this spatial backend."
)
def convert_extent3d(self, box, srid):
raise NotImplementedError(
"Aggregate 3D extent not implemented for this spatial backend."
)
# For quoting column values, rather than columns.
def geo_quote_name(self, name):
return "'%s'" % name
# GeometryField operations
def geo_db_type(self, f):
"""
Return the database column type for the geometry field on
the spatial backend.
"""
raise NotImplementedError(
"subclasses of BaseSpatialOperations must provide a geo_db_type() method"
)
def get_distance(self, f, value, lookup_type):
"""
Return the distance parameters for the given geometry field,
lookup value, and lookup type.
"""
raise NotImplementedError(
"Distance operations not available on this spatial backend."
)
def get_geom_placeholder(self, f, value, compiler):
"""
Return the placeholder for the given geometry field with the given
value. Depending on the spatial backend, the placeholder may contain a
stored procedure call to the transformation function of the spatial
backend.
"""
def transform_value(value, field):
return value is not None and value.srid != field.srid
if hasattr(value, "as_sql"):
return (
"%s(%%s, %s)" % (self.spatial_function_name("Transform"), f.srid)
if transform_value(value.output_field, f)
else "%s"
)
if transform_value(value, f):
# Add Transform() to the SQL placeholder.
return "%s(%s(%%s,%s), %s)" % (
self.spatial_function_name("Transform"),
self.from_text,
value.srid,
f.srid,
)
elif self.connection.features.has_spatialrefsys_table:
return "%s(%%s,%s)" % (self.from_text, f.srid)
else:
# For backwards compatibility on MySQL (#27464).
return "%s(%%s)" % self.from_text
def check_expression_support(self, expression):
if isinstance(expression, self.disallowed_aggregates):
raise NotSupportedError(
"%s spatial aggregation is not supported by this database backend."
% expression.name
)
super().check_expression_support(expression)
def spatial_aggregate_name(self, agg_name):
raise NotImplementedError(
"Aggregate support not implemented for this spatial backend."
)
def spatial_function_name(self, func_name):
if func_name in self.unsupported_functions:
raise NotSupportedError(
"This backend doesn't support the %s function." % func_name
)
return self.function_names.get(func_name, self.geom_func_prefix + func_name)
# Routines for getting the OGC-compliant models.
def geometry_columns(self):
raise NotImplementedError(
"Subclasses of BaseSpatialOperations must provide a geometry_columns() "
"method."
)
def spatial_ref_sys(self):
raise NotImplementedError(
"subclasses of BaseSpatialOperations must a provide spatial_ref_sys() "
"method"
)
distance_expr_for_lookup = staticmethod(Distance)
def get_db_converters(self, expression):
converters = super().get_db_converters(expression)
if isinstance(expression.output_field, GeometryField):
converters.append(self.get_geometry_converter(expression))
return converters
def get_geometry_converter(self, expression):
raise NotImplementedError(
"Subclasses of BaseSpatialOperations must provide a "
"get_geometry_converter() method."
)
def get_area_att_for_field(self, field):
if field.geodetic(self.connection):
if self.connection.features.supports_area_geodetic:
return "sq_m"
raise NotImplementedError(
"Area on geodetic coordinate systems not supported."
)
else:
units_name = field.units_name(self.connection)
if units_name:
return AreaMeasure.unit_attname(units_name)
def get_distance_att_for_field(self, field):
dist_att = None
if field.geodetic(self.connection):
if self.connection.features.supports_distance_geodetic:
dist_att = "m"
else:
units = field.units_name(self.connection)
if units:
dist_att = DistanceMeasure.unit_attname(units)
return dist_att
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/django/contrib/gis/db/backends/base/models.py | django/contrib/gis/db/backends/base/models.py | from django.contrib.gis import gdal
from django.utils.functional import cached_property
class SpatialRefSysMixin:
"""
The SpatialRefSysMixin is a class used by the database-dependent
SpatialRefSys objects to reduce redundant code.
"""
@cached_property
def srs(self):
"""
Return a GDAL SpatialReference object.
"""
try:
return gdal.SpatialReference(self.wkt)
except Exception as e:
wkt_error = e
try:
return gdal.SpatialReference(self.proj4text)
except Exception as e:
proj4_error = e
raise Exception(
"Could not get OSR SpatialReference.\n"
f"Error for WKT '{self.wkt}': {wkt_error}\n"
f"Error for PROJ.4 '{self.proj4text}': {proj4_error}"
)
@property
def ellipsoid(self):
"""
Return a tuple of the ellipsoid parameters:
(semimajor axis, semiminor axis, and inverse flattening).
"""
return self.srs.ellipsoid
@property
def name(self):
"Return the projection name."
return self.srs.name
@property
def spheroid(self):
"Return the spheroid name for this spatial reference."
return self.srs["spheroid"]
@property
def datum(self):
"Return the datum for this spatial reference."
return self.srs["datum"]
@property
def projected(self):
"Is this Spatial Reference projected?"
return self.srs.projected
@property
def local(self):
"Is this Spatial Reference local?"
return self.srs.local
@property
def geographic(self):
"Is this Spatial Reference geographic?"
return self.srs.geographic
@property
def linear_name(self):
"Return the linear units name."
return self.srs.linear_name
@property
def linear_units(self):
"Return the linear units."
return self.srs.linear_units
@property
def angular_name(self):
"Return the name of the angular units."
return self.srs.angular_name
@property
def angular_units(self):
"Return the angular units."
return self.srs.angular_units
@property
def units(self):
"Return a tuple of the units and the name."
if self.projected or self.local:
return (self.linear_units, self.linear_name)
elif self.geographic:
return (self.angular_units, self.angular_name)
else:
return (None, None)
@classmethod
def get_units(cls, wkt):
"""
Return a tuple of (unit_value, unit_name) for the given WKT without
using any of the database fields.
"""
return gdal.SpatialReference(wkt).units
@classmethod
def get_spheroid(cls, wkt, string=True):
"""
Class method used by GeometryField on initialization to
retrieve the `SPHEROID[..]` parameters from the given WKT.
"""
srs = gdal.SpatialReference(wkt)
sphere_params = srs.ellipsoid
sphere_name = srs["spheroid"]
if not string:
return sphere_name, sphere_params
else:
# `string` parameter used to place in format acceptable by PostGIS
if len(sphere_params) == 3:
radius, flattening = sphere_params[0], sphere_params[2]
else:
radius, flattening = sphere_params
return 'SPHEROID["%s",%s,%s]' % (sphere_name, radius, flattening)
def __str__(self):
"""
Return the string representation, a 'pretty' OGC WKT.
"""
return str(self.srs)
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/django/contrib/gis/db/backends/base/__init__.py | django/contrib/gis/db/backends/base/__init__.py | python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false | |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/django/contrib/gis/db/backends/base/adapter.py | django/contrib/gis/db/backends/base/adapter.py | class WKTAdapter:
"""
An adaptor for Geometries sent to the MySQL and Oracle database backends.
"""
def __init__(self, geom):
self.wkt = geom.wkt
self.srid = geom.srid
def __eq__(self, other):
return (
isinstance(other, WKTAdapter)
and self.wkt == other.wkt
and self.srid == other.srid
)
def __hash__(self):
return hash((self.wkt, self.srid))
def __str__(self):
return self.wkt
@classmethod
def _fix_polygon(cls, poly):
# Hook for Oracle.
return poly
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/django/contrib/gis/db/backends/base/features.py | django/contrib/gis/db/backends/base/features.py | import re
from django.contrib.gis.db import models
from .operations import BaseSpatialOperations
class BaseSpatialFeatures:
gis_enabled = True
# Does the database contain a SpatialRefSys model to store SRID
# information?
has_spatialrefsys_table = True
# Does the backend support the django.contrib.gis.utils.add_srs_entry()
# utility?
supports_add_srs_entry = True
# Does the backend introspect GeometryField to its subtypes?
supports_geometry_field_introspection = True
# Does the database have a geography type?
supports_geography = False
# Does the backend support storing 3D geometries?
supports_3d_storage = False
# Reference implementation of 3D functions is:
# https://postgis.net/docs/PostGIS_Special_Functions_Index.html#PostGIS_3D_Functions
supports_3d_functions = False
# Does the database support SRID transform operations?
supports_transform = True
# Can geometry fields be null?
supports_null_geometries = True
# Are empty geometries supported?
supports_empty_geometries = False
# Can the function be applied on geodetic coordinate systems?
supports_distance_geodetic = True
supports_length_geodetic = True
supports_perimeter_geodetic = False
supports_area_geodetic = True
# Is the database able to count vertices on polygons (with `num_points`)?
supports_num_points_poly = True
# Does the backend support expressions for specifying distance in the
# dwithin lookup?
supports_dwithin_distance_expr = True
# Does the database have raster support?
supports_raster = False
# Does the database support a unique index on geometry fields?
supports_geometry_field_unique_index = True
# Can SchemaEditor alter geometry fields?
can_alter_geometry_field = True
# Do the database functions/aggregates support the tolerance parameter?
supports_tolerance_parameter = False
# Set of options that AsGeoJSON() doesn't support.
unsupported_geojson_options = {}
# Does Intersection() return None (rather than an empty GeometryCollection)
# for empty results?
empty_intersection_returns_none = True
@property
def supports_crosses_lookup(self):
return "crosses" in self.connection.ops.gis_operators
@property
def supports_distances_lookups(self):
return self.has_Distance_function
@property
def supports_relate_lookup(self):
return "relate" in self.connection.ops.gis_operators
@property
def supports_isvalid_lookup(self):
return self.has_IsValid_function
# Is the aggregate supported by the database?
@property
def supports_collect_aggr(self):
return models.Collect not in self.connection.ops.disallowed_aggregates
@property
def supports_extent_aggr(self):
return models.Extent not in self.connection.ops.disallowed_aggregates
@property
def supports_make_line_aggr(self):
return models.MakeLine not in self.connection.ops.disallowed_aggregates
@property
def supports_union_aggr(self):
return models.Union not in self.connection.ops.disallowed_aggregates
def __getattr__(self, name):
m = re.match(r"has_(\w*)_function$", name)
if m:
func_name = m[1]
if func_name not in BaseSpatialOperations.unsupported_functions:
raise ValueError(
f"DatabaseFeatures.has_{func_name}_function isn't valid. "
f'Is "{func_name}" missing from '
"BaseSpatialOperations.unsupported_functions?"
)
return func_name not in self.connection.ops.unsupported_functions
raise AttributeError
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/django/contrib/gis/db/backends/mysql/operations.py | django/contrib/gis/db/backends/mysql/operations.py | from django.contrib.gis.db import models
from django.contrib.gis.db.backends.base.adapter import WKTAdapter
from django.contrib.gis.db.backends.base.operations import BaseSpatialOperations
from django.contrib.gis.db.backends.utils import SpatialOperator
from django.contrib.gis.geos.geometry import GEOSGeometryBase
from django.contrib.gis.geos.prototypes.io import wkb_r
from django.contrib.gis.measure import Distance
from django.db.backends.mysql.operations import DatabaseOperations
from django.utils.functional import cached_property
class MySQLOperations(BaseSpatialOperations, DatabaseOperations):
name = "mysql"
geom_func_prefix = "ST_"
Adapter = WKTAdapter
@cached_property
def mariadb(self):
return self.connection.mysql_is_mariadb
@cached_property
def mysql(self):
return not self.connection.mysql_is_mariadb
@cached_property
def select(self):
return self.geom_func_prefix + "AsBinary(%s)"
@cached_property
def from_text(self):
return self.geom_func_prefix + "GeomFromText"
@cached_property
def collect(self):
if self.connection.features.supports_collect_aggr:
return self.geom_func_prefix + "Collect"
@cached_property
def gis_operators(self):
operators = {
"bbcontains": SpatialOperator(
func="MBRContains"
), # For consistency w/PostGIS API
"bboverlaps": SpatialOperator(func="MBROverlaps"), # ...
"contained": SpatialOperator(func="MBRWithin"), # ...
"contains": SpatialOperator(func="ST_Contains"),
"coveredby": SpatialOperator(func="MBRCoveredBy"),
"crosses": SpatialOperator(func="ST_Crosses"),
"disjoint": SpatialOperator(func="ST_Disjoint"),
"equals": SpatialOperator(func="ST_Equals"),
"exact": SpatialOperator(func="ST_Equals"),
"intersects": SpatialOperator(func="ST_Intersects"),
"overlaps": SpatialOperator(func="ST_Overlaps"),
"same_as": SpatialOperator(func="ST_Equals"),
"touches": SpatialOperator(func="ST_Touches"),
"within": SpatialOperator(func="ST_Within"),
}
if self.connection.mysql_is_mariadb:
operators["relate"] = SpatialOperator(func="ST_Relate")
if self.connection.mysql_version < (12, 0, 1):
del operators["coveredby"]
else:
operators["covers"] = SpatialOperator(func="MBRCovers")
return operators
@cached_property
def disallowed_aggregates(self):
disallowed_aggregates = [
models.Extent,
models.Extent3D,
models.MakeLine,
models.Union,
]
is_mariadb = self.connection.mysql_is_mariadb
if is_mariadb:
if self.connection.mysql_version < (12, 0, 1):
disallowed_aggregates.insert(0, models.Collect)
return tuple(disallowed_aggregates)
function_names = {
"FromWKB": "ST_GeomFromWKB",
"FromWKT": "ST_GeomFromText",
}
@cached_property
def unsupported_functions(self):
unsupported = {
"AsGML",
"AsKML",
"AsSVG",
"Azimuth",
"BoundingCircle",
"ClosestPoint",
"ForcePolygonCW",
"GeometryDistance",
"IsEmpty",
"LineLocatePoint",
"MakeValid",
"MemSize",
"NumDimensions",
"Perimeter",
"PointOnSurface",
"Reverse",
"Rotate",
"Scale",
"SnapToGrid",
"Transform",
"Translate",
}
if self.connection.mysql_is_mariadb:
unsupported.remove("PointOnSurface")
if self.connection.mysql_version < (12, 0, 1):
unsupported.update({"GeoHash", "IsValid"})
return unsupported
def geo_db_type(self, f):
return f.geom_type
def get_distance(self, f, value, lookup_type):
value = value[0]
if isinstance(value, Distance):
if f.geodetic(self.connection):
raise ValueError(
"Only numeric values of degree units are allowed on "
"geodetic distance queries."
)
dist_param = getattr(
value, Distance.unit_attname(f.units_name(self.connection))
)
else:
dist_param = value
return [dist_param]
def get_geometry_converter(self, expression):
read = wkb_r().read
srid = expression.output_field.srid
if srid == -1:
srid = None
geom_class = expression.output_field.geom_class
def converter(value, expression, connection):
if value is not None:
geom = GEOSGeometryBase(read(memoryview(value)), geom_class)
if srid:
geom.srid = srid
return geom
return converter
def spatial_aggregate_name(self, agg_name):
return getattr(self, agg_name.lower())
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/django/contrib/gis/db/backends/mysql/schema.py | django/contrib/gis/db/backends/mysql/schema.py | import logging
from django.contrib.gis.db.models import GeometryField
from django.db import OperationalError
from django.db.backends.mysql.schema import DatabaseSchemaEditor
logger = logging.getLogger("django.contrib.gis")
class MySQLGISSchemaEditor(DatabaseSchemaEditor):
sql_add_spatial_index = "CREATE SPATIAL INDEX %(index)s ON %(table)s(%(column)s)"
def quote_value(self, value):
if isinstance(value, self.connection.ops.Adapter):
return super().quote_value(str(value))
return super().quote_value(value)
def _field_indexes_sql(self, model, field):
if isinstance(field, GeometryField) and field.spatial_index and not field.null:
with self.connection.cursor() as cursor:
supports_spatial_index = (
self.connection.introspection.supports_spatial_index(
cursor, model._meta.db_table
)
)
sql = self._create_spatial_index_sql(model, field)
if supports_spatial_index:
return [sql]
else:
logger.error(
f"Cannot create SPATIAL INDEX {sql}. Only MyISAM, Aria, and InnoDB "
f"support them.",
)
return []
return super()._field_indexes_sql(model, field)
def remove_field(self, model, field):
if isinstance(field, GeometryField) and field.spatial_index and not field.null:
sql = self._delete_spatial_index_sql(model, field)
try:
self.execute(sql)
except OperationalError:
logger.error(
"Couldn't remove spatial index: %s (may be expected "
"if your storage engine doesn't support them).",
sql,
)
super().remove_field(model, field)
def _alter_field(
self,
model,
old_field,
new_field,
old_type,
new_type,
old_db_params,
new_db_params,
strict=False,
):
super()._alter_field(
model,
old_field,
new_field,
old_type,
new_type,
old_db_params,
new_db_params,
strict=strict,
)
old_field_spatial_index = (
isinstance(old_field, GeometryField)
and old_field.spatial_index
and not old_field.null
)
new_field_spatial_index = (
isinstance(new_field, GeometryField)
and new_field.spatial_index
and not new_field.null
)
if not old_field_spatial_index and new_field_spatial_index:
self.execute(self._create_spatial_index_sql(model, new_field))
elif old_field_spatial_index and not new_field_spatial_index:
self.execute(self._delete_spatial_index_sql(model, old_field))
def _create_spatial_index_name(self, model, field):
return "%s_%s_id" % (model._meta.db_table, field.column)
def _create_spatial_index_sql(self, model, field):
index_name = self._create_spatial_index_name(model, field)
qn = self.connection.ops.quote_name
return self.sql_add_spatial_index % {
"index": qn(index_name),
"table": qn(model._meta.db_table),
"column": qn(field.column),
}
def _delete_spatial_index_sql(self, model, field):
index_name = self._create_spatial_index_name(model, field)
return self._delete_index_sql(model, index_name)
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/django/contrib/gis/db/backends/mysql/introspection.py | django/contrib/gis/db/backends/mysql/introspection.py | from MySQLdb.constants import FIELD_TYPE
from django.contrib.gis.gdal import OGRGeomType
from django.db.backends.mysql.introspection import DatabaseIntrospection
class MySQLIntrospection(DatabaseIntrospection):
# Updating the data_types_reverse dictionary with the appropriate
# type for Geometry fields.
data_types_reverse = DatabaseIntrospection.data_types_reverse.copy()
data_types_reverse[FIELD_TYPE.GEOMETRY] = "GeometryField"
def get_geometry_type(self, table_name, description):
with self.connection.cursor() as cursor:
# In order to get the specific geometry type of the field,
# we introspect on the table definition using `DESCRIBE`.
cursor.execute("DESCRIBE %s" % self.connection.ops.quote_name(table_name))
# Increment over description info until we get to the geometry
# column.
for column, typ, null, key, default, extra in cursor.fetchall():
if column == description.name:
# Using OGRGeomType to convert from OGC name to Django
# field. MySQL does not support 3D or SRIDs, so the field
# params are empty.
field_type = OGRGeomType(typ).django
field_params = {}
break
return field_type, field_params
def supports_spatial_index(self, cursor, table_name):
# Supported with MyISAM, Aria, or InnoDB.
storage_engine = self.get_storage_engine(cursor, table_name)
return storage_engine in ("MyISAM", "Aria", "InnoDB")
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/django/contrib/gis/db/backends/mysql/__init__.py | django/contrib/gis/db/backends/mysql/__init__.py | python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false | |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/django/contrib/gis/db/backends/mysql/base.py | django/contrib/gis/db/backends/mysql/base.py | from django.db.backends.mysql.base import DatabaseWrapper as MySQLDatabaseWrapper
from .features import DatabaseFeatures
from .introspection import MySQLIntrospection
from .operations import MySQLOperations
from .schema import MySQLGISSchemaEditor
class DatabaseWrapper(MySQLDatabaseWrapper):
SchemaEditorClass = MySQLGISSchemaEditor
# Classes instantiated in __init__().
features_class = DatabaseFeatures
introspection_class = MySQLIntrospection
ops_class = MySQLOperations
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/django/contrib/gis/db/backends/mysql/features.py | django/contrib/gis/db/backends/mysql/features.py | from django.contrib.gis.db.backends.base.features import BaseSpatialFeatures
from django.db.backends.mysql.features import DatabaseFeatures as MySQLDatabaseFeatures
from django.utils.functional import cached_property
class DatabaseFeatures(BaseSpatialFeatures, MySQLDatabaseFeatures):
empty_intersection_returns_none = False
has_spatialrefsys_table = False
supports_add_srs_entry = False
supports_distance_geodetic = False
supports_length_geodetic = False
supports_area_geodetic = False
supports_transform = False
supports_null_geometries = False
supports_num_points_poly = False
unsupported_geojson_options = {"crs"}
@cached_property
def supports_geometry_field_unique_index(self):
# Not supported in MySQL since
# https://dev.mysql.com/worklog/task/?id=11808
return self.connection.mysql_is_mariadb
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/django/contrib/gis/admin/__init__.py | django/contrib/gis/admin/__init__.py | from django.contrib.admin import (
HORIZONTAL,
VERTICAL,
AdminSite,
ModelAdmin,
StackedInline,
TabularInline,
action,
autodiscover,
display,
register,
site,
)
from django.contrib.gis.admin.options import GISModelAdmin
__all__ = [
"HORIZONTAL",
"VERTICAL",
"AdminSite",
"ModelAdmin",
"StackedInline",
"TabularInline",
"action",
"autodiscover",
"display",
"register",
"site",
"GISModelAdmin",
]
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/django/contrib/gis/admin/options.py | django/contrib/gis/admin/options.py | from django.contrib.admin import ModelAdmin
from django.contrib.gis.db import models
from django.contrib.gis.forms import OSMWidget
class GeoModelAdminMixin:
gis_widget = OSMWidget
gis_widget_kwargs = {}
def formfield_for_dbfield(self, db_field, request, **kwargs):
if isinstance(db_field, models.GeometryField) and (
db_field.dim < 3 or self.gis_widget.supports_3d
):
kwargs["widget"] = self.gis_widget(**self.gis_widget_kwargs)
return db_field.formfield(**kwargs)
else:
return super().formfield_for_dbfield(db_field, request, **kwargs)
class GISModelAdmin(GeoModelAdminMixin, ModelAdmin):
pass
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/django/contrib/gis/forms/widgets.py | django/contrib/gis/forms/widgets.py | import logging
from django.contrib.gis import gdal
from django.contrib.gis.geometry import json_regex
from django.contrib.gis.geos import GEOSException, GEOSGeometry
from django.forms.widgets import Widget
logger = logging.getLogger("django.contrib.gis")
class BaseGeometryWidget(Widget):
"""
The base class for rich geometry widgets.
Render a map using the WKT of the geometry.
"""
base_layer = None
geom_type = "GEOMETRY"
map_srid = 4326
display_raw = False
supports_3d = False
template_name = "" # set on subclasses
def __init__(self, attrs=None):
self.attrs = {
key: getattr(self, key)
for key in ("base_layer", "geom_type", "map_srid", "display_raw")
}
if attrs:
self.attrs.update(attrs)
def serialize(self, value):
return value.wkt if value else ""
def deserialize(self, value):
try:
return GEOSGeometry(value)
except (GEOSException, ValueError, TypeError) as err:
logger.error("Error creating geometry from value '%s' (%s)", value, err)
return None
def get_context(self, name, value, attrs):
context = super().get_context(name, value, attrs)
# If a string reaches here (via a validation error on another
# field) then just reconstruct the Geometry.
if value and isinstance(value, str):
value = self.deserialize(value)
if value:
# Check that srid of value and map match
if value.srid and value.srid != self.map_srid:
try:
ogr = value.ogr
ogr.transform(self.map_srid)
value = ogr
except gdal.GDALException as err:
logger.error(
"Error transforming geometry from srid '%s' to srid '%s' (%s)",
value.srid,
self.map_srid,
err,
)
context["serialized"] = self.serialize(value)
geom_type = gdal.OGRGeomType(self.attrs["geom_type"]).name
context["widget"]["attrs"]["geom_name"] = (
"Geometry" if geom_type == "Unknown" else geom_type
)
return context
class OpenLayersWidget(BaseGeometryWidget):
base_layer = "nasaWorldview"
template_name = "gis/openlayers.html"
map_srid = 3857
class Media:
css = {
"all": (
"https://cdn.jsdelivr.net/npm/ol@v7.2.2/ol.css",
"gis/css/ol3.css",
)
}
js = (
"https://cdn.jsdelivr.net/npm/ol@v7.2.2/dist/ol.js",
"gis/js/OLMapWidget.js",
)
def serialize(self, value):
return value.json if value else ""
def deserialize(self, value):
geom = super().deserialize(value)
# GeoJSON assumes WGS84 (4326). Use the map's SRID instead.
if geom and json_regex.match(value) and self.map_srid != 4326:
geom.srid = self.map_srid
return geom
class OSMWidget(OpenLayersWidget):
"""
An OpenLayers/OpenStreetMap-based widget.
"""
base_layer = "osm"
default_lon = 5
default_lat = 47
default_zoom = 12
def __init__(self, attrs=None):
if attrs is None:
attrs = {}
attrs.setdefault("default_lon", self.default_lon)
attrs.setdefault("default_lat", self.default_lat)
attrs.setdefault("default_zoom", self.default_zoom)
super().__init__(attrs=attrs)
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/django/contrib/gis/forms/fields.py | django/contrib/gis/forms/fields.py | from django import forms
from django.contrib.gis.gdal import GDALException
from django.contrib.gis.geos import GEOSException, GEOSGeometry
from django.core.exceptions import ValidationError
from django.utils.translation import gettext_lazy as _
from .widgets import OpenLayersWidget
class GeometryField(forms.Field):
"""
This is the basic form field for a Geometry. Any textual input that is
accepted by GEOSGeometry is accepted by this form. By default,
this includes WKT, HEXEWKB, WKB (in a buffer), and GeoJSON.
"""
widget = OpenLayersWidget
geom_type = "GEOMETRY"
default_error_messages = {
"required": _("No geometry value provided."),
"invalid_geom": _("Invalid geometry value."),
"invalid_geom_type": _("Invalid geometry type."),
"transform_error": _(
"An error occurred when transforming the geometry "
"to the SRID of the geometry form field."
),
}
def __init__(self, *, srid=None, geom_type=None, **kwargs):
self.srid = srid
if geom_type is not None:
self.geom_type = geom_type
super().__init__(**kwargs)
self.widget.attrs["geom_type"] = self.geom_type
def to_python(self, value):
"""Transform the value to a Geometry object."""
if value in self.empty_values:
return None
if not isinstance(value, GEOSGeometry):
if hasattr(self.widget, "deserialize"):
try:
value = self.widget.deserialize(value)
except GDALException:
value = None
else:
try:
value = GEOSGeometry(value)
except (GEOSException, ValueError, TypeError):
value = None
if value is None:
raise ValidationError(
self.error_messages["invalid_geom"], code="invalid_geom"
)
# Try to set the srid
if not value.srid:
try:
value.srid = self.widget.map_srid
except AttributeError:
if self.srid:
value.srid = self.srid
return value
def clean(self, value):
"""
Validate that the input value can be converted to a Geometry object
and return it. Raise a ValidationError if the value cannot be
instantiated as a Geometry.
"""
geom = super().clean(value)
if geom is None:
return geom
# Ensuring that the geometry is of the correct type (indicated
# using the OGC string label).
if (
str(geom.geom_type).upper() != self.geom_type
and self.geom_type != "GEOMETRY"
):
raise ValidationError(
self.error_messages["invalid_geom_type"], code="invalid_geom_type"
)
# Transforming the geometry if the SRID was set.
if self.srid and self.srid != -1 and self.srid != geom.srid:
try:
geom.transform(self.srid)
except GEOSException:
raise ValidationError(
self.error_messages["transform_error"], code="transform_error"
)
return geom
def has_changed(self, initial, data):
"""Compare geographic value of data with its initial value."""
try:
data = self.to_python(data)
initial = self.to_python(initial)
except ValidationError:
return True
# Only do a geographic comparison if both values are available
if initial and data:
data.transform(initial.srid)
# If the initial value was not added by the browser, the geometry
# provided may be slightly different, the first time it is saved.
# The comparison is done with a very low tolerance.
return not initial.equals_exact(data, tolerance=0.000001)
else:
# Check for change of state of existence
return bool(initial) != bool(data)
class GeometryCollectionField(GeometryField):
geom_type = "GEOMETRYCOLLECTION"
class PointField(GeometryField):
geom_type = "POINT"
class MultiPointField(GeometryField):
geom_type = "MULTIPOINT"
class LineStringField(GeometryField):
geom_type = "LINESTRING"
class MultiLineStringField(GeometryField):
geom_type = "MULTILINESTRING"
class PolygonField(GeometryField):
geom_type = "POLYGON"
class MultiPolygonField(GeometryField):
geom_type = "MULTIPOLYGON"
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/django/contrib/gis/forms/__init__.py | django/contrib/gis/forms/__init__.py | from django.forms import * # NOQA
from .fields import ( # NOQA
GeometryCollectionField,
GeometryField,
LineStringField,
MultiLineStringField,
MultiPointField,
MultiPolygonField,
PointField,
PolygonField,
)
from .widgets import BaseGeometryWidget, OpenLayersWidget, OSMWidget # NOQA
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/django/contrib/gis/gdal/envelope.py | django/contrib/gis/gdal/envelope.py | """
The GDAL/OGR library uses an Envelope structure to hold the bounding
box information for a geometry. The envelope (bounding box) contains
two pairs of coordinates, one for the lower left coordinate and one
for the upper right coordinate:
+----------o Upper right; (max_x, max_y)
| |
| |
| |
Lower left (min_x, min_y) o----------+
"""
from ctypes import Structure, c_double
from django.contrib.gis.gdal.error import GDALException
# The OGR definition of an Envelope is a C structure containing four doubles.
# See the 'ogr_core.h' source file for more information:
# https://gdal.org/doxygen/ogr__core_8h_source.html
class OGREnvelope(Structure):
"Represent the OGREnvelope C Structure."
_fields_ = [
("MinX", c_double),
("MaxX", c_double),
("MinY", c_double),
("MaxY", c_double),
]
class Envelope:
"""
The Envelope object is a C structure that contains the minimum and
maximum X, Y coordinates for a rectangle bounding box. The naming
of the variables is compatible with the OGR Envelope structure.
"""
def __init__(self, *args):
"""
The initialization function may take an OGREnvelope structure,
4-element tuple or list, or 4 individual arguments.
"""
if len(args) == 1:
if isinstance(args[0], OGREnvelope):
# OGREnvelope (a ctypes Structure) was passed in.
self._envelope = args[0]
elif isinstance(args[0], (tuple, list)):
# A tuple was passed in.
if len(args[0]) != 4:
raise GDALException(
"Incorrect number of tuple elements (%d)." % len(args[0])
)
else:
self._from_sequence(args[0])
else:
raise TypeError("Incorrect type of argument: %s" % type(args[0]))
elif len(args) == 4:
# Individual parameters passed in.
# Thanks to ww for the help
self._from_sequence([float(a) for a in args])
else:
raise GDALException("Incorrect number (%d) of arguments." % len(args))
# Checking the x,y coordinates
if self.min_x > self.max_x:
raise GDALException("Envelope minimum X > maximum X.")
if self.min_y > self.max_y:
raise GDALException("Envelope minimum Y > maximum Y.")
def __eq__(self, other):
"""
Return True if the envelopes are equivalent; can compare against
other Envelopes and 4-tuples.
"""
if isinstance(other, Envelope):
return (
(self.min_x == other.min_x)
and (self.min_y == other.min_y)
and (self.max_x == other.max_x)
and (self.max_y == other.max_y)
)
elif isinstance(other, tuple) and len(other) == 4:
return (
(self.min_x == other[0])
and (self.min_y == other[1])
and (self.max_x == other[2])
and (self.max_y == other[3])
)
else:
raise GDALException("Equivalence testing only works with other Envelopes.")
def __str__(self):
"Return a string representation of the tuple."
return str(self.tuple)
def _from_sequence(self, seq):
"Initialize the C OGR Envelope structure from the given sequence."
self._envelope = OGREnvelope()
self._envelope.MinX = seq[0]
self._envelope.MinY = seq[1]
self._envelope.MaxX = seq[2]
self._envelope.MaxY = seq[3]
def expand_to_include(self, *args):
"""
Modify the envelope to expand to include the boundaries of
the passed-in 2-tuple (a point), 4-tuple (an extent) or
envelope.
"""
# We provide a number of different signatures for this method,
# and the logic here is all about converting them into a
# 4-tuple single parameter which does the actual work of
# expanding the envelope.
if len(args) == 1:
if isinstance(args[0], Envelope):
return self.expand_to_include(args[0].tuple)
elif hasattr(args[0], "x") and hasattr(args[0], "y"):
return self.expand_to_include(
args[0].x, args[0].y, args[0].x, args[0].y
)
elif isinstance(args[0], (tuple, list)):
# A tuple was passed in.
if len(args[0]) == 2:
return self.expand_to_include(
(args[0][0], args[0][1], args[0][0], args[0][1])
)
elif len(args[0]) == 4:
(minx, miny, maxx, maxy) = args[0]
if minx < self._envelope.MinX:
self._envelope.MinX = minx
if miny < self._envelope.MinY:
self._envelope.MinY = miny
if maxx > self._envelope.MaxX:
self._envelope.MaxX = maxx
if maxy > self._envelope.MaxY:
self._envelope.MaxY = maxy
else:
raise GDALException(
"Incorrect number of tuple elements (%d)." % len(args[0])
)
else:
raise TypeError("Incorrect type of argument: %s" % type(args[0]))
elif len(args) == 2:
# An x and an y parameter were passed in
return self.expand_to_include((args[0], args[1], args[0], args[1]))
elif len(args) == 4:
# Individual parameters passed in.
return self.expand_to_include(args)
else:
raise GDALException("Incorrect number (%d) of arguments." % len(args[0]))
@property
def min_x(self):
"Return the value of the minimum X coordinate."
return self._envelope.MinX
@property
def min_y(self):
"Return the value of the minimum Y coordinate."
return self._envelope.MinY
@property
def max_x(self):
"Return the value of the maximum X coordinate."
return self._envelope.MaxX
@property
def max_y(self):
"Return the value of the maximum Y coordinate."
return self._envelope.MaxY
@property
def ur(self):
"Return the upper-right coordinate."
return (self.max_x, self.max_y)
@property
def ll(self):
"Return the lower-left coordinate."
return (self.min_x, self.min_y)
@property
def tuple(self):
"Return a tuple representing the envelope."
return (self.min_x, self.min_y, self.max_x, self.max_y)
@property
def wkt(self):
"Return WKT representing a Polygon for this envelope."
# TODO: Fix significant figures.
return "POLYGON((%s %s,%s %s,%s %s,%s %s,%s %s))" % (
self.min_x,
self.min_y,
self.min_x,
self.max_y,
self.max_x,
self.max_y,
self.max_x,
self.min_y,
self.min_x,
self.min_y,
)
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/django/contrib/gis/gdal/error.py | django/contrib/gis/gdal/error.py | """
This module houses the GDAL & SRS Exception objects, and the
check_err() routine which checks the status code returned by
GDAL/OGR methods.
"""
# #### GDAL & SRS Exceptions ####
class GDALException(Exception):
pass
class SRSException(Exception):
pass
# #### GDAL/OGR error checking codes and routine ####
# OGR Error Codes
OGRERR_DICT = {
1: (GDALException, "Not enough data."),
2: (GDALException, "Not enough memory."),
3: (GDALException, "Unsupported geometry type."),
4: (GDALException, "Unsupported operation."),
5: (GDALException, "Corrupt data."),
6: (GDALException, "OGR failure."),
7: (SRSException, "Unsupported SRS."),
8: (GDALException, "Invalid handle."),
}
# CPL Error Codes
# https://gdal.org/api/cpl.html#cpl-error-h
CPLERR_DICT = {
1: (GDALException, "AppDefined"),
2: (GDALException, "OutOfMemory"),
3: (GDALException, "FileIO"),
4: (GDALException, "OpenFailed"),
5: (GDALException, "IllegalArg"),
6: (GDALException, "NotSupported"),
7: (GDALException, "AssertionFailed"),
8: (GDALException, "NoWriteAccess"),
9: (GDALException, "UserInterrupt"),
10: (GDALException, "ObjectNull"),
}
ERR_NONE = 0
def check_err(code, cpl=False):
"""
Check the given CPL/OGRERR and raise an exception where appropriate.
"""
err_dict = CPLERR_DICT if cpl else OGRERR_DICT
if code == ERR_NONE:
return
elif code in err_dict:
e, msg = err_dict[code]
raise e(msg)
else:
raise GDALException('Unknown error code: "%s"' % code)
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/django/contrib/gis/gdal/layer.py | django/contrib/gis/gdal/layer.py | from ctypes import byref, c_double
from django.contrib.gis.gdal.base import GDALBase
from django.contrib.gis.gdal.envelope import Envelope, OGREnvelope
from django.contrib.gis.gdal.error import GDALException, SRSException
from django.contrib.gis.gdal.feature import Feature
from django.contrib.gis.gdal.field import OGRFieldTypes
from django.contrib.gis.gdal.geometries import OGRGeometry
from django.contrib.gis.gdal.geomtype import OGRGeomType
from django.contrib.gis.gdal.prototypes import ds as capi
from django.contrib.gis.gdal.prototypes import geom as geom_api
from django.contrib.gis.gdal.prototypes import srs as srs_api
from django.contrib.gis.gdal.srs import SpatialReference
from django.utils.encoding import force_bytes, force_str
# For more information, see the OGR C API source code:
# https://gdal.org/api/vector_c_api.html
#
# The OGR_L_* routines are relevant here.
class Layer(GDALBase):
"""
A class that wraps an OGR Layer, needs to be instantiated from a DataSource
object.
"""
def __init__(self, layer_ptr, ds):
"""
Initialize on an OGR C pointer to the Layer and the `DataSource` object
that owns this layer. The `DataSource` object is required so that a
reference to it is kept with this Layer. This prevents garbage
collection of the `DataSource` while this Layer is still active.
"""
if not layer_ptr:
raise GDALException("Cannot create Layer, invalid pointer given")
self.ptr = layer_ptr
self._ds = ds
self._ldefn = capi.get_layer_defn(self._ptr)
# Does the Layer support random reading?
self._random_read = self.test_capability(b"RandomRead")
def __getitem__(self, index):
"Get the Feature at the specified index."
if isinstance(index, int):
# An integer index was given -- we cannot do a check based on the
# number of features because the beginning and ending feature IDs
# are not guaranteed to be 0 and len(layer)-1, respectively.
if index < 0:
raise IndexError("Negative indices are not allowed on OGR Layers.")
return self._make_feature(index)
elif isinstance(index, slice):
# A slice was given
start, stop, stride = index.indices(self.num_feat)
return [self._make_feature(fid) for fid in range(start, stop, stride)]
else:
raise TypeError(
"Integers and slices may only be used when indexing OGR Layers."
)
def __iter__(self):
"Iterate over each Feature in the Layer."
# ResetReading() must be called before iteration is to begin.
capi.reset_reading(self._ptr)
for i in range(self.num_feat):
yield Feature(capi.get_next_feature(self._ptr), self)
def __len__(self):
"The length is the number of features."
return self.num_feat
def __str__(self):
"The string name of the layer."
return self.name
def _make_feature(self, feat_id):
"""
Helper routine for __getitem__ that constructs a Feature from the given
Feature ID. If the OGR Layer does not support random-access reading,
then each feature of the layer will be incremented through until the
a Feature is found matching the given feature ID.
"""
if self._random_read:
# If the Layer supports random reading, return.
try:
return Feature(capi.get_feature(self.ptr, feat_id), self)
except GDALException:
pass
else:
# Random access isn't supported, have to increment through
# each feature until the given feature ID is encountered.
for feat in self:
if feat.fid == feat_id:
return feat
# Should have returned a Feature, raise an IndexError.
raise IndexError("Invalid feature id: %s." % feat_id)
# #### Layer properties ####
@property
def extent(self):
"Return the extent (an Envelope) of this layer."
env = OGREnvelope()
capi.get_extent(self.ptr, byref(env), 1)
return Envelope(env)
@property
def name(self):
"Return the name of this layer in the Data Source."
name = capi.get_fd_name(self._ldefn)
return force_str(name, self._ds.encoding, strings_only=True)
@property
def num_feat(self, force=1):
"Return the number of features in the Layer."
return capi.get_feature_count(self.ptr, force)
@property
def num_fields(self):
"Return the number of fields in the Layer."
return capi.get_field_count(self._ldefn)
@property
def geom_type(self):
"Return the geometry type (OGRGeomType) of the Layer."
return OGRGeomType(capi.get_fd_geom_type(self._ldefn))
@property
def srs(self):
"Return the Spatial Reference used in this Layer."
try:
ptr = capi.get_layer_srs(self.ptr)
return SpatialReference(srs_api.clone_srs(ptr))
except SRSException:
return None
@property
def fields(self):
"""
Return a list of string names corresponding to each of the Fields
available in this Layer.
"""
return [
force_str(
capi.get_field_name(capi.get_field_defn(self._ldefn, i)),
self._ds.encoding,
strings_only=True,
)
for i in range(self.num_fields)
]
@property
def field_types(self):
"""
Return a list of the types of fields in this Layer. For example,
return the list [OFTInteger, OFTReal, OFTString] for an OGR layer that
has an integer, a floating-point, and string fields.
"""
return [
OGRFieldTypes[capi.get_field_type(capi.get_field_defn(self._ldefn, i))]
for i in range(self.num_fields)
]
@property
def field_widths(self):
"Return a list of the maximum field widths for the features."
return [
capi.get_field_width(capi.get_field_defn(self._ldefn, i))
for i in range(self.num_fields)
]
@property
def field_precisions(self):
"Return the field precisions for the features."
return [
capi.get_field_precision(capi.get_field_defn(self._ldefn, i))
for i in range(self.num_fields)
]
def _get_spatial_filter(self):
try:
return OGRGeometry(geom_api.clone_geom(capi.get_spatial_filter(self.ptr)))
except GDALException:
return None
def _set_spatial_filter(self, filter):
if isinstance(filter, OGRGeometry):
capi.set_spatial_filter(self.ptr, filter.ptr)
elif isinstance(filter, (tuple, list)):
if not len(filter) == 4:
raise ValueError("Spatial filter list/tuple must have 4 elements.")
# Map c_double onto params -- if a bad type is passed in it
# will be caught here.
xmin, ymin, xmax, ymax = map(c_double, filter)
capi.set_spatial_filter_rect(self.ptr, xmin, ymin, xmax, ymax)
elif filter is None:
capi.set_spatial_filter(self.ptr, None)
else:
raise TypeError(
"Spatial filter must be either an OGRGeometry instance, a 4-tuple, or "
"None."
)
spatial_filter = property(_get_spatial_filter, _set_spatial_filter)
# #### Layer Methods ####
def get_fields(self, field_name):
"""
Return a list containing the given field name for every Feature
in the Layer.
"""
if field_name not in self.fields:
raise GDALException("invalid field name: %s" % field_name)
return [feat.get(field_name) for feat in self]
def get_geoms(self, geos=False):
"""
Return a list containing the OGRGeometry for every Feature in
the Layer.
"""
if geos:
from django.contrib.gis.geos import GEOSGeometry
return [GEOSGeometry(feat.geom.wkb) for feat in self]
else:
return [feat.geom for feat in self]
def test_capability(self, capability):
"""
Return a bool indicating whether the this Layer supports the given
capability (a string). Valid capability strings include:
'RandomRead', 'SequentialWrite', 'RandomWrite', 'FastSpatialFilter',
'FastFeatureCount', 'FastGetExtent', 'CreateField', 'Transactions',
'DeleteFeature', and 'FastSetNextByIndex'.
"""
return bool(capi.test_capability(self.ptr, force_bytes(capability)))
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/django/contrib/gis/gdal/driver.py | django/contrib/gis/gdal/driver.py | from ctypes import c_void_p
from django.contrib.gis.gdal.base import GDALBase
from django.contrib.gis.gdal.error import GDALException
from django.contrib.gis.gdal.libgdal import GDAL_VERSION
from django.contrib.gis.gdal.prototypes import ds as capi
from django.utils.encoding import force_bytes, force_str
class Driver(GDALBase):
"""
Wrap a GDAL/OGR Data Source Driver.
For more information, see the C API documentation:
https://gdal.org/api/vector_c_api.html
https://gdal.org/api/raster_c_api.html
"""
# Case-insensitive aliases for some GDAL/OGR Drivers.
# For a complete list of original driver names see
# https://gdal.org/drivers/vector/
# https://gdal.org/drivers/raster/
_alias = {
# vector
"esri": "ESRI Shapefile",
"shp": "ESRI Shapefile",
"shape": "ESRI Shapefile",
# raster
"tiff": "GTiff",
"tif": "GTiff",
"jpeg": "JPEG",
"jpg": "JPEG",
}
if GDAL_VERSION[:2] <= (3, 10):
_alias.update(
{
"tiger": "TIGER",
"tiger/line": "TIGER",
}
)
def __init__(self, dr_input):
"""
Initialize an GDAL/OGR driver on either a string or integer input.
"""
if isinstance(dr_input, str):
# If a string name of the driver was passed in
self.ensure_registered()
# Checking the alias dictionary (case-insensitive) to see if an
# alias exists for the given driver.
if dr_input.lower() in self._alias:
name = self._alias[dr_input.lower()]
else:
name = dr_input
# Attempting to get the GDAL/OGR driver by the string name.
driver = c_void_p(capi.get_driver_by_name(force_bytes(name)))
elif isinstance(dr_input, int):
self.ensure_registered()
driver = capi.get_driver(dr_input)
elif isinstance(dr_input, c_void_p):
driver = dr_input
else:
raise GDALException(
"Unrecognized input type for GDAL/OGR Driver: %s" % type(dr_input)
)
# Making sure we get a valid pointer to the OGR Driver
if not driver:
raise GDALException(
"Could not initialize GDAL/OGR Driver on input: %s" % dr_input
)
self.ptr = driver
def __str__(self):
return self.name
@classmethod
def ensure_registered(cls):
"""
Attempt to register all the data source drivers.
"""
# Only register all if the driver count is 0 (or else all drivers will
# be registered over and over again).
if not capi.get_driver_count():
capi.register_all()
@classmethod
def driver_count(cls):
"""
Return the number of GDAL/OGR data source drivers registered.
"""
return capi.get_driver_count()
@property
def name(self):
"""
Return description/name string for this driver.
"""
return force_str(capi.get_driver_description(self.ptr))
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/django/contrib/gis/gdal/datasource.py | django/contrib/gis/gdal/datasource.py | """
DataSource is a wrapper for the OGR Data Source object, which provides
an interface for reading vector geometry data from many different file
formats (including ESRI shapefiles).
When instantiating a DataSource object, use the filename of a
GDAL-supported data source. For example, an SHP file.
The ds_driver keyword is used internally when a ctypes pointer
is passed in directly.
Example:
ds = DataSource('/home/foo/bar.shp')
for layer in ds:
for feature in layer:
# Getting the geometry for the feature.
g = feature.geom
# Getting the 'description' field for the feature.
desc = feature['description']
# We can also increment through all of the fields
# attached to this feature.
for field in feature:
# Get the name of the field (e.g. 'description')
nm = field.name
# Get the type (integer) of the field, e.g. 0 => OFTInteger
t = field.type
# Returns the value the field; OFTIntegers return ints,
# OFTReal returns floats, all else returns string.
val = field.value
"""
from pathlib import Path
from django.contrib.gis.gdal.base import GDALBase
from django.contrib.gis.gdal.driver import Driver
from django.contrib.gis.gdal.error import GDALException
from django.contrib.gis.gdal.layer import Layer
from django.contrib.gis.gdal.prototypes import ds as capi
from django.utils.encoding import force_bytes, force_str
# For more information, see the OGR C API documentation:
# https://gdal.org/api/vector_c_api.html
#
# The OGR_DS_* routines are relevant here.
class DataSource(GDALBase):
"Wraps an OGR Data Source object."
destructor = capi.destroy_ds
def __init__(self, ds_input, ds_driver=False, write=False, encoding="utf-8"):
# The write flag.
self._write = capi.GDAL_OF_UPDATE if write else capi.GDAL_OF_READONLY
# See also https://gdal.org/development/rfc/rfc23_ogr_unicode.html
self.encoding = encoding
Driver.ensure_registered()
if isinstance(ds_input, (str, Path)):
try:
# GDALOpenEx will auto-detect the data source type.
ds = capi.open_ds(
force_bytes(ds_input),
self._write | capi.GDAL_OF_VECTOR,
None,
None,
None,
)
except GDALException:
# Making the error message more clear rather than something
# like "Invalid pointer returned from OGROpen".
raise GDALException('Could not open the datasource at "%s"' % ds_input)
elif isinstance(ds_input, self.ptr_type) and isinstance(
ds_driver, Driver.ptr_type
):
ds = ds_input
else:
raise GDALException("Invalid data source input type: %s" % type(ds_input))
if ds:
self.ptr = ds
driver = capi.get_dataset_driver(ds)
self.driver = Driver(driver)
else:
# Raise an exception if the returned pointer is NULL
raise GDALException('Invalid data source file "%s"' % ds_input)
def __getitem__(self, index):
"Allows use of the index [] operator to get a layer at the index."
if isinstance(index, str):
try:
layer = capi.get_layer_by_name(self.ptr, force_bytes(index))
except GDALException:
raise IndexError("Invalid OGR layer name given: %s." % index)
elif isinstance(index, int):
if 0 <= index < self.layer_count:
layer = capi.get_layer(self._ptr, index)
else:
raise IndexError(
"Index out of range when accessing layers in a datasource: %s."
% index
)
else:
raise TypeError("Invalid index type: %s" % type(index))
return Layer(layer, self)
def __len__(self):
"Return the number of layers within the data source."
return self.layer_count
def __str__(self):
"Return OGR GetName and Driver for the Data Source."
return "%s (%s)" % (self.name, self.driver)
@property
def layer_count(self):
"Return the number of layers in the data source."
return capi.get_layer_count(self._ptr)
@property
def name(self):
"Return the name of the data source."
name = capi.get_ds_name(self._ptr)
return force_str(name, self.encoding, strings_only=True)
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/django/contrib/gis/gdal/geomtype.py | django/contrib/gis/gdal/geomtype.py | from django.contrib.gis.gdal.error import GDALException
class OGRGeomType:
"Encapsulate OGR Geometry Types."
wkb25bit = -2147483648
# Dictionary of acceptable OGRwkbGeometryType s and their string names.
_types = {
0: "Unknown",
1: "Point",
2: "LineString",
3: "Polygon",
4: "MultiPoint",
5: "MultiLineString",
6: "MultiPolygon",
7: "GeometryCollection",
8: "CircularString",
9: "CompoundCurve",
10: "CurvePolygon",
11: "MultiCurve",
12: "MultiSurface",
15: "PolyhedralSurface",
16: "TIN",
17: "Triangle",
100: "None",
101: "LinearRing",
102: "PointZ",
1008: "CircularStringZ",
1009: "CompoundCurveZ",
1010: "CurvePolygonZ",
1011: "MultiCurveZ",
1012: "MultiSurfaceZ",
1013: "CurveZ",
1014: "SurfaceZ",
1015: "PolyhedralSurfaceZ",
1016: "TINZ",
1017: "TriangleZ",
2001: "PointM",
2002: "LineStringM",
2003: "PolygonM",
2004: "MultiPointM",
2005: "MultiLineStringM",
2006: "MultiPolygonM",
2007: "GeometryCollectionM",
2008: "CircularStringM",
2009: "CompoundCurveM",
2010: "CurvePolygonM",
2011: "MultiCurveM",
2012: "MultiSurfaceM",
2015: "PolyhedralSurfaceM",
2016: "TINM",
2017: "TriangleM",
3001: "PointZM",
3002: "LineStringZM",
3003: "PolygonZM",
3004: "MultiPointZM",
3005: "MultiLineStringZM",
3006: "MultiPolygonZM",
3007: "GeometryCollectionZM",
3008: "CircularStringZM",
3009: "CompoundCurveZM",
3010: "CurvePolygonZM",
3011: "MultiCurveZM",
3012: "MultiSurfaceZM",
3015: "PolyhedralSurfaceZM",
3016: "TINZM",
3017: "TriangleZM",
1 + wkb25bit: "Point25D",
2 + wkb25bit: "LineString25D",
3 + wkb25bit: "Polygon25D",
4 + wkb25bit: "MultiPoint25D",
5 + wkb25bit: "MultiLineString25D",
6 + wkb25bit: "MultiPolygon25D",
7 + wkb25bit: "GeometryCollection25D",
}
# Reverse type dictionary, keyed by lowercase of the name.
_str_types = {v.lower(): k for k, v in _types.items()}
def __init__(self, type_input):
"Figure out the correct OGR Type based upon the input."
if isinstance(type_input, OGRGeomType):
num = type_input.num
elif isinstance(type_input, str):
type_input = type_input.lower()
if type_input == "geometry":
type_input = "unknown"
num = self._str_types.get(type_input)
if num is None:
raise GDALException('Invalid OGR String Type "%s"' % type_input)
elif isinstance(type_input, int):
if type_input not in self._types:
raise GDALException("Invalid OGR Integer Type: %d" % type_input)
num = type_input
else:
raise TypeError("Invalid OGR input type given.")
# Setting the OGR geometry type number.
self.num = num
def __str__(self):
"Return the value of the name property."
return self.name
def __repr__(self):
return f"<{self.__class__.__qualname__}: {self.name}>"
def __eq__(self, other):
"""
Do an equivalence test on the OGR type with the given
other OGRGeomType, the short-hand string, or the integer.
"""
if isinstance(other, OGRGeomType):
return self.num == other.num
elif isinstance(other, str):
return self.name.lower() == other.lower()
elif isinstance(other, int):
return self.num == other
else:
return False
@property
def name(self):
"Return a short-hand string form of the OGR Geometry type."
return self._types[self.num]
@property
def django(self):
"Return the Django GeometryField for this OGR Type."
s = self.name.replace("25D", "")
if s in ("LinearRing", "None"):
return None
elif s == "Unknown":
s = "Geometry"
elif s == "PointZ":
s = "Point"
return s + "Field"
def to_multi(self):
"""
Transform Point, LineString, Polygon, and their 25D equivalents
to their Multi... counterpart.
"""
if self.name.startswith(("Point", "LineString", "Polygon")):
self.num += 3
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/django/contrib/gis/gdal/libgdal.py | django/contrib/gis/gdal/libgdal.py | import logging
import os
import re
from ctypes import CDLL, CFUNCTYPE, c_char_p, c_int
from ctypes.util import find_library
from django.contrib.gis.gdal.error import GDALException
from django.core.exceptions import ImproperlyConfigured
logger = logging.getLogger("django.contrib.gis")
# Custom library path set?
try:
from django.conf import settings
lib_path = settings.GDAL_LIBRARY_PATH
except (AttributeError, ImportError, ImproperlyConfigured, OSError):
lib_path = None
if lib_path:
lib_names = None
elif os.name == "nt":
# Windows NT shared libraries
lib_names = [
"gdal312",
"gdal311",
"gdal310",
"gdal309",
"gdal308",
"gdal307",
"gdal306",
"gdal305",
"gdal304",
"gdal303",
]
elif os.name == "posix":
# *NIX library names.
lib_names = [
"gdal",
"GDAL",
"gdal3.12.0",
"gdal3.11.0",
"gdal3.10.0",
"gdal3.9.0",
"gdal3.8.0",
"gdal3.7.0",
"gdal3.6.0",
"gdal3.5.0",
"gdal3.4.0",
"gdal3.3.0",
]
else:
raise ImproperlyConfigured('GDAL is unsupported on OS "%s".' % os.name)
# Using the ctypes `find_library` utility to find the
# path to the GDAL library from the list of library names.
if lib_names:
for lib_name in lib_names:
lib_path = find_library(lib_name)
if lib_path is not None:
break
if lib_path is None:
raise ImproperlyConfigured(
'Could not find the GDAL library (tried "%s"). Is GDAL installed? '
"If it is, try setting GDAL_LIBRARY_PATH in your settings."
% '", "'.join(lib_names)
)
# This loads the GDAL/OGR C library
lgdal = CDLL(lib_path)
# On Windows, the GDAL binaries have some OSR routines exported with
# STDCALL, while others are not. Thus, the library will also need to
# be loaded up as WinDLL for said OSR functions that require the
# different calling convention.
if os.name == "nt":
from ctypes import WinDLL
lwingdal = WinDLL(lib_path)
def std_call(func):
"""
Return the correct STDCALL function for certain OSR routines on Win32
platforms.
"""
if os.name == "nt":
return lwingdal[func]
else:
return lgdal[func]
# #### Version-information functions. ####
# Return GDAL library version information with the given key.
_version_info = std_call("GDALVersionInfo")
_version_info.argtypes = [c_char_p]
_version_info.restype = c_char_p
def gdal_version():
"Return only the GDAL version number information."
return _version_info(b"RELEASE_NAME")
def gdal_full_version():
"Return the full GDAL version information."
return _version_info(b"")
def gdal_version_info():
ver = gdal_version()
m = re.match(rb"^(?P<major>\d+)\.(?P<minor>\d+)(?:\.(?P<subminor>\d+))?", ver)
if not m:
raise GDALException('Could not parse GDAL version string "%s"' % ver)
major, minor, subminor = m.groups()
return (int(major), int(minor), subminor and int(subminor))
GDAL_VERSION = gdal_version_info()
# Set library error handling so as errors are logged
CPLErrorHandler = CFUNCTYPE(None, c_int, c_int, c_char_p)
def err_handler(error_class, error_number, message):
logger.error("GDAL_ERROR %d: %s", error_number, message)
err_handler = CPLErrorHandler(err_handler)
def function(name, args, restype):
func = std_call(name)
func.argtypes = args
func.restype = restype
return func
set_error_handler = function("CPLSetErrorHandler", [CPLErrorHandler], CPLErrorHandler)
set_error_handler(err_handler)
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/django/contrib/gis/gdal/__init__.py | django/contrib/gis/gdal/__init__.py | """
This module houses ctypes interfaces for GDAL objects. The following GDAL
objects are supported:
CoordTransform: Used for coordinate transformations from one spatial
reference system to another.
Driver: Wraps an OGR data source driver.
DataSource: Wrapper for the OGR data source object, supports
OGR-supported data sources.
Envelope: A ctypes structure for bounding boxes (GDAL library
not required).
OGRGeometry: Object for accessing OGR Geometry functionality.
OGRGeomType: A class for representing the different OGR Geometry
types (GDAL library not required).
SpatialReference: Represents OSR Spatial Reference objects.
The GDAL library will be imported from the system path using the default
library name for the current OS. The default library path may be overridden
by setting `GDAL_LIBRARY_PATH` in your settings with the path to the GDAL C
library on your system.
"""
from django.contrib.gis.gdal.datasource import DataSource
from django.contrib.gis.gdal.driver import Driver
from django.contrib.gis.gdal.envelope import Envelope
from django.contrib.gis.gdal.error import GDALException, SRSException, check_err
from django.contrib.gis.gdal.geometries import OGRGeometry
from django.contrib.gis.gdal.geomtype import OGRGeomType
from django.contrib.gis.gdal.libgdal import (
GDAL_VERSION,
gdal_full_version,
gdal_version,
)
from django.contrib.gis.gdal.raster.source import GDALRaster
from django.contrib.gis.gdal.srs import AxisOrder, CoordTransform, SpatialReference
__all__ = (
"AxisOrder",
"Driver",
"DataSource",
"CoordTransform",
"Envelope",
"GDALException",
"GDALRaster",
"GDAL_VERSION",
"OGRGeometry",
"OGRGeomType",
"SpatialReference",
"SRSException",
"check_err",
"gdal_version",
"gdal_full_version",
)
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/django/contrib/gis/gdal/srs.py | django/contrib/gis/gdal/srs.py | """
The Spatial Reference class, represents OGR Spatial Reference objects.
Example:
>>> from django.contrib.gis.gdal import SpatialReference
>>> srs = SpatialReference('WGS84')
>>> print(srs)
GEOGCS["WGS 84",
DATUM["WGS_1984",
SPHEROID["WGS 84",6378137,298.257223563,
AUTHORITY["EPSG","7030"]],
TOWGS84[0,0,0,0,0,0,0],
AUTHORITY["EPSG","6326"]],
PRIMEM["Greenwich",0,
AUTHORITY["EPSG","8901"]],
UNIT["degree",0.01745329251994328,
AUTHORITY["EPSG","9122"]],
AUTHORITY["EPSG","4326"]]
>>> print(srs.proj)
+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs
>>> print(srs.ellipsoid)
(6378137.0, 6356752.3142451793, 298.25722356300003)
>>> print(srs.projected, srs.geographic)
False True
>>> srs.import_epsg(32140)
>>> print(srs.name)
NAD83 / Texas South Central
"""
from ctypes import byref, c_char_p, c_int
from enum import IntEnum
from types import NoneType
from django.contrib.gis.gdal.base import GDALBase
from django.contrib.gis.gdal.error import SRSException
from django.contrib.gis.gdal.prototypes import srs as capi
from django.utils.encoding import force_bytes, force_str
class AxisOrder(IntEnum):
TRADITIONAL = 0
AUTHORITY = 1
class SpatialReference(GDALBase):
"""
A wrapper for the OGRSpatialReference object. According to the GDAL web
site, the SpatialReference object "provide[s] services to represent
coordinate systems (projections and datums) and to transform between them."
"""
destructor = capi.release_srs
def __init__(self, srs_input="", srs_type="user", axis_order=None):
"""
Create a GDAL OSR Spatial Reference object from the given input.
The input may be string of OGC Well Known Text (WKT), an integer
EPSG code, a PROJ string, and/or a projection "well known" shorthand
string (one of 'WGS84', 'WGS72', 'NAD27', 'NAD83').
"""
if not isinstance(axis_order, (NoneType, AxisOrder)):
raise ValueError(
"SpatialReference.axis_order must be an AxisOrder instance."
)
self.axis_order = axis_order or AxisOrder.TRADITIONAL
if srs_type == "wkt":
self.ptr = capi.new_srs(c_char_p(b""))
self.import_wkt(srs_input)
if self.axis_order == AxisOrder.TRADITIONAL:
capi.set_axis_strategy(self.ptr, self.axis_order)
return
elif isinstance(srs_input, str):
try:
# If SRID is a string, e.g., '4326', then make acceptable
# as user input.
srid = int(srs_input)
srs_input = "EPSG:%d" % srid
except ValueError:
pass
elif isinstance(srs_input, int):
# EPSG integer code was input.
srs_type = "epsg"
elif isinstance(srs_input, self.ptr_type):
srs = srs_input
srs_type = "ogr"
else:
raise TypeError('Invalid SRS type "%s"' % srs_type)
if srs_type == "ogr":
# Input is already an SRS pointer.
srs = srs_input
else:
# Creating a new SRS pointer, using the string buffer.
buf = c_char_p(b"")
srs = capi.new_srs(buf)
# If the pointer is NULL, throw an exception.
if not srs:
raise SRSException(
"Could not create spatial reference from: %s" % srs_input
)
else:
self.ptr = srs
if self.axis_order == AxisOrder.TRADITIONAL:
capi.set_axis_strategy(self.ptr, self.axis_order)
# Importing from either the user input string or an integer SRID.
if srs_type == "user":
self.import_user_input(srs_input)
elif srs_type == "epsg":
self.import_epsg(srs_input)
def __getitem__(self, target):
"""
Return the value of the given string attribute node, None if the node
doesn't exist. Can also take a tuple as a parameter, (target, child),
where child is the index of the attribute in the WKT. For example:
>>> wkt = (
... 'GEOGCS["WGS 84",'
... ' DATUM["WGS_1984, ... AUTHORITY["EPSG","4326"]'
... ']'
... )
>>> srs = SpatialReference(wkt) # could also use 'WGS84', or 4326
>>> print(srs['GEOGCS'])
WGS 84
>>> print(srs['DATUM'])
WGS_1984
>>> print(srs['AUTHORITY'])
EPSG
>>> print(srs['AUTHORITY', 1]) # The authority value
4326
>>> print(srs['TOWGS84', 4]) # the fourth value in this wkt
0
>>> # For the units authority, have to use the pipe symbole.
>>> print(srs['UNIT|AUTHORITY'])
EPSG
>>> print(srs['UNIT|AUTHORITY', 1]) # The authority value for the units
9122
"""
if isinstance(target, tuple):
return self.attr_value(*target)
else:
return self.attr_value(target)
def __str__(self):
"Use 'pretty' WKT."
return self.pretty_wkt
# #### SpatialReference Methods ####
def attr_value(self, target, index=0):
"""
The attribute value for the given target node (e.g. 'PROJCS'). The
index keyword specifies an index of the child node to return.
"""
if not isinstance(target, str) or not isinstance(index, int):
raise TypeError
return capi.get_attr_value(self.ptr, force_bytes(target), index)
def auth_name(self, target):
"Return the authority name for the given string target node."
return capi.get_auth_name(
self.ptr, target if target is None else force_bytes(target)
)
def auth_code(self, target):
"Return the authority code for the given string target node."
return capi.get_auth_code(
self.ptr, target if target is None else force_bytes(target)
)
def clone(self):
"Return a clone of this SpatialReference object."
return SpatialReference(capi.clone_srs(self.ptr), axis_order=self.axis_order)
def from_esri(self):
"Morph this SpatialReference from ESRI's format to EPSG."
capi.morph_from_esri(self.ptr)
def identify_epsg(self):
"""
This method inspects the WKT of this SpatialReference, and will
add EPSG authority nodes where an EPSG identifier is applicable.
"""
capi.identify_epsg(self.ptr)
def to_esri(self):
"Morph this SpatialReference to ESRI's format."
capi.morph_to_esri(self.ptr)
def validate(self):
"Check to see if the given spatial reference is valid."
capi.srs_validate(self.ptr)
# #### Name & SRID properties ####
@property
def name(self):
"Return the name of this Spatial Reference."
if self.projected:
return self.attr_value("PROJCS")
elif self.geographic:
return self.attr_value("GEOGCS")
elif self.local:
return self.attr_value("LOCAL_CS")
else:
return None
@property
def srid(self):
"Return the SRID of top-level authority, or None if undefined."
try:
return int(self.auth_code(target=None))
except (TypeError, ValueError):
return None
# #### Unit Properties ####
@property
def linear_name(self):
"Return the name of the linear units."
units, name = capi.linear_units(self.ptr, byref(c_char_p()))
return name
@property
def linear_units(self):
"Return the value of the linear units."
units, name = capi.linear_units(self.ptr, byref(c_char_p()))
return units
@property
def angular_name(self):
"Return the name of the angular units."
units, name = capi.angular_units(self.ptr, byref(c_char_p()))
return name
@property
def angular_units(self):
"Return the value of the angular units."
units, name = capi.angular_units(self.ptr, byref(c_char_p()))
return units
@property
def units(self):
"""
Return a 2-tuple of the units value and the units name. Automatically
determine whether to return the linear or angular units.
"""
units, name = None, None
if self.projected or self.local:
units, name = capi.linear_units(self.ptr, byref(c_char_p()))
elif self.geographic:
units, name = capi.angular_units(self.ptr, byref(c_char_p()))
if name is not None:
name = force_str(name)
return (units, name)
# #### Spheroid/Ellipsoid Properties ####
@property
def ellipsoid(self):
"""
Return a tuple of the ellipsoid parameters:
(semimajor axis, semiminor axis, and inverse flattening)
"""
return (self.semi_major, self.semi_minor, self.inverse_flattening)
@property
def semi_major(self):
"Return the Semi Major Axis for this Spatial Reference."
return capi.semi_major(self.ptr, byref(c_int()))
@property
def semi_minor(self):
"Return the Semi Minor Axis for this Spatial Reference."
return capi.semi_minor(self.ptr, byref(c_int()))
@property
def inverse_flattening(self):
"Return the Inverse Flattening for this Spatial Reference."
return capi.invflattening(self.ptr, byref(c_int()))
# #### Boolean Properties ####
@property
def geographic(self):
"""
Return True if this SpatialReference is geographic
(root node is GEOGCS).
"""
return bool(capi.isgeographic(self.ptr))
@property
def local(self):
"""
Return True if this SpatialReference is local (root node is LOCAL_CS).
"""
return bool(capi.islocal(self.ptr))
@property
def projected(self):
"""
Return True if this SpatialReference is a projected coordinate system
(root node is PROJCS).
"""
return bool(capi.isprojected(self.ptr))
# #### Import Routines #####
def import_epsg(self, epsg):
"Import the Spatial Reference from the EPSG code (an integer)."
capi.from_epsg(self.ptr, epsg)
def import_proj(self, proj):
"""Import the Spatial Reference from a PROJ string."""
capi.from_proj(self.ptr, proj)
def import_user_input(self, user_input):
"Import the Spatial Reference from the given user input string."
capi.from_user_input(self.ptr, force_bytes(user_input))
def import_wkt(self, wkt):
"Import the Spatial Reference from OGC WKT (string)"
capi.from_wkt(self.ptr, byref(c_char_p(force_bytes(wkt))))
def import_xml(self, xml):
"Import the Spatial Reference from an XML string."
capi.from_xml(self.ptr, xml)
# #### Export Properties ####
@property
def wkt(self):
"Return the WKT representation of this Spatial Reference."
return capi.to_wkt(self.ptr, byref(c_char_p()))
@property
def pretty_wkt(self, simplify=0):
"Return the 'pretty' representation of the WKT."
return capi.to_pretty_wkt(self.ptr, byref(c_char_p()), simplify)
@property
def proj(self):
"""Return the PROJ representation for this Spatial Reference."""
return capi.to_proj(self.ptr, byref(c_char_p()))
@property
def proj4(self):
"Alias for proj()."
return self.proj
@property
def xml(self, dialect=""):
"Return the XML representation of this Spatial Reference."
return capi.to_xml(self.ptr, byref(c_char_p()), force_bytes(dialect))
class CoordTransform(GDALBase):
"The coordinate system transformation object."
destructor = capi.destroy_ct
def __init__(self, source, target):
"Initialize on a source and target SpatialReference objects."
if not isinstance(source, SpatialReference) or not isinstance(
target, SpatialReference
):
raise TypeError("source and target must be of type SpatialReference")
self.ptr = capi.new_ct(source._ptr, target._ptr)
self._srs1_name = source.name
self._srs2_name = target.name
def __str__(self):
return 'Transform from "%s" to "%s"' % (self._srs1_name, self._srs2_name)
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/django/contrib/gis/gdal/feature.py | django/contrib/gis/gdal/feature.py | from django.contrib.gis.gdal.base import GDALBase
from django.contrib.gis.gdal.error import GDALException
from django.contrib.gis.gdal.field import Field
from django.contrib.gis.gdal.geometries import OGRGeometry, OGRGeomType
from django.contrib.gis.gdal.prototypes import ds as capi
from django.contrib.gis.gdal.prototypes import geom as geom_api
from django.utils.encoding import force_bytes, force_str
# For more information, see the OGR C API source code:
# https://gdal.org/api/vector_c_api.html
#
# The OGR_F_* routines are relevant here.
class Feature(GDALBase):
"""
This class that wraps an OGR Feature, needs to be instantiated
from a Layer object.
"""
destructor = capi.destroy_feature
def __init__(self, feat, layer):
"""
Initialize Feature from a pointer and its Layer object.
"""
if not feat:
raise GDALException("Cannot create OGR Feature, invalid pointer given.")
self.ptr = feat
self._layer = layer
def __getitem__(self, index):
"""
Get the Field object at the specified index, which may be either
an integer or the Field's string label. Note that the Field object
is not the field's _value_ -- use the `get` method instead to
retrieve the value (e.g. an integer) instead of a Field instance.
"""
if isinstance(index, str):
i = self.index(index)
elif 0 <= index < self.num_fields:
i = index
else:
raise IndexError(
"Index out of range when accessing field in a feature: %s." % index
)
return Field(self, i)
def __len__(self):
"Return the count of fields in this feature."
return self.num_fields
def __str__(self):
"The string name of the feature."
return "Feature FID %d in Layer<%s>" % (self.fid, self.layer_name)
def __eq__(self, other):
"Do equivalence testing on the features."
return bool(capi.feature_equal(self.ptr, other._ptr))
# #### Feature Properties ####
@property
def encoding(self):
return self._layer._ds.encoding
@property
def fid(self):
"Return the feature identifier."
return capi.get_fid(self.ptr)
@property
def layer_name(self):
"Return the name of the layer for the feature."
name = capi.get_feat_name(self._layer._ldefn)
return force_str(name, self.encoding, strings_only=True)
@property
def num_fields(self):
"Return the number of fields in the Feature."
return capi.get_feat_field_count(self.ptr)
@property
def fields(self):
"Return a list of fields in the Feature."
return [
force_str(
capi.get_field_name(capi.get_field_defn(self._layer._ldefn, i)),
self.encoding,
strings_only=True,
)
for i in range(self.num_fields)
]
@property
def geom(self):
"Return the OGR Geometry for this Feature."
# Retrieving the geometry pointer for the feature.
geom_ptr = capi.get_feat_geom_ref(self.ptr)
return OGRGeometry(geom_api.clone_geom(geom_ptr))
@property
def geom_type(self):
"Return the OGR Geometry Type for this Feature."
return OGRGeomType(capi.get_fd_geom_type(self._layer._ldefn))
# #### Feature Methods ####
def get(self, field):
"""
Return the value of the field, instead of an instance of the Field
object. May take a string of the field name or a Field object as
parameters.
"""
field_name = getattr(field, "name", field)
return self[field_name].value
def index(self, field_name):
"Return the index of the given field name."
i = capi.get_field_index(self.ptr, force_bytes(field_name))
if i < 0:
raise IndexError("Invalid OFT field name given: %s." % field_name)
return i
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/django/contrib/gis/gdal/geometries.py | django/contrib/gis/gdal/geometries.py | """
The OGRGeometry is a wrapper for using the OGR Geometry class
(see https://gdal.org/api/ogrgeometry_cpp.html#_CPPv411OGRGeometry).
OGRGeometry may be instantiated when reading geometries from OGR Data Sources
(e.g. SHP files), or when given OGC WKT (a string).
While the 'full' API is not present yet, the API is "pythonic" unlike
the traditional and "next-generation" OGR Python bindings. One major
advantage OGR Geometries have over their GEOS counterparts is support
for spatial reference systems and their transformation.
Example:
>>> from django.contrib.gis.gdal import (
... OGRGeometry, OGRGeomType, SpatialReference
... )
>>> wkt1, wkt2 = 'POINT(-90 30)', 'POLYGON((0 0, 5 0, 5 5, 0 5)'
>>> pnt = OGRGeometry(wkt1)
>>> print(pnt)
POINT (-90 30)
>>> mpnt = OGRGeometry(OGRGeomType('MultiPoint'), SpatialReference('WGS84'))
>>> mpnt.add(wkt1)
>>> mpnt.add(wkt1)
>>> print(mpnt)
MULTIPOINT (-90 30,-90 30)
>>> print(mpnt.srs.name)
WGS 84
>>> print(mpnt.srs.proj)
+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs
>>> mpnt.transform(SpatialReference('NAD27'))
>>> print(mpnt.proj)
+proj=longlat +ellps=clrk66 +datum=NAD27 +no_defs
>>> print(mpnt)
MULTIPOINT (-89.99993037860248 29.99979788655764,-89.99993037860248
29.99979788655764)
The OGRGeomType class is to make it easy to specify an OGR geometry type:
>>> from django.contrib.gis.gdal import OGRGeomType
>>> gt1 = OGRGeomType(3) # Using an integer for the type
>>> gt2 = OGRGeomType('Polygon') # Using a string
>>> gt3 = OGRGeomType('POLYGON') # It's case-insensitive
>>> # Equivalence works w/non-OGRGeomType objects:
>>> print(gt1 == 3, gt1 == 'Polygon')
True True
"""
import sys
from binascii import b2a_hex
from ctypes import byref, c_char_p, c_double, c_ubyte, c_void_p, string_at
from django.contrib.gis.gdal.base import GDALBase
from django.contrib.gis.gdal.envelope import Envelope, OGREnvelope
from django.contrib.gis.gdal.error import GDALException, SRSException
from django.contrib.gis.gdal.geomtype import OGRGeomType
from django.contrib.gis.gdal.prototypes import geom as capi
from django.contrib.gis.gdal.prototypes import srs as srs_api
from django.contrib.gis.gdal.srs import CoordTransform, SpatialReference
from django.contrib.gis.geometry import hex_regex, json_regex, wkt_regex
from django.utils.encoding import force_bytes
# For more information, see the OGR C API source code:
# https://gdal.org/api/vector_c_api.html
#
# The OGR_G_* routines are relevant here.
class OGRGeometry(GDALBase):
"""Encapsulate an OGR geometry."""
destructor = capi.destroy_geom
geos_support = True
def __init__(self, geom_input, srs=None):
"""Initialize Geometry on either WKT or an OGR pointer as input."""
str_instance = isinstance(geom_input, str)
# If HEX, unpack input to a binary buffer.
if str_instance and hex_regex.match(geom_input):
geom_input = memoryview(bytes.fromhex(geom_input))
str_instance = False
# Constructing the geometry,
if str_instance:
wkt_m = wkt_regex.match(geom_input)
json_m = json_regex.match(geom_input)
if wkt_m:
if wkt_m["srid"]:
# If there's EWKT, set the SRS w/value of the SRID.
srs = int(wkt_m["srid"])
if wkt_m["type"].upper() == "LINEARRING":
# OGR_G_CreateFromWkt doesn't work with LINEARRING WKT.
# See https://trac.osgeo.org/gdal/ticket/1992.
g = capi.create_geom(OGRGeomType(wkt_m["type"]).num)
capi.import_wkt(g, byref(c_char_p(wkt_m["wkt"].encode())))
else:
g = capi.from_wkt(
byref(c_char_p(wkt_m["wkt"].encode())), None, byref(c_void_p())
)
elif json_m:
g = self._from_json(geom_input.encode())
else:
# Seeing if the input is a valid short-hand string
# (e.g., 'Point', 'POLYGON').
OGRGeomType(geom_input)
g = capi.create_geom(OGRGeomType(geom_input).num)
elif isinstance(geom_input, memoryview):
# WKB was passed in
g = self._from_wkb(geom_input)
elif isinstance(geom_input, OGRGeomType):
# OGRGeomType was passed in, an empty geometry will be created.
g = capi.create_geom(geom_input.num)
elif isinstance(geom_input, self.ptr_type):
# OGR pointer (c_void_p) was the input.
g = geom_input
else:
raise GDALException(
"Invalid input type for OGR Geometry construction: %s"
% type(geom_input)
)
# Now checking the Geometry pointer before finishing initialization
# by setting the pointer for the object.
if not g:
raise GDALException(
"Cannot create OGR Geometry from input: %s" % geom_input
)
self.ptr = g
# Assigning the SpatialReference object to the geometry, if valid.
if srs:
self.srs = srs
# Setting the class depending upon the OGR Geometry Type
if (geo_class := GEO_CLASSES.get(self.geom_type.num)) is None:
raise TypeError(f"Unsupported geometry type: {self.geom_type}")
self.__class__ = geo_class
# Pickle routines
def __getstate__(self):
srs = self.srs
if srs:
srs = srs.wkt
else:
srs = None
return bytes(self.wkb), srs
def __setstate__(self, state):
wkb, srs = state
ptr = capi.from_wkb(wkb, None, byref(c_void_p()), len(wkb))
if not ptr:
raise GDALException("Invalid OGRGeometry loaded from pickled state.")
self.ptr = ptr
self.srs = srs
@classmethod
def _from_wkb(cls, geom_input):
return capi.from_wkb(
bytes(geom_input), None, byref(c_void_p()), len(geom_input)
)
@staticmethod
def _from_json(geom_input):
return capi.from_json(geom_input)
@classmethod
def from_bbox(cls, bbox):
"Construct a Polygon from a bounding box (4-tuple)."
x0, y0, x1, y1 = bbox
return OGRGeometry(
"POLYGON((%s %s, %s %s, %s %s, %s %s, %s %s))"
% (x0, y0, x0, y1, x1, y1, x1, y0, x0, y0)
)
@staticmethod
def from_json(geom_input):
return OGRGeometry(OGRGeometry._from_json(force_bytes(geom_input)))
@classmethod
def from_gml(cls, gml_string):
return cls(capi.from_gml(force_bytes(gml_string)))
# ### Geometry set-like operations ###
# g = g1 | g2
def __or__(self, other):
"Return the union of the two geometries."
return self.union(other)
# g = g1 & g2
def __and__(self, other):
"Return the intersection of this Geometry and the other."
return self.intersection(other)
# g = g1 - g2
def __sub__(self, other):
"Return the difference this Geometry and the other."
return self.difference(other)
# g = g1 ^ g2
def __xor__(self, other):
"Return the symmetric difference of this Geometry and the other."
return self.sym_difference(other)
def __eq__(self, other):
"Is this Geometry equal to the other?"
return isinstance(other, OGRGeometry) and self.equals(other)
def __str__(self):
"WKT is used for the string representation."
return self.wkt
# #### Geometry Properties ####
@property
def dimension(self):
"Return 0 for points, 1 for lines, and 2 for surfaces."
return capi.get_dims(self.ptr)
@property
def coord_dim(self):
"Return the coordinate dimension of the Geometry."
return capi.get_coord_dim(self.ptr)
@property
def geom_count(self):
"Return the number of elements in this Geometry."
return capi.get_geom_count(self.ptr)
@property
def point_count(self):
"Return the number of Points in this Geometry."
return capi.get_point_count(self.ptr)
@property
def num_points(self):
"Alias for `point_count` (same name method in GEOS API.)"
return self.point_count
@property
def num_coords(self):
"Alias for `point_count`."
return self.point_count
@property
def geom_type(self):
"Return the Type for this Geometry."
return OGRGeomType(capi.get_geom_type(self.ptr))
@property
def geom_name(self):
"Return the Name of this Geometry."
return capi.get_geom_name(self.ptr)
@property
def area(self):
"""
Return the area for a LinearRing, Polygon, or MultiPolygon; 0
otherwise.
"""
return capi.get_area(self.ptr)
@property
def envelope(self):
"Return the envelope for this Geometry."
# TODO: Fix Envelope() for Point geometries.
return Envelope(capi.get_envelope(self.ptr, byref(OGREnvelope())))
@property
def empty(self):
return capi.is_empty(self.ptr)
@property
def extent(self):
"Return the envelope as a 4-tuple, instead of as an Envelope object."
return self.envelope.tuple
@property
def is_3d(self):
"""Return True if the geometry has Z coordinates."""
return capi.is_3d(self.ptr)
def set_3d(self, value):
"""Set if this geometry has Z coordinates."""
if value is True:
capi.set_3d(self.ptr, 1)
elif value is False:
capi.set_3d(self.ptr, 0)
else:
raise ValueError(f"Input to 'set_3d' must be a boolean, got '{value!r}'.")
@property
def is_measured(self):
"""Return True if the geometry has M coordinates."""
return capi.is_measured(self.ptr)
def set_measured(self, value):
"""Set if this geometry has M coordinates."""
if value is True:
capi.set_measured(self.ptr, 1)
elif value is False:
capi.set_measured(self.ptr, 0)
else:
raise ValueError(
f"Input to 'set_measured' must be a boolean, got '{value!r}'."
)
@property
def has_curve(self):
"""Return True if the geometry is or has curve geometry."""
return capi.has_curve_geom(self.ptr, 0)
def get_linear_geometry(self):
"""Return a linear version of this geometry."""
return OGRGeometry(capi.get_linear_geom(self.ptr, 0, None))
def get_curve_geometry(self):
"""Return a curve version of this geometry."""
return OGRGeometry(capi.get_curve_geom(self.ptr, None))
# #### SpatialReference-related Properties ####
# The SRS property
def _get_srs(self):
"Return the Spatial Reference for this Geometry."
try:
srs_ptr = capi.get_geom_srs(self.ptr)
return SpatialReference(srs_api.clone_srs(srs_ptr))
except SRSException:
return None
def _set_srs(self, srs):
"Set the SpatialReference for this geometry."
# Do not have to clone the `SpatialReference` object pointer because
# when it is assigned to this `OGRGeometry` it's internal OGR
# reference count is incremented, and will likewise be released
# (decremented) when this geometry's destructor is called.
if isinstance(srs, SpatialReference):
srs_ptr = srs.ptr
elif isinstance(srs, (int, str)):
sr = SpatialReference(srs)
srs_ptr = sr.ptr
elif srs is None:
srs_ptr = None
else:
raise TypeError(
"Cannot assign spatial reference with object of type: %s" % type(srs)
)
capi.assign_srs(self.ptr, srs_ptr)
srs = property(_get_srs, _set_srs)
# The SRID property
def _get_srid(self):
srs = self.srs
if srs:
return srs.srid
return None
def _set_srid(self, srid):
if isinstance(srid, int) or srid is None:
self.srs = srid
else:
raise TypeError("SRID must be set with an integer.")
srid = property(_get_srid, _set_srid)
# #### Output Methods ####
def _geos_ptr(self):
from django.contrib.gis.geos import GEOSGeometry
return GEOSGeometry._from_wkb(self.wkb)
@property
def geos(self):
"Return a GEOSGeometry object from this OGRGeometry."
if self.geos_support:
from django.contrib.gis.geos import GEOSGeometry
return GEOSGeometry(self._geos_ptr(), self.srid)
else:
from django.contrib.gis.geos import GEOSException
raise GEOSException(f"GEOS does not support {self.__class__.__qualname__}.")
@property
def gml(self):
"Return the GML representation of the Geometry."
return capi.to_gml(self.ptr)
@property
def hex(self):
"Return the hexadecimal representation of the WKB (a string)."
return b2a_hex(self.wkb).upper()
@property
def json(self):
"""
Return the GeoJSON representation of this Geometry.
"""
return capi.to_json(self.ptr)
geojson = json
@property
def kml(self):
"Return the KML representation of the Geometry."
return capi.to_kml(self.ptr, None)
@property
def wkb_size(self):
"Return the size of the WKB buffer."
return capi.get_wkbsize(self.ptr)
@property
def wkb(self):
"Return the WKB representation of the Geometry."
if sys.byteorder == "little":
byteorder = 1 # wkbNDR (from ogr_core.h)
else:
byteorder = 0 # wkbXDR
sz = self.wkb_size
# Creating the unsigned character buffer, and passing it in by
# reference.
buf = (c_ubyte * sz)()
# For backward compatibility, export old-style 99-402 extended
# dimension types when geometry does not have an M dimension.
# https://gdal.org/api/vector_c_api.html#_CPPv417OGR_G_ExportToWkb12OGRGeometryH15OGRwkbByteOrderPh
to_wkb = capi.to_iso_wkb if self.is_measured else capi.to_wkb
to_wkb(self.ptr, byteorder, byref(buf))
# Returning a buffer of the string at the pointer.
return memoryview(string_at(buf, sz))
@property
def wkt(self):
"Return the WKT representation of the Geometry."
# For backward compatibility, export old-style 99-402 extended
# dimension types when geometry does not have an M dimension.
# https://gdal.org/api/vector_c_api.html#_CPPv417OGR_G_ExportToWkt12OGRGeometryHPPc
to_wkt = capi.to_iso_wkt if self.is_measured else capi.to_wkt
return to_wkt(self.ptr, byref(c_char_p()))
@property
def ewkt(self):
"Return the EWKT representation of the Geometry."
srs = self.srs
if srs and srs.srid:
return "SRID=%s;%s" % (srs.srid, self.wkt)
else:
return self.wkt
# #### Geometry Methods ####
def clone(self):
"Clone this OGR Geometry."
return OGRGeometry(capi.clone_geom(self.ptr), self.srs)
def close_rings(self):
"""
If there are any rings within this geometry that have not been
closed, this routine will do so by adding the starting point at the
end.
"""
# Closing the open rings.
capi.geom_close_rings(self.ptr)
def transform(self, coord_trans, clone=False):
"""
Transform this geometry to a different spatial reference system.
May take a CoordTransform object, a SpatialReference object, string
WKT or PROJ, and/or an integer SRID. By default, return nothing
and transform the geometry in-place. However, if the `clone` keyword is
set, return a transformed clone of this geometry.
"""
if clone:
klone = self.clone()
klone.transform(coord_trans)
return klone
# Depending on the input type, use the appropriate OGR routine
# to perform the transformation.
if isinstance(coord_trans, CoordTransform):
capi.geom_transform(self.ptr, coord_trans.ptr)
elif isinstance(coord_trans, SpatialReference):
capi.geom_transform_to(self.ptr, coord_trans.ptr)
elif isinstance(coord_trans, (int, str)):
sr = SpatialReference(coord_trans)
capi.geom_transform_to(self.ptr, sr.ptr)
else:
raise TypeError(
"Transform only accepts CoordTransform, "
"SpatialReference, string, and integer objects."
)
# #### Topology Methods ####
def _topology(self, func, other):
"""A generalized function for topology operations, takes a GDAL
function and the other geometry to perform the operation on."""
if not isinstance(other, OGRGeometry):
raise TypeError(
"Must use another OGRGeometry object for topology operations!"
)
# Returning the output of the given function with the other geometry's
# pointer.
return func(self.ptr, other.ptr)
def intersects(self, other):
"Return True if this geometry intersects with the other."
return self._topology(capi.ogr_intersects, other)
def equals(self, other):
"Return True if this geometry is equivalent to the other."
return self._topology(capi.ogr_equals, other)
def disjoint(self, other):
"Return True if this geometry and the other are spatially disjoint."
return self._topology(capi.ogr_disjoint, other)
def touches(self, other):
"Return True if this geometry touches the other."
return self._topology(capi.ogr_touches, other)
def crosses(self, other):
"Return True if this geometry crosses the other."
return self._topology(capi.ogr_crosses, other)
def within(self, other):
"Return True if this geometry is within the other."
return self._topology(capi.ogr_within, other)
def contains(self, other):
"Return True if this geometry contains the other."
return self._topology(capi.ogr_contains, other)
def overlaps(self, other):
"Return True if this geometry overlaps the other."
return self._topology(capi.ogr_overlaps, other)
# #### Geometry-generation Methods ####
def _geomgen(self, gen_func, other=None):
"A helper routine for the OGR routines that generate geometries."
if isinstance(other, OGRGeometry):
return OGRGeometry(gen_func(self.ptr, other.ptr), self.srs)
else:
return OGRGeometry(gen_func(self.ptr), self.srs)
@property
def boundary(self):
"Return the boundary of this geometry."
return self._geomgen(capi.get_boundary)
@property
def convex_hull(self):
"""
Return the smallest convex Polygon that contains all the points in
this Geometry.
"""
return self._geomgen(capi.geom_convex_hull)
def difference(self, other):
"""
Return a new geometry consisting of the region which is the difference
of this geometry and the other.
"""
return self._geomgen(capi.geom_diff, other)
def intersection(self, other):
"""
Return a new geometry consisting of the region of intersection of this
geometry and the other.
"""
return self._geomgen(capi.geom_intersection, other)
def sym_difference(self, other):
"""
Return a new geometry which is the symmetric difference of this
geometry and the other.
"""
return self._geomgen(capi.geom_sym_diff, other)
def union(self, other):
"""
Return a new geometry consisting of the region which is the union of
this geometry and the other.
"""
return self._geomgen(capi.geom_union, other)
@property
def centroid(self):
"""Return the centroid (a Point) of this Polygon."""
# The centroid is a Point, create a geometry for this.
p = OGRGeometry(OGRGeomType("Point"))
capi.get_centroid(self.ptr, p.ptr)
return p
# The subclasses for OGR Geometry.
class Point(OGRGeometry):
def _geos_ptr(self):
from django.contrib.gis import geos
return geos.Point._create_empty() if self.empty else super()._geos_ptr()
@classmethod
def _create_empty(cls):
return capi.create_geom(OGRGeomType("point").num)
@property
def x(self):
"Return the X coordinate for this Point."
return capi.getx(self.ptr, 0)
@property
def y(self):
"Return the Y coordinate for this Point."
return capi.gety(self.ptr, 0)
@property
def z(self):
"Return the Z coordinate for this Point."
if self.is_3d:
return capi.getz(self.ptr, 0)
@property
def m(self):
"""Return the M coordinate for this Point."""
if self.is_measured:
return capi.getm(self.ptr, 0)
@property
def tuple(self):
"Return the tuple of this point."
if self.is_3d and self.is_measured:
return self.x, self.y, self.z, self.m
if self.is_3d:
return self.x, self.y, self.z
if self.is_measured:
return self.x, self.y, self.m
return self.x, self.y
coords = tuple
class LineString(OGRGeometry):
def __getitem__(self, index):
"Return the Point at the given index."
if 0 <= index < self.point_count:
x, y, z, m = c_double(), c_double(), c_double(), c_double()
capi.get_point(self.ptr, index, byref(x), byref(y), byref(z), byref(m))
if self.is_3d and self.is_measured:
return x.value, y.value, z.value, m.value
if self.is_3d:
return x.value, y.value, z.value
if self.is_measured:
return x.value, y.value, m.value
dim = self.coord_dim
if dim == 1:
return (x.value,)
elif dim == 2:
return (x.value, y.value)
else:
raise IndexError(
"Index out of range when accessing points of a line string: %s." % index
)
def __len__(self):
"Return the number of points in the LineString."
return self.point_count
@property
def tuple(self):
"Return the tuple representation of this LineString."
return tuple(self[i] for i in range(len(self)))
coords = tuple
def _listarr(self, func):
"""
Internal routine that returns a sequence (list) corresponding with
the given function.
"""
return [func(self.ptr, i) for i in range(len(self))]
@property
def x(self):
"Return the X coordinates in a list."
return self._listarr(capi.getx)
@property
def y(self):
"Return the Y coordinates in a list."
return self._listarr(capi.gety)
@property
def z(self):
"Return the Z coordinates in a list."
if self.is_3d:
return self._listarr(capi.getz)
@property
def m(self):
"""Return the M coordinates in a list."""
if self.is_measured:
return self._listarr(capi.getm)
# LinearRings are used in Polygons.
class LinearRing(LineString):
pass
class Polygon(OGRGeometry):
def __len__(self):
"Return the number of interior rings in this Polygon."
return self.geom_count
def __getitem__(self, index):
"Get the ring at the specified index."
if 0 <= index < self.geom_count:
return OGRGeometry(
capi.clone_geom(capi.get_geom_ref(self.ptr, index)), self.srs
)
else:
raise IndexError(
"Index out of range when accessing rings of a polygon: %s." % index
)
# Polygon Properties
@property
def shell(self):
"Return the shell of this Polygon."
return self[0] # First ring is the shell
exterior_ring = shell
@property
def tuple(self):
"Return a tuple of LinearRing coordinate tuples."
return tuple(self[i].tuple for i in range(self.geom_count))
coords = tuple
@property
def point_count(self):
"Return the number of Points in this Polygon."
# Summing up the number of points in each ring of the Polygon.
return sum(self[i].point_count for i in range(self.geom_count))
class CircularString(LineString):
geos_support = False
class CurvePolygon(Polygon):
geos_support = False
class CompoundCurve(OGRGeometry):
geos_support = False
# Geometry Collection base class.
class GeometryCollection(OGRGeometry):
"The Geometry Collection class."
def __getitem__(self, index):
"Get the Geometry at the specified index."
if 0 <= index < self.geom_count:
return OGRGeometry(
capi.clone_geom(capi.get_geom_ref(self.ptr, index)), self.srs
)
else:
raise IndexError(
"Index out of range when accessing geometry in a collection: %s."
% index
)
def __len__(self):
"Return the number of geometries in this Geometry Collection."
return self.geom_count
def add(self, geom):
"Add the geometry to this Geometry Collection."
if isinstance(geom, OGRGeometry):
if isinstance(geom, self.__class__):
for g in geom:
capi.add_geom(self.ptr, g.ptr)
else:
capi.add_geom(self.ptr, geom.ptr)
elif isinstance(geom, str):
tmp = OGRGeometry(geom)
capi.add_geom(self.ptr, tmp.ptr)
else:
raise GDALException("Must add an OGRGeometry.")
@property
def point_count(self):
"Return the number of Points in this Geometry Collection."
# Summing up the number of points in each geometry in this collection
return sum(self[i].point_count for i in range(self.geom_count))
@property
def tuple(self):
"Return a tuple representation of this Geometry Collection."
return tuple(self[i].tuple for i in range(self.geom_count))
coords = tuple
# Multiple Geometry types.
class MultiPoint(GeometryCollection):
pass
class MultiLineString(GeometryCollection):
pass
class MultiPolygon(GeometryCollection):
pass
class MultiSurface(GeometryCollection):
geos_support = False
class MultiCurve(GeometryCollection):
geos_support = False
# Class mapping dictionary (using the OGRwkbGeometryType as the key)
GEO_CLASSES = {
1: Point,
2: LineString,
3: Polygon,
4: MultiPoint,
5: MultiLineString,
6: MultiPolygon,
7: GeometryCollection,
8: CircularString,
9: CompoundCurve,
10: CurvePolygon,
11: MultiCurve,
12: MultiSurface,
101: LinearRing,
1008: CircularString, # CIRCULARSTRING Z
1009: CompoundCurve, # COMPOUNDCURVE Z
1010: CurvePolygon, # CURVEPOLYGON Z
1011: MultiCurve, # MULTICURVE Z
1012: MultiSurface, # MULTICURVE Z
2001: Point, # POINT M
2002: LineString, # LINESTRING M
2003: Polygon, # POLYGON M
2004: MultiPoint, # MULTIPOINT M
2005: MultiLineString, # MULTILINESTRING M
2006: MultiPolygon, # MULTIPOLYGON M
2007: GeometryCollection, # GEOMETRYCOLLECTION M
2008: CircularString, # CIRCULARSTRING M
2009: CompoundCurve, # COMPOUNDCURVE M
2010: CurvePolygon, # CURVEPOLYGON M
2011: MultiCurve, # MULTICURVE M
2012: MultiSurface, # MULTICURVE M
3001: Point, # POINT ZM
3002: LineString, # LINESTRING ZM
3003: Polygon, # POLYGON ZM
3004: MultiPoint, # MULTIPOINT ZM
3005: MultiLineString, # MULTILINESTRING ZM
3006: MultiPolygon, # MULTIPOLYGON ZM
3007: GeometryCollection, # GEOMETRYCOLLECTION ZM
3008: CircularString, # CIRCULARSTRING ZM
3009: CompoundCurve, # COMPOUNDCURVE ZM
3010: CurvePolygon, # CURVEPOLYGON ZM
3011: MultiCurve, # MULTICURVE ZM
3012: MultiSurface, # MULTISURFACE ZM
1 + OGRGeomType.wkb25bit: Point, # POINT Z
2 + OGRGeomType.wkb25bit: LineString, # LINESTRING Z
3 + OGRGeomType.wkb25bit: Polygon, # POLYGON Z
4 + OGRGeomType.wkb25bit: MultiPoint, # MULTIPOINT Z
5 + OGRGeomType.wkb25bit: MultiLineString, # MULTILINESTRING Z
6 + OGRGeomType.wkb25bit: MultiPolygon, # MULTIPOLYGON Z
7 + OGRGeomType.wkb25bit: GeometryCollection, # GEOMETRYCOLLECTION Z
}
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/django/contrib/gis/gdal/base.py | django/contrib/gis/gdal/base.py | from django.contrib.gis.gdal.error import GDALException
from django.contrib.gis.ptr import CPointerBase
class GDALBase(CPointerBase):
null_ptr_exception_class = GDALException
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/django/contrib/gis/gdal/field.py | django/contrib/gis/gdal/field.py | from ctypes import byref, c_int
from datetime import date, datetime, time
from django.contrib.gis.gdal.base import GDALBase
from django.contrib.gis.gdal.error import GDALException
from django.contrib.gis.gdal.prototypes import ds as capi
from django.utils.encoding import force_str
# For more information, see the OGR C API source code:
# https://gdal.org/api/vector_c_api.html
#
# The OGR_Fld_* routines are relevant here.
class Field(GDALBase):
"""
Wrap an OGR Field. Needs to be instantiated from a Feature object.
"""
def __init__(self, feat, index):
"""
Initialize on the feature object and the integer index of
the field within the feature.
"""
# Setting the feature pointer and index.
self._feat = feat
self._index = index
# Getting the pointer for this field.
fld_ptr = capi.get_feat_field_defn(feat.ptr, index)
if not fld_ptr:
raise GDALException("Cannot create OGR Field, invalid pointer given.")
self.ptr = fld_ptr
# Setting the class depending upon the OGR Field Type (OFT)
self.__class__ = OGRFieldTypes[self.type]
def __str__(self):
"Return the string representation of the Field."
return str(self.value).strip()
# #### Field Methods ####
def as_double(self):
"Retrieve the Field's value as a double (float)."
return (
capi.get_field_as_double(self._feat.ptr, self._index)
if self.is_set
else None
)
def as_int(self, is_64=False):
"Retrieve the Field's value as an integer."
if is_64:
return (
capi.get_field_as_integer64(self._feat.ptr, self._index)
if self.is_set
else None
)
else:
return (
capi.get_field_as_integer(self._feat.ptr, self._index)
if self.is_set
else None
)
def as_string(self):
"Retrieve the Field's value as a string."
if not self.is_set:
return None
string = capi.get_field_as_string(self._feat.ptr, self._index)
return force_str(string, encoding=self._feat.encoding, strings_only=True)
def as_datetime(self):
"Retrieve the Field's value as a tuple of date & time components."
if not self.is_set:
return None
yy, mm, dd, hh, mn, ss, tz = [c_int() for i in range(7)]
status = capi.get_field_as_datetime(
self._feat.ptr,
self._index,
byref(yy),
byref(mm),
byref(dd),
byref(hh),
byref(mn),
byref(ss),
byref(tz),
)
if status:
return (yy, mm, dd, hh, mn, ss, tz)
else:
raise GDALException(
"Unable to retrieve date & time information from the field."
)
# #### Field Properties ####
@property
def is_set(self):
"Return True if the value of this field isn't null, False otherwise."
return capi.is_field_set(self._feat.ptr, self._index)
@property
def name(self):
"Return the name of this Field."
name = capi.get_field_name(self.ptr)
return force_str(name, encoding=self._feat.encoding, strings_only=True)
@property
def precision(self):
"Return the precision of this Field."
return capi.get_field_precision(self.ptr)
@property
def type(self):
"Return the OGR type of this Field."
return capi.get_field_type(self.ptr)
@property
def type_name(self):
"Return the OGR field type name for this Field."
return capi.get_field_type_name(self.type)
@property
def value(self):
"Return the value of this Field."
# Default is to get the field as a string.
return self.as_string()
@property
def width(self):
"Return the width of this Field."
return capi.get_field_width(self.ptr)
# ### The Field sub-classes for each OGR Field type. ###
class OFTInteger(Field):
_bit64 = False
@property
def value(self):
"Return an integer contained in this field."
return self.as_int(self._bit64)
@property
def type(self):
"""
GDAL uses OFTReals to represent OFTIntegers in created
shapefiles -- forcing the type here since the underlying field
type may actually be OFTReal.
"""
return 0
class OFTReal(Field):
@property
def value(self):
"Return a float contained in this field."
return self.as_double()
# String & Binary fields, just subclasses
class OFTString(Field):
pass
class OFTWideString(Field):
pass
class OFTBinary(Field):
pass
# OFTDate, OFTTime, OFTDateTime fields.
class OFTDate(Field):
@property
def value(self):
"Return a Python `date` object for the OFTDate field."
try:
yy, mm, dd, hh, mn, ss, tz = self.as_datetime()
return date(yy.value, mm.value, dd.value)
except (TypeError, ValueError, GDALException):
return None
class OFTDateTime(Field):
@property
def value(self):
"Return a Python `datetime` object for this OFTDateTime field."
# TODO: Adapt timezone information. See:
# https://lists.osgeo.org/pipermail/gdal-dev/2006-February/007990.html
# The `tz` variable has values of: 0=unknown, 1=localtime (ambiguous),
# 100=GMT, 104=GMT+1, 80=GMT-5, etc.
try:
yy, mm, dd, hh, mn, ss, tz = self.as_datetime()
return datetime(yy.value, mm.value, dd.value, hh.value, mn.value, ss.value)
except (TypeError, ValueError, GDALException):
return None
class OFTTime(Field):
@property
def value(self):
"Return a Python `time` object for this OFTTime field."
try:
yy, mm, dd, hh, mn, ss, tz = self.as_datetime()
return time(hh.value, mn.value, ss.value)
except (ValueError, GDALException):
return None
class OFTInteger64(OFTInteger):
_bit64 = True
# List fields are also just subclasses
class OFTIntegerList(Field):
pass
class OFTRealList(Field):
pass
class OFTStringList(Field):
pass
class OFTWideStringList(Field):
pass
class OFTInteger64List(Field):
pass
# Class mapping dictionary for OFT Types and reverse mapping.
OGRFieldTypes = {
0: OFTInteger,
1: OFTIntegerList,
2: OFTReal,
3: OFTRealList,
4: OFTString,
5: OFTStringList,
6: OFTWideString,
7: OFTWideStringList,
8: OFTBinary,
9: OFTDate,
10: OFTTime,
11: OFTDateTime,
12: OFTInteger64,
13: OFTInteger64List,
}
ROGRFieldTypes = {cls: num for num, cls in OGRFieldTypes.items()}
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/django/contrib/gis/gdal/raster/const.py | django/contrib/gis/gdal/raster/const.py | """
GDAL - Constant definitions
"""
from ctypes import (
c_double,
c_float,
c_int8,
c_int16,
c_int32,
c_int64,
c_ubyte,
c_uint16,
c_uint32,
c_uint64,
)
# See https://gdal.org/api/raster_c_api.html#_CPPv412GDALDataType
GDAL_PIXEL_TYPES = {
0: "GDT_Unknown", # Unknown or unspecified type
1: "GDT_Byte", # Eight bit unsigned integer
2: "GDT_UInt16", # Sixteen bit unsigned integer
3: "GDT_Int16", # Sixteen bit signed integer
4: "GDT_UInt32", # Thirty-two bit unsigned integer
5: "GDT_Int32", # Thirty-two bit signed integer
6: "GDT_Float32", # Thirty-two bit floating point
7: "GDT_Float64", # Sixty-four bit floating point
8: "GDT_CInt16", # Complex Int16
9: "GDT_CInt32", # Complex Int32
10: "GDT_CFloat32", # Complex Float32
11: "GDT_CFloat64", # Complex Float64
12: "GDT_UInt64", # 64 bit unsigned integer (GDAL 3.5+).
13: "GDT_Int64", # 64 bit signed integer (GDAL 3.5+).
14: "GDT_Int8", # 8 bit signed integer (GDAL 3.7+).
}
# A list of gdal datatypes that are integers.
GDAL_INTEGER_TYPES = [1, 2, 3, 4, 5, 12, 13, 14]
# Lookup values to convert GDAL pixel type indices into ctypes objects.
# The GDAL band-io works with ctypes arrays to hold data to be written
# or to hold the space for data to be read into. The lookup below helps
# selecting the right ctypes object for a given gdal pixel type.
GDAL_TO_CTYPES = [
None,
c_ubyte,
c_uint16,
c_int16,
c_uint32,
c_int32,
c_float,
c_double,
None,
None,
None,
None,
c_uint64,
c_int64,
c_int8,
]
# List of resampling algorithms that can be used to warp a GDALRaster.
GDAL_RESAMPLE_ALGORITHMS = {
"NearestNeighbour": 0,
"Bilinear": 1,
"Cubic": 2,
"CubicSpline": 3,
"Lanczos": 4,
"Average": 5,
"Mode": 6,
}
# See https://gdal.org/api/raster_c_api.html#_CPPv415GDALColorInterp
GDAL_COLOR_TYPES = {
0: "GCI_Undefined", # Undefined, default value, i.e. not known
1: "GCI_GrayIndex", # Grayscale
2: "GCI_PaletteIndex", # Paletted
3: "GCI_RedBand", # Red band of RGBA image
4: "GCI_GreenBand", # Green band of RGBA image
5: "GCI_BlueBand", # Blue band of RGBA image
6: "GCI_AlphaBand", # Alpha (0=transparent, 255=opaque)
7: "GCI_HueBand", # Hue band of HLS image
8: "GCI_SaturationBand", # Saturation band of HLS image
9: "GCI_LightnessBand", # Lightness band of HLS image
10: "GCI_CyanBand", # Cyan band of CMYK image
11: "GCI_MagentaBand", # Magenta band of CMYK image
12: "GCI_YellowBand", # Yellow band of CMYK image
13: "GCI_BlackBand", # Black band of CMLY image
14: "GCI_YCbCr_YBand", # Y Luminance
15: "GCI_YCbCr_CbBand", # Cb Chroma
16: "GCI_YCbCr_CrBand", # Cr Chroma, also GCI_Max
}
# GDAL virtual filesystems prefix.
VSI_FILESYSTEM_PREFIX = "/vsi"
# Fixed base path for buffer-based GDAL in-memory files.
VSI_MEM_FILESYSTEM_BASE_PATH = "/vsimem/"
# Should the memory file system take ownership of the buffer, freeing it when
# the file is deleted? (No, GDALRaster.__del__() will delete the buffer.)
VSI_TAKE_BUFFER_OWNERSHIP = False
# Should a VSI file be removed when retrieving its buffer?
VSI_DELETE_BUFFER_ON_READ = False
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/django/contrib/gis/gdal/raster/band.py | django/contrib/gis/gdal/raster/band.py | from ctypes import byref, c_double, c_int, c_void_p
from django.contrib.gis.gdal.error import GDALException
from django.contrib.gis.gdal.prototypes import raster as capi
from django.contrib.gis.gdal.raster.base import GDALRasterBase
from django.contrib.gis.shortcuts import numpy
from django.utils.encoding import force_str
from .const import (
GDAL_COLOR_TYPES,
GDAL_INTEGER_TYPES,
GDAL_PIXEL_TYPES,
GDAL_TO_CTYPES,
)
class GDALBand(GDALRasterBase):
"""
Wrap a GDAL raster band, needs to be obtained from a GDALRaster object.
"""
def __init__(self, source, index):
self.source = source
self._ptr = capi.get_ds_raster_band(source._ptr, index)
def _flush(self):
"""
Call the flush method on the Band's parent raster and force a refresh
of the statistics attribute when requested the next time.
"""
self.source._flush()
self._stats_refresh = True
@property
def description(self):
"""
Return the description string of the band.
"""
return force_str(capi.get_band_description(self._ptr))
@property
def width(self):
"""
Width (X axis) in pixels of the band.
"""
return capi.get_band_xsize(self._ptr)
@property
def height(self):
"""
Height (Y axis) in pixels of the band.
"""
return capi.get_band_ysize(self._ptr)
@property
def pixel_count(self):
"""
Return the total number of pixels in this band.
"""
return self.width * self.height
_stats_refresh = False
def statistics(self, refresh=False, approximate=False):
"""
Compute statistics on the pixel values of this band.
The return value is a tuple with the following structure:
(minimum, maximum, mean, standard deviation).
If approximate=True, the statistics may be computed based on overviews
or a subset of image tiles.
If refresh=True, the statistics will be computed from the data
directly, and the cache will be updated where applicable.
For empty bands (where all pixel values are nodata), all statistics
values are returned as None.
For raster formats using Persistent Auxiliary Metadata (PAM) services,
the statistics might be cached in an auxiliary file.
"""
# Prepare array with arguments for capi function
smin, smax, smean, sstd = c_double(), c_double(), c_double(), c_double()
stats_args = [
self._ptr,
c_int(approximate),
byref(smin),
byref(smax),
byref(smean),
byref(sstd),
c_void_p(),
c_void_p(),
]
if refresh or self._stats_refresh:
func = capi.compute_band_statistics
else:
# Add additional argument to force computation if there is no
# existing PAM file to take the values from.
force = True
stats_args.insert(2, c_int(force))
func = capi.get_band_statistics
# Computation of statistics fails for empty bands.
try:
func(*stats_args)
result = smin.value, smax.value, smean.value, sstd.value
except GDALException:
result = (None, None, None, None)
self._stats_refresh = False
return result
@property
def min(self):
"""
Return the minimum pixel value for this band.
"""
return self.statistics()[0]
@property
def max(self):
"""
Return the maximum pixel value for this band.
"""
return self.statistics()[1]
@property
def mean(self):
"""
Return the mean of all pixel values of this band.
"""
return self.statistics()[2]
@property
def std(self):
"""
Return the standard deviation of all pixel values of this band.
"""
return self.statistics()[3]
@property
def nodata_value(self):
"""
Return the nodata value for this band, or None if it isn't set.
"""
# Get value and nodata exists flag
nodata_exists = c_int()
value = capi.get_band_nodata_value(self._ptr, nodata_exists)
if not nodata_exists:
value = None
# If the pixeltype is an integer, convert to int
elif self.datatype() in GDAL_INTEGER_TYPES:
value = int(value)
return value
@nodata_value.setter
def nodata_value(self, value):
"""
Set the nodata value for this band.
"""
if value is None:
capi.delete_band_nodata_value(self._ptr)
elif not isinstance(value, (int, float)):
raise ValueError("Nodata value must be numeric or None.")
else:
capi.set_band_nodata_value(self._ptr, value)
self._flush()
def datatype(self, as_string=False):
"""
Return the GDAL Pixel Datatype for this band.
"""
dtype = capi.get_band_datatype(self._ptr)
if as_string:
dtype = GDAL_PIXEL_TYPES[dtype]
return dtype
def color_interp(self, as_string=False):
"""Return the GDAL color interpretation for this band."""
color = capi.get_band_color_interp(self._ptr)
if as_string:
color = GDAL_COLOR_TYPES[color]
return color
def data(self, data=None, offset=None, size=None, shape=None, as_memoryview=False):
"""
Read or writes pixel values for this band. Blocks of data can
be accessed by specifying the width, height and offset of the
desired block. The same specification can be used to update
parts of a raster by providing an array of values.
Allowed input data types are bytes, memoryview, list, tuple, and array.
"""
offset = offset or (0, 0)
size = size or (self.width - offset[0], self.height - offset[1])
shape = shape or size
if any(x <= 0 for x in size):
raise ValueError("Offset too big for this raster.")
if size[0] > self.width or size[1] > self.height:
raise ValueError("Size is larger than raster.")
# Create ctypes type array generator
ctypes_array = GDAL_TO_CTYPES[self.datatype()] * (shape[0] * shape[1])
if data is None:
# Set read mode
access_flag = 0
# Prepare empty ctypes array
data_array = ctypes_array()
else:
# Set write mode
access_flag = 1
# Instantiate ctypes array holding the input data
if isinstance(data, (bytes, memoryview)) or (
numpy and isinstance(data, numpy.ndarray)
):
data_array = ctypes_array.from_buffer_copy(data)
else:
data_array = ctypes_array(*data)
# Access band
capi.band_io(
self._ptr,
access_flag,
offset[0],
offset[1],
size[0],
size[1],
byref(data_array),
shape[0],
shape[1],
self.datatype(),
0,
0,
)
# Return data as numpy array if possible, otherwise as list
if data is None:
if as_memoryview:
return memoryview(data_array)
elif numpy:
# reshape() needs a reshape parameter with the height first.
return numpy.frombuffer(
data_array, dtype=numpy.dtype(data_array)
).reshape(tuple(reversed(size)))
else:
return list(data_array)
else:
self._flush()
class BandList(list):
def __init__(self, source):
self.source = source
super().__init__()
def __iter__(self):
for idx in range(1, len(self) + 1):
yield GDALBand(self.source, idx)
def __len__(self):
return capi.get_ds_raster_count(self.source._ptr)
def __getitem__(self, index):
try:
return GDALBand(self.source, index + 1)
except GDALException:
raise GDALException("Unable to get band index %d" % index)
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/django/contrib/gis/gdal/raster/__init__.py | django/contrib/gis/gdal/raster/__init__.py | python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false | |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/django/contrib/gis/gdal/raster/source.py | django/contrib/gis/gdal/raster/source.py | import json
import os
import sys
import uuid
from ctypes import (
addressof,
byref,
c_buffer,
c_char_p,
c_double,
c_int,
c_void_p,
string_at,
)
from pathlib import Path
from django.contrib.gis.gdal.driver import Driver
from django.contrib.gis.gdal.error import GDALException
from django.contrib.gis.gdal.prototypes import raster as capi
from django.contrib.gis.gdal.raster.band import BandList
from django.contrib.gis.gdal.raster.base import GDALRasterBase
from django.contrib.gis.gdal.raster.const import (
GDAL_RESAMPLE_ALGORITHMS,
VSI_DELETE_BUFFER_ON_READ,
VSI_FILESYSTEM_PREFIX,
VSI_MEM_FILESYSTEM_BASE_PATH,
VSI_TAKE_BUFFER_OWNERSHIP,
)
from django.contrib.gis.gdal.srs import SpatialReference, SRSException
from django.contrib.gis.geometry import json_regex
from django.utils.encoding import force_bytes, force_str
from django.utils.functional import cached_property
class TransformPoint(list):
indices = {
"origin": (0, 3),
"scale": (1, 5),
"skew": (2, 4),
}
def __init__(self, raster, prop):
x = raster.geotransform[self.indices[prop][0]]
y = raster.geotransform[self.indices[prop][1]]
super().__init__([x, y])
self._raster = raster
self._prop = prop
@property
def x(self):
return self[0]
@x.setter
def x(self, value):
gtf = self._raster.geotransform
gtf[self.indices[self._prop][0]] = value
self._raster.geotransform = gtf
@property
def y(self):
return self[1]
@y.setter
def y(self, value):
gtf = self._raster.geotransform
gtf[self.indices[self._prop][1]] = value
self._raster.geotransform = gtf
class GDALRaster(GDALRasterBase):
"""
Wrap a raster GDAL Data Source object.
"""
destructor = capi.close_ds
def __init__(self, ds_input, write=False):
self._write = 1 if write else 0
Driver.ensure_registered()
# Preprocess json inputs. This converts json strings to dictionaries,
# which are parsed below the same way as direct dictionary inputs.
if isinstance(ds_input, str) and json_regex.match(ds_input):
ds_input = json.loads(ds_input)
# If input is a valid file path, try setting file as source.
if isinstance(ds_input, (str, Path)):
ds_input = str(ds_input)
if not ds_input.startswith(VSI_FILESYSTEM_PREFIX) and not os.path.exists(
ds_input
):
raise GDALException(
'Unable to read raster source input "%s".' % ds_input
)
try:
# GDALOpen will auto-detect the data source type.
self._ptr = capi.open_ds(force_bytes(ds_input), self._write)
except GDALException as err:
raise GDALException(
'Could not open the datasource at "{}" ({}).'.format(ds_input, err)
)
elif isinstance(ds_input, bytes):
# Create a new raster in write mode.
self._write = 1
# Get size of buffer.
size = sys.getsizeof(ds_input)
# Pass data to ctypes, keeping a reference to the ctypes object so
# that the vsimem file remains available until the GDALRaster is
# deleted.
self._ds_input = c_buffer(ds_input)
# Create random name to reference in vsimem filesystem.
vsi_path = os.path.join(VSI_MEM_FILESYSTEM_BASE_PATH, str(uuid.uuid4()))
# Create vsimem file from buffer.
capi.create_vsi_file_from_mem_buffer(
force_bytes(vsi_path),
byref(self._ds_input),
size,
VSI_TAKE_BUFFER_OWNERSHIP,
)
# Open the new vsimem file as a GDALRaster.
try:
self._ptr = capi.open_ds(force_bytes(vsi_path), self._write)
except GDALException:
# Remove the broken file from the VSI filesystem.
capi.unlink_vsi_file(force_bytes(vsi_path))
raise GDALException("Failed creating VSI raster from the input buffer.")
elif isinstance(ds_input, dict):
# A new raster needs to be created in write mode
self._write = 1
# Create driver (in memory by default)
driver = Driver(ds_input.get("driver", "MEM"))
# For out of memory drivers, check filename argument
if driver.name != "MEM" and "name" not in ds_input:
raise GDALException(
'Specify name for creation of raster with driver "{}".'.format(
driver.name
)
)
# Check if width and height where specified
if "width" not in ds_input or "height" not in ds_input:
raise GDALException(
"Specify width and height attributes for JSON or dict input."
)
# Check if srid was specified
if "srid" not in ds_input:
raise GDALException("Specify srid for JSON or dict input.")
# Create null terminated gdal options array.
papsz_options = []
for key, val in ds_input.get("papsz_options", {}).items():
option = "{}={}".format(key, val)
papsz_options.append(option.upper().encode())
papsz_options.append(None)
# Convert papszlist to ctypes array.
papsz_options = (c_char_p * len(papsz_options))(*papsz_options)
# Create GDAL Raster
self._ptr = capi.create_ds(
driver._ptr,
force_bytes(ds_input.get("name", "")),
ds_input["width"],
ds_input["height"],
ds_input.get("nr_of_bands", len(ds_input.get("bands", []))),
ds_input.get("datatype", 6),
byref(papsz_options),
)
# Set band data if provided
for i, band_input in enumerate(ds_input.get("bands", [])):
band = self.bands[i]
if "nodata_value" in band_input:
band.nodata_value = band_input["nodata_value"]
# Instantiate band filled with nodata values if only
# partial input data has been provided.
if band.nodata_value is not None and (
"data" not in band_input
or "size" in band_input
or "shape" in band_input
):
band.data(data=(band.nodata_value,), shape=(1, 1))
# Set band data values from input.
band.data(
data=band_input.get("data"),
size=band_input.get("size"),
shape=band_input.get("shape"),
offset=band_input.get("offset"),
)
# Set SRID
self.srs = ds_input.get("srid")
# Set additional properties if provided
if "origin" in ds_input:
self.origin.x, self.origin.y = ds_input["origin"]
if "scale" in ds_input:
self.scale.x, self.scale.y = ds_input["scale"]
if "skew" in ds_input:
self.skew.x, self.skew.y = ds_input["skew"]
elif isinstance(ds_input, c_void_p):
# Instantiate the object using an existing pointer to a gdal
# raster.
self._ptr = ds_input
else:
raise GDALException(
'Invalid data source input type: "{}".'.format(type(ds_input))
)
def __del__(self):
if self.is_vsi_based:
# Remove the temporary file from the VSI in-memory filesystem.
capi.unlink_vsi_file(force_bytes(self.name))
super().__del__()
def __str__(self):
return self.name
def __repr__(self):
"""
Short-hand representation because WKB may be very large.
"""
return "<Raster object at %s>" % hex(addressof(self._ptr))
def _flush(self):
"""
Flush all data from memory into the source file if it exists.
The data that needs flushing are geotransforms, coordinate systems,
nodata_values and pixel values. This function will be called
automatically wherever it is needed.
"""
# Raise an Exception if the value is being changed in read mode.
if not self._write:
raise GDALException(
"Raster needs to be opened in write mode to change values."
)
capi.flush_ds(self._ptr)
@property
def vsi_buffer(self):
if not (
self.is_vsi_based and self.name.startswith(VSI_MEM_FILESYSTEM_BASE_PATH)
):
return None
# Prepare an integer that will contain the buffer length.
out_length = c_int()
# Get the data using the vsi file name.
dat = capi.get_mem_buffer_from_vsi_file(
force_bytes(self.name),
byref(out_length),
VSI_DELETE_BUFFER_ON_READ,
)
# Read the full buffer pointer.
return string_at(dat, out_length.value)
@cached_property
def is_vsi_based(self):
return self._ptr and self.name.startswith(VSI_FILESYSTEM_PREFIX)
@property
def name(self):
"""
Return the name of this raster. Corresponds to filename
for file-based rasters.
"""
return force_str(capi.get_ds_description(self._ptr))
@cached_property
def driver(self):
"""
Return the GDAL Driver used for this raster.
"""
ds_driver = capi.get_ds_driver(self._ptr)
return Driver(ds_driver)
@property
def width(self):
"""
Width (X axis) in pixels.
"""
return capi.get_ds_xsize(self._ptr)
@property
def height(self):
"""
Height (Y axis) in pixels.
"""
return capi.get_ds_ysize(self._ptr)
@property
def srs(self):
"""
Return the SpatialReference used in this GDALRaster.
"""
try:
wkt = capi.get_ds_projection_ref(self._ptr)
if not wkt:
return None
return SpatialReference(wkt, srs_type="wkt")
except SRSException:
return None
@srs.setter
def srs(self, value):
"""
Set the spatial reference used in this GDALRaster. The input can be
a SpatialReference or any parameter accepted by the SpatialReference
constructor.
"""
if isinstance(value, SpatialReference):
srs = value
elif isinstance(value, (int, str)):
srs = SpatialReference(value)
else:
raise ValueError("Could not create a SpatialReference from input.")
capi.set_ds_projection_ref(self._ptr, srs.wkt.encode())
self._flush()
@property
def srid(self):
"""
Shortcut to access the srid of this GDALRaster.
"""
return self.srs.srid
@srid.setter
def srid(self, value):
"""
Shortcut to set this GDALRaster's srs from an srid.
"""
self.srs = value
@property
def geotransform(self):
"""
Return the geotransform of the data source.
Return the default geotransform if it does not exist or has not been
set previously. The default is [0.0, 1.0, 0.0, 0.0, 0.0, -1.0].
"""
# Create empty ctypes double array for data
gtf = (c_double * 6)()
capi.get_ds_geotransform(self._ptr, byref(gtf))
return list(gtf)
@geotransform.setter
def geotransform(self, values):
"Set the geotransform for the data source."
if len(values) != 6 or not all(isinstance(x, (int, float)) for x in values):
raise ValueError("Geotransform must consist of 6 numeric values.")
# Create ctypes double array with input and write data
values = (c_double * 6)(*values)
capi.set_ds_geotransform(self._ptr, byref(values))
self._flush()
@property
def origin(self):
"""
Coordinates of the raster origin.
"""
return TransformPoint(self, "origin")
@property
def scale(self):
"""
Pixel scale in units of the raster projection.
"""
return TransformPoint(self, "scale")
@property
def skew(self):
"""
Skew of pixels (rotation parameters).
"""
return TransformPoint(self, "skew")
@property
def extent(self):
"""
Return the extent as a 4-tuple (xmin, ymin, xmax, ymax).
"""
# Calculate boundary values based on scale and size
xval = self.origin.x + self.scale.x * self.width
yval = self.origin.y + self.scale.y * self.height
# Calculate min and max values
xmin = min(xval, self.origin.x)
xmax = max(xval, self.origin.x)
ymin = min(yval, self.origin.y)
ymax = max(yval, self.origin.y)
return xmin, ymin, xmax, ymax
@property
def bands(self):
return BandList(self)
def warp(self, ds_input, resampling="NearestNeighbour", max_error=0.0):
"""
Return a warped GDALRaster with the given input characteristics.
The input is expected to be a dictionary containing the parameters
of the target raster. Allowed values are width, height, SRID, origin,
scale, skew, datatype, driver, and name (filename).
By default, the warp functions keeps all parameters equal to the values
of the original source raster. For the name of the target raster, the
name of the source raster will be used and appended with
_copy. + source_driver_name.
In addition, the resampling algorithm can be specified with the
"resampling" input parameter. The default is NearestNeighbor. For a
list of all options consult the GDAL_RESAMPLE_ALGORITHMS constant.
"""
# Get the parameters defining the geotransform, srid, and size of the
# raster
ds_input.setdefault("width", self.width)
ds_input.setdefault("height", self.height)
ds_input.setdefault("srid", self.srs.srid)
ds_input.setdefault("origin", self.origin)
ds_input.setdefault("scale", self.scale)
ds_input.setdefault("skew", self.skew)
# Get the driver, name, and datatype of the target raster
ds_input.setdefault("driver", self.driver.name)
if "name" not in ds_input:
ds_input["name"] = self.name + "_copy." + self.driver.name
if "datatype" not in ds_input:
ds_input["datatype"] = self.bands[0].datatype()
# Instantiate raster bands filled with nodata values.
ds_input["bands"] = [{"nodata_value": bnd.nodata_value} for bnd in self.bands]
# Create target raster
target = GDALRaster(ds_input, write=True)
# Select resampling algorithm
algorithm = GDAL_RESAMPLE_ALGORITHMS[resampling]
# Reproject image
capi.reproject_image(
self._ptr,
self.srs.wkt.encode(),
target._ptr,
target.srs.wkt.encode(),
algorithm,
0.0,
max_error,
c_void_p(),
c_void_p(),
c_void_p(),
)
# Make sure all data is written to file
target._flush()
return target
def clone(self, name=None):
"""Return a clone of this GDALRaster."""
if name:
clone_name = name
elif self.driver.name != "MEM":
clone_name = self.name + "_copy." + self.driver.name
else:
clone_name = os.path.join(VSI_MEM_FILESYSTEM_BASE_PATH, str(uuid.uuid4()))
return GDALRaster(
capi.copy_ds(
self.driver._ptr,
force_bytes(clone_name),
self._ptr,
c_int(),
c_char_p(),
c_void_p(),
c_void_p(),
),
write=self._write,
)
def transform(
self, srs, driver=None, name=None, resampling="NearestNeighbour", max_error=0.0
):
"""
Return a copy of this raster reprojected into the given spatial
reference system.
"""
# Convert the resampling algorithm name into an algorithm id
algorithm = GDAL_RESAMPLE_ALGORITHMS[resampling]
if isinstance(srs, SpatialReference):
target_srs = srs
elif isinstance(srs, (int, str)):
target_srs = SpatialReference(srs)
else:
raise TypeError(
"Transform only accepts SpatialReference, string, and integer "
"objects."
)
if target_srs.srid == self.srid and (not driver or driver == self.driver.name):
return self.clone(name)
# Create warped virtual dataset in the target reference system
target = capi.auto_create_warped_vrt(
self._ptr,
self.srs.wkt.encode(),
target_srs.wkt.encode(),
algorithm,
max_error,
c_void_p(),
)
target = GDALRaster(target)
# Construct the target warp dictionary from the virtual raster
data = {
"srid": target_srs.srid,
"width": target.width,
"height": target.height,
"origin": [target.origin.x, target.origin.y],
"scale": [target.scale.x, target.scale.y],
"skew": [target.skew.x, target.skew.y],
}
# Set the driver and filepath if provided
if driver:
data["driver"] = driver
if name:
data["name"] = name
# Warp the raster into new srid
return self.warp(data, resampling=resampling, max_error=max_error)
@property
def info(self):
"""
Return information about this raster in a string format equivalent
to the output of the gdalinfo command line utility.
"""
return capi.get_ds_info(self.ptr, None).decode()
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/django/contrib/gis/gdal/raster/base.py | django/contrib/gis/gdal/raster/base.py | from django.contrib.gis.gdal.base import GDALBase
from django.contrib.gis.gdal.prototypes import raster as capi
class GDALRasterBase(GDALBase):
"""
Attributes that exist on both GDALRaster and GDALBand.
"""
@property
def metadata(self):
"""
Return the metadata for this raster or band. The return value is a
nested dictionary, where the first-level key is the metadata domain and
the second-level is the metadata item names and values for that domain.
"""
# The initial metadata domain list contains the default domain.
# The default is returned if domain name is None.
domain_list = ["DEFAULT"]
# Get additional metadata domains from the raster.
meta_list = capi.get_ds_metadata_domain_list(self._ptr)
if meta_list:
# The number of domains is unknown, so retrieve data until there
# are no more values in the ctypes array.
counter = 0
domain = meta_list[counter]
while domain:
domain_list.append(domain.decode())
counter += 1
domain = meta_list[counter]
# Free domain list array.
capi.free_dsl(meta_list)
# Retrieve metadata values for each domain.
result = {}
for domain in domain_list:
# Get metadata for this domain.
data = capi.get_ds_metadata(
self._ptr,
(None if domain == "DEFAULT" else domain.encode()),
)
if not data:
continue
# The number of metadata items is unknown, so retrieve data until
# there are no more values in the ctypes array.
domain_meta = {}
counter = 0
item = data[counter]
while item:
key, val = item.decode().split("=")
domain_meta[key] = val
counter += 1
item = data[counter]
# The default domain values are returned if domain is None.
result[domain or "DEFAULT"] = domain_meta
return result
@metadata.setter
def metadata(self, value):
"""
Set the metadata. Update only the domains that are contained in the
value dictionary.
"""
# Loop through domains.
for domain, metadata in value.items():
# Set the domain to None for the default, otherwise encode.
domain = None if domain == "DEFAULT" else domain.encode()
# Set each metadata entry separately.
for meta_name, meta_value in metadata.items():
capi.set_ds_metadata_item(
self._ptr,
meta_name.encode(),
meta_value.encode() if meta_value else None,
domain,
)
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/django/contrib/gis/gdal/prototypes/errcheck.py | django/contrib/gis/gdal/prototypes/errcheck.py | """
This module houses the error-checking routines used by the GDAL
ctypes prototypes.
"""
from ctypes import c_void_p, string_at
from django.contrib.gis.gdal.error import GDALException, SRSException, check_err
from django.contrib.gis.gdal.libgdal import lgdal
# Helper routines for retrieving pointers and/or values from
# arguments passed in by reference.
def arg_byref(args, offset=-1):
"Return the pointer argument's by-reference value."
return args[offset]._obj.value
def ptr_byref(args, offset=-1):
"Return the pointer argument passed in by-reference."
return args[offset]._obj
# ### String checking Routines ###
def check_const_string(result, func, cargs, offset=None, cpl=False):
"""
Similar functionality to `check_string`, but does not free the pointer.
"""
if offset:
check_err(result, cpl=cpl)
ptr = ptr_byref(cargs, offset)
return ptr.value
else:
return result
def check_string(result, func, cargs, offset=-1, str_result=False):
"""
Check the string output returned from the given function, and free
the string pointer allocated by OGR. The `str_result` keyword
may be used when the result is the string pointer, otherwise
the OGR error code is assumed. The `offset` keyword may be used
to extract the string pointer passed in by-reference at the given
slice offset in the function arguments.
"""
if str_result:
# For routines that return a string.
ptr = result
if not ptr:
s = None
else:
s = string_at(result)
else:
# Error-code return specified.
check_err(result)
ptr = ptr_byref(cargs, offset)
# Getting the string value
s = ptr.value
# Correctly freeing the allocated memory behind GDAL pointer
# with the VSIFree routine.
if ptr:
lgdal.VSIFree(ptr)
return s
# ### DataSource, Layer error-checking ###
# ### Envelope checking ###
def check_envelope(result, func, cargs, offset=-1):
"Check a function that returns an OGR Envelope by reference."
return ptr_byref(cargs, offset)
# ### Geometry error-checking routines ###
def check_geom(result, func, cargs):
"Check a function that returns a geometry."
# OGR_G_Clone may return an integer, even though the
# restype is set to c_void_p
if isinstance(result, int):
result = c_void_p(result)
if not result:
raise GDALException(
'Invalid geometry pointer returned from "%s".' % func.__name__
)
return result
def check_geom_offset(result, func, cargs, offset=-1):
"Check the geometry at the given offset in the C parameter list."
check_err(result)
geom = ptr_byref(cargs, offset=offset)
return check_geom(geom, func, cargs)
# ### Spatial Reference error-checking routines ###
def check_srs(result, func, cargs):
if isinstance(result, int):
result = c_void_p(result)
if not result:
raise SRSException(
'Invalid spatial reference pointer returned from "%s".' % func.__name__
)
return result
# ### Other error-checking routines ###
def check_arg_errcode(result, func, cargs, cpl=False):
"""
The error code is returned in the last argument, by reference.
Check its value with `check_err` before returning the result.
"""
check_err(arg_byref(cargs), cpl=cpl)
return result
def check_errcode(result, func, cargs, cpl=False):
"""
Check the error code returned (c_int).
"""
check_err(result, cpl=cpl)
def check_pointer(result, func, cargs):
"Make sure the result pointer is valid."
if isinstance(result, int):
result = c_void_p(result)
if result:
return result
else:
raise GDALException('Invalid pointer returned from "%s"' % func.__name__)
def check_str_arg(result, func, cargs):
"""
This is for the OSRGet[Angular|Linear]Units functions, which
require that the returned string pointer not be freed. This
returns both the double and string values.
"""
dbl = result
ptr = cargs[-1]._obj
return dbl, ptr.value.decode()
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/django/contrib/gis/gdal/prototypes/ds.py | django/contrib/gis/gdal/prototypes/ds.py | """
This module houses the ctypes function prototypes for OGR DataSource
related data structures. OGR_Dr_*, OGR_DS_*, OGR_L_*, OGR_F_*,
OGR_Fld_* routines are relevant here.
"""
from ctypes import POINTER, c_char_p, c_double, c_int, c_long, c_uint, c_void_p
from django.contrib.gis.gdal.envelope import OGREnvelope
from django.contrib.gis.gdal.libgdal import lgdal
from django.contrib.gis.gdal.prototypes.generation import (
bool_output,
const_string_output,
double_output,
geom_output,
int64_output,
int_output,
srs_output,
void_output,
voidptr_output,
)
c_int_p = POINTER(c_int) # shortcut type
GDAL_OF_READONLY = 0x00
GDAL_OF_UPDATE = 0x01
GDAL_OF_ALL = 0x00
GDAL_OF_RASTER = 0x02
GDAL_OF_VECTOR = 0x04
# Driver Routines
register_all = void_output(lgdal.GDALAllRegister, [], errcheck=False)
cleanup_all = void_output(lgdal.GDALDestroyDriverManager, [], errcheck=False)
get_driver = voidptr_output(lgdal.GDALGetDriver, [c_int])
get_driver_by_name = voidptr_output(
lgdal.GDALGetDriverByName, [c_char_p], errcheck=False
)
get_driver_count = int_output(lgdal.GDALGetDriverCount, [])
get_driver_description = const_string_output(lgdal.GDALGetDescription, [c_void_p])
# DataSource
open_ds = voidptr_output(
lgdal.GDALOpenEx,
[c_char_p, c_uint, POINTER(c_char_p), POINTER(c_char_p), POINTER(c_char_p)],
)
destroy_ds = void_output(lgdal.GDALClose, [c_void_p], errcheck=False)
get_ds_name = const_string_output(lgdal.GDALGetDescription, [c_void_p])
get_dataset_driver = voidptr_output(lgdal.GDALGetDatasetDriver, [c_void_p])
get_layer = voidptr_output(lgdal.GDALDatasetGetLayer, [c_void_p, c_int])
get_layer_by_name = voidptr_output(
lgdal.GDALDatasetGetLayerByName, [c_void_p, c_char_p]
)
get_layer_count = int_output(lgdal.GDALDatasetGetLayerCount, [c_void_p])
# Layer Routines
get_extent = void_output(lgdal.OGR_L_GetExtent, [c_void_p, POINTER(OGREnvelope), c_int])
get_feature = voidptr_output(lgdal.OGR_L_GetFeature, [c_void_p, c_long])
get_feature_count = int_output(lgdal.OGR_L_GetFeatureCount, [c_void_p, c_int])
get_layer_defn = voidptr_output(lgdal.OGR_L_GetLayerDefn, [c_void_p])
get_layer_srs = srs_output(lgdal.OGR_L_GetSpatialRef, [c_void_p])
get_next_feature = voidptr_output(lgdal.OGR_L_GetNextFeature, [c_void_p])
reset_reading = void_output(lgdal.OGR_L_ResetReading, [c_void_p], errcheck=False)
test_capability = int_output(lgdal.OGR_L_TestCapability, [c_void_p, c_char_p])
get_spatial_filter = geom_output(lgdal.OGR_L_GetSpatialFilter, [c_void_p])
set_spatial_filter = void_output(
lgdal.OGR_L_SetSpatialFilter, [c_void_p, c_void_p], errcheck=False
)
set_spatial_filter_rect = void_output(
lgdal.OGR_L_SetSpatialFilterRect,
[c_void_p, c_double, c_double, c_double, c_double],
errcheck=False,
)
# Feature Definition Routines
get_fd_geom_type = int_output(lgdal.OGR_FD_GetGeomType, [c_void_p])
get_fd_name = const_string_output(lgdal.OGR_FD_GetName, [c_void_p])
get_feat_name = const_string_output(lgdal.OGR_FD_GetName, [c_void_p])
get_field_count = int_output(lgdal.OGR_FD_GetFieldCount, [c_void_p])
get_field_defn = voidptr_output(lgdal.OGR_FD_GetFieldDefn, [c_void_p, c_int])
# Feature Routines
clone_feature = voidptr_output(lgdal.OGR_F_Clone, [c_void_p])
destroy_feature = void_output(lgdal.OGR_F_Destroy, [c_void_p], errcheck=False)
feature_equal = int_output(lgdal.OGR_F_Equal, [c_void_p, c_void_p])
get_feat_geom_ref = geom_output(lgdal.OGR_F_GetGeometryRef, [c_void_p])
get_feat_field_count = int_output(lgdal.OGR_F_GetFieldCount, [c_void_p])
get_feat_field_defn = voidptr_output(lgdal.OGR_F_GetFieldDefnRef, [c_void_p, c_int])
get_fid = int_output(lgdal.OGR_F_GetFID, [c_void_p])
get_field_as_datetime = int_output(
lgdal.OGR_F_GetFieldAsDateTime,
[c_void_p, c_int, c_int_p, c_int_p, c_int_p, c_int_p, c_int_p, c_int_p],
)
get_field_as_double = double_output(lgdal.OGR_F_GetFieldAsDouble, [c_void_p, c_int])
get_field_as_integer = int_output(lgdal.OGR_F_GetFieldAsInteger, [c_void_p, c_int])
get_field_as_integer64 = int64_output(
lgdal.OGR_F_GetFieldAsInteger64, [c_void_p, c_int]
)
is_field_set = bool_output(lgdal.OGR_F_IsFieldSetAndNotNull, [c_void_p, c_int])
get_field_as_string = const_string_output(
lgdal.OGR_F_GetFieldAsString, [c_void_p, c_int]
)
get_field_index = int_output(lgdal.OGR_F_GetFieldIndex, [c_void_p, c_char_p])
# Field Routines
get_field_name = const_string_output(lgdal.OGR_Fld_GetNameRef, [c_void_p])
get_field_precision = int_output(lgdal.OGR_Fld_GetPrecision, [c_void_p])
get_field_type = int_output(lgdal.OGR_Fld_GetType, [c_void_p])
get_field_type_name = const_string_output(lgdal.OGR_GetFieldTypeName, [c_int])
get_field_width = int_output(lgdal.OGR_Fld_GetWidth, [c_void_p])
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/django/contrib/gis/gdal/prototypes/geom.py | django/contrib/gis/gdal/prototypes/geom.py | from ctypes import POINTER, c_char_p, c_double, c_int, c_void_p
from django.contrib.gis.gdal.envelope import OGREnvelope
from django.contrib.gis.gdal.libgdal import lgdal
from django.contrib.gis.gdal.prototypes.errcheck import check_envelope
from django.contrib.gis.gdal.prototypes.generation import (
bool_output,
const_string_output,
double_output,
geom_output,
int_output,
srs_output,
string_output,
void_output,
)
# ### Generation routines specific to this module ###
def env_func(f, argtypes):
"For getting OGREnvelopes."
f.argtypes = argtypes
f.restype = None
f.errcheck = check_envelope
return f
def pnt_func(f):
"For accessing point information."
return double_output(f, [c_void_p, c_int])
def topology_func(f):
f.argtypes = [c_void_p, c_void_p]
f.restype = c_int
f.errcheck = lambda result, func, cargs: bool(result)
return f
# ### OGR_G ctypes function prototypes ###
# GeoJSON routines.
from_json = geom_output(lgdal.OGR_G_CreateGeometryFromJson, [c_char_p])
to_json = string_output(
lgdal.OGR_G_ExportToJson, [c_void_p], str_result=True, decoding="ascii"
)
to_kml = string_output(
lgdal.OGR_G_ExportToKML, [c_void_p, c_char_p], str_result=True, decoding="ascii"
)
# GetX, GetY, GetZ all return doubles.
getx = pnt_func(lgdal.OGR_G_GetX)
gety = pnt_func(lgdal.OGR_G_GetY)
getz = pnt_func(lgdal.OGR_G_GetZ)
getm = pnt_func(lgdal.OGR_G_GetM)
# Geometry creation routines.
from_wkb = geom_output(
lgdal.OGR_G_CreateFromWkbEx,
[c_char_p, c_void_p, POINTER(c_void_p), c_int],
offset=-2,
)
from_wkt = geom_output(
lgdal.OGR_G_CreateFromWkt,
[POINTER(c_char_p), c_void_p, POINTER(c_void_p)],
offset=-1,
)
from_gml = geom_output(lgdal.OGR_G_CreateFromGML, [c_char_p])
create_geom = geom_output(lgdal.OGR_G_CreateGeometry, [c_int])
clone_geom = geom_output(lgdal.OGR_G_Clone, [c_void_p])
get_geom_ref = geom_output(lgdal.OGR_G_GetGeometryRef, [c_void_p, c_int])
get_boundary = geom_output(lgdal.OGR_G_GetBoundary, [c_void_p])
geom_convex_hull = geom_output(lgdal.OGR_G_ConvexHull, [c_void_p])
geom_diff = geom_output(lgdal.OGR_G_Difference, [c_void_p, c_void_p])
geom_intersection = geom_output(lgdal.OGR_G_Intersection, [c_void_p, c_void_p])
geom_sym_diff = geom_output(lgdal.OGR_G_SymmetricDifference, [c_void_p, c_void_p])
geom_union = geom_output(lgdal.OGR_G_Union, [c_void_p, c_void_p])
is_3d = bool_output(lgdal.OGR_G_Is3D, [c_void_p])
set_3d = void_output(lgdal.OGR_G_Set3D, [c_void_p, c_int], errcheck=False)
is_measured = bool_output(lgdal.OGR_G_IsMeasured, [c_void_p])
set_measured = void_output(lgdal.OGR_G_SetMeasured, [c_void_p, c_int], errcheck=False)
has_curve_geom = bool_output(lgdal.OGR_G_HasCurveGeometry, [c_void_p, c_int])
get_linear_geom = geom_output(
lgdal.OGR_G_GetLinearGeometry, [c_void_p, c_double, POINTER(c_char_p)]
)
get_curve_geom = geom_output(
lgdal.OGR_G_GetCurveGeometry, [c_void_p, POINTER(c_char_p)]
)
# Geometry modification routines.
add_geom = void_output(lgdal.OGR_G_AddGeometry, [c_void_p, c_void_p])
import_wkt = void_output(lgdal.OGR_G_ImportFromWkt, [c_void_p, POINTER(c_char_p)])
# Destroys a geometry
destroy_geom = void_output(lgdal.OGR_G_DestroyGeometry, [c_void_p], errcheck=False)
# Geometry export routines.
to_wkb = void_output(
lgdal.OGR_G_ExportToWkb, None, errcheck=True
) # special handling for WKB.
to_iso_wkb = void_output(lgdal.OGR_G_ExportToIsoWkb, None, errcheck=True)
to_wkt = string_output(
lgdal.OGR_G_ExportToWkt, [c_void_p, POINTER(c_char_p)], decoding="ascii"
)
to_iso_wkt = string_output(
lgdal.OGR_G_ExportToIsoWkt, [c_void_p, POINTER(c_char_p)], decoding="ascii"
)
to_gml = string_output(
lgdal.OGR_G_ExportToGML, [c_void_p], str_result=True, decoding="ascii"
)
get_wkbsize = int_output(lgdal.OGR_G_WkbSizeEx, [c_void_p])
# Geometry spatial-reference related routines.
assign_srs = void_output(
lgdal.OGR_G_AssignSpatialReference, [c_void_p, c_void_p], errcheck=False
)
get_geom_srs = srs_output(lgdal.OGR_G_GetSpatialReference, [c_void_p])
# Geometry properties
get_area = double_output(lgdal.OGR_G_GetArea, [c_void_p])
get_centroid = void_output(lgdal.OGR_G_Centroid, [c_void_p, c_void_p])
get_dims = int_output(lgdal.OGR_G_GetDimension, [c_void_p])
get_coord_dim = int_output(lgdal.OGR_G_CoordinateDimension, [c_void_p])
set_coord_dim = void_output(
lgdal.OGR_G_SetCoordinateDimension, [c_void_p, c_int], errcheck=False
)
is_empty = int_output(
lgdal.OGR_G_IsEmpty, [c_void_p], errcheck=lambda result, func, cargs: bool(result)
)
get_geom_count = int_output(lgdal.OGR_G_GetGeometryCount, [c_void_p])
get_geom_name = const_string_output(
lgdal.OGR_G_GetGeometryName, [c_void_p], decoding="ascii"
)
get_geom_type = int_output(lgdal.OGR_G_GetGeometryType, [c_void_p])
get_point_count = int_output(lgdal.OGR_G_GetPointCount, [c_void_p])
get_point = void_output(
lgdal.OGR_G_GetPointZM,
[
c_void_p,
c_int,
POINTER(c_double),
POINTER(c_double),
POINTER(c_double),
POINTER(c_double),
],
errcheck=False,
)
geom_close_rings = void_output(lgdal.OGR_G_CloseRings, [c_void_p], errcheck=False)
# Topology routines.
ogr_contains = topology_func(lgdal.OGR_G_Contains)
ogr_crosses = topology_func(lgdal.OGR_G_Crosses)
ogr_disjoint = topology_func(lgdal.OGR_G_Disjoint)
ogr_equals = topology_func(lgdal.OGR_G_Equals)
ogr_intersects = topology_func(lgdal.OGR_G_Intersects)
ogr_overlaps = topology_func(lgdal.OGR_G_Overlaps)
ogr_touches = topology_func(lgdal.OGR_G_Touches)
ogr_within = topology_func(lgdal.OGR_G_Within)
# Transformation routines.
geom_transform = void_output(lgdal.OGR_G_Transform, [c_void_p, c_void_p])
geom_transform_to = void_output(lgdal.OGR_G_TransformTo, [c_void_p, c_void_p])
# For retrieving the envelope of the geometry.
get_envelope = env_func(lgdal.OGR_G_GetEnvelope, [c_void_p, POINTER(OGREnvelope)])
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/django/contrib/gis/gdal/prototypes/__init__.py | django/contrib/gis/gdal/prototypes/__init__.py | python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false | |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/django/contrib/gis/gdal/prototypes/srs.py | django/contrib/gis/gdal/prototypes/srs.py | from ctypes import POINTER, c_char_p, c_int, c_void_p
from django.contrib.gis.gdal.libgdal import lgdal, std_call
from django.contrib.gis.gdal.prototypes.generation import (
const_string_output,
double_output,
int_output,
srs_output,
string_output,
void_output,
)
# Shortcut generation for routines with known parameters.
def srs_double(f):
"""
Create a function prototype for the OSR routines that take
the OSRSpatialReference object and return a double value.
"""
return double_output(f, [c_void_p, POINTER(c_int)], errcheck=True)
def units_func(f):
"""
Create a ctypes function prototype for OSR units functions, e.g.,
OSRGetAngularUnits, OSRGetLinearUnits.
"""
return double_output(f, [c_void_p, POINTER(c_char_p)], strarg=True)
# Creation & destruction.
clone_srs = srs_output(std_call("OSRClone"), [c_void_p])
new_srs = srs_output(std_call("OSRNewSpatialReference"), [c_char_p])
release_srs = void_output(lgdal.OSRRelease, [c_void_p], errcheck=False)
destroy_srs = void_output(
std_call("OSRDestroySpatialReference"), [c_void_p], errcheck=False
)
srs_validate = void_output(lgdal.OSRValidate, [c_void_p])
set_axis_strategy = void_output(
lgdal.OSRSetAxisMappingStrategy, [c_void_p, c_int], errcheck=False
)
# Getting the semi_major, semi_minor, and flattening functions.
semi_major = srs_double(lgdal.OSRGetSemiMajor)
semi_minor = srs_double(lgdal.OSRGetSemiMinor)
invflattening = srs_double(lgdal.OSRGetInvFlattening)
# WKT, PROJ, EPSG, XML importation routines.
from_wkt = void_output(lgdal.OSRImportFromWkt, [c_void_p, POINTER(c_char_p)])
from_proj = void_output(lgdal.OSRImportFromProj4, [c_void_p, c_char_p])
from_epsg = void_output(std_call("OSRImportFromEPSG"), [c_void_p, c_int])
from_xml = void_output(lgdal.OSRImportFromXML, [c_void_p, c_char_p])
from_user_input = void_output(std_call("OSRSetFromUserInput"), [c_void_p, c_char_p])
# Morphing to/from ESRI WKT.
morph_to_esri = void_output(lgdal.OSRMorphToESRI, [c_void_p])
morph_from_esri = void_output(lgdal.OSRMorphFromESRI, [c_void_p])
# Identifying the EPSG
identify_epsg = void_output(lgdal.OSRAutoIdentifyEPSG, [c_void_p])
# Getting the angular_units, linear_units functions
linear_units = units_func(lgdal.OSRGetLinearUnits)
angular_units = units_func(lgdal.OSRGetAngularUnits)
# For exporting to WKT, PROJ, "Pretty" WKT, and XML.
to_wkt = string_output(
std_call("OSRExportToWkt"), [c_void_p, POINTER(c_char_p)], decoding="utf-8"
)
to_proj = string_output(
std_call("OSRExportToProj4"), [c_void_p, POINTER(c_char_p)], decoding="ascii"
)
to_pretty_wkt = string_output(
std_call("OSRExportToPrettyWkt"),
[c_void_p, POINTER(c_char_p), c_int],
offset=-2,
decoding="utf-8",
)
to_xml = string_output(
lgdal.OSRExportToXML,
[c_void_p, POINTER(c_char_p), c_char_p],
offset=-2,
decoding="utf-8",
)
# String attribute retrieval routines.
get_attr_value = const_string_output(
std_call("OSRGetAttrValue"), [c_void_p, c_char_p, c_int], decoding="utf-8"
)
get_auth_name = const_string_output(
lgdal.OSRGetAuthorityName, [c_void_p, c_char_p], decoding="ascii"
)
get_auth_code = const_string_output(
lgdal.OSRGetAuthorityCode, [c_void_p, c_char_p], decoding="ascii"
)
# SRS Properties
isgeographic = int_output(lgdal.OSRIsGeographic, [c_void_p])
islocal = int_output(lgdal.OSRIsLocal, [c_void_p])
isprojected = int_output(lgdal.OSRIsProjected, [c_void_p])
# Coordinate transformation
new_ct = srs_output(std_call("OCTNewCoordinateTransformation"), [c_void_p, c_void_p])
destroy_ct = void_output(
std_call("OCTDestroyCoordinateTransformation"), [c_void_p], errcheck=False
)
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/django/contrib/gis/gdal/prototypes/raster.py | django/contrib/gis/gdal/prototypes/raster.py | """
This module houses the ctypes function prototypes for GDAL DataSource (raster)
related data structures.
"""
from ctypes import POINTER, c_bool, c_char_p, c_double, c_int, c_void_p
from functools import partial
from django.contrib.gis.gdal.libgdal import std_call
from django.contrib.gis.gdal.prototypes.generation import (
chararray_output,
const_string_output,
double_output,
int_output,
void_output,
voidptr_output,
)
# For more detail about c function names and definitions see
# https://gdal.org/api/raster_c_api.html
# https://gdal.org/doxygen/gdalwarper_8h.html
# https://gdal.org/api/gdal_utils.html
# Prepare partial functions that use cpl error codes
void_output = partial(void_output, cpl=True)
const_string_output = partial(const_string_output, cpl=True)
double_output = partial(double_output, cpl=True)
# Raster Data Source Routines
create_ds = voidptr_output(
std_call("GDALCreate"), [c_void_p, c_char_p, c_int, c_int, c_int, c_int, c_void_p]
)
open_ds = voidptr_output(std_call("GDALOpen"), [c_char_p, c_int])
close_ds = void_output(std_call("GDALClose"), [c_void_p], errcheck=False)
flush_ds = int_output(std_call("GDALFlushCache"), [c_void_p])
copy_ds = voidptr_output(
std_call("GDALCreateCopy"),
[c_void_p, c_char_p, c_void_p, c_int, POINTER(c_char_p), c_void_p, c_void_p],
)
add_band_ds = void_output(std_call("GDALAddBand"), [c_void_p, c_int])
get_ds_description = const_string_output(std_call("GDALGetDescription"), [c_void_p])
get_ds_driver = voidptr_output(std_call("GDALGetDatasetDriver"), [c_void_p])
get_ds_info = const_string_output(std_call("GDALInfo"), [c_void_p, c_void_p])
get_ds_xsize = int_output(std_call("GDALGetRasterXSize"), [c_void_p])
get_ds_ysize = int_output(std_call("GDALGetRasterYSize"), [c_void_p])
get_ds_raster_count = int_output(std_call("GDALGetRasterCount"), [c_void_p])
get_ds_raster_band = voidptr_output(std_call("GDALGetRasterBand"), [c_void_p, c_int])
get_ds_projection_ref = const_string_output(
std_call("GDALGetProjectionRef"), [c_void_p]
)
set_ds_projection_ref = void_output(std_call("GDALSetProjection"), [c_void_p, c_char_p])
get_ds_geotransform = void_output(
std_call("GDALGetGeoTransform"), [c_void_p, POINTER(c_double * 6)], errcheck=False
)
set_ds_geotransform = void_output(
std_call("GDALSetGeoTransform"), [c_void_p, POINTER(c_double * 6)]
)
get_ds_metadata = chararray_output(
std_call("GDALGetMetadata"), [c_void_p, c_char_p], errcheck=False
)
set_ds_metadata = void_output(
std_call("GDALSetMetadata"), [c_void_p, POINTER(c_char_p), c_char_p]
)
get_ds_metadata_domain_list = chararray_output(
std_call("GDALGetMetadataDomainList"), [c_void_p], errcheck=False
)
get_ds_metadata_item = const_string_output(
std_call("GDALGetMetadataItem"), [c_void_p, c_char_p, c_char_p]
)
set_ds_metadata_item = const_string_output(
std_call("GDALSetMetadataItem"), [c_void_p, c_char_p, c_char_p, c_char_p]
)
free_dsl = void_output(std_call("CSLDestroy"), [POINTER(c_char_p)], errcheck=False)
# Raster Band Routines
band_io = void_output(
std_call("GDALRasterIO"),
[
c_void_p,
c_int,
c_int,
c_int,
c_int,
c_int,
c_void_p,
c_int,
c_int,
c_int,
c_int,
c_int,
],
)
get_band_xsize = int_output(std_call("GDALGetRasterBandXSize"), [c_void_p])
get_band_ysize = int_output(std_call("GDALGetRasterBandYSize"), [c_void_p])
get_band_index = int_output(std_call("GDALGetBandNumber"), [c_void_p])
get_band_description = const_string_output(std_call("GDALGetDescription"), [c_void_p])
get_band_ds = voidptr_output(std_call("GDALGetBandDataset"), [c_void_p])
get_band_datatype = int_output(std_call("GDALGetRasterDataType"), [c_void_p])
get_band_color_interp = int_output(
std_call("GDALGetRasterColorInterpretation"), [c_void_p]
)
get_band_nodata_value = double_output(
std_call("GDALGetRasterNoDataValue"), [c_void_p, POINTER(c_int)]
)
set_band_nodata_value = void_output(
std_call("GDALSetRasterNoDataValue"), [c_void_p, c_double]
)
delete_band_nodata_value = void_output(
std_call("GDALDeleteRasterNoDataValue"), [c_void_p]
)
get_band_statistics = void_output(
std_call("GDALGetRasterStatistics"),
[
c_void_p,
c_int,
c_int,
POINTER(c_double),
POINTER(c_double),
POINTER(c_double),
POINTER(c_double),
c_void_p,
c_void_p,
],
)
compute_band_statistics = void_output(
std_call("GDALComputeRasterStatistics"),
[
c_void_p,
c_int,
POINTER(c_double),
POINTER(c_double),
POINTER(c_double),
POINTER(c_double),
c_void_p,
c_void_p,
],
)
# Reprojection routine
reproject_image = void_output(
std_call("GDALReprojectImage"),
[
c_void_p,
c_char_p,
c_void_p,
c_char_p,
c_int,
c_double,
c_double,
c_void_p,
c_void_p,
c_void_p,
],
)
auto_create_warped_vrt = voidptr_output(
std_call("GDALAutoCreateWarpedVRT"),
[c_void_p, c_char_p, c_char_p, c_int, c_double, c_void_p],
)
# Create VSI gdal raster files from in-memory buffers.
# https://gdal.org/api/cpl.html#cpl-vsi-h
create_vsi_file_from_mem_buffer = voidptr_output(
std_call("VSIFileFromMemBuffer"), [c_char_p, c_void_p, c_int, c_int]
)
get_mem_buffer_from_vsi_file = voidptr_output(
std_call("VSIGetMemFileBuffer"), [c_char_p, POINTER(c_int), c_bool]
)
unlink_vsi_file = int_output(std_call("VSIUnlink"), [c_char_p])
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/django/contrib/gis/gdal/prototypes/generation.py | django/contrib/gis/gdal/prototypes/generation.py | """
This module contains functions that generate ctypes prototypes for the
GDAL routines.
"""
from ctypes import POINTER, c_bool, c_char_p, c_double, c_int, c_int64, c_void_p
from functools import partial
from django.contrib.gis.gdal.prototypes.errcheck import (
check_arg_errcode,
check_const_string,
check_errcode,
check_geom,
check_geom_offset,
check_pointer,
check_srs,
check_str_arg,
check_string,
)
class gdal_char_p(c_char_p):
pass
def bool_output(func, argtypes, errcheck=None):
"""Generate a ctypes function that returns a boolean value."""
func.argtypes = argtypes
func.restype = c_bool
if errcheck:
func.errcheck = errcheck
return func
def double_output(func, argtypes, errcheck=False, strarg=False, cpl=False):
"Generate a ctypes function that returns a double value."
func.argtypes = argtypes
func.restype = c_double
if errcheck:
func.errcheck = partial(check_arg_errcode, cpl=cpl)
if strarg:
func.errcheck = check_str_arg
return func
def geom_output(func, argtypes, offset=None):
"""
Generate a function that returns a Geometry either by reference
or directly (if the return_geom keyword is set to True).
"""
# Setting the argument types
func.argtypes = argtypes
if not offset:
# When a geometry pointer is directly returned.
func.restype = c_void_p
func.errcheck = check_geom
else:
# Error code returned, geometry is returned by-reference.
func.restype = c_int
def geomerrcheck(result, func, cargs):
return check_geom_offset(result, func, cargs, offset)
func.errcheck = geomerrcheck
return func
def int_output(func, argtypes, errcheck=None):
"Generate a ctypes function that returns an integer value."
func.argtypes = argtypes
func.restype = c_int
if errcheck:
func.errcheck = errcheck
return func
def int64_output(func, argtypes):
"Generate a ctypes function that returns a 64-bit integer value."
func.argtypes = argtypes
func.restype = c_int64
return func
def srs_output(func, argtypes):
"""
Generate a ctypes prototype for the given function with
the given C arguments that returns a pointer to an OGR
Spatial Reference System.
"""
func.argtypes = argtypes
func.restype = c_void_p
func.errcheck = check_srs
return func
def const_string_output(func, argtypes, offset=None, decoding=None, cpl=False):
func.argtypes = argtypes
if offset:
func.restype = c_int
else:
func.restype = c_char_p
def _check_const(result, func, cargs):
res = check_const_string(result, func, cargs, offset=offset, cpl=cpl)
if res and decoding:
res = res.decode(decoding)
return res
func.errcheck = _check_const
return func
def string_output(func, argtypes, offset=-1, str_result=False, decoding=None):
"""
Generate a ctypes prototype for the given function with the
given argument types that returns a string from a GDAL pointer.
The `const` flag indicates whether the allocated pointer should
be freed via the GDAL library routine VSIFree -- but only applies
only when `str_result` is True.
"""
func.argtypes = argtypes
if str_result:
# Use subclass of c_char_p so the error checking routine
# can free the memory at the pointer's address.
func.restype = gdal_char_p
else:
# Error code is returned
func.restype = c_int
# Dynamically defining our error-checking function with the
# given offset.
def _check_str(result, func, cargs):
res = check_string(result, func, cargs, offset=offset, str_result=str_result)
if res and decoding:
res = res.decode(decoding)
return res
func.errcheck = _check_str
return func
def void_output(func, argtypes, errcheck=True, cpl=False):
"""
For functions that don't only return an error code that needs to
be examined.
"""
if argtypes:
func.argtypes = argtypes
if errcheck:
# `errcheck` keyword may be set to False for routines that
# return void, rather than a status code.
func.restype = c_int
func.errcheck = partial(check_errcode, cpl=cpl)
else:
func.restype = None
return func
def voidptr_output(func, argtypes, errcheck=True):
"For functions that return c_void_p."
func.argtypes = argtypes
func.restype = c_void_p
if errcheck:
func.errcheck = check_pointer
return func
def chararray_output(func, argtypes, errcheck=True):
"""For functions that return a c_char_p array."""
func.argtypes = argtypes
func.restype = POINTER(c_char_p)
if errcheck:
func.errcheck = check_pointer
return func
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/django/contrib/gis/utils/ogrinspect.py | django/contrib/gis/utils/ogrinspect.py | """
This module is for inspecting OGR data sources and generating either
models for GeoDjango and/or mapping dictionaries for use with the
`LayerMapping` utility.
"""
from django.contrib.gis.gdal import DataSource
from django.contrib.gis.gdal.field import (
OFTDate,
OFTDateTime,
OFTInteger,
OFTInteger64,
OFTReal,
OFTString,
OFTTime,
)
def mapping(data_source, geom_name="geom", layer_key=0, multi_geom=False):
"""
Given a DataSource, generate a dictionary that may be used
for invoking the LayerMapping utility.
Keyword Arguments:
`geom_name` => The name of the geometry field to use for the model.
`layer_key` => The key specifying which layer in the DataSource to use;
defaults to 0 (the first layer). May be an integer index or a string
identifier for the layer.
`multi_geom` => Boolean (default: False) - specify as multigeometry.
"""
if isinstance(data_source, str):
# Instantiating the DataSource from the string.
data_source = DataSource(data_source)
elif isinstance(data_source, DataSource):
pass
else:
raise TypeError(
"Data source parameter must be a string or a DataSource object."
)
# Creating the dictionary.
_mapping = {}
# Generating the field name for each field in the layer.
for field in data_source[layer_key].fields:
mfield = field.lower()
if mfield[-1:] == "_":
mfield += "field"
_mapping[mfield] = field
gtype = data_source[layer_key].geom_type
if multi_geom:
gtype.to_multi()
_mapping[geom_name] = str(gtype).upper()
return _mapping
def ogrinspect(*args, **kwargs):
"""
Given a data source (either a string or a DataSource object) and a string
model name this function will generate a GeoDjango model.
Usage:
>>> from django.contrib.gis.utils import ogrinspect
>>> ogrinspect('/path/to/shapefile.shp','NewModel')
...will print model definition to stout
or put this in a Python script and use to redirect the output to a new
model like:
$ python generate_model.py > myapp/models.py
# generate_model.py
from django.contrib.gis.utils import ogrinspect
shp_file = 'data/mapping_hacks/world_borders.shp'
model_name = 'WorldBorders'
print(ogrinspect(shp_file, model_name, multi_geom=True, srid=4326,
geom_name='shapes', blank=True))
Required Arguments
`datasource` => string or DataSource object to file pointer
`model name` => string of name of new model class to create
Optional Keyword Arguments
`geom_name` => For specifying the model name for the Geometry Field.
Otherwise will default to `geom`
`layer_key` => The key specifying which layer in the DataSource to use;
defaults to 0 (the first layer). May be an integer index or a string
identifier for the layer.
`srid` => The SRID to use for the Geometry Field. If it can be determined,
the SRID of the datasource is used.
`multi_geom` => Boolean (default: False) - specify as multigeometry.
`name_field` => String - specifies a field name to return for the
__str__() method (which will be generated if specified).
`imports` => Boolean (default: True) - set to False to omit the
`from django.contrib.gis.db import models` code from the
autogenerated models thus avoiding duplicated imports when building
more than one model by batching ogrinspect()
`decimal` => Boolean or sequence (default: False). When set to True
all generated model fields corresponding to the `OFTReal` type will
be `DecimalField` instead of `FloatField`. A sequence of specific
field names to generate as `DecimalField` may also be used.
`blank` => Boolean or sequence (default: False). When set to True all
generated model fields will have `blank=True`. If the user wants to
give specific fields to have blank, then a list/tuple of OGR field
names may be used.
`null` => Boolean (default: False) - When set to True all generated
model fields will have `null=True`. If the user wants to specify
give specific fields to have null, then a list/tuple of OGR field
names may be used.
Note: Call the _ogrinspect() helper to do the heavy lifting.
"""
return "\n".join(_ogrinspect(*args, **kwargs))
def _ogrinspect(
data_source,
model_name,
geom_name="geom",
layer_key=0,
srid=None,
multi_geom=False,
name_field=None,
imports=True,
decimal=False,
blank=False,
null=False,
):
"""
Helper routine for `ogrinspect` that generates GeoDjango models
corresponding to the given data source. See the `ogrinspect` docstring for
more details.
"""
# Getting the DataSource
if isinstance(data_source, str):
data_source = DataSource(data_source)
elif isinstance(data_source, DataSource):
pass
else:
raise TypeError(
"Data source parameter must be a string or a DataSource object."
)
# Getting the layer corresponding to the layer key and getting
# a string listing of all OGR fields in the Layer.
layer = data_source[layer_key]
ogr_fields = layer.fields
# Creating lists from the `null`, `blank`, and `decimal`
# keyword arguments.
def process_kwarg(kwarg):
if isinstance(kwarg, (list, tuple)):
return [s.lower() for s in kwarg]
elif kwarg:
return [s.lower() for s in ogr_fields]
else:
return []
null_fields = process_kwarg(null)
blank_fields = process_kwarg(blank)
decimal_fields = process_kwarg(decimal)
# Gets the `null` and `blank` keywords for the given field name.
def get_kwargs_str(field_name):
kwlist = []
if field_name.lower() in null_fields:
kwlist.append("null=True")
if field_name.lower() in blank_fields:
kwlist.append("blank=True")
if kwlist:
return ", " + ", ".join(kwlist)
else:
return ""
# For those wishing to disable the imports.
if imports:
yield "# This is an auto-generated Django model module created by ogrinspect."
yield "from django.contrib.gis.db import models"
yield ""
yield ""
yield "class %s(models.Model):" % model_name
for field_name, width, precision, field_type in zip(
ogr_fields, layer.field_widths, layer.field_precisions, layer.field_types
):
# The model field name.
mfield = field_name.lower()
if mfield[-1:] == "_":
mfield += "field"
# Getting the keyword args string.
kwargs_str = get_kwargs_str(field_name)
if field_type is OFTReal:
# By default OFTReals are mapped to `FloatField`, however, they
# may also be mapped to `DecimalField` if specified in the
# `decimal` keyword.
if field_name.lower() in decimal_fields:
yield (
" %s = models.DecimalField(max_digits=%d, decimal_places=%d%s)"
) % (
mfield,
width,
precision,
kwargs_str,
)
else:
yield " %s = models.FloatField(%s)" % (mfield, kwargs_str[2:])
elif field_type is OFTInteger:
yield " %s = models.IntegerField(%s)" % (mfield, kwargs_str[2:])
elif field_type is OFTInteger64:
yield " %s = models.BigIntegerField(%s)" % (mfield, kwargs_str[2:])
elif field_type is OFTString:
yield " %s = models.CharField(max_length=%s%s)" % (
mfield,
width,
kwargs_str,
)
elif field_type is OFTDate:
yield " %s = models.DateField(%s)" % (mfield, kwargs_str[2:])
elif field_type is OFTDateTime:
yield " %s = models.DateTimeField(%s)" % (mfield, kwargs_str[2:])
elif field_type is OFTTime:
yield " %s = models.TimeField(%s)" % (mfield, kwargs_str[2:])
else:
raise TypeError("Unknown field type %s in %s" % (field_type, mfield))
# TODO: Autodetection of multigeometry types (see #7218).
gtype = layer.geom_type
if multi_geom:
gtype.to_multi()
geom_field = gtype.django
# Setting up the SRID keyword string.
if srid is None:
if layer.srs is None:
srid_str = "srid=-1"
else:
srid = layer.srs.srid
if srid is None:
srid_str = "srid=-1"
elif srid == 4326:
# WGS84 is already the default.
srid_str = ""
else:
srid_str = "srid=%s" % srid
else:
srid_str = "srid=%s" % srid
yield " %s = models.%s(%s)" % (geom_name, geom_field, srid_str)
if name_field:
yield ""
yield " def __str__(self): return self.%s" % name_field
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/django/contrib/gis/utils/layermapping.py | django/contrib/gis/utils/layermapping.py | # LayerMapping -- A Django Model/OGR Layer Mapping Utility
"""
The LayerMapping class provides a way to map the contents of OGR
vector files (e.g. SHP files) to Geographic-enabled Django models.
For more information, please consult the GeoDjango documentation:
https://docs.djangoproject.com/en/dev/ref/contrib/gis/layermapping/
"""
import sys
from decimal import Decimal
from decimal import InvalidOperation as DecimalInvalidOperation
from pathlib import Path
from django.contrib.gis.db.models import GeometryField
from django.contrib.gis.gdal import (
CoordTransform,
DataSource,
GDALException,
OGRGeometry,
OGRGeomType,
SpatialReference,
)
from django.contrib.gis.gdal.field import (
OFTDate,
OFTDateTime,
OFTInteger,
OFTInteger64,
OFTReal,
OFTString,
OFTTime,
)
from django.core.exceptions import FieldDoesNotExist, ObjectDoesNotExist
from django.db import connections, models, router, transaction
from django.utils.encoding import force_str
# LayerMapping exceptions.
class LayerMapError(Exception):
pass
class InvalidString(LayerMapError):
pass
class InvalidDecimal(LayerMapError):
pass
class InvalidInteger(LayerMapError):
pass
class MissingForeignKey(LayerMapError):
pass
class LayerMapping:
"A class that maps OGR Layers to GeoDjango Models."
# Acceptable 'base' types for a multi-geometry type.
MULTI_TYPES = {
1: OGRGeomType("MultiPoint"),
2: OGRGeomType("MultiLineString"),
3: OGRGeomType("MultiPolygon"),
OGRGeomType("Point25D").num: OGRGeomType("MultiPoint25D"),
OGRGeomType("LineString25D").num: OGRGeomType("MultiLineString25D"),
OGRGeomType("Polygon25D").num: OGRGeomType("MultiPolygon25D"),
}
# Acceptable Django field types and corresponding acceptable OGR
# counterparts.
FIELD_TYPES = {
models.AutoField: OFTInteger,
models.BigAutoField: OFTInteger64,
models.SmallAutoField: OFTInteger,
models.BooleanField: (OFTInteger, OFTReal, OFTString),
models.IntegerField: (OFTInteger, OFTReal, OFTString),
models.FloatField: (OFTInteger, OFTReal),
models.DateField: OFTDate,
models.DateTimeField: OFTDateTime,
models.EmailField: OFTString,
models.TimeField: OFTTime,
models.DecimalField: (OFTInteger, OFTReal),
models.CharField: OFTString,
models.SlugField: OFTString,
models.TextField: OFTString,
models.URLField: OFTString,
models.UUIDField: OFTString,
models.BigIntegerField: (OFTInteger, OFTReal, OFTString),
models.SmallIntegerField: (OFTInteger, OFTReal, OFTString),
models.PositiveBigIntegerField: (OFTInteger, OFTReal, OFTString),
models.PositiveIntegerField: (OFTInteger, OFTReal, OFTString),
models.PositiveSmallIntegerField: (OFTInteger, OFTReal, OFTString),
}
def __init__(
self,
model,
data,
mapping,
layer=0,
source_srs=None,
encoding="utf-8",
transaction_mode="commit_on_success",
transform=True,
unique=None,
using=None,
):
"""
A LayerMapping object is initialized using the given Model (not an
instance), a DataSource (or string path to an OGR-supported data file),
and a mapping dictionary. See the module level docstring for more
details and keyword argument usage.
"""
# Getting the DataSource and the associated Layer.
if isinstance(data, (str, Path)):
self.ds = DataSource(data, encoding=encoding)
else:
self.ds = data
self.layer = self.ds[layer]
self.using = using if using is not None else router.db_for_write(model)
connection = connections[self.using]
self.spatial_backend = connection.ops
# Setting the mapping & model attributes.
self.mapping = mapping
self.model = model
# Checking the layer -- initialization of the object will fail if
# things don't check out before hand.
self.check_layer()
# Getting the geometry column associated with the model (an
# exception will be raised if there is no geometry column).
if connection.features.supports_transform:
self.geo_field = self.geometry_field()
else:
transform = False
# Checking the source spatial reference system, and getting
# the coordinate transformation object (unless the `transform`
# keyword is set to False)
if transform:
self.source_srs = self.check_srs(source_srs)
self.transform = self.coord_transform()
else:
self.transform = transform
# Setting the encoding for OFTString fields, if specified.
if encoding:
# Making sure the encoding exists, if not a LookupError
# exception will be thrown.
from codecs import lookup
lookup(encoding)
self.encoding = encoding
else:
self.encoding = None
if unique:
self.check_unique(unique)
transaction_mode = "autocommit" # Has to be set to autocommit.
self.unique = unique
else:
self.unique = None
# Setting the transaction decorator with the function in the
# transaction modes dictionary.
self.transaction_mode = transaction_mode
if transaction_mode == "autocommit":
self.transaction_decorator = None
elif transaction_mode == "commit_on_success":
self.transaction_decorator = transaction.atomic
else:
raise LayerMapError("Unrecognized transaction mode: %s" % transaction_mode)
# Checking routines used during initialization.
def check_fid_range(self, fid_range):
"Check the `fid_range` keyword."
if fid_range:
if isinstance(fid_range, (tuple, list)):
return slice(*fid_range)
elif isinstance(fid_range, slice):
return fid_range
else:
raise TypeError
else:
return None
def check_layer(self):
"""
Check the Layer metadata and ensure that it's compatible with the
mapping information and model. Unlike previous revisions, there is no
need to increment through each feature in the Layer.
"""
# The geometry field of the model is set here.
# TODO: Support more than one geometry field / model. However, this
# depends on the GDAL Driver in use.
self.geom_field = False
self.fields = {}
# Getting lists of the field names and the field types available in
# the OGR Layer.
ogr_fields = self.layer.fields
ogr_field_types = self.layer.field_types
# Function for determining if the OGR mapping field is in the Layer.
def check_ogr_fld(ogr_map_fld):
try:
idx = ogr_fields.index(ogr_map_fld)
except ValueError:
raise LayerMapError(
'Given mapping OGR field "%s" not found in OGR Layer.' % ogr_map_fld
)
return idx
# No need to increment through each feature in the model, simply check
# the Layer metadata against what was given in the mapping dictionary.
for field_name, ogr_name in self.mapping.items():
# Ensuring that a corresponding field exists in the model
# for the given field name in the mapping.
try:
model_field = self.model._meta.get_field(field_name)
except FieldDoesNotExist:
raise LayerMapError(
'Given mapping field "%s" not in given Model fields.' % field_name
)
# Getting the string name for the Django field class (e.g.,
# 'PointField').
fld_name = model_field.__class__.__name__
if isinstance(model_field, GeometryField):
if self.geom_field:
raise LayerMapError(
"LayerMapping does not support more than one GeometryField per "
"model."
)
# Getting the coordinate dimension of the geometry field.
coord_dim = model_field.dim
try:
if coord_dim == 3:
gtype = OGRGeomType(ogr_name + "25D")
else:
gtype = OGRGeomType(ogr_name)
except GDALException:
raise LayerMapError(
'Invalid mapping for GeometryField "%s".' % field_name
)
# Making sure that the OGR Layer's Geometry is compatible.
ltype = self.layer.geom_type
if not (
ltype.name.startswith(gtype.name)
or self.make_multi(ltype, model_field)
):
raise LayerMapError(
"Invalid mapping geometry; model has %s%s, "
"layer geometry type is %s."
% (fld_name, "(dim=3)" if coord_dim == 3 else "", ltype)
)
# Setting the `geom_field` attribute w/the name of the model
# field that is a Geometry. Also setting the coordinate
# dimension attribute.
self.geom_field = field_name
self.coord_dim = coord_dim
fields_val = model_field
elif isinstance(model_field, models.ForeignKey):
if isinstance(ogr_name, dict):
# Is every given related model mapping field in the Layer?
rel_model = model_field.remote_field.model
for rel_name, ogr_field in ogr_name.items():
idx = check_ogr_fld(ogr_field)
try:
rel_model._meta.get_field(rel_name)
except FieldDoesNotExist:
raise LayerMapError(
'ForeignKey mapping field "%s" not in %s fields.'
% (rel_name, rel_model.__class__.__name__)
)
fields_val = rel_model
else:
raise TypeError("ForeignKey mapping must be of dictionary type.")
else:
# Is the model field type supported by LayerMapping?
if model_field.__class__ not in self.FIELD_TYPES:
raise LayerMapError(
'Django field type "%s" has no OGR mapping (yet).' % fld_name
)
# Is the OGR field in the Layer?
idx = check_ogr_fld(ogr_name)
ogr_field = ogr_field_types[idx]
# Can the OGR field type be mapped to the Django field type?
if not issubclass(ogr_field, self.FIELD_TYPES[model_field.__class__]):
raise LayerMapError(
'OGR field "%s" (of type %s) cannot be mapped to Django %s.'
% (ogr_field, ogr_field.__name__, fld_name)
)
fields_val = model_field
self.fields[field_name] = fields_val
def check_srs(self, source_srs):
"Check the compatibility of the given spatial reference object."
if isinstance(source_srs, SpatialReference):
sr = source_srs
elif isinstance(source_srs, self.spatial_backend.spatial_ref_sys()):
sr = source_srs.srs
elif isinstance(source_srs, (int, str)):
sr = SpatialReference(source_srs)
else:
# Otherwise just pulling the SpatialReference from the layer
sr = self.layer.srs
if not sr:
raise LayerMapError("No source reference system defined.")
else:
return sr
def check_unique(self, unique):
"Check the `unique` keyword parameter -- may be a sequence or string."
if isinstance(unique, (list, tuple)):
# List of fields to determine uniqueness with
for attr in unique:
if attr not in self.mapping:
raise ValueError
elif isinstance(unique, str):
# Only a single field passed in.
if unique not in self.mapping:
raise ValueError
else:
raise TypeError(
"Unique keyword argument must be set with a tuple, list, or string."
)
# Keyword argument retrieval routines.
def feature_kwargs(self, feat):
"""
Given an OGR Feature, return a dictionary of keyword arguments for
constructing the mapped model.
"""
# The keyword arguments for model construction.
kwargs = {}
# Incrementing through each model field and OGR field in the
# dictionary mapping.
for field_name, ogr_name in self.mapping.items():
model_field = self.fields[field_name]
if isinstance(model_field, GeometryField):
# Verify OGR geometry.
try:
val = self.verify_geom(feat.geom, model_field)
except GDALException:
raise LayerMapError("Could not retrieve geometry from feature.")
elif isinstance(model_field, models.base.ModelBase):
# The related _model_, not a field was passed in -- indicating
# another mapping for the related Model.
val = self.verify_fk(feat, model_field, ogr_name)
else:
# Otherwise, verify OGR Field type.
val = self.verify_ogr_field(feat[ogr_name], model_field)
# Setting the keyword arguments for the field name with the
# value obtained above.
kwargs[field_name] = val
return kwargs
def unique_kwargs(self, kwargs):
"""
Given the feature keyword arguments (from `feature_kwargs`), construct
and return the uniqueness keyword arguments -- a subset of the feature
kwargs.
"""
if isinstance(self.unique, str):
return {self.unique: kwargs[self.unique]}
else:
return {fld: kwargs[fld] for fld in self.unique}
# Verification routines used in constructing model keyword arguments.
def verify_ogr_field(self, ogr_field, model_field):
"""
Verify if the OGR Field contents are acceptable to the model field. If
they are, return the verified value, otherwise raise an exception.
"""
if isinstance(ogr_field, OFTString) and isinstance(
model_field, (models.CharField, models.TextField)
):
if self.encoding and ogr_field.value is not None:
# The encoding for OGR data sources may be specified here
# (e.g., 'cp437' for Census Bureau boundary files).
val = force_str(ogr_field.value, self.encoding)
else:
val = ogr_field.value
if (
model_field.max_length
and val is not None
and len(val) > model_field.max_length
):
raise InvalidString(
"%s model field maximum string length is %s, given %s characters."
% (model_field.name, model_field.max_length, len(val))
)
elif isinstance(ogr_field, OFTReal) and isinstance(
model_field, models.DecimalField
):
try:
# Creating an instance of the Decimal value to use.
d = Decimal(str(ogr_field.value))
except DecimalInvalidOperation:
raise InvalidDecimal(
"Could not construct decimal from: %s" % ogr_field.value
)
# Getting the decimal value as a tuple.
dtup = d.as_tuple()
digits = dtup[1]
d_idx = dtup[2] # index where the decimal is
# Maximum amount of precision, or digits to the left of the
# decimal.
max_prec = model_field.max_digits - model_field.decimal_places
# Getting the digits to the left of the decimal place for the
# given decimal.
if d_idx < 0:
n_prec = len(digits[:d_idx])
else:
n_prec = len(digits) + d_idx
# If we have more than the maximum digits allowed, then throw an
# InvalidDecimal exception.
if n_prec > max_prec:
raise InvalidDecimal(
"A DecimalField with max_digits %d, decimal_places %d must "
"round to an absolute value less than 10^%d."
% (model_field.max_digits, model_field.decimal_places, max_prec)
)
val = d
elif isinstance(ogr_field, (OFTReal, OFTString)) and isinstance(
model_field, models.IntegerField
):
# Attempt to convert any OFTReal and OFTString value to an
# OFTInteger.
try:
val = int(ogr_field.value)
except ValueError:
raise InvalidInteger(
"Could not construct integer from: %s" % ogr_field.value
)
else:
val = ogr_field.value
return val
def verify_fk(self, feat, rel_model, rel_mapping):
"""
Given an OGR Feature, the related model and its dictionary mapping,
retrieve the related model for the ForeignKey mapping.
"""
# TODO: It is expensive to retrieve a model for every record --
# explore if an efficient mechanism exists for caching related
# ForeignKey models.
# Constructing and verifying the related model keyword arguments.
fk_kwargs = {}
for field_name, ogr_name in rel_mapping.items():
fk_kwargs[field_name] = self.verify_ogr_field(
feat[ogr_name], rel_model._meta.get_field(field_name)
)
# Attempting to retrieve and return the related model.
try:
return rel_model.objects.using(self.using).get(**fk_kwargs)
except ObjectDoesNotExist:
raise MissingForeignKey(
"No ForeignKey %s model found with keyword arguments: %s"
% (rel_model.__name__, fk_kwargs)
)
def verify_geom(self, geom, model_field):
"""
Verify the geometry -- construct and return a GeometryCollection
if necessary (for example if the model field is MultiPolygonField while
the mapped shapefile only contains Polygons).
"""
# Measured geometries are not yet supported by GeoDjango models.
if geom.is_measured:
geom.set_measured(False)
# Downgrade a 3D geom to a 2D one, if necessary.
if self.coord_dim == 2 and geom.is_3d:
geom.set_3d(False)
if self.make_multi(geom.geom_type, model_field):
# Constructing a multi-geometry type to contain the single geometry
multi_type = self.MULTI_TYPES[geom.geom_type.num]
g = OGRGeometry(multi_type)
g.add(geom)
else:
g = geom
# Transforming the geometry with our Coordinate Transformation object,
# but only if the class variable `transform` is set w/a CoordTransform
# object.
if self.transform:
g.transform(self.transform)
# Returning the WKT of the geometry.
return g.wkt
# Other model methods.
def coord_transform(self):
"Return the coordinate transformation object."
SpatialRefSys = self.spatial_backend.spatial_ref_sys()
try:
# Getting the target spatial reference system
target_srs = (
SpatialRefSys.objects.using(self.using)
.get(srid=self.geo_field.srid)
.srs
)
# Creating the CoordTransform object
return CoordTransform(self.source_srs, target_srs)
except Exception as exc:
raise LayerMapError(
"Could not translate between the data source and model geometry."
) from exc
def geometry_field(self):
"""
Return the GeometryField instance associated with the geographic
column.
"""
# Use `get_field()` on the model's options so that we
# get the correct field instance if there's model inheritance.
opts = self.model._meta
return opts.get_field(self.geom_field)
def make_multi(self, geom_type, model_field):
"""
Given the OGRGeomType for a geometry and its associated GeometryField,
determine whether the geometry should be turned into a
GeometryCollection.
"""
return (
geom_type.num in self.MULTI_TYPES
and model_field.__class__.__name__ == "Multi%s" % geom_type.django
)
def save(
self,
verbose=False,
fid_range=False,
step=False,
progress=False,
silent=False,
stream=sys.stdout,
strict=False,
):
"""
Save the contents from the OGR DataSource Layer into the database
according to the mapping dictionary given at initialization.
Keyword Parameters:
verbose:
If set, information will be printed subsequent to each model save
executed on the database.
fid_range:
May be set with a slice or tuple of (begin, end) feature ID's to map
from the data source. In other words, this keyword enables the user
to selectively import a subset range of features in the geographic
data source.
step:
If set with an integer, transactions will occur at every step
interval. For example, if step=1000, a commit would occur after
the 1,000th feature, the 2,000th feature etc.
progress:
When this keyword is set, status information will be printed giving
the number of features processed and successfully saved. By default,
progress information will pe printed every 1000 features processed,
however, this default may be overridden by setting this keyword with
an integer for the desired interval.
stream:
Status information will be written to this file handle. Defaults to
using `sys.stdout`, but any object with a `write` method is
supported.
silent:
By default, non-fatal error notifications are printed to stdout, but
this keyword may be set to disable these notifications.
strict:
Execution of the model mapping will cease upon the first error
encountered. The default behavior is to attempt to continue.
"""
# Getting the default Feature ID range.
default_range = self.check_fid_range(fid_range)
# Setting the progress interval, if requested.
if progress:
if progress is True or not isinstance(progress, int):
progress_interval = 1000
else:
progress_interval = progress
def _save(feat_range=default_range, num_feat=0, num_saved=0):
if feat_range:
layer_iter = self.layer[feat_range]
else:
layer_iter = self.layer
for feat in layer_iter:
num_feat += 1
# Getting the keyword arguments
try:
kwargs = self.feature_kwargs(feat)
except LayerMapError as msg:
# Something borked the validation
if strict:
raise
elif not silent:
stream.write(
"Ignoring Feature ID %s because: %s\n" % (feat.fid, msg)
)
else:
# Constructing the model using the keyword args
is_update = False
if self.unique:
# If we want unique models on a particular field,
# handle the geometry appropriately.
try:
# Getting the keyword arguments and retrieving
# the unique model.
u_kwargs = self.unique_kwargs(kwargs)
m = self.model.objects.using(self.using).get(**u_kwargs)
is_update = True
# Getting the geometry (in OGR form), creating
# one from the kwargs WKT, adding in additional
# geometries, and update the attribute with the
# just-updated geometry WKT.
geom_value = getattr(m, self.geom_field)
if geom_value is None:
geom = OGRGeometry(kwargs[self.geom_field])
else:
geom = geom_value.ogr
new = OGRGeometry(kwargs[self.geom_field])
for g in new:
geom.add(g)
setattr(m, self.geom_field, geom.wkt)
except ObjectDoesNotExist:
# No unique model exists yet, create.
m = self.model(**kwargs)
else:
m = self.model(**kwargs)
try:
# Attempting to save.
m.save(using=self.using)
num_saved += 1
if verbose:
stream.write(
"%s: %s\n" % ("Updated" if is_update else "Saved", m)
)
except Exception as msg:
if strict:
# Bailing out if the `strict` keyword is set.
if not silent:
stream.write(
"Failed to save the feature (id: %s) into the "
"model with the keyword arguments:\n" % feat.fid
)
stream.write("%s\n" % kwargs)
raise
elif not silent:
stream.write(
"Failed to save %s:\n %s\nContinuing\n" % (kwargs, msg)
)
# Printing progress information, if requested.
if progress and num_feat % progress_interval == 0:
stream.write(
"Processed %d features, saved %d ...\n" % (num_feat, num_saved)
)
# Only used for status output purposes -- incremental saving uses
# the values returned here.
return num_saved, num_feat
if self.transaction_decorator is not None:
_save = self.transaction_decorator(_save)
nfeat = self.layer.num_feat
if step and isinstance(step, int) and step < nfeat:
# Incremental saving is requested at the given interval (step)
if default_range:
raise LayerMapError(
"The `step` keyword may not be used in conjunction with the "
"`fid_range` keyword."
)
beg, num_feat, num_saved = (0, 0, 0)
indices = range(step, nfeat, step)
n_i = len(indices)
for i, end in enumerate(indices):
# Constructing the slice to use for this step; the last slice
# is special (e.g, [100:] instead of [90:100]).
if i + 1 == n_i:
step_slice = slice(beg, None)
else:
step_slice = slice(beg, end)
try:
num_feat, num_saved = _save(step_slice, num_feat, num_saved)
beg = end
except Exception: # Deliberately catch everything
stream.write(
"%s\nFailed to save slice: %s\n" % ("=-" * 20, step_slice)
)
raise
else:
# Otherwise, just calling the previously defined _save() function.
_save()
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/django/contrib/gis/utils/ogrinfo.py | django/contrib/gis/utils/ogrinfo.py | """
This module includes some utility functions for inspecting the layout
of a GDAL data source -- the functionality is analogous to the output
produced by the `ogrinfo` utility.
"""
from django.contrib.gis.gdal import DataSource
from django.contrib.gis.gdal.geometries import GEO_CLASSES
def ogrinfo(data_source, num_features=10):
"""
Walk the available layers in the supplied `data_source`, displaying
the fields for the first `num_features` features.
"""
# Checking the parameters.
if isinstance(data_source, str):
data_source = DataSource(data_source)
elif isinstance(data_source, DataSource):
pass
else:
raise Exception(
"Data source parameter must be a string or a DataSource object."
)
for i, layer in enumerate(data_source):
print("data source : %s" % data_source.name)
print("==== layer %s" % i)
print(" shape type: %s" % GEO_CLASSES[layer.geom_type.num].__name__)
print(" # features: %s" % len(layer))
print(" srs: %s" % layer.srs)
extent_tup = layer.extent.tuple
print(" extent: %s - %s" % (extent_tup[0:2], extent_tup[2:4]))
print("Displaying the first %s features ====" % num_features)
width = max(*map(len, layer.fields))
fmt = " %%%ss: %%s" % width
for j, feature in enumerate(layer[:num_features]):
print("=== Feature %s" % j)
for fld_name in layer.fields:
type_name = feature[fld_name].type_name
output = fmt % (fld_name, type_name)
val = feature.get(fld_name)
if val:
if isinstance(val, str):
val_fmt = ' ("%s")'
else:
val_fmt = " (%s)"
output += val_fmt % val
else:
output += " (None)"
print(output)
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/django/contrib/gis/utils/__init__.py | django/contrib/gis/utils/__init__.py | """
This module contains useful utilities for GeoDjango.
"""
from django.contrib.gis.utils.layermapping import LayerMapError, LayerMapping
from django.contrib.gis.utils.ogrinfo import ogrinfo
from django.contrib.gis.utils.ogrinspect import mapping, ogrinspect
from django.contrib.gis.utils.srs import add_srs_entry
__all__ = [
"add_srs_entry",
"mapping",
"ogrinfo",
"ogrinspect",
"LayerMapError",
"LayerMapping",
]
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/django/contrib/gis/utils/srs.py | django/contrib/gis/utils/srs.py | from django.contrib.gis.gdal import SpatialReference
from django.db import DEFAULT_DB_ALIAS, connections
def add_srs_entry(
srs, auth_name="EPSG", auth_srid=None, ref_sys_name=None, database=None
):
"""
Take a GDAL SpatialReference system and add its information to the
`spatial_ref_sys` table of the spatial backend. Doing this enables
database-level spatial transformations for the backend. Thus, this utility
is useful for adding spatial reference systems not included by default with
the backend:
>>> from django.contrib.gis.utils import add_srs_entry
>>> add_srs_entry(3857)
Keyword Arguments:
auth_name:
This keyword may be customized with the value of the `auth_name` field.
Defaults to 'EPSG'.
auth_srid:
This keyword may be customized with the value of the `auth_srid` field.
Defaults to the SRID determined by GDAL.
ref_sys_name:
For SpatiaLite users only, sets the value of the `ref_sys_name` field.
Defaults to the name determined by GDAL.
database:
The name of the database connection to use; the default is the value
of `django.db.DEFAULT_DB_ALIAS` (at the time of this writing, its value
is 'default').
"""
database = database or DEFAULT_DB_ALIAS
connection = connections[database]
if not hasattr(connection.ops, "spatial_version"):
raise Exception("The `add_srs_entry` utility only works with spatial backends.")
if not connection.features.supports_add_srs_entry:
raise Exception("This utility does not support your database backend.")
SpatialRefSys = connection.ops.spatial_ref_sys()
# If argument is not a `SpatialReference` instance, use it as parameter
# to construct a `SpatialReference` instance.
if not isinstance(srs, SpatialReference):
srs = SpatialReference(srs)
if srs.srid is None:
raise Exception(
"Spatial reference requires an SRID to be "
"compatible with the spatial backend."
)
# Initializing the keyword arguments dictionary for both PostGIS
# and SpatiaLite.
kwargs = {
"srid": srs.srid,
"auth_name": auth_name,
"auth_srid": auth_srid or srs.srid,
"proj4text": srs.proj4,
}
# Backend-specific fields for the SpatialRefSys model.
srs_field_names = {f.name for f in SpatialRefSys._meta.get_fields()}
if "srtext" in srs_field_names:
kwargs["srtext"] = srs.wkt
if "ref_sys_name" in srs_field_names:
# SpatiaLite specific
kwargs["ref_sys_name"] = ref_sys_name or srs.name
# Creating the spatial_ref_sys model.
try:
# Try getting via SRID only, because using all kwargs may
# differ from exact wkt/proj in database.
SpatialRefSys.objects.using(database).get(srid=srs.srid)
except SpatialRefSys.DoesNotExist:
SpatialRefSys.objects.using(database).create(**kwargs)
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/django/contrib/gis/sitemaps/views.py | django/contrib/gis/sitemaps/views.py | from django.apps import apps
from django.contrib.gis.db.models import GeometryField
from django.contrib.gis.db.models.functions import AsKML, Transform
from django.contrib.gis.shortcuts import render_to_kml, render_to_kmz
from django.core.exceptions import FieldDoesNotExist
from django.db import DEFAULT_DB_ALIAS, connections
from django.http import Http404
def kml(request, label, model, field_name=None, compress=False, using=DEFAULT_DB_ALIAS):
"""
This view generates KML for the given app label, model, and field name.
The field name must be that of a geographic field.
"""
placemarks = []
try:
klass = apps.get_model(label, model)
except LookupError:
raise Http404(
'You must supply a valid app label and module name. Got "%s.%s"'
% (label, model)
)
if field_name:
try:
field = klass._meta.get_field(field_name)
if not isinstance(field, GeometryField):
raise FieldDoesNotExist
except FieldDoesNotExist:
raise Http404("Invalid geometry field.")
connection = connections[using]
if connection.features.has_AsKML_function:
# Database will take care of transformation.
placemarks = klass._default_manager.using(using).annotate(kml=AsKML(field_name))
else:
# If the database offers no KML method, we use the `kml`
# attribute of the lazy geometry instead.
placemarks = []
if connection.features.has_Transform_function:
qs = klass._default_manager.using(using).annotate(
**{"%s_4326" % field_name: Transform(field_name, 4326)}
)
field_name += "_4326"
else:
qs = klass._default_manager.using(using).all()
for mod in qs:
mod.kml = getattr(mod, field_name).kml
placemarks.append(mod)
# Getting the render function and rendering to the correct.
if compress:
render = render_to_kmz
else:
render = render_to_kml
return render("gis/kml/placemarks.kml", {"places": placemarks})
def kmz(request, label, model, field_name=None, using=DEFAULT_DB_ALIAS):
"""
Return KMZ for the given app label, model, and field name.
"""
return kml(request, label, model, field_name, compress=True, using=using)
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/django/contrib/gis/sitemaps/__init__.py | django/contrib/gis/sitemaps/__init__.py | # Geo-enabled Sitemap classes.
from django.contrib.gis.sitemaps.kml import KMLSitemap, KMZSitemap
__all__ = ["KMLSitemap", "KMZSitemap"]
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/django/contrib/gis/sitemaps/kml.py | django/contrib/gis/sitemaps/kml.py | from django.apps import apps
from django.contrib.gis.db.models import GeometryField
from django.contrib.sitemaps import Sitemap
from django.db import models
from django.urls import reverse
class KMLSitemap(Sitemap):
"""
A minimal hook to produce KML sitemaps.
"""
geo_format = "kml"
def __init__(self, locations=None):
# If no locations specified, then we try to build for
# every model in installed applications.
self.locations = self._build_kml_sources(locations)
def _build_kml_sources(self, sources):
"""
Go through the given sources and return a 3-tuple of the application
label, module name, and field name of every GeometryField encountered
in the sources.
If no sources are provided, then all models.
"""
kml_sources = []
if sources is None:
sources = apps.get_models()
for source in sources:
if isinstance(source, models.base.ModelBase):
for field in source._meta.fields:
if isinstance(field, GeometryField):
kml_sources.append(
(
source._meta.app_label,
source._meta.model_name,
field.name,
)
)
elif isinstance(source, (list, tuple)):
if len(source) != 3:
raise ValueError(
"Must specify a 3-tuple of (app_label, module_name, "
"field_name)."
)
kml_sources.append(source)
else:
raise TypeError("KML Sources must be a model or a 3-tuple.")
return kml_sources
def get_urls(self, page=1, site=None, protocol=None):
"""
This method is overridden so the appropriate `geo_format` attribute
is placed on each URL element.
"""
urls = Sitemap.get_urls(self, page=page, site=site, protocol=protocol)
for url in urls:
url["geo_format"] = self.geo_format
return urls
def items(self):
return self.locations
def location(self, obj):
return reverse(
"django.contrib.gis.sitemaps.views.%s" % self.geo_format,
kwargs={
"label": obj[0],
"model": obj[1],
"field_name": obj[2],
},
)
class KMZSitemap(KMLSitemap):
geo_format = "kmz"
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/django/middleware/csp.py | django/middleware/csp.py | from django.conf import settings
from django.utils.csp import CSP, LazyNonce, build_policy
from django.utils.deprecation import MiddlewareMixin
def get_nonce(request):
return getattr(request, "_csp_nonce", None)
class ContentSecurityPolicyMiddleware(MiddlewareMixin):
def process_request(self, request):
request._csp_nonce = LazyNonce()
def process_response(self, request, response):
nonce = get_nonce(request)
sentinel = object()
if (csp_config := getattr(response, "_csp_config", sentinel)) is sentinel:
csp_config = settings.SECURE_CSP
if (csp_ro_config := getattr(response, "_csp_ro_config", sentinel)) is sentinel:
csp_ro_config = settings.SECURE_CSP_REPORT_ONLY
for header, config in [
(CSP.HEADER_ENFORCE, csp_config),
(CSP.HEADER_REPORT_ONLY, csp_ro_config),
]:
# If headers are already set on the response, don't overwrite them.
# This allows for views to set their own CSP headers as needed.
# An empty config means CSP headers are not added to the response.
if config and header not in response:
response.headers[str(header)] = build_policy(config, nonce)
return response
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/django/middleware/security.py | django/middleware/security.py | import re
from django.conf import settings
from django.http import HttpResponsePermanentRedirect
from django.utils.deprecation import MiddlewareMixin
class SecurityMiddleware(MiddlewareMixin):
def __init__(self, get_response):
super().__init__(get_response)
self.sts_seconds = settings.SECURE_HSTS_SECONDS
self.sts_include_subdomains = settings.SECURE_HSTS_INCLUDE_SUBDOMAINS
self.sts_preload = settings.SECURE_HSTS_PRELOAD
self.content_type_nosniff = settings.SECURE_CONTENT_TYPE_NOSNIFF
self.redirect = settings.SECURE_SSL_REDIRECT
self.redirect_host = settings.SECURE_SSL_HOST
self.redirect_exempt = [re.compile(r) for r in settings.SECURE_REDIRECT_EXEMPT]
self.referrer_policy = settings.SECURE_REFERRER_POLICY
self.cross_origin_opener_policy = settings.SECURE_CROSS_ORIGIN_OPENER_POLICY
def process_request(self, request):
path = request.path.lstrip("/")
if (
self.redirect
and not request.is_secure()
and not any(pattern.search(path) for pattern in self.redirect_exempt)
):
host = self.redirect_host or request.get_host()
return HttpResponsePermanentRedirect(
"https://%s%s" % (host, request.get_full_path())
)
def process_response(self, request, response):
if (
self.sts_seconds
and request.is_secure()
and "Strict-Transport-Security" not in response
):
sts_header = "max-age=%s" % self.sts_seconds
if self.sts_include_subdomains:
sts_header += "; includeSubDomains"
if self.sts_preload:
sts_header += "; preload"
response.headers["Strict-Transport-Security"] = sts_header
if self.content_type_nosniff:
response.headers.setdefault("X-Content-Type-Options", "nosniff")
if self.referrer_policy:
# Support a comma-separated string or iterable of values to allow
# fallback.
response.headers.setdefault(
"Referrer-Policy",
",".join(
[v.strip() for v in self.referrer_policy.split(",")]
if isinstance(self.referrer_policy, str)
else self.referrer_policy
),
)
if self.cross_origin_opener_policy:
response.setdefault(
"Cross-Origin-Opener-Policy",
self.cross_origin_opener_policy,
)
return response
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/django/middleware/http.py | django/middleware/http.py | from django.utils.cache import cc_delim_re, get_conditional_response, set_response_etag
from django.utils.deprecation import MiddlewareMixin
from django.utils.http import parse_http_date_safe
class ConditionalGetMiddleware(MiddlewareMixin):
"""
Handle conditional GET operations. If the response has an ETag or
Last-Modified header and the request has If-None-Match or
If-Modified-Since, replace the response with HttpNotModified. Add an ETag
header if needed.
"""
def process_response(self, request, response):
# It's too late to prevent an unsafe request with a 412 response, and
# for a HEAD request, the response body is always empty so computing
# an accurate ETag isn't possible.
if request.method != "GET":
return response
if self.needs_etag(response) and not response.has_header("ETag"):
set_response_etag(response)
etag = response.get("ETag")
last_modified = response.get("Last-Modified")
last_modified = last_modified and parse_http_date_safe(last_modified)
if etag or last_modified:
return get_conditional_response(
request,
etag=etag,
last_modified=last_modified,
response=response,
)
return response
def needs_etag(self, response):
"""Return True if an ETag header should be added to response."""
cache_control_headers = cc_delim_re.split(response.get("Cache-Control", ""))
return all(header.lower() != "no-store" for header in cache_control_headers)
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/django/middleware/common.py | django/middleware/common.py | import re
from urllib.parse import urlsplit
from django.conf import settings
from django.core.exceptions import PermissionDenied
from django.core.mail import mail_managers
from django.http import HttpResponsePermanentRedirect
from django.urls import is_valid_path
from django.utils.deprecation import MiddlewareMixin
from django.utils.http import escape_leading_slashes
class CommonMiddleware(MiddlewareMixin):
"""
"Common" middleware for taking care of some basic operations:
- Forbid access to User-Agents in settings.DISALLOWED_USER_AGENTS
- URL rewriting: Based on the APPEND_SLASH and PREPEND_WWW settings,
append missing slashes and/or prepends missing "www."s.
- If APPEND_SLASH is set and the initial URL doesn't end with a
slash, and it is not found in urlpatterns, form a new URL by
appending a slash at the end. If this new URL is found in
urlpatterns, return an HTTP redirect to this new URL; otherwise
process the initial URL as usual.
This behavior can be customized by subclassing CommonMiddleware and
overriding the response_redirect_class attribute.
"""
response_redirect_class = HttpResponsePermanentRedirect
def process_request(self, request):
"""
Check for denied User-Agents and rewrite the URL based on
settings.APPEND_SLASH and settings.PREPEND_WWW
"""
# Check for denied User-Agents
user_agent = request.META.get("HTTP_USER_AGENT")
if user_agent is not None:
for user_agent_regex in settings.DISALLOWED_USER_AGENTS:
if user_agent_regex.search(user_agent):
raise PermissionDenied("Forbidden user agent")
# Check for a redirect based on settings.PREPEND_WWW
host = request.get_host()
if settings.PREPEND_WWW and host and not host.startswith("www."):
# Check if we also need to append a slash so we can do it all
# with a single redirect. (This check may be somewhat expensive,
# so we only do it if we already know we're sending a redirect,
# or in process_response if we get a 404.)
if self.should_redirect_with_slash(request):
path = self.get_full_path_with_slash(request)
else:
path = request.get_full_path()
return self.response_redirect_class(f"{request.scheme}://www.{host}{path}")
def should_redirect_with_slash(self, request):
"""
Return True if settings.APPEND_SLASH is True and appending a slash to
the request path turns an invalid path into a valid one.
"""
if settings.APPEND_SLASH and not request.path_info.endswith("/"):
urlconf = getattr(request, "urlconf", None)
if not is_valid_path(request.path_info, urlconf):
match = is_valid_path("%s/" % request.path_info, urlconf)
if match:
view = match.func
return getattr(view, "should_append_slash", True)
return False
def get_full_path_with_slash(self, request):
"""
Return the full path of the request with a trailing slash appended.
Raise a RuntimeError if settings.DEBUG is True and request.method is
DELETE, POST, PUT, or PATCH.
"""
new_path = request.get_full_path(force_append_slash=True)
# Prevent construction of scheme relative urls.
new_path = escape_leading_slashes(new_path)
if settings.DEBUG and request.method in ("DELETE", "POST", "PUT", "PATCH"):
raise RuntimeError(
"You called this URL via %(method)s, but the URL doesn't end "
"in a slash and you have APPEND_SLASH set. Django can't "
"redirect to the slash URL while maintaining %(method)s data. "
"Change your form to point to %(url)s (note the trailing "
"slash), or set APPEND_SLASH=False in your Django settings."
% {
"method": request.method,
"url": request.get_host() + new_path,
}
)
return new_path
def process_response(self, request, response):
"""
When the status code of the response is 404, it may redirect to a path
with an appended slash if should_redirect_with_slash() returns True.
"""
# If the given URL is "Not Found", then check if we should redirect to
# a path with a slash appended.
if response.status_code == 404 and self.should_redirect_with_slash(request):
response = self.response_redirect_class(
self.get_full_path_with_slash(request)
)
# Add the Content-Length header to non-streaming responses if not
# already set.
if not response.streaming and not response.has_header("Content-Length"):
response.headers["Content-Length"] = str(len(response.content))
return response
class BrokenLinkEmailsMiddleware(MiddlewareMixin):
def process_response(self, request, response):
"""Send broken link emails for relevant 404 NOT FOUND responses."""
if response.status_code == 404 and not settings.DEBUG:
domain = request.get_host()
path = request.get_full_path()
referer = request.META.get("HTTP_REFERER", "")
if not self.is_ignorable_request(request, path, domain, referer):
ua = request.META.get("HTTP_USER_AGENT", "<none>")
ip = request.META.get("REMOTE_ADDR", "<none>")
mail_managers(
"Broken %slink on %s"
% (
(
"INTERNAL "
if self.is_internal_request(domain, referer)
else ""
),
domain,
),
"Referrer: %s\nRequested URL: %s\nUser agent: %s\n"
"IP address: %s\n" % (referer, path, ua, ip),
fail_silently=True,
)
return response
def is_internal_request(self, domain, referer):
"""
Return True if the referring URL is the same domain as the current
request.
"""
# Different subdomains are treated as different domains.
return bool(re.match("^https?://%s/" % re.escape(domain), referer))
def is_ignorable_request(self, request, uri, domain, referer):
"""
Return True if the given request *shouldn't* notify the site managers
according to project settings or in situations outlined by the inline
comments.
"""
# The referer is empty.
if not referer:
return True
# APPEND_SLASH is enabled and the referer is equal to the current URL
# without a trailing slash indicating an internal redirect.
if settings.APPEND_SLASH and uri.endswith("/") and referer == uri[:-1]:
return True
# A '?' in referer is identified as a search engine source.
if not self.is_internal_request(domain, referer) and "?" in referer:
return True
# The referer is equal to the current URL, ignoring the scheme (assumed
# to be a poorly implemented bot).
parsed_referer = urlsplit(referer)
if parsed_referer.netloc in ["", domain] and parsed_referer.path == uri:
return True
return any(pattern.search(uri) for pattern in settings.IGNORABLE_404_URLS)
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/django/middleware/gzip.py | django/middleware/gzip.py | from django.utils.cache import patch_vary_headers
from django.utils.deprecation import MiddlewareMixin
from django.utils.regex_helper import _lazy_re_compile
from django.utils.text import acompress_sequence, compress_sequence, compress_string
re_accepts_gzip = _lazy_re_compile(r"\bgzip\b")
class GZipMiddleware(MiddlewareMixin):
"""
Compress content if the browser allows gzip compression.
Set the Vary header accordingly, so that caches will base their storage
on the Accept-Encoding header.
"""
max_random_bytes = 100
def process_response(self, request, response):
# It's not worth attempting to compress really short responses.
if not response.streaming and len(response.content) < 200:
return response
# Avoid gzipping if we've already got a content-encoding.
if response.has_header("Content-Encoding"):
return response
patch_vary_headers(response, ("Accept-Encoding",))
ae = request.META.get("HTTP_ACCEPT_ENCODING", "")
if not re_accepts_gzip.search(ae):
return response
if response.streaming:
if response.is_async:
response.streaming_content = acompress_sequence(
response.streaming_content,
max_random_bytes=self.max_random_bytes,
)
else:
response.streaming_content = compress_sequence(
response.streaming_content,
max_random_bytes=self.max_random_bytes,
)
# Delete the `Content-Length` header for streaming content, because
# we won't know the compressed size until we stream it.
del response.headers["Content-Length"]
else:
# Return the compressed content only if it's actually shorter.
compressed_content = compress_string(
response.content,
max_random_bytes=self.max_random_bytes,
)
if len(compressed_content) >= len(response.content):
return response
response.content = compressed_content
response.headers["Content-Length"] = str(len(response.content))
# If there is a strong ETag, make it weak to fulfill the requirements
# of RFC 9110 Section 8.8.1 while also allowing conditional request
# matches on ETags.
etag = response.get("ETag")
if etag and etag.startswith('"'):
response.headers["ETag"] = "W/" + etag
response.headers["Content-Encoding"] = "gzip"
return response
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/django/middleware/clickjacking.py | django/middleware/clickjacking.py | """
Clickjacking Protection Middleware.
This module provides a middleware that implements protection against a
malicious site loading resources from your site in a hidden frame.
"""
from django.conf import settings
from django.utils.deprecation import MiddlewareMixin
class XFrameOptionsMiddleware(MiddlewareMixin):
"""
Set the X-Frame-Options HTTP header in HTTP responses.
Do not set the header if it's already set or if the response contains
a xframe_options_exempt value set to True.
By default, set the X-Frame-Options header to 'DENY', meaning the response
cannot be displayed in a frame, regardless of the site attempting to do so.
To enable the response to be loaded on a frame within the same site, set
X_FRAME_OPTIONS in your project's Django settings to 'SAMEORIGIN'.
"""
def process_response(self, request, response):
# Don't set it if it's already in the response
if response.get("X-Frame-Options") is not None:
return response
# Don't set it if they used @xframe_options_exempt
if getattr(response, "xframe_options_exempt", False):
return response
response.headers["X-Frame-Options"] = self.get_xframe_options_value(
request,
response,
)
return response
def get_xframe_options_value(self, request, response):
"""
Get the value to set for the X_FRAME_OPTIONS header. Use the value from
the X_FRAME_OPTIONS setting, or 'DENY' if not set.
This method can be overridden if needed, allowing it to vary based on
the request or response.
"""
return getattr(settings, "X_FRAME_OPTIONS", "DENY").upper()
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/django/middleware/__init__.py | django/middleware/__init__.py | python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false | |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/django/middleware/csrf.py | django/middleware/csrf.py | """
Cross Site Request Forgery Middleware.
This module provides a middleware that implements protection
against request forgeries from other sites.
"""
import logging
import string
from collections import defaultdict
from urllib.parse import urlsplit
from django.conf import settings
from django.core.exceptions import DisallowedHost, ImproperlyConfigured
from django.http import HttpHeaders, UnreadablePostError
from django.urls import get_callable
from django.utils.cache import patch_vary_headers
from django.utils.crypto import constant_time_compare, get_random_string
from django.utils.deprecation import MiddlewareMixin
from django.utils.functional import cached_property
from django.utils.http import is_same_domain
from django.utils.log import log_response
from django.utils.regex_helper import _lazy_re_compile
logger = logging.getLogger("django.security.csrf")
# This matches if any character is not in CSRF_ALLOWED_CHARS.
invalid_token_chars_re = _lazy_re_compile("[^a-zA-Z0-9]")
REASON_BAD_ORIGIN = "Origin checking failed - %s does not match any trusted origins."
REASON_NO_REFERER = "Referer checking failed - no Referer."
REASON_BAD_REFERER = "Referer checking failed - %s does not match any trusted origins."
REASON_NO_CSRF_COOKIE = "CSRF cookie not set."
REASON_CSRF_TOKEN_MISSING = "CSRF token missing."
REASON_MALFORMED_REFERER = "Referer checking failed - Referer is malformed."
REASON_INSECURE_REFERER = (
"Referer checking failed - Referer is insecure while host is secure."
)
# The reason strings below are for passing to InvalidTokenFormat. They are
# phrases without a subject because they can be in reference to either the CSRF
# cookie or non-cookie token.
REASON_INCORRECT_LENGTH = "has incorrect length"
REASON_INVALID_CHARACTERS = "has invalid characters"
CSRF_SECRET_LENGTH = 32
CSRF_TOKEN_LENGTH = 2 * CSRF_SECRET_LENGTH
CSRF_ALLOWED_CHARS = string.ascii_letters + string.digits
CSRF_SESSION_KEY = "_csrftoken"
def _get_failure_view():
"""Return the view to be used for CSRF rejections."""
return get_callable(settings.CSRF_FAILURE_VIEW)
def _get_new_csrf_string():
return get_random_string(CSRF_SECRET_LENGTH, allowed_chars=CSRF_ALLOWED_CHARS)
def _mask_cipher_secret(secret):
"""
Given a secret (assumed to be a string of CSRF_ALLOWED_CHARS), generate a
token by adding a mask and applying it to the secret.
"""
mask = _get_new_csrf_string()
chars = CSRF_ALLOWED_CHARS
pairs = zip((chars.index(x) for x in secret), (chars.index(x) for x in mask))
cipher = "".join(chars[(x + y) % len(chars)] for x, y in pairs)
return mask + cipher
def _unmask_cipher_token(token):
"""
Given a token (assumed to be a string of CSRF_ALLOWED_CHARS, of length
CSRF_TOKEN_LENGTH, and that its first half is a mask), use it to decrypt
the second half to produce the original secret.
"""
mask = token[:CSRF_SECRET_LENGTH]
token = token[CSRF_SECRET_LENGTH:]
chars = CSRF_ALLOWED_CHARS
pairs = zip((chars.index(x) for x in token), (chars.index(x) for x in mask))
return "".join(chars[x - y] for x, y in pairs) # Note negative values are ok
def _add_new_csrf_cookie(request):
"""Generate a new random CSRF_COOKIE value, and add it to request.META."""
csrf_secret = _get_new_csrf_string()
request.META.update(
{
"CSRF_COOKIE": csrf_secret,
"CSRF_COOKIE_NEEDS_UPDATE": True,
}
)
return csrf_secret
def get_token(request):
"""
Return the CSRF token required for a POST form. The token is an
alphanumeric value. A new token is created if one is not already set.
A side effect of calling this function is to make the csrf_protect
decorator and the CsrfViewMiddleware add a CSRF cookie and a 'Vary: Cookie'
header to the outgoing response. For this reason, you may need to use this
function lazily, as is done by the csrf context processor.
"""
if "CSRF_COOKIE" in request.META:
csrf_secret = request.META["CSRF_COOKIE"]
# Since the cookie is being used, flag to send the cookie in
# process_response() (even if the client already has it) in order to
# renew the expiry timer.
request.META["CSRF_COOKIE_NEEDS_UPDATE"] = True
else:
csrf_secret = _add_new_csrf_cookie(request)
return _mask_cipher_secret(csrf_secret)
def rotate_token(request):
"""
Change the CSRF token in use for a request - should be done on login
for security purposes.
"""
_add_new_csrf_cookie(request)
class InvalidTokenFormat(Exception):
def __init__(self, reason):
self.reason = reason
def _check_token_format(token):
"""
Raise an InvalidTokenFormat error if the token has an invalid length or
characters that aren't allowed. The token argument can be a CSRF cookie
secret or non-cookie CSRF token, and either masked or unmasked.
"""
if len(token) not in (CSRF_TOKEN_LENGTH, CSRF_SECRET_LENGTH):
raise InvalidTokenFormat(REASON_INCORRECT_LENGTH)
# Make sure all characters are in CSRF_ALLOWED_CHARS.
if invalid_token_chars_re.search(token):
raise InvalidTokenFormat(REASON_INVALID_CHARACTERS)
def _does_token_match(request_csrf_token, csrf_secret):
"""
Return whether the given CSRF token matches the given CSRF secret, after
unmasking the token if necessary.
This function assumes that the request_csrf_token argument has been
validated to have the correct length (CSRF_SECRET_LENGTH or
CSRF_TOKEN_LENGTH characters) and allowed characters, and that if it has
length CSRF_TOKEN_LENGTH, it is a masked secret.
"""
# Only unmask tokens that are exactly CSRF_TOKEN_LENGTH characters long.
if len(request_csrf_token) == CSRF_TOKEN_LENGTH:
request_csrf_token = _unmask_cipher_token(request_csrf_token)
assert len(request_csrf_token) == CSRF_SECRET_LENGTH
return constant_time_compare(request_csrf_token, csrf_secret)
class RejectRequest(Exception):
def __init__(self, reason):
self.reason = reason
class CsrfViewMiddleware(MiddlewareMixin):
"""
Require a present and correct csrfmiddlewaretoken for POST requests that
have a CSRF cookie, and set an outgoing CSRF cookie.
This middleware should be used in conjunction with the {% csrf_token %}
template tag.
"""
@cached_property
def csrf_trusted_origins_hosts(self):
return [
urlsplit(origin).netloc.lstrip("*")
for origin in settings.CSRF_TRUSTED_ORIGINS
]
@cached_property
def allowed_origins_exact(self):
return {origin for origin in settings.CSRF_TRUSTED_ORIGINS if "*" not in origin}
@cached_property
def allowed_origin_subdomains(self):
"""
A mapping of allowed schemes to list of allowed netlocs, where all
subdomains of the netloc are allowed.
"""
allowed_origin_subdomains = defaultdict(list)
for parsed in (
urlsplit(origin)
for origin in settings.CSRF_TRUSTED_ORIGINS
if "*" in origin
):
allowed_origin_subdomains[parsed.scheme].append(parsed.netloc.lstrip("*"))
return allowed_origin_subdomains
# The _accept and _reject methods currently only exist for the sake of the
# requires_csrf_token decorator.
def _accept(self, request):
# Avoid checking the request twice by adding a custom attribute to
# request. This will be relevant when both decorator and middleware
# are used.
request.csrf_processing_done = True
return None
def _reject(self, request, reason):
response = _get_failure_view()(request, reason=reason)
log_response(
"Forbidden (%s): %s",
reason,
request.path,
response=response,
request=request,
logger=logger,
)
return response
def _get_secret(self, request):
"""
Return the CSRF secret originally associated with the request, or None
if it didn't have one.
If the CSRF_USE_SESSIONS setting is false, raises InvalidTokenFormat if
the request's secret has invalid characters or an invalid length.
"""
if settings.CSRF_USE_SESSIONS:
try:
csrf_secret = request.session.get(CSRF_SESSION_KEY)
except AttributeError:
raise ImproperlyConfigured(
"CSRF_USE_SESSIONS is enabled, but request.session is not "
"set. SessionMiddleware must appear before CsrfViewMiddleware "
"in MIDDLEWARE."
)
else:
try:
csrf_secret = request.COOKIES[settings.CSRF_COOKIE_NAME]
except KeyError:
csrf_secret = None
else:
# This can raise InvalidTokenFormat.
_check_token_format(csrf_secret)
if csrf_secret is None:
return None
# Django versions before 4.0 masked the secret before storing.
if len(csrf_secret) == CSRF_TOKEN_LENGTH:
csrf_secret = _unmask_cipher_token(csrf_secret)
return csrf_secret
def _set_csrf_cookie(self, request, response):
if settings.CSRF_USE_SESSIONS:
if request.session.get(CSRF_SESSION_KEY) != request.META["CSRF_COOKIE"]:
request.session[CSRF_SESSION_KEY] = request.META["CSRF_COOKIE"]
else:
response.set_cookie(
settings.CSRF_COOKIE_NAME,
request.META["CSRF_COOKIE"],
max_age=settings.CSRF_COOKIE_AGE,
domain=settings.CSRF_COOKIE_DOMAIN,
path=settings.CSRF_COOKIE_PATH,
secure=settings.CSRF_COOKIE_SECURE,
httponly=settings.CSRF_COOKIE_HTTPONLY,
samesite=settings.CSRF_COOKIE_SAMESITE,
)
# Set the Vary header since content varies with the CSRF cookie.
patch_vary_headers(response, ("Cookie",))
def _origin_verified(self, request):
request_origin = request.META["HTTP_ORIGIN"]
try:
good_host = request.get_host()
except DisallowedHost:
pass
else:
good_origin = "%s://%s" % (
"https" if request.is_secure() else "http",
good_host,
)
if request_origin == good_origin:
return True
if request_origin in self.allowed_origins_exact:
return True
try:
parsed_origin = urlsplit(request_origin)
except ValueError:
return False
parsed_origin_scheme = parsed_origin.scheme
parsed_origin_netloc = parsed_origin.netloc
return any(
is_same_domain(parsed_origin_netloc, host)
for host in self.allowed_origin_subdomains.get(parsed_origin_scheme, ())
)
def _check_referer(self, request):
referer = request.META.get("HTTP_REFERER")
if referer is None:
raise RejectRequest(REASON_NO_REFERER)
try:
referer = urlsplit(referer)
except ValueError:
raise RejectRequest(REASON_MALFORMED_REFERER)
# Make sure we have a valid URL for Referer.
if "" in (referer.scheme, referer.netloc):
raise RejectRequest(REASON_MALFORMED_REFERER)
# Ensure that our Referer is also secure.
if referer.scheme != "https":
raise RejectRequest(REASON_INSECURE_REFERER)
if any(
is_same_domain(referer.netloc, host)
for host in self.csrf_trusted_origins_hosts
):
return
# Allow matching the configured cookie domain.
good_referer = (
settings.SESSION_COOKIE_DOMAIN
if settings.CSRF_USE_SESSIONS
else settings.CSRF_COOKIE_DOMAIN
)
if good_referer is None:
# If no cookie domain is configured, allow matching the current
# host:port exactly if it's permitted by ALLOWED_HOSTS.
try:
# request.get_host() includes the port.
good_referer = request.get_host()
except DisallowedHost:
raise RejectRequest(REASON_BAD_REFERER % referer.geturl())
else:
server_port = request.get_port()
if server_port not in ("443", "80"):
good_referer = "%s:%s" % (good_referer, server_port)
if not is_same_domain(referer.netloc, good_referer):
raise RejectRequest(REASON_BAD_REFERER % referer.geturl())
def _bad_token_message(self, reason, token_source):
if token_source != "POST":
# Assume it is a settings.CSRF_HEADER_NAME value.
header_name = HttpHeaders.parse_header_name(token_source)
token_source = f"the {header_name!r} HTTP header"
return f"CSRF token from {token_source} {reason}."
def _check_token(self, request):
# Access csrf_secret via self._get_secret() as rotate_token() may have
# been called by an authentication middleware during the
# process_request() phase.
try:
csrf_secret = self._get_secret(request)
except InvalidTokenFormat as exc:
raise RejectRequest(f"CSRF cookie {exc.reason}.")
if csrf_secret is None:
# No CSRF cookie. For POST requests, we insist on a CSRF cookie,
# and in this way we can avoid all CSRF attacks, including login
# CSRF.
raise RejectRequest(REASON_NO_CSRF_COOKIE)
# Check non-cookie token for match.
request_csrf_token = ""
if request.method == "POST":
try:
request_csrf_token = request.POST.get("csrfmiddlewaretoken", "")
except UnreadablePostError:
# Handle a broken connection before we've completed reading the
# POST data. process_view shouldn't raise any exceptions, so
# we'll ignore and serve the user a 403 (assuming they're still
# listening, which they probably aren't because of the error).
pass
if request_csrf_token == "":
# Fall back to X-CSRFToken, to make things easier for AJAX, and
# possible for PUT/DELETE.
try:
# This can have length CSRF_SECRET_LENGTH or CSRF_TOKEN_LENGTH,
# depending on whether the client obtained the token from
# the DOM or the cookie (and if the cookie, whether the cookie
# was masked or unmasked).
request_csrf_token = request.META[settings.CSRF_HEADER_NAME]
except KeyError:
raise RejectRequest(REASON_CSRF_TOKEN_MISSING)
token_source = settings.CSRF_HEADER_NAME
else:
token_source = "POST"
try:
_check_token_format(request_csrf_token)
except InvalidTokenFormat as exc:
reason = self._bad_token_message(exc.reason, token_source)
raise RejectRequest(reason)
if not _does_token_match(request_csrf_token, csrf_secret):
reason = self._bad_token_message("incorrect", token_source)
raise RejectRequest(reason)
def process_request(self, request):
try:
csrf_secret = self._get_secret(request)
except InvalidTokenFormat:
_add_new_csrf_cookie(request)
else:
if csrf_secret is not None:
# Use the same secret next time. If the secret was originally
# masked, this also causes it to be replaced with the unmasked
# form, but only in cases where the secret is already getting
# saved anyways.
request.META["CSRF_COOKIE"] = csrf_secret
def process_view(self, request, callback, callback_args, callback_kwargs):
if getattr(request, "csrf_processing_done", False):
return None
# Wait until request.META["CSRF_COOKIE"] has been manipulated before
# bailing out, so that get_token still works
if getattr(callback, "csrf_exempt", False):
return None
# Assume that anything not defined as 'safe' by RFC 9110 needs
# protection
if request.method in ("GET", "HEAD", "OPTIONS", "TRACE"):
return self._accept(request)
if getattr(request, "_dont_enforce_csrf_checks", False):
# Mechanism to turn off CSRF checks for test suite. It comes after
# the creation of CSRF cookies, so that everything else continues
# to work exactly the same (e.g. cookies are sent, etc.), but
# before any branches that call the _reject method.
return self._accept(request)
# Reject the request if the Origin header doesn't match an allowed
# value.
if "HTTP_ORIGIN" in request.META:
if not self._origin_verified(request):
return self._reject(
request, REASON_BAD_ORIGIN % request.META["HTTP_ORIGIN"]
)
elif request.is_secure():
# If the Origin header wasn't provided, reject HTTPS requests if
# the Referer header doesn't match an allowed value.
#
# Suppose user visits http://example.com/
# An active network attacker (man-in-the-middle, MITM) sends a
# POST form that targets https://example.com/detonate-bomb/ and
# submits it via JavaScript.
#
# The attacker will need to provide a CSRF cookie and token, but
# that's no problem for a MITM and the session-independent secret
# we're using. So the MITM can circumvent the CSRF protection. This
# is true for any HTTP connection, but anyone using HTTPS expects
# better! For this reason, for https://example.com/ we need
# additional protection that treats http://example.com/ as
# completely untrusted. Under HTTPS, Barth et al. found that the
# Referer header is missing for same-domain requests in only about
# 0.2% of cases or less, so we can use strict Referer checking.
try:
self._check_referer(request)
except RejectRequest as exc:
return self._reject(request, exc.reason)
try:
self._check_token(request)
except RejectRequest as exc:
return self._reject(request, exc.reason)
return self._accept(request)
def process_response(self, request, response):
if request.META.get("CSRF_COOKIE_NEEDS_UPDATE"):
self._set_csrf_cookie(request, response)
# Unset the flag to prevent _set_csrf_cookie() from being
# unnecessarily called again in process_response() by other
# instances of CsrfViewMiddleware. This can happen e.g. when both a
# decorator and middleware are used. However,
# CSRF_COOKIE_NEEDS_UPDATE is still respected in subsequent calls
# e.g. in case rotate_token() is called in process_response() later
# by custom middleware but before those subsequent calls.
request.META["CSRF_COOKIE_NEEDS_UPDATE"] = False
return response
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/django/middleware/locale.py | django/middleware/locale.py | from django.conf import settings
from django.conf.urls.i18n import is_language_prefix_patterns_used
from django.http import HttpResponseRedirect
from django.urls import get_script_prefix, is_valid_path
from django.utils import translation
from django.utils.cache import patch_vary_headers
from django.utils.deprecation import MiddlewareMixin
class LocaleMiddleware(MiddlewareMixin):
"""
Parse a request and decide what translation object to install in the
current thread context. This allows pages to be dynamically translated to
the language the user desires (if the language is available).
"""
response_redirect_class = HttpResponseRedirect
def process_request(self, request):
urlconf = getattr(request, "urlconf", settings.ROOT_URLCONF)
(
i18n_patterns_used,
prefixed_default_language,
) = is_language_prefix_patterns_used(urlconf)
language = translation.get_language_from_request(
request, check_path=i18n_patterns_used
)
language_from_path = translation.get_language_from_path(request.path_info)
if (
not language_from_path
and i18n_patterns_used
and not prefixed_default_language
):
language = settings.LANGUAGE_CODE
translation.activate(language)
request.LANGUAGE_CODE = translation.get_language()
def process_response(self, request, response):
language = translation.get_language()
language_from_path = translation.get_language_from_path(request.path_info)
urlconf = getattr(request, "urlconf", settings.ROOT_URLCONF)
(
i18n_patterns_used,
prefixed_default_language,
) = is_language_prefix_patterns_used(urlconf)
if (
response.status_code == 404
and not language_from_path
and i18n_patterns_used
and prefixed_default_language
):
# Maybe the language code is missing in the URL? Try adding the
# language prefix and redirecting to that URL.
language_path = "/%s%s" % (language, request.path_info)
path_valid = is_valid_path(language_path, urlconf)
path_needs_slash = not path_valid and (
settings.APPEND_SLASH
and not language_path.endswith("/")
and is_valid_path("%s/" % language_path, urlconf)
)
if path_valid or path_needs_slash:
script_prefix = get_script_prefix()
# Insert language after the script prefix and before the
# rest of the URL
language_url = request.get_full_path(
force_append_slash=path_needs_slash
).replace(script_prefix, "%s%s/" % (script_prefix, language), 1)
# Redirect to the language-specific URL as detected by
# get_language_from_request(). HTTP caches may cache this
# redirect, so add the Vary header.
redirect = self.response_redirect_class(language_url)
patch_vary_headers(redirect, ("Accept-Language", "Cookie"))
return redirect
if not (i18n_patterns_used and language_from_path):
patch_vary_headers(response, ("Accept-Language",))
response.headers.setdefault("Content-Language", language)
return response
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/django/middleware/cache.py | django/middleware/cache.py | """
Cache middleware. If enabled, each Django-powered page will be cached based on
URL. The canonical way to enable cache middleware is to set
``UpdateCacheMiddleware`` as your first piece of middleware, and
``FetchFromCacheMiddleware`` as the last::
MIDDLEWARE = [
'django.middleware.cache.UpdateCacheMiddleware',
...
'django.middleware.cache.FetchFromCacheMiddleware'
]
This is counterintuitive, but correct: ``UpdateCacheMiddleware`` needs to run
last during the response phase, which processes middleware bottom-up;
``FetchFromCacheMiddleware`` needs to run last during the request phase, which
processes middleware top-down.
The single-class ``CacheMiddleware`` can be used for some simple sites.
However, if any other piece of middleware needs to affect the cache key, you'll
need to use the two-part ``UpdateCacheMiddleware`` and
``FetchFromCacheMiddleware``. This'll most often happen when you're using
Django's ``LocaleMiddleware``.
More details about how the caching works:
* Only GET or HEAD-requests with status code 200 are cached.
* The number of seconds each page is stored for is set by the "max-age" section
of the response's "Cache-Control" header, falling back to the
CACHE_MIDDLEWARE_SECONDS setting if the section was not found.
* This middleware expects that a HEAD request is answered with the same
response headers exactly like the corresponding GET request.
* When a hit occurs, a shallow copy of the original response object is returned
from process_request.
* Pages will be cached based on the contents of the request headers listed in
the response's "Vary" header.
* This middleware also sets ETag, Last-Modified, Expires and Cache-Control
headers on the response object.
"""
import time
from django.conf import settings
from django.core.cache import DEFAULT_CACHE_ALIAS, caches
from django.utils.cache import (
get_cache_key,
get_max_age,
has_vary_header,
learn_cache_key,
patch_response_headers,
)
from django.utils.deprecation import MiddlewareMixin
from django.utils.http import parse_http_date_safe
class UpdateCacheMiddleware(MiddlewareMixin):
"""
Response-phase cache middleware that updates the cache if the response is
cacheable.
Must be used as part of the two-part update/fetch cache middleware.
UpdateCacheMiddleware must be the first piece of middleware in MIDDLEWARE
so that it'll get called last during the response phase.
"""
def __init__(self, get_response):
super().__init__(get_response)
self.cache_timeout = settings.CACHE_MIDDLEWARE_SECONDS
self.page_timeout = None
self.key_prefix = settings.CACHE_MIDDLEWARE_KEY_PREFIX
self.cache_alias = settings.CACHE_MIDDLEWARE_ALIAS
@property
def cache(self):
return caches[self.cache_alias]
def _should_update_cache(self, request, response):
return hasattr(request, "_cache_update_cache") and request._cache_update_cache
def process_response(self, request, response):
"""Set the cache, if needed."""
if not self._should_update_cache(request, response):
# We don't need to update the cache, just return.
return response
if response.streaming or response.status_code not in (200, 304):
return response
# Don't cache responses that set a user-specific (and maybe security
# sensitive) cookie in response to a cookie-less request.
if (
not request.COOKIES
and response.cookies
and has_vary_header(response, "Cookie")
):
return response
# Don't cache responses when the Cache-Control header is set to
# private, no-cache, or no-store.
cache_control = response.get("Cache-Control", ())
if any(
directive in cache_control
for directive in (
"private",
"no-cache",
"no-store",
)
):
return response
# Page timeout takes precedence over the "max-age" and the default
# cache timeout.
timeout = self.page_timeout
if timeout is None:
# The timeout from the "max-age" section of the "Cache-Control"
# header takes precedence over the default cache timeout.
timeout = get_max_age(response)
if timeout is None:
timeout = self.cache_timeout
elif timeout == 0:
# max-age was set to 0, don't cache.
return response
patch_response_headers(response, timeout)
if timeout and response.status_code == 200:
cache_key = learn_cache_key(
request, response, timeout, self.key_prefix, cache=self.cache
)
if hasattr(response, "render") and callable(response.render):
response.add_post_render_callback(
lambda r: self.cache.set(cache_key, r, timeout)
)
else:
self.cache.set(cache_key, response, timeout)
return response
class FetchFromCacheMiddleware(MiddlewareMixin):
"""
Request-phase cache middleware that fetches a page from the cache.
Must be used as part of the two-part update/fetch cache middleware.
FetchFromCacheMiddleware must be the last piece of middleware in MIDDLEWARE
so that it'll get called last during the request phase.
"""
def __init__(self, get_response):
super().__init__(get_response)
self.key_prefix = settings.CACHE_MIDDLEWARE_KEY_PREFIX
self.cache_alias = settings.CACHE_MIDDLEWARE_ALIAS
@property
def cache(self):
return caches[self.cache_alias]
def process_request(self, request):
"""
Check whether the page is already cached and return the cached
version if available.
"""
if request.method not in ("GET", "HEAD"):
request._cache_update_cache = False
return None # Don't bother checking the cache.
# try and get the cached GET response
cache_key = get_cache_key(request, self.key_prefix, "GET", cache=self.cache)
if cache_key is None:
request._cache_update_cache = True
return None # No cache information available, need to rebuild.
response = self.cache.get(cache_key)
# if it wasn't found and we are looking for a HEAD, try looking just
# for that
if response is None and request.method == "HEAD":
cache_key = get_cache_key(
request, self.key_prefix, "HEAD", cache=self.cache
)
response = self.cache.get(cache_key)
if response is None:
request._cache_update_cache = True
return None # No cache information available, need to rebuild.
# Derive the age estimation of the cached response.
if (max_age_seconds := get_max_age(response)) is not None and (
expires_timestamp := parse_http_date_safe(response["Expires"])
) is not None:
now_timestamp = int(time.time())
remaining_seconds = expires_timestamp - now_timestamp
# Use Age: 0 if local clock got turned back.
response["Age"] = max(0, max_age_seconds - remaining_seconds)
# hit, return cached response
request._cache_update_cache = False
return response
class CacheMiddleware(UpdateCacheMiddleware, FetchFromCacheMiddleware):
"""
Cache middleware that provides basic behavior for many simple sites.
Also used as the hook point for the cache decorator, which is generated
using the decorator-from-middleware utility.
"""
def __init__(self, get_response, cache_timeout=None, page_timeout=None, **kwargs):
super().__init__(get_response)
# We need to differentiate between "provided, but using default value",
# and "not provided". If the value is provided using a default, then
# we fall back to system defaults. If it is not provided at all,
# we need to use middleware defaults.
try:
key_prefix = kwargs["key_prefix"]
if key_prefix is None:
key_prefix = ""
self.key_prefix = key_prefix
except KeyError:
pass
try:
cache_alias = kwargs["cache_alias"]
if cache_alias is None:
cache_alias = DEFAULT_CACHE_ALIAS
self.cache_alias = cache_alias
except KeyError:
pass
if cache_timeout is not None:
self.cache_timeout = cache_timeout
self.page_timeout = page_timeout
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/django/db/utils.py | django/db/utils.py | import pkgutil
from importlib import import_module
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
# For backwards compatibility with Django < 3.2
from django.utils.connection import ConnectionDoesNotExist # NOQA: F401
from django.utils.connection import BaseConnectionHandler
from django.utils.functional import cached_property
from django.utils.module_loading import import_string
DEFAULT_DB_ALIAS = "default"
DJANGO_VERSION_PICKLE_KEY = "_django_version"
class Error(Exception):
pass
class InterfaceError(Error):
pass
class DatabaseError(Error):
pass
class DataError(DatabaseError):
pass
class OperationalError(DatabaseError):
pass
class IntegrityError(DatabaseError):
pass
class InternalError(DatabaseError):
pass
class ProgrammingError(DatabaseError):
pass
class NotSupportedError(DatabaseError):
pass
class DatabaseErrorWrapper:
"""
Context manager and decorator that reraises backend-specific database
exceptions using Django's common wrappers.
"""
def __init__(self, wrapper):
"""
wrapper is a database wrapper.
It must have a Database attribute defining PEP-249 exceptions.
"""
self.wrapper = wrapper
def __del__(self):
del self.wrapper
def __enter__(self):
pass
def __exit__(self, exc_type, exc_value, traceback):
if exc_type is None:
return
for dj_exc_type in (
DataError,
OperationalError,
IntegrityError,
InternalError,
ProgrammingError,
NotSupportedError,
DatabaseError,
InterfaceError,
Error,
):
db_exc_type = getattr(self.wrapper.Database, dj_exc_type.__name__)
if issubclass(exc_type, db_exc_type):
dj_exc_value = dj_exc_type(*exc_value.args)
# Only set the 'errors_occurred' flag for errors that may make
# the connection unusable.
if dj_exc_type not in (DataError, IntegrityError):
self.wrapper.errors_occurred = True
raise dj_exc_value.with_traceback(traceback) from exc_value
def __call__(self, func):
# Note that we are intentionally not using @wraps here for performance
# reasons. Refs #21109.
def inner(*args, **kwargs):
with self:
return func(*args, **kwargs)
return inner
def load_backend(backend_name):
"""
Return a database backend's "base" module given a fully qualified database
backend name, or raise an error if it doesn't exist.
"""
# This backend was renamed in Django 1.9.
if backend_name == "django.db.backends.postgresql_psycopg2":
backend_name = "django.db.backends.postgresql"
try:
return import_module("%s.base" % backend_name)
except ImportError as e_user:
# The database backend wasn't found. Display a helpful error message
# listing all built-in database backends.
import django.db.backends
builtin_backends = [
name
for _, name, ispkg in pkgutil.iter_modules(django.db.backends.__path__)
if ispkg and name not in {"base", "dummy"}
]
if backend_name not in ["django.db.backends.%s" % b for b in builtin_backends]:
backend_reprs = map(repr, sorted(builtin_backends))
raise ImproperlyConfigured(
"%r isn't an available database backend or couldn't be "
"imported. Check the above exception. To use one of the "
"built-in backends, use 'django.db.backends.XXX', where XXX "
"is one of:\n"
" %s" % (backend_name, ", ".join(backend_reprs))
) from e_user
else:
# If there's some other error, this must be an error in Django
raise
class ConnectionHandler(BaseConnectionHandler):
settings_name = "DATABASES"
# Connections needs to still be an actual thread local, as it's truly
# thread-critical. Database backends should use @async_unsafe to protect
# their code from async contexts, but this will give those contexts
# separate connections in case it's needed as well. There's no cleanup
# after async contexts, though, so we don't allow that if we can help it.
thread_critical = True
def configure_settings(self, databases):
databases = super().configure_settings(databases)
if databases == {}:
databases[DEFAULT_DB_ALIAS] = {"ENGINE": "django.db.backends.dummy"}
elif DEFAULT_DB_ALIAS not in databases:
raise ImproperlyConfigured(
f"You must define a '{DEFAULT_DB_ALIAS}' database."
)
elif databases[DEFAULT_DB_ALIAS] == {}:
databases[DEFAULT_DB_ALIAS]["ENGINE"] = "django.db.backends.dummy"
# Configure default settings.
for conn in databases.values():
conn.setdefault("ATOMIC_REQUESTS", False)
conn.setdefault("AUTOCOMMIT", True)
conn.setdefault("ENGINE", "django.db.backends.dummy")
if conn["ENGINE"] == "django.db.backends." or not conn["ENGINE"]:
conn["ENGINE"] = "django.db.backends.dummy"
conn.setdefault("CONN_MAX_AGE", 0)
conn.setdefault("CONN_HEALTH_CHECKS", False)
conn.setdefault("OPTIONS", {})
conn.setdefault("TIME_ZONE", None)
for setting in ["NAME", "USER", "PASSWORD", "HOST", "PORT"]:
conn.setdefault(setting, "")
test_settings = conn.setdefault("TEST", {})
default_test_settings = [
("CHARSET", None),
("COLLATION", None),
("MIGRATE", True),
("MIRROR", None),
("NAME", None),
]
for key, value in default_test_settings:
test_settings.setdefault(key, value)
return databases
@property
def databases(self):
# Maintained for backward compatibility as some 3rd party packages have
# made use of this private API in the past. It is no longer used within
# Django itself.
return self.settings
def create_connection(self, alias):
db = self.settings[alias]
backend = load_backend(db["ENGINE"])
return backend.DatabaseWrapper(db, alias)
class ConnectionRouter:
def __init__(self, routers=None):
"""
If routers is not specified, default to settings.DATABASE_ROUTERS.
"""
self._routers = routers
@cached_property
def routers(self):
if self._routers is None:
self._routers = settings.DATABASE_ROUTERS
routers = []
for r in self._routers:
if isinstance(r, str):
router = import_string(r)()
else:
router = r
routers.append(router)
return routers
def _router_func(action):
def _route_db(self, model, **hints):
chosen_db = None
for router in self.routers:
try:
method = getattr(router, action)
except AttributeError:
# If the router doesn't have a method, skip to the next
# one.
pass
else:
chosen_db = method(model, **hints)
if chosen_db:
return chosen_db
instance = hints.get("instance")
if instance is not None and instance._state.db:
return instance._state.db
return DEFAULT_DB_ALIAS
return _route_db
db_for_read = _router_func("db_for_read")
db_for_write = _router_func("db_for_write")
def allow_relation(self, obj1, obj2, **hints):
for router in self.routers:
try:
method = router.allow_relation
except AttributeError:
# If the router doesn't have a method, skip to the next one.
pass
else:
allow = method(obj1, obj2, **hints)
if allow is not None:
return allow
return obj1._state.db == obj2._state.db
def allow_migrate(self, db, app_label, **hints):
for router in self.routers:
try:
method = router.allow_migrate
except AttributeError:
# If the router doesn't have a method, skip to the next one.
continue
allow = method(db, app_label, **hints)
if allow is not None:
return allow
return True
def allow_migrate_model(self, db, model):
return self.allow_migrate(
db,
model._meta.app_label,
model_name=model._meta.model_name,
model=model,
)
def get_migratable_models(self, app_config, db, include_auto_created=False):
"""Return app models allowed to be migrated on provided db."""
models = app_config.get_models(include_auto_created=include_auto_created)
return [model for model in models if self.allow_migrate_model(db, model)]
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/django/db/transaction.py | django/db/transaction.py | from contextlib import ContextDecorator, contextmanager
from django.db import (
DEFAULT_DB_ALIAS,
DatabaseError,
Error,
ProgrammingError,
connections,
)
class TransactionManagementError(ProgrammingError):
"""Transaction management is used improperly."""
pass
def get_connection(using=None):
"""
Get a database connection by name, or the default database connection
if no name is provided. This is a private API.
"""
if using is None:
using = DEFAULT_DB_ALIAS
return connections[using]
def get_autocommit(using=None):
"""Get the autocommit status of the connection."""
return get_connection(using).get_autocommit()
def set_autocommit(autocommit, using=None):
"""Set the autocommit status of the connection."""
return get_connection(using).set_autocommit(autocommit)
def commit(using=None):
"""Commit a transaction."""
get_connection(using).commit()
def rollback(using=None):
"""Roll back a transaction."""
get_connection(using).rollback()
def savepoint(using=None):
"""
Create a savepoint (if supported and required by the backend) inside the
current transaction. Return an identifier for the savepoint that will be
used for the subsequent rollback or commit.
"""
return get_connection(using).savepoint()
def savepoint_rollback(sid, using=None):
"""
Roll back the most recent savepoint (if one exists). Do nothing if
savepoints are not supported.
"""
get_connection(using).savepoint_rollback(sid)
def savepoint_commit(sid, using=None):
"""
Commit the most recent savepoint (if one exists). Do nothing if
savepoints are not supported.
"""
get_connection(using).savepoint_commit(sid)
def clean_savepoints(using=None):
"""
Reset the counter used to generate unique savepoint ids in this thread.
"""
get_connection(using).clean_savepoints()
def get_rollback(using=None):
"""Get the "needs rollback" flag -- for *advanced use* only."""
return get_connection(using).get_rollback()
def set_rollback(rollback, using=None):
"""
Set or unset the "needs rollback" flag -- for *advanced use* only.
When `rollback` is `True`, trigger a rollback when exiting the innermost
enclosing atomic block that has `savepoint=True` (that's the default). Use
this to force a rollback without raising an exception.
When `rollback` is `False`, prevent such a rollback. Use this only after
rolling back to a known-good state! Otherwise, you break the atomic block
and data corruption may occur.
"""
return get_connection(using).set_rollback(rollback)
@contextmanager
def mark_for_rollback_on_error(using=None):
"""
Internal low-level utility to mark a transaction as "needs rollback" when
an exception is raised while not enforcing the enclosed block to be in a
transaction. This is needed by Model.save() and friends to avoid starting a
transaction when in autocommit mode and a single query is executed.
It's equivalent to:
connection = get_connection(using)
if connection.get_autocommit():
yield
else:
with transaction.atomic(using=using, savepoint=False):
yield
but it uses low-level utilities to avoid performance overhead.
"""
try:
yield
except Exception as exc:
connection = get_connection(using)
if connection.in_atomic_block:
connection.needs_rollback = True
connection.rollback_exc = exc
raise
def on_commit(func, using=None, robust=False):
"""
Register `func` to be called when the current transaction is committed.
If the current transaction is rolled back, `func` will not be called.
"""
get_connection(using).on_commit(func, robust)
#################################
# Decorators / context managers #
#################################
class Atomic(ContextDecorator):
"""
Guarantee the atomic execution of a given block.
An instance can be used either as a decorator or as a context manager.
When it's used as a decorator, __call__ wraps the execution of the
decorated function in the instance itself, used as a context manager.
When it's used as a context manager, __enter__ creates a transaction or a
savepoint, depending on whether a transaction is already in progress, and
__exit__ commits the transaction or releases the savepoint on normal exit,
and rolls back the transaction or to the savepoint on exceptions.
It's possible to disable the creation of savepoints if the goal is to
ensure that some code runs within a transaction without creating overhead.
A stack of savepoint identifiers is maintained as an attribute of the
connection. None denotes the absence of a savepoint.
This allows reentrancy even if the same AtomicWrapper is reused. For
example, it's possible to define `oa = atomic('other')` and use `@oa` or
`with oa:` multiple times.
Since database connections are thread-local, this is thread-safe.
An atomic block can be tagged as durable. In this case, a RuntimeError is
raised if it's nested within another atomic block. This guarantees
that database changes in a durable block are committed to the database when
the block exits without error.
This is a private API.
"""
def __init__(self, using, savepoint, durable):
self.using = using
self.savepoint = savepoint
self.durable = durable
self._from_testcase = False
def __enter__(self):
connection = get_connection(self.using)
if (
self.durable
and connection.atomic_blocks
and not connection.atomic_blocks[-1]._from_testcase
):
raise RuntimeError(
"A durable atomic block cannot be nested within another "
"atomic block."
)
if not connection.in_atomic_block:
# Reset state when entering an outermost atomic block.
connection.commit_on_exit = True
connection.needs_rollback = False
if not connection.get_autocommit():
# Pretend we're already in an atomic block to bypass the code
# that disables autocommit to enter a transaction, and make a
# note to deal with this case in __exit__.
connection.in_atomic_block = True
connection.commit_on_exit = False
if connection.in_atomic_block:
# We're already in a transaction; create a savepoint, unless we
# were told not to or we're already waiting for a rollback. The
# second condition avoids creating useless savepoints and prevents
# overwriting needs_rollback until the rollback is performed.
if self.savepoint and not connection.needs_rollback:
sid = connection.savepoint()
connection.savepoint_ids.append(sid)
else:
connection.savepoint_ids.append(None)
else:
connection.set_autocommit(
False, force_begin_transaction_with_broken_autocommit=True
)
connection.in_atomic_block = True
if connection.in_atomic_block:
connection.atomic_blocks.append(self)
def __exit__(self, exc_type, exc_value, traceback):
connection = get_connection(self.using)
if connection.in_atomic_block:
connection.atomic_blocks.pop()
if connection.savepoint_ids:
sid = connection.savepoint_ids.pop()
else:
# Prematurely unset this flag to allow using commit or rollback.
connection.in_atomic_block = False
try:
if connection.closed_in_transaction:
# The database will perform a rollback by itself.
# Wait until we exit the outermost block.
pass
elif exc_type is None and not connection.needs_rollback:
if connection.in_atomic_block:
# Release savepoint if there is one
if sid is not None:
try:
connection.savepoint_commit(sid)
except DatabaseError:
try:
connection.savepoint_rollback(sid)
# The savepoint won't be reused. Release it to
# minimize overhead for the database server.
connection.savepoint_commit(sid)
except Error:
# If rolling back to a savepoint fails, mark
# for rollback at a higher level and avoid
# shadowing the original exception.
connection.needs_rollback = True
raise
else:
# Commit transaction
try:
connection.commit()
except DatabaseError:
try:
connection.rollback()
except Error:
# An error during rollback means that something
# went wrong with the connection. Drop it.
connection.close()
raise
else:
# This flag will be set to True again if there isn't a
# savepoint allowing to perform the rollback at this level.
connection.needs_rollback = False
if connection.in_atomic_block:
# Roll back to savepoint if there is one, mark for rollback
# otherwise.
if sid is None:
connection.needs_rollback = True
else:
try:
connection.savepoint_rollback(sid)
# The savepoint won't be reused. Release it to
# minimize overhead for the database server.
connection.savepoint_commit(sid)
except Error:
# If rolling back to a savepoint fails, mark for
# rollback at a higher level and avoid shadowing
# the original exception.
connection.needs_rollback = True
else:
# Roll back transaction
try:
connection.rollback()
except Error:
# An error during rollback means that something
# went wrong with the connection. Drop it.
connection.close()
finally:
# Outermost block exit when autocommit was enabled.
if not connection.in_atomic_block:
if connection.closed_in_transaction:
connection.connection = None
else:
connection.set_autocommit(True)
# Outermost block exit when autocommit was disabled.
elif not connection.savepoint_ids and not connection.commit_on_exit:
if connection.closed_in_transaction:
connection.connection = None
else:
connection.in_atomic_block = False
def atomic(using=None, savepoint=True, durable=False):
# Bare decorator: @atomic -- although the first argument is called
# `using`, it's actually the function being decorated.
if callable(using):
return Atomic(DEFAULT_DB_ALIAS, savepoint, durable)(using)
# Decorator: @atomic(...) or context manager: with atomic(...): ...
else:
return Atomic(using, savepoint, durable)
def _non_atomic_requests(view, using):
try:
view._non_atomic_requests.add(using)
except AttributeError:
view._non_atomic_requests = {using}
return view
def non_atomic_requests(using=None):
if callable(using):
return _non_atomic_requests(using, DEFAULT_DB_ALIAS)
else:
if using is None:
using = DEFAULT_DB_ALIAS
return lambda view: _non_atomic_requests(view, using)
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/django/db/__init__.py | django/db/__init__.py | from django.core import signals
from django.db.utils import (
DEFAULT_DB_ALIAS,
DJANGO_VERSION_PICKLE_KEY,
ConnectionHandler,
ConnectionRouter,
DatabaseError,
DataError,
Error,
IntegrityError,
InterfaceError,
InternalError,
NotSupportedError,
OperationalError,
ProgrammingError,
)
from django.utils.connection import ConnectionProxy
__all__ = [
"close_old_connections",
"connection",
"connections",
"reset_queries",
"router",
"DatabaseError",
"IntegrityError",
"InternalError",
"ProgrammingError",
"DataError",
"NotSupportedError",
"Error",
"InterfaceError",
"OperationalError",
"DEFAULT_DB_ALIAS",
"DJANGO_VERSION_PICKLE_KEY",
]
connections = ConnectionHandler()
router = ConnectionRouter()
# For backwards compatibility. Prefer connections['default'] instead.
connection = ConnectionProxy(connections, DEFAULT_DB_ALIAS)
# Register an event to reset saved queries when a Django request is started.
def reset_queries(**kwargs):
for conn in connections.all(initialized_only=True):
conn.queries_log.clear()
signals.request_started.connect(reset_queries)
# Register an event to reset transaction state and close connections past
# their lifetime.
def close_old_connections(**kwargs):
for conn in connections.all(initialized_only=True):
conn.close_if_unusable_or_obsolete()
signals.request_started.connect(close_old_connections)
signals.request_finished.connect(close_old_connections)
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/django/db/models/enums.py | django/db/models/enums.py | import enum
from enum import EnumType, IntEnum, StrEnum
from enum import property as enum_property
from django.utils.functional import Promise
__all__ = ["Choices", "IntegerChoices", "TextChoices"]
class ChoicesType(EnumType):
"""A metaclass for creating a enum choices."""
def __new__(metacls, classname, bases, classdict, **kwds):
labels = []
for key in classdict._member_names:
value = classdict[key]
if (
isinstance(value, (list, tuple))
and len(value) > 1
and isinstance(value[-1], (Promise, str))
):
*value, label = value
value = tuple(value)
else:
label = key.replace("_", " ").title()
labels.append(label)
# Use dict.__setitem__() to suppress defenses against double
# assignment in enum's classdict.
dict.__setitem__(classdict, key, value)
cls = super().__new__(metacls, classname, bases, classdict, **kwds)
for member, label in zip(cls.__members__.values(), labels):
member._label_ = label
return enum.unique(cls)
@property
def names(cls):
empty = ["__empty__"] if hasattr(cls, "__empty__") else []
return empty + [member.name for member in cls]
@property
def choices(cls):
empty = [(None, cls.__empty__)] if hasattr(cls, "__empty__") else []
return empty + [(member.value, member.label) for member in cls]
@property
def labels(cls):
return [label for _, label in cls.choices]
@property
def values(cls):
return [value for value, _ in cls.choices]
class Choices(enum.Enum, metaclass=ChoicesType):
"""Class for creating enumerated choices."""
do_not_call_in_templates = enum.nonmember(True)
@enum_property
def label(self):
return self._label_
# A similar format was proposed for Python 3.10.
def __repr__(self):
return f"{self.__class__.__qualname__}.{self._name_}"
class IntegerChoices(Choices, IntEnum):
"""Class for creating enumerated integer choices."""
pass
class TextChoices(Choices, StrEnum):
"""Class for creating enumerated string choices."""
@staticmethod
def _generate_next_value_(name, start, count, last_values):
return name
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/django/db/models/deletion.py | django/db/models/deletion.py | from collections import Counter, defaultdict
from functools import partial, reduce
from itertools import chain
from operator import attrgetter, or_
from django.db import IntegrityError, connections, models, transaction
from django.db.models import query_utils, signals, sql
class ProtectedError(IntegrityError):
def __init__(self, msg, protected_objects):
self.protected_objects = protected_objects
super().__init__(msg, protected_objects)
class RestrictedError(IntegrityError):
def __init__(self, msg, restricted_objects):
self.restricted_objects = restricted_objects
super().__init__(msg, restricted_objects)
def CASCADE(collector, field, sub_objs, using):
collector.collect(
sub_objs,
source=field.remote_field.model,
source_attr=field.name,
nullable=field.null,
fail_on_restricted=False,
)
if field.null and not connections[using].features.can_defer_constraint_checks:
collector.add_field_update(field, None, sub_objs)
def PROTECT(collector, field, sub_objs, using):
raise ProtectedError(
"Cannot delete some instances of model '%s' because they are "
"referenced through a protected foreign key: '%s.%s'"
% (
field.remote_field.model.__name__,
sub_objs[0].__class__.__name__,
field.name,
),
sub_objs,
)
def RESTRICT(collector, field, sub_objs, using):
collector.add_restricted_objects(field, sub_objs)
collector.add_dependency(field.remote_field.model, field.model)
def SET(value):
if callable(value):
def set_on_delete(collector, field, sub_objs, using):
collector.add_field_update(field, value(), sub_objs)
else:
def set_on_delete(collector, field, sub_objs, using):
collector.add_field_update(field, value, sub_objs)
set_on_delete.lazy_sub_objs = True
set_on_delete.deconstruct = lambda: ("django.db.models.SET", (value,), {})
return set_on_delete
def SET_NULL(collector, field, sub_objs, using):
collector.add_field_update(field, None, sub_objs)
SET_NULL.lazy_sub_objs = True
def SET_DEFAULT(collector, field, sub_objs, using):
collector.add_field_update(field, field.get_default(), sub_objs)
def DO_NOTHING(collector, field, sub_objs, using):
pass
class DatabaseOnDelete:
def __init__(self, operation, name, forced_collector=None):
self.operation = operation
self.forced_collector = forced_collector
self.__name__ = name
__call__ = DO_NOTHING
def on_delete_sql(self, schema_editor):
return schema_editor.connection.ops.fk_on_delete_sql(self.operation)
def __str__(self):
return self.__name__
DB_CASCADE = DatabaseOnDelete("CASCADE", "DB_CASCADE", CASCADE)
DB_SET_DEFAULT = DatabaseOnDelete("SET DEFAULT", "DB_SET_DEFAULT")
DB_SET_NULL = DatabaseOnDelete("SET NULL", "DB_SET_NULL")
SKIP_COLLECTION = frozenset([DO_NOTHING, DB_CASCADE, DB_SET_DEFAULT, DB_SET_NULL])
def get_candidate_relations_to_delete(opts):
# The candidate relations are the ones that come from N-1 and 1-1
# relations. N-N (i.e., many-to-many) relations aren't candidates for
# deletion.
return (
f
for f in opts.get_fields(include_hidden=True)
if f.auto_created and not f.concrete and (f.one_to_one or f.one_to_many)
)
class Collector:
def __init__(self, using, origin=None, force_collection=False):
self.using = using
# A Model or QuerySet object.
self.origin = origin
# Force collecting objects for deletion on the Python-level.
self.force_collection = force_collection
# Initially, {model: {instances}}, later values become lists.
self.data = defaultdict(set)
# {(field, value): [instances, …]}
self.field_updates = defaultdict(list)
# {model: {field: {instances}}}
self.restricted_objects = defaultdict(partial(defaultdict, set))
# fast_deletes is a list of queryset-likes that can be deleted without
# fetching the objects into memory.
self.fast_deletes = []
# Tracks deletion-order dependency for databases without transactions
# or ability to defer constraint checks. Only concrete model classes
# should be included, as the dependencies exist only between actual
# database tables; proxy models are represented here by their concrete
# parent.
self.dependencies = defaultdict(set) # {model: {models}}
def add(self, objs, source=None, nullable=False, reverse_dependency=False):
"""
Add 'objs' to the collection of objects to be deleted. If the call is
the result of a cascade, 'source' should be the model that caused it,
and 'nullable' should be set to True if the relation can be null.
Return a list of all objects that were not already collected.
"""
if not objs:
return []
new_objs = []
model = objs[0].__class__
instances = self.data[model]
for obj in objs:
if obj not in instances:
new_objs.append(obj)
instances.update(new_objs)
# Nullable relationships can be ignored -- they are nulled out before
# deleting, and therefore do not affect the order in which objects have
# to be deleted.
if source is not None and not nullable:
self.add_dependency(source, model, reverse_dependency=reverse_dependency)
return new_objs
def add_dependency(self, model, dependency, reverse_dependency=False):
if reverse_dependency:
model, dependency = dependency, model
self.dependencies[model._meta.concrete_model].add(
dependency._meta.concrete_model
)
self.data.setdefault(dependency, self.data.default_factory())
def add_field_update(self, field, value, objs):
"""
Schedule a field update. 'objs' must be a homogeneous iterable
collection of model instances (e.g. a QuerySet).
"""
self.field_updates[field, value].append(objs)
def add_restricted_objects(self, field, objs):
if objs:
model = objs[0].__class__
self.restricted_objects[model][field].update(objs)
def clear_restricted_objects_from_set(self, model, objs):
if model in self.restricted_objects:
self.restricted_objects[model] = {
field: items - objs
for field, items in self.restricted_objects[model].items()
}
def clear_restricted_objects_from_queryset(self, model, qs):
if model in self.restricted_objects:
objs = set(
qs.filter(
pk__in=[
obj.pk
for objs in self.restricted_objects[model].values()
for obj in objs
]
)
)
self.clear_restricted_objects_from_set(model, objs)
def _has_signal_listeners(self, model):
return signals.pre_delete.has_listeners(
model
) or signals.post_delete.has_listeners(model)
def can_fast_delete(self, objs, from_field=None):
"""
Determine if the objects in the given queryset-like or single object
can be fast-deleted. This can be done if there are no cascades, no
parents and no signal listeners for the object class.
The 'from_field' tells where we are coming from - we need this to
determine if the objects are in fact to be deleted. Allow also
skipping parent -> child -> parent chain preventing fast delete of
the child.
"""
if self.force_collection:
return False
if from_field and from_field.remote_field.on_delete is not CASCADE:
return False
if hasattr(objs, "_meta"):
model = objs._meta.model
elif hasattr(objs, "model") and hasattr(objs, "_raw_delete"):
model = objs.model
else:
return False
if self._has_signal_listeners(model):
return False
# The use of from_field comes from the need to avoid cascade back to
# parent when parent delete is cascading to child.
opts = model._meta
return (
all(
link == from_field
for link in opts.concrete_model._meta.parents.values()
)
and
# Foreign keys pointing to this model.
all(
related.field.remote_field.on_delete in SKIP_COLLECTION
for related in get_candidate_relations_to_delete(opts)
)
and (
# Something like generic foreign key.
not any(
hasattr(field, "bulk_related_objects")
for field in opts.private_fields
)
)
)
def get_del_batches(self, objs, fields):
"""
Return the objs in suitably sized batches for the used connection.
"""
conn_batch_size = max(
connections[self.using].ops.bulk_batch_size(fields, objs), 1
)
if len(objs) > conn_batch_size:
return [
objs[i : i + conn_batch_size]
for i in range(0, len(objs), conn_batch_size)
]
else:
return [objs]
def collect(
self,
objs,
source=None,
nullable=False,
collect_related=True,
source_attr=None,
reverse_dependency=False,
keep_parents=False,
fail_on_restricted=True,
):
"""
Add 'objs' to the collection of objects to be deleted as well as all
parent instances. 'objs' must be a homogeneous iterable collection of
model instances (e.g. a QuerySet). If 'collect_related' is True,
related objects will be handled by their respective on_delete handler.
If the call is the result of a cascade, 'source' should be the model
that caused it and 'nullable' should be set to True, if the relation
can be null.
If 'reverse_dependency' is True, 'source' will be deleted before the
current model, rather than after. (Needed for cascading to parent
models, the one case in which the cascade follows the forwards
direction of an FK rather than the reverse direction.)
If 'keep_parents' is True, data of parent model's will be not deleted.
If 'fail_on_restricted' is False, error won't be raised even if it's
prohibited to delete such objects due to RESTRICT, that defers
restricted object checking in recursive calls where the top-level call
may need to collect more objects to determine whether restricted ones
can be deleted.
"""
if self.can_fast_delete(objs):
self.fast_deletes.append(objs)
return
new_objs = self.add(
objs, source, nullable, reverse_dependency=reverse_dependency
)
if not new_objs:
return
model = new_objs[0].__class__
if not keep_parents:
# Recursively collect concrete model's parent models, but not their
# related objects. These will be found by meta.get_fields()
concrete_model = model._meta.concrete_model
for ptr in concrete_model._meta.parents.values():
if ptr:
parent_objs = [getattr(obj, ptr.name) for obj in new_objs]
self.collect(
parent_objs,
source=model,
source_attr=ptr.remote_field.related_name,
collect_related=False,
reverse_dependency=True,
fail_on_restricted=False,
)
if not collect_related:
return
model_fast_deletes = defaultdict(list)
protected_objects = defaultdict(list)
for related in get_candidate_relations_to_delete(model._meta):
# Preserve parent reverse relationships if keep_parents=True.
if (
keep_parents
and related.model._meta.concrete_model in model._meta.all_parents
):
continue
field = related.field
on_delete = field.remote_field.on_delete
if on_delete in SKIP_COLLECTION:
if self.force_collection and (
forced_on_delete := getattr(on_delete, "forced_collector", None)
):
on_delete = forced_on_delete
else:
continue
related_model = related.related_model
if self.can_fast_delete(related_model, from_field=field):
model_fast_deletes[related_model].append(field)
continue
batches = self.get_del_batches(new_objs, [field])
for batch in batches:
sub_objs = self.related_objects(related_model, [field], batch)
# Non-referenced fields can be deferred if no signal receivers
# are connected for the related model as they'll never be
# exposed to the user. Skip field deferring when some
# relationships are select_related as interactions between both
# features are hard to get right. This should only happen in
# the rare cases where .related_objects is overridden anyway.
if not (
sub_objs.query.select_related
or self._has_signal_listeners(related_model)
):
referenced_fields = set(
chain.from_iterable(
(rf.attname for rf in rel.field.foreign_related_fields)
for rel in get_candidate_relations_to_delete(
related_model._meta
)
)
)
sub_objs = sub_objs.only(*tuple(referenced_fields))
if getattr(on_delete, "lazy_sub_objs", False) or sub_objs:
try:
on_delete(self, field, sub_objs, self.using)
except ProtectedError as error:
key = "'%s.%s'" % (field.model.__name__, field.name)
protected_objects[key] += error.protected_objects
if protected_objects:
raise ProtectedError(
"Cannot delete some instances of model %r because they are "
"referenced through protected foreign keys: %s."
% (
model.__name__,
", ".join(protected_objects),
),
set(chain.from_iterable(protected_objects.values())),
)
for related_model, related_fields in model_fast_deletes.items():
batches = self.get_del_batches(new_objs, related_fields)
for batch in batches:
sub_objs = self.related_objects(related_model, related_fields, batch)
self.fast_deletes.append(sub_objs)
for field in model._meta.private_fields:
if hasattr(field, "bulk_related_objects"):
# It's something like generic foreign key.
sub_objs = field.bulk_related_objects(new_objs, self.using)
self.collect(
sub_objs, source=model, nullable=True, fail_on_restricted=False
)
if fail_on_restricted:
# Raise an error if collected restricted objects (RESTRICT) aren't
# candidates for deletion also collected via CASCADE.
for related_model, instances in self.data.items():
self.clear_restricted_objects_from_set(related_model, instances)
for qs in self.fast_deletes:
self.clear_restricted_objects_from_queryset(qs.model, qs)
if self.restricted_objects.values():
restricted_objects = defaultdict(list)
for related_model, fields in self.restricted_objects.items():
for field, objs in fields.items():
if objs:
key = "'%s.%s'" % (related_model.__name__, field.name)
restricted_objects[key] += objs
if restricted_objects:
raise RestrictedError(
"Cannot delete some instances of model %r because "
"they are referenced through restricted foreign keys: "
"%s."
% (
model.__name__,
", ".join(restricted_objects),
),
set(chain.from_iterable(restricted_objects.values())),
)
def related_objects(self, related_model, related_fields, objs):
"""
Get a QuerySet of the related model to objs via related fields.
"""
predicate = query_utils.Q.create(
[(f"{related_field.name}__in", objs) for related_field in related_fields],
connector=query_utils.Q.OR,
)
return related_model._base_manager.using(self.using).filter(predicate)
def instances_with_model(self):
for model, instances in self.data.items():
for obj in instances:
yield model, obj
def sort(self):
sorted_models = []
concrete_models = set()
models = list(self.data)
while len(sorted_models) < len(models):
found = False
for model in models:
if model in sorted_models:
continue
dependencies = self.dependencies.get(model._meta.concrete_model)
if not (dependencies and dependencies.difference(concrete_models)):
sorted_models.append(model)
concrete_models.add(model._meta.concrete_model)
found = True
if not found:
return
self.data = {model: self.data[model] for model in sorted_models}
def delete(self):
# sort instance collections
for model, instances in self.data.items():
self.data[model] = sorted(instances, key=attrgetter("pk"))
# if possible, bring the models in an order suitable for databases that
# don't support transactions or cannot defer constraint checks until
# the end of a transaction.
self.sort()
# number of objects deleted for each model label
deleted_counter = Counter()
# Optimize for the case with a single obj and no dependencies
if len(self.data) == 1 and len(instances) == 1:
instance = list(instances)[0]
if self.can_fast_delete(instance):
with transaction.mark_for_rollback_on_error(self.using):
count = sql.DeleteQuery(model).delete_batch(
[instance.pk], self.using
)
setattr(instance, model._meta.pk.attname, None)
return count, {model._meta.label: count}
with transaction.atomic(using=self.using, savepoint=False):
# send pre_delete signals
for model, obj in self.instances_with_model():
if not model._meta.auto_created:
signals.pre_delete.send(
sender=model,
instance=obj,
using=self.using,
origin=self.origin,
)
# fast deletes
for qs in self.fast_deletes:
count = qs._raw_delete(using=self.using)
if count:
deleted_counter[qs.model._meta.label] += count
# update fields
for (field, value), instances_list in self.field_updates.items():
updates = []
objs = []
for instances in instances_list:
if (
isinstance(instances, models.QuerySet)
and instances._result_cache is None
):
updates.append(instances)
else:
objs.extend(instances)
if updates:
combined_updates = reduce(or_, updates)
combined_updates.update(**{field.name: value})
if objs:
model = objs[0].__class__
query = sql.UpdateQuery(model)
query.update_batch(
list({obj.pk for obj in objs}), {field.name: value}, self.using
)
# reverse instance collections
for instances in self.data.values():
instances.reverse()
# delete instances
for model, instances in self.data.items():
query = sql.DeleteQuery(model)
pk_list = [obj.pk for obj in instances]
count = query.delete_batch(pk_list, self.using)
if count:
deleted_counter[model._meta.label] += count
if not model._meta.auto_created:
for obj in instances:
signals.post_delete.send(
sender=model,
instance=obj,
using=self.using,
origin=self.origin,
)
for model, instances in self.data.items():
for instance in instances:
setattr(instance, model._meta.pk.attname, None)
return sum(deleted_counter.values()), dict(deleted_counter)
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/django/db/models/lookups.py | django/db/models/lookups.py | import itertools
import math
from django.core.exceptions import EmptyResultSet, FullResultSet
from django.db.models.expressions import (
Case,
ColPairs,
Expression,
ExpressionList,
Func,
Value,
When,
)
from django.db.models.fields import (
BooleanField,
CharField,
DateTimeField,
Field,
IntegerField,
UUIDField,
)
from django.db.models.query_utils import RegisterLookupMixin
from django.utils.datastructures import OrderedSet
from django.utils.functional import cached_property
from django.utils.hashable import make_hashable
class Lookup(Expression):
lookup_name = None
prepare_rhs = True
can_use_none_as_rhs = False
def __init__(self, lhs, rhs):
self.lhs, self.rhs = lhs, rhs
self.rhs = self.get_prep_lookup()
self.lhs = self.get_prep_lhs()
if hasattr(self.lhs, "get_bilateral_transforms"):
bilateral_transforms = self.lhs.get_bilateral_transforms()
else:
bilateral_transforms = []
if bilateral_transforms:
# Warn the user as soon as possible if they are trying to apply
# a bilateral transformation on a nested QuerySet: that won't work.
from django.db.models.sql.query import Query # avoid circular import
if isinstance(rhs, Query):
raise NotImplementedError(
"Bilateral transformations on nested querysets are not implemented."
)
self.bilateral_transforms = bilateral_transforms
def apply_bilateral_transforms(self, value):
for transform in self.bilateral_transforms:
value = transform(value)
return value
def __repr__(self):
return f"{self.__class__.__name__}({self.lhs!r}, {self.rhs!r})"
def batch_process_rhs(self, compiler, connection, rhs=None):
if rhs is None:
rhs = self.rhs
if self.bilateral_transforms:
sqls, sqls_params = [], []
for p in rhs:
value = Value(p, output_field=self.lhs.output_field)
value = self.apply_bilateral_transforms(value)
value = value.resolve_expression(compiler.query)
sql, sql_params = compiler.compile(value)
sqls.append(sql)
sqls_params.extend(sql_params)
else:
_, params = self.get_db_prep_lookup(rhs, connection)
sqls, sqls_params = ["%s"] * len(params), params
return sqls, sqls_params
def get_source_expressions(self):
if self.rhs_is_direct_value():
return [self.lhs]
return [self.lhs, self.rhs]
def set_source_expressions(self, new_exprs):
if len(new_exprs) == 1:
self.lhs = new_exprs[0]
else:
self.lhs, self.rhs = new_exprs
def get_prep_lookup(self):
if not self.prepare_rhs or hasattr(self.rhs, "resolve_expression"):
return self.rhs
if hasattr(self.lhs, "output_field"):
if hasattr(self.lhs.output_field, "get_prep_value"):
return self.lhs.output_field.get_prep_value(self.rhs)
elif self.rhs_is_direct_value():
return Value(self.rhs)
return self.rhs
def get_prep_lhs(self):
if hasattr(self.lhs, "resolve_expression"):
return self.lhs
return Value(self.lhs)
def get_db_prep_lookup(self, value, connection):
return ("%s", (value,))
def process_lhs(self, compiler, connection, lhs=None):
lhs = lhs or self.lhs
if hasattr(lhs, "resolve_expression"):
lhs = lhs.resolve_expression(compiler.query)
sql, params = compiler.compile(lhs)
if isinstance(lhs, Lookup):
# Wrapped in parentheses to respect operator precedence.
sql = f"({sql})"
return sql, params
def process_rhs(self, compiler, connection):
value = self.rhs
if self.bilateral_transforms:
if self.rhs_is_direct_value():
# Do not call get_db_prep_lookup here as the value will be
# transformed before being used for lookup
value = Value(value, output_field=self.lhs.output_field)
value = self.apply_bilateral_transforms(value)
value = value.resolve_expression(compiler.query)
if hasattr(value, "as_sql"):
sql, params = compiler.compile(value)
if isinstance(value, ColPairs):
raise ValueError(
"CompositePrimaryKey cannot be used as a lookup value."
)
# Ensure expression is wrapped in parentheses to respect operator
# precedence but avoid double wrapping as it can be misinterpreted
# on some backends (e.g. subqueries on SQLite).
if not isinstance(value, Value) and sql and sql[0] != "(":
sql = "(%s)" % sql
return sql, params
else:
return self.get_db_prep_lookup(value, connection)
def rhs_is_direct_value(self):
return not hasattr(self.rhs, "as_sql")
def get_group_by_cols(self):
cols = []
for source in self.get_source_expressions():
cols.extend(source.get_group_by_cols())
return cols
def as_oracle(self, compiler, connection):
# Oracle doesn't allow EXISTS() and filters to be compared to another
# expression unless they're wrapped in a CASE WHEN.
wrapped = False
exprs = []
for expr in (self.lhs, self.rhs):
if connection.ops.conditional_expression_supported_in_where_clause(expr):
expr = Case(When(expr, then=True), default=False)
wrapped = True
exprs.append(expr)
lookup = type(self)(*exprs) if wrapped else self
return lookup.as_sql(compiler, connection)
@cached_property
def output_field(self):
return BooleanField()
@property
def identity(self):
return self.__class__, self.lhs, self.rhs
def __eq__(self, other):
if not isinstance(other, Lookup):
return NotImplemented
return self.identity == other.identity
def __hash__(self):
return hash(make_hashable(self.identity))
def resolve_expression(
self, query=None, allow_joins=True, reuse=None, summarize=False, for_save=False
):
c = self.copy()
c.is_summary = summarize
c.lhs = self.lhs.resolve_expression(
query, allow_joins, reuse, summarize, for_save
)
if hasattr(self.rhs, "resolve_expression"):
c.rhs = self.rhs.resolve_expression(
query, allow_joins, reuse, summarize, for_save
)
return c
def select_format(self, compiler, sql, params):
# Wrap filters with a CASE WHEN expression if a database backend
# (e.g. Oracle) doesn't support boolean expression in SELECT or GROUP
# BY list.
if not compiler.connection.features.supports_boolean_expr_in_select_clause:
sql = f"CASE WHEN {sql} THEN 1 ELSE 0 END"
return sql, params
@cached_property
def allowed_default(self):
return self.lhs.allowed_default and self.rhs.allowed_default
class Transform(RegisterLookupMixin, Func):
"""
RegisterLookupMixin() is first so that get_lookup() and get_transform()
first examine self and then check output_field.
"""
bilateral = False
arity = 1
@property
def lhs(self):
return self.get_source_expressions()[0]
def get_bilateral_transforms(self):
if hasattr(self.lhs, "get_bilateral_transforms"):
bilateral_transforms = self.lhs.get_bilateral_transforms()
else:
bilateral_transforms = []
if self.bilateral:
bilateral_transforms.append(self.__class__)
return bilateral_transforms
class BuiltinLookup(Lookup):
def process_lhs(self, compiler, connection, lhs=None):
lhs_sql, params = super().process_lhs(compiler, connection, lhs)
field_internal_type = self.lhs.output_field.get_internal_type()
lhs_sql = (
connection.ops.lookup_cast(self.lookup_name, field_internal_type) % lhs_sql
)
return lhs_sql, tuple(params)
def as_sql(self, compiler, connection):
lhs_sql, params = self.process_lhs(compiler, connection)
rhs_sql, rhs_params = self.process_rhs(compiler, connection)
params = (*params, *rhs_params)
rhs_sql = self.get_rhs_op(connection, rhs_sql)
return "%s %s" % (lhs_sql, rhs_sql), params
def get_rhs_op(self, connection, rhs):
return connection.operators[self.lookup_name] % rhs
class FieldGetDbPrepValueMixin:
"""
Some lookups require Field.get_db_prep_value() to be called on their
inputs.
"""
get_db_prep_lookup_value_is_iterable = False
def get_db_prep_lookup(self, value, connection):
# For relational fields, use the 'target_field' attribute of the
# output_field.
field = getattr(self.lhs.output_field, "target_field", None)
get_db_prep_value = (
getattr(field, "get_db_prep_value", None)
or self.lhs.output_field.get_db_prep_value
)
if not self.get_db_prep_lookup_value_is_iterable:
value = [value]
return (
"%s",
[
(
v
if hasattr(v, "as_sql")
else get_db_prep_value(v, connection, prepared=True)
)
for v in value
],
)
class FieldGetDbPrepValueIterableMixin(FieldGetDbPrepValueMixin):
"""
Some lookups require Field.get_db_prep_value() to be called on each value
in an iterable.
"""
get_db_prep_lookup_value_is_iterable = True
def get_prep_lookup(self):
if hasattr(self.rhs, "resolve_expression"):
return self.rhs
if any(hasattr(value, "resolve_expression") for value in self.rhs):
# Wrap direct values in Value expressions so they are handled by
# the database at compilation time, along with other expressions.
return ExpressionList(
*[
(
value
if hasattr(value, "resolve_expression")
else Value(value, getattr(self.lhs, "output_field", None))
)
for value in self.rhs
]
)
prepared_values = []
for rhs_value in self.rhs:
if (
self.prepare_rhs
and hasattr(self.lhs, "output_field")
and hasattr(self.lhs.output_field, "get_prep_value")
):
rhs_value = self.lhs.output_field.get_prep_value(rhs_value)
prepared_values.append(rhs_value)
return prepared_values
def process_rhs(self, compiler, connection):
if self.rhs_is_direct_value():
# rhs should be an iterable of values. Use batch_process_rhs()
# to prepare/transform those values.
return self.batch_process_rhs(compiler, connection)
elif isinstance(self.rhs, ExpressionList):
# rhs contains at least one expression. Unwrap them and delegate
# to batch_process_rhs() to prepare/transform those values.
copy = self.copy()
copy.rhs = self.rhs.get_source_expressions()
return copy.process_rhs(compiler, connection)
else:
return super().process_rhs(compiler, connection)
def resolve_expression_parameter(self, compiler, connection, sql, param):
params = [param]
if hasattr(param, "resolve_expression"):
param = param.resolve_expression(compiler.query)
if hasattr(param, "as_sql"):
sql, params = compiler.compile(param)
return sql, params
def batch_process_rhs(self, compiler, connection, rhs=None):
pre_processed = super().batch_process_rhs(compiler, connection, rhs)
# The params list may contain expressions which compile to a
# sql/param pair. Zip them to get sql and param pairs that refer to the
# same argument and attempt to replace them with the result of
# compiling the param step.
sql, params = zip(
*(
self.resolve_expression_parameter(compiler, connection, sql, param)
for sql, param in zip(*pre_processed)
)
)
params = itertools.chain.from_iterable(params)
return sql, tuple(params)
class PostgresOperatorLookup(Lookup):
"""Lookup defined by operators on PostgreSQL."""
postgres_operator = None
def as_postgresql(self, compiler, connection):
lhs, lhs_params = self.process_lhs(compiler, connection)
rhs, rhs_params = self.process_rhs(compiler, connection)
params = tuple(lhs_params) + tuple(rhs_params)
return "%s %s %s" % (lhs, self.postgres_operator, rhs), params
@Field.register_lookup
class Exact(FieldGetDbPrepValueMixin, BuiltinLookup):
lookup_name = "exact"
def get_prep_lookup(self):
from django.db.models.sql.query import Query # avoid circular import
if isinstance(query := self.rhs, Query):
if not query.has_limit_one():
raise ValueError(
"The QuerySet value for an exact lookup must be limited to "
"one result using slicing."
)
lhs_len = len(self.lhs) if isinstance(self.lhs, (ColPairs, tuple)) else 1
if (rhs_len := query._subquery_fields_len) != lhs_len:
raise ValueError(
f"The QuerySet value for the exact lookup must have {lhs_len} "
f"selected fields (received {rhs_len})"
)
if not query.has_select_fields:
query.clear_select_clause()
query.add_fields(["pk"])
return super().get_prep_lookup()
def as_sql(self, compiler, connection):
# Avoid comparison against direct rhs if lhs is a boolean value. That
# turns "boolfield__exact=True" into "WHERE boolean_field" instead of
# "WHERE boolean_field = True" when allowed.
if (
isinstance(self.rhs, bool)
and getattr(self.lhs, "conditional", False)
and connection.ops.conditional_expression_supported_in_where_clause(
self.lhs
)
):
lhs_sql, params = self.process_lhs(compiler, connection)
template = "%s" if self.rhs else "NOT %s"
return template % lhs_sql, params
return super().as_sql(compiler, connection)
@Field.register_lookup
class IExact(BuiltinLookup):
lookup_name = "iexact"
prepare_rhs = False
def process_rhs(self, qn, connection):
rhs, params = super().process_rhs(qn, connection)
if params:
params = (connection.ops.prep_for_iexact_query(params[0]), *params[1:])
return rhs, params
@Field.register_lookup
class GreaterThan(FieldGetDbPrepValueMixin, BuiltinLookup):
lookup_name = "gt"
@Field.register_lookup
class GreaterThanOrEqual(FieldGetDbPrepValueMixin, BuiltinLookup):
lookup_name = "gte"
@Field.register_lookup
class LessThan(FieldGetDbPrepValueMixin, BuiltinLookup):
lookup_name = "lt"
@Field.register_lookup
class LessThanOrEqual(FieldGetDbPrepValueMixin, BuiltinLookup):
lookup_name = "lte"
class IntegerFieldOverflow:
underflow_exception = EmptyResultSet
overflow_exception = EmptyResultSet
def process_rhs(self, compiler, connection):
rhs = self.rhs
if isinstance(rhs, int):
field_internal_type = self.lhs.output_field.get_internal_type()
min_value, max_value = connection.ops.integer_field_range(
field_internal_type
)
if min_value is not None and rhs < min_value:
raise self.underflow_exception
if max_value is not None and rhs > max_value:
raise self.overflow_exception
return super().process_rhs(compiler, connection)
class IntegerFieldFloatRounding:
"""
Allow floats to work as query values for IntegerField. Without this, the
decimal portion of the float would always be discarded.
"""
def get_prep_lookup(self):
if isinstance(self.rhs, float):
self.rhs = math.ceil(self.rhs)
return super().get_prep_lookup()
@IntegerField.register_lookup
class IntegerFieldExact(IntegerFieldOverflow, Exact):
pass
@IntegerField.register_lookup
class IntegerGreaterThan(IntegerFieldOverflow, GreaterThan):
underflow_exception = FullResultSet
@IntegerField.register_lookup
class IntegerGreaterThanOrEqual(
IntegerFieldOverflow, IntegerFieldFloatRounding, GreaterThanOrEqual
):
underflow_exception = FullResultSet
@IntegerField.register_lookup
class IntegerLessThan(IntegerFieldOverflow, IntegerFieldFloatRounding, LessThan):
overflow_exception = FullResultSet
@IntegerField.register_lookup
class IntegerLessThanOrEqual(IntegerFieldOverflow, LessThanOrEqual):
overflow_exception = FullResultSet
@Field.register_lookup
class In(FieldGetDbPrepValueIterableMixin, BuiltinLookup):
lookup_name = "in"
def get_prep_lookup(self):
from django.db.models.sql.query import Query # avoid circular import
if isinstance(self.rhs, Query):
lhs_len = len(self.lhs) if isinstance(self.lhs, (ColPairs, tuple)) else 1
if (rhs_len := self.rhs._subquery_fields_len) != lhs_len:
raise ValueError(
f"The QuerySet value for the 'in' lookup must have {lhs_len} "
f"selected fields (received {rhs_len})"
)
self.rhs.clear_ordering(clear_default=True)
if not self.rhs.has_select_fields:
self.rhs.clear_select_clause()
self.rhs.add_fields(["pk"])
return super().get_prep_lookup()
def process_rhs(self, compiler, connection):
db_rhs = getattr(self.rhs, "_db", None)
if db_rhs is not None and db_rhs != connection.alias:
raise ValueError(
"Subqueries aren't allowed across different databases. Force "
"the inner query to be evaluated using `list(inner_query)`."
)
if self.rhs_is_direct_value():
# Remove None from the list as NULL is never equal to anything.
try:
rhs = OrderedSet(self.rhs)
rhs.discard(None)
except TypeError: # Unhashable items in self.rhs
rhs = [r for r in self.rhs if r is not None]
if not rhs:
raise EmptyResultSet
# rhs should be an iterable; use batch_process_rhs() to
# prepare/transform those values.
sqls, sqls_params = self.batch_process_rhs(compiler, connection, rhs)
placeholder = "(" + ", ".join(sqls) + ")"
return (placeholder, sqls_params)
return super().process_rhs(compiler, connection)
def get_rhs_op(self, connection, rhs):
return "IN %s" % rhs
def as_sql(self, compiler, connection):
max_in_list_size = connection.ops.max_in_list_size()
if (
self.rhs_is_direct_value()
and max_in_list_size
and len(self.rhs) > max_in_list_size
):
return self.split_parameter_list_as_sql(compiler, connection)
return super().as_sql(compiler, connection)
def split_parameter_list_as_sql(self, compiler, connection):
# This is a special case for databases which limit the number of
# elements which can appear in an 'IN' clause.
max_in_list_size = connection.ops.max_in_list_size()
lhs, lhs_params = self.process_lhs(compiler, connection)
rhs, rhs_params = self.batch_process_rhs(compiler, connection)
in_clause_elements = ["("]
params = []
for offset in range(0, len(rhs_params), max_in_list_size):
if offset > 0:
in_clause_elements.append(" OR ")
in_clause_elements.append("%s IN (" % lhs)
params.extend(lhs_params)
sqls = rhs[offset : offset + max_in_list_size]
sqls_params = rhs_params[offset : offset + max_in_list_size]
param_group = ", ".join(sqls)
in_clause_elements.append(param_group)
in_clause_elements.append(")")
params.extend(sqls_params)
in_clause_elements.append(")")
return "".join(in_clause_elements), params
class PatternLookup(BuiltinLookup):
param_pattern = "%%%s%%"
prepare_rhs = False
def get_rhs_op(self, connection, rhs):
# Assume we are in startswith. We need to produce SQL like:
# col LIKE %s, ['thevalue%']
# For python values we can (and should) do that directly in Python,
# but if the value is for example reference to other column, then
# we need to add the % pattern match to the lookup by something like
# col LIKE othercol || '%%'
# So, for Python values we don't need any special pattern, but for
# SQL reference values or SQL transformations we need the correct
# pattern added.
if hasattr(self.rhs, "as_sql") or self.bilateral_transforms:
pattern = connection.pattern_ops[self.lookup_name].format(
connection.pattern_esc
)
return pattern.format(rhs)
else:
return super().get_rhs_op(connection, rhs)
def process_rhs(self, qn, connection):
rhs, params = super().process_rhs(qn, connection)
if self.rhs_is_direct_value() and params and not self.bilateral_transforms:
params = (
self.param_pattern % connection.ops.prep_for_like_query(params[0]),
*params[1:],
)
return rhs, params
@Field.register_lookup
class Contains(PatternLookup):
lookup_name = "contains"
@Field.register_lookup
class IContains(Contains):
lookup_name = "icontains"
@Field.register_lookup
class StartsWith(PatternLookup):
lookup_name = "startswith"
param_pattern = "%s%%"
@Field.register_lookup
class IStartsWith(StartsWith):
lookup_name = "istartswith"
@Field.register_lookup
class EndsWith(PatternLookup):
lookup_name = "endswith"
param_pattern = "%%%s"
@Field.register_lookup
class IEndsWith(EndsWith):
lookup_name = "iendswith"
@Field.register_lookup
class Range(FieldGetDbPrepValueIterableMixin, BuiltinLookup):
lookup_name = "range"
def get_rhs_op(self, connection, rhs):
return "BETWEEN %s AND %s" % (rhs[0], rhs[1])
@Field.register_lookup
class IsNull(BuiltinLookup):
lookup_name = "isnull"
prepare_rhs = False
def as_sql(self, compiler, connection):
if not isinstance(self.rhs, bool):
raise ValueError(
"The QuerySet value for an isnull lookup must be True or False."
)
if isinstance(self.lhs, Value):
if self.lhs.value is None or (
self.lhs.value == ""
and connection.features.interprets_empty_strings_as_nulls
):
result_exception = FullResultSet if self.rhs else EmptyResultSet
else:
result_exception = EmptyResultSet if self.rhs else FullResultSet
raise result_exception
sql, params = self.process_lhs(compiler, connection)
if self.rhs:
return "%s IS NULL" % sql, params
else:
return "%s IS NOT NULL" % sql, params
@Field.register_lookup
class Regex(BuiltinLookup):
lookup_name = "regex"
prepare_rhs = False
def as_sql(self, compiler, connection):
if self.lookup_name in connection.operators:
return super().as_sql(compiler, connection)
else:
lhs, lhs_params = self.process_lhs(compiler, connection)
rhs, rhs_params = self.process_rhs(compiler, connection)
params = (*lhs_params, *rhs_params)
sql_template = connection.ops.regex_lookup(self.lookup_name)
return sql_template % (lhs, rhs), params
@Field.register_lookup
class IRegex(Regex):
lookup_name = "iregex"
class YearLookup(Lookup):
def year_lookup_bounds(self, connection, year):
from django.db.models.functions import ExtractIsoYear
iso_year = isinstance(self.lhs, ExtractIsoYear)
output_field = self.lhs.lhs.output_field
if isinstance(output_field, DateTimeField):
bounds = connection.ops.year_lookup_bounds_for_datetime_field(
year,
iso_year=iso_year,
)
else:
bounds = connection.ops.year_lookup_bounds_for_date_field(
year,
iso_year=iso_year,
)
return bounds
def as_sql(self, compiler, connection):
# Avoid the extract operation if the rhs is a direct value to allow
# indexes to be used.
if self.rhs_is_direct_value():
# Skip the extract part by directly using the originating field,
# that is self.lhs.lhs.
lhs_sql, params = self.process_lhs(compiler, connection, self.lhs.lhs)
rhs_sql, _ = self.process_rhs(compiler, connection)
rhs_sql = self.get_direct_rhs_sql(connection, rhs_sql)
start, finish = self.year_lookup_bounds(connection, self.rhs)
params = (*params, *self.get_bound_params(start, finish))
return "%s %s" % (lhs_sql, rhs_sql), params
return super().as_sql(compiler, connection)
def get_direct_rhs_sql(self, connection, rhs):
return connection.operators[self.lookup_name] % rhs
def get_bound_params(self, start, finish):
raise NotImplementedError(
"subclasses of YearLookup must provide a get_bound_params() method"
)
class YearExact(YearLookup, Exact):
def get_direct_rhs_sql(self, connection, rhs):
return "BETWEEN %s AND %s"
def get_bound_params(self, start, finish):
return (start, finish)
class YearGt(YearLookup, GreaterThan):
def get_bound_params(self, start, finish):
return (finish,)
class YearGte(YearLookup, GreaterThanOrEqual):
def get_bound_params(self, start, finish):
return (start,)
class YearLt(YearLookup, LessThan):
def get_bound_params(self, start, finish):
return (start,)
class YearLte(YearLookup, LessThanOrEqual):
def get_bound_params(self, start, finish):
return (finish,)
class UUIDTextMixin:
"""
Strip hyphens from a value when filtering a UUIDField on backends without
a native datatype for UUID.
"""
def process_rhs(self, qn, connection):
if not connection.features.has_native_uuid_field:
from django.db.models.functions import Replace
if self.rhs_is_direct_value():
self.rhs = Value(self.rhs)
self.rhs = Replace(
self.rhs, Value("-"), Value(""), output_field=CharField()
)
rhs, params = super().process_rhs(qn, connection)
return rhs, params
@UUIDField.register_lookup
class UUIDIExact(UUIDTextMixin, IExact):
pass
@UUIDField.register_lookup
class UUIDContains(UUIDTextMixin, Contains):
pass
@UUIDField.register_lookup
class UUIDIContains(UUIDTextMixin, IContains):
pass
@UUIDField.register_lookup
class UUIDStartsWith(UUIDTextMixin, StartsWith):
pass
@UUIDField.register_lookup
class UUIDIStartsWith(UUIDTextMixin, IStartsWith):
pass
@UUIDField.register_lookup
class UUIDEndsWith(UUIDTextMixin, EndsWith):
pass
@UUIDField.register_lookup
class UUIDIEndsWith(UUIDTextMixin, IEndsWith):
pass
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/django/db/models/fetch_modes.py | django/db/models/fetch_modes.py | from django.core.exceptions import FieldFetchBlocked
class FetchMode:
__slots__ = ()
track_peers = False
def fetch(self, fetcher, instance):
raise NotImplementedError("Subclasses must implement this method.")
class FetchOne(FetchMode):
__slots__ = ()
def fetch(self, fetcher, instance):
fetcher.fetch_one(instance)
def __reduce__(self):
return "FETCH_ONE"
FETCH_ONE = FetchOne()
class FetchPeers(FetchMode):
__slots__ = ()
track_peers = True
def fetch(self, fetcher, instance):
instances = [
peer
for peer_weakref in instance._state.peers
if (peer := peer_weakref()) is not None
]
if len(instances) > 1:
fetcher.fetch_many(instances)
else:
fetcher.fetch_one(instance)
def __reduce__(self):
return "FETCH_PEERS"
FETCH_PEERS = FetchPeers()
class Raise(FetchMode):
__slots__ = ()
def fetch(self, fetcher, instance):
klass = instance.__class__.__qualname__
field_name = fetcher.field.name
raise FieldFetchBlocked(f"Fetching of {klass}.{field_name} blocked.") from None
def __reduce__(self):
return "RAISE"
RAISE = Raise()
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/django/db/models/query.py | django/db/models/query.py | """
The main QuerySet implementation. This provides the public API for the ORM.
"""
import copy
import operator
import warnings
from contextlib import nullcontext
from functools import reduce
from itertools import chain, islice
from weakref import ref as weak_ref
from asgiref.sync import sync_to_async
import django
from django.conf import settings
from django.core import exceptions
from django.db import (
DJANGO_VERSION_PICKLE_KEY,
IntegrityError,
NotSupportedError,
connections,
router,
transaction,
)
from django.db.models import AutoField, DateField, DateTimeField, Field, Max, sql
from django.db.models.constants import LOOKUP_SEP, OnConflict
from django.db.models.deletion import Collector
from django.db.models.expressions import Case, DatabaseDefault, F, Value, When
from django.db.models.fetch_modes import FETCH_ONE
from django.db.models.functions import Cast, Trunc
from django.db.models.query_utils import FilteredRelation, Q
from django.db.models.sql.constants import GET_ITERATOR_CHUNK_SIZE, ROW_COUNT
from django.db.models.utils import (
AltersData,
create_namedtuple_class,
resolve_callables,
)
from django.utils import timezone
from django.utils.deprecation import RemovedInDjango70Warning
from django.utils.functional import cached_property
# The maximum number of results to fetch in a get() query.
MAX_GET_RESULTS = 21
# The maximum number of items to display in a QuerySet.__repr__
REPR_OUTPUT_SIZE = 20
PROHIBITED_FILTER_KWARGS = frozenset(["_connector", "_negated"])
class BaseIterable:
def __init__(
self, queryset, chunked_fetch=False, chunk_size=GET_ITERATOR_CHUNK_SIZE
):
self.queryset = queryset
self.chunked_fetch = chunked_fetch
self.chunk_size = chunk_size
async def _async_generator(self):
# Generators don't actually start running until the first time you call
# next() on them, so make the generator object in the async thread and
# then repeatedly dispatch to it in a sync thread.
sync_generator = self.__iter__()
def next_slice(gen):
return list(islice(gen, self.chunk_size))
while True:
chunk = await sync_to_async(next_slice)(sync_generator)
for item in chunk:
yield item
if len(chunk) < self.chunk_size:
break
# __aiter__() is a *synchronous* method that has to then return an
# *asynchronous* iterator/generator. Thus, nest an async generator inside
# it.
# This is a generic iterable converter for now, and is going to suffer a
# performance penalty on large sets of items due to the cost of crossing
# over the sync barrier for each chunk. Custom __aiter__() methods should
# be added to each Iterable subclass, but that needs some work in the
# Compiler first.
def __aiter__(self):
return self._async_generator()
class ModelIterable(BaseIterable):
"""Iterable that yields a model instance for each row."""
def __iter__(self):
queryset = self.queryset
db = queryset.db
compiler = queryset.query.get_compiler(using=db)
fetch_mode = queryset._fetch_mode
# Execute the query. This will also fill compiler.select, klass_info,
# and annotations.
results = compiler.execute_sql(
chunked_fetch=self.chunked_fetch, chunk_size=self.chunk_size
)
select, klass_info, annotation_col_map = (
compiler.select,
compiler.klass_info,
compiler.annotation_col_map,
)
model_cls = klass_info["model"]
select_fields = klass_info["select_fields"]
model_fields_start, model_fields_end = select_fields[0], select_fields[-1] + 1
init_list = [
f[0].target.attname for f in select[model_fields_start:model_fields_end]
]
related_populators = get_related_populators(klass_info, select, db, fetch_mode)
known_related_objects = [
(
field,
related_objs,
operator.attrgetter(
*[
(
field.attname
if from_field == "self"
else queryset.model._meta.get_field(from_field).attname
)
for from_field in field.from_fields
]
),
)
for field, related_objs in queryset._known_related_objects.items()
]
peers = []
for row in compiler.results_iter(results):
obj = model_cls.from_db(
db,
init_list,
row[model_fields_start:model_fields_end],
fetch_mode=fetch_mode,
)
if fetch_mode.track_peers:
peers.append(weak_ref(obj))
obj._state.peers = peers
for rel_populator in related_populators:
rel_populator.populate(row, obj)
if annotation_col_map:
for attr_name, col_pos in annotation_col_map.items():
setattr(obj, attr_name, row[col_pos])
# Add the known related objects to the model.
for field, rel_objs, rel_getter in known_related_objects:
# Avoid overwriting objects loaded by, e.g., select_related().
if field.is_cached(obj):
continue
rel_obj_id = rel_getter(obj)
try:
rel_obj = rel_objs[rel_obj_id]
except KeyError:
pass # May happen in qs1 | qs2 scenarios.
else:
setattr(obj, field.name, rel_obj)
yield obj
class RawModelIterable(BaseIterable):
"""
Iterable that yields a model instance for each row from a raw queryset.
"""
def __iter__(self):
# Cache some things for performance reasons outside the loop.
db = self.queryset.db
query = self.queryset.query
connection = connections[db]
compiler = connection.ops.compiler("SQLCompiler")(query, connection, db)
query_iterator = iter(query)
try:
(
model_init_names,
model_init_pos,
annotation_fields,
) = self.queryset.resolve_model_init_order()
model_cls = self.queryset.model
if any(
f.attname not in model_init_names for f in model_cls._meta.pk_fields
):
raise exceptions.FieldDoesNotExist(
"Raw query must include the primary key"
)
fields = [self.queryset.model_fields.get(c) for c in self.queryset.columns]
cols = [f.get_col(f.model._meta.db_table) if f else None for f in fields]
converters = compiler.get_converters(cols)
if converters:
query_iterator = compiler.apply_converters(query_iterator, converters)
if compiler.has_composite_fields(cols):
query_iterator = compiler.composite_fields_to_tuples(
query_iterator, cols
)
fetch_mode = self.queryset._fetch_mode
peers = []
for values in query_iterator:
# Associate fields to values
model_init_values = [values[pos] for pos in model_init_pos]
instance = model_cls.from_db(
db, model_init_names, model_init_values, fetch_mode=fetch_mode
)
if fetch_mode.track_peers:
peers.append(weak_ref(instance))
instance._state.peers = peers
if annotation_fields:
for column, pos in annotation_fields:
setattr(instance, column, values[pos])
yield instance
finally:
# Done iterating the Query. If it has its own cursor, close it.
if hasattr(query, "cursor") and query.cursor:
query.cursor.close()
class ValuesIterable(BaseIterable):
"""
Iterable returned by QuerySet.values() that yields a dict for each row.
"""
def __iter__(self):
queryset = self.queryset
query = queryset.query
compiler = query.get_compiler(queryset.db)
if query.selected:
names = list(query.selected)
else:
# extra(select=...) cols are always at the start of the row.
names = [
*query.extra_select,
*query.values_select,
*query.annotation_select,
]
indexes = range(len(names))
for row in compiler.results_iter(
chunked_fetch=self.chunked_fetch, chunk_size=self.chunk_size
):
yield {names[i]: row[i] for i in indexes}
class ValuesListIterable(BaseIterable):
"""
Iterable returned by QuerySet.values_list(flat=False) that yields a tuple
for each row.
"""
def __iter__(self):
queryset = self.queryset
query = queryset.query
compiler = query.get_compiler(queryset.db)
return compiler.results_iter(
tuple_expected=True,
chunked_fetch=self.chunked_fetch,
chunk_size=self.chunk_size,
)
class NamedValuesListIterable(ValuesListIterable):
"""
Iterable returned by QuerySet.values_list(named=True) that yields a
namedtuple for each row.
"""
def __iter__(self):
queryset = self.queryset
if queryset._fields:
names = queryset._fields
else:
query = queryset.query
names = [
*query.extra_select,
*query.values_select,
*query.annotation_select,
]
tuple_class = create_namedtuple_class(*names)
new = tuple.__new__
for row in super().__iter__():
yield new(tuple_class, row)
class FlatValuesListIterable(BaseIterable):
"""
Iterable returned by QuerySet.values_list(flat=True) that yields single
values.
"""
def __iter__(self):
queryset = self.queryset
compiler = queryset.query.get_compiler(queryset.db)
for row in compiler.results_iter(
chunked_fetch=self.chunked_fetch, chunk_size=self.chunk_size
):
yield row[0]
class QuerySet(AltersData):
"""Represent a lazy database lookup for a set of objects."""
def __init__(self, model=None, query=None, using=None, hints=None):
self.model = model
self._db = using
self._hints = hints or {}
self._query = query or sql.Query(self.model)
self._result_cache = None
self._sticky_filter = False
self._for_write = False
self._prefetch_related_lookups = ()
self._prefetch_done = False
self._known_related_objects = {} # {rel_field: {pk: rel_obj}}
self._iterable_class = ModelIterable
self._fetch_mode = FETCH_ONE
self._fields = None
self._defer_next_filter = False
self._deferred_filter = None
@property
def query(self):
if self._deferred_filter:
negate, args, kwargs = self._deferred_filter
self._filter_or_exclude_inplace(negate, args, kwargs)
self._deferred_filter = None
return self._query
@query.setter
def query(self, value):
if value.values_select:
self._iterable_class = ValuesIterable
self._query = value
def as_manager(cls):
# Address the circular dependency between `Queryset` and `Manager`.
from django.db.models.manager import Manager
manager = Manager.from_queryset(cls)()
manager._built_with_as_manager = True
return manager
as_manager.queryset_only = True
as_manager = classmethod(as_manager)
########################
# PYTHON MAGIC METHODS #
########################
def __deepcopy__(self, memo):
"""Don't populate the QuerySet's cache."""
obj = self.__class__()
for k, v in self.__dict__.items():
if k == "_result_cache":
obj.__dict__[k] = None
else:
obj.__dict__[k] = copy.deepcopy(v, memo)
return obj
def __getstate__(self):
# Force the cache to be fully populated.
self._fetch_all()
return {**self.__dict__, DJANGO_VERSION_PICKLE_KEY: django.__version__}
def __setstate__(self, state):
pickled_version = state.get(DJANGO_VERSION_PICKLE_KEY)
if pickled_version:
if pickled_version != django.__version__:
warnings.warn(
"Pickled queryset instance's Django version %s does not "
"match the current version %s."
% (pickled_version, django.__version__),
RuntimeWarning,
stacklevel=2,
)
else:
warnings.warn(
"Pickled queryset instance's Django version is not specified.",
RuntimeWarning,
stacklevel=2,
)
self.__dict__.update(state)
def __repr__(self):
data = list(self[: REPR_OUTPUT_SIZE + 1])
if len(data) > REPR_OUTPUT_SIZE:
data[-1] = "...(remaining elements truncated)..."
return "<%s %r>" % (self.__class__.__name__, data)
def __len__(self):
self._fetch_all()
return len(self._result_cache)
def __iter__(self):
"""
The queryset iterator protocol uses three nested iterators in the
default case:
1. sql.compiler.execute_sql()
- Returns 100 rows at time (constants.GET_ITERATOR_CHUNK_SIZE)
using cursor.fetchmany(). This part is responsible for
doing some column masking, and returning the rows in chunks.
2. sql.compiler.results_iter()
- Returns one row at time. At this point the rows are still just
tuples. In some cases the return values are converted to
Python values at this location.
3. self.iterator()
- Responsible for turning the rows into model objects.
"""
self._fetch_all()
return iter(self._result_cache)
def __aiter__(self):
# Remember, __aiter__ itself is synchronous, it's the thing it returns
# that is async!
async def generator():
await sync_to_async(self._fetch_all)()
for item in self._result_cache:
yield item
return generator()
def __bool__(self):
self._fetch_all()
return bool(self._result_cache)
def __getitem__(self, k):
"""Retrieve an item or slice from the set of results."""
if not isinstance(k, (int, slice)):
raise TypeError(
"QuerySet indices must be integers or slices, not %s."
% type(k).__name__
)
if (isinstance(k, int) and k < 0) or (
isinstance(k, slice)
and (
(k.start is not None and k.start < 0)
or (k.stop is not None and k.stop < 0)
)
):
raise ValueError("Negative indexing is not supported.")
if self._result_cache is not None:
return self._result_cache[k]
if isinstance(k, slice):
qs = self._chain()
if k.start is not None:
start = int(k.start)
else:
start = None
if k.stop is not None:
stop = int(k.stop)
else:
stop = None
qs.query.set_limits(start, stop)
return list(qs)[:: k.step] if k.step else qs
qs = self._chain()
qs.query.set_limits(k, k + 1)
qs._fetch_all()
return qs._result_cache[0]
def __class_getitem__(cls, *args, **kwargs):
return cls
def __and__(self, other):
self._check_operator_queryset(other, "&")
self._merge_sanity_check(other)
if isinstance(other, EmptyQuerySet):
return other
if isinstance(self, EmptyQuerySet):
return self
combined = self._chain()
combined._merge_known_related_objects(other)
combined.query.combine(other.query, sql.AND)
return combined
def __or__(self, other):
self._check_operator_queryset(other, "|")
self._merge_sanity_check(other)
if isinstance(self, EmptyQuerySet):
return other
if isinstance(other, EmptyQuerySet):
return self
query = (
self
if self.query.can_filter()
else self.model._base_manager.filter(pk__in=self.values("pk"))
)
combined = query._chain()
combined._merge_known_related_objects(other)
if not other.query.can_filter():
other = other.model._base_manager.filter(pk__in=other.values("pk"))
combined.query.combine(other.query, sql.OR)
return combined
def __xor__(self, other):
self._check_operator_queryset(other, "^")
self._merge_sanity_check(other)
if isinstance(self, EmptyQuerySet):
return other
if isinstance(other, EmptyQuerySet):
return self
query = (
self
if self.query.can_filter()
else self.model._base_manager.filter(pk__in=self.values("pk"))
)
combined = query._chain()
combined._merge_known_related_objects(other)
if not other.query.can_filter():
other = other.model._base_manager.filter(pk__in=other.values("pk"))
combined.query.combine(other.query, sql.XOR)
return combined
####################################
# METHODS THAT DO DATABASE QUERIES #
####################################
def _iterator(self, use_chunked_fetch, chunk_size):
iterable = self._iterable_class(
self,
chunked_fetch=use_chunked_fetch,
chunk_size=chunk_size or 2000,
)
if not self._prefetch_related_lookups or chunk_size is None:
yield from iterable
return
iterator = iter(iterable)
while results := list(islice(iterator, chunk_size)):
prefetch_related_objects(results, *self._prefetch_related_lookups)
yield from results
def iterator(self, chunk_size=None):
"""
An iterator over the results from applying this QuerySet to the
database. chunk_size must be provided for QuerySets that prefetch
related objects. Otherwise, a default chunk_size of 2000 is supplied.
"""
if chunk_size is None:
if self._prefetch_related_lookups:
raise ValueError(
"chunk_size must be provided when using QuerySet.iterator() after "
"prefetch_related()."
)
elif chunk_size <= 0:
raise ValueError("Chunk size must be strictly positive.")
use_chunked_fetch = not connections[self.db].settings_dict.get(
"DISABLE_SERVER_SIDE_CURSORS"
)
return self._iterator(use_chunked_fetch, chunk_size)
async def aiterator(self, chunk_size=2000):
"""
An asynchronous iterator over the results from applying this QuerySet
to the database.
"""
if chunk_size <= 0:
raise ValueError("Chunk size must be strictly positive.")
use_chunked_fetch = not connections[self.db].settings_dict.get(
"DISABLE_SERVER_SIDE_CURSORS"
)
iterable = self._iterable_class(
self, chunked_fetch=use_chunked_fetch, chunk_size=chunk_size
)
if self._prefetch_related_lookups:
results = []
async for item in iterable:
results.append(item)
if len(results) >= chunk_size:
await aprefetch_related_objects(
results, *self._prefetch_related_lookups
)
for result in results:
yield result
results.clear()
if results:
await aprefetch_related_objects(
results, *self._prefetch_related_lookups
)
for result in results:
yield result
else:
async for item in iterable:
yield item
def aggregate(self, *args, **kwargs):
"""
Return a dictionary containing the calculations (aggregation)
over the current queryset.
If args is present the expression is passed as a kwarg using
the Aggregate object's default alias.
"""
if self.query.distinct_fields:
raise NotImplementedError("aggregate() + distinct(fields) not implemented.")
self._validate_values_are_expressions(
(*args, *kwargs.values()), method_name="aggregate"
)
for arg in args:
# The default_alias property raises TypeError if default_alias
# can't be set automatically or AttributeError if it isn't an
# attribute.
try:
arg.default_alias
except (AttributeError, TypeError):
raise TypeError("Complex aggregates require an alias")
kwargs[arg.default_alias] = arg
return self.query.chain().get_aggregation(self.db, kwargs)
async def aaggregate(self, *args, **kwargs):
return await sync_to_async(self.aggregate)(*args, **kwargs)
def count(self):
"""
Perform a SELECT COUNT() and return the number of records as an
integer.
If the QuerySet is already fully cached, return the length of the
cached results set to avoid multiple SELECT COUNT(*) calls.
"""
if self._result_cache is not None:
return len(self._result_cache)
return self.query.get_count(using=self.db)
async def acount(self):
return await sync_to_async(self.count)()
def get(self, *args, **kwargs):
"""
Perform the query and return a single object matching the given
keyword arguments.
"""
if self.query.combinator and (args or kwargs):
raise NotSupportedError(
"Calling QuerySet.get(...) with filters after %s() is not "
"supported." % self.query.combinator
)
clone = self._chain() if self.query.combinator else self.filter(*args, **kwargs)
if self.query.can_filter() and not self.query.distinct_fields:
clone = clone.order_by()
limit = None
if (
not clone.query.select_for_update
or connections[clone.db].features.supports_select_for_update_with_limit
):
limit = MAX_GET_RESULTS
clone.query.set_limits(high=limit)
num = len(clone)
if num == 1:
return clone._result_cache[0]
if not num:
raise self.model.DoesNotExist(
"%s matching query does not exist." % self.model._meta.object_name
)
raise self.model.MultipleObjectsReturned(
"get() returned more than one %s -- it returned %s!"
% (
self.model._meta.object_name,
num if not limit or num < limit else "more than %s" % (limit - 1),
)
)
async def aget(self, *args, **kwargs):
return await sync_to_async(self.get)(*args, **kwargs)
def create(self, **kwargs):
"""
Create a new object with the given kwargs, saving it to the database
and returning the created object.
"""
reverse_one_to_one_fields = frozenset(kwargs).intersection(
self.model._meta._reverse_one_to_one_field_names
)
if reverse_one_to_one_fields:
raise ValueError(
"The following fields do not exist in this model: %s"
% ", ".join(reverse_one_to_one_fields)
)
obj = self.model(**kwargs)
self._for_write = True
obj.save(force_insert=True, using=self.db)
obj._state.fetch_mode = self._fetch_mode
return obj
create.alters_data = True
async def acreate(self, **kwargs):
return await sync_to_async(self.create)(**kwargs)
acreate.alters_data = True
def _prepare_for_bulk_create(self, objs):
objs_with_pk, objs_without_pk = [], []
for obj in objs:
if isinstance(obj.pk, DatabaseDefault):
objs_without_pk.append(obj)
elif obj._is_pk_set():
objs_with_pk.append(obj)
else:
obj.pk = obj._meta.pk.get_pk_value_on_save(obj)
if obj._is_pk_set():
objs_with_pk.append(obj)
else:
objs_without_pk.append(obj)
obj._prepare_related_fields_for_save(operation_name="bulk_create")
return objs_with_pk, objs_without_pk
def _check_bulk_create_options(
self, ignore_conflicts, update_conflicts, update_fields, unique_fields
):
if ignore_conflicts and update_conflicts:
raise ValueError(
"ignore_conflicts and update_conflicts are mutually exclusive."
)
db_features = connections[self.db].features
if ignore_conflicts:
if not db_features.supports_ignore_conflicts:
raise NotSupportedError(
"This database backend does not support ignoring conflicts."
)
return OnConflict.IGNORE
elif update_conflicts:
if not db_features.supports_update_conflicts:
raise NotSupportedError(
"This database backend does not support updating conflicts."
)
if not update_fields:
raise ValueError(
"Fields that will be updated when a row insertion fails "
"on conflicts must be provided."
)
if unique_fields and not db_features.supports_update_conflicts_with_target:
raise NotSupportedError(
"This database backend does not support updating "
"conflicts with specifying unique fields that can trigger "
"the upsert."
)
if not unique_fields and db_features.supports_update_conflicts_with_target:
raise ValueError(
"Unique fields that can trigger the upsert must be provided."
)
# Updating primary keys and non-concrete fields is forbidden.
if any(not f.concrete for f in update_fields):
raise ValueError(
"bulk_create() can only be used with concrete fields in "
"update_fields."
)
if any(f in self.model._meta.pk_fields for f in update_fields):
raise ValueError(
"bulk_create() cannot be used with primary keys in "
"update_fields."
)
if unique_fields:
if any(not f.concrete for f in unique_fields):
raise ValueError(
"bulk_create() can only be used with concrete fields "
"in unique_fields."
)
return OnConflict.UPDATE
return None
def bulk_create(
self,
objs,
batch_size=None,
ignore_conflicts=False,
update_conflicts=False,
update_fields=None,
unique_fields=None,
):
"""
Insert each of the instances into the database. Do *not* call
save() on each of the instances, do not send any pre/post_save
signals, and do not set the primary key attribute if it is an
autoincrement field (except if
features.can_return_rows_from_bulk_insert=True).
Multi-table models are not supported.
"""
# When you bulk insert you don't get the primary keys back (if it's an
# autoincrement, except if can_return_rows_from_bulk_insert=True), so
# you can't insert into the child tables which references this. There
# are two workarounds:
# 1) This could be implemented if you didn't have an autoincrement pk
# 2) You could do it by doing O(n) normal inserts into the parent
# tables to get the primary keys back and then doing a single bulk
# insert into the childmost table.
# We currently set the primary keys on the objects when using
# PostgreSQL via the RETURNING ID clause. It should be possible for
# Oracle as well, but the semantics for extracting the primary keys is
# trickier so it's not done yet.
if batch_size is not None and batch_size <= 0:
raise ValueError("Batch size must be a positive integer.")
# Check that the parents share the same concrete model with the our
# model to detect the inheritance pattern ConcreteGrandParent ->
# MultiTableParent -> ProxyChild. Simply checking
# self.model._meta.proxy would not identify that case as involving
# multiple tables.
for parent in self.model._meta.all_parents:
if parent._meta.concrete_model is not self.model._meta.concrete_model:
raise ValueError("Can't bulk create a multi-table inherited model")
if not objs:
return objs
opts = self.model._meta
if unique_fields:
# Primary key is allowed in unique_fields.
unique_fields = [
self.model._meta.get_field(opts.pk.name if name == "pk" else name)
for name in unique_fields
]
if update_fields:
update_fields = [self.model._meta.get_field(name) for name in update_fields]
on_conflict = self._check_bulk_create_options(
ignore_conflicts,
update_conflicts,
update_fields,
unique_fields,
)
self._for_write = True
fields = [f for f in opts.concrete_fields if not f.generated]
objs = list(objs)
objs_with_pk, objs_without_pk = self._prepare_for_bulk_create(objs)
if objs_with_pk and objs_without_pk:
context = transaction.atomic(using=self.db, savepoint=False)
else:
context = nullcontext()
with context:
self._handle_order_with_respect_to(objs)
if objs_with_pk:
returned_columns = self._batched_insert(
objs_with_pk,
fields,
batch_size,
on_conflict=on_conflict,
update_fields=update_fields,
unique_fields=unique_fields,
)
for obj_with_pk, results in zip(objs_with_pk, returned_columns):
for result, field in zip(results, opts.db_returning_fields):
setattr(obj_with_pk, field.attname, result)
for obj_with_pk in objs_with_pk:
obj_with_pk._state.adding = False
obj_with_pk._state.db = self.db
if objs_without_pk:
fields = [f for f in fields if not isinstance(f, AutoField)]
returned_columns = self._batched_insert(
objs_without_pk,
fields,
batch_size,
on_conflict=on_conflict,
update_fields=update_fields,
unique_fields=unique_fields,
)
connection = connections[self.db]
if (
connection.features.can_return_rows_from_bulk_insert
and on_conflict is None
):
assert len(returned_columns) == len(objs_without_pk)
for obj_without_pk, results in zip(objs_without_pk, returned_columns):
for result, field in zip(results, opts.db_returning_fields):
setattr(obj_without_pk, field.attname, result)
obj_without_pk._state.adding = False
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | true |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/django/db/models/constants.py | django/db/models/constants.py | """
Constants used across the ORM in general.
"""
from enum import Enum
# Separator used to split filter strings apart.
LOOKUP_SEP = "__"
class OnConflict(Enum):
IGNORE = "ignore"
UPDATE = "update"
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/django/db/models/constraints.py | django/db/models/constraints.py | from enum import Enum
from types import NoneType
from django.core import checks
from django.core.exceptions import FieldDoesNotExist, ValidationError
from django.db import connections
from django.db.models.constants import LOOKUP_SEP
from django.db.models.expressions import Exists, ExpressionList, F, RawSQL
from django.db.models.indexes import IndexExpression
from django.db.models.lookups import Exact, IsNull
from django.db.models.query_utils import Q
from django.db.models.sql.query import Query
from django.db.utils import DEFAULT_DB_ALIAS
from django.utils.translation import gettext_lazy as _
__all__ = ["BaseConstraint", "CheckConstraint", "Deferrable", "UniqueConstraint"]
class BaseConstraint:
default_violation_error_message = _("Constraint “%(name)s” is violated.")
violation_error_code = None
violation_error_message = None
non_db_attrs = ("violation_error_code", "violation_error_message")
def __init__(
self, *, name, violation_error_code=None, violation_error_message=None
):
self.name = name
if violation_error_code is not None:
self.violation_error_code = violation_error_code
if violation_error_message is not None:
self.violation_error_message = violation_error_message
else:
self.violation_error_message = self.default_violation_error_message
@property
def contains_expressions(self):
return False
def constraint_sql(self, model, schema_editor):
raise NotImplementedError("This method must be implemented by a subclass.")
def create_sql(self, model, schema_editor):
raise NotImplementedError("This method must be implemented by a subclass.")
def remove_sql(self, model, schema_editor):
raise NotImplementedError("This method must be implemented by a subclass.")
@classmethod
def _expression_refs_exclude(cls, model, expression, exclude):
get_field = model._meta.get_field
for field_name, *__ in model._get_expr_references(expression):
if field_name == "pk":
field = model._meta.pk
else:
field = get_field(field_name)
if field_name in exclude or field.name in exclude:
return True
if field.generated and cls._expression_refs_exclude(
model, field.expression, exclude
):
return True
return False
def validate(self, model, instance, exclude=None, using=DEFAULT_DB_ALIAS):
raise NotImplementedError("This method must be implemented by a subclass.")
def get_violation_error_message(self):
return self.violation_error_message % {"name": self.name}
def check(self, model, connection):
return []
def _check_references(self, model, references):
from django.db.models.fields.composite import CompositePrimaryKey
errors = []
fields = set()
for field_name, *lookups in references:
# pk is an alias that won't be found by opts.get_field().
if field_name != "pk" or isinstance(model._meta.pk, CompositePrimaryKey):
fields.add(field_name)
if not lookups:
# If it has no lookups it cannot result in a JOIN.
continue
try:
if field_name == "pk":
field = model._meta.pk
else:
field = model._meta.get_field(field_name)
if not field.is_relation or field.many_to_many or field.one_to_many:
continue
except FieldDoesNotExist:
continue
# JOIN must happen at the first lookup.
first_lookup = lookups[0]
if (
hasattr(field, "get_transform")
and hasattr(field, "get_lookup")
and field.get_transform(first_lookup) is None
and field.get_lookup(first_lookup) is None
):
errors.append(
checks.Error(
"'constraints' refers to the joined field '%s'."
% LOOKUP_SEP.join([field_name, *lookups]),
obj=model,
id="models.E041",
)
)
errors.extend(model._check_local_fields(fields, "constraints"))
return errors
def deconstruct(self):
path = "%s.%s" % (self.__class__.__module__, self.__class__.__name__)
path = path.replace("django.db.models.constraints", "django.db.models")
kwargs = {"name": self.name}
if (
self.violation_error_message is not None
and self.violation_error_message != self.default_violation_error_message
):
kwargs["violation_error_message"] = self.violation_error_message
if self.violation_error_code is not None:
kwargs["violation_error_code"] = self.violation_error_code
return (path, (), kwargs)
def clone(self):
_, args, kwargs = self.deconstruct()
return self.__class__(*args, **kwargs)
class CheckConstraint(BaseConstraint):
def __init__(
self,
*,
condition,
name,
violation_error_code=None,
violation_error_message=None,
):
self.condition = condition
if not getattr(condition, "conditional", False):
raise TypeError(
"CheckConstraint.condition must be a Q instance or boolean expression."
)
super().__init__(
name=name,
violation_error_code=violation_error_code,
violation_error_message=violation_error_message,
)
def check(self, model, connection):
errors = []
if not (
connection.features.supports_table_check_constraints
or "supports_table_check_constraints" in model._meta.required_db_features
):
errors.append(
checks.Warning(
f"{connection.display_name} does not support check constraints.",
hint=(
"A constraint won't be created. Silence this warning if you "
"don't care about it."
),
obj=model,
id="models.W027",
)
)
elif (
connection.features.supports_table_check_constraints
or "supports_table_check_constraints"
not in model._meta.required_db_features
):
references = set()
condition = self.condition
if isinstance(condition, Q):
references.update(model._get_expr_references(condition))
if any(isinstance(expr, RawSQL) for expr in condition.flatten()):
errors.append(
checks.Warning(
f"Check constraint {self.name!r} contains RawSQL() expression "
"and won't be validated during the model full_clean().",
hint="Silence this warning if you don't care about it.",
obj=model,
id="models.W045",
),
)
errors.extend(self._check_references(model, references))
return errors
def _get_check_sql(self, model, schema_editor):
query = Query(model=model, alias_cols=False)
where = query.build_where(self.condition)
compiler = query.get_compiler(connection=schema_editor.connection)
sql, params = where.as_sql(compiler, schema_editor.connection)
return sql % tuple(schema_editor.quote_value(p) for p in params)
def constraint_sql(self, model, schema_editor):
check = self._get_check_sql(model, schema_editor)
return schema_editor._check_sql(self.name, check)
def create_sql(self, model, schema_editor):
check = self._get_check_sql(model, schema_editor)
return schema_editor._create_check_sql(model, self.name, check)
def remove_sql(self, model, schema_editor):
return schema_editor._delete_check_sql(model, self.name)
def validate(self, model, instance, exclude=None, using=DEFAULT_DB_ALIAS):
against = instance._get_field_expression_map(meta=model._meta, exclude=exclude)
# Ignore constraints with excluded fields in condition.
if exclude and self._expression_refs_exclude(model, self.condition, exclude):
return
if not Q(self.condition).check(against, using=using):
raise ValidationError(
self.get_violation_error_message(), code=self.violation_error_code
)
def __repr__(self):
return "<%s: condition=%s name=%s%s%s>" % (
self.__class__.__qualname__,
self.condition,
repr(self.name),
(
""
if self.violation_error_code is None
else " violation_error_code=%r" % self.violation_error_code
),
(
""
if self.violation_error_message is None
or self.violation_error_message == self.default_violation_error_message
else " violation_error_message=%r" % self.violation_error_message
),
)
def __eq__(self, other):
if isinstance(other, CheckConstraint):
return (
self.name == other.name
and self.condition == other.condition
and self.violation_error_code == other.violation_error_code
and self.violation_error_message == other.violation_error_message
)
return super().__eq__(other)
def deconstruct(self):
path, args, kwargs = super().deconstruct()
kwargs["condition"] = self.condition
return path, args, kwargs
class Deferrable(Enum):
DEFERRED = "deferred"
IMMEDIATE = "immediate"
# A similar format was proposed for Python 3.10.
def __repr__(self):
return f"{self.__class__.__qualname__}.{self._name_}"
class UniqueConstraint(BaseConstraint):
def __init__(
self,
*expressions,
fields=(),
name,
condition=None,
deferrable=None,
include=None,
opclasses=(),
nulls_distinct=None,
violation_error_code=None,
violation_error_message=None,
):
if not name:
raise ValueError("A unique constraint must be named.")
if not expressions and not fields:
raise ValueError(
"At least one field or expression is required to define a "
"unique constraint."
)
if expressions and fields:
raise ValueError(
"UniqueConstraint.fields and expressions are mutually exclusive."
)
if not isinstance(condition, (NoneType, Q)):
raise ValueError("UniqueConstraint.condition must be a Q instance.")
if condition and deferrable:
raise ValueError("UniqueConstraint with conditions cannot be deferred.")
if include and deferrable:
raise ValueError("UniqueConstraint with include fields cannot be deferred.")
if opclasses and deferrable:
raise ValueError("UniqueConstraint with opclasses cannot be deferred.")
if expressions and deferrable:
raise ValueError("UniqueConstraint with expressions cannot be deferred.")
if expressions and opclasses:
raise ValueError(
"UniqueConstraint.opclasses cannot be used with expressions. "
"Use django.contrib.postgres.indexes.OpClass() instead."
)
if not isinstance(deferrable, (NoneType, Deferrable)):
raise TypeError(
"UniqueConstraint.deferrable must be a Deferrable instance."
)
if not isinstance(include, (NoneType, list, tuple)):
raise TypeError("UniqueConstraint.include must be a list or tuple.")
if not isinstance(opclasses, (list, tuple)):
raise TypeError("UniqueConstraint.opclasses must be a list or tuple.")
if not isinstance(nulls_distinct, (NoneType, bool)):
raise TypeError("UniqueConstraint.nulls_distinct must be a bool.")
if opclasses and len(fields) != len(opclasses):
raise ValueError(
"UniqueConstraint.fields and UniqueConstraint.opclasses must "
"have the same number of elements."
)
self.fields = tuple(fields)
self.condition = condition
self.deferrable = deferrable
self.include = tuple(include) if include else ()
self.opclasses = opclasses
self.nulls_distinct = nulls_distinct
self.expressions = tuple(
F(expression) if isinstance(expression, str) else expression
for expression in expressions
)
super().__init__(
name=name,
violation_error_code=violation_error_code,
violation_error_message=violation_error_message,
)
@property
def contains_expressions(self):
return bool(self.expressions)
def check(self, model, connection):
errors = model._check_local_fields({*self.fields, *self.include}, "constraints")
required_db_features = model._meta.required_db_features
if self.condition is not None and not (
connection.features.supports_partial_indexes
or "supports_partial_indexes" in required_db_features
):
errors.append(
checks.Warning(
f"{connection.display_name} does not support unique constraints "
"with conditions.",
hint=(
"A constraint won't be created. Silence this warning if you "
"don't care about it."
),
obj=model,
id="models.W036",
)
)
if self.deferrable is not None and not (
connection.features.supports_deferrable_unique_constraints
or "supports_deferrable_unique_constraints" in required_db_features
):
errors.append(
checks.Warning(
f"{connection.display_name} does not support deferrable unique "
"constraints.",
hint=(
"A constraint won't be created. Silence this warning if you "
"don't care about it."
),
obj=model,
id="models.W038",
)
)
if self.include and not (
connection.features.supports_covering_indexes
or "supports_covering_indexes" in required_db_features
):
errors.append(
checks.Warning(
f"{connection.display_name} does not support unique constraints "
"with non-key columns.",
hint=(
"A constraint won't be created. Silence this warning if you "
"don't care about it."
),
obj=model,
id="models.W039",
)
)
if self.contains_expressions and not (
connection.features.supports_expression_indexes
or "supports_expression_indexes" in required_db_features
):
errors.append(
checks.Warning(
f"{connection.display_name} does not support unique constraints on "
"expressions.",
hint=(
"A constraint won't be created. Silence this warning if you "
"don't care about it."
),
obj=model,
id="models.W044",
)
)
if self.nulls_distinct is not None and not (
connection.features.supports_nulls_distinct_unique_constraints
or "supports_nulls_distinct_unique_constraints" in required_db_features
):
errors.append(
checks.Warning(
f"{connection.display_name} does not support "
"UniqueConstraint.nulls_distinct.",
hint=(
"A constraint won't be created. Silence this warning if you "
"don't care about it."
),
obj=model,
id="models.W047",
)
)
references = set()
if (
connection.features.supports_partial_indexes
or "supports_partial_indexes" not in required_db_features
) and isinstance(self.condition, Q):
references.update(model._get_expr_references(self.condition))
if self.contains_expressions and (
connection.features.supports_expression_indexes
or "supports_expression_indexes" not in required_db_features
):
for expression in self.expressions:
references.update(model._get_expr_references(expression))
errors.extend(self._check_references(model, references))
return errors
def _get_condition_sql(self, model, schema_editor):
if self.condition is None:
return None
query = Query(model=model, alias_cols=False)
where = query.build_where(self.condition)
compiler = query.get_compiler(connection=schema_editor.connection)
sql, params = where.as_sql(compiler, schema_editor.connection)
return sql % tuple(schema_editor.quote_value(p) for p in params)
def _get_index_expressions(self, model, schema_editor):
if not self.expressions:
return None
index_expressions = []
for expression in self.expressions:
index_expression = IndexExpression(expression)
index_expression.set_wrapper_classes(schema_editor.connection)
index_expressions.append(index_expression)
return ExpressionList(*index_expressions).resolve_expression(
Query(model, alias_cols=False),
)
def constraint_sql(self, model, schema_editor):
fields = [model._meta.get_field(field_name) for field_name in self.fields]
include = [
model._meta.get_field(field_name).column for field_name in self.include
]
condition = self._get_condition_sql(model, schema_editor)
expressions = self._get_index_expressions(model, schema_editor)
return schema_editor._unique_sql(
model,
fields,
self.name,
condition=condition,
deferrable=self.deferrable,
include=include,
opclasses=self.opclasses,
expressions=expressions,
nulls_distinct=self.nulls_distinct,
)
def create_sql(self, model, schema_editor):
fields = [model._meta.get_field(field_name) for field_name in self.fields]
include = [
model._meta.get_field(field_name).column for field_name in self.include
]
condition = self._get_condition_sql(model, schema_editor)
expressions = self._get_index_expressions(model, schema_editor)
return schema_editor._create_unique_sql(
model,
fields,
self.name,
condition=condition,
deferrable=self.deferrable,
include=include,
opclasses=self.opclasses,
expressions=expressions,
nulls_distinct=self.nulls_distinct,
)
def remove_sql(self, model, schema_editor):
condition = self._get_condition_sql(model, schema_editor)
include = [
model._meta.get_field(field_name).column for field_name in self.include
]
expressions = self._get_index_expressions(model, schema_editor)
return schema_editor._delete_unique_sql(
model,
self.name,
condition=condition,
deferrable=self.deferrable,
include=include,
opclasses=self.opclasses,
expressions=expressions,
nulls_distinct=self.nulls_distinct,
)
def __repr__(self):
return "<%s:%s%s%s%s%s%s%s%s%s%s>" % (
self.__class__.__qualname__,
"" if not self.fields else " fields=%s" % repr(self.fields),
"" if not self.expressions else " expressions=%s" % repr(self.expressions),
" name=%s" % repr(self.name),
"" if self.condition is None else " condition=%s" % self.condition,
"" if self.deferrable is None else " deferrable=%r" % self.deferrable,
"" if not self.include else " include=%s" % repr(self.include),
"" if not self.opclasses else " opclasses=%s" % repr(self.opclasses),
(
""
if self.nulls_distinct is None
else " nulls_distinct=%r" % self.nulls_distinct
),
(
""
if self.violation_error_code is None
else " violation_error_code=%r" % self.violation_error_code
),
(
""
if self.violation_error_message is None
or self.violation_error_message == self.default_violation_error_message
else " violation_error_message=%r" % self.violation_error_message
),
)
def __eq__(self, other):
if isinstance(other, UniqueConstraint):
return (
self.name == other.name
and self.fields == other.fields
and self.condition == other.condition
and self.deferrable == other.deferrable
and self.include == other.include
and self.opclasses == other.opclasses
and self.expressions == other.expressions
and self.nulls_distinct is other.nulls_distinct
and self.violation_error_code == other.violation_error_code
and self.violation_error_message == other.violation_error_message
)
return super().__eq__(other)
def deconstruct(self):
path, args, kwargs = super().deconstruct()
if self.fields:
kwargs["fields"] = self.fields
if self.condition:
kwargs["condition"] = self.condition
if self.deferrable:
kwargs["deferrable"] = self.deferrable
if self.include:
kwargs["include"] = self.include
if self.opclasses:
kwargs["opclasses"] = self.opclasses
if self.nulls_distinct is not None:
kwargs["nulls_distinct"] = self.nulls_distinct
return path, self.expressions, kwargs
def validate(self, model, instance, exclude=None, using=DEFAULT_DB_ALIAS):
queryset = model._default_manager.using(using)
if self.fields:
lookup_kwargs = {}
generated_field_names = []
for field_name in self.fields:
if exclude and field_name in exclude:
return
field = model._meta.get_field(field_name)
if field.generated:
if exclude and self._expression_refs_exclude(
model, field.expression, exclude
):
return
generated_field_names.append(field.name)
else:
lookup_value = getattr(instance, field.attname)
if (
self.nulls_distinct is not False
and lookup_value is None
or (
lookup_value == ""
and connections[
using
].features.interprets_empty_strings_as_nulls
)
):
# A composite constraint containing NULL value cannot
# cause a violation since NULL != NULL in SQL.
return
lookup_kwargs[field.name] = lookup_value
lookup_args = []
if generated_field_names:
field_expression_map = instance._get_field_expression_map(
meta=model._meta, exclude=exclude
)
for field_name in generated_field_names:
expression = field_expression_map[field_name]
if self.nulls_distinct is False:
lhs = F(field_name)
condition = Q(Exact(lhs, expression)) | Q(
IsNull(lhs, True), IsNull(expression, True)
)
lookup_args.append(condition)
else:
lookup_kwargs[field_name] = expression
queryset = queryset.filter(*lookup_args, **lookup_kwargs)
else:
# Ignore constraints with excluded fields.
if exclude and any(
self._expression_refs_exclude(model, expression, exclude)
for expression in self.expressions
):
return
replacements = {
F(field): value
for field, value in instance._get_field_expression_map(
meta=model._meta, exclude=exclude
).items()
}
filters = []
for expr in self.expressions:
if hasattr(expr, "get_expression_for_validation"):
expr = expr.get_expression_for_validation()
rhs = expr.replace_expressions(replacements)
condition = Exact(expr, rhs)
if self.nulls_distinct is False:
condition = Q(condition) | Q(IsNull(expr, True), IsNull(rhs, True))
filters.append(condition)
queryset = queryset.filter(*filters)
model_class_pk = instance._get_pk_val(model._meta)
if not instance._state.adding and instance._is_pk_set(model._meta):
queryset = queryset.exclude(pk=model_class_pk)
if not self.condition:
if queryset.exists():
if (
self.fields
and self.violation_error_message
== self.default_violation_error_message
):
# When fields are defined, use the unique_error_message()
# as a default for backward compatibility.
validation_error_message = instance.unique_error_message(
model, self.fields
)
raise ValidationError(
validation_error_message,
code=validation_error_message.code,
)
raise ValidationError(
self.get_violation_error_message(),
code=self.violation_error_code,
)
else:
# Ignore constraints with excluded fields in condition.
if exclude and self._expression_refs_exclude(
model, self.condition, exclude
):
return
against = instance._get_field_expression_map(
meta=model._meta, exclude=exclude
)
if (self.condition & Exists(queryset.filter(self.condition))).check(
against, using=using
):
raise ValidationError(
self.get_violation_error_message(),
code=self.violation_error_code,
)
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/django/db/models/signals.py | django/db/models/signals.py | from functools import partial
from django.db.models.utils import make_model_tuple
from django.dispatch import Signal
class_prepared = Signal()
class ModelSignal(Signal):
"""
Signal subclass that allows the sender to be lazily specified as a string
of the `app_label.ModelName` form.
"""
def _lazy_method(self, method, apps, receiver, sender, **kwargs):
from django.db.models.options import Options
# This partial takes a single optional argument named "sender".
partial_method = partial(method, receiver, **kwargs)
if isinstance(sender, str):
apps = apps or Options.default_apps
apps.lazy_model_operation(partial_method, make_model_tuple(sender))
else:
return partial_method(sender)
def connect(self, receiver, sender=None, weak=True, dispatch_uid=None, apps=None):
self._lazy_method(
super().connect,
apps,
receiver,
sender,
weak=weak,
dispatch_uid=dispatch_uid,
)
def disconnect(self, receiver=None, sender=None, dispatch_uid=None, apps=None):
return self._lazy_method(
super().disconnect, apps, receiver, sender, dispatch_uid=dispatch_uid
)
pre_init = ModelSignal(use_caching=True)
post_init = ModelSignal(use_caching=True)
pre_save = ModelSignal(use_caching=True)
post_save = ModelSignal(use_caching=True)
pre_delete = ModelSignal(use_caching=True)
post_delete = ModelSignal(use_caching=True)
m2m_changed = ModelSignal(use_caching=True)
pre_migrate = Signal()
post_migrate = Signal()
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/django/db/models/query_utils.py | django/db/models/query_utils.py | """
Various data structures used in query construction.
Factored out from django.db.models.query to avoid making the main module very
large and/or so that they can be used by other modules without getting into
circular import difficulties.
"""
import functools
import inspect
import logging
from collections import namedtuple
from contextlib import nullcontext
from django.core.exceptions import FieldError
from django.db import DEFAULT_DB_ALIAS, DatabaseError, connections, models, transaction
from django.db.models.constants import LOOKUP_SEP
from django.utils import tree
from django.utils.functional import cached_property
from django.utils.hashable import make_hashable
logger = logging.getLogger("django.db.models")
# PathInfo is used when converting lookups (fk__somecol). The contents
# describe the relation in Model terms (model Options and Fields for both
# sides of the relation. The join_field is the field backing the relation.
PathInfo = namedtuple(
"PathInfo",
"from_opts to_opts target_fields join_field m2m direct filtered_relation",
)
def subclasses(cls):
yield cls
for subclass in cls.__subclasses__():
yield from subclasses(subclass)
class Q(tree.Node):
"""
Encapsulate filters as objects that can then be combined logically (using
`&` and `|`).
"""
# Connection types
AND = "AND"
OR = "OR"
XOR = "XOR"
default = AND
conditional = True
connectors = (None, AND, OR, XOR)
def __init__(self, *args, _connector=None, _negated=False, **kwargs):
if _connector not in self.connectors:
connector_reprs = ", ".join(f"{conn!r}" for conn in self.connectors[1:])
raise ValueError(f"_connector must be one of {connector_reprs}, or None.")
super().__init__(
children=[*args, *sorted(kwargs.items())],
connector=_connector,
negated=_negated,
)
def _combine(self, other, conn):
if getattr(other, "conditional", False) is False:
raise TypeError(other)
if not self:
return other.copy()
if not other and isinstance(other, Q):
return self.copy()
obj = self.create(connector=conn)
obj.add(self, conn)
obj.add(other, conn)
return obj
def __or__(self, other):
return self._combine(other, self.OR)
def __and__(self, other):
return self._combine(other, self.AND)
def __xor__(self, other):
return self._combine(other, self.XOR)
def __invert__(self):
obj = self.copy()
obj.negate()
return obj
def resolve_expression(
self, query=None, allow_joins=True, reuse=None, summarize=False, for_save=False
):
# We must promote any new joins to left outer joins so that when Q is
# used as an expression, rows aren't filtered due to joins.
clause, joins = query._add_q(
self,
reuse,
allow_joins=allow_joins,
split_subq=False,
check_filterable=False,
summarize=summarize,
)
query.promote_joins(joins)
return clause
def replace_expressions(self, replacements):
if not replacements:
return self
clone = self.create(connector=self.connector, negated=self.negated)
for child in self.children:
child_replacement = child
if isinstance(child, tuple):
lhs, rhs = child
if LOOKUP_SEP in lhs:
path, lookup = lhs.rsplit(LOOKUP_SEP, 1)
else:
path = lhs
lookup = None
field = models.F(path)
if (
field_replacement := field.replace_expressions(replacements)
) is not field:
# Handle the implicit __exact case by falling back to an
# extra transform when get_lookup returns no match for the
# last component of the path.
if lookup is None:
lookup = "exact"
if (lookup_class := field_replacement.get_lookup(lookup)) is None:
if (
transform_class := field_replacement.get_transform(lookup)
) is not None:
field_replacement = transform_class(field_replacement)
lookup = "exact"
lookup_class = field_replacement.get_lookup(lookup)
if rhs is None and lookup == "exact":
lookup_class = field_replacement.get_lookup("isnull")
rhs = True
if lookup_class is not None:
child_replacement = lookup_class(field_replacement, rhs)
else:
child_replacement = child.replace_expressions(replacements)
clone.children.append(child_replacement)
return clone
def flatten(self):
"""
Recursively yield this Q object and all subexpressions, in depth-first
order.
"""
yield self
for child in self.children:
if isinstance(child, tuple):
# Use the lookup.
child = child[1]
if hasattr(child, "flatten"):
yield from child.flatten()
else:
yield child
def check(self, against, using=DEFAULT_DB_ALIAS):
"""
Do a database query to check if the expressions of the Q instance
matches against the expressions.
"""
# Avoid circular imports.
from django.db.models import BooleanField, Value
from django.db.models.functions import Coalesce
from django.db.models.sql import Query
from django.db.models.sql.constants import SINGLE
query = Query(None)
for name, value in against.items():
if not hasattr(value, "resolve_expression"):
value = Value(value)
query.add_annotation(value, name, select=False)
query.add_annotation(Value(1), "_check")
connection = connections[using]
# This will raise a FieldError if a field is missing in "against".
if connection.features.supports_comparing_boolean_expr:
query.add_q(Q(Coalesce(self, True, output_field=BooleanField())))
else:
query.add_q(self)
compiler = query.get_compiler(using=using)
context_manager = (
transaction.atomic(using=using)
if connection.in_atomic_block
else nullcontext()
)
try:
with context_manager:
return compiler.execute_sql(SINGLE) is not None
except DatabaseError as e:
logger.warning("Got a database error calling check() on %r: %s", self, e)
return True
def deconstruct(self):
path = "%s.%s" % (self.__class__.__module__, self.__class__.__name__)
if path.startswith("django.db.models.query_utils"):
path = path.replace("django.db.models.query_utils", "django.db.models")
args = tuple(self.children)
kwargs = {}
if self.connector != self.default:
kwargs["_connector"] = self.connector
if self.negated:
kwargs["_negated"] = True
return path, args, kwargs
@cached_property
def identity(self):
path, args, kwargs = self.deconstruct()
identity = [path, *kwargs.items()]
for child in args:
if isinstance(child, tuple):
arg, value = child
value = make_hashable(value)
identity.append((arg, value))
else:
identity.append(child)
return tuple(identity)
def __eq__(self, other):
if not isinstance(other, Q):
return NotImplemented
return other.identity == self.identity
def __hash__(self):
return hash(self.identity)
@cached_property
def referenced_base_fields(self):
"""
Retrieve all base fields referenced directly or through F expressions
excluding any fields referenced through joins.
"""
# Avoid circular imports.
from django.db.models.sql import query
return {
child.split(LOOKUP_SEP, 1)[0] for child in query.get_children_from_q(self)
}
class DeferredAttribute:
"""
A wrapper for a deferred-loading field. When the value is read from this
object the first time, the query is executed.
"""
def __init__(self, field):
self.field = field
def __get__(self, instance, cls=None):
"""
Retrieve and caches the value from the datastore on the first lookup.
Return the cached value.
"""
if instance is None:
return self
data = instance.__dict__
field_name = self.field.attname
if field_name not in data:
# Let's see if the field is part of the parent chain. If so we
# might be able to reuse the already loaded value. Refs #18343.
val = self._check_parent_chain(instance)
if val is None:
if not instance._is_pk_set():
raise AttributeError(
f"Cannot retrieve deferred field {field_name!r} "
"from an unsaved model."
)
instance._state.fetch_mode.fetch(self, instance)
else:
data[field_name] = val
return data[field_name]
def _check_parent_chain(self, instance):
"""
Check if the field value can be fetched from a parent field already
loaded in the instance. This can be done if the to-be fetched
field is a primary key field.
"""
opts = instance._meta
link_field = opts.get_ancestor_link(self.field.model)
if self.field.primary_key and self.field != link_field:
return getattr(instance, link_field.attname)
return None
def fetch_one(self, instance):
instance.refresh_from_db(fields=[self.field.attname])
def fetch_many(self, instances):
attname = self.field.attname
db = instances[0]._state.db
value_by_pk = (
self.field.model._base_manager.using(db)
.values_list(attname)
.in_bulk({i.pk for i in instances})
)
for instance in instances:
setattr(instance, attname, value_by_pk[instance.pk])
class class_or_instance_method:
"""
Hook used in RegisterLookupMixin to return partial functions depending on
the caller type (instance or class of models.Field).
"""
def __init__(self, class_method, instance_method):
self.class_method = class_method
self.instance_method = instance_method
def __get__(self, instance, owner):
if instance is None:
return functools.partial(self.class_method, owner)
return functools.partial(self.instance_method, instance)
class RegisterLookupMixin:
def _get_lookup(self, lookup_name):
return self.get_lookups().get(lookup_name, None)
@functools.cache
def get_class_lookups(cls):
class_lookups = [
parent.__dict__.get("class_lookups", {}) for parent in inspect.getmro(cls)
]
return cls.merge_dicts(class_lookups)
def get_instance_lookups(self):
class_lookups = self.get_class_lookups()
if instance_lookups := getattr(self, "instance_lookups", None):
return {**class_lookups, **instance_lookups}
return class_lookups
get_lookups = class_or_instance_method(get_class_lookups, get_instance_lookups)
get_class_lookups = classmethod(get_class_lookups)
def get_lookup(self, lookup_name):
from django.db.models.lookups import Lookup
found = self._get_lookup(lookup_name)
if found is None and hasattr(self, "output_field"):
return self.output_field.get_lookup(lookup_name)
if found is not None and not issubclass(found, Lookup):
return None
return found
def get_transform(self, lookup_name):
from django.db.models.lookups import Transform
found = self._get_lookup(lookup_name)
if found is None and hasattr(self, "output_field"):
return self.output_field.get_transform(lookup_name)
if found is not None and not issubclass(found, Transform):
return None
return found
@staticmethod
def merge_dicts(dicts):
"""
Merge dicts in reverse to preference the order of the original list.
e.g., merge_dicts([a, b]) will preference the keys in 'a' over those in
'b'.
"""
merged = {}
for d in reversed(dicts):
merged.update(d)
return merged
@classmethod
def _clear_cached_class_lookups(cls):
for subclass in subclasses(cls):
subclass.get_class_lookups.cache_clear()
def register_class_lookup(cls, lookup, lookup_name=None):
if lookup_name is None:
lookup_name = lookup.lookup_name
if "class_lookups" not in cls.__dict__:
cls.class_lookups = {}
cls.class_lookups[lookup_name] = lookup
cls._clear_cached_class_lookups()
return lookup
def register_instance_lookup(self, lookup, lookup_name=None):
if lookup_name is None:
lookup_name = lookup.lookup_name
if "instance_lookups" not in self.__dict__:
self.instance_lookups = {}
self.instance_lookups[lookup_name] = lookup
return lookup
register_lookup = class_or_instance_method(
register_class_lookup, register_instance_lookup
)
register_class_lookup = classmethod(register_class_lookup)
def _unregister_class_lookup(cls, lookup, lookup_name=None):
"""
Remove given lookup from cls lookups. For use in tests only as it's
not thread-safe.
"""
if lookup_name is None:
lookup_name = lookup.lookup_name
del cls.class_lookups[lookup_name]
cls._clear_cached_class_lookups()
def _unregister_instance_lookup(self, lookup, lookup_name=None):
"""
Remove given lookup from instance lookups. For use in tests only as
it's not thread-safe.
"""
if lookup_name is None:
lookup_name = lookup.lookup_name
del self.instance_lookups[lookup_name]
_unregister_lookup = class_or_instance_method(
_unregister_class_lookup, _unregister_instance_lookup
)
_unregister_class_lookup = classmethod(_unregister_class_lookup)
def select_related_descend(field, restricted, requested, select_mask):
"""
Return whether `field` should be used to descend deeper for
`select_related()` purposes.
Arguments:
* `field` - the field to be checked. Can be either a `Field` or
`ForeignObjectRel` instance.
* `restricted` - a boolean field, indicating if the field list has been
manually restricted using a select_related() clause.
* `requested` - the select_related() dictionary.
* `select_mask` - the dictionary of selected fields.
"""
# Only relationships can be descended.
if not field.remote_field:
return False
# Forward MTI parent links should not be explicitly descended as they are
# always JOIN'ed against (unless excluded by `select_mask`).
if getattr(field.remote_field, "parent_link", False):
return False
# When `select_related()` is used without a `*requested` mask all
# relationships are descended unless they are nullable.
if not restricted:
return not field.null
# When `select_related(*requested)` is used only fields that are part of
# `requested` should be descended.
if field.name not in requested:
return False
# Prevent invalid usages of `select_related()` and `only()`/`defer()` such
# as `select_related("a").only("b")` and `select_related("a").defer("a")`.
if select_mask and field not in select_mask:
raise FieldError(
f"Field {field.model._meta.object_name}.{field.name} cannot be both "
"deferred and traversed using select_related at the same time."
)
return True
def refs_expression(lookup_parts, annotations):
"""
Check if the lookup_parts contains references to the given annotations set.
Because the LOOKUP_SEP is contained in the default annotation names, check
each prefix of the lookup_parts for a match.
"""
for n in range(1, len(lookup_parts) + 1):
level_n_lookup = LOOKUP_SEP.join(lookup_parts[0:n])
if annotations.get(level_n_lookup):
return level_n_lookup, lookup_parts[n:]
return None, ()
def check_rel_lookup_compatibility(model, target_opts, field):
"""
Check that self.model is compatible with target_opts. Compatibility
is OK if:
1) model and opts match (where proxy inheritance is removed)
2) model is parent of opts' model or the other way around
"""
def check(opts):
return (
model._meta.concrete_model == opts.concrete_model
or opts.concrete_model in model._meta.all_parents
or model in opts.all_parents
)
# If the field is a primary key, then doing a query against the field's
# model is ok, too. Consider the case:
# class Restaurant(models.Model):
# place = OneToOneField(Place, primary_key=True):
# Restaurant.objects.filter(pk__in=Restaurant.objects.all()).
# If we didn't have the primary key check, then pk__in (== place__in) would
# give Place's opts as the target opts, but Restaurant isn't compatible
# with that. This logic applies only to primary keys, as when doing
# __in=qs, we are going to turn this into __in=qs.values('pk') later on.
return check(target_opts) or (
getattr(field, "primary_key", False) and check(field.model._meta)
)
class FilteredRelation:
"""Specify custom filtering in the ON clause of SQL joins."""
def __init__(self, relation_name, *, condition=Q()):
if not relation_name:
raise ValueError("relation_name cannot be empty.")
self.relation_name = relation_name
self.alias = None
if not isinstance(condition, Q):
raise ValueError("condition argument must be a Q() instance.")
# .condition and .resolved_condition have to be stored independently
# as the former must remain unchanged for Join.__eq__ to remain stable
# and reusable even once their .filtered_relation are resolved.
self.condition = condition
self.resolved_condition = None
def __eq__(self, other):
if not isinstance(other, self.__class__):
return NotImplemented
return (
self.relation_name == other.relation_name
and self.alias == other.alias
and self.condition == other.condition
)
def clone(self):
clone = FilteredRelation(self.relation_name, condition=self.condition)
clone.alias = self.alias
if (resolved_condition := self.resolved_condition) is not None:
clone.resolved_condition = resolved_condition.clone()
return clone
def relabeled_clone(self, change_map):
clone = self.clone()
if resolved_condition := clone.resolved_condition:
clone.resolved_condition = resolved_condition.relabeled_clone(change_map)
return clone
def resolve_expression(self, query, reuse, *args, **kwargs):
clone = self.clone()
clone.resolved_condition = query.build_filter(
self.condition,
can_reuse=reuse,
allow_joins=True,
split_subq=False,
update_join_types=False,
)[0]
return clone
def as_sql(self, compiler, connection):
return compiler.compile(self.resolved_condition)
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/django/db/models/expressions.py | django/db/models/expressions.py | import copy
import datetime
import functools
import inspect
from collections import defaultdict
from decimal import Decimal
from enum import Enum
from itertools import chain
from types import NoneType
from uuid import UUID
from django.core.exceptions import EmptyResultSet, FieldError, FullResultSet
from django.db import DatabaseError, NotSupportedError, connection
from django.db.models import fields
from django.db.models.constants import LOOKUP_SEP
from django.db.models.query_utils import Q
from django.utils.deconstruct import deconstructible
from django.utils.functional import cached_property, classproperty
from django.utils.hashable import make_hashable
class SQLiteNumericMixin:
"""
Some expressions with output_field=DecimalField() must be cast to
numeric to be properly filtered.
"""
def as_sqlite(self, compiler, connection, **extra_context):
sql, params = self.as_sql(compiler, connection, **extra_context)
try:
if self.output_field.get_internal_type() == "DecimalField":
sql = "(CAST(%s AS NUMERIC))" % sql
except FieldError:
pass
return sql, params
class Combinable:
"""
Provide the ability to combine one or two objects with
some connector. For example F('foo') + F('bar').
"""
# Arithmetic connectors
ADD = "+"
SUB = "-"
MUL = "*"
DIV = "/"
POW = "^"
# The following is a quoted % operator - it is quoted because it can be
# used in strings that also have parameter substitution.
MOD = "%%"
# Bitwise operators - note that these are generated by .bitand()
# and .bitor(), the '&' and '|' are reserved for boolean operator
# usage.
BITAND = "&"
BITOR = "|"
BITLEFTSHIFT = "<<"
BITRIGHTSHIFT = ">>"
BITXOR = "#"
def _combine(self, other, connector, reversed):
if not hasattr(other, "resolve_expression"):
# everything must be resolvable to an expression
other = Value(other)
if reversed:
return CombinedExpression(other, connector, self)
return CombinedExpression(self, connector, other)
#############
# OPERATORS #
#############
def __neg__(self):
return self._combine(-1, self.MUL, False)
def __add__(self, other):
return self._combine(other, self.ADD, False)
def __sub__(self, other):
return self._combine(other, self.SUB, False)
def __mul__(self, other):
return self._combine(other, self.MUL, False)
def __truediv__(self, other):
return self._combine(other, self.DIV, False)
def __mod__(self, other):
return self._combine(other, self.MOD, False)
def __pow__(self, other):
return self._combine(other, self.POW, False)
def __and__(self, other):
if getattr(self, "conditional", False) and getattr(other, "conditional", False):
return Q(self) & Q(other)
raise NotImplementedError(
"Use .bitand(), .bitor(), and .bitxor() for bitwise logical operations."
)
def bitand(self, other):
return self._combine(other, self.BITAND, False)
def bitleftshift(self, other):
return self._combine(other, self.BITLEFTSHIFT, False)
def bitrightshift(self, other):
return self._combine(other, self.BITRIGHTSHIFT, False)
def __xor__(self, other):
if getattr(self, "conditional", False) and getattr(other, "conditional", False):
return Q(self) ^ Q(other)
raise NotImplementedError(
"Use .bitand(), .bitor(), and .bitxor() for bitwise logical operations."
)
def bitxor(self, other):
return self._combine(other, self.BITXOR, False)
def __or__(self, other):
if getattr(self, "conditional", False) and getattr(other, "conditional", False):
return Q(self) | Q(other)
raise NotImplementedError(
"Use .bitand(), .bitor(), and .bitxor() for bitwise logical operations."
)
def bitor(self, other):
return self._combine(other, self.BITOR, False)
def __radd__(self, other):
return self._combine(other, self.ADD, True)
def __rsub__(self, other):
return self._combine(other, self.SUB, True)
def __rmul__(self, other):
return self._combine(other, self.MUL, True)
def __rtruediv__(self, other):
return self._combine(other, self.DIV, True)
def __rmod__(self, other):
return self._combine(other, self.MOD, True)
def __rpow__(self, other):
return self._combine(other, self.POW, True)
def __rand__(self, other):
raise NotImplementedError(
"Use .bitand(), .bitor(), and .bitxor() for bitwise logical operations."
)
def __ror__(self, other):
raise NotImplementedError(
"Use .bitand(), .bitor(), and .bitxor() for bitwise logical operations."
)
def __rxor__(self, other):
raise NotImplementedError(
"Use .bitand(), .bitor(), and .bitxor() for bitwise logical operations."
)
def __invert__(self):
return NegatedExpression(self)
class OutputFieldIsNoneError(FieldError):
pass
class BaseExpression:
"""Base class for all query expressions."""
empty_result_set_value = NotImplemented
# aggregate specific fields
is_summary = False
# Can the expression be used in a WHERE clause?
filterable = True
# Can the expression be used as a source expression in Window?
window_compatible = False
# Can the expression be used as a database default value?
allowed_default = False
# Can the expression be used during a constraint validation?
constraint_validation_compatible = True
# Does the expression possibly return more than one row?
set_returning = False
# Does the expression allow composite expressions?
allows_composite_expressions = False
def __init__(self, output_field=None):
if output_field is not None:
self.output_field = output_field
def __getstate__(self):
state = self.__dict__.copy()
state.pop("convert_value", None)
return state
def get_db_converters(self, connection):
return (
[]
if self.convert_value is self._convert_value_noop
else [self.convert_value]
) + self.output_field.get_db_converters(connection)
def get_source_expressions(self):
return []
def set_source_expressions(self, exprs):
assert not exprs
def _parse_expressions(self, *expressions):
return [
(
arg
if hasattr(arg, "resolve_expression")
else (F(arg) if isinstance(arg, str) else Value(arg))
)
for arg in expressions
]
def as_sql(self, compiler, connection):
"""
Responsible for returning a (sql, [params]) tuple to be included
in the current query.
Different backends can provide their own implementation, by
providing an `as_{vendor}` method and patching the Expression:
```
def override_as_sql(self, compiler, connection):
# custom logic
return super().as_sql(compiler, connection)
setattr(Expression, 'as_' + connection.vendor, override_as_sql)
```
Arguments:
* compiler: the query compiler responsible for generating the query.
Must have a compile method, returning a (sql, [params]) tuple.
Calling compiler(value) will return a quoted `value`.
* connection: the database connection used for the current query.
Return: (sql, params)
Where `sql` is a string containing ordered sql parameters to be
replaced with the elements of the list `params`.
"""
raise NotImplementedError("Subclasses must implement as_sql()")
@cached_property
def contains_aggregate(self):
return any(
expr and expr.contains_aggregate for expr in self.get_source_expressions()
)
@cached_property
def contains_over_clause(self):
return any(
expr and expr.contains_over_clause for expr in self.get_source_expressions()
)
@cached_property
def contains_column_references(self):
return any(
expr and expr.contains_column_references
for expr in self.get_source_expressions()
)
@cached_property
def contains_subquery(self):
return any(
expr and (getattr(expr, "subquery", False) or expr.contains_subquery)
for expr in self.get_source_expressions()
)
def resolve_expression(
self, query=None, allow_joins=True, reuse=None, summarize=False, for_save=False
):
"""
Provide the chance to do any preprocessing or validation before being
added to the query.
Arguments:
* query: the backend query implementation
* allow_joins: boolean allowing or denying use of joins
in this query
* reuse: a set of reusable joins for multijoins
* summarize: a terminal aggregate clause
* for_save: whether this expression about to be used in a save or
update
Return: an Expression to be added to the query.
"""
c = self.copy()
c.is_summary = summarize
source_expressions = [
(
expr.resolve_expression(query, allow_joins, reuse, summarize, for_save)
if expr is not None
else None
)
for expr in c.get_source_expressions()
]
if not self.allows_composite_expressions and any(
isinstance(expr, ColPairs) for expr in source_expressions
):
raise ValueError(
f"{self.__class__.__name__} expression does not support "
"composite primary keys."
)
c.set_source_expressions(source_expressions)
return c
@property
def conditional(self):
return isinstance(self.output_field, fields.BooleanField)
@property
def field(self):
return self.output_field
@cached_property
def output_field(self):
"""Return the output type of this expressions."""
output_field = self._resolve_output_field()
if output_field is None:
raise OutputFieldIsNoneError(
"Cannot resolve expression type, unknown output_field"
)
return output_field
@property
def _output_field_or_none(self):
"""
Return the output field of this expression, or None if
_resolve_output_field() didn't return an output type.
"""
try:
return self.output_field
except OutputFieldIsNoneError:
return
def _resolve_output_field(self):
"""
Attempt to infer the output type of the expression.
As a guess, if the output fields of all source fields match then simply
infer the same type here.
If a source's output field resolves to None, exclude it from this
check. If all sources are None, then an error is raised higher up the
stack in the output_field property.
"""
# This guess is mostly a bad idea, but there is quite a lot of code
# (especially 3rd party Func subclasses) that depend on it, we'd need a
# deprecation path to fix it.
sources_iter = (
source for source in self.get_source_fields() if source is not None
)
for output_field in sources_iter:
for source in sources_iter:
if not isinstance(output_field, source.__class__):
raise FieldError(
"Expression contains mixed types: %s, %s. You must "
"set output_field."
% (
output_field.__class__.__name__,
source.__class__.__name__,
)
)
return output_field
@staticmethod
def _convert_value_noop(value, expression, connection):
return value
@cached_property
def convert_value(self):
"""
Expressions provide their own converters because users have the option
of manually specifying the output_field which may be a different type
from the one the database returns.
"""
field = self.output_field
internal_type = field.get_internal_type()
if internal_type == "FloatField":
return lambda value, expression, connection: (
None if value is None else float(value)
)
elif internal_type.endswith("IntegerField"):
return lambda value, expression, connection: (
None if value is None else int(value)
)
elif internal_type == "DecimalField":
return lambda value, expression, connection: (
None if value is None else Decimal(value)
)
return self._convert_value_noop
def get_lookup(self, lookup):
return self.output_field.get_lookup(lookup)
def get_transform(self, name):
return self.output_field.get_transform(name)
def relabeled_clone(self, change_map):
clone = self.copy()
clone.set_source_expressions(
[
e.relabeled_clone(change_map) if e is not None else None
for e in self.get_source_expressions()
]
)
return clone
def replace_expressions(self, replacements):
if not replacements:
return self
if replacement := replacements.get(self):
return replacement
if not (source_expressions := self.get_source_expressions()):
return self
clone = self.copy()
clone.set_source_expressions(
[
None if expr is None else expr.replace_expressions(replacements)
for expr in source_expressions
]
)
return clone
def get_refs(self):
refs = set()
for expr in self.get_source_expressions():
if expr is None:
continue
refs |= expr.get_refs()
return refs
def copy(self):
return copy.copy(self)
def prefix_references(self, prefix):
clone = self.copy()
clone.set_source_expressions(
[
(
F(f"{prefix}{expr.name}")
if isinstance(expr, F)
else expr.prefix_references(prefix)
)
for expr in self.get_source_expressions()
]
)
return clone
def get_group_by_cols(self):
if not self.contains_aggregate:
return [self]
cols = []
for source in self.get_source_expressions():
cols.extend(source.get_group_by_cols())
return cols
def get_source_fields(self):
"""Return the underlying field types used by this aggregate."""
return [e._output_field_or_none for e in self.get_source_expressions()]
def asc(self, **kwargs):
return OrderBy(self, **kwargs)
def desc(self, **kwargs):
return OrderBy(self, descending=True, **kwargs)
def reverse_ordering(self):
return self
def flatten(self):
"""
Recursively yield this expression and all subexpressions, in
depth-first order.
"""
yield self
for expr in self.get_source_expressions():
if expr:
if hasattr(expr, "flatten"):
yield from expr.flatten()
else:
yield expr
def select_format(self, compiler, sql, params):
"""
Custom format for select clauses. For example, EXISTS expressions need
to be wrapped in CASE WHEN on Oracle.
"""
if hasattr(self.output_field, "select_format"):
return self.output_field.select_format(compiler, sql, params)
return sql, params
def get_expression_for_validation(self):
# Ignore expressions that cannot be used during a constraint
# validation.
if not getattr(self, "constraint_validation_compatible", True):
try:
(expression,) = self.get_source_expressions()
except ValueError as e:
raise ValueError(
"Expressions with constraint_validation_compatible set to False "
"must have only one source expression."
) from e
else:
return expression
return self
@deconstructible
class Expression(BaseExpression, Combinable):
"""An expression that can be combined with other expressions."""
@classproperty
@functools.lru_cache(maxsize=128)
def _constructor_signature(cls):
return inspect.signature(cls.__init__)
@classmethod
def _identity(cls, value):
if isinstance(value, tuple):
return tuple(map(cls._identity, value))
if isinstance(value, dict):
return tuple((key, cls._identity(val)) for key, val in value.items())
if isinstance(value, fields.Field):
if value.name and value.model:
return value.model._meta.label, value.name
return type(value)
return make_hashable(value)
@cached_property
def identity(self):
args, kwargs = self._constructor_args
signature = self._constructor_signature.bind_partial(self, *args, **kwargs)
signature.apply_defaults()
arguments = iter(signature.arguments.items())
next(arguments)
identity = [self.__class__]
for arg, value in arguments:
# If __init__() makes use of *args or **kwargs captures `value`
# will respectively be a tuple or a dict that must have its
# constituents unpacked (mainly if contain Field instances).
value = self._identity(value)
identity.append((arg, value))
return tuple(identity)
def __eq__(self, other):
if not isinstance(other, Expression):
return NotImplemented
return other.identity == self.identity
def __hash__(self):
return hash(self.identity)
# Type inference for CombinedExpression.output_field.
# Missing items will result in FieldError, by design.
#
# The current approach for NULL is based on lowest common denominator behavior
# i.e. if one of the supported databases is raising an error (rather than
# return NULL) for `val <op> NULL`, then Django raises FieldError.
_connector_combinations = [
# Numeric operations - operands of same type.
# PositiveIntegerField should take precedence over IntegerField (except
# subtraction).
{
connector: [
(
fields.PositiveIntegerField,
fields.PositiveIntegerField,
fields.PositiveIntegerField,
),
]
for connector in (
Combinable.ADD,
Combinable.MUL,
Combinable.DIV,
Combinable.MOD,
Combinable.POW,
)
},
# Other numeric operands.
{
connector: [
(fields.IntegerField, fields.IntegerField, fields.IntegerField),
(fields.FloatField, fields.FloatField, fields.FloatField),
(fields.DecimalField, fields.DecimalField, fields.DecimalField),
]
for connector in (
Combinable.ADD,
Combinable.SUB,
Combinable.MUL,
# Behavior for DIV with integer arguments follows Postgres/SQLite,
# not MySQL/Oracle.
Combinable.DIV,
Combinable.MOD,
Combinable.POW,
)
},
# Numeric operations - operands of different type.
{
connector: [
(fields.IntegerField, fields.DecimalField, fields.DecimalField),
(fields.DecimalField, fields.IntegerField, fields.DecimalField),
(fields.IntegerField, fields.FloatField, fields.FloatField),
(fields.FloatField, fields.IntegerField, fields.FloatField),
]
for connector in (
Combinable.ADD,
Combinable.SUB,
Combinable.MUL,
Combinable.DIV,
Combinable.MOD,
)
},
# Bitwise operators.
{
connector: [
(fields.IntegerField, fields.IntegerField, fields.IntegerField),
]
for connector in (
Combinable.BITAND,
Combinable.BITOR,
Combinable.BITLEFTSHIFT,
Combinable.BITRIGHTSHIFT,
Combinable.BITXOR,
)
},
# Numeric with NULL.
{
connector: list(
chain.from_iterable(
[(field_type, NoneType, field_type), (NoneType, field_type, field_type)]
for field_type in (
fields.IntegerField,
fields.DecimalField,
fields.FloatField,
)
)
)
for connector in (
Combinable.ADD,
Combinable.SUB,
Combinable.MUL,
Combinable.DIV,
Combinable.MOD,
Combinable.POW,
)
},
# Date/DateTimeField/DurationField/TimeField.
{
Combinable.ADD: [
# Date/DateTimeField.
(fields.DateField, fields.DurationField, fields.DateTimeField),
(fields.DateTimeField, fields.DurationField, fields.DateTimeField),
(fields.DurationField, fields.DateField, fields.DateTimeField),
(fields.DurationField, fields.DateTimeField, fields.DateTimeField),
# DurationField.
(fields.DurationField, fields.DurationField, fields.DurationField),
# TimeField.
(fields.TimeField, fields.DurationField, fields.TimeField),
(fields.DurationField, fields.TimeField, fields.TimeField),
],
},
{
Combinable.SUB: [
# Date/DateTimeField.
(fields.DateField, fields.DurationField, fields.DateTimeField),
(fields.DateTimeField, fields.DurationField, fields.DateTimeField),
(fields.DateField, fields.DateField, fields.DurationField),
(fields.DateField, fields.DateTimeField, fields.DurationField),
(fields.DateTimeField, fields.DateField, fields.DurationField),
(fields.DateTimeField, fields.DateTimeField, fields.DurationField),
# DurationField.
(fields.DurationField, fields.DurationField, fields.DurationField),
# TimeField.
(fields.TimeField, fields.DurationField, fields.TimeField),
(fields.TimeField, fields.TimeField, fields.DurationField),
],
},
]
_connector_combinators = defaultdict(list)
def register_combinable_fields(lhs, connector, rhs, result):
"""
Register combinable types:
lhs <connector> rhs -> result
e.g.
register_combinable_fields(
IntegerField, Combinable.ADD, FloatField, FloatField
)
"""
_connector_combinators[connector].append((lhs, rhs, result))
for d in _connector_combinations:
for connector, field_types in d.items():
for lhs, rhs, result in field_types:
register_combinable_fields(lhs, connector, rhs, result)
@functools.lru_cache(maxsize=128)
def _resolve_combined_type(connector, lhs_type, rhs_type):
combinators = _connector_combinators.get(connector, ())
for combinator_lhs_type, combinator_rhs_type, combined_type in combinators:
if issubclass(lhs_type, combinator_lhs_type) and issubclass(
rhs_type, combinator_rhs_type
):
return combined_type
class CombinedExpression(SQLiteNumericMixin, Expression):
def __init__(self, lhs, connector, rhs, output_field=None):
super().__init__(output_field=output_field)
self.connector = connector
self.lhs = lhs
self.rhs = rhs
def __repr__(self):
return "<{}: {}>".format(self.__class__.__name__, self)
def __str__(self):
return "{} {} {}".format(self.lhs, self.connector, self.rhs)
def get_source_expressions(self):
return [self.lhs, self.rhs]
def set_source_expressions(self, exprs):
self.lhs, self.rhs = exprs
def _resolve_output_field(self):
# We avoid using super() here for reasons given in
# Expression._resolve_output_field()
combined_type = _resolve_combined_type(
self.connector,
type(self.lhs._output_field_or_none),
type(self.rhs._output_field_or_none),
)
if combined_type is None:
raise FieldError(
f"Cannot infer type of {self.connector!r} expression involving these "
f"types: {self.lhs.output_field.__class__.__name__}, "
f"{self.rhs.output_field.__class__.__name__}. You must set "
f"output_field."
)
return combined_type()
def as_sql(self, compiler, connection):
expressions = []
expression_params = []
sql, params = compiler.compile(self.lhs)
expressions.append(sql)
expression_params.extend(params)
sql, params = compiler.compile(self.rhs)
expressions.append(sql)
expression_params.extend(params)
# order of precedence
expression_wrapper = "(%s)"
sql = connection.ops.combine_expression(self.connector, expressions)
return expression_wrapper % sql, expression_params
def resolve_expression(
self, query=None, allow_joins=True, reuse=None, summarize=False, for_save=False
):
resolved = super().resolve_expression(
query,
allow_joins,
reuse,
summarize,
for_save,
)
if not isinstance(self, (DurationExpression, TemporalSubtraction)):
try:
lhs_type = resolved.lhs.output_field.get_internal_type()
except (AttributeError, FieldError):
lhs_type = None
try:
rhs_type = resolved.rhs.output_field.get_internal_type()
except (AttributeError, FieldError):
rhs_type = None
if "DurationField" in {lhs_type, rhs_type} and lhs_type != rhs_type:
return DurationExpression(
resolved.lhs, resolved.connector, resolved.rhs
)
datetime_fields = {"DateField", "DateTimeField", "TimeField"}
if (
self.connector == self.SUB
and lhs_type in datetime_fields
and lhs_type == rhs_type
):
return TemporalSubtraction(resolved.lhs, resolved.rhs)
return resolved
@cached_property
def allowed_default(self):
return self.lhs.allowed_default and self.rhs.allowed_default
class DurationExpression(CombinedExpression):
def compile(self, side, compiler, connection):
try:
output = side.output_field
except FieldError:
pass
else:
if output.get_internal_type() == "DurationField":
sql, params = compiler.compile(side)
return connection.ops.format_for_duration_arithmetic(sql), params
return compiler.compile(side)
def as_sql(self, compiler, connection):
if connection.features.has_native_duration_field:
return super().as_sql(compiler, connection)
connection.ops.check_expression_support(self)
expressions = []
expression_params = []
sql, params = self.compile(self.lhs, compiler, connection)
expressions.append(sql)
expression_params.extend(params)
sql, params = self.compile(self.rhs, compiler, connection)
expressions.append(sql)
expression_params.extend(params)
# order of precedence
expression_wrapper = "(%s)"
sql = connection.ops.combine_duration_expression(self.connector, expressions)
return expression_wrapper % sql, expression_params
def as_sqlite(self, compiler, connection, **extra_context):
sql, params = self.as_sql(compiler, connection, **extra_context)
if self.connector in {Combinable.MUL, Combinable.DIV}:
try:
lhs_type = self.lhs.output_field.get_internal_type()
rhs_type = self.rhs.output_field.get_internal_type()
except (AttributeError, FieldError):
pass
else:
allowed_fields = {
"DecimalField",
"DurationField",
"FloatField",
"IntegerField",
}
if lhs_type not in allowed_fields or rhs_type not in allowed_fields:
raise DatabaseError(
f"Invalid arguments for operator {self.connector}."
)
return sql, params
class TemporalSubtraction(CombinedExpression):
output_field = fields.DurationField()
def __init__(self, lhs, rhs):
super().__init__(lhs, self.SUB, rhs)
def as_sql(self, compiler, connection):
connection.ops.check_expression_support(self)
lhs = compiler.compile(self.lhs)
rhs = compiler.compile(self.rhs)
return connection.ops.subtract_temporals(
self.lhs.output_field.get_internal_type(), lhs, rhs
)
@deconstructible(path="django.db.models.F")
class F(Combinable):
"""An object capable of resolving references to existing query objects."""
allowed_default = False
def __init__(self, name):
"""
Arguments:
* name: the name of the field this expression references
"""
self.name = name
def __repr__(self):
return "{}({})".format(self.__class__.__name__, self.name)
def __getitem__(self, subscript):
return Sliced(self, subscript)
def __contains__(self, other):
# Disable old-style iteration protocol inherited from implementing
# __getitem__() to prevent this method from hanging.
raise TypeError(f"argument of type '{self.__class__.__name__}' is not iterable")
def resolve_expression(
self, query=None, allow_joins=True, reuse=None, summarize=False, for_save=False
):
return query.resolve_ref(self.name, allow_joins, reuse, summarize)
def replace_expressions(self, replacements):
if (replacement := replacements.get(self)) is not None:
return replacement
field_name, *transforms = self.name.split(LOOKUP_SEP)
# Avoid unnecessarily looking up replacements with field_name again as
# in the vast majority of cases F instances won't be composed of any
# lookups.
if not transforms:
return self
if (
replacement := replacements.get(F(field_name))
) is None or replacement._output_field_or_none is None:
return self
for transform in transforms:
transform_class = replacement.get_transform(transform)
if transform_class is None:
return self
replacement = transform_class(replacement)
return replacement
def asc(self, **kwargs):
return OrderBy(self, **kwargs)
def desc(self, **kwargs):
return OrderBy(self, descending=True, **kwargs)
def __eq__(self, other):
return self.__class__ == other.__class__ and self.name == other.name
def __hash__(self):
return hash(self.name)
def copy(self):
return copy.copy(self)
class ResolvedOuterRef(F):
"""
An object that contains a reference to an outer query.
In this case, the reference to the outer query has been resolved because
the inner query has been used as a subquery.
"""
contains_aggregate = False
contains_over_clause = False
def as_sql(self, *args, **kwargs):
raise ValueError(
"This queryset contains a reference to an outer query and may "
"only be used in a subquery."
)
def resolve_expression(self, *args, **kwargs):
col = super().resolve_expression(*args, **kwargs)
if col.contains_over_clause:
raise NotSupportedError(
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | true |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.