repo
stringlengths
7
90
file_url
stringlengths
81
315
file_path
stringlengths
4
228
content
stringlengths
0
32.8k
language
stringclasses
1 value
license
stringclasses
7 values
commit_sha
stringlengths
40
40
retrieved_at
stringdate
2026-01-04 14:38:15
2026-01-05 02:33:18
truncated
bool
2 classes
django/django
https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/gis_tests/geoapp/sitemaps.py
tests/gis_tests/geoapp/sitemaps.py
from django.contrib.gis.sitemaps import KMLSitemap, KMZSitemap

from .models import City, Country

# Sitemap registry wired into the geoapp URLconf: one KML and one KMZ
# sitemap, each covering the City and Country geometry models.
sitemaps = {
    "kml": KMLSitemap([City, Country]),
    "kmz": KMZSitemap([City, Country]),
}
python
BSD-3-Clause
3201a895cba335000827b28768a7b7105c81b415
2026-01-04T14:38:15.489092Z
false
django/django
https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/gis_tests/geoapp/test_functions.py
tests/gis_tests/geoapp/test_functions.py
import json import math import re from decimal import Decimal from django.contrib.gis.db.models import GeometryField, PolygonField, functions from django.contrib.gis.geos import ( GEOSGeometry, LineString, MultiLineString, MultiPoint, MultiPolygon, Point, Polygon, fromstr, ) from django.contrib.gis.measure import Area from django.db import NotSupportedError, connection from django.db.models import F, IntegerField, Sum, Value from django.test import TestCase, skipUnlessDBFeature from ..utils import FuncTestMixin, can_save_multipoint from .models import ( City, Country, CountryWebMercator, Feature, ManyPointModel, State, ThreeDimensionalFeature, Track, ) class GISFunctionsTests(FuncTestMixin, TestCase): """ Testing functions from django/contrib/gis/db/models/functions.py. Area/Distance/Length/Perimeter are tested in distapp/tests. Please keep the tests in function's alphabetic order. """ fixtures = ["initial"] def test_asgeojson(self): if not connection.features.has_AsGeoJSON_function: with self.assertRaises(NotSupportedError): list(Country.objects.annotate(json=functions.AsGeoJSON("mpoly"))) return pueblo_json = '{"type":"Point","coordinates":[-104.609252,38.255001]}' houston_json = json.loads( '{"type":"Point","crs":{"type":"name","properties":' '{"name":"EPSG:4326"}},"coordinates":[-95.363151,29.763374]}' ) victoria_json = json.loads( '{"type":"Point",' '"bbox":[-123.30519600,48.46261100,-123.30519600,48.46261100],' '"coordinates":[-123.305196,48.462611]}' ) chicago_json = json.loads( '{"type":"Point","crs":{"type":"name","properties":{"name":"EPSG:4326"}},' '"bbox":[-87.65018,41.85039,-87.65018,41.85039],' '"coordinates":[-87.65018,41.85039]}' ) if "crs" in connection.features.unsupported_geojson_options: del houston_json["crs"] del chicago_json["crs"] if "bbox" in connection.features.unsupported_geojson_options: del chicago_json["bbox"] del victoria_json["bbox"] if "precision" in connection.features.unsupported_geojson_options: chicago_json["coordinates"] = 
[-87.650175, 41.850385] # Precision argument should only be an integer with self.assertRaises(TypeError): City.objects.annotate(geojson=functions.AsGeoJSON("point", precision="foo")) # Reference queries and values. # SELECT ST_AsGeoJson("geoapp_city"."point", 8, 0) # FROM "geoapp_city" WHERE "geoapp_city"."name" = 'Pueblo'; self.assertJSONEqual( pueblo_json, City.objects.annotate(geojson=functions.AsGeoJSON("point")) .get(name="Pueblo") .geojson, ) # SELECT ST_AsGeoJson("geoapp_city"."point", 8, 2) FROM "geoapp_city" # WHERE "geoapp_city"."name" = 'Houston'; # This time we want to include the CRS by using the `crs` keyword. self.assertJSONEqual( City.objects.annotate(json=functions.AsGeoJSON("point", crs=True)) .get(name="Houston") .json, houston_json, ) # SELECT ST_AsGeoJson("geoapp_city"."point", 8, 1) FROM "geoapp_city" # WHERE "geoapp_city"."name" = 'Houston'; # This time we include the bounding box by using the `bbox` keyword. self.assertJSONEqual( City.objects.annotate(geojson=functions.AsGeoJSON("point", bbox=True)) .get(name="Victoria") .geojson, victoria_json, ) # SELECT ST_AsGeoJson("geoapp_city"."point", 5, 3) FROM "geoapp_city" # WHERE "geoapp_city"."name" = 'Chicago'; # Finally, we set every available keyword. # MariaDB doesn't limit the number of decimals in bbox. if connection.ops.mariadb: chicago_json["bbox"] = [-87.650175, 41.850385, -87.650175, 41.850385] try: self.assertJSONEqual( City.objects.annotate( geojson=functions.AsGeoJSON( "point", bbox=True, crs=True, precision=5 ) ) .get(name="Chicago") .geojson, chicago_json, ) except AssertionError: # Give a second chance with different coords rounding. 
chicago_json["coordinates"][1] = 41.85038 self.assertJSONEqual( City.objects.annotate( geojson=functions.AsGeoJSON( "point", bbox=True, crs=True, precision=5 ) ) .get(name="Chicago") .geojson, chicago_json, ) @skipUnlessDBFeature("has_AsGeoJSON_function") def test_asgeojson_option_0(self): p1 = Point(1, 1, srid=4326) p2 = Point(-87.65018, 41.85039, srid=4326) obj = ManyPointModel.objects.create( point1=p1, point2=p2, point3=p2.transform(3857, clone=True), ) self.assertJSONEqual( ManyPointModel.objects.annotate(geojson=functions.AsGeoJSON("point3")) .get(pk=obj.pk) .geojson, # GeoJSON without CRS. json.loads( '{"type":"Point","coordinates":[-9757173.40553877, 5138594.87034608]}' ), ) @skipUnlessDBFeature("has_AsGML_function") def test_asgml(self): # Should throw a TypeError when trying to obtain GML from a # non-geometry field. qs = City.objects.all() with self.assertRaises(TypeError): qs.annotate(gml=functions.AsGML("name")) ptown = City.objects.annotate(gml=functions.AsGML("point", precision=9)).get( name="Pueblo" ) if connection.ops.oracle: # No precision parameter for Oracle :-/ gml_regex = re.compile( r'^<gml:Point srsName="EPSG:4326" ' r'xmlns:gml="http://www.opengis.net/gml">' r'<gml:coordinates decimal="\." cs="," ts=" ">' r"-104.60925\d+,38.25500\d+ " r"</gml:coordinates></gml:Point>" ) else: gml_regex = re.compile( r'^<gml:Point srsName="(urn:ogc:def:crs:)?EPSG:4326"><gml:coordinates>' r"-104\.60925\d+,38\.255001</gml:coordinates></gml:Point>" ) self.assertTrue(gml_regex.match(ptown.gml)) self.assertIn( '<gml:pos srsDimension="2">', City.objects.annotate(gml=functions.AsGML("point", version=3)) .get(name="Pueblo") .gml, ) @skipUnlessDBFeature("has_AsKML_function") def test_askml(self): # Should throw a TypeError when trying to obtain KML from a # non-geometry field. with self.assertRaises(TypeError): City.objects.annotate(kml=functions.AsKML("name")) # Ensuring the KML is as expected. 
ptown = City.objects.annotate(kml=functions.AsKML("point", precision=9)).get( name="Pueblo" ) self.assertEqual( "<Point><coordinates>-104.609252,38.255001</coordinates></Point>", ptown.kml ) @skipUnlessDBFeature("has_AsSVG_function") def test_assvg(self): with self.assertRaises(TypeError): City.objects.annotate(svg=functions.AsSVG("point", precision="foo")) # SELECT AsSVG(geoapp_city.point, 0, 8) FROM geoapp_city # WHERE name = 'Pueblo'; svg1 = 'cx="-104.609252" cy="-38.255001"' # Even though relative, only one point so it's practically the same # except for the 'c' letter prefix on the x,y values. svg2 = svg1.replace("c", "") self.assertEqual( svg1, City.objects.annotate(svg=functions.AsSVG("point")).get(name="Pueblo").svg, ) self.assertEqual( svg2, City.objects.annotate(svg=functions.AsSVG("point", relative=5)) .get(name="Pueblo") .svg, ) @skipUnlessDBFeature("has_AsWKB_function") def test_aswkb(self): wkb = ( City.objects.annotate( wkb=functions.AsWKB(Point(1, 2, srid=4326)), ) .first() .wkb ) # WKB is either XDR or NDR encoded. self.assertIn( bytes(wkb), ( b"\x00\x00\x00\x00\x01?\xf0\x00\x00\x00\x00\x00\x00@\x00\x00" b"\x00\x00\x00\x00\x00", b"\x01\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xf0?\x00\x00" b"\x00\x00\x00\x00\x00@", ), ) @skipUnlessDBFeature("has_AsWKT_function") def test_aswkt(self): wkt = ( City.objects.annotate( wkt=functions.AsWKT(Point(1, 2, srid=4326)), ) .first() .wkt ) self.assertEqual( wkt, "POINT (1.0 2.0)" if connection.ops.oracle else "POINT(1 2)" ) @skipUnlessDBFeature("has_Azimuth_function") def test_azimuth(self): # Returns the azimuth in radians. azimuth_expr = functions.Azimuth(Point(0, 0, srid=4326), Point(1, 1, srid=4326)) self.assertAlmostEqual( City.objects.annotate(azimuth=azimuth_expr).first().azimuth, math.pi / 4, places=2, ) # Returns None if the two points are coincident. 
azimuth_expr = functions.Azimuth(Point(0, 0, srid=4326), Point(0, 0, srid=4326)) self.assertIsNone(City.objects.annotate(azimuth=azimuth_expr).first().azimuth) @skipUnlessDBFeature("has_BoundingCircle_function") def test_bounding_circle(self): def circle_num_points(num_seg): # num_seg is the number of segments per quarter circle. return (4 * num_seg) + 1 if connection.ops.postgis: expected_area = 169 elif connection.ops.spatialite: expected_area = 168 else: # Oracle. expected_area = 171 country = Country.objects.annotate( circle=functions.BoundingCircle("mpoly") ).order_by("name")[0] self.assertAlmostEqual(country.circle.area, expected_area, 0) if connection.ops.postgis: # By default num_seg=48. self.assertEqual(country.circle.num_points, circle_num_points(48)) tests = [12, Value(12, output_field=IntegerField())] for num_seq in tests: with self.subTest(num_seq=num_seq): country = Country.objects.annotate( circle=functions.BoundingCircle("mpoly", num_seg=num_seq), ).order_by("name")[0] if connection.ops.postgis: self.assertGreater(country.circle.area, 168.4, 0) self.assertLess(country.circle.area, 169.5, 0) self.assertEqual(country.circle.num_points, circle_num_points(12)) else: self.assertAlmostEqual(country.circle.area, expected_area, 0) @skipUnlessDBFeature("has_Centroid_function") def test_centroid(self): qs = State.objects.exclude(poly__isnull=True).annotate( centroid=functions.Centroid("poly") ) tol = ( 1.8 if connection.ops.mysql else (0.1 if connection.ops.oracle else 0.00001) ) for state in qs: self.assertTrue(state.poly.centroid.equals_exact(state.centroid, tol)) with self.assertRaisesMessage( TypeError, "'Centroid' takes exactly 1 argument (2 given)" ): State.objects.annotate(centroid=functions.Centroid("poly", "poly")) @skipUnlessDBFeature("has_Difference_function") def test_difference(self): geom = Point(5, 23, srid=4326) qs = Country.objects.annotate(diff=functions.Difference("mpoly", geom)) # Oracle does something screwy with the Texas geometry. 
if connection.ops.oracle: qs = qs.exclude(name="Texas") for c in qs: self.assertTrue(c.mpoly.difference(geom).equals(c.diff)) @skipUnlessDBFeature("has_Difference_function", "has_Transform_function") def test_difference_mixed_srid(self): """Testing with mixed SRID (Country has default 4326).""" geom = Point(556597.4, 2632018.6, srid=3857) # Spherical Mercator qs = Country.objects.annotate(difference=functions.Difference("mpoly", geom)) # Oracle does something screwy with the Texas geometry. if connection.ops.oracle: qs = qs.exclude(name="Texas") for c in qs: self.assertTrue(c.mpoly.difference(geom).equals(c.difference)) @skipUnlessDBFeature("has_Envelope_function") def test_envelope(self): countries = Country.objects.annotate(envelope=functions.Envelope("mpoly")) for country in countries: self.assertTrue(country.envelope.equals(country.mpoly.envelope)) @skipUnlessDBFeature("has_ForcePolygonCW_function") def test_force_polygon_cw(self): rings = ( ((0, 0), (5, 0), (0, 5), (0, 0)), ((1, 1), (1, 3), (3, 1), (1, 1)), ) rhr_rings = ( ((0, 0), (0, 5), (5, 0), (0, 0)), ((1, 1), (3, 1), (1, 3), (1, 1)), ) State.objects.create(name="Foo", poly=Polygon(*rings)) st = State.objects.annotate( force_polygon_cw=functions.ForcePolygonCW("poly") ).get(name="Foo") self.assertEqual(rhr_rings, st.force_polygon_cw.coords) @skipUnlessDBFeature("has_FromWKB_function") def test_fromwkb(self): g = Point(56.811078, 60.608647) pt1, pt2 = City.objects.values_list( functions.FromWKB(Value(g.wkb.tobytes())), functions.FromWKB(Value(g.wkb.tobytes()), srid=4326), )[0] self.assertIs(g.equals_exact(pt1, 0.00001), True) self.assertIsNone(pt1.srid) self.assertEqual(pt2.srid, 4326) @skipUnlessDBFeature("has_FromWKT_function") def test_fromwkt(self): g = Point(56.811078, 60.608647) pt1, pt2 = City.objects.values_list( functions.FromWKT(Value(g.wkt)), functions.FromWKT(Value(g.wkt), srid=4326), )[0] self.assertIs(g.equals_exact(pt1, 0.00001), True) self.assertIsNone(pt1.srid) self.assertEqual(pt2.srid, 
4326) @skipUnlessDBFeature("has_GeoHash_function") def test_geohash(self): # Reference query: # SELECT ST_GeoHash(point) FROM geoapp_city WHERE name='Houston'; # SELECT ST_GeoHash(point, 5) FROM geoapp_city WHERE name='Houston'; ref_hash = "9vk1mfq8jx0c8e0386z6" h1 = City.objects.annotate(geohash=functions.GeoHash("point")).get( name="Houston" ) h2 = City.objects.annotate(geohash=functions.GeoHash("point", precision=5)).get( name="Houston" ) self.assertEqual(ref_hash, h1.geohash[: len(ref_hash)]) self.assertEqual(ref_hash[:5], h2.geohash) @skipUnlessDBFeature("has_GeometryDistance_function") def test_geometry_distance(self): point = Point(-90, 40, srid=4326) qs = City.objects.annotate( distance=functions.GeometryDistance("point", point) ).order_by("distance") distances = ( 2.99091995527296, 5.33507274054713, 9.33852187483721, 9.91769193646233, 11.556465744884, 14.713098433352, 34.3635252198568, 276.987855073372, ) for city, expected_distance in zip(qs, distances): with self.subTest(city=city): self.assertAlmostEqual(city.distance, expected_distance) @skipUnlessDBFeature("has_Intersection_function") def test_intersection(self): geom = Point(5, 23, srid=4326) qs = Country.objects.annotate(inter=functions.Intersection("mpoly", geom)) for c in qs: if connection.features.empty_intersection_returns_none: self.assertIsNone(c.inter) else: self.assertIs(c.inter.empty, True) @skipUnlessDBFeature("supports_empty_geometries", "has_IsEmpty_function") def test_isempty_geometry_empty(self): empty = City.objects.create(name="Nowhere", point=Point(srid=4326)) City.objects.create(name="Somewhere", point=Point(6.825, 47.1, srid=4326)) self.assertSequenceEqual( City.objects.annotate(isempty=functions.IsEmpty("point")).filter( isempty=True ), [empty], ) self.assertSequenceEqual(City.objects.filter(point__isempty=True), [empty]) @skipUnlessDBFeature("has_IsEmpty_function") def test_isempty_geometry_null(self): nowhere = State.objects.create(name="Nowhere", poly=None) qs = 
State.objects.annotate(isempty=functions.IsEmpty("poly")) self.assertSequenceEqual(qs.filter(isempty=None), [nowhere]) self.assertSequenceEqual( qs.filter(isempty=False).order_by("name").values_list("name", flat=True), ["Colorado", "Kansas"], ) self.assertSequenceEqual(qs.filter(isempty=True), []) self.assertSequenceEqual(State.objects.filter(poly__isempty=True), []) @skipUnlessDBFeature("has_IsValid_function") def test_isvalid(self): valid_geom = fromstr("POLYGON((0 0, 0 1, 1 1, 1 0, 0 0))") invalid_geom = fromstr("POLYGON((0 0, 0 1, 1 1, 1 0, 1 1, 1 0, 0 0))") State.objects.create(name="valid", poly=valid_geom) State.objects.create(name="invalid", poly=invalid_geom) valid = ( State.objects.filter(name="valid") .annotate(isvalid=functions.IsValid("poly")) .first() ) invalid = ( State.objects.filter(name="invalid") .annotate(isvalid=functions.IsValid("poly")) .first() ) self.assertIs(valid.isvalid, True) self.assertIs(invalid.isvalid, False) @skipUnlessDBFeature("has_Area_function") def test_area_with_regular_aggregate(self): # Create projected country objects, for this test to work on all # backends. for c in Country.objects.all(): CountryWebMercator.objects.create( name=c.name, mpoly=c.mpoly.transform(3857, clone=True) ) # Test in projected coordinate system qs = CountryWebMercator.objects.annotate(area_sum=Sum(functions.Area("mpoly"))) # Some backends (e.g. Oracle) cannot group by multipolygon values, so # defer such fields in the aggregation query. for c in qs.defer("mpoly"): result = c.area_sum # If the result is a measure object, get value. if isinstance(result, Area): result = result.sq_m self.assertAlmostEqual((result - c.mpoly.area) / c.mpoly.area, 0) @skipUnlessDBFeature("has_Area_function") def test_area_lookups(self): # Create projected countries so the test works on all backends. 
CountryWebMercator.objects.bulk_create( CountryWebMercator(name=c.name, mpoly=c.mpoly.transform(3857, clone=True)) for c in Country.objects.all() ) qs = CountryWebMercator.objects.annotate(area=functions.Area("mpoly")) self.assertEqual( qs.get(area__lt=Area(sq_km=500000)), CountryWebMercator.objects.get(name="New Zealand"), ) with self.assertRaisesMessage( ValueError, "AreaField only accepts Area measurement objects." ): qs.get(area__lt=500000) @skipUnlessDBFeature("has_ClosestPoint_function") def test_closest_point(self): qs = Country.objects.annotate( closest_point=functions.ClosestPoint("mpoly", functions.Centroid("mpoly")) ) for country in qs: self.assertIsInstance(country.closest_point, Point) self.assertEqual( country.mpoly.intersection(country.closest_point), country.closest_point, ) @skipUnlessDBFeature("has_LineLocatePoint_function") def test_line_locate_point(self): pos_expr = functions.LineLocatePoint( LineString((0, 0), (0, 3), srid=4326), Point(0, 1, srid=4326) ) self.assertAlmostEqual( State.objects.annotate(pos=pos_expr).first().pos, 0.3333333 ) @skipUnlessDBFeature("has_MakeValid_function") def test_make_valid(self): invalid_geom = fromstr("POLYGON((0 0, 0 1, 1 1, 1 0, 1 1, 1 0, 0 0))") State.objects.create(name="invalid", poly=invalid_geom) invalid = ( State.objects.filter(name="invalid") .annotate(repaired=functions.MakeValid("poly")) .first() ) self.assertIs(invalid.repaired.valid, True) self.assertTrue( invalid.repaired.equals( fromstr("POLYGON((0 0, 0 1, 1 1, 1 0, 0 0))", srid=invalid.poly.srid) ) ) @skipUnlessDBFeature("has_MakeValid_function") def test_make_valid_multipolygon(self): invalid_geom = fromstr( "POLYGON((0 0, 0 1 , 1 1 , 1 0, 0 0), (10 0, 10 1, 11 1, 11 0, 10 0))" ) State.objects.create(name="invalid", poly=invalid_geom) invalid = ( State.objects.filter(name="invalid") .annotate( repaired=functions.MakeValid("poly"), ) .get() ) self.assertIs(invalid.repaired.valid, True) self.assertTrue( invalid.repaired.equals( fromstr( 
"MULTIPOLYGON (((0 0, 0 1, 1 1, 1 0, 0 0)), " "((10 0, 10 1, 11 1, 11 0, 10 0)))", srid=invalid.poly.srid, ) ) ) self.assertEqual(len(invalid.repaired), 2) @skipUnlessDBFeature("has_MakeValid_function") def test_make_valid_output_field(self): # output_field is GeometryField instance because different geometry # types can be returned. output_field = functions.MakeValid( Value(Polygon(), PolygonField(srid=42)), ).output_field self.assertIs(output_field.__class__, GeometryField) self.assertEqual(output_field.srid, 42) @skipUnlessDBFeature("has_MemSize_function") def test_memsize(self): ptown = City.objects.annotate(size=functions.MemSize("point")).get( name="Pueblo" ) # Exact value depends on database and version. self.assertTrue(20 <= ptown.size <= 105) @skipUnlessDBFeature("has_NumGeometries_function") def test_num_geom(self): # Both 'countries' only have two geometries. for c in Country.objects.annotate(num_geom=functions.NumGeometries("mpoly")): self.assertEqual(2, c.num_geom) qs = City.objects.filter(point__isnull=False).annotate( num_geom=functions.NumGeometries("point") ) for city in qs: # The results for the number of geometries on non-collections # depends on the database. 
if connection.ops.mysql or connection.ops.mariadb: self.assertIsNone(city.num_geom) else: self.assertEqual(1, city.num_geom) @skipUnlessDBFeature("has_NumDimensions_function") def test_num_dimensions(self): for c in Country.objects.annotate(num_dims=functions.NumDimensions("mpoly")): self.assertEqual(2, c.num_dims) ThreeDimensionalFeature.objects.create( name="London", geom=Point(-0.126418, 51.500832, 0) ) qs = ThreeDimensionalFeature.objects.annotate( num_dims=functions.NumDimensions("geom") ) self.assertEqual(qs[0].num_dims, 3) qs = ThreeDimensionalFeature.objects.annotate( num_dims=F("geom__num_dimensions") ) self.assertEqual(qs[0].num_dims, 3) msg = "'NumDimensions' takes exactly 1 argument (2 given)" with self.assertRaisesMessage(TypeError, msg): Country.objects.annotate(num_dims=functions.NumDimensions("point", "error")) @skipUnlessDBFeature("has_NumPoints_function") def test_num_points(self): coords = [(-95.363151, 29.763374), (-95.448601, 29.713803)] Track.objects.create(name="Foo", line=LineString(coords)) qs = Track.objects.annotate(num_points=functions.NumPoints("line")) self.assertEqual(qs.first().num_points, 2) mpoly_qs = Country.objects.annotate(num_points=functions.NumPoints("mpoly")) if not connection.features.supports_num_points_poly: for c in mpoly_qs: self.assertIsNone(c.num_points) return for c in mpoly_qs: self.assertEqual(c.mpoly.num_points, c.num_points) for c in City.objects.annotate(num_points=functions.NumPoints("point")): self.assertEqual(c.num_points, 1) @skipUnlessDBFeature("has_PointOnSurface_function") def test_point_on_surface(self): qs = Country.objects.annotate( point_on_surface=functions.PointOnSurface("mpoly") ) for country in qs: self.assertTrue(country.mpoly.intersection(country.point_on_surface)) @skipUnlessDBFeature("has_Reverse_function") def test_reverse_geom(self): coords = [(-95.363151, 29.763374), (-95.448601, 29.713803)] Track.objects.create(name="Foo", line=LineString(coords)) track = 
Track.objects.annotate(reverse_geom=functions.Reverse("line")).get( name="Foo" ) coords.reverse() self.assertEqual(tuple(coords), track.reverse_geom.coords) @skipUnlessDBFeature("has_Rotate_function") def test_rotate(self): angle = math.pi tests = [ {"angle": angle}, {"angle": angle, "origin": Point(0, 0)}, {"angle": angle, "origin": Point(1, 1)}, ] for params in tests: with self.subTest(params=params): qs = Country.objects.annotate( rotated=functions.Rotate("mpoly", **params) ) for country in qs: for p1, p2 in zip(country.mpoly, country.rotated): for r1, r2 in zip(p1, p2): for c1, c2 in zip(r1.coords, r2.coords): origin = params.get("origin") if origin is None: origin = Point(0, 0) self.assertAlmostEqual(-c1[0] + 2 * origin.x, c2[0], 5) self.assertAlmostEqual(-c1[1] + 2 * origin.y, c2[1], 5) @skipUnlessDBFeature("has_Rotate_function") def test_rotate_invalid_params(self): angle = math.pi bad_params_tests = [ {"angle": angle, "origin": 0}, {"angle": angle, "origin": [0, 0]}, ] msg = "origin argument must be a Point" for params in bad_params_tests: with self.subTest(params=params), self.assertRaisesMessage(TypeError, msg): functions.Rotate("mpoly", **params) @skipUnlessDBFeature("has_Scale_function") def test_scale(self): xfac, yfac = 2, 3 tol = 5 # The low precision tolerance is for SpatiaLite qs = Country.objects.annotate(scaled=functions.Scale("mpoly", xfac, yfac)) for country in qs: for p1, p2 in zip(country.mpoly, country.scaled): for r1, r2 in zip(p1, p2): for c1, c2 in zip(r1.coords, r2.coords): self.assertAlmostEqual(c1[0] * xfac, c2[0], tol) self.assertAlmostEqual(c1[1] * yfac, c2[1], tol) # Test float/Decimal values qs = Country.objects.annotate( scaled=functions.Scale("mpoly", 1.5, Decimal("2.5")) ) self.assertGreater(qs[0].scaled.area, qs[0].mpoly.area) @skipUnlessDBFeature("has_SnapToGrid_function") def test_snap_to_grid(self): # Let's try and break snap_to_grid() with bad combinations of # arguments. 
for bad_args in ((), range(3), range(5)): with self.assertRaises(ValueError): Country.objects.annotate(snap=functions.SnapToGrid("mpoly", *bad_args)) for bad_args in (("1.0",), (1.0, None), tuple(map(str, range(4)))): with self.assertRaises(TypeError): Country.objects.annotate(snap=functions.SnapToGrid("mpoly", *bad_args)) # Boundary for San Marino, courtesy of Bjorn Sandvik of # thematicmapping.org from the world borders dataset he provides. wkt = ( "MULTIPOLYGON(((12.41580 43.95795,12.45055 43.97972,12.45389 43.98167," "12.46250 43.98472,12.47167 43.98694,12.49278 43.98917," "12.50555 43.98861,12.51000 43.98694,12.51028 43.98277," "12.51167 43.94333,12.51056 43.93916,12.49639 43.92333," "12.49500 43.91472,12.48778 43.90583,12.47444 43.89722," "12.46472 43.89555,12.45917 43.89611,12.41639 43.90472," "12.41222 43.90610,12.40782 43.91366,12.40389 43.92667," "12.40500 43.94833,12.40889 43.95499,12.41580 43.95795)))" ) Country.objects.create(name="San Marino", mpoly=fromstr(wkt)) # Because floating-point arithmetic isn't exact, we set a tolerance # to pass into GEOS `equals_exact`. 
tol = 0.000000001 # SELECT AsText(ST_SnapToGrid("geoapp_country"."mpoly", 0.1)) # FROM "geoapp_country" # WHERE "geoapp_country"."name" = 'San Marino'; ref = fromstr("MULTIPOLYGON(((12.4 44,12.5 44,12.5 43.9,12.4 43.9,12.4 44)))") self.assertTrue( ref.equals_exact( Country.objects.annotate(snap=functions.SnapToGrid("mpoly", 0.1)) .get(name="San Marino") .snap, tol, ) ) # SELECT AsText(ST_SnapToGrid("geoapp_country"."mpoly", 0.05, 0.23)) # FROM "geoapp_country" # WHERE "geoapp_country"."name" = 'San Marino'; ref = fromstr( "MULTIPOLYGON(((12.4 43.93,12.45 43.93,12.5 43.93,12.45 43.93,12.4 43.93)))" ) self.assertTrue( ref.equals_exact( Country.objects.annotate(snap=functions.SnapToGrid("mpoly", 0.05, 0.23)) .get(name="San Marino") .snap, tol, ) ) # SELECT AsText( # ST_SnapToGrid("geoapp_country"."mpoly", 0.5, 0.17, 0.05, 0.23)) # FROM "geoapp_country" # WHERE "geoapp_country"."name" = 'San Marino'; ref = fromstr( "MULTIPOLYGON(((12.4 43.87,12.45 43.87,12.45 44.1,12.5 44.1,12.5 43.87," "12.45 43.87,12.4 43.87)))" ) self.assertTrue( ref.equals_exact( Country.objects.annotate( snap=functions.SnapToGrid("mpoly", 0.05, 0.23, 0.5, 0.17) ) .get(name="San Marino") .snap, tol, ) ) @skipUnlessDBFeature("has_SymDifference_function") def test_sym_difference(self): geom = Point(5, 23, srid=4326) qs = Country.objects.annotate( sym_difference=functions.SymDifference("mpoly", geom) ) # Oracle does something screwy with the Texas geometry. if connection.ops.oracle: qs = qs.exclude(name="Texas") for country in qs: self.assertTrue( country.mpoly.sym_difference(geom).equals(country.sym_difference) ) @skipUnlessDBFeature("has_Transform_function") def test_transform(self): # Pre-transformed points for Houston and Pueblo. ptown = fromstr("POINT(992363.390841912 481455.395105533)", srid=2774) # Asserting the result of the transform operation with the values in # the pre-transformed points. h = City.objects.annotate(pt=functions.Transform("point", ptown.srid)).get( name="Pueblo" )
python
BSD-3-Clause
3201a895cba335000827b28768a7b7105c81b415
2026-01-04T14:38:15.489092Z
true
django/django
https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/gis_tests/geoapp/models.py
tests/gis_tests/geoapp/models.py
# Geographic test models for the geoapp test suite.
from django.contrib.gis.db import models

from ..utils import gisfield_may_be_null


class NamedModel(models.Model):
    name = models.CharField(max_length=30)

    class Meta:
        abstract = True

    def __str__(self):
        return self.name


class Country(NamedModel):
    mpoly = models.MultiPolygonField()  # SRID, by default, is 4326


class CountryWebMercator(NamedModel):
    mpoly = models.MultiPolygonField(srid=3857)


class City(NamedModel):
    point = models.PointField()

    class Meta:
        app_label = "geoapp"


# This is an inherited model from City
class PennsylvaniaCity(City):
    county = models.CharField(max_length=30)
    founded = models.DateTimeField(null=True)

    class Meta:
        app_label = "geoapp"


class State(NamedModel):
    poly = models.PolygonField(
        null=gisfield_may_be_null
    )  # Allowing NULL geometries here.

    class Meta:
        app_label = "geoapp"


class Track(NamedModel):
    line = models.LineStringField()


class MultiFields(NamedModel):
    city = models.ForeignKey(City, models.CASCADE)
    point = models.PointField()
    poly = models.PolygonField()


class UniqueTogetherModel(models.Model):
    city = models.CharField(max_length=30)
    point = models.PointField()

    class Meta:
        unique_together = ("city", "point")
        required_db_features = ["supports_geometry_field_unique_index"]


class Truth(models.Model):
    val = models.BooleanField(default=False)


class Feature(NamedModel):
    geom = models.GeometryField()


class ThreeDimensionalFeature(NamedModel):
    geom = models.GeometryField(dim=3)

    class Meta:
        required_db_features = {"supports_3d_storage"}


class MinusOneSRID(models.Model):
    geom = models.PointField(srid=-1)  # Minus one SRID.


class NonConcreteField(models.IntegerField):
    # A field with no database column: db_type() of None tells Django not
    # to create a column for it.
    def db_type(self, connection):
        return None

    def get_attname_column(self):
        attname, column = super().get_attname_column()
        return attname, None


class NonConcreteModel(NamedModel):
    non_concrete = NonConcreteField()
    point = models.PointField(geography=True)


class ManyPointModel(NamedModel):
    point1 = models.PointField()
    point2 = models.PointField()
    point3 = models.PointField(srid=3857)


class Points(models.Model):
    geom = models.MultiPointField()


class Lines(models.Model):
    geom = models.MultiLineStringField()


class GeometryCollectionModel(models.Model):
    geom = models.GeometryCollectionField()
python
BSD-3-Clause
3201a895cba335000827b28768a7b7105c81b415
2026-01-04T14:38:15.489092Z
false
django/django
https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/gis_tests/geoapp/test_feeds.py
tests/gis_tests/geoapp/test_feeds.py
from xml.dom import minidom from django.conf import settings from django.contrib.sites.models import Site from django.test import TestCase, modify_settings, override_settings from .models import City @modify_settings(INSTALLED_APPS={"append": "django.contrib.sites"}) @override_settings(ROOT_URLCONF="gis_tests.geoapp.urls") class GeoFeedTest(TestCase): fixtures = ["initial"] @classmethod def setUpTestData(cls): Site(id=settings.SITE_ID, domain="example.com", name="example.com").save() def assertChildNodes(self, elem, expected): "Taken from syndication/tests.py." actual = {n.nodeName for n in elem.childNodes} expected = set(expected) self.assertEqual(actual, expected) def test_geofeed_rss(self): "Tests geographic feeds using GeoRSS over RSSv2." # Uses `GEOSGeometry` in `item_geometry` doc1 = minidom.parseString(self.client.get("/feeds/rss1/").content) # Uses a 2-tuple in `item_geometry` doc2 = minidom.parseString(self.client.get("/feeds/rss2/").content) feed1, feed2 = doc1.firstChild, doc2.firstChild # Making sure the box got added to the second GeoRSS feed. self.assertChildNodes( feed2.getElementsByTagName("channel")[0], [ "title", "link", "description", "language", "lastBuildDate", "item", "georss:box", "atom:link", ], ) # Incrementing through the feeds. for feed in [feed1, feed2]: # Ensuring the georss namespace was added to the <rss> element. self.assertEqual( feed.getAttribute("xmlns:georss"), "http://www.georss.org/georss" ) chan = feed.getElementsByTagName("channel")[0] items = chan.getElementsByTagName("item") self.assertEqual(len(items), City.objects.count()) # Ensuring the georss element was added to each item in the feed. for item in items: self.assertChildNodes( item, ["title", "link", "description", "guid", "georss:point"] ) def test_geofeed_atom(self): "Testing geographic feeds using GeoRSS over Atom." 
doc1 = minidom.parseString(self.client.get("/feeds/atom1/").content) doc2 = minidom.parseString(self.client.get("/feeds/atom2/").content) feed1, feed2 = doc1.firstChild, doc2.firstChild # Making sure the box got added to the second GeoRSS feed. self.assertChildNodes( feed2, ["title", "link", "id", "updated", "entry", "georss:box"] ) for feed in [feed1, feed2]: # Ensuring the georsss namespace was added to the <feed> element. self.assertEqual( feed.getAttribute("xmlns:georss"), "http://www.georss.org/georss" ) entries = feed.getElementsByTagName("entry") self.assertEqual(len(entries), City.objects.count()) # Ensuring the georss element was added to each entry in the feed. for entry in entries: self.assertChildNodes( entry, ["title", "link", "id", "summary", "georss:point"] ) def test_geofeed_w3c(self): "Testing geographic feeds using W3C Geo." doc = minidom.parseString(self.client.get("/feeds/w3cgeo1/").content) feed = doc.firstChild # Ensuring the geo namespace was added to the <feed> element. self.assertEqual( feed.getAttribute("xmlns:geo"), "http://www.w3.org/2003/01/geo/wgs84_pos#" ) chan = feed.getElementsByTagName("channel")[0] items = chan.getElementsByTagName("item") self.assertEqual(len(items), City.objects.count()) # Ensuring the geo:lat and geo:lon element was added to each item in # the feed. for item in items: self.assertChildNodes( item, ["title", "link", "description", "guid", "geo:lat", "geo:lon"] ) # Boxes and Polygons aren't allowed in W3C Geo feeds. with self.assertRaises(ValueError): # Box in <channel> self.client.get("/feeds/w3cgeo2/") with self.assertRaises(ValueError): # Polygons in <entry> self.client.get("/feeds/w3cgeo3/")
python
BSD-3-Clause
3201a895cba335000827b28768a7b7105c81b415
2026-01-04T14:38:15.489092Z
false
django/django
https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/gis_tests/geoapp/test_sitemaps.py
tests/gis_tests/geoapp/test_sitemaps.py
import zipfile from io import BytesIO from xml.dom import minidom from django.conf import settings from django.contrib.sites.models import Site from django.test import TestCase, modify_settings, override_settings from .models import City, Country @modify_settings( INSTALLED_APPS={"append": ["django.contrib.sites", "django.contrib.sitemaps"]} ) @override_settings(ROOT_URLCONF="gis_tests.geoapp.urls") class GeoSitemapTest(TestCase): @classmethod def setUpTestData(cls): Site(id=settings.SITE_ID, domain="example.com", name="example.com").save() def assertChildNodes(self, elem, expected): "Taken from syndication/tests.py." actual = {n.nodeName for n in elem.childNodes} expected = set(expected) self.assertEqual(actual, expected) def test_geositemap_kml(self): "Tests KML/KMZ geographic sitemaps." for kml_type in ("kml", "kmz"): doc = minidom.parseString( self.client.get("/sitemaps/%s.xml" % kml_type).content ) # Ensuring the right sitemaps namespace is present. urlset = doc.firstChild self.assertEqual( urlset.getAttribute("xmlns"), "http://www.sitemaps.org/schemas/sitemap/0.9", ) urls = urlset.getElementsByTagName("url") self.assertEqual(2, len(urls)) # Should only be 2 sitemaps. for url in urls: self.assertChildNodes(url, ["loc"]) # Getting the relative URL since we don't have a real site. kml_url = ( url.getElementsByTagName("loc")[0] .childNodes[0] .data.split("http://example.com")[1] ) if kml_type == "kml": kml_doc = minidom.parseString(self.client.get(kml_url).content) elif kml_type == "kmz": # Have to decompress KMZ before parsing. buf = BytesIO(self.client.get(kml_url).content) with zipfile.ZipFile(buf) as zf: self.assertEqual(1, len(zf.filelist)) self.assertEqual("doc.kml", zf.filelist[0].filename) kml_doc = minidom.parseString(zf.read("doc.kml")) # Ensuring the correct number of placemarks are in the KML doc. 
if "city" in kml_url: model = City elif "country" in kml_url: model = Country self.assertEqual( model.objects.count(), len(kml_doc.getElementsByTagName("Placemark")), )
python
BSD-3-Clause
3201a895cba335000827b28768a7b7105c81b415
2026-01-04T14:38:15.489092Z
false
django/django
https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/gis_tests/geoapp/__init__.py
tests/gis_tests/geoapp/__init__.py
python
BSD-3-Clause
3201a895cba335000827b28768a7b7105c81b415
2026-01-04T14:38:15.489092Z
false
django/django
https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/gis_tests/geoapp/test_indexes.py
tests/gis_tests/geoapp/test_indexes.py
from django.contrib.gis.db import models from django.db import connection from django.db.models import Index from django.test import TransactionTestCase from django.test.utils import isolate_apps from .models import City class SchemaIndexesTests(TransactionTestCase): available_apps = [] models = [City] def get_indexes(self, table): with connection.cursor() as cursor: constraints = connection.introspection.get_constraints(cursor, table) return { name: constraint["columns"] for name, constraint in constraints.items() if constraint["index"] } def has_spatial_indexes(self, table): if connection.ops.mysql: with connection.cursor() as cursor: return connection.introspection.supports_spatial_index(cursor, table) elif connection.ops.oracle: # Spatial indexes in Meta.indexes are not supported by the Oracle # backend (see #31252). return False return True def test_using_sql(self): if not connection.ops.postgis: self.skipTest("This is a PostGIS-specific test.") index = Index(fields=["point"]) editor = connection.schema_editor() self.assertIn( "%s USING " % editor.quote_name(City._meta.db_table), str(index.create_sql(City, editor)), ) @isolate_apps("gis_tests.geoapp") def test_namespaced_db_table(self): if not connection.ops.postgis: self.skipTest("PostGIS-specific test.") class SchemaCity(models.Model): point = models.PointField() class Meta: app_label = "geoapp" db_table = 'django_schema"."geoapp_schema_city' index = Index(fields=["point"]) editor = connection.schema_editor() create_index_sql = str(index.create_sql(SchemaCity, editor)) self.assertIn( "%s USING " % editor.quote_name(SchemaCity._meta.db_table), create_index_sql, ) self.assertIn( 'CREATE INDEX "geoapp_schema_city_point_9ed70651_id" ', create_index_sql, ) def test_index_name(self): if not self.has_spatial_indexes(City._meta.db_table): self.skipTest("Spatial indexes in Meta.indexes are not supported.") index_name = "custom_point_index_name" index = Index(fields=["point"], name=index_name) with 
connection.schema_editor() as editor: editor.add_index(City, index) indexes = self.get_indexes(City._meta.db_table) self.assertIn(index_name, indexes) self.assertEqual(indexes[index_name], ["point"]) editor.remove_index(City, index)
python
BSD-3-Clause
3201a895cba335000827b28768a7b7105c81b415
2026-01-04T14:38:15.489092Z
false
django/django
https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/gis_tests/geoapp/tests.py
tests/gis_tests/geoapp/tests.py
from io import StringIO from unittest import skipIf from django.contrib.gis import gdal from django.contrib.gis.db.models import Extent, MakeLine, Union, functions from django.contrib.gis.geos import ( GeometryCollection, GEOSGeometry, LinearRing, LineString, MultiLineString, MultiPoint, MultiPolygon, Point, Polygon, fromstr, ) from django.core.files.temp import NamedTemporaryFile from django.core.management import call_command from django.db import DatabaseError, NotSupportedError, connection from django.db.models import F, OuterRef, Subquery from django.test import TestCase, skipUnlessDBFeature from django.test.utils import CaptureQueriesContext from ..utils import cannot_save_multipoint, skipUnlessGISLookup from .models import ( City, Country, Feature, GeometryCollectionModel, Lines, MinusOneSRID, MultiFields, NonConcreteModel, PennsylvaniaCity, Points, State, ThreeDimensionalFeature, Track, ) class GeoModelTest(TestCase): fixtures = ["initial"] def test_fixtures(self): "Testing geographic model initialization from fixtures." # Ensuring that data was loaded from initial data fixtures. self.assertEqual(2, Country.objects.count()) self.assertEqual(8, City.objects.count()) self.assertEqual(2, State.objects.count()) def test_proxy(self): "Testing Lazy-Geometry support (using the GeometryProxy)." # Testing on a Point pnt = Point(0, 0) nullcity = City(name="NullCity", point=pnt) nullcity.save() # Making sure TypeError is thrown when trying to set with an # incompatible type. for bad in [5, 2.0, LineString((0, 0), (1, 1))]: with self.assertRaisesMessage(TypeError, "Cannot set"): nullcity.point = bad # Now setting with a compatible GEOS Geometry, saving, and ensuring # the save took, notice no SRID is explicitly set. new = Point(5, 23) nullcity.point = new # Ensuring that the SRID is automatically set to that of the # field after assignment, but before saving. 
self.assertEqual(4326, nullcity.point.srid) nullcity.save() # Ensuring the point was saved correctly after saving self.assertEqual(new, City.objects.get(name="NullCity").point) # Setting the X and Y of the Point nullcity.point.x = 23 nullcity.point.y = 5 # Checking assignments pre & post-save. self.assertNotEqual( Point(23, 5, srid=4326), City.objects.get(name="NullCity").point ) nullcity.save() self.assertEqual( Point(23, 5, srid=4326), City.objects.get(name="NullCity").point ) nullcity.delete() # Testing on a Polygon shell = LinearRing((0, 0), (0, 90), (100, 90), (100, 0), (0, 0)) inner = LinearRing((40, 40), (40, 60), (60, 60), (60, 40), (40, 40)) # Creating a State object using a built Polygon ply = Polygon(shell, inner) nullstate = State(name="NullState", poly=ply) self.assertEqual(4326, nullstate.poly.srid) # SRID auto-set from None nullstate.save() ns = State.objects.get(name="NullState") self.assertEqual(connection.ops.Adapter._fix_polygon(ply), ns.poly) # Testing the `ogr` and `srs` lazy-geometry properties. self.assertIsInstance(ns.poly.ogr, gdal.OGRGeometry) self.assertEqual(ns.poly.wkb, ns.poly.ogr.wkb) self.assertIsInstance(ns.poly.srs, gdal.SpatialReference) self.assertEqual("WGS 84", ns.poly.srs.name) # Changing the interior ring on the poly attribute. new_inner = LinearRing((30, 30), (30, 70), (70, 70), (70, 30), (30, 30)) ns.poly[1] = new_inner ply[1] = new_inner self.assertEqual(4326, ns.poly.srid) ns.save() self.assertEqual( connection.ops.Adapter._fix_polygon(ply), State.objects.get(name="NullState").poly, ) ns.delete() @skipUnlessDBFeature("supports_transform") def test_lookup_insert_transform(self): "Testing automatic transform for lookups and inserts." 
# San Antonio in 'WGS84' (SRID 4326) sa_4326 = "POINT (-98.493183 29.424170)" wgs_pnt = fromstr(sa_4326, srid=4326) # Our reference point in WGS84 # San Antonio in 'WGS 84 / Pseudo-Mercator' (SRID 3857) other_srid_pnt = wgs_pnt.transform(3857, clone=True) # Constructing & querying with a point from a different SRID. Oracle # `SDO_OVERLAPBDYINTERSECT` operates differently from # `ST_Intersects`, so contains is used instead. if connection.ops.oracle: tx = Country.objects.get(mpoly__contains=other_srid_pnt) else: tx = Country.objects.get(mpoly__intersects=other_srid_pnt) self.assertEqual("Texas", tx.name) # Creating San Antonio. Remember the Alamo. sa = City.objects.create(name="San Antonio", point=other_srid_pnt) # Now verifying that San Antonio was transformed correctly sa = City.objects.get(name="San Antonio") self.assertAlmostEqual(wgs_pnt.x, sa.point.x, 6) self.assertAlmostEqual(wgs_pnt.y, sa.point.y, 6) # If the GeometryField SRID is -1, then we shouldn't perform any # transformation if the SRID of the input geometry is different. m1 = MinusOneSRID(geom=Point(17, 23, srid=4326)) m1.save() self.assertEqual(-1, m1.geom.srid) def test_createnull(self): "Testing creating a model instance and the geometry being None" c = City() self.assertIsNone(c.point) def test_geometryfield(self): "Testing the general GeometryField." 
Feature(name="Point", geom=Point(1, 1)).save() Feature(name="LineString", geom=LineString((0, 0), (1, 1), (5, 5))).save() Feature( name="Polygon", geom=Polygon(LinearRing((0, 0), (0, 5), (5, 5), (5, 0), (0, 0))), ).save() Feature( name="GeometryCollection", geom=GeometryCollection( Point(2, 2), LineString((0, 0), (2, 2)), Polygon(LinearRing((0, 0), (0, 5), (5, 5), (5, 0), (0, 0))), ), ).save() f_1 = Feature.objects.get(name="Point") self.assertIsInstance(f_1.geom, Point) self.assertEqual((1.0, 1.0), f_1.geom.tuple) f_2 = Feature.objects.get(name="LineString") self.assertIsInstance(f_2.geom, LineString) self.assertEqual(((0.0, 0.0), (1.0, 1.0), (5.0, 5.0)), f_2.geom.tuple) f_3 = Feature.objects.get(name="Polygon") self.assertIsInstance(f_3.geom, Polygon) f_4 = Feature.objects.get(name="GeometryCollection") self.assertIsInstance(f_4.geom, GeometryCollection) self.assertEqual(f_3.geom, f_4.geom[2]) @skipUnlessDBFeature("supports_transform") def test_inherited_geofields(self): "Database functions on inherited Geometry fields." # Creating a Pennsylvanian city. PennsylvaniaCity.objects.create( name="Mansfield", county="Tioga", point="POINT(-77.071445 41.823881)" ) # All transformation SQL will need to be performed on the # _parent_ table. qs = PennsylvaniaCity.objects.annotate( new_point=functions.Transform("point", srid=32128) ) self.assertEqual(1, qs.count()) for pc in qs: self.assertEqual(32128, pc.new_point.srid) def test_raw_sql_query(self): "Testing raw SQL query." cities1 = City.objects.all() point_select = connection.ops.select % "point" cities2 = list( City.objects.raw( "select id, name, %s as point from geoapp_city" % point_select ) ) self.assertEqual(len(cities1), len(cities2)) with self.assertNumQueries(0): # Ensure point isn't deferred. 
self.assertIsInstance(cities2[0].point, Point) def test_gis_query_as_string(self): """GIS queries can be represented as strings.""" query = City.objects.filter(point__within=Polygon.from_bbox((0, 0, 2, 2))) self.assertIn( connection.ops.quote_name(City._meta.db_table), str(query.query), ) def test_dumpdata_loaddata_cycle(self): """ Test a dumpdata/loaddata cycle with geographic data. """ out = StringIO() original_data = list(City.objects.order_by("name")) call_command("dumpdata", "geoapp.City", stdout=out) result = out.getvalue() houston = City.objects.get(name="Houston") self.assertIn('"point": "%s"' % houston.point.ewkt, result) # Reload now dumped data with NamedTemporaryFile(mode="w", suffix=".json") as tmp: tmp.write(result) tmp.seek(0) call_command("loaddata", tmp.name, verbosity=0) self.assertEqual(original_data, list(City.objects.order_by("name"))) @skipUnlessDBFeature("supports_empty_geometries") def test_empty_geometries(self): geometry_classes = [ Point, LineString, LinearRing, Polygon, MultiPoint, MultiLineString, MultiPolygon, GeometryCollection, ] for klass in geometry_classes: g = klass(srid=4326) model_class = Feature if g.hasz: if not connection.features.supports_3d_storage: continue else: model_class = ThreeDimensionalFeature feature = model_class.objects.create(name=f"Empty {klass.__name__}", geom=g) feature.refresh_from_db() if klass is LinearRing: # LinearRing isn't representable in WKB, so GEOSGeomtry.wkb # uses LineString instead. 
g = LineString(srid=4326) self.assertEqual(feature.geom, g) self.assertEqual(feature.geom.srid, g.srid) class SaveLoadTests(TestCase): def test_multilinestringfield(self): geom = MultiLineString( LineString((0, 0), (1, 1), (5, 5)), LineString((0, 0), (0, 5), (5, 5), (5, 0), (0, 0)), ) obj = Lines.objects.create(geom=geom) obj.refresh_from_db() self.assertEqual(obj.geom.tuple, geom.tuple) def test_multilinestring_with_linearring(self): geom = MultiLineString( LineString((0, 0), (1, 1), (5, 5)), LinearRing((0, 0), (0, 5), (5, 5), (5, 0), (0, 0)), ) obj = Lines.objects.create(geom=geom) obj.refresh_from_db() self.assertEqual(obj.geom.tuple, geom.tuple) self.assertEqual(obj.geom[1].__class__.__name__, "LineString") self.assertEqual(obj.geom[0].tuple, geom[0].tuple) # LinearRings are transformed to LineString. self.assertEqual(obj.geom[1].__class__.__name__, "LineString") self.assertEqual(obj.geom[1].tuple, geom[1].tuple) @skipIf(cannot_save_multipoint, "MariaDB cannot save MultiPoint due to a bug.") def test_multipointfield(self): geom = MultiPoint(Point(1, 1), Point(0, 0)) obj = Points.objects.create(geom=geom) obj.refresh_from_db() self.assertEqual(obj.geom, geom) def test_geometrycollectionfield(self): geom = GeometryCollection( Point(2, 2), LineString((0, 0), (2, 2)), Polygon(LinearRing((0, 0), (0, 5), (5, 5), (5, 0), (0, 0))), ) obj = GeometryCollectionModel.objects.create(geom=geom) obj.refresh_from_db() self.assertIs(obj.geom.equals(geom), True) class GeoLookupTest(TestCase): fixtures = ["initial"] @skipUnlessGISLookup("disjoint") def test_disjoint_lookup(self): "Testing the `disjoint` lookup type." 
ptown = City.objects.get(name="Pueblo") qs1 = City.objects.filter(point__disjoint=ptown.point) self.assertEqual(7, qs1.count()) qs2 = State.objects.filter(poly__disjoint=ptown.point) self.assertEqual(1, qs2.count()) self.assertEqual("Kansas", qs2[0].name) @skipUnlessGISLookup("contained") def test_contained(self): # Getting Texas, yes we were a country -- once ;) texas = Country.objects.get(name="Texas") # Seeing what cities are in Texas, should get Houston and Dallas, # and Oklahoma City because 'contained' only checks on the # _bounding box_ of the Geometries. qs = City.objects.filter(point__contained=texas.mpoly) self.assertEqual(3, qs.count()) cities = ["Houston", "Dallas", "Oklahoma City"] for c in qs: self.assertIn(c.name, cities) @skipUnlessGISLookup("contains") def test_contains(self): houston = City.objects.get(name="Houston") wellington = City.objects.get(name="Wellington") pueblo = City.objects.get(name="Pueblo") okcity = City.objects.get(name="Oklahoma City") lawrence = City.objects.get(name="Lawrence") # Now testing contains on the countries using the points for # Houston and Wellington. tx = Country.objects.get(mpoly__contains=houston.point) # Query w/GEOSGeometry nz = Country.objects.get( mpoly__contains=wellington.point.hex ) # Query w/EWKBHEX self.assertEqual("Texas", tx.name) self.assertEqual("New Zealand", nz.name) # Testing `contains` on the states using the point for Lawrence. ks = State.objects.get(poly__contains=lawrence.point) self.assertEqual("Kansas", ks.name) # Pueblo and Oklahoma City (even though OK City is within the bounding # box of Texas) are not contained in Texas or New Zealand. self.assertEqual( len(Country.objects.filter(mpoly__contains=pueblo.point)), 0 ) # Query w/GEOSGeometry object self.assertEqual( len(Country.objects.filter(mpoly__contains=okcity.point.wkt)), 0 ) # Query w/WKT @skipUnlessGISLookup("bbcontains") def test_bbcontains(self): # OK City is contained w/in bounding box of Texas. 
okcity = City.objects.get(name="Oklahoma City") qs = Country.objects.filter(mpoly__bbcontains=okcity.point) self.assertEqual(1, len(qs)) self.assertEqual("Texas", qs[0].name) @skipUnlessGISLookup("crosses") def test_crosses_lookup(self): Track.objects.create(name="Line1", line=LineString([(-95, 29), (-60, 0)])) self.assertEqual( Track.objects.filter( line__crosses=LineString([(-95, 0), (-60, 29)]) ).count(), 1, ) self.assertEqual( Track.objects.filter( line__crosses=LineString([(-95, 30), (0, 30)]) ).count(), 0, ) @skipUnlessDBFeature("supports_isvalid_lookup") def test_isvalid_lookup(self): invalid_geom = fromstr("POLYGON((0 0, 0 1, 1 1, 1 0, 1 1, 1 0, 0 0))") State.objects.create(name="invalid", poly=invalid_geom) qs = State.objects.all() if connection.ops.oracle: # Kansas has adjacent vertices with distance 6.99244813842e-12 # which is smaller than the default Oracle tolerance. qs = qs.exclude(name="Kansas") self.assertEqual( State.objects.filter(name="Kansas", poly__isvalid=False).count(), 1 ) self.assertEqual(qs.filter(poly__isvalid=False).count(), 1) self.assertEqual(qs.filter(poly__isvalid=True).count(), qs.count() - 1) @skipUnlessGISLookup("left", "right") def test_left_right_lookups(self): "Testing the 'left' and 'right' lookup types." # Left: A << B => true if xmax(A) < xmin(B) # Right: A >> B => true if xmin(A) > xmax(B) # See: BOX2D_left() and BOX2D_right() in lwgeom_box2dfloat4.c in # PostGIS source. # Getting the borders for Colorado & Kansas co_border = State.objects.get(name="Colorado").poly ks_border = State.objects.get(name="Kansas").poly # Note: Wellington has an 'X' value of 174, so it will not be # considered to the left of CO. # These cities should be strictly to the right of the CO border. 
cities = [ "Houston", "Dallas", "Oklahoma City", "Lawrence", "Chicago", "Wellington", ] qs = City.objects.filter(point__right=co_border) self.assertEqual(6, len(qs)) for c in qs: self.assertIn(c.name, cities) # These cities should be strictly to the right of the KS border. cities = ["Chicago", "Wellington"] qs = City.objects.filter(point__right=ks_border) self.assertEqual(2, len(qs)) for c in qs: self.assertIn(c.name, cities) # Note: Wellington has an 'X' value of 174, so it will not be # considered # to the left of CO. vic = City.objects.get(point__left=co_border) self.assertEqual("Victoria", vic.name) cities = ["Pueblo", "Victoria"] qs = City.objects.filter(point__left=ks_border) self.assertEqual(2, len(qs)) for c in qs: self.assertIn(c.name, cities) @skipUnlessGISLookup("strictly_above", "strictly_below") def test_strictly_above_below_lookups(self): dallas = City.objects.get(name="Dallas") self.assertQuerySetEqual( City.objects.filter(point__strictly_above=dallas.point).order_by("name"), ["Chicago", "Lawrence", "Oklahoma City", "Pueblo", "Victoria"], lambda b: b.name, ) self.assertQuerySetEqual( City.objects.filter(point__strictly_below=dallas.point).order_by("name"), ["Houston", "Wellington"], lambda b: b.name, ) @skipUnlessGISLookup("same_as", "equals") def test_equals_lookups(self): "Testing the 'same_as' and 'equals' lookup types." pnt = fromstr("POINT (-95.363151 29.763374)", srid=4326) c1 = City.objects.get(point=pnt) c2 = City.objects.get(point__same_as=pnt) c3 = City.objects.get(point__equals=pnt) for c in [c1, c2, c3]: self.assertEqual("Houston", c.name) @skipUnlessDBFeature("supports_null_geometries") def test_null_geometries(self): "Testing NULL geometry support, and the `isnull` lookup type." # Creating a state with a NULL boundary. State.objects.create(name="Puerto Rico") # Querying for both NULL and Non-NULL values. 
nullqs = State.objects.filter(poly__isnull=True) validqs = State.objects.filter(poly__isnull=False) # Puerto Rico should be NULL (it's a commonwealth unincorporated # territory) self.assertEqual(1, len(nullqs)) self.assertEqual("Puerto Rico", nullqs[0].name) # GeometryField=None is an alias for __isnull=True. self.assertCountEqual(State.objects.filter(poly=None), nullqs) self.assertCountEqual(State.objects.exclude(poly=None), validqs) # The valid states should be Colorado & Kansas self.assertEqual(2, len(validqs)) state_names = [s.name for s in validqs] self.assertIn("Colorado", state_names) self.assertIn("Kansas", state_names) # Saving another commonwealth w/a NULL geometry. nmi = State.objects.create(name="Northern Mariana Islands", poly=None) self.assertIsNone(nmi.poly) # Assigning a geometry and saving -- then UPDATE back to NULL. nmi.poly = "POLYGON((0 0,1 0,1 1,1 0,0 0))" nmi.save() State.objects.filter(name="Northern Mariana Islands").update(poly=None) self.assertIsNone(State.objects.get(name="Northern Mariana Islands").poly) @skipUnlessDBFeature( "supports_null_geometries", "supports_crosses_lookup", "supports_relate_lookup" ) def test_null_geometries_excluded_in_lookups(self): """NULL features are excluded in spatial lookup functions.""" null = State.objects.create(name="NULL", poly=None) queries = [ ("equals", Point(1, 1)), ("disjoint", Point(1, 1)), ("touches", Point(1, 1)), ("crosses", LineString((0, 0), (1, 1), (5, 5))), ("within", Point(1, 1)), ("overlaps", LineString((0, 0), (1, 1), (5, 5))), ("contains", LineString((0, 0), (1, 1), (5, 5))), ("intersects", LineString((0, 0), (1, 1), (5, 5))), ("relate", (Point(1, 1), "T*T***FF*")), ("same_as", Point(1, 1)), ("exact", Point(1, 1)), ("coveredby", Point(1, 1)), ("covers", Point(1, 1)), ] for lookup, geom in queries: with self.subTest(lookup=lookup): self.assertNotIn( null, State.objects.filter(**{"poly__%s" % lookup: geom}) ) def test_wkt_string_in_lookup(self): # Valid WKT strings don't emit error 
logs. with self.assertNoLogs("django.contrib.gis", "ERROR"): State.objects.filter(poly__intersects="LINESTRING(0 0, 1 1, 5 5)") @skipUnlessGISLookup("coveredby") def test_coveredby_lookup(self): poly = Polygon(LinearRing((0, 0), (0, 5), (5, 5), (5, 0), (0, 0))) state = State.objects.create(name="Test", poly=poly) small_poly = Polygon(LinearRing((0, 0), (1, 4), (4, 4), (4, 1), (0, 0))) qs = State.objects.filter(poly__coveredby=small_poly) self.assertSequenceEqual(qs, []) large_poly = Polygon(LinearRing((0, 0), (-1, 6), (6, 6), (6, -1), (0, 0))) qs = State.objects.filter(poly__coveredby=large_poly) self.assertSequenceEqual(qs, [state]) if not connection.ops.oracle: # On Oracle, COVEREDBY doesn't match for EQUAL objects. qs = State.objects.filter(poly__coveredby=poly) self.assertSequenceEqual(qs, [state]) @skipUnlessGISLookup("covers") def test_covers_lookup(self): poly = Polygon(LinearRing((0, 0), (0, 5), (5, 5), (5, 0), (0, 0))) state = State.objects.create(name="Test", poly=poly) small_poly = Polygon(LinearRing((0, 0), (1, 4), (4, 4), (4, 1), (0, 0))) qs = State.objects.filter(poly__covers=small_poly) self.assertSequenceEqual(qs, [state]) large_poly = Polygon(LinearRing((-1, -1), (-1, 6), (6, 6), (6, -1), (-1, -1))) qs = State.objects.filter(poly__covers=large_poly) self.assertSequenceEqual(qs, []) if not connection.ops.oracle: # On Oracle, COVERS doesn't match for EQUAL objects. qs = State.objects.filter(poly__covers=poly) self.assertSequenceEqual(qs, [state]) @skipUnlessDBFeature("supports_relate_lookup") def test_relate_lookup(self): "Testing the 'relate' lookup type." # To make things more interesting, we will have our Texas reference # point in different SRIDs. pnt1 = fromstr("POINT (649287.0363174 4177429.4494686)", srid=2847) pnt2 = fromstr("POINT(-98.4919715741052 29.4333344025053)", srid=4326) # Not passing in a geometry as first param raises a TypeError when # initializing the QuerySet. 
with self.assertRaises(ValueError): Country.objects.filter(mpoly__relate=(23, "foo")) # Making sure the right exception is raised for the given # bad arguments. for bad_args, e in [ ((pnt1, 0), ValueError), ((pnt2, "T*T***FF*", 0), ValueError), ]: qs = Country.objects.filter(mpoly__relate=bad_args) with self.assertRaises(e): qs.count() contains_mask = "T*T***FF*" within_mask = "T*F**F***" intersects_mask = "T********" # Relate works differently on Oracle. if connection.ops.oracle: contains_mask = "contains" within_mask = "inside" # TODO: This is not quite the same as the PostGIS mask above intersects_mask = "overlapbdyintersect" # Testing contains relation mask. if connection.features.supports_transform: self.assertEqual( Country.objects.get(mpoly__relate=(pnt1, contains_mask)).name, "Texas", ) self.assertEqual( "Texas", Country.objects.get(mpoly__relate=(pnt2, contains_mask)).name ) # Testing within relation mask. ks = State.objects.get(name="Kansas") self.assertEqual( "Lawrence", # Remove ".filter(name="Lawrence")" once PostGIS 3.5.4 is released. # https://lists.osgeo.org/pipermail/postgis-devel/2025-July/030581.html City.objects.filter(name="Lawrence") .get(point__relate=(ks.poly, within_mask)) .name, ) # Testing intersection relation mask. 
if not connection.ops.oracle: if connection.features.supports_transform: self.assertEqual( Country.objects.get(mpoly__relate=(pnt1, intersects_mask)).name, "Texas", ) self.assertEqual( "Texas", Country.objects.get(mpoly__relate=(pnt2, intersects_mask)).name ) self.assertEqual( "Lawrence", City.objects.get(point__relate=(ks.poly, intersects_mask)).name, ) # With a complex geometry expression mask = "anyinteract" if connection.ops.oracle else within_mask self.assertFalse( City.objects.exclude( point__relate=(functions.Union("point", "point"), mask) ) ) @skipUnlessDBFeature("has_Union_function") def test_gis_lookups_with_complex_expressions(self): multiple_arg_lookups = { "dwithin", "relate", } # These lookups are tested elsewhere. lookups = connection.ops.gis_operators.keys() - multiple_arg_lookups self.assertTrue(lookups, "No lookups found") for lookup in lookups: with self.subTest(lookup): City.objects.filter( **{"point__" + lookup: functions.Union("point", "point")} ).exists() @skipUnlessGISLookup("within") def test_subquery_annotation(self): multifields = MultiFields.objects.create( city=City.objects.create(point=Point(1, 1)), point=Point(2, 2), poly=Polygon.from_bbox((0, 0, 2, 2)), ) qs = MultiFields.objects.annotate( city_point=Subquery( City.objects.filter( id=OuterRef("city"), ).values("point") ), ).filter( city_point__within=F("poly"), ) self.assertEqual(qs.get(), multifields) class GeoQuerySetTest(TestCase): # TODO: GeoQuerySet is removed, organize these test better. fixtures = ["initial"] @skipUnlessDBFeature("supports_extent_aggr") def test_extent(self): """ Testing the `Extent` aggregate. 
""" # Reference query: # SELECT ST_extent(point) # FROM geoapp_city # WHERE (name='Houston' or name='Dallas');` # => BOX(-96.8016128540039 29.7633724212646,-95.3631439208984 # 32.7820587158203) expected = ( -96.8016128540039, 29.7633724212646, -95.3631439208984, 32.782058715820, ) qs = City.objects.filter(name__in=("Houston", "Dallas")) extent = qs.aggregate(Extent("point"))["point__extent"] for val, exp in zip(extent, expected): self.assertAlmostEqual(exp, val, 4) self.assertIsNone( City.objects.filter(name=("Smalltown")).aggregate(Extent("point"))[ "point__extent" ] ) @skipUnlessDBFeature("supports_extent_aggr") def test_extent_with_limit(self): """ Testing if extent supports limit. """ extent1 = City.objects.aggregate(Extent("point"))["point__extent"] extent2 = City.objects.all()[:3].aggregate(Extent("point"))["point__extent"] self.assertNotEqual(extent1, extent2) def test_make_line(self): """ Testing the `MakeLine` aggregate. """ if not connection.features.supports_make_line_aggr: with self.assertRaises(NotSupportedError): City.objects.aggregate(MakeLine("point")) return # MakeLine on an inappropriate field returns simply None self.assertIsNone(State.objects.aggregate(MakeLine("poly"))["poly__makeline"]) # Reference query: # SELECT AsText(ST_MakeLine(geoapp_city.point)) FROM geoapp_city; line = City.objects.aggregate(MakeLine("point"))["point__makeline"] ref_points = City.objects.values_list("point", flat=True) self.assertIsInstance(line, LineString) self.assertEqual(len(line), ref_points.count()) # Compare pairs of manually sorted points, as the default ordering is # flaky. for point, ref_city in zip(sorted(line), sorted(ref_points)): point_x, point_y = point self.assertAlmostEqual(point_x, ref_city.x, 5) self.assertAlmostEqual(point_y, ref_city.y, 5) @skipUnlessDBFeature("supports_union_aggr") def test_unionagg(self): """ Testing the `Union` aggregate. 
""" tx = Country.objects.get(name="Texas").mpoly # Houston, Dallas -- Ordering may differ depending on backend or GEOS # version. union = GEOSGeometry("MULTIPOINT(-96.801611 32.782057,-95.363151 29.763374)") qs = City.objects.filter(point__within=tx) with self.assertRaises(ValueError): qs.aggregate(Union("name")) # Using `field_name` keyword argument in one query and specifying an # order in the other (which should not be used because this is # an aggregate method on a spatial column) u1 = qs.aggregate(Union("point"))["point__union"] u2 = qs.order_by("name").aggregate(Union("point"))["point__union"] self.assertTrue(union.equals(u1)) self.assertTrue(union.equals(u2)) qs = City.objects.filter(name="NotACity") self.assertIsNone(qs.aggregate(Union("point"))["point__union"]) @skipUnlessDBFeature("supports_union_aggr") def test_geoagg_subquery(self): tx = Country.objects.get(name="Texas") union = GEOSGeometry("MULTIPOINT(-96.801611 32.782057,-95.363151 29.763374)") # Use distinct() to force the usage of a subquery for aggregation. with CaptureQueriesContext(connection) as ctx: self.assertIs( union.equals( City.objects.filter(point__within=tx.mpoly) .distinct() .aggregate( Union("point"), )["point__union"], ), True, ) self.assertIn("subquery", ctx.captured_queries[0]["sql"]) @skipUnlessDBFeature("supports_tolerance_parameter") def test_unionagg_tolerance(self): City.objects.create( point=fromstr("POINT(-96.467222 32.751389)", srid=4326), name="Forney", ) tx = Country.objects.get(name="Texas").mpoly # Tolerance is greater than distance between Forney and Dallas, that's # why Dallas is ignored. 
forney_houston = GEOSGeometry( "MULTIPOINT(-95.363151 29.763374, -96.467222 32.751389)", srid=4326, ) self.assertIs( forney_houston.equals_exact( City.objects.filter(point__within=tx).aggregate( Union("point", tolerance=32000), )["point__union"], tolerance=10e-6, ), True, ) @skipUnlessDBFeature("supports_tolerance_parameter") def test_unionagg_tolerance_escaping(self): tx = Country.objects.get(name="Texas").mpoly with self.assertRaises(DatabaseError): City.objects.filter(point__within=tx).aggregate(
python
BSD-3-Clause
3201a895cba335000827b28768a7b7105c81b415
2026-01-04T14:38:15.489092Z
true
django/django
https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/gis_tests/geoapp/test_serializers.py
tests/gis_tests/geoapp/test_serializers.py
import json from django.contrib.gis.geos import LinearRing, Point, Polygon from django.core import serializers from django.test import TestCase from .models import City, MultiFields, PennsylvaniaCity class GeoJSONSerializerTests(TestCase): fixtures = ["initial"] def test_builtin_serializers(self): """ 'geojson' should be listed in available serializers. """ all_formats = set(serializers.get_serializer_formats()) public_formats = set(serializers.get_public_serializer_formats()) self.assertIn("geojson", all_formats) self.assertIn("geojson", public_formats) def test_serialization_base(self): geojson = serializers.serialize("geojson", City.objects.order_by("name")) geodata = json.loads(geojson) self.assertEqual(list(geodata.keys()), ["type", "features"]) self.assertEqual(geodata["type"], "FeatureCollection") self.assertEqual(len(geodata["features"]), len(City.objects.all())) self.assertEqual(geodata["features"][0]["geometry"]["type"], "Point") self.assertEqual(geodata["features"][0]["properties"]["name"], "Chicago") first_city = City.objects.order_by("name").first() self.assertEqual(geodata["features"][0]["id"], first_city.pk) self.assertEqual(geodata["features"][0]["properties"]["pk"], str(first_city.pk)) def test_geometry_field_option(self): """ When a model has several geometry fields, the 'geometry_field' option can be used to specify the field to use as the 'geometry' key. """ MultiFields.objects.create( city=City.objects.first(), name="Name", point=Point(5, 23), poly=Polygon(LinearRing((0, 0), (0, 5), (5, 5), (5, 0), (0, 0))), ) geojson = serializers.serialize("geojson", MultiFields.objects.all()) geodata = json.loads(geojson) self.assertEqual(geodata["features"][0]["geometry"]["type"], "Point") geojson = serializers.serialize( "geojson", MultiFields.objects.all(), geometry_field="poly" ) geodata = json.loads(geojson) self.assertEqual(geodata["features"][0]["geometry"]["type"], "Polygon") # geometry_field is considered even if not in fields (#26138). 
geojson = serializers.serialize( "geojson", MultiFields.objects.all(), geometry_field="poly", fields=("city",), ) geodata = json.loads(geojson) self.assertEqual(geodata["features"][0]["geometry"]["type"], "Polygon") def test_id_field_option(self): """ By default Django uses the pk of the object as the id for a feature. The 'id_field' option can be used to specify a different field to use as the id. """ cities = City.objects.order_by("name") geojson = serializers.serialize("geojson", cities, id_field="name") geodata = json.loads(geojson) self.assertEqual(geodata["features"][0]["id"], cities[0].name) def test_fields_option(self): """ The fields option allows to define a subset of fields to be present in the 'properties' of the generated output. """ PennsylvaniaCity.objects.create( name="Mansfield", county="Tioga", point="POINT(-77.071445 41.823881)" ) geojson = serializers.serialize( "geojson", PennsylvaniaCity.objects.all(), fields=("county", "point"), ) geodata = json.loads(geojson) self.assertIn("county", geodata["features"][0]["properties"]) self.assertNotIn("founded", geodata["features"][0]["properties"]) self.assertNotIn("pk", geodata["features"][0]["properties"]) def test_srid_option(self): geojson = serializers.serialize( "geojson", City.objects.order_by("name"), srid=2847 ) geodata = json.loads(geojson) coordinates = geodata["features"][0]["geometry"]["coordinates"] # Different PROJ versions use different transformations, all are # correct as having a 1 meter accuracy. self.assertAlmostEqual(coordinates[0], 1564802, -1) self.assertAlmostEqual(coordinates[1], 5613214, -1) def test_deserialization_exception(self): """ GeoJSON cannot be deserialized. """ with self.assertRaises(serializers.base.SerializerDoesNotExist): serializers.deserialize("geojson", "{}")
python
BSD-3-Clause
3201a895cba335000827b28768a7b7105c81b415
2026-01-04T14:38:15.489092Z
false
django/django
https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/gis_tests/geoapp/feeds.py
tests/gis_tests/geoapp/feeds.py
from django.contrib.gis import feeds from .models import City class TestGeoRSS1(feeds.Feed): link = "/city/" title = "Test GeoDjango Cities" def items(self): return City.objects.all() def item_link(self, item): return "/city/%s/" % item.pk def item_geometry(self, item): return item.point class TestGeoRSS2(TestGeoRSS1): def geometry(self, obj): # This should attach a <georss:box> element for the extent of # the cities in the database. This tuple came from # calling `City.objects.aggregate(Extent())` -- we can't do that call # here because `Extent` is not implemented for MySQL/Oracle. return (-123.30, -41.32, 174.78, 48.46) def item_geometry(self, item): # Returning a simple tuple for the geometry. return item.point.x, item.point.y class TestGeoAtom1(TestGeoRSS1): feed_type = feeds.GeoAtom1Feed class TestGeoAtom2(TestGeoRSS2): feed_type = feeds.GeoAtom1Feed def geometry(self, obj): # This time we'll use a 2-tuple of coordinates for the box. return ((-123.30, -41.32), (174.78, 48.46)) class TestW3CGeo1(TestGeoRSS1): feed_type = feeds.W3CGeoFeed # The following feeds are invalid, and will raise exceptions. class TestW3CGeo2(TestGeoRSS2): feed_type = feeds.W3CGeoFeed class TestW3CGeo3(TestGeoRSS1): feed_type = feeds.W3CGeoFeed def item_geometry(self, item): from django.contrib.gis.geos import Polygon return Polygon(((0, 0), (0, 1), (1, 1), (1, 0), (0, 0))) # The feed dictionary to use for URLs. feed_dict = { "rss1": TestGeoRSS1, "rss2": TestGeoRSS2, "atom1": TestGeoAtom1, "atom2": TestGeoAtom2, "w3cgeo1": TestW3CGeo1, "w3cgeo2": TestW3CGeo2, "w3cgeo3": TestW3CGeo3, }
python
BSD-3-Clause
3201a895cba335000827b28768a7b7105c81b415
2026-01-04T14:38:15.489092Z
false
django/django
https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/gis_tests/geoapp/urls.py
tests/gis_tests/geoapp/urls.py
from django.contrib.gis import views as gis_views from django.contrib.gis.sitemaps import views as gis_sitemap_views from django.contrib.sitemaps import views as sitemap_views from django.urls import path from .feeds import feed_dict from .sitemaps import sitemaps urlpatterns = [ path("feeds/<path:url>/", gis_views.feed, {"feed_dict": feed_dict}), ] urlpatterns += [ path("sitemaps/<section>.xml", sitemap_views.sitemap, {"sitemaps": sitemaps}), ] urlpatterns += [ path( "sitemaps/kml/<label>/<model>/<field_name>.kml", gis_sitemap_views.kml, name="django.contrib.gis.sitemaps.views.kml", ), path( "sitemaps/kml/<label>/<model>/<field_name>.kmz", gis_sitemap_views.kmz, name="django.contrib.gis.sitemaps.views.kmz", ), ]
python
BSD-3-Clause
3201a895cba335000827b28768a7b7105c81b415
2026-01-04T14:38:15.489092Z
false
django/django
https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/gis_tests/gis_migrations/test_operations.py
tests/gis_tests/gis_migrations/test_operations.py
from unittest import skipUnless from django.contrib.gis.db.models import fields from django.contrib.gis.geos import MultiPolygon, Polygon from django.core.exceptions import ImproperlyConfigured from django.db import connection, migrations, models from django.db.migrations.migration import Migration from django.db.migrations.state import ProjectState from django.test import TransactionTestCase, skipIfDBFeature, skipUnlessDBFeature try: GeometryColumns = connection.ops.geometry_columns() HAS_GEOMETRY_COLUMNS = True except NotImplementedError: HAS_GEOMETRY_COLUMNS = False class OperationTestCase(TransactionTestCase): available_apps = ["gis_tests.gis_migrations"] get_opclass_query = """ SELECT opcname, c.relname FROM pg_opclass AS oc JOIN pg_index as i on oc.oid = ANY(i.indclass) JOIN pg_class as c on c.oid = i.indexrelid WHERE c.relname = %s """ def tearDown(self): # Delete table after testing if hasattr(self, "current_state"): self.apply_operations( "gis", self.current_state, [migrations.DeleteModel("Neighborhood")] ) super().tearDown() @property def has_spatial_indexes(self): if connection.ops.mysql: with connection.cursor() as cursor: return connection.introspection.supports_spatial_index( cursor, "gis_neighborhood" ) return True def get_table_description(self, table): with connection.cursor() as cursor: return connection.introspection.get_table_description(cursor, table) def assertColumnExists(self, table, column): self.assertIn(column, [c.name for c in self.get_table_description(table)]) def assertColumnNotExists(self, table, column): self.assertNotIn(column, [c.name for c in self.get_table_description(table)]) def apply_operations(self, app_label, project_state, operations): migration = Migration("name", app_label) migration.operations = operations with connection.schema_editor() as editor: return migration.apply(project_state, editor) def set_up_test_model(self, force_raster_creation=False): test_fields = [ ("id", models.AutoField(primary_key=True)), ("name", 
models.CharField(max_length=100, unique=True)), ("geom", fields.MultiPolygonField(srid=4326)), ] if connection.features.supports_raster or force_raster_creation: test_fields += [("rast", fields.RasterField(srid=4326, null=True))] operations = [migrations.CreateModel("Neighborhood", test_fields)] self.current_state = self.apply_operations("gis", ProjectState(), operations) def assertGeometryColumnsCount(self, expected_count): self.assertEqual( GeometryColumns.objects.filter( **{ "%s__iexact" % GeometryColumns.table_name_col(): "gis_neighborhood", } ).count(), expected_count, ) def assertSpatialIndexExists(self, table, column, raster=False): with connection.cursor() as cursor: constraints = connection.introspection.get_constraints(cursor, table) if raster: self.assertTrue( any( "st_convexhull(%s)" % column in c["definition"] for c in constraints.values() if c["definition"] is not None ) ) else: self.assertIn([column], [c["columns"] for c in constraints.values()]) def assertSpatialIndexNotExists(self, table, column, raster=False): with connection.cursor() as cursor: constraints = connection.introspection.get_constraints(cursor, table) if raster: self.assertFalse( any( "st_convexhull(%s)" % column in c["definition"] for c in constraints.values() if c["definition"] is not None ) ) else: self.assertNotIn([column], [c["columns"] for c in constraints.values()]) def alter_gis_model( self, migration_class, model_name, field_name, field_class=None, field_class_kwargs=None, ): args = [model_name, field_name] if field_class: field_class_kwargs = field_class_kwargs or {} args.append(field_class(**field_class_kwargs)) operation = migration_class(*args) old_state = self.current_state.clone() operation.state_forwards("gis", self.current_state) with connection.schema_editor() as editor: operation.database_forwards("gis", editor, old_state, self.current_state) class OperationTests(OperationTestCase): def setUp(self): super().setUp() self.set_up_test_model() def 
test_add_geom_field(self): """ Test the AddField operation with a geometry-enabled column. """ self.alter_gis_model( migrations.AddField, "Neighborhood", "path", fields.LineStringField ) self.assertColumnExists("gis_neighborhood", "path") # Test GeometryColumns when available if HAS_GEOMETRY_COLUMNS: self.assertGeometryColumnsCount(2) # Test spatial indices when available if self.has_spatial_indexes: self.assertSpatialIndexExists("gis_neighborhood", "path") @skipUnless(connection.vendor == "mysql", "MySQL specific test") def test_remove_geom_field_nullable_with_index(self): # MySQL doesn't support spatial indexes on NULL columns. with self.assertNumQueries(1) as ctx: self.alter_gis_model( migrations.AddField, "Neighborhood", "path", fields.LineStringField, field_class_kwargs={"null": True}, ) self.assertColumnExists("gis_neighborhood", "path") self.assertNotIn("CREATE SPATIAL INDEX", ctx.captured_queries[0]["sql"]) with self.assertNumQueries(1), self.assertNoLogs("django.contrib.gis", "ERROR"): self.alter_gis_model(migrations.RemoveField, "Neighborhood", "path") self.assertColumnNotExists("gis_neighborhood", "path") @skipUnless(HAS_GEOMETRY_COLUMNS, "Backend doesn't support GeometryColumns.") def test_geom_col_name(self): self.assertEqual( GeometryColumns.geom_col_name(), "column_name" if connection.ops.oracle else "f_geometry_column", ) @skipUnlessDBFeature("supports_raster") def test_add_raster_field(self): """ Test the AddField operation with a raster-enabled column. """ self.alter_gis_model( migrations.AddField, "Neighborhood", "heatmap", fields.RasterField ) self.assertColumnExists("gis_neighborhood", "heatmap") # Test spatial indices when available if self.has_spatial_indexes: self.assertSpatialIndexExists("gis_neighborhood", "heatmap", raster=True) def test_add_blank_geom_field(self): """ Should be able to add a GeometryField with blank=True. 
""" self.alter_gis_model( migrations.AddField, "Neighborhood", "path", fields.LineStringField, field_class_kwargs={"blank": True}, ) self.assertColumnExists("gis_neighborhood", "path") # Test GeometryColumns when available if HAS_GEOMETRY_COLUMNS: self.assertGeometryColumnsCount(2) # Test spatial indices when available if self.has_spatial_indexes: self.assertSpatialIndexExists("gis_neighborhood", "path") @skipUnlessDBFeature("supports_raster") def test_add_blank_raster_field(self): """ Should be able to add a RasterField with blank=True. """ self.alter_gis_model( migrations.AddField, "Neighborhood", "heatmap", fields.RasterField, field_class_kwargs={"blank": True}, ) self.assertColumnExists("gis_neighborhood", "heatmap") # Test spatial indices when available if self.has_spatial_indexes: self.assertSpatialIndexExists("gis_neighborhood", "heatmap", raster=True) def test_remove_geom_field(self): """ Test the RemoveField operation with a geometry-enabled column. """ self.alter_gis_model(migrations.RemoveField, "Neighborhood", "geom") self.assertColumnNotExists("gis_neighborhood", "geom") # Test GeometryColumns when available if HAS_GEOMETRY_COLUMNS: self.assertGeometryColumnsCount(0) @skipUnlessDBFeature("supports_raster") def test_remove_raster_field(self): """ Test the RemoveField operation with a raster-enabled column. 
""" self.alter_gis_model(migrations.RemoveField, "Neighborhood", "rast") self.assertColumnNotExists("gis_neighborhood", "rast") def test_create_model_spatial_index(self): if not self.has_spatial_indexes: self.skipTest("No support for Spatial indexes") self.assertSpatialIndexExists("gis_neighborhood", "geom") if connection.features.supports_raster: self.assertSpatialIndexExists("gis_neighborhood", "rast", raster=True) @skipUnlessDBFeature("can_alter_geometry_field") def test_alter_field_add_spatial_index(self): if not self.has_spatial_indexes: self.skipTest("No support for Spatial indexes") self.alter_gis_model( migrations.AddField, "Neighborhood", "point", fields.PointField, field_class_kwargs={"spatial_index": False}, ) self.assertSpatialIndexNotExists("gis_neighborhood", "point") self.alter_gis_model( migrations.AlterField, "Neighborhood", "point", fields.PointField, field_class_kwargs={"spatial_index": True}, ) self.assertSpatialIndexExists("gis_neighborhood", "point") @skipUnlessDBFeature("can_alter_geometry_field") def test_alter_field_remove_spatial_index(self): if not self.has_spatial_indexes: self.skipTest("No support for Spatial indexes") self.assertSpatialIndexExists("gis_neighborhood", "geom") self.alter_gis_model( migrations.AlterField, "Neighborhood", "geom", fields.MultiPolygonField, field_class_kwargs={"spatial_index": False}, ) self.assertSpatialIndexNotExists("gis_neighborhood", "geom") @skipUnlessDBFeature("can_alter_geometry_field") @skipUnless(connection.vendor == "mysql", "MySQL specific test") def test_alter_field_nullable_with_spatial_index(self): if not self.has_spatial_indexes: self.skipTest("No support for Spatial indexes") self.alter_gis_model( migrations.AddField, "Neighborhood", "point", fields.PointField, field_class_kwargs={"spatial_index": False, "null": True}, ) # MySQL doesn't support spatial indexes on NULL columns. 
self.assertSpatialIndexNotExists("gis_neighborhood", "point") self.alter_gis_model( migrations.AlterField, "Neighborhood", "point", fields.PointField, field_class_kwargs={"spatial_index": True, "null": True}, ) self.assertSpatialIndexNotExists("gis_neighborhood", "point") self.alter_gis_model( migrations.AlterField, "Neighborhood", "point", fields.PointField, field_class_kwargs={"spatial_index": False, "null": True}, ) self.assertSpatialIndexNotExists("gis_neighborhood", "point") @skipUnlessDBFeature("can_alter_geometry_field") def test_alter_field_with_spatial_index(self): if not self.has_spatial_indexes: self.skipTest("No support for Spatial indexes") self.alter_gis_model( migrations.AddField, "Neighborhood", "point", fields.PointField, field_class_kwargs={"spatial_index": True}, ) self.assertSpatialIndexExists("gis_neighborhood", "point") self.alter_gis_model( migrations.AlterField, "Neighborhood", "point", fields.PointField, field_class_kwargs={"spatial_index": True, "srid": 3086}, ) self.assertSpatialIndexExists("gis_neighborhood", "point") @skipUnlessDBFeature("supports_3d_storage") def test_add_3d_field_opclass(self): if not connection.ops.postgis: self.skipTest("PostGIS-specific test.") self.alter_gis_model( migrations.AddField, "Neighborhood", "point3d", field_class=fields.PointField, field_class_kwargs={"dim": 3}, ) self.assertColumnExists("gis_neighborhood", "point3d") self.assertSpatialIndexExists("gis_neighborhood", "point3d") with connection.cursor() as cursor: index_name = "gis_neighborhood_point3d_113bc868_id" cursor.execute(self.get_opclass_query, [index_name]) self.assertEqual( cursor.fetchall(), [("gist_geometry_ops_nd", index_name)], ) @skipUnlessDBFeature("can_alter_geometry_field", "supports_3d_storage") def test_alter_geom_field_dim(self): Neighborhood = self.current_state.apps.get_model("gis", "Neighborhood") p1 = Polygon(((0, 0), (0, 1), (1, 1), (1, 0), (0, 0))) Neighborhood.objects.create(name="TestDim", geom=MultiPolygon(p1, p1)) # Add 
3rd dimension. self.alter_gis_model( migrations.AlterField, "Neighborhood", "geom", fields.MultiPolygonField, field_class_kwargs={"dim": 3}, ) self.assertTrue(Neighborhood.objects.first().geom.hasz) # Rewind to 2 dimensions. self.alter_gis_model( migrations.AlterField, "Neighborhood", "geom", fields.MultiPolygonField, field_class_kwargs={"dim": 2}, ) self.assertFalse(Neighborhood.objects.first().geom.hasz) @skipUnlessDBFeature( "supports_column_check_constraints", "can_introspect_check_constraints" ) def test_add_check_constraint(self): Neighborhood = self.current_state.apps.get_model("gis", "Neighborhood") poly = Polygon(((0, 0), (0, 1), (1, 1), (1, 0), (0, 0))) constraint = models.CheckConstraint( condition=models.Q(geom=poly), name="geom_within_constraint", ) Neighborhood._meta.constraints = [constraint] with connection.schema_editor() as editor: editor.add_constraint(Neighborhood, constraint) with connection.cursor() as cursor: constraints = connection.introspection.get_constraints( cursor, Neighborhood._meta.db_table, ) self.assertIn("geom_within_constraint", constraints) @skipIfDBFeature("supports_raster") class NoRasterSupportTests(OperationTestCase): def test_create_raster_model_on_db_without_raster_support(self): msg = "Raster fields require backends with raster support." with self.assertRaisesMessage(ImproperlyConfigured, msg): self.set_up_test_model(force_raster_creation=True) def test_add_raster_field_on_db_without_raster_support(self): msg = "Raster fields require backends with raster support." with self.assertRaisesMessage(ImproperlyConfigured, msg): self.set_up_test_model() self.alter_gis_model( migrations.AddField, "Neighborhood", "heatmap", fields.RasterField )
python
BSD-3-Clause
3201a895cba335000827b28768a7b7105c81b415
2026-01-04T14:38:15.489092Z
false
django/django
https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/gis_tests/gis_migrations/__init__.py
tests/gis_tests/gis_migrations/__init__.py
python
BSD-3-Clause
3201a895cba335000827b28768a7b7105c81b415
2026-01-04T14:38:15.489092Z
false
django/django
https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/gis_tests/gis_migrations/test_commands.py
tests/gis_tests/gis_migrations/test_commands.py
from django.core.management import call_command from django.db import connection from django.test import TransactionTestCase class MigrateTests(TransactionTestCase): """ Tests running the migrate command in GeoDjango. """ available_apps = ["gis_tests.gis_migrations"] def get_table_description(self, table): with connection.cursor() as cursor: return connection.introspection.get_table_description(cursor, table) def assertTableExists(self, table): with connection.cursor() as cursor: self.assertIn(table, connection.introspection.table_names(cursor)) def assertTableNotExists(self, table): with connection.cursor() as cursor: self.assertNotIn(table, connection.introspection.table_names(cursor)) def test_migrate_gis(self): """ Tests basic usage of the migrate command when a model uses GeoDjango fields (#22001). It's also used to showcase an error in migrations where spatialite is enabled and geo tables are renamed resulting in unique constraint failure on geometry_columns (#23030). """ # The right tables exist self.assertTableExists("gis_migrations_neighborhood") self.assertTableExists("gis_migrations_household") self.assertTableExists("gis_migrations_family") if connection.features.supports_raster: self.assertTableExists("gis_migrations_heatmap") # Unmigrate models. 
call_command("migrate", "gis_migrations", "0001", verbosity=0) # All tables are gone self.assertTableNotExists("gis_migrations_neighborhood") self.assertTableNotExists("gis_migrations_household") self.assertTableNotExists("gis_migrations_family") if connection.features.supports_raster: self.assertTableNotExists("gis_migrations_heatmap") # Even geometry columns metadata try: GeoColumn = connection.ops.geometry_columns() except NotImplementedError: # Not all GIS backends have geometry columns model pass else: qs = GeoColumn.objects.filter( **{ "%s__in" % GeoColumn.table_name_col(): ["gis_neighborhood", "gis_household"] } ) self.assertEqual(qs.count(), 0) # Revert the "unmigration" call_command("migrate", "gis_migrations", verbosity=0)
python
BSD-3-Clause
3201a895cba335000827b28768a7b7105c81b415
2026-01-04T14:38:15.489092Z
false
django/django
https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/gis_tests/gis_migrations/migrations/0001_setup_extensions.py
tests/gis_tests/gis_migrations/migrations/0001_setup_extensions.py
from django.db import connection, migrations if connection.features.supports_raster: from django.contrib.postgres.operations import CreateExtension class Migration(migrations.Migration): operations = [ CreateExtension("postgis_raster"), ] else: class Migration(migrations.Migration): operations = []
python
BSD-3-Clause
3201a895cba335000827b28768a7b7105c81b415
2026-01-04T14:38:15.489092Z
false
django/django
https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/gis_tests/gis_migrations/migrations/__init__.py
tests/gis_tests/gis_migrations/migrations/__init__.py
python
BSD-3-Clause
3201a895cba335000827b28768a7b7105c81b415
2026-01-04T14:38:15.489092Z
false
django/django
https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/gis_tests/gis_migrations/migrations/0002_create_models.py
tests/gis_tests/gis_migrations/migrations/0002_create_models.py
from django.contrib.gis.db import models from django.db import connection, migrations ops = [ migrations.CreateModel( name="Neighborhood", fields=[ ( "id", models.AutoField( verbose_name="ID", serialize=False, auto_created=True, primary_key=True, ), ), ("name", models.CharField(max_length=100, unique=True)), ("geom", models.MultiPolygonField(srid=4326)), ], options={}, bases=(models.Model,), ), migrations.CreateModel( name="Household", fields=[ ( "id", models.AutoField( verbose_name="ID", serialize=False, auto_created=True, primary_key=True, ), ), ( "neighborhood", models.ForeignKey( "gis_migrations.Neighborhood", models.SET_NULL, to_field="id", null=True, ), ), ("address", models.CharField(max_length=100)), ("zip_code", models.IntegerField(null=True, blank=True)), ("geom", models.PointField(srid=4326, geography=True)), ], options={}, bases=(models.Model,), ), migrations.CreateModel( name="Family", fields=[ ( "id", models.AutoField( verbose_name="ID", serialize=False, auto_created=True, primary_key=True, ), ), ("name", models.CharField(max_length=100, unique=True)), ], options={}, bases=(models.Model,), ), migrations.AddField( model_name="household", name="family", field=models.ForeignKey( "gis_migrations.Family", models.SET_NULL, blank=True, null=True ), preserve_default=True, ), ] if connection.features.supports_raster: ops += [ migrations.CreateModel( name="Heatmap", fields=[ ( "id", models.AutoField( verbose_name="ID", serialize=False, auto_created=True, primary_key=True, ), ), ("name", models.CharField(max_length=100, unique=True)), ("rast", models.fields.RasterField(srid=4326)), ], options={}, bases=(models.Model,), ), ] class Migration(migrations.Migration): """ Used for gis-specific migration tests. """ dependencies = [ ("gis_migrations", "0001_setup_extensions"), ] operations = ops
python
BSD-3-Clause
3201a895cba335000827b28768a7b7105c81b415
2026-01-04T14:38:15.489092Z
false
django/django
https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/gis_tests/data/__init__.py
tests/gis_tests/data/__init__.py
python
BSD-3-Clause
3201a895cba335000827b28768a7b7105c81b415
2026-01-04T14:38:15.489092Z
false
django/django
https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/gis_tests/data/rasters/__init__.py
tests/gis_tests/data/rasters/__init__.py
python
BSD-3-Clause
3201a895cba335000827b28768a7b7105c81b415
2026-01-04T14:38:15.489092Z
false
django/django
https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/gis_tests/data/rasters/textrasters.py
tests/gis_tests/data/rasters/textrasters.py
""" Text-based test rasters """ JSON_RASTER = """{ "srid": 4326, "origin": [0, 0], "scale": [-1, 1], "skew": [0, 0], "width": 5, "height": 5, "nr_of_bands": 1, "bands": [{"data": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24]}] } """
python
BSD-3-Clause
3201a895cba335000827b28768a7b7105c81b415
2026-01-04T14:38:15.489092Z
false
django/django
https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/field_subclassing/fields.py
tests/field_subclassing/fields.py
from django.db import models from django.db.models.query_utils import DeferredAttribute class CustomTypedField(models.TextField): def db_type(self, connection): return "custom_field" class CustomDeferredAttribute(DeferredAttribute): def __get__(self, instance, cls=None): self._count_call(instance, "get") return super().__get__(instance, cls) def __set__(self, instance, value): self._count_call(instance, "set") instance.__dict__[self.field.attname] = value def _count_call(self, instance, get_or_set): count_attr = "_%s_%s_count" % (self.field.attname, get_or_set) count = getattr(instance, count_attr, 0) setattr(instance, count_attr, count + 1) class CustomDescriptorField(models.CharField): descriptor_class = CustomDeferredAttribute
python
BSD-3-Clause
3201a895cba335000827b28768a7b7105c81b415
2026-01-04T14:38:15.489092Z
false
django/django
https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/field_subclassing/__init__.py
tests/field_subclassing/__init__.py
python
BSD-3-Clause
3201a895cba335000827b28768a7b7105c81b415
2026-01-04T14:38:15.489092Z
false
django/django
https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/field_subclassing/tests.py
tests/field_subclassing/tests.py
from django.db import connection, models from django.test import SimpleTestCase from .fields import CustomDescriptorField, CustomTypedField class TestDbType(SimpleTestCase): def test_db_parameters_respects_db_type(self): f = CustomTypedField() self.assertEqual(f.db_parameters(connection)["type"], "custom_field") class DescriptorClassTest(SimpleTestCase): def test_descriptor_class(self): class CustomDescriptorModel(models.Model): name = CustomDescriptorField(max_length=32) m = CustomDescriptorModel() self.assertFalse(hasattr(m, "_name_get_count")) # The field is set to its default in the model constructor. self.assertEqual(m._name_set_count, 1) m.name = "foo" self.assertFalse(hasattr(m, "_name_get_count")) self.assertEqual(m._name_set_count, 2) self.assertEqual(m.name, "foo") self.assertEqual(m._name_get_count, 1) self.assertEqual(m._name_set_count, 2) m.name = "bar" self.assertEqual(m._name_get_count, 1) self.assertEqual(m._name_set_count, 3) self.assertEqual(m.name, "bar") self.assertEqual(m._name_get_count, 2) self.assertEqual(m._name_set_count, 3)
python
BSD-3-Clause
3201a895cba335000827b28768a7b7105c81b415
2026-01-04T14:38:15.489092Z
false
django/django
https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/signed_cookies_tests/__init__.py
tests/signed_cookies_tests/__init__.py
python
BSD-3-Clause
3201a895cba335000827b28768a7b7105c81b415
2026-01-04T14:38:15.489092Z
false
django/django
https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/signed_cookies_tests/tests.py
tests/signed_cookies_tests/tests.py
from datetime import timedelta from django.core import signing from django.http import HttpRequest, HttpResponse from django.test import SimpleTestCase, override_settings from django.test.utils import freeze_time class SignedCookieTest(SimpleTestCase): def test_can_set_and_read_signed_cookies(self): response = HttpResponse() response.set_signed_cookie("c", "hello") self.assertIn("c", response.cookies) self.assertTrue(response.cookies["c"].value.startswith("hello:")) request = HttpRequest() request.COOKIES["c"] = response.cookies["c"].value value = request.get_signed_cookie("c") self.assertEqual(value, "hello") def test_can_use_salt(self): response = HttpResponse() response.set_signed_cookie("a", "hello", salt="one") request = HttpRequest() request.COOKIES["a"] = response.cookies["a"].value value = request.get_signed_cookie("a", salt="one") self.assertEqual(value, "hello") with self.assertRaises(signing.BadSignature): request.get_signed_cookie("a", salt="two") def test_detects_tampering(self): response = HttpResponse() response.set_signed_cookie("c", "hello") request = HttpRequest() request.COOKIES["c"] = response.cookies["c"].value[:-2] + "$$" with self.assertRaises(signing.BadSignature): request.get_signed_cookie("c") def test_default_argument_suppresses_exceptions(self): response = HttpResponse() response.set_signed_cookie("c", "hello") request = HttpRequest() request.COOKIES["c"] = response.cookies["c"].value[:-2] + "$$" self.assertIsNone(request.get_signed_cookie("c", default=None)) def test_max_age_argument(self): value = "hello" with freeze_time(123456789): response = HttpResponse() response.set_signed_cookie("c", value) request = HttpRequest() request.COOKIES["c"] = response.cookies["c"].value self.assertEqual(request.get_signed_cookie("c"), value) with freeze_time(123456800): self.assertEqual(request.get_signed_cookie("c", max_age=12), value) self.assertEqual(request.get_signed_cookie("c", max_age=11), value) self.assertEqual( request.get_signed_cookie("c", 
max_age=timedelta(seconds=11)), value ) with self.assertRaises(signing.SignatureExpired): request.get_signed_cookie("c", max_age=10) with self.assertRaises(signing.SignatureExpired): request.get_signed_cookie("c", max_age=timedelta(seconds=10)) def test_set_signed_cookie_max_age_argument(self): response = HttpResponse() response.set_signed_cookie("c", "value", max_age=100) self.assertEqual(response.cookies["c"]["max-age"], 100) response.set_signed_cookie("d", "value", max_age=timedelta(hours=2)) self.assertEqual(response.cookies["d"]["max-age"], 7200) @override_settings(SECRET_KEY=b"\xe7") def test_signed_cookies_with_binary_key(self): response = HttpResponse() response.set_signed_cookie("c", "hello") request = HttpRequest() request.COOKIES["c"] = response.cookies["c"].value self.assertEqual(request.get_signed_cookie("c"), "hello")
python
BSD-3-Clause
3201a895cba335000827b28768a7b7105c81b415
2026-01-04T14:38:15.489092Z
false
django/django
https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/shell/models.py
tests/shell/models.py
from django.db import models class Marker(models.Model): pass class Phone(models.Model): name = models.CharField(max_length=50)
python
BSD-3-Clause
3201a895cba335000827b28768a7b7105c81b415
2026-01-04T14:38:15.489092Z
false
django/django
https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/shell/__init__.py
tests/shell/__init__.py
python
BSD-3-Clause
3201a895cba335000827b28768a7b7105c81b415
2026-01-04T14:38:15.489092Z
false
django/django
https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/shell/tests.py
tests/shell/tests.py
import os import subprocess import sys import unittest from unittest import mock from django import __version__ from django.conf import settings from django.contrib.auth.models import Group, Permission, User from django.contrib.contenttypes.models import ContentType from django.core.management import CommandError, call_command from django.core.management.commands import shell from django.db import connection, models, reset_queries from django.db.models import functions from django.test import SimpleTestCase from django.test.utils import captured_stdin, captured_stdout, override_settings from django.urls import resolve, reverse from django.utils import timezone from .models import Marker, Phone class ShellCommandTestCase(SimpleTestCase): script_globals = 'print("__name__" in globals() and "Phone" in globals())' script_with_inline_function = ( "import django\ndef f():\n print(django.__version__)\nf()" ) def test_command_option(self): with self.assertLogs("test", "INFO") as cm: with captured_stdout(): call_command( "shell", command=( "import django; from logging import getLogger; " 'getLogger("test").info(django.__version__)' ), ) self.assertEqual(cm.records[0].getMessage(), __version__) def test_command_option_globals(self): with captured_stdout() as stdout: call_command("shell", command=self.script_globals, verbosity=0) self.assertEqual(stdout.getvalue().strip(), "True") def test_command_option_inline_function_call(self): with captured_stdout() as stdout: call_command("shell", command=self.script_with_inline_function, verbosity=0) self.assertEqual(stdout.getvalue().strip(), __version__) @override_settings(INSTALLED_APPS=["shell"]) def test_no_settings(self): test_environ = os.environ.copy() if "DJANGO_SETTINGS_MODULE" in test_environ: del test_environ["DJANGO_SETTINGS_MODULE"] error = ( "Automatic imports are disabled since settings are not configured.\n" "DJANGO_SETTINGS_MODULE value is None.\n" "HINT: Ensure that the settings module is configured and set.\n\n" ) 
for verbosity, assertError in [ ("0", self.assertNotIn), ("1", self.assertIn), ("2", self.assertIn), ]: with self.subTest(verbosity=verbosity, get_auto_imports="models"): p = subprocess.run( [ sys.executable, "-m", "django", "shell", "-c", "print(globals())", "-v", verbosity, ], capture_output=True, env=test_environ, text=True, umask=-1, ) assertError(error, p.stdout) self.assertNotIn("Marker", p.stdout) self.assertNotIn("reset_queries", p.stdout) self.assertNotIn("imported automatically", p.stdout) with self.subTest(verbosity=verbosity, get_auto_imports="without-models"): with mock.patch( "django.core.management.commands.shell.Command.get_auto_imports", return_value=["django.urls.resolve"], ): p = subprocess.run( [ sys.executable, "-m", "django", "shell", "-c", "print(globals())", "-v", verbosity, ], capture_output=True, env=test_environ, text=True, umask=-1, ) assertError(error, p.stdout) self.assertNotIn("resolve", p.stdout) @unittest.skipIf( sys.platform == "win32", "Windows select() doesn't support file descriptors." 
) @mock.patch("django.core.management.commands.shell.select") def test_stdin_read(self, select): with captured_stdin() as stdin, captured_stdout() as stdout: stdin.write("print(100)\n") stdin.seek(0) call_command("shell", verbosity=0) self.assertEqual(stdout.getvalue().strip(), "100") @unittest.skipIf( sys.platform == "win32", "Windows select() doesn't support file descriptors.", ) @mock.patch("django.core.management.commands.shell.select") # [1] def test_stdin_read_globals(self, select): with captured_stdin() as stdin, captured_stdout() as stdout: stdin.write(self.script_globals) stdin.seek(0) call_command("shell", verbosity=0) self.assertEqual(stdout.getvalue().strip(), "True") @unittest.skipIf( sys.platform == "win32", "Windows select() doesn't support file descriptors.", ) @mock.patch("django.core.management.commands.shell.select") # [1] def test_stdin_read_inline_function_call(self, select): with captured_stdin() as stdin, captured_stdout() as stdout: stdin.write(self.script_with_inline_function) stdin.seek(0) call_command("shell", verbosity=0) self.assertEqual(stdout.getvalue().strip(), __version__) def test_ipython(self): cmd = shell.Command() mock_ipython = mock.Mock(start_ipython=mock.MagicMock()) options = {"verbosity": 0, "no_imports": False} with mock.patch.dict(sys.modules, {"IPython": mock_ipython}): cmd.ipython(options) self.assertEqual( mock_ipython.start_ipython.mock_calls, [mock.call(argv=[], user_ns=cmd.get_namespace(**options))], ) @mock.patch("django.core.management.commands.shell.select.select") # [1] @mock.patch.dict("sys.modules", {"IPython": None}) def test_shell_with_ipython_not_installed(self, select): select.return_value = ([], [], []) with self.assertRaisesMessage( CommandError, "Couldn't import ipython interface." 
): call_command("shell", interface="ipython") def test_bpython(self): cmd = shell.Command() mock_bpython = mock.Mock(embed=mock.MagicMock()) options = {"verbosity": 0, "no_imports": False} with mock.patch.dict(sys.modules, {"bpython": mock_bpython}): cmd.bpython(options) self.assertEqual( mock_bpython.embed.mock_calls, [mock.call(cmd.get_namespace(**options))] ) @mock.patch("django.core.management.commands.shell.select.select") # [1] @mock.patch.dict("sys.modules", {"bpython": None}) def test_shell_with_bpython_not_installed(self, select): select.return_value = ([], [], []) with self.assertRaisesMessage( CommandError, "Couldn't import bpython interface." ): call_command("shell", interface="bpython") def test_python(self): cmd = shell.Command() mock_code = mock.Mock(interact=mock.MagicMock()) options = {"verbosity": 0, "no_startup": True, "no_imports": False} with mock.patch.dict(sys.modules, {"code": mock_code}): cmd.python(options) self.assertEqual( mock_code.interact.mock_calls, [mock.call(local=cmd.get_namespace(**options))], ) # [1] Patch select to prevent tests failing when the test suite is run # in parallel mode. The tests are run in a subprocess and the subprocess's # stdin is closed and replaced by /dev/null. Reading from /dev/null always # returns EOF and so select always shows that sys.stdin is ready to read. # This causes problems because of the call to select.select() toward the # end of shell's handle() method. 
class ShellCommandAutoImportsTestCase(SimpleTestCase): @override_settings( INSTALLED_APPS=["shell", "django.contrib.auth", "django.contrib.contenttypes"] ) def test_get_namespace(self): namespace = shell.Command().get_namespace() self.assertEqual( namespace, { "settings": settings, "connection": connection, "reset_queries": reset_queries, "models": models, "functions": functions, "timezone": timezone, "Marker": Marker, "Phone": Phone, "ContentType": ContentType, "Group": Group, "Permission": Permission, "User": User, }, ) @override_settings(INSTALLED_APPS=[]) def test_get_namespace_default_imports(self): namespace = shell.Command().get_namespace() self.assertEqual( namespace, { "settings": settings, "connection": connection, "reset_queries": reset_queries, "models": models, "functions": functions, "timezone": timezone, }, ) @override_settings( INSTALLED_APPS=["model_forms", "contenttypes_tests", "forms_tests"] ) def test_get_namespace_precedence(self): # All of these apps define an `Article` model. The one defined first in # INSTALLED_APPS, takes precedence. 
import model_forms.models namespace = shell.Command().get_namespace() self.assertIs(namespace.get("Article"), model_forms.models.Article) @override_settings( INSTALLED_APPS=["shell", "django.contrib.auth", "django.contrib.contenttypes"] ) def test_get_namespace_overridden(self): class TestCommand(shell.Command): def get_auto_imports(self): return super().get_auto_imports() + [ "django.urls.reverse", "django.urls.resolve", ] namespace = TestCommand().get_namespace() self.assertEqual( namespace, { "resolve": resolve, "reverse": reverse, "settings": settings, "connection": connection, "reset_queries": reset_queries, "models": models, "functions": functions, "timezone": timezone, "Marker": Marker, "Phone": Phone, "ContentType": ContentType, "Group": Group, "Permission": Permission, "User": User, }, ) @override_settings( INSTALLED_APPS=["shell", "django.contrib.auth", "django.contrib.contenttypes"] ) def test_no_imports_flag(self): for verbosity in (0, 1, 2, 3): with self.subTest(verbosity=verbosity), captured_stdout() as stdout: namespace = shell.Command().get_namespace( verbosity=verbosity, no_imports=True ) self.assertEqual(namespace, {}) self.assertEqual(stdout.getvalue().strip(), "") @override_settings( INSTALLED_APPS=["shell", "django.contrib.auth", "django.contrib.contenttypes"] ) def test_verbosity_zero(self): with captured_stdout() as stdout: cmd = shell.Command() namespace = cmd.get_namespace(verbosity=0) self.assertEqual(len(namespace), len(cmd.get_auto_imports())) self.assertEqual(stdout.getvalue().strip(), "") @override_settings( INSTALLED_APPS=["shell", "django.contrib.auth", "django.contrib.contenttypes"] ) def test_verbosity_one(self): with captured_stdout() as stdout: cmd = shell.Command() namespace = cmd.get_namespace(verbosity=1) self.assertEqual(len(namespace), len(cmd.get_auto_imports())) self.assertEqual( stdout.getvalue().strip(), "12 objects imported automatically (use -v 2 for details).", ) @override_settings(INSTALLED_APPS=["shell", 
"django.contrib.contenttypes"]) @mock.patch.dict(sys.modules, {"isort": None}) def test_message_with_stdout_listing_objects_with_isort_not_installed(self): class TestCommand(shell.Command): def get_auto_imports(self): # Include duplicate import strings to ensure proper handling, # independent of isort's deduplication (#36252). return super().get_auto_imports() + [ "django.urls.reverse", "django.urls.resolve", "shell", "django", "django.urls.reverse", "shell", "django", ] with captured_stdout() as stdout: TestCommand().get_namespace(verbosity=2) self.assertEqual( stdout.getvalue().strip(), "13 objects imported automatically:\n\n" " import shell\n" " import django\n" " from django.conf import settings\n" " from django.db import connection, models, reset_queries\n" " from django.db.models import functions\n" " from django.utils import timezone\n" " from django.contrib.contenttypes.models import ContentType\n" " from shell.models import Phone, Marker\n" " from django.urls import reverse, resolve", ) def test_message_with_stdout_one_object(self): class TestCommand(shell.Command): def get_auto_imports(self): return ["django.db.connection"] with captured_stdout() as stdout: TestCommand().get_namespace(verbosity=2) cases = { 0: "", 1: "1 object imported automatically (use -v 2 for details).", 2: ( "1 object imported automatically:\n\n" " from django.db import connection" ), } for verbosity, expected in cases.items(): with self.subTest(verbosity=verbosity): with captured_stdout() as stdout: TestCommand().get_namespace(verbosity=verbosity) self.assertEqual(stdout.getvalue().strip(), expected) def test_message_with_stdout_zero_object(self): class TestCommand(shell.Command): def get_auto_imports(self): return [] with captured_stdout() as stdout: TestCommand().get_namespace(verbosity=2) cases = { 0: "", 1: "0 objects imported automatically.", 2: "0 objects imported automatically.", } for verbosity, expected in cases.items(): with self.subTest(verbosity=verbosity): with 
captured_stdout() as stdout: TestCommand().get_namespace(verbosity=verbosity) self.assertEqual(stdout.getvalue().strip(), expected) @override_settings(INSTALLED_APPS=[]) def test_message_with_stdout_no_installed_apps(self): cases = { 0: "", 1: "6 objects imported automatically (use -v 2 for details).", 2: "6 objects imported automatically:\n\n" " from django.conf import settings\n" " from django.db import connection, models, reset_queries\n" " from django.db.models import functions\n" " from django.utils import timezone", } for verbosity, expected in cases.items(): with self.subTest(verbosity=verbosity): with captured_stdout() as stdout: shell.Command().get_namespace(verbosity=verbosity) self.assertEqual(stdout.getvalue().strip(), expected) def test_message_with_stdout_overriden_none_result(self): class TestCommand(shell.Command): def get_auto_imports(self): return None for verbosity in [0, 1, 2]: with self.subTest(verbosity=verbosity): with captured_stdout() as stdout: result = TestCommand().get_namespace(verbosity=verbosity) self.assertEqual(result, {}) self.assertEqual(stdout.getvalue().strip(), "") @override_settings(INSTALLED_APPS=["shell", "django.contrib.contenttypes"]) def test_message_with_stdout_listing_objects_with_isort(self): sorted_imports = ( " from shell.models import Marker, Phone\n\n" " from django.db import connection, models, reset_queries\n" " from django.db.models import functions\n" " from django.contrib.contenttypes.models import ContentType\n" " from django.conf import settings\n" " from django.utils import timezone" ) mock_isort_code = mock.Mock(code=mock.MagicMock(return_value=sorted_imports)) class TestCommand(shell.Command): def get_auto_imports(self): return super().get_auto_imports() + [ "django.urls.reverse", "django.urls.resolve", "django", ] with ( mock.patch.dict(sys.modules, {"isort": mock_isort_code}), captured_stdout() as stdout, ): TestCommand().get_namespace(verbosity=2) self.assertEqual( stdout.getvalue().strip(), "12 
objects imported automatically:\n\n" + sorted_imports, ) def test_override_get_auto_imports(self): class TestCommand(shell.Command): def get_auto_imports(self): return [ "model_forms", "shell", "does.not.exist", "doesntexisteither", ] with captured_stdout() as stdout: TestCommand().get_namespace(verbosity=2) expected = ( "2 objects could not be automatically imported:\n\n" " does.not.exist\n" " doesntexisteither\n\n" "2 objects imported automatically:\n\n" " import model_forms\n" " import shell\n\n" ) self.assertEqual(stdout.getvalue(), expected) def test_override_get_auto_imports_one_error(self): class TestCommand(shell.Command): def get_auto_imports(self): return [ "foo", ] expected = ( "1 object could not be automatically imported:\n\n foo\n\n" "0 objects imported automatically.\n\n" ) for verbosity, expected in [(0, ""), (1, expected), (2, expected)]: with self.subTest(verbosity=verbosity): with captured_stdout() as stdout: TestCommand().get_namespace(verbosity=verbosity) self.assertEqual(stdout.getvalue(), expected) def test_override_get_auto_imports_many_errors(self): class TestCommand(shell.Command): def get_auto_imports(self): return [ "does.not.exist", "doesntexisteither", ] expected = ( "2 objects could not be automatically imported:\n\n" " does.not.exist\n" " doesntexisteither\n\n" "0 objects imported automatically.\n\n" ) for verbosity, expected in [(0, ""), (1, expected), (2, expected)]: with self.subTest(verbosity=verbosity): with captured_stdout() as stdout: TestCommand().get_namespace(verbosity=verbosity) self.assertEqual(stdout.getvalue(), expected)
python
BSD-3-Clause
3201a895cba335000827b28768a7b7105c81b415
2026-01-04T14:38:15.489092Z
false
django/django
https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/schema/test_logging.py
tests/schema/test_logging.py
from django.db import connection from django.test import TestCase class SchemaLoggerTests(TestCase): def test_extra_args(self): editor = connection.schema_editor(collect_sql=True) sql = "SELECT * FROM foo WHERE id in (%s, %s)" params = [42, 1337] with self.assertLogs("django.db.backends.schema", "DEBUG") as cm: editor.execute(sql, params) if connection.features.schema_editor_uses_clientside_param_binding: sql = "SELECT * FROM foo WHERE id in (42, 1337)" params = None self.assertEqual(cm.records[0].sql, sql) self.assertEqual(cm.records[0].params, params) self.assertEqual(cm.records[0].getMessage(), f"{sql}; (params {params})")
python
BSD-3-Clause
3201a895cba335000827b28768a7b7105c81b415
2026-01-04T14:38:15.489092Z
false
django/django
https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/schema/models.py
tests/schema/models.py
from django.apps.registry import Apps from django.db import models # Because we want to test creation and deletion of these as separate things, # these models are all inserted into a separate Apps so the main test # runner doesn't migrate them. new_apps = Apps() class Author(models.Model): name = models.CharField(max_length=255) height = models.PositiveIntegerField(null=True, blank=True) weight = models.IntegerField(null=True, blank=True) uuid = models.UUIDField(null=True) class Meta: apps = new_apps class AuthorCharFieldWithIndex(models.Model): char_field = models.CharField(max_length=31, db_index=True) class Meta: apps = new_apps class AuthorTextFieldWithIndex(models.Model): text_field = models.TextField(db_index=True) class Meta: apps = new_apps class AuthorWithDefaultHeight(models.Model): name = models.CharField(max_length=255) height = models.PositiveIntegerField(null=True, blank=True, default=42) class Meta: apps = new_apps class AuthorWithEvenLongerName(models.Model): name = models.CharField(max_length=255) height = models.PositiveIntegerField(null=True, blank=True) class Meta: apps = new_apps class AuthorWithIndexedName(models.Model): name = models.CharField(max_length=255, db_index=True) class Meta: apps = new_apps class AuthorWithUniqueName(models.Model): name = models.CharField(max_length=255, unique=True) class Meta: apps = new_apps class AuthorWithUniqueNameAndBirthday(models.Model): name = models.CharField(max_length=255) birthday = models.DateField() class Meta: apps = new_apps unique_together = [["name", "birthday"]] class Book(models.Model): author = models.ForeignKey(Author, models.CASCADE) title = models.CharField(max_length=100, db_index=True) pub_date = models.DateTimeField() # tags = models.ManyToManyField("Tag", related_name="books") class Meta: apps = new_apps class BookWeak(models.Model): author = models.ForeignKey(Author, models.CASCADE, db_constraint=False) title = models.CharField(max_length=100, db_index=True) pub_date = 
models.DateTimeField() class Meta: apps = new_apps class BookWithLongName(models.Model): author_foreign_key_with_really_long_field_name = models.ForeignKey( AuthorWithEvenLongerName, models.CASCADE, ) class Meta: apps = new_apps class BookWithO2O(models.Model): author = models.OneToOneField(Author, models.CASCADE) title = models.CharField(max_length=100, db_index=True) pub_date = models.DateTimeField() class Meta: apps = new_apps db_table = "schema_book" class BookWithSlug(models.Model): author = models.ForeignKey(Author, models.CASCADE) title = models.CharField(max_length=100, db_index=True) pub_date = models.DateTimeField() slug = models.CharField(max_length=20, unique=True) class Meta: apps = new_apps db_table = "schema_book" class BookWithoutAuthor(models.Model): title = models.CharField(max_length=100, db_index=True) pub_date = models.DateTimeField() class Meta: apps = new_apps db_table = "schema_book" class BookForeignObj(models.Model): title = models.CharField(max_length=100, db_index=True) author_id = models.IntegerField() class Meta: apps = new_apps class IntegerPK(models.Model): i = models.IntegerField(primary_key=True) j = models.IntegerField(unique=True) class Meta: apps = new_apps db_table = "INTEGERPK" # uppercase to ensure proper quoting class Note(models.Model): info = models.TextField() address = models.TextField(null=True) class Meta: apps = new_apps class NoteRename(models.Model): detail_info = models.TextField() class Meta: apps = new_apps db_table = "schema_note" class Tag(models.Model): title = models.CharField(max_length=255) slug = models.SlugField(unique=True) class Meta: apps = new_apps class TagM2MTest(models.Model): title = models.CharField(max_length=255) slug = models.SlugField(unique=True) class Meta: apps = new_apps class TagUniqueRename(models.Model): title = models.CharField(max_length=255) slug2 = models.SlugField(unique=True) class Meta: apps = new_apps db_table = "schema_tag" # Based on tests/reserved_names/models.py class 
Thing(models.Model): when = models.CharField(max_length=1, primary_key=True) class Meta: apps = new_apps db_table = "drop" def __str__(self): return self.when class UniqueTest(models.Model): year = models.IntegerField() slug = models.SlugField(unique=False) class Meta: apps = new_apps unique_together = ["year", "slug"] class Node(models.Model): node_id = models.AutoField(primary_key=True) parent = models.ForeignKey("self", models.CASCADE, null=True, blank=True) class Meta: apps = new_apps
python
BSD-3-Clause
3201a895cba335000827b28768a7b7105c81b415
2026-01-04T14:38:15.489092Z
false
django/django
https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/schema/fields.py
tests/schema/fields.py
from functools import partial from django.db import models from django.db.models.fields.related import ( RECURSIVE_RELATIONSHIP_CONSTANT, ManyToManyDescriptor, RelatedField, create_many_to_many_intermediary_model, ) class CustomManyToManyField(RelatedField): """ Ticket #24104 - Need to have a custom ManyToManyField, which is not an inheritor of ManyToManyField. """ many_to_many = True def __init__( self, to, db_constraint=True, swappable=True, related_name=None, related_query_name=None, limit_choices_to=None, symmetrical=None, through=None, through_fields=None, db_table=None, **kwargs, ): try: to._meta except AttributeError: to = str(to) kwargs["rel"] = models.ManyToManyRel( self, to, related_name=related_name, related_query_name=related_query_name, limit_choices_to=limit_choices_to, symmetrical=( symmetrical if symmetrical is not None else (to == RECURSIVE_RELATIONSHIP_CONSTANT) ), through=through, through_fields=through_fields, db_constraint=db_constraint, ) self.swappable = swappable self.db_table = db_table if kwargs["rel"].through is not None and self.db_table is not None: raise ValueError( "Cannot specify a db_table if an intermediary model is used." 
) super().__init__( related_name=related_name, related_query_name=related_query_name, limit_choices_to=limit_choices_to, **kwargs, ) def contribute_to_class(self, cls, name, **kwargs): if self.remote_field.symmetrical and ( self.remote_field.model == "self" or self.remote_field.model == cls._meta.object_name ): self.remote_field.related_name = "%s_rel_+" % name super().contribute_to_class(cls, name, **kwargs) if ( not self.remote_field.through and not cls._meta.abstract and not cls._meta.swapped ): self.remote_field.through = create_many_to_many_intermediary_model( self, cls ) setattr(cls, self.name, ManyToManyDescriptor(self.remote_field)) self.m2m_db_table = partial(self._get_m2m_db_table, cls._meta) def get_internal_type(self): return "ManyToManyField" # Copy those methods from ManyToManyField because they don't call super() # internally contribute_to_related_class = models.ManyToManyField.__dict__[ "contribute_to_related_class" ] _get_m2m_attr = models.ManyToManyField.__dict__["_get_m2m_attr"] _get_m2m_reverse_attr = models.ManyToManyField.__dict__["_get_m2m_reverse_attr"] _get_m2m_db_table = models.ManyToManyField.__dict__["_get_m2m_db_table"] class InheritedManyToManyField(models.ManyToManyField): pass class MediumBlobField(models.BinaryField): """ A MySQL BinaryField that uses a different blob size. """ def db_type(self, connection): return "MEDIUMBLOB"
python
BSD-3-Clause
3201a895cba335000827b28768a7b7105c81b415
2026-01-04T14:38:15.489092Z
false
django/django
https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/schema/__init__.py
tests/schema/__init__.py
python
BSD-3-Clause
3201a895cba335000827b28768a7b7105c81b415
2026-01-04T14:38:15.489092Z
false
django/django
https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/schema/tests.py
tests/schema/tests.py
import datetime import itertools import unittest from copy import copy from decimal import Decimal from unittest import mock from django.core.exceptions import FieldError from django.core.management.color import no_style from django.core.serializers.json import DjangoJSONEncoder from django.db import ( DatabaseError, DataError, IntegrityError, OperationalError, connection, ) from django.db.backends.utils import truncate_name from django.db.models import ( CASCADE, DB_CASCADE, DB_SET_NULL, PROTECT, AutoField, BigAutoField, BigIntegerField, BinaryField, BooleanField, CharField, CheckConstraint, DateField, DateTimeField, DecimalField, DurationField, F, FloatField, ForeignKey, ForeignObject, GeneratedField, Index, IntegerField, JSONField, ManyToManyField, Model, OneToOneField, OrderBy, PositiveIntegerField, Q, SlugField, SmallAutoField, SmallIntegerField, TextField, TimeField, UniqueConstraint, UUIDField, Value, ) from django.db.models.fields.json import KT, KeyTextTransform from django.db.models.functions import ( Abs, Cast, Collate, Concat, Lower, Random, Round, Upper, ) from django.db.models.indexes import IndexExpression from django.db.transaction import TransactionManagementError, atomic from django.test import TransactionTestCase, skipIfDBFeature, skipUnlessDBFeature from django.test.utils import CaptureQueriesContext, isolate_apps, register_lookup from .fields import CustomManyToManyField, InheritedManyToManyField, MediumBlobField from .models import ( Author, AuthorCharFieldWithIndex, AuthorTextFieldWithIndex, AuthorWithDefaultHeight, AuthorWithEvenLongerName, AuthorWithIndexedName, AuthorWithUniqueName, AuthorWithUniqueNameAndBirthday, Book, BookForeignObj, BookWeak, BookWithLongName, BookWithO2O, BookWithoutAuthor, BookWithSlug, IntegerPK, Node, Note, NoteRename, Tag, TagM2MTest, TagUniqueRename, Thing, UniqueTest, new_apps, ) class SchemaTests(TransactionTestCase): """ Tests for the schema-alteration code. 
Be aware that these tests are more liable than most to false results, as sometimes the code to check if a test has worked is almost as complex as the code it is testing. """ available_apps = [] models = [ Author, AuthorCharFieldWithIndex, AuthorTextFieldWithIndex, AuthorWithDefaultHeight, AuthorWithEvenLongerName, Book, BookWeak, BookWithLongName, BookWithO2O, BookWithSlug, IntegerPK, Node, Note, Tag, TagM2MTest, TagUniqueRename, Thing, UniqueTest, ] # Utility functions def setUp(self): # local_models should contain test dependent model classes that will be # automatically removed from the app cache on test tear down. self.local_models = [] # isolated_local_models contains models that are in test methods # decorated with @isolate_apps. self.isolated_local_models = [] def tearDown(self): # Delete any tables made for our models self.delete_tables() new_apps.clear_cache() for model in new_apps.get_models(): model._meta._expire_cache() if "schema" in new_apps.all_models: for model in self.local_models: for many_to_many in model._meta.many_to_many: through = many_to_many.remote_field.through if through and through._meta.auto_created: del new_apps.all_models["schema"][through._meta.model_name] del new_apps.all_models["schema"][model._meta.model_name] if self.isolated_local_models: with connection.schema_editor() as editor: for model in self.isolated_local_models: editor.delete_model(model) def delete_tables(self): "Deletes all model tables for our models for a clean test environment" converter = connection.introspection.identifier_converter with connection.schema_editor() as editor: connection.disable_constraint_checking() table_names = connection.introspection.table_names() if connection.features.ignores_table_name_case: table_names = [table_name.lower() for table_name in table_names] for model in itertools.chain(SchemaTests.models, self.local_models): tbl = converter(model._meta.db_table) if connection.features.ignores_table_name_case: tbl = tbl.lower() if tbl in 
table_names: editor.delete_model(model) table_names.remove(tbl) connection.enable_constraint_checking() def column_classes(self, model): with connection.cursor() as cursor: columns = { d[0]: (connection.introspection.get_field_type(d[1], d), d) for d in connection.introspection.get_table_description( cursor, model._meta.db_table, ) } # SQLite has a different format for field_type for name, (type, desc) in columns.items(): if isinstance(type, tuple): columns[name] = (type[0], desc) return columns def get_primary_key(self, table): with connection.cursor() as cursor: return connection.introspection.get_primary_key_column(cursor, table) def get_indexes(self, table): """ Get the indexes on the table using a new cursor. """ with connection.cursor() as cursor: return [ c["columns"][0] for c in connection.introspection.get_constraints( cursor, table ).values() if c["index"] and len(c["columns"]) == 1 ] def get_uniques(self, table): with connection.cursor() as cursor: return [ c["columns"][0] for c in connection.introspection.get_constraints( cursor, table ).values() if c["unique"] and len(c["columns"]) == 1 ] def get_constraints(self, table): """ Get the constraints on a table using a new cursor. 
""" with connection.cursor() as cursor: return connection.introspection.get_constraints(cursor, table) def get_constraints_for_column(self, model, column_name): constraints = self.get_constraints(model._meta.db_table) constraints_for_column = [] for name, details in constraints.items(): if details["columns"] == [column_name]: constraints_for_column.append(name) return sorted(constraints_for_column) def get_constraint_opclasses(self, constraint_name): with connection.cursor() as cursor: sql = """ SELECT opcname FROM pg_opclass AS oc JOIN pg_index as i on oc.oid = ANY(i.indclass) JOIN pg_class as c on c.oid = i.indexrelid WHERE c.relname = %s """ cursor.execute(sql, [constraint_name]) return [row[0] for row in cursor.fetchall()] def check_added_field_default( self, schema_editor, model, field, field_name, expected_default, cast_function=None, ): with connection.cursor() as cursor: schema_editor.add_field(model, field) cursor.execute( "SELECT {} FROM {};".format(field_name, model._meta.db_table) ) database_default = cursor.fetchall()[0][0] if cast_function and type(database_default) is not type(expected_default): database_default = cast_function(database_default) self.assertEqual(database_default, expected_default) def get_constraints_count(self, table, column, fk_to): """ Return a dict with keys 'fks', 'uniques, and 'indexes' indicating the number of foreign keys, unique constraints, and indexes on `table`.`column`. The `fk_to` argument is a 2-tuple specifying the expected foreign key relationship's (table, column). 
""" with connection.cursor() as cursor: constraints = connection.introspection.get_constraints(cursor, table) counts = {"fks": 0, "uniques": 0, "indexes": 0} for c in constraints.values(): if c["columns"] == [column]: if c["foreign_key"] == fk_to: counts["fks"] += 1 if c["unique"]: counts["uniques"] += 1 elif c["index"]: counts["indexes"] += 1 return counts def get_column_collation(self, table, column): with connection.cursor() as cursor: return next( f.collation for f in connection.introspection.get_table_description(cursor, table) if f.name == column ) def get_column_comment(self, table, column): with connection.cursor() as cursor: return next( f.comment for f in connection.introspection.get_table_description(cursor, table) if f.name == column ) def get_table_comment(self, table): with connection.cursor() as cursor: return next( t.comment for t in connection.introspection.get_table_list(cursor) if t.name == table ) def assert_column_comment_not_exists(self, table, column): with connection.cursor() as cursor: columns = connection.introspection.get_table_description(cursor, table) self.assertFalse(any([c.name == column and c.comment for c in columns])) def assertIndexOrder(self, table, index, order): constraints = self.get_constraints(table) self.assertIn(index, constraints) index_orders = constraints[index]["orders"] self.assertTrue( all(val == expected for val, expected in zip(index_orders, order)) ) def assertForeignKeyExists(self, model, column, expected_fk_table, field="id"): """ Fail if the FK constraint on `model.Meta.db_table`.`column` to `expected_fk_table`.id doesn't exist. 
""" if not connection.features.can_introspect_foreign_keys: return constraints = self.get_constraints(model._meta.db_table) constraint_fk = None for details in constraints.values(): if details["columns"] == [column] and details["foreign_key"]: constraint_fk = details["foreign_key"] break self.assertEqual(constraint_fk, (expected_fk_table, field)) def assertForeignKeyNotExists(self, model, column, expected_fk_table): if not connection.features.can_introspect_foreign_keys: return with self.assertRaises(AssertionError): self.assertForeignKeyExists(model, column, expected_fk_table) # Tests def test_creation_deletion(self): """ Tries creating a model's table, and then deleting it. """ with connection.schema_editor() as editor: # Create the table editor.create_model(Author) # The table is there list(Author.objects.all()) # Clean up that table editor.delete_model(Author) # No deferred SQL should be left over. self.assertEqual(editor.deferred_sql, []) # The table is gone with self.assertRaises(DatabaseError): list(Author.objects.all()) @skipUnlessDBFeature("supports_foreign_keys") def test_fk(self): "Creating tables out of FK order, then repointing, works" # Create the table with connection.schema_editor() as editor: editor.create_model(Book) editor.create_model(Author) editor.create_model(Tag) # Initial tables are there list(Author.objects.all()) list(Book.objects.all()) # Make sure the FK constraint is present with self.assertRaises(IntegrityError): Book.objects.create( author_id=1, title="Much Ado About Foreign Keys", pub_date=datetime.datetime.now(), ) # Repoint the FK constraint old_field = Book._meta.get_field("author") new_field = ForeignKey(Tag, CASCADE) new_field.set_attributes_from_name("author") with connection.schema_editor() as editor: editor.alter_field(Book, old_field, new_field, strict=True) self.assertForeignKeyExists(Book, "author_id", "schema_tag") @skipUnlessDBFeature("can_create_inline_fk") def test_inline_fk(self): # Create some tables. 
with connection.schema_editor() as editor: editor.create_model(Author) editor.create_model(Book) editor.create_model(Note) self.assertForeignKeyNotExists(Note, "book_id", "schema_book") # Add a foreign key from one to the other. with connection.schema_editor() as editor: new_field = ForeignKey(Book, CASCADE) new_field.set_attributes_from_name("book") editor.add_field(Note, new_field) self.assertForeignKeyExists(Note, "book_id", "schema_book") # Creating a FK field with a constraint uses a single statement without # a deferred ALTER TABLE. self.assertFalse( [ sql for sql in (str(statement) for statement in editor.deferred_sql) if sql.startswith("ALTER TABLE") and "ADD CONSTRAINT" in sql ] ) @skipUnlessDBFeature("can_create_inline_fk", "supports_on_delete_db_cascade") def test_inline_fk_db_on_delete(self): with connection.schema_editor() as editor: editor.create_model(Author) editor.create_model(Book) editor.create_model(Note) self.assertForeignKeyNotExists(Note, "book_id", "schema_book") # Add a foreign key from model to the other. with ( CaptureQueriesContext(connection) as ctx, connection.schema_editor() as editor, ): new_field = ForeignKey(Book, DB_CASCADE) new_field.set_attributes_from_name("book") editor.add_field(Note, new_field) self.assertForeignKeyExists(Note, "book_id", "schema_book") # Creating a FK field with a constraint uses a single statement without # a deferred ALTER TABLE. self.assertFalse( [ sql for sql in (str(statement) for statement in editor.deferred_sql) if sql.startswith("ALTER TABLE") and "ADD CONSTRAINT" in sql ] ) # ON DELETE clause is used. self.assertTrue( any( capture_query["sql"].startswith("ALTER TABLE") and "ON DELETE" in capture_query["sql"] for capture_query in ctx.captured_queries ) ) @skipUnlessDBFeature("can_create_inline_fk") def test_add_inline_fk_update_data(self): with connection.schema_editor() as editor: editor.create_model(Node) # Add an inline foreign key and update data in the same transaction. 
new_field = ForeignKey(Node, CASCADE, related_name="new_fk", null=True) new_field.set_attributes_from_name("new_parent_fk") parent = Node.objects.create() with connection.schema_editor() as editor: editor.add_field(Node, new_field) editor.execute("UPDATE schema_node SET new_parent_fk_id = %s;", [parent.pk]) assertIndex = ( self.assertIn if connection.features.indexes_foreign_keys else self.assertNotIn ) assertIndex("new_parent_fk_id", self.get_indexes(Node._meta.db_table)) @skipUnlessDBFeature( "can_create_inline_fk", "allows_multiple_constraints_on_same_fields", ) @isolate_apps("schema") def test_add_inline_fk_index_update_data(self): class Node(Model): class Meta: app_label = "schema" with connection.schema_editor() as editor: editor.create_model(Node) # Add an inline foreign key, update data, and an index in the same # transaction. new_field = ForeignKey(Node, CASCADE, related_name="new_fk", null=True) new_field.set_attributes_from_name("new_parent_fk") parent = Node.objects.create() with connection.schema_editor() as editor: editor.add_field(Node, new_field) Node._meta.add_field(new_field) editor.execute("UPDATE schema_node SET new_parent_fk_id = %s;", [parent.pk]) editor.add_index( Node, Index(fields=["new_parent_fk"], name="new_parent_inline_fk_idx") ) self.assertIn("new_parent_fk_id", self.get_indexes(Node._meta.db_table)) @skipUnlessDBFeature("supports_foreign_keys") def test_char_field_with_db_index_to_fk(self): # Create the table with connection.schema_editor() as editor: editor.create_model(Author) editor.create_model(AuthorCharFieldWithIndex) # Change CharField to FK old_field = AuthorCharFieldWithIndex._meta.get_field("char_field") new_field = ForeignKey(Author, CASCADE, blank=True) new_field.set_attributes_from_name("char_field") with connection.schema_editor() as editor: editor.alter_field( AuthorCharFieldWithIndex, old_field, new_field, strict=True ) self.assertForeignKeyExists( AuthorCharFieldWithIndex, "char_field_id", "schema_author" ) 
@skipUnlessDBFeature("supports_foreign_keys") @skipUnlessDBFeature("supports_index_on_text_field") def test_text_field_with_db_index_to_fk(self): # Create the table with connection.schema_editor() as editor: editor.create_model(Author) editor.create_model(AuthorTextFieldWithIndex) # Change TextField to FK old_field = AuthorTextFieldWithIndex._meta.get_field("text_field") new_field = ForeignKey(Author, CASCADE, blank=True) new_field.set_attributes_from_name("text_field") with connection.schema_editor() as editor: editor.alter_field( AuthorTextFieldWithIndex, old_field, new_field, strict=True ) self.assertForeignKeyExists( AuthorTextFieldWithIndex, "text_field_id", "schema_author" ) @isolate_apps("schema") def test_char_field_pk_to_auto_field(self): class Foo(Model): id = CharField(max_length=255, primary_key=True) class Meta: app_label = "schema" with connection.schema_editor() as editor: editor.create_model(Foo) self.isolated_local_models = [Foo] old_field = Foo._meta.get_field("id") new_field = AutoField(primary_key=True) new_field.set_attributes_from_name("id") new_field.model = Foo with connection.schema_editor() as editor: editor.alter_field(Foo, old_field, new_field, strict=True) @skipUnlessDBFeature("supports_foreign_keys") def test_fk_to_proxy(self): "Creating a FK to a proxy model creates database constraints." 
class AuthorProxy(Author): class Meta: app_label = "schema" apps = new_apps proxy = True class AuthorRef(Model): author = ForeignKey(AuthorProxy, on_delete=CASCADE) class Meta: app_label = "schema" apps = new_apps self.local_models = [AuthorProxy, AuthorRef] # Create the table with connection.schema_editor() as editor: editor.create_model(Author) editor.create_model(AuthorRef) self.assertForeignKeyExists(AuthorRef, "author_id", "schema_author") @skipUnlessDBFeature("supports_foreign_keys", "can_introspect_foreign_keys") def test_fk_db_constraint(self): "The db_constraint parameter is respected" # Create the table with connection.schema_editor() as editor: editor.create_model(Tag) editor.create_model(Author) editor.create_model(BookWeak) # Initial tables are there list(Author.objects.all()) list(Tag.objects.all()) list(BookWeak.objects.all()) self.assertForeignKeyNotExists(BookWeak, "author_id", "schema_author") # Make a db_constraint=False FK new_field = ForeignKey(Tag, CASCADE, db_constraint=False) new_field.set_attributes_from_name("tag") with connection.schema_editor() as editor: editor.add_field(Author, new_field) self.assertForeignKeyNotExists(Author, "tag_id", "schema_tag") # Alter to one with a constraint new_field2 = ForeignKey(Tag, CASCADE) new_field2.set_attributes_from_name("tag") with connection.schema_editor() as editor: editor.alter_field(Author, new_field, new_field2, strict=True) self.assertForeignKeyExists(Author, "tag_id", "schema_tag") # Alter to one without a constraint again new_field2 = ForeignKey(Tag, CASCADE) new_field2.set_attributes_from_name("tag") with connection.schema_editor() as editor: editor.alter_field(Author, new_field2, new_field, strict=True) self.assertForeignKeyNotExists(Author, "tag_id", "schema_tag") @skipUnlessDBFeature( "supports_foreign_keys", "can_introspect_foreign_keys", "supports_on_delete_db_cascade", ) def test_fk_alter_on_delete(self): with connection.schema_editor() as editor: editor.create_model(Author) 
editor.create_model(Book) self.assertForeignKeyExists(Book, "author_id", "schema_author") # Change CASCADE to DB_CASCADE. old_field = Book._meta.get_field("author") new_field = ForeignKey(Author, DB_CASCADE) new_field.set_attributes_from_name("author") with ( connection.schema_editor() as editor, CaptureQueriesContext(connection) as ctx, ): editor.alter_field(Book, old_field, new_field) self.assertForeignKeyExists(Book, "author_id", "schema_author") self.assertIs( any("ON DELETE" in query["sql"] for query in ctx.captured_queries), True ) # Change DB_CASCADE to CASCADE. old_field = new_field new_field = ForeignKey(Author, CASCADE) new_field.set_attributes_from_name("author") with ( connection.schema_editor() as editor, CaptureQueriesContext(connection) as ctx, ): editor.alter_field(Book, old_field, new_field) self.assertForeignKeyExists(Book, "author_id", "schema_author") self.assertIs( any("ON DELETE" in query["sql"] for query in ctx.captured_queries), False ) @isolate_apps("schema") @skipUnlessDBFeature("supports_foreign_keys", "can_introspect_foreign_keys") def test_create_model_db_on_delete(self): class Parent(Model): class Meta: app_label = "schema" class Child(Model): parent_fk = ForeignKey(Parent, DB_SET_NULL, null=True) class Meta: app_label = "schema" with connection.schema_editor() as editor: editor.create_model(Parent) with CaptureQueriesContext(connection) as ctx: with connection.schema_editor() as editor: editor.create_model(Child) self.assertForeignKeyNotExists(Child, "parent_id", "schema_parent") self.assertIs( any("ON DELETE" in query["sql"] for query in ctx.captured_queries), True ) @isolate_apps("schema") def test_no_db_constraint_added_during_primary_key_change(self): """ When a primary key that's pointed to by a ForeignKey with db_constraint=False is altered, a foreign key constraint isn't added. 
""" class Author(Model): class Meta: app_label = "schema" class BookWeak(Model): author = ForeignKey(Author, CASCADE, db_constraint=False) class Meta: app_label = "schema" with connection.schema_editor() as editor: editor.create_model(Author) editor.create_model(BookWeak) self.assertForeignKeyNotExists(BookWeak, "author_id", "schema_author") old_field = Author._meta.get_field("id") new_field = BigAutoField(primary_key=True) new_field.model = Author new_field.set_attributes_from_name("id") # @isolate_apps() and inner models are needed to have the model # relations populated, otherwise this doesn't act as a regression test. self.assertEqual(len(new_field.model._meta.related_objects), 1) with connection.schema_editor() as editor: editor.alter_field(Author, old_field, new_field, strict=True) self.assertForeignKeyNotExists(BookWeak, "author_id", "schema_author") def _test_m2m_db_constraint(self, M2MFieldClass): class LocalAuthorWithM2M(Model): name = CharField(max_length=255) class Meta: app_label = "schema" apps = new_apps self.local_models = [LocalAuthorWithM2M] # Create the table with connection.schema_editor() as editor: editor.create_model(Tag) editor.create_model(LocalAuthorWithM2M) # Initial tables are there list(LocalAuthorWithM2M.objects.all()) list(Tag.objects.all()) # Make a db_constraint=False FK new_field = M2MFieldClass(Tag, related_name="authors", db_constraint=False) new_field.contribute_to_class(LocalAuthorWithM2M, "tags") # Add the field with connection.schema_editor() as editor: editor.add_field(LocalAuthorWithM2M, new_field) self.assertForeignKeyNotExists( new_field.remote_field.through, "tag_id", "schema_tag" ) @skipUnlessDBFeature("supports_foreign_keys") def test_m2m_db_constraint(self): self._test_m2m_db_constraint(ManyToManyField) @skipUnlessDBFeature("supports_foreign_keys") def test_m2m_db_constraint_custom(self): self._test_m2m_db_constraint(CustomManyToManyField) @skipUnlessDBFeature("supports_foreign_keys") def 
test_m2m_db_constraint_inherited(self): self._test_m2m_db_constraint(InheritedManyToManyField) def test_add_field(self): """ Tests adding fields to models """ # Create the table with connection.schema_editor() as editor: editor.create_model(Author) # Ensure there's no age field columns = self.column_classes(Author) self.assertNotIn("age", columns) # Add the new field new_field = IntegerField(null=True) new_field.set_attributes_from_name("age") with ( CaptureQueriesContext(connection) as ctx, connection.schema_editor() as editor, ): editor.add_field(Author, new_field) drop_default_sql = editor.sql_alter_column_no_default % { "column": editor.quote_name(new_field.name), } self.assertFalse( any(drop_default_sql in query["sql"] for query in ctx.captured_queries) ) # Table is not rebuilt. self.assertIs( any("CREATE TABLE" in query["sql"] for query in ctx.captured_queries), False ) self.assertIs( any("DROP TABLE" in query["sql"] for query in ctx.captured_queries), False ) columns = self.column_classes(Author) self.assertEqual( columns["age"][0], connection.features.introspected_field_types["IntegerField"], ) self.assertTrue(columns["age"][1][6]) def test_add_field_remove_field(self): """ Adding a field and removing it removes all deferred sql referring to it. """ with connection.schema_editor() as editor: # Create a table with a unique constraint on the slug field. editor.create_model(Tag) # Remove the slug column. 
editor.remove_field(Tag, Tag._meta.get_field("slug")) self.assertEqual(editor.deferred_sql, []) def test_add_field_temp_default(self): """ Tests adding fields to models with a temporary default """ # Create the table with connection.schema_editor() as editor: editor.create_model(Author) # Ensure there's no age field columns = self.column_classes(Author) self.assertNotIn("age", columns) # Add some rows of data Author.objects.create(name="Andrew", height=30) Author.objects.create(name="Andrea") # Add a not-null field new_field = CharField(max_length=30, default="Godwin") new_field.set_attributes_from_name("surname") with connection.schema_editor() as editor: editor.add_field(Author, new_field) columns = self.column_classes(Author) self.assertEqual( columns["surname"][0], connection.features.introspected_field_types["CharField"], ) self.assertEqual( columns["surname"][1][6], connection.features.interprets_empty_strings_as_nulls, ) def test_add_field_temp_default_boolean(self): """ Tests adding fields to models with a temporary default where the default is False. (#21783) """ # Create the table with connection.schema_editor() as editor: editor.create_model(Author) # Ensure there's no age field columns = self.column_classes(Author) self.assertNotIn("age", columns) # Add some rows of data Author.objects.create(name="Andrew", height=30) Author.objects.create(name="Andrea") # Add a not-null field new_field = BooleanField(default=False) new_field.set_attributes_from_name("awesome") with connection.schema_editor() as editor: editor.add_field(Author, new_field) columns = self.column_classes(Author) # BooleanField are stored as TINYINT(1) on MySQL. 
field_type = columns["awesome"][0] self.assertEqual( field_type, connection.features.introspected_field_types["BooleanField"] ) def test_add_field_default_transform(self): """ Tests adding fields to models with a default that is not directly valid in the database (#22581) """ class TestTransformField(IntegerField): # Weird field that saves the count of items in its value def get_default(self): return self.default def get_prep_value(self, value): if value is None: return 0 return len(value) # Create the table with connection.schema_editor() as editor: editor.create_model(Author) # Add some rows of data Author.objects.create(name="Andrew", height=30) Author.objects.create(name="Andrea") # Add the field with a default it needs to cast (to string in this # case) new_field = TestTransformField(default={1: 2}) new_field.set_attributes_from_name("thing") with connection.schema_editor() as editor: editor.add_field(Author, new_field) # Ensure the field is there columns = self.column_classes(Author) field_type, field_info = columns["thing"] self.assertEqual( field_type, connection.features.introspected_field_types["IntegerField"]
python
BSD-3-Clause
3201a895cba335000827b28768a7b7105c81b415
2026-01-04T14:38:15.489092Z
true
django/django
https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/indexes/models.py
tests/indexes/models.py
from django.db import models


class CurrentTranslation(models.ForeignObject):
    """
    Creates virtual relation to the translation with model cache enabled.
    """

    # Avoid validation
    requires_unique_target = False

    def __init__(self, to, on_delete, from_fields, to_fields, **kwargs):
        # Disable reverse relation
        kwargs["related_name"] = "+"
        # Set unique to enable model cache.
        kwargs["unique"] = True
        super().__init__(to, on_delete, from_fields, to_fields, **kwargs)


class ArticleTranslation(models.Model):
    # Regular FK with a database-level constraint.
    article = models.ForeignKey("indexes.Article", models.CASCADE)
    # Same target but without a DB constraint, to compare index creation.
    article_no_constraint = models.ForeignKey(
        "indexes.Article", models.CASCADE, db_constraint=False, related_name="+"
    )
    language = models.CharField(max_length=10, unique=True)
    content = models.TextField()


class Article(models.Model):
    headline = models.CharField(max_length=100)
    pub_date = models.DateTimeField()
    published = models.BooleanField(default=False)

    # Add virtual relation to the ArticleTranslation model.
    translation = CurrentTranslation(
        ArticleTranslation, models.CASCADE, ["id"], ["article"]
    )

    class Meta:
        # Composite index used by the schema index-name tests.
        indexes = [models.Index(fields=["headline", "pub_date"])]


class IndexedArticle(models.Model):
    # db_index / unique on text-like columns; only usable on backends that
    # can index text fields (see required_db_features below).
    headline = models.CharField(max_length=100, db_index=True)
    body = models.TextField(db_index=True)
    slug = models.CharField(max_length=40, unique=True)

    class Meta:
        required_db_features = {"supports_index_on_text_field"}


class IndexedArticle2(models.Model):
    # Plain columns; indexes are added explicitly by individual tests.
    headline = models.CharField(max_length=100)
    body = models.TextField()
python
BSD-3-Clause
3201a895cba335000827b28768a7b7105c81b415
2026-01-04T14:38:15.489092Z
false
django/django
https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/indexes/__init__.py
tests/indexes/__init__.py
python
BSD-3-Clause
3201a895cba335000827b28768a7b7105c81b415
2026-01-04T14:38:15.489092Z
false
django/django
https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/indexes/tests.py
tests/indexes/tests.py
import datetime
from unittest import skipUnless

from django.conf import settings
from django.db import connection
from django.db.models import CASCADE, CharField, ForeignKey, Index, Q
from django.db.models.functions import Lower
from django.test import (
    TestCase,
    TransactionTestCase,
    skipIfDBFeature,
    skipUnlessDBFeature,
)
from django.test.utils import override_settings
from django.utils import timezone

from .models import Article, ArticleTranslation, IndexedArticle2


class SchemaIndexesTests(TestCase):
    """
    Test index handling by the db.backends.schema infrastructure.
    """

    def test_index_name_hash(self):
        """
        Index names should be deterministic.
        """
        editor = connection.schema_editor()
        index_name = editor._create_index_name(
            table_name=Article._meta.db_table,
            column_names=("c1",),
            suffix="123",
        )
        # Fixed inputs must always hash to the same suffix.
        self.assertEqual(index_name, "indexes_article_c1_a52bd80b123")

    def test_index_name(self):
        """
        Index names on the built-in database backends::

            * Are truncated as needed.
            * Include all the column names.
            * Include a deterministic hash.
        """
        long_name = "l%sng" % ("o" * 100)
        editor = connection.schema_editor()
        index_name = editor._create_index_name(
            table_name=Article._meta.db_table,
            column_names=("c1", "c2", long_name),
            suffix="ix",
        )
        # Each built-in backend truncates to its own identifier limit.
        expected = {
            "mysql": "indexes_article_c1_c2_looooooooooooooooooo_255179b2ix",
            "oracle": "indexes_a_c1_c2_loo_255179b2ix",
            "postgresql": "indexes_article_c1_c2_loooooooooooooooooo_255179b2ix",
            "sqlite": "indexes_article_c1_c2_l%sng_255179b2ix" % ("o" * 100),
        }
        if connection.vendor not in expected:
            self.skipTest(
                "This test is only supported on the built-in database backends."
            )
        self.assertEqual(index_name, expected[connection.vendor])

    def test_quoted_index_name(self):
        editor = connection.schema_editor()
        index_sql = [str(statement) for statement in editor._model_indexes_sql(Article)]
        # Article declares exactly one index in Meta.indexes.
        self.assertEqual(len(index_sql), 1)
        # Ensure the index name is properly quoted.
        self.assertIn(
            connection.ops.quote_name(Article._meta.indexes[0].name),
            index_sql[0],
        )

    def test_columns_list_sql(self):
        index = Index(fields=["headline"], name="whitespace_idx")
        editor = connection.schema_editor()
        self.assertIn(
            "(%s)" % editor.quote_name("headline"),
            str(index.create_sql(Article, editor)),
        )

    @skipUnlessDBFeature("supports_index_column_ordering")
    def test_descending_columns_list_sql(self):
        # A leading "-" on the field name must emit a DESC column.
        index = Index(fields=["-headline"], name="whitespace_idx")
        editor = connection.schema_editor()
        self.assertIn(
            "(%s DESC)" % editor.quote_name("headline"),
            str(index.create_sql(Article, editor)),
        )

    @skipUnlessDBFeature("can_create_inline_fk", "can_rollback_ddl")
    def test_alter_field_unique_false_removes_deferred_sql(self):
        field_added = CharField(max_length=127, unique=True)
        field_added.set_attributes_from_name("charfield_added")

        field_to_alter = CharField(max_length=127, unique=True)
        field_to_alter.set_attributes_from_name("charfield_altered")

        altered_field = CharField(max_length=127, unique=False)
        altered_field.set_attributes_from_name("charfield_altered")

        with connection.schema_editor() as editor:
            # Two unique fields queue two deferred constraint statements.
            editor.add_field(ArticleTranslation, field_added)
            editor.add_field(ArticleTranslation, field_to_alter)
            self.assertEqual(len(editor.deferred_sql), 2)
            # Dropping unique=True must discard that field's pending SQL,
            # leaving only the statement for the untouched field.
            editor.alter_field(ArticleTranslation, field_to_alter, altered_field)
            self.assertEqual(len(editor.deferred_sql), 1)
            self.assertIn("charfield_added", str(editor.deferred_sql[0].parts["name"]))


class SchemaIndexesNotPostgreSQLTests(TransactionTestCase):
    available_apps = ["indexes"]

    def test_create_index_ignores_opclasses(self):
        # opclasses are PostgreSQL-specific; other backends must drop them.
        index = Index(
            name="test_ops_class",
            fields=["headline"],
            opclasses=["varchar_pattern_ops"],
        )
        with connection.schema_editor() as editor:
            # This would error if opclasses weren't ignored.
            editor.add_index(IndexedArticle2, index)


# The `condition` parameter is ignored by databases that don't support partial
# indexes.
@skipIfDBFeature("supports_partial_indexes") class PartialIndexConditionIgnoredTests(TransactionTestCase): available_apps = ["indexes"] def test_condition_ignored(self): index = Index( name="test_condition_ignored", fields=["published"], condition=Q(published=True), ) with connection.schema_editor() as editor: # This would error if condition weren't ignored. editor.add_index(Article, index) self.assertNotIn( "WHERE %s" % editor.quote_name("published"), str(index.create_sql(Article, editor)), ) @skipUnless(connection.vendor == "postgresql", "PostgreSQL tests") class SchemaIndexesPostgreSQLTests(TransactionTestCase): available_apps = ["indexes"] get_opclass_query = """ SELECT opcname, c.relname FROM pg_opclass AS oc JOIN pg_index as i on oc.oid = ANY(i.indclass) JOIN pg_class as c on c.oid = i.indexrelid WHERE c.relname = '%s' """ def test_text_indexes(self): """Test creation of PostgreSQL-specific text indexes (#12234)""" from .models import IndexedArticle index_sql = [ str(statement) for statement in connection.schema_editor()._model_indexes_sql( IndexedArticle ) ] self.assertEqual(len(index_sql), 5) self.assertIn('("headline" varchar_pattern_ops)', index_sql[1]) self.assertIn('("body" text_pattern_ops)', index_sql[3]) # unique=True and db_index=True should only create the varchar-specific # index (#19441). 
self.assertIn('("slug" varchar_pattern_ops)', index_sql[4]) def test_virtual_relation_indexes(self): """Test indexes are not created for related objects""" index_sql = connection.schema_editor()._model_indexes_sql(Article) self.assertEqual(len(index_sql), 1) def test_ops_class(self): index = Index( name="test_ops_class", fields=["headline"], opclasses=["varchar_pattern_ops"], ) with connection.schema_editor() as editor: editor.add_index(IndexedArticle2, index) with editor.connection.cursor() as cursor: cursor.execute(self.get_opclass_query % "test_ops_class") self.assertEqual( cursor.fetchall(), [("varchar_pattern_ops", "test_ops_class")] ) def test_ops_class_multiple_columns(self): index = Index( name="test_ops_class_multiple", fields=["headline", "body"], opclasses=["varchar_pattern_ops", "text_pattern_ops"], ) with connection.schema_editor() as editor: editor.add_index(IndexedArticle2, index) with editor.connection.cursor() as cursor: cursor.execute(self.get_opclass_query % "test_ops_class_multiple") expected_ops_classes = ( ("varchar_pattern_ops", "test_ops_class_multiple"), ("text_pattern_ops", "test_ops_class_multiple"), ) self.assertCountEqual(cursor.fetchall(), expected_ops_classes) def test_ops_class_partial(self): index = Index( name="test_ops_class_partial", fields=["body"], opclasses=["text_pattern_ops"], condition=Q(headline__contains="China"), ) with connection.schema_editor() as editor: editor.add_index(IndexedArticle2, index) with editor.connection.cursor() as cursor: cursor.execute(self.get_opclass_query % "test_ops_class_partial") self.assertCountEqual( cursor.fetchall(), [("text_pattern_ops", "test_ops_class_partial")] ) def test_ops_class_partial_tablespace(self): indexname = "test_ops_class_tblspace" index = Index( name=indexname, fields=["body"], opclasses=["text_pattern_ops"], condition=Q(headline__contains="China"), db_tablespace="pg_default", ) with connection.schema_editor() as editor: editor.add_index(IndexedArticle2, index) 
self.assertIn( 'TABLESPACE "pg_default" ', str(index.create_sql(IndexedArticle2, editor)), ) with editor.connection.cursor() as cursor: cursor.execute(self.get_opclass_query % indexname) self.assertCountEqual(cursor.fetchall(), [("text_pattern_ops", indexname)]) def test_ops_class_descending(self): indexname = "test_ops_class_ordered" index = Index( name=indexname, fields=["-body"], opclasses=["text_pattern_ops"], ) with connection.schema_editor() as editor: editor.add_index(IndexedArticle2, index) with editor.connection.cursor() as cursor: cursor.execute(self.get_opclass_query % indexname) self.assertCountEqual(cursor.fetchall(), [("text_pattern_ops", indexname)]) def test_ops_class_descending_partial(self): indexname = "test_ops_class_ordered_partial" index = Index( name=indexname, fields=["-body"], opclasses=["text_pattern_ops"], condition=Q(headline__contains="China"), ) with connection.schema_editor() as editor: editor.add_index(IndexedArticle2, index) with editor.connection.cursor() as cursor: cursor.execute(self.get_opclass_query % indexname) self.assertCountEqual(cursor.fetchall(), [("text_pattern_ops", indexname)]) @skipUnlessDBFeature("supports_covering_indexes") def test_ops_class_include(self): index_name = "test_ops_class_include" index = Index( name=index_name, fields=["body"], opclasses=["text_pattern_ops"], include=["headline"], ) with connection.schema_editor() as editor: editor.add_index(IndexedArticle2, index) with editor.connection.cursor() as cursor: cursor.execute(self.get_opclass_query % index_name) self.assertCountEqual(cursor.fetchall(), [("text_pattern_ops", index_name)]) @skipUnlessDBFeature("supports_covering_indexes") def test_ops_class_include_tablespace(self): index_name = "test_ops_class_include_tblspace" index = Index( name=index_name, fields=["body"], opclasses=["text_pattern_ops"], include=["headline"], db_tablespace="pg_default", ) with connection.schema_editor() as editor: editor.add_index(IndexedArticle2, index) self.assertIn( 
'TABLESPACE "pg_default"', str(index.create_sql(IndexedArticle2, editor)), ) with editor.connection.cursor() as cursor: cursor.execute(self.get_opclass_query % index_name) self.assertCountEqual(cursor.fetchall(), [("text_pattern_ops", index_name)]) def test_ops_class_columns_lists_sql(self): index = Index( fields=["headline"], name="whitespace_idx", opclasses=["text_pattern_ops"], ) with connection.schema_editor() as editor: self.assertIn( "(%s text_pattern_ops)" % editor.quote_name("headline"), str(index.create_sql(Article, editor)), ) def test_ops_class_descending_columns_list_sql(self): index = Index( fields=["-headline"], name="whitespace_idx", opclasses=["text_pattern_ops"], ) with connection.schema_editor() as editor: self.assertIn( "(%s text_pattern_ops DESC)" % editor.quote_name("headline"), str(index.create_sql(Article, editor)), ) @skipUnless(connection.vendor == "mysql", "MySQL tests") class SchemaIndexesMySQLTests(TransactionTestCase): available_apps = ["indexes"] def test_no_index_for_foreignkey(self): """ MySQL on InnoDB already creates indexes automatically for foreign keys. (#14180). An index should be created if db_constraint=False (#26171). """ with connection.cursor() as cursor: storage = connection.introspection.get_storage_engine( cursor, ArticleTranslation._meta.db_table, ) if storage != "InnoDB": self.skipTest("This test only applies to the InnoDB storage engine") index_sql = [ str(statement) for statement in connection.schema_editor()._model_indexes_sql( ArticleTranslation ) ] self.assertEqual( index_sql, [ "CREATE INDEX " "`indexes_articletranslation_article_no_constraint_id_d6c0806b` " "ON `indexes_articletranslation` (`article_no_constraint_id`)" ], ) # The index also shouldn't be created if the ForeignKey is added after # the model was created. 
field_created = False try: with connection.schema_editor() as editor: new_field = ForeignKey(Article, CASCADE) new_field.set_attributes_from_name("new_foreign_key") editor.add_field(ArticleTranslation, new_field) field_created = True # No deferred SQL. The FK constraint is included in the # statement to add the field. self.assertFalse(editor.deferred_sql) finally: if field_created: with connection.schema_editor() as editor: editor.remove_field(ArticleTranslation, new_field) @skipUnlessDBFeature("supports_partial_indexes") # SQLite doesn't support timezone-aware datetimes when USE_TZ is False. @override_settings(USE_TZ=True) class PartialIndexTests(TransactionTestCase): # Schema editor is used to create the index to test that it works. available_apps = ["indexes"] def test_partial_index(self): with connection.schema_editor() as editor: index = Index( name="recent_article_idx", fields=["pub_date"], condition=Q( pub_date__gt=datetime.datetime( year=2015, month=1, day=1, # PostgreSQL would otherwise complain about the lookup # being converted to a mutable function (by removing # the timezone in the cast) which is forbidden. 
tzinfo=timezone.get_current_timezone(), ), ), ) self.assertIn( "WHERE %s" % editor.quote_name("pub_date"), str(index.create_sql(Article, schema_editor=editor)), ) editor.add_index(index=index, model=Article) with connection.cursor() as cursor: self.assertIn( index.name, connection.introspection.get_constraints( cursor=cursor, table_name=Article._meta.db_table, ), ) editor.remove_index(index=index, model=Article) def test_integer_restriction_partial(self): with connection.schema_editor() as editor: index = Index( name="recent_article_idx", fields=["id"], condition=Q(pk__gt=1), ) self.assertIn( "WHERE %s" % editor.quote_name("id"), str(index.create_sql(Article, schema_editor=editor)), ) editor.add_index(index=index, model=Article) with connection.cursor() as cursor: self.assertIn( index.name, connection.introspection.get_constraints( cursor=cursor, table_name=Article._meta.db_table, ), ) editor.remove_index(index=index, model=Article) def test_boolean_restriction_partial(self): with connection.schema_editor() as editor: index = Index( name="published_index", fields=["published"], condition=Q(published=True), ) self.assertIn( "WHERE %s" % editor.quote_name("published"), str(index.create_sql(Article, schema_editor=editor)), ) editor.add_index(index=index, model=Article) with connection.cursor() as cursor: self.assertIn( index.name, connection.introspection.get_constraints( cursor=cursor, table_name=Article._meta.db_table, ), ) editor.remove_index(index=index, model=Article) @skipUnlessDBFeature("supports_functions_in_partial_indexes") def test_multiple_conditions(self): with connection.schema_editor() as editor: index = Index( name="recent_article_idx", fields=["pub_date", "headline"], condition=( Q( pub_date__gt=datetime.datetime( year=2015, month=1, day=1, tzinfo=timezone.get_current_timezone(), ) ) & Q(headline__contains="China") ), ) sql = str(index.create_sql(Article, schema_editor=editor)) where = sql.find("WHERE") self.assertIn("WHERE (%s" % 
editor.quote_name("pub_date"), sql) # Because each backend has different syntax for the operators, # check ONLY the occurrence of headline in the SQL. self.assertGreater(sql.rfind("headline"), where) editor.add_index(index=index, model=Article) with connection.cursor() as cursor: self.assertIn( index.name, connection.introspection.get_constraints( cursor=cursor, table_name=Article._meta.db_table, ), ) editor.remove_index(index=index, model=Article) def test_is_null_condition(self): with connection.schema_editor() as editor: index = Index( name="recent_article_idx", fields=["pub_date"], condition=Q(pub_date__isnull=False), ) self.assertIn( "WHERE %s IS NOT NULL" % editor.quote_name("pub_date"), str(index.create_sql(Article, schema_editor=editor)), ) editor.add_index(index=index, model=Article) with connection.cursor() as cursor: self.assertIn( index.name, connection.introspection.get_constraints( cursor=cursor, table_name=Article._meta.db_table, ), ) editor.remove_index(index=index, model=Article) @skipUnlessDBFeature("supports_expression_indexes") def test_partial_func_index(self): index_name = "partial_func_idx" index = Index( Lower("headline").desc(), name=index_name, condition=Q(pub_date__isnull=False), ) with connection.schema_editor() as editor: editor.add_index(index=index, model=Article) sql = index.create_sql(Article, schema_editor=editor) table = Article._meta.db_table self.assertIs(sql.references_column(table, "headline"), True) sql = str(sql) self.assertIn("LOWER(%s)" % editor.quote_name("headline"), sql) self.assertIn( "WHERE %s IS NOT NULL" % editor.quote_name("pub_date"), sql, ) self.assertGreater(sql.find("WHERE"), sql.find("LOWER")) with connection.cursor() as cursor: constraints = connection.introspection.get_constraints( cursor=cursor, table_name=table, ) self.assertIn(index_name, constraints) if connection.features.supports_index_column_ordering: self.assertEqual(constraints[index_name]["orders"], ["DESC"]) with connection.schema_editor() as 
editor: editor.remove_index(Article, index) with connection.cursor() as cursor: self.assertNotIn( index_name, connection.introspection.get_constraints( cursor=cursor, table_name=table, ), ) @skipUnlessDBFeature("supports_covering_indexes") class CoveringIndexTests(TransactionTestCase): available_apps = ["indexes"] def test_covering_index(self): index = Index( name="covering_headline_idx", fields=["headline"], include=["pub_date", "published"], ) with connection.schema_editor() as editor: self.assertIn( "(%s) INCLUDE (%s, %s)" % ( editor.quote_name("headline"), editor.quote_name("pub_date"), editor.quote_name("published"), ), str(index.create_sql(Article, editor)), ) editor.add_index(Article, index) with connection.cursor() as cursor: constraints = connection.introspection.get_constraints( cursor=cursor, table_name=Article._meta.db_table, ) self.assertIn(index.name, constraints) self.assertEqual( constraints[index.name]["columns"], ["headline", "pub_date", "published"], ) editor.remove_index(Article, index) with connection.cursor() as cursor: self.assertNotIn( index.name, connection.introspection.get_constraints( cursor=cursor, table_name=Article._meta.db_table, ), ) def test_covering_partial_index(self): index = Index( name="covering_partial_headline_idx", fields=["headline"], include=["pub_date"], condition=Q(pub_date__isnull=False), ) with connection.schema_editor() as editor: extra_sql = "" if settings.DEFAULT_INDEX_TABLESPACE: extra_sql = "TABLESPACE %s " % editor.quote_name( settings.DEFAULT_INDEX_TABLESPACE ) self.assertIn( "(%s) INCLUDE (%s) %sWHERE %s " % ( editor.quote_name("headline"), editor.quote_name("pub_date"), extra_sql, editor.quote_name("pub_date"), ), str(index.create_sql(Article, editor)), ) editor.add_index(Article, index) with connection.cursor() as cursor: constraints = connection.introspection.get_constraints( cursor=cursor, table_name=Article._meta.db_table, ) self.assertIn(index.name, constraints) self.assertEqual( 
constraints[index.name]["columns"], ["headline", "pub_date"], ) editor.remove_index(Article, index) with connection.cursor() as cursor: self.assertNotIn( index.name, connection.introspection.get_constraints( cursor=cursor, table_name=Article._meta.db_table, ), ) @skipUnlessDBFeature("supports_expression_indexes") def test_covering_func_index(self): index_name = "covering_func_headline_idx" index = Index(Lower("headline"), name=index_name, include=["pub_date"]) with connection.schema_editor() as editor: editor.add_index(index=index, model=Article) sql = index.create_sql(Article, schema_editor=editor) table = Article._meta.db_table self.assertIs(sql.references_column(table, "headline"), True) sql = str(sql) self.assertIn("LOWER(%s)" % editor.quote_name("headline"), sql) self.assertIn("INCLUDE (%s)" % editor.quote_name("pub_date"), sql) self.assertGreater(sql.find("INCLUDE"), sql.find("LOWER")) with connection.cursor() as cursor: constraints = connection.introspection.get_constraints( cursor=cursor, table_name=table, ) self.assertIn(index_name, constraints) self.assertIn("pub_date", constraints[index_name]["columns"]) with connection.schema_editor() as editor: editor.remove_index(Article, index) with connection.cursor() as cursor: self.assertNotIn( index_name, connection.introspection.get_constraints( cursor=cursor, table_name=table, ), ) @skipIfDBFeature("supports_covering_indexes") class CoveringIndexIgnoredTests(TransactionTestCase): available_apps = ["indexes"] def test_covering_ignored(self): index = Index( name="test_covering_ignored", fields=["headline"], include=["pub_date"], ) with connection.schema_editor() as editor: editor.add_index(Article, index) self.assertNotIn( "INCLUDE (%s)" % editor.quote_name("headline"), str(index.create_sql(Article, editor)), )
python
BSD-3-Clause
3201a895cba335000827b28768a7b7105c81b415
2026-01-04T14:38:15.489092Z
false
django/django
https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/dbshell/test_mysql.py
tests/dbshell/test_mysql.py
import os import signal import subprocess import sys from pathlib import Path from unittest import mock, skipUnless from django.db import connection from django.db.backends.mysql.client import DatabaseClient from django.test import SimpleTestCase class MySqlDbshellCommandTestCase(SimpleTestCase): def settings_to_cmd_args_env(self, settings_dict, parameters=None): if parameters is None: parameters = [] return DatabaseClient.settings_to_cmd_args_env(settings_dict, parameters) def test_fails_with_keyerror_on_incomplete_config(self): with self.assertRaises(KeyError): self.settings_to_cmd_args_env({}) def test_basic_params_specified_in_settings(self): expected_args = [ "mysql", "--user=someuser", "--host=somehost", "--port=444", "somedbname", ] expected_env = {"MYSQL_PWD": "somepassword"} self.assertEqual( self.settings_to_cmd_args_env( { "NAME": "somedbname", "USER": "someuser", "PASSWORD": "somepassword", "HOST": "somehost", "PORT": 444, "OPTIONS": {}, } ), (expected_args, expected_env), ) def test_options_override_settings_proper_values(self): settings_port = 444 options_port = 555 self.assertNotEqual(settings_port, options_port, "test pre-req") expected_args = [ "mysql", "--user=optionuser", "--host=optionhost", "--port=%s" % options_port, "optiondbname", ] expected_env = {"MYSQL_PWD": "optionpassword"} for keys in [("database", "password"), ("db", "passwd")]: with self.subTest(keys=keys): database, password = keys self.assertEqual( self.settings_to_cmd_args_env( { "NAME": "settingdbname", "USER": "settinguser", "PASSWORD": "settingpassword", "HOST": "settinghost", "PORT": settings_port, "OPTIONS": { database: "optiondbname", "user": "optionuser", password: "optionpassword", "host": "optionhost", "port": options_port, }, } ), (expected_args, expected_env), ) def test_options_non_deprecated_keys_preferred(self): expected_args = [ "mysql", "--user=someuser", "--host=somehost", "--port=444", "optiondbname", ] expected_env = {"MYSQL_PWD": "optionpassword"} 
self.assertEqual( self.settings_to_cmd_args_env( { "NAME": "settingdbname", "USER": "someuser", "PASSWORD": "settingpassword", "HOST": "somehost", "PORT": 444, "OPTIONS": { "database": "optiondbname", "db": "deprecatedoptiondbname", "password": "optionpassword", "passwd": "deprecatedoptionpassword", }, } ), (expected_args, expected_env), ) def test_options_charset(self): expected_args = [ "mysql", "--user=someuser", "--host=somehost", "--port=444", "--default-character-set=utf8mb4", "somedbname", ] expected_env = {"MYSQL_PWD": "somepassword"} self.assertEqual( self.settings_to_cmd_args_env( { "NAME": "somedbname", "USER": "someuser", "PASSWORD": "somepassword", "HOST": "somehost", "PORT": 444, "OPTIONS": {"charset": "utf8mb4"}, } ), (expected_args, expected_env), ) def test_can_connect_using_sockets(self): expected_args = [ "mysql", "--user=someuser", "--socket=/path/to/mysql.socket.file", "somedbname", ] expected_env = {"MYSQL_PWD": "somepassword"} self.assertEqual( self.settings_to_cmd_args_env( { "NAME": "somedbname", "USER": "someuser", "PASSWORD": "somepassword", "HOST": "/path/to/mysql.socket.file", "PORT": None, "OPTIONS": {}, } ), (expected_args, expected_env), ) def test_ssl_certificate_is_added(self): expected_args = [ "mysql", "--user=someuser", "--host=somehost", "--port=444", "--ssl-ca=sslca", "--ssl-cert=sslcert", "--ssl-key=sslkey", "somedbname", ] expected_env = {"MYSQL_PWD": "somepassword"} self.assertEqual( self.settings_to_cmd_args_env( { "NAME": "somedbname", "USER": "someuser", "PASSWORD": "somepassword", "HOST": "somehost", "PORT": 444, "OPTIONS": { "ssl": { "ca": "sslca", "cert": "sslcert", "key": "sslkey", }, }, } ), (expected_args, expected_env), ) def test_parameters(self): self.assertEqual( self.settings_to_cmd_args_env( { "NAME": "somedbname", "USER": None, "PASSWORD": None, "HOST": None, "PORT": None, "OPTIONS": {}, }, ["--help"], ), (["mysql", "somedbname", "--help"], None), ) def test_crash_password_does_not_leak(self): # The password 
doesn't leak in an exception that results from a client # crash. args, env = DatabaseClient.settings_to_cmd_args_env( { "NAME": "somedbname", "USER": "someuser", "PASSWORD": "somepassword", "HOST": "somehost", "PORT": 444, "OPTIONS": {}, }, [], ) if env: env = {**os.environ, **env} fake_client = Path(__file__).with_name("fake_client.py") args[0:1] = [sys.executable, str(fake_client)] with self.assertRaises(subprocess.CalledProcessError) as ctx: subprocess.run(args, check=True, env=env) self.assertNotIn("somepassword", str(ctx.exception)) @skipUnless(connection.vendor == "mysql", "Requires a MySQL connection") def test_sigint_handler(self): """SIGINT is ignored in Python and passed to mysql to abort queries.""" def _mock_subprocess_run(*args, **kwargs): handler = signal.getsignal(signal.SIGINT) self.assertEqual(handler, signal.SIG_IGN) sigint_handler = signal.getsignal(signal.SIGINT) # The default handler isn't SIG_IGN. self.assertNotEqual(sigint_handler, signal.SIG_IGN) with mock.patch("subprocess.run", new=_mock_subprocess_run): connection.client.runshell([]) # dbshell restores the original handler. self.assertEqual(sigint_handler, signal.getsignal(signal.SIGINT))
python
BSD-3-Clause
3201a895cba335000827b28768a7b7105c81b415
2026-01-04T14:38:15.489092Z
false
django/django
https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/dbshell/test_postgresql.py
tests/dbshell/test_postgresql.py
import os import signal import subprocess import sys from pathlib import Path from unittest import mock, skipUnless from django.db import connection from django.db.backends.postgresql.client import DatabaseClient from django.test import SimpleTestCase class PostgreSqlDbshellCommandTestCase(SimpleTestCase): def settings_to_cmd_args_env(self, settings_dict, parameters=None): if parameters is None: parameters = [] settings_dict.setdefault("OPTIONS", {}) return DatabaseClient.settings_to_cmd_args_env(settings_dict, parameters) def test_basic(self): self.assertEqual( self.settings_to_cmd_args_env( { "NAME": "dbname", "USER": "someuser", "PASSWORD": "somepassword", "HOST": "somehost", "PORT": "444", } ), ( ["psql", "-U", "someuser", "-h", "somehost", "-p", "444", "dbname"], {"PGPASSWORD": "somepassword"}, ), ) def test_nopass(self): self.assertEqual( self.settings_to_cmd_args_env( { "NAME": "dbname", "USER": "someuser", "HOST": "somehost", "PORT": "444", } ), ( ["psql", "-U", "someuser", "-h", "somehost", "-p", "444", "dbname"], None, ), ) def test_ssl_certificate(self): self.assertEqual( self.settings_to_cmd_args_env( { "NAME": "dbname", "USER": "someuser", "HOST": "somehost", "PORT": "444", "OPTIONS": { "sslmode": "verify-ca", "sslrootcert": "root.crt", "sslcert": "client.crt", "sslkey": "client.key", }, } ), ( ["psql", "-U", "someuser", "-h", "somehost", "-p", "444", "dbname"], { "PGSSLCERT": "client.crt", "PGSSLKEY": "client.key", "PGSSLMODE": "verify-ca", "PGSSLROOTCERT": "root.crt", }, ), ) def test_service(self): self.assertEqual( self.settings_to_cmd_args_env({"OPTIONS": {"service": "django_test"}}), (["psql"], {"PGSERVICE": "django_test"}), ) def test_passfile(self): self.assertEqual( self.settings_to_cmd_args_env( { "NAME": "dbname", "USER": "someuser", "HOST": "somehost", "PORT": "444", "OPTIONS": { "passfile": "~/.custompgpass", }, } ), ( ["psql", "-U", "someuser", "-h", "somehost", "-p", "444", "dbname"], {"PGPASSFILE": "~/.custompgpass"}, ), ) 
self.assertEqual( self.settings_to_cmd_args_env( { "OPTIONS": { "service": "django_test", "passfile": "~/.custompgpass", }, } ), ( ["psql"], {"PGSERVICE": "django_test", "PGPASSFILE": "~/.custompgpass"}, ), ) def test_column(self): self.assertEqual( self.settings_to_cmd_args_env( { "NAME": "dbname", "USER": "some:user", "PASSWORD": "some:password", "HOST": "::1", "PORT": "444", } ), ( ["psql", "-U", "some:user", "-h", "::1", "-p", "444", "dbname"], {"PGPASSWORD": "some:password"}, ), ) def test_accent(self): username = "rôle" password = "sésame" self.assertEqual( self.settings_to_cmd_args_env( { "NAME": "dbname", "USER": username, "PASSWORD": password, "HOST": "somehost", "PORT": "444", } ), ( ["psql", "-U", username, "-h", "somehost", "-p", "444", "dbname"], {"PGPASSWORD": password}, ), ) def test_parameters(self): self.assertEqual( self.settings_to_cmd_args_env({"NAME": "dbname"}, ["--help"]), (["psql", "--help", "dbname"], None), ) @skipUnless(connection.vendor == "postgresql", "Requires a PostgreSQL connection") def test_sigint_handler(self): """SIGINT is ignored in Python and passed to psql to abort queries.""" def _mock_subprocess_run(*args, **kwargs): handler = signal.getsignal(signal.SIGINT) self.assertEqual(handler, signal.SIG_IGN) sigint_handler = signal.getsignal(signal.SIGINT) # The default handler isn't SIG_IGN. self.assertNotEqual(sigint_handler, signal.SIG_IGN) with mock.patch("subprocess.run", new=_mock_subprocess_run): connection.client.runshell([]) # dbshell restores the original handler. self.assertEqual(sigint_handler, signal.getsignal(signal.SIGINT)) def test_crash_password_does_not_leak(self): # The password doesn't leak in an exception that results from a client # crash. 
args, env = self.settings_to_cmd_args_env({"PASSWORD": "somepassword"}, []) if env: env = {**os.environ, **env} fake_client = Path(__file__).with_name("fake_client.py") args[0:1] = [sys.executable, str(fake_client)] with self.assertRaises(subprocess.CalledProcessError) as ctx: subprocess.run(args, check=True, env=env) self.assertNotIn("somepassword", str(ctx.exception))
python
BSD-3-Clause
3201a895cba335000827b28768a7b7105c81b415
2026-01-04T14:38:15.489092Z
false
django/django
https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/dbshell/__init__.py
tests/dbshell/__init__.py
python
BSD-3-Clause
3201a895cba335000827b28768a7b7105c81b415
2026-01-04T14:38:15.489092Z
false
django/django
https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/dbshell/tests.py
tests/dbshell/tests.py
from unittest import mock from django.core.management import call_command from django.core.management.base import CommandError from django.db import connection from django.test import SimpleTestCase class DbshellCommandTestCase(SimpleTestCase): def test_command_missing(self): msg = ( "You appear not to have the %r program installed or on your path." % connection.client.executable_name ) with self.assertRaisesMessage(CommandError, msg): with mock.patch("subprocess.run", side_effect=FileNotFoundError): call_command("dbshell")
python
BSD-3-Clause
3201a895cba335000827b28768a7b7105c81b415
2026-01-04T14:38:15.489092Z
false
django/django
https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/dbshell/test_sqlite.py
tests/dbshell/test_sqlite.py
import subprocess from pathlib import Path from unittest import mock, skipUnless from django.core.management import CommandError, call_command from django.db import connection from django.db.backends.sqlite3.client import DatabaseClient from django.test import SimpleTestCase class SqliteDbshellCommandTestCase(SimpleTestCase): def settings_to_cmd_args_env(self, settings_dict, parameters=None): if parameters is None: parameters = [] return DatabaseClient.settings_to_cmd_args_env(settings_dict, parameters) def test_path_name(self): self.assertEqual( self.settings_to_cmd_args_env({"NAME": Path("test.db.sqlite3")}), (["sqlite3", Path("test.db.sqlite3")], None), ) def test_parameters(self): self.assertEqual( self.settings_to_cmd_args_env({"NAME": "test.db.sqlite3"}, ["-help"]), (["sqlite3", "test.db.sqlite3", "-help"], None), ) @skipUnless(connection.vendor == "sqlite", "SQLite test") def test_non_zero_exit_status_when_path_to_db_is_path(self): sqlite_with_path = { "ENGINE": "django.db.backends.sqlite3", "NAME": Path("test.db.sqlite3"), } cmd_args = self.settings_to_cmd_args_env(sqlite_with_path)[0] msg = '"sqlite3 test.db.sqlite3" returned non-zero exit status 1.' with ( mock.patch( "django.db.backends.sqlite3.client.DatabaseClient.runshell", side_effect=subprocess.CalledProcessError(returncode=1, cmd=cmd_args), ), self.assertRaisesMessage(CommandError, msg), ): call_command("dbshell")
python
BSD-3-Clause
3201a895cba335000827b28768a7b7105c81b415
2026-01-04T14:38:15.489092Z
false
django/django
https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/dbshell/fake_client.py
tests/dbshell/fake_client.py
import sys sys.exit(1)
python
BSD-3-Clause
3201a895cba335000827b28768a7b7105c81b415
2026-01-04T14:38:15.489092Z
false
django/django
https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/dbshell/test_oracle.py
tests/dbshell/test_oracle.py
from unittest import mock, skipUnless from django.db import connection from django.db.backends.oracle.client import DatabaseClient from django.test import SimpleTestCase @skipUnless(connection.vendor == "oracle", "Requires oracledb to be installed") class OracleDbshellTests(SimpleTestCase): def settings_to_cmd_args_env(self, settings_dict, parameters=None, rlwrap=False): if parameters is None: parameters = [] with mock.patch( "shutil.which", return_value="/usr/bin/rlwrap" if rlwrap else None ): return DatabaseClient.settings_to_cmd_args_env(settings_dict, parameters) def test_without_rlwrap(self): expected_args = [ "sqlplus", "-L", connection.client.connect_string(connection.settings_dict), ] self.assertEqual( self.settings_to_cmd_args_env(connection.settings_dict, rlwrap=False), (expected_args, None), ) def test_with_rlwrap(self): expected_args = [ "/usr/bin/rlwrap", "sqlplus", "-L", connection.client.connect_string(connection.settings_dict), ] self.assertEqual( self.settings_to_cmd_args_env(connection.settings_dict, rlwrap=True), (expected_args, None), ) def test_parameters(self): expected_args = [ "sqlplus", "-L", connection.client.connect_string(connection.settings_dict), "-HELP", ] self.assertEqual( self.settings_to_cmd_args_env( connection.settings_dict, parameters=["-HELP"], ), (expected_args, None), )
python
BSD-3-Clause
3201a895cba335000827b28768a7b7105c81b415
2026-01-04T14:38:15.489092Z
false
django/django
https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/defer/models.py
tests/defer/models.py
""" Tests for defer() and only(). """ from django.db import models class Secondary(models.Model): first = models.CharField(max_length=50) second = models.CharField(max_length=50) class Primary(models.Model): name = models.CharField(max_length=50) value = models.CharField(max_length=50) related = models.ForeignKey(Secondary, models.CASCADE) def __str__(self): return self.name class PrimaryOneToOne(models.Model): name = models.CharField(max_length=50) value = models.CharField(max_length=50) related = models.OneToOneField( Secondary, models.CASCADE, related_name="primary_o2o" ) class Child(Primary): pass class BigChild(Primary): other = models.CharField(max_length=50) class ChildProxy(Child): class Meta: proxy = True class RefreshPrimaryProxy(Primary): class Meta: proxy = True def refresh_from_db(self, using=None, fields=None, **kwargs): # Reloads all deferred fields if any of the fields is deferred. if fields is not None: fields = set(fields) deferred_fields = self.get_deferred_fields() if fields.intersection(deferred_fields): fields = fields.union(deferred_fields) super().refresh_from_db(using, fields, **kwargs) class ShadowParent(models.Model): """ ShadowParent declares a scalar, rather than a field. When this is overridden, the field value, rather than the scalar value must still be used when the field is deferred. """ name = "aphrodite" class ShadowChild(ShadowParent): name = models.CharField(default="adonis", max_length=6)
python
BSD-3-Clause
3201a895cba335000827b28768a7b7105c81b415
2026-01-04T14:38:15.489092Z
false
django/django
https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/defer/__init__.py
tests/defer/__init__.py
python
BSD-3-Clause
3201a895cba335000827b28768a7b7105c81b415
2026-01-04T14:38:15.489092Z
false
django/django
https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/defer/tests.py
tests/defer/tests.py
from django.core.exceptions import FieldDoesNotExist, FieldError, FieldFetchBlocked from django.db.models import FETCH_PEERS, RAISE from django.test import SimpleTestCase, TestCase from .models import ( BigChild, Child, ChildProxy, Primary, PrimaryOneToOne, RefreshPrimaryProxy, Secondary, ShadowChild, ) class AssertionMixin: def assert_delayed(self, obj, num): """ Instances with deferred fields look the same as normal instances when we examine attribute values. Therefore, this method returns the number of deferred fields on returned instances. """ count = len(obj.get_deferred_fields()) self.assertEqual(count, num) class DeferTests(AssertionMixin, TestCase): @classmethod def setUpTestData(cls): cls.s1 = Secondary.objects.create(first="x1", second="y1") cls.p1 = Primary.objects.create(name="p1", value="xx", related=cls.s1) cls.p2 = Primary.objects.create(name="p2", value="yy", related=cls.s1) def test_defer(self): qs = Primary.objects.all() self.assert_delayed(qs.defer("name")[0], 1) self.assert_delayed(qs.defer("name").get(pk=self.p1.pk), 1) self.assert_delayed(qs.defer("related__first")[0], 0) self.assert_delayed(qs.defer("name").defer("value")[0], 2) def test_only(self): qs = Primary.objects.all() self.assert_delayed(qs.only("name")[0], 2) self.assert_delayed(qs.only("name").get(pk=self.p1.pk), 2) self.assert_delayed(qs.only("name").only("value")[0], 2) self.assert_delayed(qs.only("related__first")[0], 2) # Using 'pk' with only() should result in 3 deferred fields, namely all # of them except the model's primary key see #15494 self.assert_delayed(qs.only("pk")[0], 3) # You can use 'pk' with reverse foreign key lookups. # The related_id is always set even if it's not fetched from the DB, # so pk and related_id are not deferred. 
self.assert_delayed(self.s1.primary_set.only("pk")[0], 2) def test_defer_only_chaining(self): qs = Primary.objects.all() self.assert_delayed(qs.only("name", "value").defer("name")[0], 2) self.assert_delayed(qs.defer("name").only("value", "name")[0], 2) self.assert_delayed(qs.defer("name").only("name").only("value")[0], 2) self.assert_delayed(qs.defer("name").only("value")[0], 2) self.assert_delayed(qs.only("name").defer("value")[0], 2) self.assert_delayed(qs.only("name").defer("name").defer("value")[0], 1) self.assert_delayed(qs.only("name").defer("name", "value")[0], 1) def test_defer_only_clear(self): qs = Primary.objects.all() self.assert_delayed(qs.only("name").defer("name")[0], 0) self.assert_delayed(qs.defer("name").only("name")[0], 0) def test_defer_on_an_already_deferred_field(self): qs = Primary.objects.all() self.assert_delayed(qs.defer("name")[0], 1) self.assert_delayed(qs.defer("name").defer("name")[0], 1) def test_defer_none_to_clear_deferred_set(self): qs = Primary.objects.all() self.assert_delayed(qs.defer("name", "value")[0], 2) self.assert_delayed(qs.defer(None)[0], 0) self.assert_delayed(qs.only("name").defer(None)[0], 0) def test_only_none_raises_error(self): msg = "Cannot pass None as an argument to only()." with self.assertRaisesMessage(TypeError, msg): Primary.objects.only(None) def test_defer_extra(self): qs = Primary.objects.all() self.assert_delayed(qs.defer("name").extra(select={"a": 1})[0], 1) self.assert_delayed(qs.extra(select={"a": 1}).defer("name")[0], 1) def test_defer_values_does_not_defer(self): # User values() won't defer anything (you get the full list of # dictionaries back), but it still works. 
self.assertEqual( Primary.objects.defer("name").values()[0], { "id": self.p1.id, "name": "p1", "value": "xx", "related_id": self.s1.id, }, ) def test_only_values_does_not_defer(self): self.assertEqual( Primary.objects.only("name").values()[0], { "id": self.p1.id, "name": "p1", "value": "xx", "related_id": self.s1.id, }, ) def test_get(self): # Using defer() and only() with get() is also valid. qs = Primary.objects.all() self.assert_delayed(qs.defer("name").get(pk=self.p1.pk), 1) self.assert_delayed(qs.only("name").get(pk=self.p1.pk), 2) def test_defer_with_select_related(self): obj = Primary.objects.select_related().defer( "related__first", "related__second" )[0] self.assert_delayed(obj.related, 2) self.assert_delayed(obj, 0) def test_only_with_select_related(self): obj = Primary.objects.select_related().only("related__first")[0] self.assert_delayed(obj, 2) self.assert_delayed(obj.related, 1) self.assertEqual(obj.related_id, self.s1.pk) self.assertEqual(obj.name, "p1") def test_defer_foreign_keys_are_deferred_and_not_traversed(self): # select_related() overrides defer(). with self.assertNumQueries(1): obj = Primary.objects.defer("related").select_related()[0] self.assert_delayed(obj, 1) self.assertEqual(obj.related.id, self.s1.pk) def test_saving_object_with_deferred_field(self): # Saving models with deferred fields is possible (but inefficient, # since every field has to be retrieved first). 
obj = Primary.objects.defer("value").get(name="p2") obj.name = "a new name" obj.save() self.assertQuerySetEqual( Primary.objects.all(), [ "p1", "a new name", ], lambda p: p.name, ordered=False, ) def test_defer_baseclass_when_subclass_has_no_added_fields(self): # Regression for #10572 - A subclass with no extra fields can defer # fields from the base class Child.objects.create(name="c1", value="foo", related=self.s1) # You can defer a field on a baseclass when the subclass has no fields obj = Child.objects.defer("value").get(name="c1") self.assert_delayed(obj, 1) self.assertEqual(obj.name, "c1") self.assertEqual(obj.value, "foo") def test_only_baseclass_when_subclass_has_no_added_fields(self): # You can retrieve a single column on a base class with no fields Child.objects.create(name="c1", value="foo", related=self.s1) obj = Child.objects.only("name").get(name="c1") # on an inherited model, its PK is also fetched, hence '3' deferred # fields. self.assert_delayed(obj, 3) self.assertEqual(obj.name, "c1") self.assertEqual(obj.value, "foo") def test_defer_of_overridden_scalar(self): ShadowChild.objects.create() obj = ShadowChild.objects.defer("name").get() self.assertEqual(obj.name, "adonis") def test_defer_fk_attname(self): primary = Primary.objects.defer("related_id").get(name="p1") with self.assertNumQueries(1): self.assertEqual(primary.related_id, self.p1.related_id) def test_only_fetch_mode_fetch_peers(self): p1, p2 = Primary.objects.fetch_mode(FETCH_PEERS).only("name") with self.assertNumQueries(1): p1.value with self.assertNumQueries(0): p2.value def test_only_fetch_mode_fetch_peers_single(self): p1 = Primary.objects.fetch_mode(FETCH_PEERS).only("name").get(name="p1") with self.assertNumQueries(1): p1.value def test_defer_fetch_mode_fetch_peers(self): p1, p2 = Primary.objects.fetch_mode(FETCH_PEERS).defer("value") with self.assertNumQueries(1): p1.value with self.assertNumQueries(0): p2.value def test_defer_fetch_mode_fetch_peers_single(self): p1 = 
Primary.objects.fetch_mode(FETCH_PEERS).defer("value").get(name="p1") with self.assertNumQueries(1): p1.value def test_only_fetch_mode_raise(self): p1 = Primary.objects.fetch_mode(RAISE).only("name").get(name="p1") msg = "Fetching of Primary.value blocked." with self.assertRaisesMessage(FieldFetchBlocked, msg) as cm: p1.value self.assertIsNone(cm.exception.__cause__) self.assertTrue(cm.exception.__suppress_context__) def test_defer_fetch_mode_raise(self): p1 = Primary.objects.fetch_mode(RAISE).defer("value").get(name="p1") msg = "Fetching of Primary.value blocked." with self.assertRaisesMessage(FieldFetchBlocked, msg) as cm: p1.value self.assertIsNone(cm.exception.__cause__) self.assertTrue(cm.exception.__suppress_context__) class DeferOtherDatabaseTests(TestCase): databases = {"other"} @classmethod def setUpTestData(cls): cls.s1 = Secondary.objects.using("other").create(first="x1", second="y1") cls.p1 = Primary.objects.using("other").create( name="p1", value="xx", related=cls.s1 ) cls.p2 = Primary.objects.using("other").create( name="p2", value="yy", related=cls.s1 ) def test_defer_fetch_mode_fetch_peers(self): p1, p2 = Primary.objects.using("other").fetch_mode(FETCH_PEERS).defer("value") with self.assertNumQueries(1, using="other"): p1.value with self.assertNumQueries(0, using="other"): p2.value class BigChildDeferTests(AssertionMixin, TestCase): @classmethod def setUpTestData(cls): cls.s1 = Secondary.objects.create(first="x1", second="y1") BigChild.objects.create(name="b1", value="foo", related=cls.s1, other="bar") def test_defer_baseclass_when_subclass_has_added_field(self): # You can defer a field on a baseclass obj = BigChild.objects.defer("value").get(name="b1") self.assert_delayed(obj, 1) self.assertEqual(obj.name, "b1") self.assertEqual(obj.value, "foo") self.assertEqual(obj.other, "bar") def test_defer_subclass(self): # You can defer a field on a subclass obj = BigChild.objects.defer("other").get(name="b1") self.assert_delayed(obj, 1) 
self.assertEqual(obj.name, "b1") self.assertEqual(obj.value, "foo") self.assertEqual(obj.other, "bar") def test_defer_subclass_both(self): # Deferring fields from both superclass and subclass works. obj = BigChild.objects.defer("other", "value").get(name="b1") self.assert_delayed(obj, 2) def test_only_baseclass_when_subclass_has_added_field(self): # You can retrieve a single field on a baseclass obj = BigChild.objects.only("name").get(name="b1") # when inherited model, its PK is also fetched, hence '4' deferred # fields. self.assert_delayed(obj, 4) self.assertEqual(obj.name, "b1") self.assertEqual(obj.value, "foo") self.assertEqual(obj.other, "bar") def test_only_subclass(self): # You can retrieve a single field on a subclass obj = BigChild.objects.only("other").get(name="b1") self.assert_delayed(obj, 4) self.assertEqual(obj.name, "b1") self.assertEqual(obj.value, "foo") self.assertEqual(obj.other, "bar") class TestDefer2(AssertionMixin, TestCase): def test_defer_proxy(self): """ Ensure select_related together with only on a proxy model behaves as expected. See #17876. """ related = Secondary.objects.create(first="x1", second="x2") ChildProxy.objects.create(name="p1", value="xx", related=related) children = ChildProxy.objects.select_related().only("id", "name") self.assertEqual(len(children), 1) child = children[0] self.assert_delayed(child, 2) self.assertEqual(child.name, "p1") self.assertEqual(child.value, "xx") def test_defer_inheritance_pk_chaining(self): """ When an inherited model is fetched from the DB, its PK is also fetched. When getting the PK of the parent model it is useful to use the already fetched parent model PK if it happens to be available. 
""" s1 = Secondary.objects.create(first="x1", second="y1") bc = BigChild.objects.create(name="b1", value="foo", related=s1, other="bar") bc_deferred = BigChild.objects.only("name").get(pk=bc.pk) with self.assertNumQueries(0): bc_deferred.id self.assertEqual(bc_deferred.pk, bc_deferred.id) def test_eq(self): s1 = Secondary.objects.create(first="x1", second="y1") s1_defer = Secondary.objects.only("pk").get(pk=s1.pk) self.assertEqual(s1, s1_defer) self.assertEqual(s1_defer, s1) def test_refresh_not_loading_deferred_fields(self): s = Secondary.objects.create() rf = Primary.objects.create(name="foo", value="bar", related=s) rf2 = Primary.objects.only("related", "value").get() rf.name = "new foo" rf.value = "new bar" rf.save() with self.assertNumQueries(1): rf2.refresh_from_db() self.assertEqual(rf2.value, "new bar") with self.assertNumQueries(1): self.assertEqual(rf2.name, "new foo") def test_custom_refresh_on_deferred_loading(self): s = Secondary.objects.create() rf = RefreshPrimaryProxy.objects.create(name="foo", value="bar", related=s) rf2 = RefreshPrimaryProxy.objects.only("related").get() rf.name = "new foo" rf.value = "new bar" rf.save() with self.assertNumQueries(1): # Customized refresh_from_db() reloads all deferred fields on # access of any of them. 
self.assertEqual(rf2.name, "new foo") self.assertEqual(rf2.value, "new bar") def test_refresh_when_one_field_deferred(self): s = Secondary.objects.create() PrimaryOneToOne.objects.create(name="foo", value="bar", related=s) s = Secondary.objects.defer("first").get() p_before = s.primary_o2o s.refresh_from_db() self.assertIsNot(s.primary_o2o, p_before) class InvalidDeferTests(SimpleTestCase): def test_invalid_defer(self): msg = "Primary has no field named 'missing'" with self.assertRaisesMessage(FieldDoesNotExist, msg): list(Primary.objects.defer("missing")) with self.assertRaisesMessage(FieldError, "missing"): list(Primary.objects.defer("value__missing")) msg = "Secondary has no field named 'missing'" with self.assertRaisesMessage(FieldDoesNotExist, msg): list(Primary.objects.defer("related__missing")) def test_invalid_only(self): msg = "Primary has no field named 'missing'" with self.assertRaisesMessage(FieldDoesNotExist, msg): list(Primary.objects.only("missing")) with self.assertRaisesMessage(FieldError, "missing"): list(Primary.objects.only("value__missing")) msg = "Secondary has no field named 'missing'" with self.assertRaisesMessage(FieldDoesNotExist, msg): list(Primary.objects.only("related__missing")) def test_defer_select_related_raises_invalid_query(self): msg = ( "Field Primary.related cannot be both deferred and traversed using " "select_related at the same time." ) with self.assertRaisesMessage(FieldError, msg): Primary.objects.defer("related").select_related("related")[0] def test_only_select_related_raises_invalid_query(self): msg = ( "Field Primary.related cannot be both deferred and traversed using " "select_related at the same time." 
) with self.assertRaisesMessage(FieldError, msg): Primary.objects.only("name").select_related("related")[0] class DeferredRelationTests(TestCase): @classmethod def setUpTestData(cls): cls.secondary = Secondary.objects.create(first="a", second="b") cls.primary = PrimaryOneToOne.objects.create( name="Bella", value="Baxter", related=cls.secondary ) def test_defer_not_clear_cached_relations(self): obj = Secondary.objects.defer("first").get(pk=self.secondary.pk) with self.assertNumQueries(1): obj.primary_o2o obj.first # Accessing a deferred field. with self.assertNumQueries(0): obj.primary_o2o def test_only_not_clear_cached_relations(self): obj = Secondary.objects.only("first").get(pk=self.secondary.pk) with self.assertNumQueries(1): obj.primary_o2o obj.second # Accessing a deferred field. with self.assertNumQueries(0): obj.primary_o2o
python
BSD-3-Clause
3201a895cba335000827b28768a7b7105c81b415
2026-01-04T14:38:15.489092Z
false
django/django
https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/datetimes/models.py
tests/datetimes/models.py
from django.db import models class Article(models.Model): title = models.CharField(max_length=100) pub_date = models.DateTimeField() published_on = models.DateField(null=True) categories = models.ManyToManyField("Category", related_name="articles") def __str__(self): return self.title class Comment(models.Model): article = models.ForeignKey(Article, models.CASCADE, related_name="comments") text = models.TextField() pub_date = models.DateTimeField() approval_date = models.DateTimeField(null=True) def __str__(self): return "Comment to %s (%s)" % (self.article.title, self.pub_date) class Category(models.Model): name = models.CharField(max_length=255)
python
BSD-3-Clause
3201a895cba335000827b28768a7b7105c81b415
2026-01-04T14:38:15.489092Z
false
django/django
https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/datetimes/__init__.py
tests/datetimes/__init__.py
python
BSD-3-Clause
3201a895cba335000827b28768a7b7105c81b415
2026-01-04T14:38:15.489092Z
false
django/django
https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/datetimes/tests.py
tests/datetimes/tests.py
import datetime from django.test import TestCase, override_settings from django.utils import timezone from .models import Article, Category, Comment class DateTimesTests(TestCase): def test_related_model_traverse(self): a1 = Article.objects.create( title="First one", pub_date=datetime.datetime(2005, 7, 28, 9, 0, 0), ) a2 = Article.objects.create( title="Another one", pub_date=datetime.datetime(2010, 7, 28, 10, 0, 0), ) a3 = Article.objects.create( title="Third one, in the first day", pub_date=datetime.datetime(2005, 7, 28, 17, 0, 0), ) a1.comments.create( text="Im the HULK!", pub_date=datetime.datetime(2005, 7, 28, 9, 30, 0), ) a1.comments.create( text="HULK SMASH!", pub_date=datetime.datetime(2005, 7, 29, 1, 30, 0), ) a2.comments.create( text="LMAO", pub_date=datetime.datetime(2010, 7, 28, 10, 10, 10), ) a3.comments.create( text="+1", pub_date=datetime.datetime(2005, 8, 29, 10, 10, 10), ) c = Category.objects.create(name="serious-news") c.articles.add(a1, a3) self.assertSequenceEqual( Comment.objects.datetimes("article__pub_date", "year"), [ datetime.datetime(2005, 1, 1), datetime.datetime(2010, 1, 1), ], ) self.assertSequenceEqual( Comment.objects.datetimes("article__pub_date", "month"), [ datetime.datetime(2005, 7, 1), datetime.datetime(2010, 7, 1), ], ) self.assertSequenceEqual( Comment.objects.datetimes("article__pub_date", "week"), [ datetime.datetime(2005, 7, 25), datetime.datetime(2010, 7, 26), ], ) self.assertSequenceEqual( Comment.objects.datetimes("article__pub_date", "day"), [ datetime.datetime(2005, 7, 28), datetime.datetime(2010, 7, 28), ], ) self.assertSequenceEqual( Article.objects.datetimes("comments__pub_date", "day"), [ datetime.datetime(2005, 7, 28), datetime.datetime(2005, 7, 29), datetime.datetime(2005, 8, 29), datetime.datetime(2010, 7, 28), ], ) self.assertSequenceEqual( Article.objects.datetimes("comments__approval_date", "day"), [] ) self.assertSequenceEqual( Category.objects.datetimes("articles__pub_date", "day"), [ 
datetime.datetime(2005, 7, 28), ], ) @override_settings(USE_TZ=True) def test_21432(self): now = timezone.localtime(timezone.now().replace(microsecond=0)) Article.objects.create(title="First one", pub_date=now) qs = Article.objects.datetimes("pub_date", "second") self.assertEqual(qs[0], now) def test_datetimes_returns_available_dates_for_given_scope_and_given_field(self): pub_dates = [ datetime.datetime(2005, 7, 28, 12, 15), datetime.datetime(2005, 7, 29, 2, 15), datetime.datetime(2005, 7, 30, 5, 15), datetime.datetime(2005, 7, 31, 19, 15), ] for i, pub_date in enumerate(pub_dates): Article(pub_date=pub_date, title="title #{}".format(i)).save() self.assertSequenceEqual( Article.objects.datetimes("pub_date", "year"), [datetime.datetime(2005, 1, 1, 0, 0)], ) self.assertSequenceEqual( Article.objects.datetimes("pub_date", "month"), [datetime.datetime(2005, 7, 1, 0, 0)], ) self.assertSequenceEqual( Article.objects.datetimes("pub_date", "week"), [datetime.datetime(2005, 7, 25, 0, 0)], ) self.assertSequenceEqual( Article.objects.datetimes("pub_date", "day"), [ datetime.datetime(2005, 7, 28, 0, 0), datetime.datetime(2005, 7, 29, 0, 0), datetime.datetime(2005, 7, 30, 0, 0), datetime.datetime(2005, 7, 31, 0, 0), ], ) self.assertSequenceEqual( Article.objects.datetimes("pub_date", "day", order="ASC"), [ datetime.datetime(2005, 7, 28, 0, 0), datetime.datetime(2005, 7, 29, 0, 0), datetime.datetime(2005, 7, 30, 0, 0), datetime.datetime(2005, 7, 31, 0, 0), ], ) self.assertSequenceEqual( Article.objects.datetimes("pub_date", "day", order="DESC"), [ datetime.datetime(2005, 7, 31, 0, 0), datetime.datetime(2005, 7, 30, 0, 0), datetime.datetime(2005, 7, 29, 0, 0), datetime.datetime(2005, 7, 28, 0, 0), ], ) def test_datetimes_has_lazy_iterator(self): pub_dates = [ datetime.datetime(2005, 7, 28, 12, 15), datetime.datetime(2005, 7, 29, 2, 15), datetime.datetime(2005, 7, 30, 5, 15), datetime.datetime(2005, 7, 31, 19, 15), ] for i, pub_date in enumerate(pub_dates): 
Article(pub_date=pub_date, title="title #{}".format(i)).save() # Use iterator() with datetimes() to return a generator that lazily # requests each result one at a time, to save memory. dates = [] with self.assertNumQueries(0): article_datetimes_iterator = Article.objects.datetimes( "pub_date", "day", order="DESC" ).iterator() with self.assertNumQueries(1): for article in article_datetimes_iterator: dates.append(article) self.assertEqual( dates, [ datetime.datetime(2005, 7, 31, 0, 0), datetime.datetime(2005, 7, 30, 0, 0), datetime.datetime(2005, 7, 29, 0, 0), datetime.datetime(2005, 7, 28, 0, 0), ], ) def test_datetimes_disallows_date_fields(self): dt = datetime.datetime(2005, 7, 28, 12, 15) Article.objects.create( pub_date=dt, published_on=dt.date(), title="Don't put dates into datetime functions!", ) with self.assertRaisesMessage( ValueError, "Cannot truncate DateField 'published_on' to DateTimeField" ): list(Article.objects.datetimes("published_on", "second")) def test_datetimes_fails_when_given_invalid_kind_argument(self): msg = ( "'kind' must be one of 'year', 'month', 'week', 'day', 'hour', " "'minute', or 'second'." ) with self.assertRaisesMessage(ValueError, msg): Article.objects.datetimes("pub_date", "bad_kind") def test_datetimes_fails_when_given_invalid_order_argument(self): msg = "'order' must be either 'ASC' or 'DESC'." with self.assertRaisesMessage(ValueError, msg): Article.objects.datetimes("pub_date", "year", order="bad order")
python
BSD-3-Clause
3201a895cba335000827b28768a7b7105c81b415
2026-01-04T14:38:15.489092Z
false
django/django
https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/field_deconstruction/__init__.py
tests/field_deconstruction/__init__.py
python
BSD-3-Clause
3201a895cba335000827b28768a7b7105c81b415
2026-01-04T14:38:15.489092Z
false
django/django
https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/field_deconstruction/tests.py
tests/field_deconstruction/tests.py
from django.apps import apps from django.db import models from django.test import SimpleTestCase, override_settings from django.test.utils import isolate_lru_cache from django.utils.choices import normalize_choices class FieldDeconstructionTests(SimpleTestCase): """ Tests the deconstruct() method on all core fields. """ def test_name(self): """ Tests the outputting of the correct name if assigned one. """ # First try using a "normal" field field = models.CharField(max_length=65) name, path, args, kwargs = field.deconstruct() self.assertIsNone(name) field.set_attributes_from_name("is_awesome_test") name, path, args, kwargs = field.deconstruct() self.assertEqual(name, "is_awesome_test") # Now try with a ForeignKey field = models.ForeignKey("some_fake.ModelName", models.CASCADE) name, path, args, kwargs = field.deconstruct() self.assertIsNone(name) field.set_attributes_from_name("author") name, path, args, kwargs = field.deconstruct() self.assertEqual(name, "author") def test_db_tablespace(self): field = models.Field() _, _, args, kwargs = field.deconstruct() self.assertEqual(args, []) self.assertEqual(kwargs, {}) # With a DEFAULT_DB_TABLESPACE. with self.settings(DEFAULT_DB_TABLESPACE="foo"): _, _, args, kwargs = field.deconstruct() self.assertEqual(args, []) self.assertEqual(kwargs, {}) # With a db_tablespace. field = models.Field(db_tablespace="foo") _, _, args, kwargs = field.deconstruct() self.assertEqual(args, []) self.assertEqual(kwargs, {"db_tablespace": "foo"}) # With a db_tablespace equal to DEFAULT_DB_TABLESPACE. 
with self.settings(DEFAULT_DB_TABLESPACE="foo"): _, _, args, kwargs = field.deconstruct() self.assertEqual(args, []) self.assertEqual(kwargs, {"db_tablespace": "foo"}) def test_auto_field(self): field = models.AutoField(primary_key=True) field.set_attributes_from_name("id") name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.AutoField") self.assertEqual(args, []) self.assertEqual(kwargs, {"primary_key": True}) def test_big_integer_field(self): field = models.BigIntegerField() name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.BigIntegerField") self.assertEqual(args, []) self.assertEqual(kwargs, {}) def test_boolean_field(self): field = models.BooleanField() name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.BooleanField") self.assertEqual(args, []) self.assertEqual(kwargs, {}) field = models.BooleanField(default=True) name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.BooleanField") self.assertEqual(args, []) self.assertEqual(kwargs, {"default": True}) def test_char_field(self): field = models.CharField(max_length=65) name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.CharField") self.assertEqual(args, []) self.assertEqual(kwargs, {"max_length": 65}) field = models.CharField(max_length=65, null=True, blank=True) name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.CharField") self.assertEqual(args, []) self.assertEqual(kwargs, {"max_length": 65, "null": True, "blank": True}) def test_char_field_choices(self): field = models.CharField(max_length=1, choices=(("A", "One"), ("B", "Two"))) name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.CharField") self.assertEqual(args, []) self.assertEqual( kwargs, {"choices": [("A", "One"), ("B", "Two")], "max_length": 1} ) def test_choices_iterator(self): field = 
models.IntegerField(choices=((i, str(i)) for i in range(3))) name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.IntegerField") self.assertEqual(args, []) self.assertEqual(kwargs, {"choices": [(0, "0"), (1, "1"), (2, "2")]}) def test_choices_iterable(self): # Pass an iterable (but not an iterator) to choices. field = models.IntegerField(choices="012345") name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.IntegerField") self.assertEqual(args, []) self.assertEqual(kwargs, {"choices": normalize_choices("012345")}) def test_choices_callable(self): def get_choices(): return [(i, str(i)) for i in range(3)] field = models.IntegerField(choices=get_choices) name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.IntegerField") self.assertEqual(args, []) self.assertEqual(kwargs, {"choices": get_choices}) def test_csi_field(self): field = models.CommaSeparatedIntegerField(max_length=100) name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.CommaSeparatedIntegerField") self.assertEqual(args, []) self.assertEqual(kwargs, {"max_length": 100}) def test_date_field(self): field = models.DateField() name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.DateField") self.assertEqual(args, []) self.assertEqual(kwargs, {}) field = models.DateField(auto_now=True) name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.DateField") self.assertEqual(args, []) self.assertEqual(kwargs, {"auto_now": True}) def test_datetime_field(self): field = models.DateTimeField() name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.DateTimeField") self.assertEqual(args, []) self.assertEqual(kwargs, {}) field = models.DateTimeField(auto_now_add=True) name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.DateTimeField") 
self.assertEqual(args, []) self.assertEqual(kwargs, {"auto_now_add": True}) # Bug #21785 field = models.DateTimeField(auto_now=True, auto_now_add=True) name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.DateTimeField") self.assertEqual(args, []) self.assertEqual(kwargs, {"auto_now_add": True, "auto_now": True}) def test_decimal_field(self): field = models.DecimalField(max_digits=5, decimal_places=2) name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.DecimalField") self.assertEqual(args, []) self.assertEqual(kwargs, {"max_digits": 5, "decimal_places": 2}) def test_decimal_field_0_decimal_places(self): """ A DecimalField with decimal_places=0 should work (#22272). """ field = models.DecimalField(max_digits=5, decimal_places=0) name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.DecimalField") self.assertEqual(args, []) self.assertEqual(kwargs, {"max_digits": 5, "decimal_places": 0}) def test_email_field(self): field = models.EmailField() name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.EmailField") self.assertEqual(args, []) self.assertEqual(kwargs, {"max_length": 254}) field = models.EmailField(max_length=255) name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.EmailField") self.assertEqual(args, []) self.assertEqual(kwargs, {"max_length": 255}) def test_file_field(self): field = models.FileField(upload_to="foo/bar") name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.FileField") self.assertEqual(args, []) self.assertEqual(kwargs, {"upload_to": "foo/bar"}) # Test max_length field = models.FileField(upload_to="foo/bar", max_length=200) name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.FileField") self.assertEqual(args, []) self.assertEqual(kwargs, {"upload_to": "foo/bar", "max_length": 200}) def 
test_file_path_field(self): field = models.FilePathField(match=r".*\.txt$") name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.FilePathField") self.assertEqual(args, []) self.assertEqual(kwargs, {"match": r".*\.txt$"}) field = models.FilePathField(recursive=True, allow_folders=True, max_length=123) name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.FilePathField") self.assertEqual(args, []) self.assertEqual( kwargs, {"recursive": True, "allow_folders": True, "max_length": 123} ) def test_float_field(self): field = models.FloatField() name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.FloatField") self.assertEqual(args, []) self.assertEqual(kwargs, {}) def test_foreign_key(self): # Test basic pointing from django.contrib.auth.models import Permission field = models.ForeignKey("auth.Permission", models.CASCADE) field.remote_field.model = Permission field.remote_field.field_name = "id" name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.ForeignKey") self.assertEqual(args, []) self.assertEqual(kwargs, {"to": "auth.permission", "on_delete": models.CASCADE}) self.assertFalse(hasattr(kwargs["to"], "setting_name")) # Test swap detection for swappable model field = models.ForeignKey("auth.User", models.CASCADE) name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.ForeignKey") self.assertEqual(args, []) self.assertEqual(kwargs, {"to": "auth.user", "on_delete": models.CASCADE}) self.assertEqual(kwargs["to"].setting_name, "AUTH_USER_MODEL") # Swap detection for lowercase swappable model. 
field = models.ForeignKey("auth.user", models.CASCADE) name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.ForeignKey") self.assertEqual(args, []) self.assertEqual(kwargs, {"to": "auth.user", "on_delete": models.CASCADE}) self.assertEqual(kwargs["to"].setting_name, "AUTH_USER_MODEL") # Test nonexistent (for now) model field = models.ForeignKey("something.Else", models.CASCADE) name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.ForeignKey") self.assertEqual(args, []) self.assertEqual(kwargs, {"to": "something.else", "on_delete": models.CASCADE}) # Test on_delete field = models.ForeignKey("auth.User", models.SET_NULL) name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.ForeignKey") self.assertEqual(args, []) self.assertEqual(kwargs, {"to": "auth.user", "on_delete": models.SET_NULL}) # Test to_field preservation field = models.ForeignKey("auth.Permission", models.CASCADE, to_field="foobar") name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.ForeignKey") self.assertEqual(args, []) self.assertEqual( kwargs, { "to": "auth.permission", "to_field": "foobar", "on_delete": models.CASCADE, }, ) # Test related_name preservation field = models.ForeignKey( "auth.Permission", models.CASCADE, related_name="foobar" ) name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.ForeignKey") self.assertEqual(args, []) self.assertEqual( kwargs, { "to": "auth.permission", "related_name": "foobar", "on_delete": models.CASCADE, }, ) # Test related_query_name field = models.ForeignKey( "auth.Permission", models.CASCADE, related_query_name="foobar" ) name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.ForeignKey") self.assertEqual(args, []) self.assertEqual( kwargs, { "to": "auth.permission", "related_query_name": "foobar", "on_delete": models.CASCADE, }, ) # Test limit_choices_to 
field = models.ForeignKey( "auth.Permission", models.CASCADE, limit_choices_to={"foo": "bar"} ) name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.ForeignKey") self.assertEqual(args, []) self.assertEqual( kwargs, { "to": "auth.permission", "limit_choices_to": {"foo": "bar"}, "on_delete": models.CASCADE, }, ) # Test unique field = models.ForeignKey("auth.Permission", models.CASCADE, unique=True) name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.ForeignKey") self.assertEqual(args, []) self.assertEqual( kwargs, {"to": "auth.permission", "unique": True, "on_delete": models.CASCADE}, ) @override_settings(AUTH_USER_MODEL="auth.Permission") def test_foreign_key_swapped(self): with isolate_lru_cache(apps.get_swappable_settings_name): # It doesn't matter that we swapped out user for permission; # there's no validation. We just want to check the setting stuff # works. field = models.ForeignKey("auth.Permission", models.CASCADE) name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.ForeignKey") self.assertEqual(args, []) self.assertEqual(kwargs, {"to": "auth.permission", "on_delete": models.CASCADE}) self.assertEqual(kwargs["to"].setting_name, "AUTH_USER_MODEL") # Model names are case-insensitive. with isolate_lru_cache(apps.get_swappable_settings_name): # It doesn't matter that we swapped out user for permission; # there's no validation. We just want to check the setting stuff # works. 
field = models.ForeignKey("auth.permission", models.CASCADE) name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.ForeignKey") self.assertEqual(args, []) self.assertEqual(kwargs, {"to": "auth.permission", "on_delete": models.CASCADE}) self.assertEqual(kwargs["to"].setting_name, "AUTH_USER_MODEL") def test_one_to_one(self): # Test basic pointing from django.contrib.auth.models import Permission field = models.OneToOneField("auth.Permission", models.CASCADE) field.remote_field.model = Permission field.remote_field.field_name = "id" name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.OneToOneField") self.assertEqual(args, []) self.assertEqual(kwargs, {"to": "auth.permission", "on_delete": models.CASCADE}) self.assertFalse(hasattr(kwargs["to"], "setting_name")) # Test swap detection for swappable model field = models.OneToOneField("auth.User", models.CASCADE) name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.OneToOneField") self.assertEqual(args, []) self.assertEqual(kwargs, {"to": "auth.user", "on_delete": models.CASCADE}) self.assertEqual(kwargs["to"].setting_name, "AUTH_USER_MODEL") # Test nonexistent (for now) model field = models.OneToOneField("something.Else", models.CASCADE) name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.OneToOneField") self.assertEqual(args, []) self.assertEqual(kwargs, {"to": "something.else", "on_delete": models.CASCADE}) # Test on_delete field = models.OneToOneField("auth.User", models.SET_NULL) name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.OneToOneField") self.assertEqual(args, []) self.assertEqual(kwargs, {"to": "auth.user", "on_delete": models.SET_NULL}) # Test to_field field = models.OneToOneField( "auth.Permission", models.CASCADE, to_field="foobar" ) name, path, args, kwargs = field.deconstruct() self.assertEqual(path, 
"django.db.models.OneToOneField") self.assertEqual(args, []) self.assertEqual( kwargs, { "to": "auth.permission", "to_field": "foobar", "on_delete": models.CASCADE, }, ) # Test related_name field = models.OneToOneField( "auth.Permission", models.CASCADE, related_name="foobar" ) name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.OneToOneField") self.assertEqual(args, []) self.assertEqual( kwargs, { "to": "auth.permission", "related_name": "foobar", "on_delete": models.CASCADE, }, ) # Test related_query_name field = models.OneToOneField( "auth.Permission", models.CASCADE, related_query_name="foobar" ) name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.OneToOneField") self.assertEqual(args, []) self.assertEqual( kwargs, { "to": "auth.permission", "related_query_name": "foobar", "on_delete": models.CASCADE, }, ) # Test limit_choices_to field = models.OneToOneField( "auth.Permission", models.CASCADE, limit_choices_to={"foo": "bar"} ) name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.OneToOneField") self.assertEqual(args, []) self.assertEqual( kwargs, { "to": "auth.permission", "limit_choices_to": {"foo": "bar"}, "on_delete": models.CASCADE, }, ) # Test unique field = models.OneToOneField("auth.Permission", models.CASCADE, unique=True) name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.OneToOneField") self.assertEqual(args, []) self.assertEqual(kwargs, {"to": "auth.permission", "on_delete": models.CASCADE}) def test_image_field(self): field = models.ImageField( upload_to="foo/barness", width_field="width", height_field="height" ) name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.ImageField") self.assertEqual(args, []) self.assertEqual( kwargs, { "upload_to": "foo/barness", "width_field": "width", "height_field": "height", }, ) def test_integer_field(self): field = models.IntegerField() 
name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.IntegerField") self.assertEqual(args, []) self.assertEqual(kwargs, {}) def test_ip_address_field(self): field = models.IPAddressField() name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.IPAddressField") self.assertEqual(args, []) self.assertEqual(kwargs, {}) def test_generic_ip_address_field(self): field = models.GenericIPAddressField() name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.GenericIPAddressField") self.assertEqual(args, []) self.assertEqual(kwargs, {}) field = models.GenericIPAddressField(protocol="IPv6") name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.GenericIPAddressField") self.assertEqual(args, []) self.assertEqual(kwargs, {"protocol": "IPv6"}) def test_many_to_many_field(self): # Test normal field = models.ManyToManyField("auth.Permission") name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.ManyToManyField") self.assertEqual(args, []) self.assertEqual(kwargs, {"to": "auth.permission"}) self.assertFalse(hasattr(kwargs["to"], "setting_name")) # Test swappable field = models.ManyToManyField("auth.User") name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.ManyToManyField") self.assertEqual(args, []) self.assertEqual(kwargs, {"to": "auth.user"}) self.assertEqual(kwargs["to"].setting_name, "AUTH_USER_MODEL") # Test through field = models.ManyToManyField("auth.Permission", through="auth.Group") name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.ManyToManyField") self.assertEqual(args, []) self.assertEqual(kwargs, {"to": "auth.permission", "through": "auth.Group"}) # Test through_fields field = models.ManyToManyField( "auth.Permission", through="auth.Group", through_fields=("foo", "permissions"), ) name, path, args, kwargs = field.deconstruct() 
self.assertEqual(path, "django.db.models.ManyToManyField") self.assertEqual(args, []) self.assertEqual( kwargs, { "to": "auth.permission", "through": "auth.Group", "through_fields": ("foo", "permissions"), }, ) # Test custom db_table field = models.ManyToManyField("auth.Permission", db_table="custom_table") name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.ManyToManyField") self.assertEqual(args, []) self.assertEqual(kwargs, {"to": "auth.permission", "db_table": "custom_table"}) # Test related_name field = models.ManyToManyField("auth.Permission", related_name="custom_table") name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.ManyToManyField") self.assertEqual(args, []) self.assertEqual( kwargs, {"to": "auth.permission", "related_name": "custom_table"} ) # Test related_query_name field = models.ManyToManyField("auth.Permission", related_query_name="foobar") name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.ManyToManyField") self.assertEqual(args, []) self.assertEqual( kwargs, {"to": "auth.permission", "related_query_name": "foobar"} ) # Test limit_choices_to field = models.ManyToManyField( "auth.Permission", limit_choices_to={"foo": "bar"} ) name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.ManyToManyField") self.assertEqual(args, []) self.assertEqual( kwargs, {"to": "auth.permission", "limit_choices_to": {"foo": "bar"}} ) @override_settings(AUTH_USER_MODEL="auth.Permission") def test_many_to_many_field_swapped(self): with isolate_lru_cache(apps.get_swappable_settings_name): # It doesn't matter that we swapped out user for permission; # there's no validation. We just want to check the setting stuff # works. 
field = models.ManyToManyField("auth.Permission") name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.ManyToManyField") self.assertEqual(args, []) self.assertEqual(kwargs, {"to": "auth.permission"}) self.assertEqual(kwargs["to"].setting_name, "AUTH_USER_MODEL") def test_many_to_many_field_related_name(self): class MyModel(models.Model): flag = models.BooleanField(default=True) m2m = models.ManyToManyField("self") m2m_related_name = models.ManyToManyField( "self", related_query_name="custom_query_name", limit_choices_to={"flag": True}, ) name, path, args, kwargs = MyModel.m2m.field.deconstruct() self.assertEqual(path, "django.db.models.ManyToManyField") self.assertEqual(args, []) # deconstruct() should not include attributes which were not passed to # the field during initialization. self.assertEqual(kwargs, {"to": "field_deconstruction.mymodel"}) # Passed attributes. name, path, args, kwargs = MyModel.m2m_related_name.field.deconstruct() self.assertEqual(path, "django.db.models.ManyToManyField") self.assertEqual(args, []) self.assertEqual( kwargs, { "to": "field_deconstruction.mymodel", "related_query_name": "custom_query_name", "limit_choices_to": {"flag": True}, }, ) def test_positive_integer_field(self): field = models.PositiveIntegerField() name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.PositiveIntegerField") self.assertEqual(args, []) self.assertEqual(kwargs, {}) def test_positive_small_integer_field(self): field = models.PositiveSmallIntegerField() name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.PositiveSmallIntegerField") self.assertEqual(args, []) self.assertEqual(kwargs, {}) def test_positive_big_integer_field(self): field = models.PositiveBigIntegerField() name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.PositiveBigIntegerField") self.assertEqual(args, []) self.assertEqual(kwargs, {}) def 
test_slug_field(self): field = models.SlugField() name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.SlugField") self.assertEqual(args, []) self.assertEqual(kwargs, {}) field = models.SlugField(db_index=False, max_length=231) name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.SlugField") self.assertEqual(args, []) self.assertEqual(kwargs, {"db_index": False, "max_length": 231}) def test_small_integer_field(self): field = models.SmallIntegerField() name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.SmallIntegerField") self.assertEqual(args, []) self.assertEqual(kwargs, {}) def test_text_field(self): field = models.TextField() name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.TextField") self.assertEqual(args, []) self.assertEqual(kwargs, {}) def test_time_field(self): field = models.TimeField() name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.TimeField") self.assertEqual(args, []) self.assertEqual(kwargs, {}) field = models.TimeField(auto_now=True) name, path, args, kwargs = field.deconstruct() self.assertEqual(args, []) self.assertEqual(kwargs, {"auto_now": True}) field = models.TimeField(auto_now_add=True) name, path, args, kwargs = field.deconstruct() self.assertEqual(args, []) self.assertEqual(kwargs, {"auto_now_add": True}) def test_url_field(self): field = models.URLField() name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.URLField") self.assertEqual(args, []) self.assertEqual(kwargs, {}) field = models.URLField(max_length=231) name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.URLField") self.assertEqual(args, []) self.assertEqual(kwargs, {"max_length": 231}) def test_binary_field(self): field = models.BinaryField() name, path, args, kwargs = field.deconstruct() self.assertEqual(path, 
"django.db.models.BinaryField") self.assertEqual(args, []) self.assertEqual(kwargs, {}) field = models.BinaryField(editable=True) name, path, args, kwargs = field.deconstruct() self.assertEqual(args, []) self.assertEqual(kwargs, {"editable": True})
python
BSD-3-Clause
3201a895cba335000827b28768a7b7105c81b415
2026-01-04T14:38:15.489092Z
false
django/django
https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/user_commands/models.py
tests/user_commands/models.py
""" User-registered management commands The ``manage.py`` utility provides a number of useful commands for managing a Django project. If you want to add a utility command of your own, you can. The user-defined command ``dance`` is defined in the management/commands subdirectory of this test application. It is a simple command that responds with a printed message when invoked. For more details on how to define your own ``manage.py`` commands, look at the ``django.core.management.commands`` directory. This directory contains the definitions for the base Django ``manage.py`` commands. """
python
BSD-3-Clause
3201a895cba335000827b28768a7b7105c81b415
2026-01-04T14:38:15.489092Z
false
django/django
https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/user_commands/utils.py
tests/user_commands/utils.py
from io import StringIO from unittest import mock class AssertFormatterFailureCaughtContext: def __init__(self, test, shutil_which_result="nonexistent"): self.stdout = StringIO() self.stderr = StringIO() self.test = test self.shutil_which_result = shutil_which_result def __enter__(self): self.mocker = mock.patch( "django.core.management.utils.shutil.which", return_value=self.shutil_which_result, ) self.mocker.start() return self def __exit__(self, exc_type, exc_value, traceback): self.mocker.stop() self.test.assertIn("Formatters failed to launch", self.stderr.getvalue())
python
BSD-3-Clause
3201a895cba335000827b28768a7b7105c81b415
2026-01-04T14:38:15.489092Z
false
django/django
https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/user_commands/__init__.py
tests/user_commands/__init__.py
python
BSD-3-Clause
3201a895cba335000827b28768a7b7105c81b415
2026-01-04T14:38:15.489092Z
false
django/django
https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/user_commands/tests.py
tests/user_commands/tests.py
import os import sys import unittest from argparse import ArgumentDefaultsHelpFormatter from io import BytesIO, StringIO, TextIOWrapper from pathlib import Path from unittest import mock from admin_scripts.tests import AdminScriptTestCase from django.apps import apps from django.core import management from django.core.checks import Tags from django.core.management import BaseCommand, CommandError, find_commands from django.core.management.base import OutputWrapper from django.core.management.utils import ( find_command, get_random_secret_key, is_ignored_path, normalize_path_patterns, popen_wrapper, run_formatters, ) from django.db import connection from django.test import SimpleTestCase, override_settings from django.test.utils import captured_stderr, extend_sys_path from django.utils import translation from django.utils.version import PY314, PY315 from .management.commands import dance from .utils import AssertFormatterFailureCaughtContext class OutputWrapperTests(SimpleTestCase): def test_unhandled_exceptions(self): cases = [ StringIO("Hello world"), TextIOWrapper(BytesIO(b"Hello world")), ] for out in cases: with self.subTest(out=out): wrapper = OutputWrapper(out) out.close() unraisable_exceptions = [] def unraisablehook(unraisable): unraisable_exceptions.append(unraisable) sys.__unraisablehook__(unraisable) with mock.patch.object(sys, "unraisablehook", unraisablehook): del wrapper self.assertEqual(unraisable_exceptions, []) # A minimal set of apps to avoid system checks running on all apps. 
@override_settings( INSTALLED_APPS=[ "django.contrib.auth", "django.contrib.contenttypes", "user_commands", ], ) class CommandTests(SimpleTestCase): def test_command(self): out = StringIO() management.call_command("dance", stdout=out) self.assertIn("I don't feel like dancing Rock'n'Roll.\n", out.getvalue()) def test_command_style(self): out = StringIO() management.call_command("dance", style="Jive", stdout=out) self.assertIn("I don't feel like dancing Jive.\n", out.getvalue()) # Passing options as arguments also works (thanks argparse) management.call_command("dance", "--style", "Jive", stdout=out) self.assertIn("I don't feel like dancing Jive.\n", out.getvalue()) def test_language_preserved(self): with translation.override("fr"): management.call_command("dance", verbosity=0) self.assertEqual(translation.get_language(), "fr") def test_explode(self): """An unknown command raises CommandError""" with self.assertRaisesMessage(CommandError, "Unknown command: 'explode'"): management.call_command(("explode",)) def test_system_exit(self): """Exception raised in a command should raise CommandError with call_command, but SystemExit when run from command line """ with self.assertRaises(CommandError) as cm: management.call_command("dance", example="raise") self.assertEqual(cm.exception.returncode, 3) dance.Command.requires_system_checks = [] try: with captured_stderr() as stderr, self.assertRaises(SystemExit) as cm: management.ManagementUtility( ["manage.py", "dance", "--example=raise"] ).execute() self.assertEqual(cm.exception.code, 3) finally: dance.Command.requires_system_checks = "__all__" self.assertIn("CommandError", stderr.getvalue()) def test_no_translations_deactivate_translations(self): """ When the Command handle method is decorated with @no_translations, translations are deactivated inside the command. 
""" current_locale = translation.get_language() with translation.override("pl"): result = management.call_command("no_translations") self.assertIsNone(result) self.assertEqual(translation.get_language(), current_locale) def test_find_command_without_PATH(self): """ find_command should still work when the PATH environment variable doesn't exist (#22256). """ current_path = os.environ.pop("PATH", None) try: self.assertIsNone(find_command("_missing_")) finally: if current_path is not None: os.environ["PATH"] = current_path def test_discover_commands_in_eggs(self): """ Management commands can also be loaded from Python eggs. """ egg_dir = "%s/eggs" % os.path.dirname(__file__) egg_name = "%s/basic.egg" % egg_dir with extend_sys_path(egg_name): with self.settings(INSTALLED_APPS=["commandegg"]): cmds = find_commands( os.path.join(apps.get_app_config("commandegg").path, "management") ) self.assertEqual(cmds, ["eggcommand"]) def test_call_command_option_parsing(self): """ When passing the long option name to call_command, the available option key is the option dest name (#22985). """ out = StringIO() management.call_command("dance", stdout=out, opt_3=True) self.assertIn("option3", out.getvalue()) self.assertNotIn("opt_3", out.getvalue()) self.assertNotIn("opt-3", out.getvalue()) def test_call_command_option_parsing_non_string_arg(self): """ It should be possible to pass non-string arguments to call_command. """ out = StringIO() management.call_command("dance", 1, verbosity=0, stdout=out) self.assertIn("You passed 1 as a positional argument.", out.getvalue()) def test_calling_a_command_with_only_empty_parameter_should_ends_gracefully(self): out = StringIO() management.call_command("hal", "--empty", stdout=out) self.assertEqual(out.getvalue(), "\nDave, I can't do that.\n") def test_calling_command_with_app_labels_and_parameters_should_be_ok(self): out = StringIO() management.call_command("hal", "myapp", "--verbosity", "3", stdout=out) self.assertIn( "Dave, my mind is going. 
I can feel it. I can feel it.\n", out.getvalue() ) def test_calling_command_with_parameters_and_app_labels_at_the_end_should_be_ok( self, ): out = StringIO() management.call_command("hal", "--verbosity", "3", "myapp", stdout=out) self.assertIn( "Dave, my mind is going. I can feel it. I can feel it.\n", out.getvalue() ) def test_calling_a_command_with_no_app_labels_and_parameters_raise_command_error( self, ): with self.assertRaises(CommandError): management.call_command("hal") def test_output_transaction(self): output = management.call_command( "transaction", stdout=StringIO(), no_color=True ) self.assertTrue( output.strip().startswith(connection.ops.start_transaction_sql()) ) self.assertTrue(output.strip().endswith(connection.ops.end_transaction_sql())) def test_call_command_no_checks(self): """ By default, call_command should not trigger the check framework, unless specifically asked. """ self.counter = 0 def patched_check(self_, **kwargs): self.counter += 1 self.kwargs = kwargs saved_check = BaseCommand.check BaseCommand.check = patched_check try: management.call_command("dance", verbosity=0) self.assertEqual(self.counter, 0) management.call_command("dance", verbosity=0, skip_checks=False) self.assertEqual(self.counter, 1) self.assertEqual(self.kwargs, {}) finally: BaseCommand.check = saved_check def test_requires_system_checks_empty(self): with mock.patch( "django.core.management.base.BaseCommand.check" ) as mocked_check: management.call_command("no_system_checks") self.assertIs(mocked_check.called, False) def test_requires_system_checks_specific(self): with mock.patch( "django.core.management.base.BaseCommand.check" ) as mocked_check: management.call_command("specific_system_checks", skip_checks=False) mocked_check.assert_called_once_with(tags=[Tags.staticfiles, Tags.models]) def test_requires_system_checks_invalid(self): class Command(BaseCommand): requires_system_checks = "x" msg = "requires_system_checks must be a list or tuple." 
with self.assertRaisesMessage(TypeError, msg): Command() def test_check_migrations(self): requires_migrations_checks = dance.Command.requires_migrations_checks self.assertIs(requires_migrations_checks, False) try: with mock.patch.object(BaseCommand, "check_migrations") as check_migrations: management.call_command("dance", verbosity=0) self.assertFalse(check_migrations.called) dance.Command.requires_migrations_checks = True management.call_command("dance", verbosity=0) self.assertTrue(check_migrations.called) finally: dance.Command.requires_migrations_checks = requires_migrations_checks def test_call_command_unrecognized_option(self): msg = ( "Unknown option(s) for dance command: unrecognized. Valid options " "are: example, force_color, help, integer, no_color, opt_3, " "option3, pythonpath, settings, skip_checks, stderr, stdout, " "style, traceback, verbosity, version." ) with self.assertRaisesMessage(TypeError, msg): management.call_command("dance", unrecognized=1) msg = ( "Unknown option(s) for dance command: unrecognized, unrecognized2. " "Valid options are: example, force_color, help, integer, no_color, " "opt_3, option3, pythonpath, settings, skip_checks, stderr, " "stdout, style, traceback, verbosity, version." 
) with self.assertRaisesMessage(TypeError, msg): management.call_command("dance", unrecognized=1, unrecognized2=1) def test_call_command_with_required_parameters_in_options(self): out = StringIO() management.call_command( "required_option", need_me="foo", needme2="bar", stdout=out ) self.assertIn("need_me", out.getvalue()) self.assertIn("needme2", out.getvalue()) def test_call_command_with_required_parameters_in_mixed_options(self): out = StringIO() management.call_command( "required_option", "--need-me=foo", needme2="bar", stdout=out ) self.assertIn("need_me", out.getvalue()) self.assertIn("needme2", out.getvalue()) def test_command_add_arguments_after_common_arguments(self): out = StringIO() management.call_command("common_args", stdout=out) self.assertIn("Detected that --version already exists", out.getvalue()) def test_mutually_exclusive_group_required_options(self): out = StringIO() management.call_command("mutually_exclusive_required", foo_id=1, stdout=out) self.assertIn("foo_id", out.getvalue()) management.call_command( "mutually_exclusive_required", foo_name="foo", stdout=out ) self.assertIn("foo_name", out.getvalue()) msg = ( "Error: one of the arguments --foo-id --foo-name --foo-list " "--append_const --const --count --flag_false --flag_true is " "required" ) with self.assertRaisesMessage(CommandError, msg): management.call_command("mutually_exclusive_required", stdout=out) def test_mutually_exclusive_group_required_const_options(self): tests = [ ("append_const", [42]), ("const", 31), ("count", 1), ("flag_false", False), ("flag_true", True), ] for arg, value in tests: out = StringIO() expected_output = "%s=%s" % (arg, value) with self.subTest(arg=arg): management.call_command( "mutually_exclusive_required", "--%s" % arg, stdout=out, ) self.assertIn(expected_output, out.getvalue()) out.truncate(0) management.call_command( "mutually_exclusive_required", **{arg: value, "stdout": out}, ) self.assertIn(expected_output, out.getvalue()) def 
test_mutually_exclusive_group_required_with_same_dest_options(self): tests = [ {"until": "2"}, {"for": "1", "until": "2"}, ] msg = ( "Cannot pass the dest 'until' that matches multiple arguments via " "**options." ) for options in tests: with self.subTest(options=options): with self.assertRaisesMessage(TypeError, msg): management.call_command( "mutually_exclusive_required_with_same_dest", **options, ) def test_mutually_exclusive_group_required_with_same_dest_args(self): tests = [ ("--until=1",), ("--until", 1), ("--for=1",), ("--for", 1), ] for args in tests: out = StringIO() with self.subTest(options=args): management.call_command( "mutually_exclusive_required_with_same_dest", *args, stdout=out, ) output = out.getvalue() self.assertIn("until=1", output) def test_required_list_option(self): tests = [ (("--foo-list", [1, 2]), {}), ((), {"foo_list": [1, 2]}), ] for command in ["mutually_exclusive_required", "required_list_option"]: for args, kwargs in tests: with self.subTest(command=command, args=args, kwargs=kwargs): out = StringIO() management.call_command( command, *args, **kwargs, stdout=out, ) self.assertIn("foo_list=[1, 2]", out.getvalue()) def test_required_const_options(self): args = { "append_const": [42], "const": 31, "count": 1, "flag_false": False, "flag_true": True, } expected_output = "\n".join( "%s=%s" % (arg, value) for arg, value in args.items() ) out = StringIO() management.call_command( "required_constant_option", "--append_const", "--const", "--count", "--flag_false", "--flag_true", stdout=out, ) self.assertIn(expected_output, out.getvalue()) out.truncate(0) management.call_command("required_constant_option", **args, stdout=out) self.assertIn(expected_output, out.getvalue()) def test_subparser(self): out = StringIO() management.call_command("subparser", "foo", 12, stdout=out) self.assertIn("bar", out.getvalue()) def test_subparser_dest_args(self): out = StringIO() management.call_command("subparser_dest", "foo", bar=12, stdout=out) 
self.assertIn("bar", out.getvalue()) def test_subparser_dest_required_args(self): out = StringIO() management.call_command( "subparser_required", "foo_1", "foo_2", bar=12, stdout=out ) self.assertIn("bar", out.getvalue()) def test_subparser_invalid_option(self): msg = r"invalid choice: 'test' \(choose from '?foo'?\)" with self.assertRaisesRegex(CommandError, msg): management.call_command("subparser", "test", 12) msg = "Error: the following arguments are required: subcommand" with self.assertRaisesMessage(CommandError, msg): management.call_command("subparser_dest", subcommand="foo", bar=12) def test_create_parser_kwargs(self): """BaseCommand.create_parser() passes kwargs to CommandParser.""" epilog = "some epilog text" parser = BaseCommand().create_parser( "prog_name", "subcommand", epilog=epilog, formatter_class=ArgumentDefaultsHelpFormatter, ) self.assertEqual(parser.epilog, epilog) self.assertEqual(parser.formatter_class, ArgumentDefaultsHelpFormatter) def test_outputwrapper_flush(self): out = StringIO() with mock.patch.object(out, "flush") as mocked_flush: management.call_command("outputwrapper", stdout=out) self.assertIn("Working...", out.getvalue()) self.assertIs(mocked_flush.called, True) @unittest.skipUnless(PY314 and not PY315, "Only relevant for Python 3.14") def test_suggest_on_error_defaults_true(self): command = BaseCommand() parser = command.create_parser("prog_name", "subcommand") self.assertTrue(parser.suggest_on_error) @unittest.skipUnless(PY314 and not PY315, "Only relevant for Python 3.14") def test_suggest_on_error_explicit_false(self): command = BaseCommand() parser = command.create_parser( "prog_name", "subcommand", suggest_on_error=False ) self.assertFalse(parser.suggest_on_error) @unittest.skipUnless(PY314, "Only relevant for Python 3.14+") def test_color_enabled_by_default(self): with mock.patch.dict(os.environ, {}, clear=True): command = BaseCommand() parser = command.create_parser("prog_name", "subcommand") self.assertTrue(parser.color) 
@unittest.skipUnless(PY314, "Only relevant for Python 3.14+") def test_color_disabled_with_django_colors_nocolor(self): with mock.patch.dict(os.environ, {"DJANGO_COLORS": "nocolor"}): command = BaseCommand() parser = command.create_parser("prog_name", "subcommand") self.assertFalse(parser.color) @unittest.skipUnless(PY314, "Only relevant for Python 3.14+") def test_force_color_does_not_affect_argparse_color(self): with mock.patch.dict(os.environ, {}, clear=True): command = BaseCommand(force_color=True) parser = command.create_parser("prog_name", "subcommand") self.assertTrue(parser.color) @unittest.skipUnless(PY314, "Only relevant for Python 3.14+") def test_no_color_flag_disables_color(self): with mock.patch.object(sys, "argv", ["manage.py", "mycommand", "--no-color"]): command = BaseCommand() parser = command.create_parser("manage.py", "mycommand") self.assertFalse(parser.color) class CommandRunTests(AdminScriptTestCase): """ Tests that need to run by simulating the command line, not by call_command. """ def test_script_prefix_set_in_commands(self): self.write_settings( "settings.py", apps=["user_commands"], sdict={ "ROOT_URLCONF": '"user_commands.urls"', "FORCE_SCRIPT_NAME": '"/PREFIX/"', }, ) out, err = self.run_manage(["reverse_url"]) self.assertNoOutput(err) self.assertEqual(out.strip(), "/PREFIX/some/url/") def test_disallowed_abbreviated_options(self): """ To avoid conflicts with custom options, commands don't allow abbreviated forms of the --setting and --pythonpath options. """ self.write_settings("settings.py", apps=["user_commands"]) out, err = self.run_manage(["set_option", "--set", "foo"]) self.assertNoOutput(err) self.assertEqual(out.strip(), "Set foo") def test_skip_checks(self): self.write_settings( "settings.py", apps=["django.contrib.staticfiles", "user_commands"], sdict={ # (staticfiles.E001) The STATICFILES_DIRS setting is not a # tuple or list. 
"STATICFILES_DIRS": '"foo"', }, ) out, err = self.run_manage(["set_option", "--skip-checks", "--set", "foo"]) self.assertNoOutput(err) self.assertEqual(out.strip(), "Set foo") def test_subparser_error_formatting(self): self.write_settings("settings.py", apps=["user_commands"]) out, err = self.run_manage(["subparser", "foo", "twelve"]) self.maxDiff = None self.assertNoOutput(out) err_lines = err.splitlines() self.assertEqual(len(err_lines), 2) self.assertEqual( err_lines[1], "manage.py subparser foo: error: argument bar: invalid int value: 'twelve'", ) def test_subparser_non_django_error_formatting(self): self.write_settings("settings.py", apps=["user_commands"]) out, err = self.run_manage(["subparser_vanilla", "foo", "seven"]) self.assertNoOutput(out) err_lines = err.splitlines() self.assertEqual(len(err_lines), 2) self.assertEqual( err_lines[1], "manage.py subparser_vanilla foo: error: argument bar: invalid int value: " "'seven'", ) class UtilsTests(SimpleTestCase): def test_no_existent_external_program(self): msg = "Error executing a_42_command_that_doesnt_exist_42" with self.assertRaisesMessage(CommandError, msg): popen_wrapper(["a_42_command_that_doesnt_exist_42"]) def test_get_random_secret_key(self): key = get_random_secret_key() self.assertEqual(len(key), 50) for char in key: self.assertIn(char, "abcdefghijklmnopqrstuvwxyz0123456789!@#$%^&*(-_=+)") def test_is_ignored_path_true(self): patterns = ( ["foo/bar/baz"], ["baz"], ["foo/bar/baz"], ["*/baz"], ["*"], ["b?z"], ["[abc]az"], ["*/ba[!z]/baz"], ) for ignore_patterns in patterns: with self.subTest(ignore_patterns=ignore_patterns): self.assertIs( is_ignored_path("foo/bar/baz", ignore_patterns=ignore_patterns), True, ) def test_is_ignored_path_false(self): self.assertIs( is_ignored_path( "foo/bar/baz", ignore_patterns=["foo/bar/bat", "bar", "flub/blub"] ), False, ) def test_normalize_path_patterns_truncates_wildcard_base(self): expected = [os.path.normcase(p) for p in ["foo/bar", "bar/*/"]] 
self.assertEqual(normalize_path_patterns(["foo/bar/*", "bar/*/"]), expected) def test_run_formatters_handles_oserror_for_black_path(self): test_files_path = Path(__file__).parent / "test_files" cases = [ ( FileNotFoundError, str(test_files_path / "nonexistent"), ), ( OSError if sys.platform == "win32" else PermissionError, str(test_files_path / "black"), ), ] for exception, location in cases: with ( self.subTest(exception.__qualname__), AssertFormatterFailureCaughtContext( self, shutil_which_result=location ) as ctx, ): run_formatters([], stderr=ctx.stderr) parsed_error = ctx.stderr.getvalue() self.assertIn(exception.__qualname__, parsed_error) if sys.platform != "win32": self.assertIn(location, parsed_error)
python
BSD-3-Clause
3201a895cba335000827b28768a7b7105c81b415
2026-01-04T14:38:15.489092Z
false
django/django
https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/user_commands/urls.py
tests/user_commands/urls.py
from django.urls import path urlpatterns = [ path("some/url/", lambda req: req, name="some_url"), ]
python
BSD-3-Clause
3201a895cba335000827b28768a7b7105c81b415
2026-01-04T14:38:15.489092Z
false
django/django
https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/user_commands/management/__init__.py
tests/user_commands/management/__init__.py
python
BSD-3-Clause
3201a895cba335000827b28768a7b7105c81b415
2026-01-04T14:38:15.489092Z
false
django/django
https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/user_commands/management/commands/subparser.py
tests/user_commands/management/commands/subparser.py
from django.core.management.base import BaseCommand class Command(BaseCommand): def add_arguments(self, parser): subparsers = parser.add_subparsers() parser_foo = subparsers.add_parser("foo") parser_foo.add_argument("bar", type=int) def handle(self, *args, **options): self.stdout.write(",".join(options))
python
BSD-3-Clause
3201a895cba335000827b28768a7b7105c81b415
2026-01-04T14:38:15.489092Z
false
django/django
https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/user_commands/management/commands/hal.py
tests/user_commands/management/commands/hal.py
from django.core.management.base import BaseCommand, CommandError class Command(BaseCommand): help = "Useless command." def add_arguments(self, parser): parser.add_argument( "args", metavar="app_label", nargs="*", help="Specify the app label(s) to works on.", ) parser.add_argument("--empty", action="store_true", help="Do nothing.") def handle(self, *app_labels, **options): app_labels = set(app_labels) if options["empty"]: self.stdout.write() self.stdout.write("Dave, I can't do that.") return if not app_labels: raise CommandError("I'm sorry Dave, I'm afraid I can't do that.") # raise an error if some --parameter is flowing from options to args for app_label in app_labels: if app_label.startswith("--"): raise CommandError("Sorry, Dave, I can't let you do that.") self.stdout.write("Dave, my mind is going. I can feel it. I can feel it.")
python
BSD-3-Clause
3201a895cba335000827b28768a7b7105c81b415
2026-01-04T14:38:15.489092Z
false
django/django
https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/user_commands/management/commands/required_list_option.py
tests/user_commands/management/commands/required_list_option.py
from django.core.management.base import BaseCommand class Command(BaseCommand): def add_arguments(self, parser): parser.add_argument("--foo-list", nargs="+", type=int, required=True) def handle(self, *args, **options): for option, value in options.items(): self.stdout.write("%s=%s" % (option, value))
python
BSD-3-Clause
3201a895cba335000827b28768a7b7105c81b415
2026-01-04T14:38:15.489092Z
false
django/django
https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/user_commands/management/commands/specific_system_checks.py
tests/user_commands/management/commands/specific_system_checks.py
from django.core.checks import Tags from django.core.management.base import BaseCommand class Command(BaseCommand): requires_system_checks = [Tags.staticfiles, Tags.models] def handle(self, *args, **options): pass
python
BSD-3-Clause
3201a895cba335000827b28768a7b7105c81b415
2026-01-04T14:38:15.489092Z
false
django/django
https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/user_commands/management/commands/subparser_required.py
tests/user_commands/management/commands/subparser_required.py
from django.core.management.base import BaseCommand class Command(BaseCommand): def add_arguments(self, parser): subparsers_1 = parser.add_subparsers(dest="subcommand_1") parser_foo_1 = subparsers_1.add_parser("foo_1") subparsers_2 = parser_foo_1.add_subparsers(dest="subcommand_2") parser_foo_2 = subparsers_2.add_parser("foo_2") parser_foo_2.add_argument("--bar", required=True) def handle(self, *args, **options): self.stdout.write(",".join(options))
python
BSD-3-Clause
3201a895cba335000827b28768a7b7105c81b415
2026-01-04T14:38:15.489092Z
false
django/django
https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/user_commands/management/commands/outputwrapper.py
tests/user_commands/management/commands/outputwrapper.py
from django.core.management.base import BaseCommand class Command(BaseCommand): def handle(self, **options): self.stdout.write("Working...") self.stdout.flush() self.stdout.write("OK")
python
BSD-3-Clause
3201a895cba335000827b28768a7b7105c81b415
2026-01-04T14:38:15.489092Z
false
django/django
https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/user_commands/management/commands/no_translations.py
tests/user_commands/management/commands/no_translations.py
from django.core.management.base import BaseCommand, no_translations from django.utils import translation class Command(BaseCommand): @no_translations def handle(self, *args, **options): return translation.get_language()
python
BSD-3-Clause
3201a895cba335000827b28768a7b7105c81b415
2026-01-04T14:38:15.489092Z
false
django/django
https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/user_commands/management/commands/reverse_url.py
tests/user_commands/management/commands/reverse_url.py
from django.core.management.base import BaseCommand from django.urls import reverse class Command(BaseCommand): """ This command returns a URL from a reverse() call. """ def handle(self, *args, **options): return reverse("some_url")
python
BSD-3-Clause
3201a895cba335000827b28768a7b7105c81b415
2026-01-04T14:38:15.489092Z
false
django/django
https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/user_commands/management/commands/set_option.py
tests/user_commands/management/commands/set_option.py
from django.core.management.base import BaseCommand class Command(BaseCommand): def add_arguments(self, parser): parser.add_argument("--set") def handle(self, **options): self.stdout.write("Set %s" % options["set"])
python
BSD-3-Clause
3201a895cba335000827b28768a7b7105c81b415
2026-01-04T14:38:15.489092Z
false
django/django
https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/user_commands/management/commands/required_option.py
tests/user_commands/management/commands/required_option.py
from django.core.management.base import BaseCommand class Command(BaseCommand): def add_arguments(self, parser): parser.add_argument("-n", "--need-me", required=True) parser.add_argument("-t", "--need-me-too", required=True, dest="needme2") def handle(self, *args, **options): self.stdout.write(",".join(options))
python
BSD-3-Clause
3201a895cba335000827b28768a7b7105c81b415
2026-01-04T14:38:15.489092Z
false
django/django
https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/user_commands/management/commands/common_args.py
tests/user_commands/management/commands/common_args.py
from argparse import ArgumentError from django.core.management.base import BaseCommand, CommandError class Command(BaseCommand): def add_arguments(self, parser): try: parser.add_argument("--version", action="version", version="A.B.C") except ArgumentError: pass else: raise CommandError("--version argument does no yet exist") def handle(self, *args, **options): return "Detected that --version already exists"
python
BSD-3-Clause
3201a895cba335000827b28768a7b7105c81b415
2026-01-04T14:38:15.489092Z
false
django/django
https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/user_commands/management/commands/no_system_checks.py
tests/user_commands/management/commands/no_system_checks.py
from django.core.management.base import BaseCommand class Command(BaseCommand): requires_system_checks = [] def handle(self, *args, **options): pass
python
BSD-3-Clause
3201a895cba335000827b28768a7b7105c81b415
2026-01-04T14:38:15.489092Z
false
django/django
https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/user_commands/management/commands/dance.py
tests/user_commands/management/commands/dance.py
from django.core.management.base import BaseCommand, CommandError class Command(BaseCommand): help = "Dance around like a madman." args = "" requires_system_checks = "__all__" def add_arguments(self, parser): parser.add_argument("integer", nargs="?", type=int, default=0) parser.add_argument("-s", "--style", default="Rock'n'Roll") parser.add_argument("-x", "--example") parser.add_argument("--opt-3", action="store_true", dest="option3") def handle(self, *args, **options): example = options["example"] if example == "raise": raise CommandError(returncode=3) if options["verbosity"] > 0: self.stdout.write("I don't feel like dancing %s." % options["style"]) self.stdout.write(",".join(options)) if options["integer"] > 0: self.stdout.write( "You passed %d as a positional argument." % options["integer"] )
python
BSD-3-Clause
3201a895cba335000827b28768a7b7105c81b415
2026-01-04T14:38:15.489092Z
false
django/django
https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/user_commands/management/commands/transaction.py
tests/user_commands/management/commands/transaction.py
from django.core.management.base import BaseCommand class Command(BaseCommand): help = "Say hello." args = "" output_transaction = True def handle(self, *args, **options): return "Hello!"
python
BSD-3-Clause
3201a895cba335000827b28768a7b7105c81b415
2026-01-04T14:38:15.489092Z
false
django/django
https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/user_commands/management/commands/required_constant_option.py
tests/user_commands/management/commands/required_constant_option.py
from django.core.management.base import BaseCommand class Command(BaseCommand): def add_arguments(self, parser): parser.add_argument( "--append_const", action="append_const", const=42, required=True, ) parser.add_argument("--const", action="store_const", const=31, required=True) parser.add_argument("--count", action="count", required=True) parser.add_argument("--flag_false", action="store_false", required=True) parser.add_argument("--flag_true", action="store_true", required=True) def handle(self, *args, **options): for option, value in options.items(): if value is not None: self.stdout.write("%s=%s" % (option, value))
python
BSD-3-Clause
3201a895cba335000827b28768a7b7105c81b415
2026-01-04T14:38:15.489092Z
false
django/django
https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/user_commands/management/commands/__init__.py
tests/user_commands/management/commands/__init__.py
python
BSD-3-Clause
3201a895cba335000827b28768a7b7105c81b415
2026-01-04T14:38:15.489092Z
false
django/django
https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/user_commands/management/commands/mutually_exclusive_required_with_same_dest.py
tests/user_commands/management/commands/mutually_exclusive_required_with_same_dest.py
from django.core.management.base import BaseCommand class Command(BaseCommand): def add_arguments(self, parser): group = parser.add_mutually_exclusive_group(required=True) group.add_argument("--for", dest="until", action="store") group.add_argument("--until", action="store") def handle(self, *args, **options): for option, value in options.items(): if value is not None: self.stdout.write("%s=%s" % (option, value))
python
BSD-3-Clause
3201a895cba335000827b28768a7b7105c81b415
2026-01-04T14:38:15.489092Z
false
django/django
https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/user_commands/management/commands/subparser_dest.py
tests/user_commands/management/commands/subparser_dest.py
from django.core.management.base import BaseCommand class Command(BaseCommand): def add_arguments(self, parser): subparsers = parser.add_subparsers(dest="subcommand", required=True) parser_foo = subparsers.add_parser("foo") parser_foo.add_argument("--bar") def handle(self, *args, **options): self.stdout.write(",".join(options))
python
BSD-3-Clause
3201a895cba335000827b28768a7b7105c81b415
2026-01-04T14:38:15.489092Z
false
django/django
https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/user_commands/management/commands/subparser_vanilla.py
tests/user_commands/management/commands/subparser_vanilla.py
import argparse from django.core.management.base import BaseCommand class Command(BaseCommand): def add_arguments(self, parser): subparsers = parser.add_subparsers(parser_class=argparse.ArgumentParser) parser_foo = subparsers.add_parser("foo") parser_foo.add_argument("bar", type=int) def handle(self, *args, **options): pass
python
BSD-3-Clause
3201a895cba335000827b28768a7b7105c81b415
2026-01-04T14:38:15.489092Z
false
django/django
https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/user_commands/management/commands/mutually_exclusive_required.py
tests/user_commands/management/commands/mutually_exclusive_required.py
from django.core.management.base import BaseCommand class Command(BaseCommand): def add_arguments(self, parser): group = parser.add_mutually_exclusive_group(required=True) group.add_argument("--foo-id", type=int, nargs="?", default=None) group.add_argument("--foo-name", type=str, nargs="?", default=None) group.add_argument("--foo-list", type=int, nargs="+") group.add_argument("--append_const", action="append_const", const=42) group.add_argument("--const", action="store_const", const=31) group.add_argument("--count", action="count") group.add_argument("--flag_false", action="store_false") group.add_argument("--flag_true", action="store_true") def handle(self, *args, **options): for option, value in options.items(): if value is not None: self.stdout.write("%s=%s" % (option, value))
python
BSD-3-Clause
3201a895cba335000827b28768a7b7105c81b415
2026-01-04T14:38:15.489092Z
false
django/django
https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/model_enums/__init__.py
tests/model_enums/__init__.py
python
BSD-3-Clause
3201a895cba335000827b28768a7b7105c81b415
2026-01-04T14:38:15.489092Z
false
django/django
https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/model_enums/tests.py
tests/model_enums/tests.py
import datetime import decimal import ipaddress import uuid from django.db import models from django.template import Context, Template from django.test import SimpleTestCase from django.utils.functional import Promise from django.utils.translation import gettext_lazy as _ class Suit(models.IntegerChoices): DIAMOND = 1, _("Diamond") SPADE = 2, _("Spade") HEART = 3, _("Heart") CLUB = 4, _("Club") class YearInSchool(models.TextChoices): FRESHMAN = "FR", _("Freshman") SOPHOMORE = "SO", _("Sophomore") JUNIOR = "JR", _("Junior") SENIOR = "SR", _("Senior") GRADUATE = "GR", _("Graduate") class Vehicle(models.IntegerChoices): CAR = 1, "Carriage" TRUCK = 2 JET_SKI = 3 __empty__ = _("(Unknown)") class Gender(models.TextChoices): MALE = "M" FEMALE = "F" NOT_SPECIFIED = "X" __empty__ = "(Undeclared)" class ChoicesTests(SimpleTestCase): def test_integerchoices(self): self.assertEqual( Suit.choices, [(1, "Diamond"), (2, "Spade"), (3, "Heart"), (4, "Club")] ) self.assertEqual(Suit.labels, ["Diamond", "Spade", "Heart", "Club"]) self.assertEqual(Suit.values, [1, 2, 3, 4]) self.assertEqual(Suit.names, ["DIAMOND", "SPADE", "HEART", "CLUB"]) self.assertEqual(repr(Suit.DIAMOND), "Suit.DIAMOND") self.assertEqual(Suit.DIAMOND.label, "Diamond") self.assertEqual(Suit.DIAMOND.value, 1) self.assertEqual(Suit["DIAMOND"], Suit.DIAMOND) self.assertEqual(Suit(1), Suit.DIAMOND) self.assertIsInstance(Suit, type(models.Choices)) self.assertIsInstance(Suit.DIAMOND, Suit) self.assertIsInstance(Suit.DIAMOND.label, Promise) self.assertIsInstance(Suit.DIAMOND.value, int) def test_integerchoices_auto_label(self): self.assertEqual(Vehicle.CAR.label, "Carriage") self.assertEqual(Vehicle.TRUCK.label, "Truck") self.assertEqual(Vehicle.JET_SKI.label, "Jet Ski") def test_integerchoices_empty_label(self): self.assertEqual(Vehicle.choices[0], (None, "(Unknown)")) self.assertEqual(Vehicle.labels[0], "(Unknown)") self.assertIsNone(Vehicle.values[0]) self.assertEqual(Vehicle.names[0], "__empty__") def 
test_integerchoices_functional_api(self): Place = models.IntegerChoices("Place", "FIRST SECOND THIRD") self.assertEqual(Place.labels, ["First", "Second", "Third"]) self.assertEqual(Place.values, [1, 2, 3]) self.assertEqual(Place.names, ["FIRST", "SECOND", "THIRD"]) def test_integerchoices_containment(self): self.assertIn(Suit.DIAMOND, Suit) self.assertIn(1, Suit) self.assertNotIn(0, Suit) def test_textchoices(self): self.assertEqual( YearInSchool.choices, [ ("FR", "Freshman"), ("SO", "Sophomore"), ("JR", "Junior"), ("SR", "Senior"), ("GR", "Graduate"), ], ) self.assertEqual( YearInSchool.labels, ["Freshman", "Sophomore", "Junior", "Senior", "Graduate"], ) self.assertEqual(YearInSchool.values, ["FR", "SO", "JR", "SR", "GR"]) self.assertEqual( YearInSchool.names, ["FRESHMAN", "SOPHOMORE", "JUNIOR", "SENIOR", "GRADUATE"], ) self.assertEqual(repr(YearInSchool.FRESHMAN), "YearInSchool.FRESHMAN") self.assertEqual(YearInSchool.FRESHMAN.label, "Freshman") self.assertEqual(YearInSchool.FRESHMAN.value, "FR") self.assertEqual(YearInSchool["FRESHMAN"], YearInSchool.FRESHMAN) self.assertEqual(YearInSchool("FR"), YearInSchool.FRESHMAN) self.assertIsInstance(YearInSchool, type(models.Choices)) self.assertIsInstance(YearInSchool.FRESHMAN, YearInSchool) self.assertIsInstance(YearInSchool.FRESHMAN.label, Promise) self.assertIsInstance(YearInSchool.FRESHMAN.value, str) def test_textchoices_auto_label(self): self.assertEqual(Gender.MALE.label, "Male") self.assertEqual(Gender.FEMALE.label, "Female") self.assertEqual(Gender.NOT_SPECIFIED.label, "Not Specified") def test_textchoices_empty_label(self): self.assertEqual(Gender.choices[0], (None, "(Undeclared)")) self.assertEqual(Gender.labels[0], "(Undeclared)") self.assertIsNone(Gender.values[0]) self.assertEqual(Gender.names[0], "__empty__") def test_textchoices_functional_api(self): Medal = models.TextChoices("Medal", "GOLD SILVER BRONZE") self.assertEqual(Medal.labels, ["Gold", "Silver", "Bronze"]) self.assertEqual(Medal.values, 
["GOLD", "SILVER", "BRONZE"]) self.assertEqual(Medal.names, ["GOLD", "SILVER", "BRONZE"]) def test_textchoices_containment(self): self.assertIn(YearInSchool.FRESHMAN, YearInSchool) self.assertIn("FR", YearInSchool) self.assertNotIn("XX", YearInSchool) def test_textchoices_blank_value(self): class BlankStr(models.TextChoices): EMPTY = "", "(Empty)" ONE = "ONE", "One" self.assertEqual(BlankStr.labels, ["(Empty)", "One"]) self.assertEqual(BlankStr.values, ["", "ONE"]) self.assertEqual(BlankStr.names, ["EMPTY", "ONE"]) def test_invalid_definition(self): msg = "'str' object cannot be interpreted as an integer" with self.assertRaisesMessage(TypeError, msg): class InvalidArgumentEnum(models.IntegerChoices): # A string is not permitted as the second argument to int(). ONE = 1, "X", "Invalid" msg = "duplicate values found in <enum 'Fruit'>: PINEAPPLE -> APPLE" with self.assertRaisesMessage(ValueError, msg): class Fruit(models.IntegerChoices): APPLE = 1, "Apple" PINEAPPLE = 1, "Pineapple" def test_str(self): for test in [Gender, Suit, YearInSchool, Vehicle]: for member in test: with self.subTest(member=member): self.assertEqual(str(test[member.name]), str(member.value)) def test_templates(self): template = Template("{{ Suit.DIAMOND.label }}|{{ Suit.DIAMOND.value }}") output = template.render(Context({"Suit": Suit})) self.assertEqual(output, "Diamond|1") def test_property_names_conflict_with_member_names(self): with self.assertRaises(AttributeError): models.TextChoices("Properties", "choices labels names values") def test_label_member(self): # label can be used as a member. Stationery = models.TextChoices("Stationery", "label stamp sticker") self.assertEqual(Stationery.label.label, "Label") self.assertEqual(Stationery.label.value, "label") self.assertEqual(Stationery.label.name, "label") def test_do_not_call_in_templates_member(self): # do_not_call_in_templates is not implicitly treated as a member. 
Special = models.IntegerChoices("Special", "do_not_call_in_templates") self.assertIn("do_not_call_in_templates", Special.__members__) self.assertEqual( Special.do_not_call_in_templates.label, "Do Not Call In Templates", ) self.assertEqual(Special.do_not_call_in_templates.value, 1) self.assertEqual( Special.do_not_call_in_templates.name, "do_not_call_in_templates", ) def test_do_not_call_in_templates_nonmember(self): self.assertNotIn("do_not_call_in_templates", Suit.__members__) self.assertIs(Suit.do_not_call_in_templates, True) class Separator(bytes, models.Choices): FS = b"\x1c", "File Separator" GS = b"\x1d", "Group Separator" RS = b"\x1e", "Record Separator" US = b"\x1f", "Unit Separator" class Constants(float, models.Choices): PI = 3.141592653589793, "π" TAU = 6.283185307179586, "τ" class Set(frozenset, models.Choices): A = {1, 2} B = {2, 3} UNION = A | B DIFFERENCE = A - B INTERSECTION = A & B class MoonLandings(datetime.date, models.Choices): APOLLO_11 = 1969, 7, 20, "Apollo 11 (Eagle)" APOLLO_12 = 1969, 11, 19, "Apollo 12 (Intrepid)" APOLLO_14 = 1971, 2, 5, "Apollo 14 (Antares)" APOLLO_15 = 1971, 7, 30, "Apollo 15 (Falcon)" APOLLO_16 = 1972, 4, 21, "Apollo 16 (Orion)" APOLLO_17 = 1972, 12, 11, "Apollo 17 (Challenger)" class DateAndTime(datetime.datetime, models.Choices): A = 2010, 10, 10, 10, 10, 10 B = 2011, 11, 11, 11, 11, 11 C = 2012, 12, 12, 12, 12, 12 class MealTimes(datetime.time, models.Choices): BREAKFAST = 7, 0 LUNCH = 13, 0 DINNER = 18, 30 class Frequency(datetime.timedelta, models.Choices): WEEK = 0, 0, 0, 0, 0, 0, 1, "Week" DAY = 1, "Day" HOUR = 0, 0, 0, 0, 0, 1, "Hour" MINUTE = 0, 0, 0, 0, 1, "Hour" SECOND = 0, 1, "Second" class Number(decimal.Decimal, models.Choices): E = 2.718281828459045, "e" PI = "3.141592653589793", "π" TAU = decimal.Decimal("6.283185307179586"), "τ" class IPv4Address(ipaddress.IPv4Address, models.Choices): LOCALHOST = "127.0.0.1", "Localhost" GATEWAY = "192.168.0.1", "Gateway" BROADCAST = "192.168.0.255", "Broadcast" class 
IPv6Address(ipaddress.IPv6Address, models.Choices): LOCALHOST = "::1", "Localhost" UNSPECIFIED = "::", "Unspecified" class IPv4Network(ipaddress.IPv4Network, models.Choices): LOOPBACK = "127.0.0.0/8", "Loopback" LINK_LOCAL = "169.254.0.0/16", "Link-Local" PRIVATE_USE_A = "10.0.0.0/8", "Private-Use (Class A)" class IPv6Network(ipaddress.IPv6Network, models.Choices): LOOPBACK = "::1/128", "Loopback" UNSPECIFIED = "::/128", "Unspecified" UNIQUE_LOCAL = "fc00::/7", "Unique-Local" LINK_LOCAL_UNICAST = "fe80::/10", "Link-Local Unicast" class CustomChoicesTests(SimpleTestCase): def test_labels_valid(self): enums = ( Separator, Constants, Set, MoonLandings, DateAndTime, MealTimes, Frequency, Number, IPv4Address, IPv6Address, IPv4Network, IPv6Network, ) for choice_enum in enums: with self.subTest(choice_enum.__name__): self.assertNotIn(None, choice_enum.labels) def test_bool_unsupported(self): msg = "type 'bool' is not an acceptable base type" with self.assertRaisesMessage(TypeError, msg): class Boolean(bool, models.Choices): pass def test_uuid_unsupported(self): with self.assertRaises(TypeError): class Identifier(uuid.UUID, models.Choices): A = "972ce4eb-a95f-4a56-9339-68c208a76f18"
python
BSD-3-Clause
3201a895cba335000827b28768a7b7105c81b415
2026-01-04T14:38:15.489092Z
false
django/django
https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/test_utils/test_simpletestcase.py
tests/test_utils/test_simpletestcase.py
import unittest from io import StringIO from unittest import mock from unittest.suite import _DebugResult from django.test import SimpleTestCase class ErrorTestCase(SimpleTestCase): def raising_test(self): self._pre_setup.assert_called_once_with() raise Exception("debug() bubbles up exceptions before cleanup.") def simple_test(self): self._pre_setup.assert_called_once_with() @unittest.skip("Skip condition.") def skipped_test(self): pass @mock.patch.object(ErrorTestCase, "_post_teardown") @mock.patch.object(ErrorTestCase, "_pre_setup") class DebugInvocationTests(SimpleTestCase): def get_runner(self): return unittest.TextTestRunner(stream=StringIO()) def isolate_debug_test(self, test_suite, result): # Suite teardown needs to be manually called to isolate failures. test_suite._tearDownPreviousClass(None, result) test_suite._handleModuleTearDown(result) def test_run_cleanup(self, _pre_setup, _post_teardown): """Simple test run: catches errors and runs cleanup.""" test_suite = unittest.TestSuite() test_suite.addTest(ErrorTestCase("raising_test")) result = self.get_runner()._makeResult() self.assertEqual(result.errors, []) test_suite.run(result) self.assertEqual(len(result.errors), 1) _, traceback = result.errors[0] self.assertIn( "Exception: debug() bubbles up exceptions before cleanup.", traceback ) _pre_setup.assert_called_once_with() _post_teardown.assert_called_once_with() def test_run_pre_setup_error(self, _pre_setup, _post_teardown): _pre_setup.side_effect = Exception("Exception in _pre_setup.") test_suite = unittest.TestSuite() test_suite.addTest(ErrorTestCase("simple_test")) result = self.get_runner()._makeResult() self.assertEqual(result.errors, []) test_suite.run(result) self.assertEqual(len(result.errors), 1) _, traceback = result.errors[0] self.assertIn("Exception: Exception in _pre_setup.", traceback) # pre-setup is called but not post-teardown. 
_pre_setup.assert_called_once_with() self.assertFalse(_post_teardown.called) def test_run_post_teardown_error(self, _pre_setup, _post_teardown): _post_teardown.side_effect = Exception("Exception in _post_teardown.") test_suite = unittest.TestSuite() test_suite.addTest(ErrorTestCase("simple_test")) result = self.get_runner()._makeResult() self.assertEqual(result.errors, []) test_suite.run(result) self.assertEqual(len(result.errors), 1) _, traceback = result.errors[0] self.assertIn("Exception: Exception in _post_teardown.", traceback) # pre-setup and post-teardwn are called. _pre_setup.assert_called_once_with() _post_teardown.assert_called_once_with() def test_run_skipped_test_no_cleanup(self, _pre_setup, _post_teardown): test_suite = unittest.TestSuite() test_suite.addTest(ErrorTestCase("skipped_test")) try: test_suite.run(self.get_runner()._makeResult()) except unittest.SkipTest: self.fail("SkipTest should not be raised at this stage.") self.assertFalse(_post_teardown.called) self.assertFalse(_pre_setup.called) def test_debug_cleanup(self, _pre_setup, _post_teardown): """Simple debug run without errors.""" test_suite = unittest.TestSuite() test_suite.addTest(ErrorTestCase("simple_test")) test_suite.debug() _pre_setup.assert_called_once_with() _post_teardown.assert_called_once_with() def test_debug_bubbles_error(self, _pre_setup, _post_teardown): """debug() bubbles up exceptions before cleanup.""" test_suite = unittest.TestSuite() test_suite.addTest(ErrorTestCase("raising_test")) msg = "debug() bubbles up exceptions before cleanup." with self.assertRaisesMessage(Exception, msg): # This is the same as test_suite.debug(). result = _DebugResult() test_suite.run(result, debug=True) # pre-setup is called but not post-teardown. 
_pre_setup.assert_called_once_with() self.assertFalse(_post_teardown.called) self.isolate_debug_test(test_suite, result) def test_debug_bubbles_pre_setup_error(self, _pre_setup, _post_teardown): """debug() bubbles up exceptions during _pre_setup.""" msg = "Exception in _pre_setup." _pre_setup.side_effect = Exception(msg) test_suite = unittest.TestSuite() test_suite.addTest(ErrorTestCase("simple_test")) with self.assertRaisesMessage(Exception, msg): # This is the same as test_suite.debug(). result = _DebugResult() test_suite.run(result, debug=True) # pre-setup is called but not post-teardown. _pre_setup.assert_called_once_with() self.assertFalse(_post_teardown.called) self.isolate_debug_test(test_suite, result) def test_debug_bubbles_post_teardown_error(self, _pre_setup, _post_teardown): """debug() bubbles up exceptions during _post_teardown.""" msg = "Exception in _post_teardown." _post_teardown.side_effect = Exception(msg) test_suite = unittest.TestSuite() test_suite.addTest(ErrorTestCase("simple_test")) with self.assertRaisesMessage(Exception, msg): # This is the same as test_suite.debug(). result = _DebugResult() test_suite.run(result, debug=True) # pre-setup and post-teardwn are called. _pre_setup.assert_called_once_with() _post_teardown.assert_called_once_with() self.isolate_debug_test(test_suite, result) def test_debug_skipped_test_no_cleanup(self, _pre_setup, _post_teardown): test_suite = unittest.TestSuite() test_suite.addTest(ErrorTestCase("skipped_test")) with self.assertRaisesMessage(unittest.SkipTest, "Skip condition."): # This is the same as test_suite.debug(). result = _DebugResult() test_suite.run(result, debug=True) self.assertFalse(_post_teardown.called) self.assertFalse(_pre_setup.called) self.isolate_debug_test(test_suite, result)
python
BSD-3-Clause
3201a895cba335000827b28768a7b7105c81b415
2026-01-04T14:38:15.489092Z
false
django/django
https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/test_utils/test_testcase.py
tests/test_utils/test_testcase.py
import pickle from functools import wraps from django.db import IntegrityError, connections, transaction from django.test import TestCase, skipUnlessDBFeature from django.test.testcases import ( DatabaseOperationForbidden, SimpleTestCase, TestData, is_pickable, ) from .models import Car, Person, PossessedCar class UnpicklableObject: def __getstate__(self): raise pickle.PickleError("cannot be pickled for testing reasons") class TestSimpleTestCase(SimpleTestCase): def test_is_picklable_with_non_picklable_properties(self): """ParallelTestSuite requires that all TestCases are picklable.""" self.non_picklable = lambda: 0 self.assertEqual(self, pickle.loads(pickle.dumps(self))) def test_is_picklable_with_non_picklable_object(self): unpicklable_obj = UnpicklableObject() self.assertEqual(is_pickable(unpicklable_obj), False) class TestTestCase(TestCase): @skipUnlessDBFeature("can_defer_constraint_checks") @skipUnlessDBFeature("supports_foreign_keys") def test_fixture_teardown_checks_constraints(self): rollback_atomics = self._rollback_atomics self._rollback_atomics = lambda connection: None # noop try: car = PossessedCar.objects.create(car_id=1, belongs_to_id=1) with self.assertRaises(IntegrityError), transaction.atomic(): self._fixture_teardown() car.delete() finally: self._rollback_atomics = rollback_atomics def test_disallowed_database_connection(self): message = ( "Database connections to 'other' are not allowed in this test. " "Add 'other' to test_utils.test_testcase.TestTestCase.databases to " "ensure proper test isolation and silence this failure." ) with self.assertRaisesMessage(DatabaseOperationForbidden, message): connections["other"].connect() with self.assertRaisesMessage(DatabaseOperationForbidden, message): connections["other"].temporary_connection() def test_disallowed_database_queries(self): message = ( "Database queries to 'other' are not allowed in this test. 
" "Add 'other' to test_utils.test_testcase.TestTestCase.databases to " "ensure proper test isolation and silence this failure." ) with self.assertRaisesMessage(DatabaseOperationForbidden, message): Car.objects.using("other").get() @skipUnlessDBFeature("supports_transactions") def test_reset_sequences(self): old_reset_sequences = self.__class__.reset_sequences self.__class__.reset_sequences = True self.addCleanup(setattr, self.__class__, "reset_sequences", old_reset_sequences) msg = "reset_sequences cannot be used on TestCase instances" with self.assertRaisesMessage(TypeError, msg): self._fixture_setup() def assert_no_queries(test): @wraps(test) def inner(self): with self.assertNumQueries(0): test(self) return inner # On databases with no transaction support (for instance, MySQL with the MyISAM # engine), setUpTestData() is called before each test, so there is no need to # clone class level test data. @skipUnlessDBFeature("supports_transactions") class TestDataTests(TestCase): # setUpTestData re-assignment are also wrapped in TestData. jim_douglas = None @classmethod def setUpTestData(cls): cls.jim_douglas = Person.objects.create(name="Jim Douglas") cls.car = Car.objects.create(name="1963 Volkswagen Beetle") cls.herbie = cls.jim_douglas.possessed_cars.create( car=cls.car, belongs_to=cls.jim_douglas, ) cls.person_binary = Person.objects.create(name="Person", data=b"binary data") cls.person_binary_get = Person.objects.get(pk=cls.person_binary.pk) @assert_no_queries def test_class_attribute_equality(self): """Class level test data is equal to instance level test data.""" self.assertEqual(self.jim_douglas, self.__class__.jim_douglas) self.assertEqual(self.person_binary, self.__class__.person_binary) self.assertEqual(self.person_binary_get, self.__class__.person_binary_get) @assert_no_queries def test_class_attribute_identity(self): """ Class level test data is not identical to instance level test data. 
""" self.assertIsNot(self.jim_douglas, self.__class__.jim_douglas) self.assertIsNot(self.person_binary, self.__class__.person_binary) self.assertIsNot(self.person_binary_get, self.__class__.person_binary_get) @assert_no_queries def test_binaryfield_data_type(self): self.assertEqual(bytes(self.person_binary.data), b"binary data") self.assertEqual(bytes(self.person_binary_get.data), b"binary data") self.assertEqual( type(self.person_binary_get.data), type(self.__class__.person_binary_get.data), ) self.assertEqual( type(self.person_binary.data), type(self.__class__.person_binary.data), ) @assert_no_queries def test_identity_preservation(self): """Identity of test data is preserved between accesses.""" self.assertIs(self.jim_douglas, self.jim_douglas) @assert_no_queries def test_known_related_objects_identity_preservation(self): """Known related objects identity is preserved.""" self.assertIs(self.herbie.car, self.car) self.assertIs(self.herbie.belongs_to, self.jim_douglas) def test_repr(self): self.assertEqual( repr(TestData("attr", "value")), "<TestData: name='attr', data='value'>", ) class SetupTestDataIsolationTests(TestCase): """ In-memory data isolation is respected for model instances assigned to class attributes during setUpTestData. """ @classmethod def setUpTestData(cls): cls.car = Car.objects.create(name="Volkswagen Beetle") def test_book_name_deutsh(self): self.assertEqual(self.car.name, "Volkswagen Beetle") self.car.name = "VW sKäfer" self.car.save() def test_book_name_french(self): self.assertEqual(self.car.name, "Volkswagen Beetle") self.car.name = "Volkswagen Coccinelle" self.car.save()
python
BSD-3-Clause
3201a895cba335000827b28768a7b7105c81b415
2026-01-04T14:38:15.489092Z
false
django/django
https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/test_utils/views.py
tests/test_utils/views.py
from django.http import HttpResponse from django.shortcuts import get_object_or_404 from django.template import Context, Template from .models import Person def get_person(request, pk): person = get_object_or_404(Person, pk=pk) return HttpResponse(person.name) def no_template_used(request): template = Template("This is a string-based template") return HttpResponse(template.render(Context({}))) def empty_response(request): return HttpResponse()
python
BSD-3-Clause
3201a895cba335000827b28768a7b7105c81b415
2026-01-04T14:38:15.489092Z
false
django/django
https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/test_utils/test_serializemixin.py
tests/test_utils/test_serializemixin.py
from django.test import SimpleTestCase from django.test.testcases import SerializeMixin class TestSerializeMixin(SimpleTestCase): def test_init_without_lockfile(self): msg = ( "ExampleTests.lockfile isn't set. Set it to a unique value in the " "base class." ) with self.assertRaisesMessage(ValueError, msg): class ExampleTests(SerializeMixin, SimpleTestCase): pass class TestSerializeMixinUse(SerializeMixin, SimpleTestCase): lockfile = __file__ def test_usage(self): # Running this test ensures that the lock/unlock functions have passed. pass
python
BSD-3-Clause
3201a895cba335000827b28768a7b7105c81b415
2026-01-04T14:38:15.489092Z
false
django/django
https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/test_utils/models.py
tests/test_utils/models.py
from django.db import models class Car(models.Model): name = models.CharField(max_length=100) class Person(models.Model): name = models.CharField(max_length=100) cars = models.ManyToManyField(Car, through="PossessedCar") data = models.BinaryField(null=True) class PossessedCar(models.Model): car = models.ForeignKey(Car, models.CASCADE) belongs_to = models.ForeignKey( Person, models.CASCADE, related_name="possessed_cars" )
python
BSD-3-Clause
3201a895cba335000827b28768a7b7105c81b415
2026-01-04T14:38:15.489092Z
false
django/django
https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/test_utils/__init__.py
tests/test_utils/__init__.py
python
BSD-3-Clause
3201a895cba335000827b28768a7b7105c81b415
2026-01-04T14:38:15.489092Z
false
django/django
https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/test_utils/tests.py
tests/test_utils/tests.py
import os import sys import threading import traceback import unittest import warnings from functools import partial from io import StringIO from unittest import mock from django.conf import STATICFILES_STORAGE_ALIAS, settings from django.contrib.staticfiles.finders import get_finder, get_finders from django.contrib.staticfiles.storage import staticfiles_storage from django.core.exceptions import ImproperlyConfigured from django.core.files.storage import default_storage from django.db import ( IntegrityError, connection, connections, models, router, transaction, ) from django.forms import ( CharField, EmailField, Form, IntegerField, ValidationError, formset_factory, ) from django.http import HttpResponse from django.template import Context, Template from django.template.loader import render_to_string from django.test import ( SimpleTestCase, TestCase, TransactionTestCase, skipIfDBFeature, skipUnlessDBFeature, ) from django.test.html import HTMLParseError, parse_html from django.test.testcases import DatabaseOperationForbidden from django.test.utils import ( CaptureQueriesContext, TestContextDecorator, isolate_apps, override_settings, setup_test_environment, ) from django.urls import NoReverseMatch, path, reverse, reverse_lazy from django.utils.html import VOID_ELEMENTS from .models import Car, Person, PossessedCar from .views import empty_response class SkippingTestCase(SimpleTestCase): def _assert_skipping(self, func, expected_exc, msg=None): try: if msg is not None: with self.assertRaisesMessage(expected_exc, msg): func() else: with self.assertRaises(expected_exc): func() except unittest.SkipTest: self.fail("%s should not result in a skipped test." % func.__name__) def test_skip_unless_db_feature(self): """ Testing the django.test.skipUnlessDBFeature decorator. """ # Total hack, but it works, just want an attribute that's always true. 
@skipUnlessDBFeature("__class__") def test_func(): raise ValueError @skipUnlessDBFeature("notprovided") def test_func2(): raise ValueError @skipUnlessDBFeature("__class__", "__class__") def test_func3(): raise ValueError @skipUnlessDBFeature("__class__", "notprovided") def test_func4(): raise ValueError self._assert_skipping(test_func, ValueError) self._assert_skipping(test_func2, AttributeError) self._assert_skipping(test_func3, ValueError) self._assert_skipping(test_func4, AttributeError) class SkipTestCase(SimpleTestCase): @skipUnlessDBFeature("missing") def test_foo(self): pass self._assert_skipping( SkipTestCase("test_foo").test_foo, ValueError, "skipUnlessDBFeature cannot be used on test_foo (test_utils.tests." "SkippingTestCase.test_skip_unless_db_feature.<locals>.SkipTestCase." "test_foo) as SkippingTestCase.test_skip_unless_db_feature.<locals>." "SkipTestCase doesn't allow queries against the 'default' database.", ) def test_skip_if_db_feature(self): """ Testing the django.test.skipIfDBFeature decorator. """ @skipIfDBFeature("__class__") def test_func(): raise ValueError @skipIfDBFeature("notprovided") def test_func2(): raise ValueError @skipIfDBFeature("__class__", "__class__") def test_func3(): raise ValueError @skipIfDBFeature("__class__", "notprovided") def test_func4(): raise ValueError @skipIfDBFeature("notprovided", "notprovided") def test_func5(): raise ValueError self._assert_skipping(test_func, unittest.SkipTest) self._assert_skipping(test_func2, AttributeError) self._assert_skipping(test_func3, unittest.SkipTest) self._assert_skipping(test_func4, unittest.SkipTest) self._assert_skipping(test_func5, AttributeError) class SkipTestCase(SimpleTestCase): @skipIfDBFeature("missing") def test_foo(self): pass self._assert_skipping( SkipTestCase("test_foo").test_foo, ValueError, "skipIfDBFeature cannot be used on test_foo (test_utils.tests." 
"SkippingTestCase.test_skip_if_db_feature.<locals>.SkipTestCase.test_foo) " "as SkippingTestCase.test_skip_if_db_feature.<locals>.SkipTestCase " "doesn't allow queries against the 'default' database.", ) class SkippingClassTestCase(TransactionTestCase): available_apps = [] def test_skip_class_unless_db_feature(self): @skipUnlessDBFeature("__class__") class NotSkippedTests(TestCase): def test_dummy(self): return @skipUnlessDBFeature("missing") @skipIfDBFeature("__class__") class SkippedTests(TestCase): def test_will_be_skipped(self): self.fail("We should never arrive here.") @skipIfDBFeature("__dict__") class SkippedTestsSubclass(SkippedTests): pass test_suite = unittest.TestSuite() test_suite.addTest(NotSkippedTests("test_dummy")) try: test_suite.addTest(SkippedTests("test_will_be_skipped")) test_suite.addTest(SkippedTestsSubclass("test_will_be_skipped")) except unittest.SkipTest: self.fail("SkipTest should not be raised here.") result = unittest.TextTestRunner(stream=StringIO()).run(test_suite) # PY312: Python 3.12.1 does not include skipped tests in the number of # running tests. self.assertEqual( result.testsRun, 1 if sys.version_info[:3] == (3, 12, 1) else 3 ) self.assertEqual(len(result.skipped), 2) self.assertEqual(result.skipped[0][1], "Database has feature(s) __class__") self.assertEqual(result.skipped[1][1], "Database has feature(s) __class__") def test_missing_default_databases(self): @skipIfDBFeature("missing") class MissingDatabases(SimpleTestCase): def test_assertion_error(self): pass suite = unittest.TestSuite() try: suite.addTest(MissingDatabases("test_assertion_error")) except unittest.SkipTest: self.fail("SkipTest should not be raised at this stage") runner = unittest.TextTestRunner(stream=StringIO()) msg = ( "skipIfDBFeature cannot be used on <class 'test_utils.tests." "SkippingClassTestCase.test_missing_default_databases.<locals>." "MissingDatabases'> as it doesn't allow queries against the " "'default' database." 
) with self.assertRaisesMessage(ValueError, msg): runner.run(suite) @override_settings(ROOT_URLCONF="test_utils.urls") class AssertNumQueriesTests(TestCase): def test_assert_num_queries(self): def test_func(): raise ValueError with self.assertRaises(ValueError): self.assertNumQueries(2, test_func) def test_assert_num_queries_with_client(self): person = Person.objects.create(name="test") self.assertNumQueries( 1, self.client.get, "/test_utils/get_person/%s/" % person.pk ) self.assertNumQueries( 1, self.client.get, "/test_utils/get_person/%s/" % person.pk ) def test_func(): self.client.get("/test_utils/get_person/%s/" % person.pk) self.client.get("/test_utils/get_person/%s/" % person.pk) self.assertNumQueries(2, test_func) class AssertNumQueriesUponConnectionTests(TransactionTestCase): available_apps = [] def test_ignores_connection_configuration_queries(self): real_ensure_connection = connection.ensure_connection connection.close() def make_configuration_query(): is_opening_connection = connection.connection is None real_ensure_connection() if is_opening_connection: # Avoid infinite recursion. Creating a cursor calls # ensure_connection() which is currently mocked by this method. 
with connection.cursor() as cursor: cursor.execute("SELECT 1" + connection.features.bare_select_suffix) ensure_connection = ( "django.db.backends.base.base.BaseDatabaseWrapper.ensure_connection" ) with mock.patch(ensure_connection, side_effect=make_configuration_query): with self.assertNumQueries(1): list(Car.objects.all()) class AssertQuerySetEqualTests(TestCase): @classmethod def setUpTestData(cls): cls.p1 = Person.objects.create(name="p1") cls.p2 = Person.objects.create(name="p2") def test_empty(self): self.assertQuerySetEqual(Person.objects.filter(name="p3"), []) def test_ordered(self): self.assertQuerySetEqual( Person.objects.order_by("name"), [self.p1, self.p2], ) def test_unordered(self): self.assertQuerySetEqual( Person.objects.order_by("name"), [self.p2, self.p1], ordered=False ) def test_queryset(self): self.assertQuerySetEqual( Person.objects.order_by("name"), Person.objects.order_by("name"), ) def test_flat_values_list(self): self.assertQuerySetEqual( Person.objects.order_by("name").values_list("name", flat=True), ["p1", "p2"], ) def test_transform(self): self.assertQuerySetEqual( Person.objects.order_by("name"), [self.p1.pk, self.p2.pk], transform=lambda x: x.pk, ) def test_repr_transform(self): self.assertQuerySetEqual( Person.objects.order_by("name"), [repr(self.p1), repr(self.p2)], transform=repr, ) def test_undefined_order(self): # Using an unordered queryset with more than one ordered value # is an error. msg = ( "Trying to compare non-ordered queryset against more than one " "ordered value." ) with self.assertRaisesMessage(ValueError, msg): self.assertQuerySetEqual( Person.objects.all(), [self.p1, self.p2], ) # No error for one value. self.assertQuerySetEqual(Person.objects.filter(name="p1"), [self.p1]) def test_repeated_values(self): """ assertQuerySetEqual checks the number of appearance of each item when used with option ordered=False. 
""" batmobile = Car.objects.create(name="Batmobile") k2000 = Car.objects.create(name="K 2000") PossessedCar.objects.bulk_create( [ PossessedCar(car=batmobile, belongs_to=self.p1), PossessedCar(car=batmobile, belongs_to=self.p1), PossessedCar(car=k2000, belongs_to=self.p1), PossessedCar(car=k2000, belongs_to=self.p1), PossessedCar(car=k2000, belongs_to=self.p1), PossessedCar(car=k2000, belongs_to=self.p1), ] ) with self.assertRaises(AssertionError): self.assertQuerySetEqual( self.p1.cars.all(), [batmobile, k2000], ordered=False ) self.assertQuerySetEqual( self.p1.cars.all(), [batmobile] * 2 + [k2000] * 4, ordered=False ) def test_maxdiff(self): names = ["Joe Smith %s" % i for i in range(20)] Person.objects.bulk_create([Person(name=name) for name in names]) names.append("Extra Person") with self.assertRaises(AssertionError) as ctx: self.assertQuerySetEqual( Person.objects.filter(name__startswith="Joe"), names, ordered=False, transform=lambda p: p.name, ) self.assertIn("Set self.maxDiff to None to see it.", str(ctx.exception)) original = self.maxDiff self.maxDiff = None try: with self.assertRaises(AssertionError) as ctx: self.assertQuerySetEqual( Person.objects.filter(name__startswith="Joe"), names, ordered=False, transform=lambda p: p.name, ) finally: self.maxDiff = original exception_msg = str(ctx.exception) self.assertNotIn("Set self.maxDiff to None to see it.", exception_msg) for name in names: self.assertIn(name, exception_msg) @override_settings(ROOT_URLCONF="test_utils.urls") class CaptureQueriesContextManagerTests(TestCase): @classmethod def setUpTestData(cls): cls.person_pk = str(Person.objects.create(name="test").pk) cls.url = f"/test_utils/get_person/{cls.person_pk}/" def test_simple(self): with CaptureQueriesContext(connection) as captured_queries: Person.objects.get(pk=self.person_pk) self.assertEqual(len(captured_queries), 1) self.assertIn(self.person_pk, captured_queries[0]["sql"]) with CaptureQueriesContext(connection) as captured_queries: pass 
self.assertEqual(0, len(captured_queries)) def test_within(self): with CaptureQueriesContext(connection) as captured_queries: Person.objects.get(pk=self.person_pk) self.assertEqual(len(captured_queries), 1) self.assertIn(self.person_pk, captured_queries[0]["sql"]) def test_nested(self): with CaptureQueriesContext(connection) as captured_queries: Person.objects.count() with CaptureQueriesContext(connection) as nested_captured_queries: Person.objects.count() self.assertEqual(1, len(nested_captured_queries)) self.assertEqual(2, len(captured_queries)) def test_failure(self): with self.assertRaises(TypeError): with CaptureQueriesContext(connection): raise TypeError def test_with_client(self): with CaptureQueriesContext(connection) as captured_queries: self.client.get(self.url) self.assertEqual(len(captured_queries), 1) self.assertIn(self.person_pk, captured_queries[0]["sql"]) with CaptureQueriesContext(connection) as captured_queries: self.client.get(self.url) self.assertEqual(len(captured_queries), 1) self.assertIn(self.person_pk, captured_queries[0]["sql"]) with CaptureQueriesContext(connection) as captured_queries: self.client.get(self.url) self.client.get(self.url) self.assertEqual(len(captured_queries), 2) self.assertIn(self.person_pk, captured_queries[0]["sql"]) self.assertIn(self.person_pk, captured_queries[1]["sql"]) def test_with_client_nested(self): with CaptureQueriesContext(connection) as captured_queries: Person.objects.count() with CaptureQueriesContext(connection): pass self.client.get(self.url) self.assertEqual(2, len(captured_queries)) @override_settings(ROOT_URLCONF="test_utils.urls") class AssertNumQueriesContextManagerTests(TestCase): @classmethod def setUpTestData(cls): cls.person_pk = str(Person.objects.create(name="test").pk) cls.url = f"/test_utils/get_person/{cls.person_pk}/" def test_simple(self): with self.assertNumQueries(0): pass with self.assertNumQueries(1): Person.objects.count() with self.assertNumQueries(2): Person.objects.count() 
Person.objects.count() def test_failure(self): msg = "1 != 2 : 1 queries executed, 2 expected\nCaptured queries were:\n1." with self.assertRaisesMessage(AssertionError, msg): with self.assertNumQueries(2): Person.objects.count() with self.assertRaises(TypeError): with self.assertNumQueries(4000): raise TypeError def test_with_client(self): with self.assertNumQueries(1): self.client.get(self.url) with self.assertNumQueries(1): self.client.get(self.url) with self.assertNumQueries(2): self.client.get(self.url) self.client.get(self.url) def test_with_client_nested(self): with self.assertNumQueries(2): Person.objects.count() with self.assertNumQueries(0): pass self.client.get(self.url) @override_settings(ROOT_URLCONF="test_utils.urls") class AssertTemplateUsedContextManagerTests(SimpleTestCase): def test_usage(self): with self.assertTemplateUsed("template_used/base.html"): render_to_string("template_used/base.html") with self.assertTemplateUsed(template_name="template_used/base.html"): render_to_string("template_used/base.html") with self.assertTemplateUsed("template_used/base.html"): render_to_string("template_used/include.html") with self.assertTemplateUsed("template_used/base.html"): render_to_string("template_used/extends.html") with self.assertTemplateUsed("template_used/base.html"): render_to_string("template_used/base.html") render_to_string("template_used/base.html") def test_nested_usage(self): with self.assertTemplateUsed("template_used/base.html"): with self.assertTemplateUsed("template_used/include.html"): render_to_string("template_used/include.html") with self.assertTemplateUsed("template_used/extends.html"): with self.assertTemplateUsed("template_used/base.html"): render_to_string("template_used/extends.html") with self.assertTemplateUsed("template_used/base.html"): with self.assertTemplateUsed("template_used/alternative.html"): render_to_string("template_used/alternative.html") render_to_string("template_used/base.html") with 
self.assertTemplateUsed("template_used/base.html"): render_to_string("template_used/extends.html") with self.assertTemplateNotUsed("template_used/base.html"): render_to_string("template_used/alternative.html") render_to_string("template_used/base.html") def test_not_used(self): with self.assertTemplateNotUsed("template_used/base.html"): pass with self.assertTemplateNotUsed("template_used/alternative.html"): pass def test_error_message_no_template_used(self): msg = "No templates used to render the response" with self.assertRaisesMessage(AssertionError, msg): with self.assertTemplateUsed("template_used/base.html"): pass with self.assertRaisesMessage(AssertionError, msg): with self.assertTemplateUsed(template_name="template_used/base.html"): pass with self.assertRaisesMessage(AssertionError, msg): response = self.client.get("/test_utils/no_template_used/") self.assertTemplateUsed(response, "template_used/base.html") with self.assertRaisesMessage(AssertionError, msg): with self.assertTemplateUsed("template_used/base.html"): self.client.get("/test_utils/no_template_used/") with self.assertRaisesMessage(AssertionError, msg): with self.assertTemplateUsed("template_used/base.html"): template = Template("template_used/alternative.html", name=None) template.render(Context()) def test_error_message_unexpected_template_used(self): msg = ( "Template 'template_used/base.html' was not a template used to render " "the response. 
Actual template(s) used: template_used/alternative.html" ) with self.assertRaisesMessage(AssertionError, msg): with self.assertTemplateUsed("template_used/base.html"): render_to_string("template_used/alternative.html") def test_msg_prefix(self): msg_prefix = "Prefix" msg = f"{msg_prefix}: No templates used to render the response" with self.assertRaisesMessage(AssertionError, msg): with self.assertTemplateUsed( "template_used/base.html", msg_prefix=msg_prefix ): pass with self.assertRaisesMessage(AssertionError, msg): with self.assertTemplateUsed( template_name="template_used/base.html", msg_prefix=msg_prefix, ): pass msg = ( f"{msg_prefix}: Template 'template_used/base.html' was not a " f"template used to render the response. Actual template(s) used: " f"template_used/alternative.html" ) with self.assertRaisesMessage(AssertionError, msg): with self.assertTemplateUsed( "template_used/base.html", msg_prefix=msg_prefix ): render_to_string("template_used/alternative.html") def test_count(self): with self.assertTemplateUsed("template_used/base.html", count=2): render_to_string("template_used/base.html") render_to_string("template_used/base.html") msg = ( "Template 'template_used/base.html' was expected to be rendered " "3 time(s) but was actually rendered 2 time(s)." 
) with self.assertRaisesMessage(AssertionError, msg): with self.assertTemplateUsed("template_used/base.html", count=3): render_to_string("template_used/base.html") render_to_string("template_used/base.html") def test_failure(self): msg = "response and/or template_name argument must be provided" with self.assertRaisesMessage(TypeError, msg): with self.assertTemplateUsed(): pass msg = "No templates used to render the response" with self.assertRaisesMessage(AssertionError, msg): with self.assertTemplateUsed(""): pass with self.assertRaisesMessage(AssertionError, msg): with self.assertTemplateUsed(""): render_to_string("template_used/base.html") with self.assertRaisesMessage(AssertionError, msg): with self.assertTemplateUsed(template_name=""): pass msg = ( "Template 'template_used/base.html' was not a template used to " "render the response. Actual template(s) used: " "template_used/alternative.html" ) with self.assertRaisesMessage(AssertionError, msg): with self.assertTemplateUsed("template_used/base.html"): render_to_string("template_used/alternative.html") def test_assert_used_on_http_response(self): response = HttpResponse() msg = "%s() is only usable on responses fetched using the Django test Client." with self.assertRaisesMessage(ValueError, msg % "assertTemplateUsed"): self.assertTemplateUsed(response, "template.html") with self.assertRaisesMessage(ValueError, msg % "assertTemplateNotUsed"): self.assertTemplateNotUsed(response, "template.html") @override_settings(ROOT_URLCONF="test_utils.urls") class AssertTemplateUsedPartialTests(SimpleTestCase): def test_template_used_pass(self): with self.assertTemplateUsed("template_used/partials.html#hello"): render_to_string("template_used/partials.html#hello") def test_template_not_used_pass(self): with self.assertTemplateNotUsed("hello"): render_to_string("template_used/partials.html#hello") def test_template_used_fail(self): msg = "Template 'hello' was not a template used to render the response." 
with ( self.assertRaisesMessage(AssertionError, msg), self.assertTemplateUsed("hello"), ): render_to_string("template_used/base.html") def test_template_not_used_fail(self): msg = ( "Template 'template_used/partials.html#hello' was used " "unexpectedly in rendering the response" ) with ( self.assertRaisesMessage(AssertionError, msg), self.assertTemplateNotUsed("template_used/partials.html#hello"), ): render_to_string("template_used/partials.html#hello") def test_template_not_used_pass_non_partial(self): with self.assertTemplateNotUsed( "template_used/base.html#template_used/base.html" ): render_to_string("template_used/base.html") def test_template_used_fail_non_partial(self): msg = ( "Template 'template_used/base.html#template_used/base.html' was not a " "template used to render the response." ) with ( self.assertRaisesMessage(AssertionError, msg), self.assertTemplateUsed("template_used/base.html#template_used/base.html"), ): render_to_string("template_used/base.html") class HTMLEqualTests(SimpleTestCase): def test_html_parser(self): element = parse_html("<div><p>Hello</p></div>") self.assertEqual(len(element.children), 1) self.assertEqual(element.children[0].name, "p") self.assertEqual(element.children[0].children[0], "Hello") parse_html("<p>") parse_html("<p attr>") dom = parse_html("<p>foo") self.assertEqual(len(dom.children), 1) self.assertEqual(dom.name, "p") self.assertEqual(dom[0], "foo") def test_parse_html_in_script(self): parse_html('<script>var a = "<p" + ">";</script>') parse_html( """ <script> var js_sha_link='<p>***</p>'; </script> """ ) # script content will be parsed to text dom = parse_html( """ <script><p>foo</p> '</scr'+'ipt>' <span>bar</span></script> """ ) self.assertEqual(len(dom.children), 1) self.assertEqual(dom.children[0], "<p>foo</p> '</scr'+'ipt>' <span>bar</span>") def test_void_elements(self): for tag in VOID_ELEMENTS: with self.subTest(tag): dom = parse_html("<p>Hello <%s> world</p>" % tag) self.assertEqual(len(dom.children), 3) 
self.assertEqual(dom[0], "Hello") self.assertEqual(dom[1].name, tag) self.assertEqual(dom[2], "world") dom = parse_html("<p>Hello <%s /> world</p>" % tag) self.assertEqual(len(dom.children), 3) self.assertEqual(dom[0], "Hello") self.assertEqual(dom[1].name, tag) self.assertEqual(dom[2], "world") def test_simple_equal_html(self): self.assertHTMLEqual("", "") self.assertHTMLEqual("<p></p>", "<p></p>") self.assertHTMLEqual("<p></p>", " <p> </p> ") self.assertHTMLEqual("<div><p>Hello</p></div>", "<div><p>Hello</p></div>") self.assertHTMLEqual("<div><p>Hello</p></div>", "<div> <p>Hello</p> </div>") self.assertHTMLEqual("<div>\n<p>Hello</p></div>", "<div><p>Hello</p></div>\n") self.assertHTMLEqual( "<div><p>Hello\nWorld !</p></div>", "<div><p>Hello World\n!</p></div>" ) self.assertHTMLEqual( "<div><p>Hello\nWorld !</p></div>", "<div><p>Hello World\n!</p></div>" ) self.assertHTMLEqual("<p>Hello World !</p>", "<p>Hello World\n\n!</p>") self.assertHTMLEqual("<p> </p>", "<p></p>") self.assertHTMLEqual("<p/>", "<p></p>") self.assertHTMLEqual("<p />", "<p></p>") self.assertHTMLEqual("<input checked>", '<input checked="checked">') self.assertHTMLEqual("<p>Hello", "<p> Hello") self.assertHTMLEqual("<p>Hello</p>World", "<p>Hello</p> World") def test_ignore_comments(self): self.assertHTMLEqual( "<div>Hello<!-- this is a comment --> World!</div>", "<div>Hello World!</div>", ) def test_unequal_html(self): self.assertHTMLNotEqual("<p>Hello</p>", "<p>Hello!</p>") self.assertHTMLNotEqual("<p>foo&#20;bar</p>", "<p>foo&nbsp;bar</p>") self.assertHTMLNotEqual("<p>foo bar</p>", "<p>foo &nbsp;bar</p>") self.assertHTMLNotEqual("<p>foo nbsp</p>", "<p>foo &nbsp;</p>") self.assertHTMLNotEqual("<p>foo #20</p>", "<p>foo &#20;</p>") self.assertHTMLNotEqual( "<p><span>Hello</span><span>World</span></p>", "<p><span>Hello</span>World</p>", ) self.assertHTMLNotEqual( "<p><span>Hello</span>World</p>", "<p><span>Hello</span><span>World</span></p>", ) def test_attributes(self): self.assertHTMLEqual( 
'<input type="text" id="id_name" />', '<input id="id_name" type="text" />' ) self.assertHTMLEqual( """<input type='text' id="id_name" />""", '<input id="id_name" type="text" />', ) self.assertHTMLNotEqual( '<input type="text" id="id_name" />', '<input type="password" id="id_name" />', ) def test_class_attribute(self): pairs = [ ('<p class="foo bar"></p>', '<p class="bar foo"></p>'), ('<p class=" foo bar "></p>', '<p class="bar foo"></p>'), ('<p class=" foo bar "></p>', '<p class="bar foo"></p>'), ('<p class="foo\tbar"></p>', '<p class="bar foo"></p>'), ('<p class="\tfoo\tbar\t"></p>', '<p class="bar foo"></p>'), ('<p class="\t\t\tfoo\t\t\tbar\t\t\t"></p>', '<p class="bar foo"></p>'), ('<p class="\t \nfoo \t\nbar\n\t "></p>', '<p class="bar foo"></p>'), ] for html1, html2 in pairs: with self.subTest(html1): self.assertHTMLEqual(html1, html2) def test_boolean_attribute(self): html1 = "<input checked>" html2 = '<input checked="">' html3 = '<input checked="checked">' self.assertHTMLEqual(html1, html2) self.assertHTMLEqual(html1, html3) self.assertHTMLEqual(html2, html3) self.assertHTMLNotEqual(html1, '<input checked="invalid">') self.assertEqual(str(parse_html(html1)), "<input checked>") self.assertEqual(str(parse_html(html2)), "<input checked>") self.assertEqual(str(parse_html(html3)), "<input checked>") def test_non_boolean_attibutes(self): html1 = "<input value>" html2 = '<input value="">' html3 = '<input value="value">' self.assertHTMLEqual(html1, html2) self.assertHTMLNotEqual(html1, html3) self.assertEqual(str(parse_html(html1)), '<input value="">') self.assertEqual(str(parse_html(html2)), '<input value="">') def test_normalize_refs(self): pairs = [ ("&#39;", "&#x27;"), ("&#39;", "'"), ("&#x27;", "&#39;"), ("&#x27;", "'"), ("'", "&#39;"), ("'", "&#x27;"), ("&amp;", "&#38;"), ("&amp;", "&#x26;"), ("&amp;", "&"), ("&#38;", "&amp;"), ("&#38;", "&#x26;"), ("&#38;", "&"), ("&#x26;", "&amp;"), ("&#x26;", "&#38;"), ("&#x26;", "&"), ("&", "&amp;"), ("&", "&#38;"), ("&", 
"&#x26;"), ] for pair in pairs: with self.subTest(repr(pair)): self.assertHTMLEqual(*pair) def test_complex_examples(self): self.assertHTMLEqual( """<tr><th><label for="id_first_name">First name:</label></th> <td><input type="text" name="first_name" value="John" id="id_first_name" /></td></tr> <tr><th><label for="id_last_name">Last name:</label></th> <td><input type="text" id="id_last_name" name="last_name" value="Lennon" /></td></tr> <tr><th><label for="id_birthday">Birthday:</label></th>
python
BSD-3-Clause
3201a895cba335000827b28768a7b7105c81b415
2026-01-04T14:38:15.489092Z
true
django/django
https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/test_utils/test_transactiontestcase.py
tests/test_utils/test_transactiontestcase.py
from unittest import mock from django.db import connections from django.test import TestCase, TransactionTestCase, override_settings from django.test.testcases import DatabaseOperationForbidden from .models import Car, Person class TestSerializedRollbackInhibitsPostMigrate(TransactionTestCase): """ TransactionTestCase._fixture_teardown() inhibits the post_migrate signal for test classes with serialized_rollback=True. """ available_apps = ["test_utils"] serialized_rollback = True def setUp(self): # self.available_apps must be None to test the serialized_rollback # condition. self.available_apps = None def tearDown(self): self.available_apps = ["test_utils"] @mock.patch("django.test.testcases.call_command") def test(self, call_command): # with a mocked call_command(), this doesn't have any effect. self._fixture_teardown() call_command.assert_called_with( "flush", interactive=False, allow_cascade=False, reset_sequences=False, inhibit_post_migrate=True, database="default", verbosity=0, ) @override_settings(DEBUG=True) # Enable query logging for test_queries_cleared class TransactionTestCaseDatabasesTests(TestCase): available_apps = [] databases = {"default", "other"} def test_queries_cleared(self): """ TransactionTestCase._pre_setup() clears the connections' queries_log so that it's less likely to overflow. An overflow causes assertNumQueries() to fail. """ for alias in self.databases: self.assertEqual( len(connections[alias].queries_log), 0, "Failed for alias %s" % alias ) class DisallowedDatabaseQueriesTests(TransactionTestCase): available_apps = ["test_utils"] def test_disallowed_database_queries(self): message = ( "Database queries to 'other' are not allowed in this test. " "Add 'other' to test_utils.test_transactiontestcase." "DisallowedDatabaseQueriesTests.databases to ensure proper test " "isolation and silence this failure." 
) with self.assertRaisesMessage(DatabaseOperationForbidden, message): Car.objects.using("other").get() class FixtureAvailableInSetUpClassTest(TransactionTestCase): available_apps = ["test_utils"] fixtures = ["person.json"] @classmethod def setUpClass(cls): super().setUpClass() cls.elvis = Person.objects.get(name="Elvis Presley") def test_fixture_loaded_during_class_setup(self): self.assertIsInstance(self.elvis, Person)
python
BSD-3-Clause
3201a895cba335000827b28768a7b7105c81b415
2026-01-04T14:38:15.489092Z
false
django/django
https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/test_utils/urls.py
tests/test_utils/urls.py
from django.urls import path from . import views urlpatterns = [ path("test_utils/get_person/<int:pk>/", views.get_person), path( "test_utils/no_template_used/", views.no_template_used, name="no_template_used" ), ]
python
BSD-3-Clause
3201a895cba335000827b28768a7b7105c81b415
2026-01-04T14:38:15.489092Z
false
django/django
https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/humanize_tests/__init__.py
tests/humanize_tests/__init__.py
python
BSD-3-Clause
3201a895cba335000827b28768a7b7105c81b415
2026-01-04T14:38:15.489092Z
false
django/django
https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/humanize_tests/tests.py
tests/humanize_tests/tests.py
import datetime from decimal import Decimal from django.contrib.humanize.templatetags import humanize from django.template import Context, Template, defaultfilters from django.test import SimpleTestCase, modify_settings, override_settings from django.utils import translation from django.utils.html import escape from django.utils.timezone import get_fixed_timezone from django.utils.translation import gettext as _ # Mock out datetime in some tests so they don't fail occasionally when they # run too slow. Use a fixed datetime for datetime.now(). DST change in # America/Chicago (the default time zone) happened on March 11th in 2012. now = datetime.datetime(2012, 3, 9, 22, 30) class MockDateTime(datetime.datetime): @classmethod def now(cls, tz=None): if tz is None or tz.utcoffset(now) is None: return now else: # equals now.replace(tzinfo=utc) return now.replace(tzinfo=tz) + tz.utcoffset(now) @modify_settings(INSTALLED_APPS={"append": "django.contrib.humanize"}) class HumanizeTests(SimpleTestCase): def humanize_tester( self, test_list, result_list, method, normalize_result_func=escape ): for test_content, result in zip(test_list, result_list): with self.subTest(test_content): t = Template("{%% load humanize %%}{{ test_content|%s }}" % method) rendered = t.render(Context(locals())).strip() self.assertEqual( rendered, normalize_result_func(result), msg="%s test failed, produced '%s', should've produced '%s'" % (method, rendered, result), ) def test_ordinal(self): test_list = ( "1", "2", "3", "4", "11", "12", "13", "101", "102", "103", "111", "-0", "-1", "-105", "something else", None, ) result_list = ( "1st", "2nd", "3rd", "4th", "11th", "12th", "13th", "101st", "102nd", "103rd", "111th", "0th", "-1", "-105", "something else", None, ) with translation.override("en"): self.humanize_tester(test_list, result_list, "ordinal") def test_i18n_html_ordinal(self): """Allow html in output on i18n strings""" test_list = ( "1", "2", "3", "4", "11", "12", "13", "101", "102", "103", 
"111", "something else", None, ) result_list = ( "1<sup>er</sup>", "2<sup>e</sup>", "3<sup>e</sup>", "4<sup>e</sup>", "11<sup>e</sup>", "12<sup>e</sup>", "13<sup>e</sup>", "101<sup>er</sup>", "102<sup>e</sup>", "103<sup>e</sup>", "111<sup>e</sup>", "something else", "None", ) with translation.override("fr-fr"): self.humanize_tester(test_list, result_list, "ordinal", lambda x: x) def test_intcomma(self): test_list = ( 100, -100, 1000, -1000, 10123, -10123, 10311, -10311, 1000000, -1000000, 1234567.25, -1234567.25, "100", "-100", "100.1", "-100.1", "100.13", "-100.13", "1000", "-1000", "10123", "-10123", "10311", "-10311", "100000.13", "-100000.13", "1000000", "-1000000", "1234567.1234567", "-1234567.1234567", Decimal("1234567.1234567"), Decimal("-1234567.1234567"), Decimal("Infinity"), Decimal("-Infinity"), Decimal("NaN"), None, "1234567", "-1234567", "1234567.12", "-1234567.12", "the quick brown fox jumped over the lazy dog", ) result_list = ( "100", "-100", "1,000", "-1,000", "10,123", "-10,123", "10,311", "-10,311", "1,000,000", "-1,000,000", "1,234,567.25", "-1,234,567.25", "100", "-100", "100.1", "-100.1", "100.13", "-100.13", "1,000", "-1,000", "10,123", "-10,123", "10,311", "-10,311", "100,000.13", "-100,000.13", "1,000,000", "-1,000,000", "1,234,567.1234567", "-1,234,567.1234567", "1,234,567.1234567", "-1,234,567.1234567", "Infinity", "-Infinity", "NaN", None, "1,234,567", "-1,234,567", "1,234,567.12", "-1,234,567.12", "the quick brown fox jumped over the lazy dog", ) with translation.override("en"): self.humanize_tester(test_list, result_list, "intcomma") def test_l10n_intcomma(self): test_list = ( 100, -100, 1000, -1000, 10123, -10123, 10311, -10311, 1000000, -1000000, 1234567.25, -1234567.25, "100", "-100", "1000", "-1000", "10123", "-10123", "10311", "-10311", "1000000", "-1000000", "1234567.1234567", "-1234567.1234567", Decimal("1234567.1234567"), -Decimal("1234567.1234567"), None, "1234567", "-1234567", "1234567.12", "-1234567.12", "the quick brown fox 
jumped over the lazy dog", ) result_list_en = ( "100", "-100", "1,000", "-1,000", "10,123", "-10,123", "10,311", "-10,311", "1,000,000", "-1,000,000", "1,234,567.25", "-1,234,567.25", "100", "-100", "1,000", "-1,000", "10,123", "-10,123", "10,311", "-10,311", "1,000,000", "-1,000,000", "1,234,567.1234567", "-1,234,567.1234567", "1,234,567.1234567", "-1,234,567.1234567", None, "1,234,567", "-1,234,567", "1,234,567.12", "-1,234,567.12", "the quick brown fox jumped over the lazy dog", ) result_list_de = ( "100", "-100", "1.000", "-1.000", "10.123", "-10.123", "10.311", "-10.311", "1.000.000", "-1.000.000", "1.234.567,25", "-1.234.567,25", "100", "-100", "1.000", "-1.000", "10.123", "-10.123", "10.311", "-10.311", "1.000.000", "-1.000.000", "1.234.567,1234567", "-1.234.567,1234567", "1.234.567,1234567", "-1.234.567,1234567", None, "1.234.567", "-1.234.567", "1.234.567,12", "-1.234.567,12", "the quick brown fox jumped over the lazy dog", ) with self.settings(USE_THOUSAND_SEPARATOR=False): with translation.override("en"): self.humanize_tester(test_list, result_list_en, "intcomma") with translation.override("de"): self.humanize_tester(test_list, result_list_de, "intcomma") def test_intcomma_without_number_grouping(self): # Regression for #17414 with translation.override("ja"): self.humanize_tester([100], ["100"], "intcomma") def test_intword(self): # Positive integers. test_list_positive = ( "100", "1000000", "1200000", "1290000", "1000000000", "2000000000", "6000000000000", "1300000000000000", "3500000000000000000000", "8100000000000000000000000000000000", ("1" + "0" * 100), ("1" + "0" * 104), ) result_list_positive = ( "100", "1.0 million", "1.2 million", "1.3 million", "1.0 billion", "2.0 billion", "6.0 trillion", "1.3 quadrillion", "3.5 sextillion", "8.1 decillion", "1.0 googol", ("1" + "0" * 104), ) # Negative integers. 
test_list_negative = ("-" + test for test in test_list_positive) result_list_negative = ("-" + result for result in result_list_positive) with translation.override("en"): self.humanize_tester( (*test_list_positive, *test_list_negative, None), (*result_list_positive, *result_list_negative, None), "intword", ) def test_i18n_intcomma(self): test_list = ( 100, 1000, 10123, 10311, 1000000, 1234567.25, "100", "1000", "10123", "10311", "1000000", None, ) result_list = ( "100", "1.000", "10.123", "10.311", "1.000.000", "1.234.567,25", "100", "1.000", "10.123", "10.311", "1.000.000", None, ) with self.settings(USE_THOUSAND_SEPARATOR=True): with translation.override("de"): self.humanize_tester(test_list, result_list, "intcomma") def test_i18n_intword(self): # Positive integers. test_list_positive = ( "100", "1000000", "1200000", "1290000", "1000000000", "2000000000", "6000000000000", ) result_list_positive = ( "100", "1,0 Million", "1,2 Millionen", "1,3 Millionen", "1,0 Milliarde", "2,0 Milliarden", "6,0 Billionen", ) # Negative integers. 
test_list_negative = ("-" + test for test in test_list_positive) result_list_negative = ("-" + result for result in result_list_positive) with self.settings(USE_THOUSAND_SEPARATOR=True): with translation.override("de"): self.humanize_tester( (*test_list_positive, *test_list_negative), (*result_list_positive, *result_list_negative), "intword", ) def test_apnumber(self): test_list = [str(x) for x in range(1, 11)] test_list.append(None) result_list = ( "one", "two", "three", "four", "five", "six", "seven", "eight", "nine", "10", None, ) with translation.override("en"): self.humanize_tester(test_list, result_list, "apnumber") def test_naturalday(self): today = datetime.date.today() yesterday = today - datetime.timedelta(days=1) tomorrow = today + datetime.timedelta(days=1) someday = today - datetime.timedelta(days=10) notdate = "I'm not a date value" test_list = (today, yesterday, tomorrow, someday, notdate, None) someday_result = defaultfilters.date(someday) result_list = ( _("today"), _("yesterday"), _("tomorrow"), someday_result, "I'm not a date value", None, ) self.humanize_tester(test_list, result_list, "naturalday") def test_naturalday_tz(self): today = datetime.date.today() tz_one = get_fixed_timezone(-720) tz_two = get_fixed_timezone(720) # Can be today or yesterday date_one = datetime.datetime(today.year, today.month, today.day, tzinfo=tz_one) naturalday_one = humanize.naturalday(date_one) # Can be today or tomorrow date_two = datetime.datetime(today.year, today.month, today.day, tzinfo=tz_two) naturalday_two = humanize.naturalday(date_two) # As 24h of difference they will never be the same self.assertNotEqual(naturalday_one, naturalday_two) def test_naturalday_uses_localtime(self): # Regression for #18504 # This is 2012-03-08HT19:30:00-06:00 in America/Chicago dt = datetime.datetime(2012, 3, 9, 1, 30, tzinfo=datetime.UTC) orig_humanize_datetime, humanize.datetime = humanize.datetime, MockDateTime try: with override_settings(TIME_ZONE="America/Chicago", 
USE_TZ=True): with translation.override("en"): self.humanize_tester([dt], ["yesterday"], "naturalday") finally: humanize.datetime = orig_humanize_datetime def test_naturaltime(self): class naive(datetime.tzinfo): def utcoffset(self, dt): return None test_list = [ "test", now, now - datetime.timedelta(microseconds=1), now - datetime.timedelta(seconds=1), now - datetime.timedelta(seconds=30), now - datetime.timedelta(minutes=1, seconds=30), now - datetime.timedelta(minutes=2), now - datetime.timedelta(hours=1, minutes=30, seconds=30), now - datetime.timedelta(hours=23, minutes=50, seconds=50), now - datetime.timedelta(days=1), now - datetime.timedelta(days=500), now + datetime.timedelta(seconds=1), now + datetime.timedelta(seconds=30), now + datetime.timedelta(minutes=1, seconds=30), now + datetime.timedelta(minutes=2), now + datetime.timedelta(hours=1, minutes=30, seconds=30), now + datetime.timedelta(hours=23, minutes=50, seconds=50), now + datetime.timedelta(days=1), now + datetime.timedelta(days=2, hours=6), now + datetime.timedelta(days=500), now.replace(tzinfo=naive()), now.replace(tzinfo=datetime.UTC), ] result_list = [ "test", "now", "now", "a second ago", "30\xa0seconds ago", "a minute ago", "2\xa0minutes ago", "an hour ago", "23\xa0hours ago", "1\xa0day ago", "1\xa0year, 4\xa0months ago", "a second from now", "30\xa0seconds from now", "a minute from now", "2\xa0minutes from now", "an hour from now", "23\xa0hours from now", "1\xa0day from now", "2\xa0days, 6\xa0hours from now", "1\xa0year, 4\xa0months from now", "now", "now", ] # Because of the DST change, 2 days and 6 hours after the chosen # date in naive arithmetic is only 2 days and 5 hours after in # aware arithmetic. 
result_list_with_tz_support = result_list[:] assert result_list_with_tz_support[-4] == "2\xa0days, 6\xa0hours from now" result_list_with_tz_support[-4] == "2\xa0days, 5\xa0hours from now" orig_humanize_datetime, humanize.datetime = humanize.datetime, MockDateTime try: with translation.override("en"): self.humanize_tester(test_list, result_list, "naturaltime") with override_settings(USE_TZ=True): self.humanize_tester( test_list, result_list_with_tz_support, "naturaltime" ) finally: humanize.datetime = orig_humanize_datetime def test_naturaltime_as_documented(self): """ #23340 -- Verify the documented behavior of humanize.naturaltime. """ time_format = "%d %b %Y %H:%M:%S" documented_now = datetime.datetime.strptime("17 Feb 2007 16:30:00", time_format) test_data = ( ("17 Feb 2007 16:30:00", "now"), ("17 Feb 2007 16:29:31", "29 seconds ago"), ("17 Feb 2007 16:29:00", "a minute ago"), ("17 Feb 2007 16:25:35", "4 minutes ago"), ("17 Feb 2007 15:30:29", "59 minutes ago"), ("17 Feb 2007 15:30:01", "59 minutes ago"), ("17 Feb 2007 15:30:00", "an hour ago"), ("17 Feb 2007 13:31:29", "2 hours ago"), ("16 Feb 2007 13:31:29", "1 day, 2 hours ago"), ("16 Feb 2007 13:30:01", "1 day, 2 hours ago"), ("16 Feb 2007 13:30:00", "1 day, 3 hours ago"), ("17 Feb 2007 16:30:30", "30 seconds from now"), ("17 Feb 2007 16:30:29", "29 seconds from now"), ("17 Feb 2007 16:31:00", "a minute from now"), ("17 Feb 2007 16:34:35", "4 minutes from now"), ("17 Feb 2007 17:30:29", "an hour from now"), ("17 Feb 2007 18:31:29", "2 hours from now"), ("18 Feb 2007 16:31:29", "1 day from now"), ("26 Feb 2007 18:31:29", "1 week, 2 days from now"), ) class DocumentedMockDateTime(datetime.datetime): @classmethod def now(cls, tz=None): if tz is None or tz.utcoffset(documented_now) is None: return documented_now else: return documented_now.replace(tzinfo=tz) + tz.utcoffset(now) orig_humanize_datetime = humanize.datetime humanize.datetime = DocumentedMockDateTime try: for test_time_string, expected_natural_time 
in test_data: with self.subTest(test_time_string): test_time = datetime.datetime.strptime( test_time_string, time_format ) natural_time = humanize.naturaltime(test_time).replace("\xa0", " ") self.assertEqual(expected_natural_time, natural_time) finally: humanize.datetime = orig_humanize_datetime def test_inflection_for_timedelta(self): """ Translation of '%d day'/'%d month'/… may differ depending on the context of the string it is inserted in. """ test_list = [ # "%(delta)s ago" translations now - datetime.timedelta(days=1), now - datetime.timedelta(days=2), now - datetime.timedelta(days=30), now - datetime.timedelta(days=60), now - datetime.timedelta(days=500), now - datetime.timedelta(days=865), # "%(delta)s from now" translations now + datetime.timedelta(days=1), now + datetime.timedelta(days=2), now + datetime.timedelta(days=31), now + datetime.timedelta(days=61), now + datetime.timedelta(days=500), now + datetime.timedelta(days=865), ] result_list = [ "před 1\xa0dnem", "před 2\xa0dny", "před 1\xa0měsícem", "před 2\xa0měsíci", "před 1\xa0rokem, 4\xa0měsíci", "před 2\xa0lety, 4\xa0měsíci", "za 1\xa0den", "za 2\xa0dny", "za 1\xa0měsíc", "za 2\xa0měsíce", "za 1\xa0rok, 4\xa0měsíce", "za 2\xa0roky, 4\xa0měsíce", ] orig_humanize_datetime, humanize.datetime = humanize.datetime, MockDateTime try: # Choose a language with different # naturaltime-past/naturaltime-future translations. with translation.override("cs"): self.humanize_tester(test_list, result_list, "naturaltime") finally: humanize.datetime = orig_humanize_datetime
python
BSD-3-Clause
3201a895cba335000827b28768a7b7105c81b415
2026-01-04T14:38:15.489092Z
false
django/django
https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/file_uploads/views.py
tests/file_uploads/views.py
import hashlib import os from django.core.files.uploadedfile import UploadedFile from django.core.files.uploadhandler import TemporaryFileUploadHandler from django.http import HttpResponse, HttpResponseServerError, JsonResponse from .models import FileModel from .tests import UNICODE_FILENAME, UPLOAD_FOLDER from .uploadhandler import ( ErroringUploadHandler, QuotaUploadHandler, StopUploadTemporaryFileHandler, TraversalUploadHandler, ) def file_upload_view(request): """ A file upload can be updated into the POST dictionary. """ form_data = request.POST.copy() form_data.update(request.FILES) if isinstance(form_data.get("file_field"), UploadedFile) and isinstance( form_data["name"], str ): # If a file is posted, the dummy client should only post the file name, # not the full path. if os.path.dirname(form_data["file_field"].name) != "": return HttpResponseServerError() return HttpResponse() else: return HttpResponseServerError() def file_upload_view_verify(request): """ Use the sha digest hash to verify the uploaded contents. """ form_data = request.POST.copy() form_data.update(request.FILES) for key, value in form_data.items(): if key.endswith("_hash"): continue if key + "_hash" not in form_data: continue submitted_hash = form_data[key + "_hash"] if isinstance(value, UploadedFile): new_hash = hashlib.sha1(value.read()).hexdigest() else: new_hash = hashlib.sha1(value.encode()).hexdigest() if new_hash != submitted_hash: return HttpResponseServerError() # Adding large file to the database should succeed largefile = request.FILES["file_field2"] obj = FileModel() obj.testfile.save(largefile.name, largefile) return HttpResponse() def file_upload_unicode_name(request): # Check to see if Unicode name came through properly. if not request.FILES["file_unicode"].name.endswith(UNICODE_FILENAME): return HttpResponseServerError() # Check to make sure the exotic characters are preserved even # through file save. 
uni_named_file = request.FILES["file_unicode"] file_model = FileModel.objects.create(testfile=uni_named_file) full_name = f"{UPLOAD_FOLDER}/{uni_named_file.name}" return ( HttpResponse() if file_model.testfile.storage.exists(full_name) else HttpResponseServerError() ) def file_upload_echo(request): """ Simple view to echo back info about uploaded files for tests. """ r = {k: f.name for k, f in request.FILES.items()} return JsonResponse(r) def file_upload_echo_content(request): """ Simple view to echo back the content of uploaded files for tests. """ def read_and_close(f): with f: return f.read().decode() r = {k: read_and_close(f) for k, f in request.FILES.items()} return JsonResponse(r) def file_upload_quota(request): """ Dynamically add in an upload handler. """ request.upload_handlers.insert(0, QuotaUploadHandler()) return file_upload_echo(request) def file_upload_quota_broken(request): """ You can't change handlers after reading FILES; this view shouldn't work. """ response = file_upload_echo(request) request.upload_handlers.insert(0, QuotaUploadHandler()) return response def file_stop_upload_temporary_file(request): request.upload_handlers.insert(0, StopUploadTemporaryFileHandler()) request.upload_handlers.pop(2) request.FILES # Trigger file parsing. return JsonResponse( {"temp_path": request.upload_handlers[0].file.temporary_file_path()}, ) def file_upload_interrupted_temporary_file(request): request.upload_handlers.insert(0, TemporaryFileUploadHandler()) request.upload_handlers.pop(2) request.FILES # Trigger file parsing. return JsonResponse( {"temp_path": request.upload_handlers[0].file.temporary_file_path()}, ) def file_upload_getlist_count(request): """ Check the .getlist() function to ensure we receive the correct number of files. 
""" file_counts = {} for key in request.FILES: file_counts[key] = len(request.FILES.getlist(key)) return JsonResponse(file_counts) def file_upload_errors(request): request.upload_handlers.insert(0, ErroringUploadHandler()) return file_upload_echo(request) def file_upload_filename_case_view(request): """ Check adding the file to the database will preserve the filename case. """ file = request.FILES["file_field"] obj = FileModel() obj.testfile.save(file.name, file) return HttpResponse("%d" % obj.pk) def file_upload_content_type_extra(request): """ Simple view to echo back extra content-type parameters. """ params = {} for file_name, uploadedfile in request.FILES.items(): params[file_name] = { k: v.decode() for k, v in uploadedfile.content_type_extra.items() } return JsonResponse(params) def file_upload_fd_closing(request, access): if access == "t": request.FILES # Trigger file parsing. return HttpResponse() def file_upload_traversal_view(request): request.upload_handlers.insert(0, TraversalUploadHandler()) request.FILES # Trigger file parsing. return JsonResponse( {"file_name": request.upload_handlers[0].file_name}, )
python
BSD-3-Clause
3201a895cba335000827b28768a7b7105c81b415
2026-01-04T14:38:15.489092Z
false
django/django
https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/file_uploads/models.py
tests/file_uploads/models.py
from django.db import models class FileModel(models.Model): testfile = models.FileField(upload_to="test_upload")
python
BSD-3-Clause
3201a895cba335000827b28768a7b7105c81b415
2026-01-04T14:38:15.489092Z
false
django/django
https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/file_uploads/__init__.py
tests/file_uploads/__init__.py
python
BSD-3-Clause
3201a895cba335000827b28768a7b7105c81b415
2026-01-04T14:38:15.489092Z
false
django/django
https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/file_uploads/tests.py
tests/file_uploads/tests.py
import base64 import hashlib import os import shutil import sys import tempfile as sys_tempfile import unittest from io import BytesIO, StringIO from unittest import mock from urllib.parse import quote from django.conf import DEFAULT_STORAGE_ALIAS from django.core.exceptions import SuspiciousFileOperation from django.core.files import temp as tempfile from django.core.files.storage import default_storage from django.core.files.uploadedfile import SimpleUploadedFile, UploadedFile from django.http.multipartparser import ( FILE, MAX_TOTAL_HEADER_SIZE, MultiPartParser, MultiPartParserError, Parser, ) from django.test import SimpleTestCase, TestCase, client, override_settings from . import uploadhandler from .models import FileModel UNICODE_FILENAME = "test-0123456789_中文_Orléans.jpg" MEDIA_ROOT = sys_tempfile.mkdtemp() UPLOAD_FOLDER = "test_upload" UPLOAD_TO = os.path.join(MEDIA_ROOT, UPLOAD_FOLDER) CANDIDATE_TRAVERSAL_FILE_NAMES = [ "/tmp/hax0rd.txt", # Absolute path, *nix-style. "C:\\Windows\\hax0rd.txt", # Absolute path, win-style. "C:/Windows/hax0rd.txt", # Absolute path, broken-style. "\\tmp\\hax0rd.txt", # Absolute path, broken in a different way. "/tmp\\hax0rd.txt", # Absolute path, broken by mixing. "subdir/hax0rd.txt", # Descendant path, *nix-style. "subdir\\hax0rd.txt", # Descendant path, win-style. "sub/dir\\hax0rd.txt", # Descendant path, mixed. "../../hax0rd.txt", # Relative path, *nix-style. "..\\..\\hax0rd.txt", # Relative path, win-style. "../..\\hax0rd.txt", # Relative path, mixed. "..&#x2F;hax0rd.txt", # HTML entities. "..&sol;hax0rd.txt", # HTML entities. ] CANDIDATE_INVALID_FILE_NAMES = [ "/tmp/", # Directory, *nix-style. "c:\\tmp\\", # Directory, win-style. "/tmp/.", # Directory dot, *nix-style. "c:\\tmp\\.", # Directory dot, *nix-style. "/tmp/..", # Parent directory, *nix-style. "c:\\tmp\\..", # Parent directory, win-style. "", # Empty filename. 
] @override_settings( MEDIA_ROOT=MEDIA_ROOT, ROOT_URLCONF="file_uploads.urls", MIDDLEWARE=[] ) class FileUploadTests(TestCase): @classmethod def setUpClass(cls): super().setUpClass() os.makedirs(MEDIA_ROOT, exist_ok=True) cls.addClassCleanup(shutil.rmtree, MEDIA_ROOT) def test_upload_name_is_validated(self): candidates = [ "/tmp/", "/tmp/..", "/tmp/.", ] if sys.platform == "win32": candidates.extend( [ "c:\\tmp\\", "c:\\tmp\\..", "c:\\tmp\\.", ] ) for file_name in candidates: with self.subTest(file_name=file_name): self.assertRaises(SuspiciousFileOperation, UploadedFile, name=file_name) def test_simple_upload(self): with open(__file__, "rb") as fp: post_data = { "name": "Ringo", "file_field": fp, } response = self.client.post("/upload/", post_data) self.assertEqual(response.status_code, 200) def test_large_upload(self): file = tempfile.NamedTemporaryFile with file(suffix=".file1") as file1, file(suffix=".file2") as file2: file1.write(b"a" * (2**21)) file1.seek(0) file2.write(b"a" * (10 * 2**20)) file2.seek(0) post_data = { "name": "Ringo", "file_field1": file1, "file_field2": file2, } for key in list(post_data): try: post_data[key + "_hash"] = hashlib.sha1( post_data[key].read() ).hexdigest() post_data[key].seek(0) except AttributeError: post_data[key + "_hash"] = hashlib.sha1( post_data[key].encode() ).hexdigest() response = self.client.post("/verify/", post_data) self.assertEqual(response.status_code, 200) def _test_base64_upload(self, content, encode=base64.b64encode): payload = client.FakePayload( "\r\n".join( [ "--" + client.BOUNDARY, 'Content-Disposition: form-data; name="file"; filename="test.txt"', "Content-Type: application/octet-stream", "Content-Transfer-Encoding: base64", "", ] ) ) payload.write(b"\r\n" + encode(content.encode()) + b"\r\n") payload.write("--" + client.BOUNDARY + "--\r\n") r = { "CONTENT_LENGTH": len(payload), "CONTENT_TYPE": client.MULTIPART_CONTENT, "PATH_INFO": "/echo_content/", "REQUEST_METHOD": "POST", "wsgi.input": payload, } 
response = self.client.request(**r) self.assertEqual(response.json()["file"], content) def test_base64_upload(self): self._test_base64_upload("This data will be transmitted base64-encoded.") def test_big_base64_upload(self): self._test_base64_upload("Big data" * 68000) # > 512Kb def test_big_base64_newlines_upload(self): self._test_base64_upload("Big data" * 68000, encode=base64.encodebytes) def test_base64_invalid_upload(self): payload = client.FakePayload( "\r\n".join( [ "--" + client.BOUNDARY, 'Content-Disposition: form-data; name="file"; filename="test.txt"', "Content-Type: application/octet-stream", "Content-Transfer-Encoding: base64", "", ] ) ) payload.write(b"\r\n!\r\n") payload.write("--" + client.BOUNDARY + "--\r\n") r = { "CONTENT_LENGTH": len(payload), "CONTENT_TYPE": client.MULTIPART_CONTENT, "PATH_INFO": "/echo_content/", "REQUEST_METHOD": "POST", "wsgi.input": payload, } response = self.client.request(**r) self.assertEqual(response.json()["file"], "") def test_unicode_file_name(self): with sys_tempfile.TemporaryDirectory() as temp_dir: # This file contains Chinese symbols and an accented char in the # name. with open(os.path.join(temp_dir, UNICODE_FILENAME), "w+b") as file1: file1.write(b"b" * (2**10)) file1.seek(0) response = self.client.post("/unicode_name/", {"file_unicode": file1}) self.assertEqual(response.status_code, 200) def test_unicode_file_name_rfc2231(self): """ Receiving file upload when filename is encoded with RFC 2231. 
""" payload = client.FakePayload() payload.write( "\r\n".join( [ "--" + client.BOUNDARY, 'Content-Disposition: form-data; name="file_unicode"; ' "filename*=UTF-8''%s" % quote(UNICODE_FILENAME), "Content-Type: application/octet-stream", "", "You got pwnd.\r\n", "\r\n--" + client.BOUNDARY + "--\r\n", ] ) ) r = { "CONTENT_LENGTH": len(payload), "CONTENT_TYPE": client.MULTIPART_CONTENT, "PATH_INFO": "/unicode_name/", "REQUEST_METHOD": "POST", "wsgi.input": payload, } response = self.client.request(**r) self.assertEqual(response.status_code, 200) def test_unicode_name_rfc2231(self): """ Receiving file upload when filename is encoded with RFC 2231. """ payload = client.FakePayload() payload.write( "\r\n".join( [ "--" + client.BOUNDARY, "Content-Disposition: form-data; name*=UTF-8''file_unicode; " "filename*=UTF-8''%s" % quote(UNICODE_FILENAME), "Content-Type: application/octet-stream", "", "You got pwnd.\r\n", "\r\n--" + client.BOUNDARY + "--\r\n", ] ) ) r = { "CONTENT_LENGTH": len(payload), "CONTENT_TYPE": client.MULTIPART_CONTENT, "PATH_INFO": "/unicode_name/", "REQUEST_METHOD": "POST", "wsgi.input": payload, } response = self.client.request(**r) self.assertEqual(response.status_code, 200) def test_unicode_file_name_rfc2231_with_double_quotes(self): payload = client.FakePayload() payload.write( "\r\n".join( [ "--" + client.BOUNDARY, 'Content-Disposition: form-data; name="file_unicode"; ' "filename*=\"UTF-8''%s\"" % quote(UNICODE_FILENAME), "Content-Type: application/octet-stream", "", "You got pwnd.\r\n", "\r\n--" + client.BOUNDARY + "--\r\n", ] ) ) r = { "CONTENT_LENGTH": len(payload), "CONTENT_TYPE": client.MULTIPART_CONTENT, "PATH_INFO": "/unicode_name/", "REQUEST_METHOD": "POST", "wsgi.input": payload, } response = self.client.request(**r) self.assertEqual(response.status_code, 200) def test_unicode_name_rfc2231_with_double_quotes(self): payload = client.FakePayload() payload.write( "\r\n".join( [ "--" + client.BOUNDARY, "Content-Disposition: form-data; 
name*=\"UTF-8''file_unicode\"; " "filename*=\"UTF-8''%s\"" % quote(UNICODE_FILENAME), "Content-Type: application/octet-stream", "", "You got pwnd.\r\n", "\r\n--" + client.BOUNDARY + "--\r\n", ] ) ) r = { "CONTENT_LENGTH": len(payload), "CONTENT_TYPE": client.MULTIPART_CONTENT, "PATH_INFO": "/unicode_name/", "REQUEST_METHOD": "POST", "wsgi.input": payload, } response = self.client.request(**r) self.assertEqual(response.status_code, 200) def test_blank_filenames(self): """ Receiving file upload when filename is blank (before and after sanitization) should be okay. """ filenames = [ "", # Normalized by MultiPartParser.IE_sanitize(). "C:\\Windows\\", # Normalized by os.path.basename(). "/", "ends-with-slash/", ] payload = client.FakePayload() for i, name in enumerate(filenames): payload.write( "\r\n".join( [ "--" + client.BOUNDARY, 'Content-Disposition: form-data; name="file%s"; filename="%s"' % (i, name), "Content-Type: application/octet-stream", "", "You got pwnd.\r\n", ] ) ) payload.write("\r\n--" + client.BOUNDARY + "--\r\n") r = { "CONTENT_LENGTH": len(payload), "CONTENT_TYPE": client.MULTIPART_CONTENT, "PATH_INFO": "/echo/", "REQUEST_METHOD": "POST", "wsgi.input": payload, } response = self.client.request(**r) self.assertEqual(response.status_code, 200) # Empty filenames should be ignored received = response.json() for i, name in enumerate(filenames): self.assertIsNone(received.get("file%s" % i)) def test_non_printable_chars_in_file_names(self): file_name = "non-\x00printable\x00\n_chars.txt\x00" payload = client.FakePayload() payload.write( "\r\n".join( [ "--" + client.BOUNDARY, f'Content-Disposition: form-data; name="file"; ' f'filename="{file_name}"', "Content-Type: application/octet-stream", "", "You got pwnd.\r\n", ] ) ) payload.write("\r\n--" + client.BOUNDARY + "--\r\n") r = { "CONTENT_LENGTH": len(payload), "CONTENT_TYPE": client.MULTIPART_CONTENT, "PATH_INFO": "/echo/", "REQUEST_METHOD": "POST", "wsgi.input": payload, } response = 
self.client.request(**r) # Non-printable chars are sanitized. received = response.json() self.assertEqual(received["file"], "non-printable_chars.txt") def test_dangerous_file_names(self): """ Uploaded file names should be sanitized before ever reaching the view. """ # This test simulates possible directory traversal attacks by a # malicious uploader We have to do some monkeybusiness here to # construct a malicious payload with an invalid file name (containing # os.sep or os.pardir). This similar to what an attacker would need to # do when trying such an attack. payload = client.FakePayload() for i, name in enumerate(CANDIDATE_TRAVERSAL_FILE_NAMES): payload.write( "\r\n".join( [ "--" + client.BOUNDARY, 'Content-Disposition: form-data; name="file%s"; filename="%s"' % (i, name), "Content-Type: application/octet-stream", "", "You got pwnd.\r\n", ] ) ) payload.write("\r\n--" + client.BOUNDARY + "--\r\n") r = { "CONTENT_LENGTH": len(payload), "CONTENT_TYPE": client.MULTIPART_CONTENT, "PATH_INFO": "/echo/", "REQUEST_METHOD": "POST", "wsgi.input": payload, } response = self.client.request(**r) # The filenames should have been sanitized by the time it got to the # view. received = response.json() for i, name in enumerate(CANDIDATE_TRAVERSAL_FILE_NAMES): got = received["file%s" % i] self.assertEqual(got, "hax0rd.txt") def test_filename_overflow(self): """ File names over 256 characters (dangerous on some platforms) get fixed up. 
""" long_str = "f" * 300 cases = [ # field name, filename, expected ("long_filename", "%s.txt" % long_str, "%s.txt" % long_str[:251]), ("long_extension", "foo.%s" % long_str, ".%s" % long_str[:254]), ("no_extension", long_str, long_str[:255]), ("no_filename", ".%s" % long_str, ".%s" % long_str[:254]), ("long_everything", "%s.%s" % (long_str, long_str), ".%s" % long_str[:254]), ] payload = client.FakePayload() for name, filename, _ in cases: payload.write( "\r\n".join( [ "--" + client.BOUNDARY, 'Content-Disposition: form-data; name="{}"; filename="{}"', "Content-Type: application/octet-stream", "", "Oops.", "", ] ).format(name, filename) ) payload.write("\r\n--" + client.BOUNDARY + "--\r\n") r = { "CONTENT_LENGTH": len(payload), "CONTENT_TYPE": client.MULTIPART_CONTENT, "PATH_INFO": "/echo/", "REQUEST_METHOD": "POST", "wsgi.input": payload, } response = self.client.request(**r) result = response.json() for name, _, expected in cases: got = result[name] self.assertEqual(expected, got, "Mismatch for {}".format(name)) self.assertLess( len(got), 256, "Got a long file name (%s characters)." 
% len(got) ) def test_file_content(self): file = tempfile.NamedTemporaryFile with ( file(suffix=".ctype_extra") as no_content_type, file(suffix=".ctype_extra") as simple_file, ): no_content_type.write(b"no content") no_content_type.seek(0) simple_file.write(b"text content") simple_file.seek(0) simple_file.content_type = "text/plain" string_io = StringIO("string content") bytes_io = BytesIO(b"binary content") response = self.client.post( "/echo_content/", { "no_content_type": no_content_type, "simple_file": simple_file, "string": string_io, "binary": bytes_io, }, ) received = response.json() self.assertEqual(received["no_content_type"], "no content") self.assertEqual(received["simple_file"], "text content") self.assertEqual(received["string"], "string content") self.assertEqual(received["binary"], "binary content") def test_content_type_extra(self): """Uploaded files may have content type parameters available.""" file = tempfile.NamedTemporaryFile with ( file(suffix=".ctype_extra") as no_content_type, file(suffix=".ctype_extra") as simple_file, ): no_content_type.write(b"something") no_content_type.seek(0) simple_file.write(b"something") simple_file.seek(0) simple_file.content_type = "text/plain; test-key=test_value" response = self.client.post( "/echo_content_type_extra/", { "no_content_type": no_content_type, "simple_file": simple_file, }, ) received = response.json() self.assertEqual(received["no_content_type"], {}) self.assertEqual(received["simple_file"], {"test-key": "test_value"}) def test_truncated_multipart_handled_gracefully(self): """ If passed an incomplete multipart message, MultiPartParser does not attempt to read beyond the end of the stream, and simply will handle the part that can be parsed gracefully. 
""" payload_str = "\r\n".join( [ "--" + client.BOUNDARY, 'Content-Disposition: form-data; name="file"; filename="foo.txt"', "Content-Type: application/octet-stream", "", "file contents" "--" + client.BOUNDARY + "--", "", ] ) payload = client.FakePayload(payload_str[:-10]) r = { "CONTENT_LENGTH": len(payload), "CONTENT_TYPE": client.MULTIPART_CONTENT, "PATH_INFO": "/echo/", "REQUEST_METHOD": "POST", "wsgi.input": payload, } self.assertEqual(self.client.request(**r).json(), {}) def test_empty_multipart_handled_gracefully(self): """ If passed an empty multipart message, MultiPartParser will return an empty QueryDict. """ r = { "CONTENT_LENGTH": 0, "CONTENT_TYPE": client.MULTIPART_CONTENT, "PATH_INFO": "/echo/", "REQUEST_METHOD": "POST", "wsgi.input": client.FakePayload(b""), } self.assertEqual(self.client.request(**r).json(), {}) def test_custom_upload_handler(self): file = tempfile.NamedTemporaryFile with file() as smallfile, file() as bigfile: # A small file (under the 5M quota) smallfile.write(b"a" * (2**21)) smallfile.seek(0) # A big file (over the quota) bigfile.write(b"a" * (10 * 2**20)) bigfile.seek(0) # Small file posting should work. self.assertIn("f", self.client.post("/quota/", {"f": smallfile}).json()) # Large files don't go through. self.assertNotIn("f", self.client.post("/quota/", {"f": bigfile}).json()) def test_broken_custom_upload_handler(self): with tempfile.NamedTemporaryFile() as file: file.write(b"a" * (2**21)) file.seek(0) msg = ( "You cannot alter upload handlers after the upload has been processed." 
) with self.assertRaisesMessage(AttributeError, msg): self.client.post("/quota/broken/", {"f": file}) def test_stop_upload_temporary_file_handler(self): with tempfile.NamedTemporaryFile() as temp_file: temp_file.write(b"a") temp_file.seek(0) response = self.client.post("/temp_file/stop_upload/", {"file": temp_file}) temp_path = response.json()["temp_path"] self.assertIs(os.path.exists(temp_path), False) def test_upload_interrupted_temporary_file_handler(self): # Simulate an interrupted upload by omitting the closing boundary. class MockedParser(Parser): def __iter__(self): for item in super().__iter__(): item_type, meta_data, field_stream = item yield item_type, meta_data, field_stream if item_type == FILE: return with tempfile.NamedTemporaryFile() as temp_file: temp_file.write(b"a") temp_file.seek(0) with mock.patch( "django.http.multipartparser.Parser", MockedParser, ): response = self.client.post( "/temp_file/upload_interrupted/", {"file": temp_file}, ) temp_path = response.json()["temp_path"] self.assertIs(os.path.exists(temp_path), False) def test_upload_large_header_fields(self): payload = client.FakePayload( "\r\n".join( [ "--" + client.BOUNDARY, 'Content-Disposition: form-data; name="my_file"; ' 'filename="test.txt"', "Content-Type: text/plain", "X-Long-Header: %s" % ("-" * 500), "", "file contents", "--" + client.BOUNDARY + "--\r\n", ] ), ) r = { "CONTENT_LENGTH": len(payload), "CONTENT_TYPE": client.MULTIPART_CONTENT, "PATH_INFO": "/echo_content/", "REQUEST_METHOD": "POST", "wsgi.input": payload, } response = self.client.request(**r) self.assertEqual(response.status_code, 200) self.assertEqual(response.json(), {"my_file": "file contents"}) def test_upload_header_fields_too_large(self): payload = client.FakePayload( "\r\n".join( [ "--" + client.BOUNDARY, 'Content-Disposition: form-data; name="my_file"; ' 'filename="test.txt"', "Content-Type: text/plain", "X-Long-Header: %s" % ("-" * (MAX_TOTAL_HEADER_SIZE + 1)), "", "file contents", "--" + client.BOUNDARY 
+ "--\r\n", ] ), ) r = { "CONTENT_LENGTH": len(payload), "CONTENT_TYPE": client.MULTIPART_CONTENT, "PATH_INFO": "/echo_content/", "REQUEST_METHOD": "POST", "wsgi.input": payload, } response = self.client.request(**r) self.assertEqual(response.status_code, 400) def test_fileupload_getlist(self): file = tempfile.NamedTemporaryFile with file() as file1, file() as file2, file() as file2a: file1.write(b"a" * (2**23)) file1.seek(0) file2.write(b"a" * (2 * 2**18)) file2.seek(0) file2a.write(b"a" * (5 * 2**20)) file2a.seek(0) response = self.client.post( "/getlist_count/", { "file1": file1, "field1": "test", "field2": "test3", "field3": "test5", "field4": "test6", "field5": "test7", "file2": (file2, file2a), }, ) got = response.json() self.assertEqual(got.get("file1"), 1) self.assertEqual(got.get("file2"), 2) def test_fileuploads_closed_at_request_end(self): file = tempfile.NamedTemporaryFile with file() as f1, file() as f2a, file() as f2b: response = self.client.post( "/fd_closing/t/", { "file": f1, "file2": (f2a, f2b), }, ) request = response.wsgi_request # The files were parsed. self.assertTrue(hasattr(request, "_files")) file = request._files["file"] self.assertTrue(file.closed) files = request._files.getlist("file2") self.assertTrue(files[0].closed) self.assertTrue(files[1].closed) def test_no_parsing_triggered_by_fd_closing(self): file = tempfile.NamedTemporaryFile with file() as f1, file() as f2a, file() as f2b: response = self.client.post( "/fd_closing/f/", { "file": f1, "file2": (f2a, f2b), }, ) request = response.wsgi_request # The fd closing logic doesn't trigger parsing of the stream self.assertFalse(hasattr(request, "_files")) def test_file_error_blocking(self): """ The server should not block when there are upload errors (bug #8622). This can happen if something -- i.e. an exception handler -- tries to access POST while handling an error in parsing POST. This shouldn't cause an infinite loop! 
""" class POSTAccessingHandler(client.ClientHandler): """A handler that'll access POST during an exception.""" def handle_uncaught_exception(self, request, resolver, exc_info): ret = super().handle_uncaught_exception(request, resolver, exc_info) request.POST # evaluate return ret # Maybe this is a little more complicated that it needs to be; but if # the django.test.client.FakePayload.read() implementation changes then # this test would fail. So we need to know exactly what kind of error # it raises when there is an attempt to read more than the available # bytes: try: client.FakePayload(b"a").read(2) except Exception as err: reference_error = err # install the custom handler that tries to access request.POST self.client.handler = POSTAccessingHandler() with open(__file__, "rb") as fp: post_data = { "name": "Ringo", "file_field": fp, } try: self.client.post("/upload_errors/", post_data) except reference_error.__class__ as err: self.assertNotEqual( str(err), str(reference_error), "Caught a repeated exception that'll cause an infinite loop in " "file uploads.", ) except Exception as err: # CustomUploadError is the error that should have been raised self.assertEqual(err.__class__, uploadhandler.CustomUploadError) def test_filename_case_preservation(self): """ The storage backend shouldn't mess with the case of the filenames uploaded. """ # Synthesize the contents of a file upload with a mixed case filename # so we don't have to carry such a file in the Django tests source code # tree. 
vars = {"boundary": "oUrBoUnDaRyStRiNg"} post_data = [ "--%(boundary)s", 'Content-Disposition: form-data; name="file_field"; ' 'filename="MiXeD_cAsE.txt"', "Content-Type: application/octet-stream", "", "file contents\n", "--%(boundary)s--\r\n", ] response = self.client.post( "/filename_case/", "\r\n".join(post_data) % vars, "multipart/form-data; boundary=%(boundary)s" % vars, ) self.assertEqual(response.status_code, 200) id = int(response.content) obj = FileModel.objects.get(pk=id) # The name of the file uploaded and the file stored in the server-side # shouldn't differ. self.assertEqual(os.path.basename(obj.testfile.path), "MiXeD_cAsE.txt") def test_filename_traversal_upload(self): os.makedirs(UPLOAD_TO, exist_ok=True) tests = [ "..&#x2F;test.txt", "..&sol;test.txt", ] for file_name in tests: with self.subTest(file_name=file_name): payload = client.FakePayload() payload.write( "\r\n".join( [ "--" + client.BOUNDARY, 'Content-Disposition: form-data; name="my_file"; ' 'filename="%s";' % file_name, "Content-Type: text/plain", "", "file contents.\r\n", "\r\n--" + client.BOUNDARY + "--\r\n", ] ), ) r = { "CONTENT_LENGTH": len(payload), "CONTENT_TYPE": client.MULTIPART_CONTENT, "PATH_INFO": "/upload_traversal/", "REQUEST_METHOD": "POST", "wsgi.input": payload, } response = self.client.request(**r) result = response.json() self.assertEqual(response.status_code, 200) self.assertEqual(result["file_name"], "test.txt") self.assertIs( os.path.exists(os.path.join(MEDIA_ROOT, "test.txt")), False, ) self.assertIs( os.path.exists(os.path.join(UPLOAD_TO, "test.txt")), True, ) @override_settings(MEDIA_ROOT=MEDIA_ROOT) class DirectoryCreationTests(SimpleTestCase): """ Tests for error handling during directory creation via _save_FIELD_file (ticket #6450) """ @classmethod def setUpClass(cls): super().setUpClass() os.makedirs(MEDIA_ROOT, exist_ok=True) cls.addClassCleanup(shutil.rmtree, MEDIA_ROOT) def setUp(self): self.obj = FileModel() @unittest.skipIf( sys.platform == "win32", 
"Python on Windows doesn't have working os.chmod()." ) @override_settings( STORAGES={ DEFAULT_STORAGE_ALIAS: { "BACKEND": "django.core.files.storage.FileSystemStorage", } } ) def test_readonly_root(self): """Permission errors are not swallowed""" os.chmod(MEDIA_ROOT, 0o500) self.addCleanup(os.chmod, MEDIA_ROOT, 0o700) with self.assertRaises(PermissionError): self.obj.testfile.save( "foo.txt", SimpleUploadedFile("foo.txt", b"x"), save=False ) def test_not_a_directory(self): default_storage.delete(UPLOAD_TO) # Create a file with the upload directory name with SimpleUploadedFile(UPLOAD_TO, b"x") as file:
python
BSD-3-Clause
3201a895cba335000827b28768a7b7105c81b415
2026-01-04T14:38:15.489092Z
true
django/django
https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/file_uploads/uploadhandler.py
tests/file_uploads/uploadhandler.py
""" Upload handlers to test the upload API. """ import os from tempfile import NamedTemporaryFile from django.core.files.uploadhandler import ( FileUploadHandler, StopUpload, TemporaryFileUploadHandler, ) class QuotaUploadHandler(FileUploadHandler): """ This test upload handler terminates the connection if more than a quota (5MB) is uploaded. """ QUOTA = 5 * 2**20 # 5 MB def __init__(self, request=None): super().__init__(request) self.total_upload = 0 def receive_data_chunk(self, raw_data, start): self.total_upload += len(raw_data) if self.total_upload >= self.QUOTA: raise StopUpload(connection_reset=True) return raw_data def file_complete(self, file_size): return None class StopUploadTemporaryFileHandler(TemporaryFileUploadHandler): """A handler that raises a StopUpload exception.""" def receive_data_chunk(self, raw_data, start): raise StopUpload() class CustomUploadError(Exception): pass class ErroringUploadHandler(FileUploadHandler): """A handler that raises an exception.""" def receive_data_chunk(self, raw_data, start): raise CustomUploadError("Oops!") class TraversalUploadHandler(FileUploadHandler): """A handler with potential directory-traversal vulnerability.""" def __init__(self, request=None): from .tests import UPLOAD_TO super().__init__(request) self.upload_dir = UPLOAD_TO def file_complete(self, file_size): self.file.seek(0) self.file.size = file_size with open(os.path.join(self.upload_dir, self.file_name), "wb") as fp: fp.write(self.file.read()) return self.file def new_file( self, field_name, file_name, content_type, content_length, charset=None, content_type_extra=None, ): super().new_file( file_name, file_name, content_length, content_length, charset, content_type_extra, ) self.file = NamedTemporaryFile(suffix=".upload", dir=self.upload_dir) def receive_data_chunk(self, raw_data, start): self.file.write(raw_data)
python
BSD-3-Clause
3201a895cba335000827b28768a7b7105c81b415
2026-01-04T14:38:15.489092Z
false
django/django
https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/file_uploads/urls.py
tests/file_uploads/urls.py
from django.urls import path, re_path from . import views urlpatterns = [ path("upload/", views.file_upload_view), path("upload_traversal/", views.file_upload_traversal_view), path("verify/", views.file_upload_view_verify), path("unicode_name/", views.file_upload_unicode_name), path("echo/", views.file_upload_echo), path("echo_content_type_extra/", views.file_upload_content_type_extra), path("echo_content/", views.file_upload_echo_content), path("quota/", views.file_upload_quota), path("quota/broken/", views.file_upload_quota_broken), path("getlist_count/", views.file_upload_getlist_count), path("upload_errors/", views.file_upload_errors), path("temp_file/stop_upload/", views.file_stop_upload_temporary_file), path("temp_file/upload_interrupted/", views.file_upload_interrupted_temporary_file), path("filename_case/", views.file_upload_filename_case_view), re_path(r"^fd_closing/(?P<access>t|f)/$", views.file_upload_fd_closing), ]
python
BSD-3-Clause
3201a895cba335000827b28768a7b7105c81b415
2026-01-04T14:38:15.489092Z
false
django/django
https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/responses/test_fileresponse.py
tests/responses/test_fileresponse.py
"""Tests for FileResponse header generation and content iteration."""

import io
import itertools
import os
import sys
import tempfile
from unittest import skipIf

from django.core.files.base import ContentFile
from django.http import FileResponse
from django.test import SimpleTestCase


class UnseekableBytesIO(io.BytesIO):
    """A BytesIO that reports itself as unseekable (exercises the
    non-seekable code path in FileResponse)."""

    def seekable(self):
        return False


class FileResponseTests(SimpleTestCase):
    def test_content_length_file(self):
        """Content-Length of a file response equals the file size."""
        response = FileResponse(open(__file__, "rb"))
        response.close()
        self.assertEqual(
            response.headers["Content-Length"], str(os.path.getsize(__file__))
        )

    def test_content_length_buffer(self):
        """Content-Length of a buffer response equals the buffer size."""
        response = FileResponse(io.BytesIO(b"binary content"))
        self.assertEqual(response.headers["Content-Length"], "14")

    def test_content_length_nonzero_starting_position_file(self):
        # Only the bytes after the current file position are counted.
        file = open(__file__, "rb")
        file.seek(10)
        response = FileResponse(file)
        response.close()
        self.assertEqual(
            response.headers["Content-Length"], str(os.path.getsize(__file__) - 10)
        )

    def test_content_length_nonzero_starting_position_buffer(self):
        test_tuples = (
            ("BytesIO", io.BytesIO),
            ("UnseekableBytesIO", UnseekableBytesIO),
        )
        for buffer_class_name, BufferClass in test_tuples:
            with self.subTest(buffer_class_name=buffer_class_name):
                buffer = BufferClass(b"binary content")
                buffer.seek(10)
                response = FileResponse(buffer)
                # 14 bytes total, 10 already consumed.
                self.assertEqual(response.headers["Content-Length"], "4")

    def test_content_length_nonzero_starting_position_file_seekable_no_tell(self):
        # A seekable file object without a tell() method: FileResponse must
        # still compute the remaining length.
        class TestFile:
            def __init__(self, path, *args, **kwargs):
                self._file = open(path, *args, **kwargs)

            def read(self, n_bytes=-1):
                return self._file.read(n_bytes)

            def seek(self, offset, whence=io.SEEK_SET):
                return self._file.seek(offset, whence)

            def seekable(self):
                return True

            @property
            def name(self):
                return self._file.name

            def close(self):
                if self._file:
                    self._file.close()
                    self._file = None

            def __enter__(self):
                return self

            def __exit__(self, e_type, e_val, e_tb):
                self.close()

        file = TestFile(__file__, "rb")
        file.seek(10)
        response = FileResponse(file)
        response.close()
        self.assertEqual(
            response.headers["Content-Length"], str(os.path.getsize(__file__) - 10)
        )

    def test_content_type_file(self):
        """Content-Type is guessed from the file name."""
        response = FileResponse(open(__file__, "rb"))
        response.close()
        self.assertIn(response.headers["Content-Type"], ["text/x-python", "text/plain"])

    def test_content_type_buffer(self):
        """An anonymous buffer falls back to application/octet-stream."""
        response = FileResponse(io.BytesIO(b"binary content"))
        self.assertEqual(response.headers["Content-Type"], "application/octet-stream")

    def test_content_type_buffer_explicit(self):
        response = FileResponse(
            io.BytesIO(b"binary content"), content_type="video/webm"
        )
        self.assertEqual(response.headers["Content-Type"], "video/webm")

    def test_content_type_buffer_explicit_default(self):
        response = FileResponse(
            io.BytesIO(b"binary content"), content_type="text/html; charset=utf-8"
        )
        self.assertEqual(response.headers["Content-Type"], "text/html; charset=utf-8")

    def test_content_type_buffer_named(self):
        # A buffer with a .name attribute gets its type guessed from it.
        test_tuples = (
            (__file__, ["text/x-python", "text/plain"]),
            (__file__ + "nosuchfile", ["application/octet-stream"]),
            ("test_fileresponse.py", ["text/x-python", "text/plain"]),
            ("test_fileresponse.pynosuchfile", ["application/octet-stream"]),
        )
        for filename, content_types in test_tuples:
            with self.subTest(filename=filename):
                buffer = io.BytesIO(b"binary content")
                buffer.name = filename
                response = FileResponse(buffer)
                self.assertIn(response.headers["Content-Type"], content_types)

    def test_content_disposition_file(self):
        # Cross product of (explicit filename or not) x (inline/attachment).
        filenames = (
            ("", "test_fileresponse.py"),
            ("custom_name.py", "custom_name.py"),
        )
        dispositions = (
            (False, "inline"),
            (True, "attachment"),
        )
        for (filename, header_filename), (
            as_attachment,
            header_disposition,
        ) in itertools.product(filenames, dispositions):
            with self.subTest(filename=filename, disposition=header_disposition):
                response = FileResponse(
                    open(__file__, "rb"), filename=filename, as_attachment=as_attachment
                )
                response.close()
                self.assertEqual(
                    response.headers["Content-Disposition"],
                    '%s; filename="%s"' % (header_disposition, header_filename),
                )

    def test_content_disposition_escaping(self):
        """Quotes and backslashes in the filename are escaped in the
        Content-Disposition header."""
        # fmt: off
        tests = [
            (
                'multi-part-one";\" dummy".txt',
                r"multi-part-one\";\" dummy\".txt"
            ),
        ]
        # fmt: on
        # Non-escape sequence backslashes are path segments on Windows, and are
        # eliminated by an os.path.basename() check in FileResponse.
        if sys.platform != "win32":
            # fmt: off
            tests += [
                (
                    'multi-part-one\\";\" dummy".txt',
                    r"multi-part-one\\\";\" dummy\".txt"
                ),
                (
                    'multi-part-one\\";\\\" dummy".txt',
                    r"multi-part-one\\\";\\\" dummy\".txt"
                )
            ]
            # fmt: on
        for filename, escaped in tests:
            with self.subTest(filename=filename, escaped=escaped):
                response = FileResponse(
                    io.BytesIO(b"binary content"), filename=filename, as_attachment=True
                )
                response.close()
                self.assertEqual(
                    response.headers["Content-Disposition"],
                    f'attachment; filename="{escaped}"',
                )

    def test_content_disposition_buffer(self):
        # No filename and not an attachment: no Content-Disposition at all.
        response = FileResponse(io.BytesIO(b"binary content"))
        self.assertFalse(response.has_header("Content-Disposition"))

    def test_content_disposition_buffer_attachment(self):
        response = FileResponse(io.BytesIO(b"binary content"), as_attachment=True)
        self.assertEqual(response.headers["Content-Disposition"], "attachment")

    def test_content_disposition_buffer_explicit_filename(self):
        dispositions = (
            (False, "inline"),
            (True, "attachment"),
        )
        for as_attachment, header_disposition in dispositions:
            response = FileResponse(
                io.BytesIO(b"binary content"),
                as_attachment=as_attachment,
                filename="custom_name.py",
            )
            self.assertEqual(
                response.headers["Content-Disposition"],
                '%s; filename="custom_name.py"' % header_disposition,
            )

    def test_response_buffer(self):
        """Iterating the response yields the buffer contents."""
        response = FileResponse(io.BytesIO(b"binary content"))
        self.assertEqual(list(response), [b"binary content"])

    def test_response_nonzero_starting_position(self):
        # Only bytes after the current position are streamed.
        test_tuples = (
            ("BytesIO", io.BytesIO),
            ("UnseekableBytesIO", UnseekableBytesIO),
        )
        for buffer_class_name, BufferClass in test_tuples:
            with self.subTest(buffer_class_name=buffer_class_name):
                buffer = BufferClass(b"binary content")
                buffer.seek(10)
                response = FileResponse(buffer)
                self.assertEqual(list(response), [b"tent"])

    def test_buffer_explicit_absolute_filename(self):
        """
        Headers are set correctly with a buffer when an absolute filename is
        provided.
        """
        response = FileResponse(io.BytesIO(b"binary content"), filename=__file__)
        self.assertEqual(response.headers["Content-Length"], "14")
        self.assertEqual(
            response.headers["Content-Disposition"],
            'inline; filename="test_fileresponse.py"',
        )

    @skipIf(sys.platform == "win32", "Named pipes are Unix-only.")
    def test_file_from_named_pipe_response(self):
        with tempfile.TemporaryDirectory() as temp_dir:
            pipe_file = os.path.join(temp_dir, "named_pipe")
            os.mkfifo(pipe_file)
            pipe_for_read = os.open(pipe_file, os.O_RDONLY | os.O_NONBLOCK)
            with open(pipe_file, "wb") as pipe_for_write:
                pipe_for_write.write(b"binary content")
            response = FileResponse(os.fdopen(pipe_for_read, mode="rb"))
            response_content = list(response)
            response.close()
            self.assertEqual(response_content, [b"binary content"])
            # A pipe has no determinable size, so no Content-Length is set.
            self.assertFalse(response.has_header("Content-Length"))

    def test_compressed_response(self):
        """
        If compressed responses are served with the uncompressed Content-Type
        and a compression Content-Encoding, browsers might automatically
        uncompress the file, which is most probably not wanted.
        """
        test_tuples = (
            (".tar.gz", "application/gzip"),
            (".tar.br", "application/x-brotli"),
            (".tar.bz2", "application/x-bzip"),
            (".tar.xz", "application/x-xz"),
            (".tar.Z", "application/x-compress"),
        )
        for extension, mimetype in test_tuples:
            with self.subTest(ext=extension):
                with tempfile.NamedTemporaryFile(suffix=extension) as tmp:
                    response = FileResponse(tmp)
                self.assertEqual(response.headers["Content-Type"], mimetype)
                self.assertFalse(response.has_header("Content-Encoding"))

    def test_unicode_attachment(self):
        """Non-ASCII filenames are emitted via the RFC 5987 filename* form."""
        response = FileResponse(
            ContentFile(b"binary content", name="祝您平安.odt"),
            as_attachment=True,
            content_type="application/vnd.oasis.opendocument.text",
        )
        self.assertEqual(
            response.headers["Content-Type"],
            "application/vnd.oasis.opendocument.text",
        )
        self.assertEqual(
            response.headers["Content-Disposition"],
            "attachment; filename*=utf-8''%E7%A5%9D%E6%82%A8%E5%B9%B3%E5%AE%89.odt",
        )

    def test_repr(self):
        response = FileResponse(io.BytesIO(b"binary content"))
        self.assertEqual(
            repr(response),
            '<FileResponse status_code=200, "application/octet-stream">',
        )
python
BSD-3-Clause
3201a895cba335000827b28768a7b7105c81b415
2026-01-04T14:38:15.489092Z
false
django/django
https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/responses/test_cookie.py
tests/responses/test_cookie.py
"""Tests for HttpResponse.set_cookie() and delete_cookie()."""

import time
from datetime import UTC, date, datetime, timedelta
from email.utils import format_datetime as format_datetime_rfc5322
from http import cookies

from django.http import HttpResponse
from django.test import SimpleTestCase
from django.test.utils import freeze_time
from django.utils.http import http_date


class SetCookieTests(SimpleTestCase):
    def test_near_expiration(self):
        """Cookie will expire when a near expiration time is provided."""
        response = HttpResponse()
        # There's a timing weakness in this test; The expected result for
        # max-age requires that there be a very slight difference between the
        # evaluated expiration time and the time evaluated in set_cookie(). If
        # this difference doesn't exist, the cookie time will be 1 second
        # larger. The sleep guarantees that there will be a time difference.
        expires = datetime.now(tz=UTC).replace(tzinfo=None) + timedelta(seconds=10)
        time.sleep(0.001)
        response.set_cookie("datetime", expires=expires)
        datetime_cookie = response.cookies["datetime"]
        self.assertEqual(datetime_cookie["max-age"], 10)

    def test_aware_expiration(self):
        """set_cookie() accepts an aware datetime as expiration time."""
        response = HttpResponse()
        expires = datetime.now(tz=UTC) + timedelta(seconds=10)
        # See the timing note in test_near_expiration.
        time.sleep(0.001)
        response.set_cookie("datetime", expires=expires)
        datetime_cookie = response.cookies["datetime"]
        self.assertEqual(datetime_cookie["max-age"], 10)

    def test_create_cookie_after_deleting_cookie(self):
        """Setting a cookie after deletion clears the expiry date."""
        response = HttpResponse()
        response.set_cookie("c", "old-value")
        self.assertEqual(response.cookies["c"]["expires"], "")
        response.delete_cookie("c")
        self.assertEqual(
            response.cookies["c"]["expires"], "Thu, 01 Jan 1970 00:00:00 GMT"
        )
        response.set_cookie("c", "new-value")
        self.assertEqual(response.cookies["c"]["expires"], "")

    def test_far_expiration(self):
        """Cookie will expire when a distant expiration time is provided."""
        response = HttpResponse()
        future_datetime = datetime(date.today().year + 2, 1, 1, 4, 5, 6, tzinfo=UTC)
        response.set_cookie("datetime", expires=future_datetime)
        datetime_cookie = response.cookies["datetime"]
        self.assertIn(
            datetime_cookie["expires"],
            # assertIn accounts for slight time dependency (#23450)
            (
                format_datetime_rfc5322(future_datetime, usegmt=True),
                format_datetime_rfc5322(future_datetime.replace(second=7), usegmt=True),
            ),
        )

    def test_max_age_expiration(self):
        """Cookie will expire if max_age is provided."""
        response = HttpResponse()
        set_cookie_time = time.time()
        with freeze_time(set_cookie_time):
            response.set_cookie("max_age", max_age=10)
        max_age_cookie = response.cookies["max_age"]
        self.assertEqual(max_age_cookie["max-age"], 10)
        self.assertEqual(max_age_cookie["expires"], http_date(set_cookie_time + 10))

    def test_max_age_int(self):
        """A float max_age is truncated to an int."""
        response = HttpResponse()
        response.set_cookie("max_age", max_age=10.6)
        self.assertEqual(response.cookies["max_age"]["max-age"], 10)

    def test_max_age_timedelta(self):
        """A timedelta max_age is converted to whole seconds."""
        response = HttpResponse()
        response.set_cookie("max_age", max_age=timedelta(hours=1))
        self.assertEqual(response.cookies["max_age"]["max-age"], 3600)

    def test_max_age_with_expires(self):
        """Passing both expires and max_age is rejected."""
        response = HttpResponse()
        msg = "'expires' and 'max_age' can't be used together."
        with self.assertRaisesMessage(ValueError, msg):
            response.set_cookie(
                "max_age", expires=datetime(2000, 1, 1), max_age=timedelta(hours=1)
            )

    def test_httponly_cookie(self):
        response = HttpResponse()
        response.set_cookie("example", httponly=True)
        example_cookie = response.cookies["example"]
        self.assertIn(
            "; %s" % cookies.Morsel._reserved["httponly"], str(example_cookie)
        )
        self.assertIs(example_cookie["httponly"], True)

    def test_unicode_cookie(self):
        """HttpResponse.set_cookie() works with Unicode data."""
        response = HttpResponse()
        cookie_value = "清風"
        response.set_cookie("test", cookie_value)
        self.assertEqual(response.cookies["test"].value, cookie_value)

    def test_samesite(self):
        # The samesite value is stored as given, including its case.
        response = HttpResponse()
        response.set_cookie("example", samesite="None")
        self.assertEqual(response.cookies["example"]["samesite"], "None")
        response.set_cookie("example", samesite="Lax")
        self.assertEqual(response.cookies["example"]["samesite"], "Lax")
        response.set_cookie("example", samesite="strict")
        self.assertEqual(response.cookies["example"]["samesite"], "strict")

    def test_invalid_samesite(self):
        msg = 'samesite must be "lax", "none", or "strict".'
        with self.assertRaisesMessage(ValueError, msg):
            HttpResponse().set_cookie("example", samesite="invalid")


class DeleteCookieTests(SimpleTestCase):
    def test_default(self):
        """delete_cookie() emits an already-expired cookie with max-age 0."""
        response = HttpResponse()
        response.delete_cookie("c")
        cookie = response.cookies["c"]
        self.assertEqual(cookie["expires"], "Thu, 01 Jan 1970 00:00:00 GMT")
        self.assertEqual(cookie["max-age"], 0)
        self.assertEqual(cookie["path"], "/")
        self.assertEqual(cookie["secure"], "")
        self.assertEqual(cookie["domain"], "")
        self.assertEqual(cookie["samesite"], "")

    def test_delete_cookie_secure_prefix(self):
        """
        delete_cookie() sets the secure flag if the cookie name starts with
        __Host- or __Secure- (without that, browsers ignore cookies with
        those prefixes).
        """
        response = HttpResponse()
        for prefix in ("Secure", "Host"):
            with self.subTest(prefix=prefix):
                cookie_name = "__%s-c" % prefix
                response.delete_cookie(cookie_name)
                self.assertIs(response.cookies[cookie_name]["secure"], True)

    def test_delete_cookie_secure_samesite_none(self):
        # delete_cookie() sets the secure flag if samesite='none'.
        response = HttpResponse()
        response.delete_cookie("c", samesite="none")
        self.assertIs(response.cookies["c"]["secure"], True)

    def test_delete_cookie_samesite(self):
        response = HttpResponse()
        response.delete_cookie("c", samesite="lax")
        self.assertEqual(response.cookies["c"]["samesite"], "lax")
python
BSD-3-Clause
3201a895cba335000827b28768a7b7105c81b415
2026-01-04T14:38:15.489092Z
false
django/django
https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/responses/__init__.py
tests/responses/__init__.py
python
BSD-3-Clause
3201a895cba335000827b28768a7b7105c81b415
2026-01-04T14:38:15.489092Z
false
django/django
https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/responses/tests.py
tests/responses/tests.py
"""Tests for HttpResponseBase and HttpResponse behavior."""

import io

from django.conf import settings
from django.core.cache import cache
from django.http import HttpResponse
from django.http.response import HttpResponseBase
from django.test import SimpleTestCase

UTF8 = "utf-8"
ISO88591 = "iso-8859-1"


class HttpResponseBaseTests(SimpleTestCase):
    def test_closed(self):
        """close() flips the closed flag."""
        r = HttpResponseBase()
        self.assertIs(r.closed, False)
        r.close()
        self.assertIs(r.closed, True)

    def test_write(self):
        """The base response is not writable; write/writelines raise."""
        r = HttpResponseBase()
        self.assertIs(r.writable(), False)
        with self.assertRaisesMessage(
            OSError, "This HttpResponseBase instance is not writable"
        ):
            r.write("asdf")
        with self.assertRaisesMessage(
            OSError, "This HttpResponseBase instance is not writable"
        ):
            r.writelines(["asdf\n", "qwer\n"])

    def test_tell(self):
        r = HttpResponseBase()
        with self.assertRaisesMessage(
            OSError, "This HttpResponseBase instance cannot tell its position"
        ):
            r.tell()

    def test_setdefault(self):
        """
        HttpResponseBase.setdefault() should not change an existing header
        and should be case insensitive.
        """
        r = HttpResponseBase()
        r.headers["Header"] = "Value"
        r.setdefault("header", "changed")
        self.assertEqual(r.headers["header"], "Value")
        r.setdefault("x-header", "DefaultValue")
        self.assertEqual(r.headers["X-Header"], "DefaultValue")

    def test_charset_setter(self):
        r = HttpResponseBase()
        r.charset = "utf-8"
        self.assertEqual(r.charset, "utf-8")

    def test_reason_phrase_setter(self):
        r = HttpResponseBase()
        r.reason_phrase = "test"
        self.assertEqual(r.reason_phrase, "test")


class HttpResponseTests(SimpleTestCase):
    def test_status_code(self):
        """A status passed at construction derives the reason phrase."""
        resp = HttpResponse(status=503)
        self.assertEqual(resp.status_code, 503)
        self.assertEqual(resp.reason_phrase, "Service Unavailable")

    def test_change_status_code(self):
        """Assigning status_code after construction updates the phrase."""
        resp = HttpResponse()
        resp.status_code = 503
        self.assertEqual(resp.status_code, 503)
        self.assertEqual(resp.reason_phrase, "Service Unavailable")

    def test_valid_status_code_string(self):
        # Numeric strings are accepted across the valid 100-599 range.
        resp = HttpResponse(status="100")
        self.assertEqual(resp.status_code, 100)
        resp = HttpResponse(status="404")
        self.assertEqual(resp.status_code, 404)
        resp = HttpResponse(status="599")
        self.assertEqual(resp.status_code, 599)

    def test_invalid_status_code(self):
        must_be_integer = "HTTP status code must be an integer."
        must_be_integer_in_range = (
            "HTTP status code must be an integer from 100 to 599."
        )
        with self.assertRaisesMessage(TypeError, must_be_integer):
            HttpResponse(status=object())
        with self.assertRaisesMessage(TypeError, must_be_integer):
            HttpResponse(status="J'attendrai")
        with self.assertRaisesMessage(ValueError, must_be_integer_in_range):
            HttpResponse(status=99)
        with self.assertRaisesMessage(ValueError, must_be_integer_in_range):
            HttpResponse(status=600)

    def test_reason_phrase(self):
        """An explicit reason overrides the standard phrase."""
        reason = "I'm an anarchist coffee pot on crack."
        resp = HttpResponse(status=419, reason=reason)
        self.assertEqual(resp.status_code, 419)
        self.assertEqual(resp.reason_phrase, reason)

    def test_charset_detection(self):
        """HttpResponse should parse charset from content_type."""
        response = HttpResponse("ok")
        self.assertEqual(response.charset, settings.DEFAULT_CHARSET)

        response = HttpResponse(charset=ISO88591)
        self.assertEqual(response.charset, ISO88591)
        self.assertEqual(
            response.headers["Content-Type"], "text/html; charset=%s" % ISO88591
        )

        # An explicit charset argument wins over the content_type's charset.
        response = HttpResponse(
            content_type="text/plain; charset=%s" % UTF8, charset=ISO88591
        )
        self.assertEqual(response.charset, ISO88591)

        response = HttpResponse(content_type="text/plain; charset=%s" % ISO88591)
        self.assertEqual(response.charset, ISO88591)

        # A quoted charset value is also parsed.
        response = HttpResponse(content_type='text/plain; charset="%s"' % ISO88591)
        self.assertEqual(response.charset, ISO88591)

        # An empty or missing charset falls back to the default.
        response = HttpResponse(content_type="text/plain; charset=")
        self.assertEqual(response.charset, settings.DEFAULT_CHARSET)

        response = HttpResponse(content_type="text/plain")
        self.assertEqual(response.charset, settings.DEFAULT_CHARSET)

    def test_response_content_charset(self):
        """HttpResponse should encode based on charset."""
        content = "Café :)"
        utf8_content = content.encode(UTF8)
        iso_content = content.encode(ISO88591)

        response = HttpResponse(utf8_content)
        self.assertContains(response, utf8_content)

        response = HttpResponse(
            iso_content, content_type="text/plain; charset=%s" % ISO88591
        )
        self.assertContains(response, iso_content)

        response = HttpResponse(iso_content)
        self.assertContains(response, iso_content)

        response = HttpResponse(iso_content, content_type="text/plain")
        self.assertContains(response, iso_content)

    def test_repr(self):
        response = HttpResponse(content="Café :)".encode(UTF8), status=201)
        expected = '<HttpResponse status_code=201, "text/html; charset=utf-8">'
        self.assertEqual(repr(response), expected)

    def test_repr_no_content_type(self):
        # Without a Content-Type header, repr() omits the type portion.
        response = HttpResponse(status=204)
        del response.headers["Content-Type"]
        self.assertEqual(repr(response), "<HttpResponse status_code=204>")

    def test_wrap_textiowrapper(self):
        """HttpResponse can back a TextIOWrapper; writes land in content."""
        content = "Café :)"
        r = HttpResponse()
        with io.TextIOWrapper(r, UTF8) as buf:
            buf.write(content)
        self.assertEqual(r.content, content.encode(UTF8))

    def test_generator_cache(self):
        generator = (str(i) for i in range(10))
        response = HttpResponse(content=generator)
        self.assertEqual(response.content, b"0123456789")
        # Accessing .content consumed the generator entirely...
        with self.assertRaises(StopIteration):
            next(generator)
        # ...so the response can be cached and re-read afterwards.
        cache.set("my-response-key", response)
        response = cache.get("my-response-key")
        self.assertEqual(response.content, b"0123456789")
python
BSD-3-Clause
3201a895cba335000827b28768a7b7105c81b415
2026-01-04T14:38:15.489092Z
false