repo stringlengths 7 90 | file_url stringlengths 81 315 | file_path stringlengths 4 228 | content stringlengths 0 32.8k | language stringclasses 1
value | license stringclasses 7
values | commit_sha stringlengths 40 40 | retrieved_at stringdate 2026-01-04 14:38:15 2026-01-05 02:33:18 | truncated bool 2
classes |
|---|---|---|---|---|---|---|---|---|
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/extra_regress/tests.py | tests/extra_regress/tests.py | import datetime
from django.contrib.auth.models import User
from django.test import TestCase
from .models import Order, RevisionableModel, TestObject
class ExtraRegressTests(TestCase):
@classmethod
def setUpTestData(cls):
cls.u = User.objects.create_user(
username="fred", password="secret", email="fred@example.com"
)
def test_regression_7314_7372(self):
"""
Regression tests for #7314 and #7372
"""
rm = RevisionableModel.objects.create(
title="First Revision", when=datetime.datetime(2008, 9, 28, 10, 30, 0)
)
self.assertEqual(rm.pk, rm.base.pk)
rm2 = rm.new_revision()
rm2.title = "Second Revision"
rm.when = datetime.datetime(2008, 9, 28, 14, 25, 0)
rm2.save()
self.assertEqual(rm2.title, "Second Revision")
self.assertEqual(rm2.base.title, "First Revision")
self.assertNotEqual(rm2.pk, rm.pk)
self.assertEqual(rm2.base.pk, rm.pk)
# Queryset to match most recent revision:
qs = RevisionableModel.objects.extra(
where=[
"%(table)s.id IN "
"(SELECT MAX(rev.id) FROM %(table)s rev GROUP BY rev.base_id)"
% {
"table": RevisionableModel._meta.db_table,
}
]
)
self.assertQuerySetEqual(
qs,
[("Second Revision", "First Revision")],
transform=lambda r: (r.title, r.base.title),
)
# Queryset to search for string in title:
qs2 = RevisionableModel.objects.filter(title__contains="Revision")
self.assertQuerySetEqual(
qs2,
[
("First Revision", "First Revision"),
("Second Revision", "First Revision"),
],
transform=lambda r: (r.title, r.base.title),
ordered=False,
)
# Following queryset should return the most recent revision:
self.assertQuerySetEqual(
qs & qs2,
[("Second Revision", "First Revision")],
transform=lambda r: (r.title, r.base.title),
ordered=False,
)
def test_extra_stay_tied(self):
# Extra select parameters should stay tied to their corresponding
# select portions. Applies when portions are updated or otherwise
# moved around.
qs = User.objects.extra(
select={"alpha": "%s", "beta": "2", "gamma": "%s"}, select_params=(1, 3)
)
qs = qs.extra(select={"beta": 4})
qs = qs.extra(select={"alpha": "%s"}, select_params=[5])
self.assertEqual(
list(qs.filter(id=self.u.id).values("alpha", "beta", "gamma")),
[{"alpha": 5, "beta": 4, "gamma": 3}],
)
def test_regression_7957(self):
"""
Regression test for #7957: Combining extra() calls should leave the
corresponding parameters associated with the right extra() bit. I.e.
internal dictionary must remain sorted.
"""
self.assertEqual(
(
User.objects.extra(select={"alpha": "%s"}, select_params=(1,))
.extra(select={"beta": "%s"}, select_params=(2,))[0]
.alpha
),
1,
)
self.assertEqual(
(
User.objects.extra(select={"beta": "%s"}, select_params=(1,))
.extra(select={"alpha": "%s"}, select_params=(2,))[0]
.alpha
),
2,
)
def test_regression_7961(self):
"""
Regression test for #7961: When not using a portion of an
extra(...) in a query, remove any corresponding parameters from the
query as well.
"""
self.assertEqual(
list(
User.objects.extra(select={"alpha": "%s"}, select_params=(-6,))
.filter(id=self.u.id)
.values_list("id", flat=True)
),
[self.u.id],
)
def test_regression_8063(self):
"""
Regression test for #8063: limiting a query shouldn't discard any
extra() bits.
"""
qs = User.objects.extra(where=["id=%s"], params=[self.u.id])
self.assertSequenceEqual(qs, [self.u])
self.assertSequenceEqual(qs[:1], [self.u])
def test_regression_8039(self):
"""
Regression test for #8039: Ordering sometimes removed relevant tables
from extra(). This test is the critical case: ordering uses a table,
but then removes the reference because of an optimization. The table
should still be present because of the extra() call.
"""
self.assertQuerySetEqual(
(
Order.objects.extra(
where=["username=%s"], params=["fred"], tables=["auth_user"]
).order_by("created_by")
),
[],
)
def test_regression_8819(self):
"""
Regression test for #8819: Fields in the extra(select=...) list
should be available to extra(order_by=...).
"""
self.assertSequenceEqual(
User.objects.filter(pk=self.u.id)
.extra(select={"extra_field": 1})
.distinct(),
[self.u],
)
self.assertSequenceEqual(
User.objects.filter(pk=self.u.id).extra(
select={"extra_field": 1}, order_by=["extra_field"]
),
[self.u],
)
self.assertSequenceEqual(
User.objects.filter(pk=self.u.id)
.extra(select={"extra_field": 1}, order_by=["extra_field"])
.distinct(),
[self.u],
)
def test_dates_query(self):
"""
When calling the dates() method on a queryset with extra selection
columns, we can (and should) ignore those columns. They don't change
the result and cause incorrect SQL to be produced otherwise.
"""
RevisionableModel.objects.create(
title="First Revision", when=datetime.datetime(2008, 9, 28, 10, 30, 0)
)
self.assertSequenceEqual(
RevisionableModel.objects.extra(select={"the_answer": "id"}).datetimes(
"when", "month"
),
[datetime.datetime(2008, 9, 1, 0, 0)],
)
def test_values_with_extra(self):
"""
Regression test for #10256... If there is a values() clause, Extra
columns are only returned if they are explicitly mentioned.
"""
obj = TestObject(first="first", second="second", third="third")
obj.save()
self.assertEqual(
list(
TestObject.objects.extra(
select={"foo": "first", "bar": "second", "whiz": "third"}
).values()
),
[
{
"bar": "second",
"third": "third",
"second": "second",
"whiz": "third",
"foo": "first",
"id": obj.pk,
"first": "first",
}
],
)
# Extra clauses after an empty values clause are still included
self.assertEqual(
list(
TestObject.objects.values().extra(
select={"foo": "first", "bar": "second", "whiz": "third"}
)
),
[
{
"bar": "second",
"third": "third",
"second": "second",
"whiz": "third",
"foo": "first",
"id": obj.pk,
"first": "first",
}
],
)
# Extra columns are ignored if not mentioned in the values() clause
self.assertEqual(
list(
TestObject.objects.extra(
select={"foo": "first", "bar": "second", "whiz": "third"}
).values("first", "second")
),
[{"second": "second", "first": "first"}],
)
# Extra columns after a non-empty values() clause are ignored
self.assertEqual(
list(
TestObject.objects.values("first", "second").extra(
select={"foo": "first", "bar": "second", "whiz": "third"}
)
),
[{"second": "second", "first": "first"}],
)
# Extra columns can be partially returned
self.assertEqual(
list(
TestObject.objects.extra(
select={"foo": "first", "bar": "second", "whiz": "third"}
).values("first", "second", "foo")
),
[{"second": "second", "foo": "first", "first": "first"}],
)
# Also works if only extra columns are included
self.assertEqual(
list(
TestObject.objects.extra(
select={"foo": "first", "bar": "second", "whiz": "third"}
).values("foo", "whiz")
),
[{"foo": "first", "whiz": "third"}],
)
# Values list works the same way
# All columns are returned for an empty values_list()
self.assertEqual(
list(
TestObject.objects.extra(
select={"foo": "first", "bar": "second", "whiz": "third"}
).values_list()
),
[("first", "second", "third", obj.pk, "first", "second", "third")],
)
# Extra columns after an empty values_list() are still included
self.assertEqual(
list(
TestObject.objects.values_list().extra(
select={"foo": "first", "bar": "second", "whiz": "third"}
)
),
[("first", "second", "third", obj.pk, "first", "second", "third")],
)
# Extra columns ignored completely if not mentioned in values_list()
self.assertEqual(
list(
TestObject.objects.extra(
select={"foo": "first", "bar": "second", "whiz": "third"}
).values_list("first", "second")
),
[("first", "second")],
)
# Extra columns after a non-empty values_list() clause are ignored
# completely
self.assertEqual(
list(
TestObject.objects.values_list("first", "second").extra(
select={"foo": "first", "bar": "second", "whiz": "third"}
)
),
[("first", "second")],
)
self.assertEqual(
list(
TestObject.objects.extra(
select={"foo": "first", "bar": "second", "whiz": "third"}
).values_list("second", flat=True)
),
["second"],
)
# Only the extra columns specified in the values_list() are returned
self.assertEqual(
list(
TestObject.objects.extra(
select={"foo": "first", "bar": "second", "whiz": "third"}
).values_list("first", "second", "whiz")
),
[("first", "second", "third")],
)
# ...also works if only extra columns are included
self.assertEqual(
list(
TestObject.objects.extra(
select={"foo": "first", "bar": "second", "whiz": "third"}
).values_list("foo", "whiz")
),
[("first", "third")],
)
self.assertEqual(
list(
TestObject.objects.extra(
select={"foo": "first", "bar": "second", "whiz": "third"}
).values_list("whiz", flat=True)
),
["third"],
)
# ... and values are returned in the order they are specified
self.assertEqual(
list(
TestObject.objects.extra(
select={"foo": "first", "bar": "second", "whiz": "third"}
).values_list("whiz", "foo")
),
[("third", "first")],
)
self.assertEqual(
list(
TestObject.objects.extra(
select={"foo": "first", "bar": "second", "whiz": "third"}
).values_list("first", "id")
),
[("first", obj.pk)],
)
self.assertEqual(
list(
TestObject.objects.extra(
select={"foo": "first", "bar": "second", "whiz": "third"}
).values_list("whiz", "first", "bar", "id")
),
[("third", "first", "second", obj.pk)],
)
def test_regression_10847(self):
"""
Regression for #10847: the list of extra columns can always be
accurately evaluated. Using an inner query ensures that as_sql() is
producing correct output without requiring full evaluation and
execution of the inner query.
"""
obj = TestObject(first="first", second="second", third="third")
obj.save()
self.assertEqual(
list(TestObject.objects.extra(select={"extra": 1}).values("pk")),
[{"pk": obj.pk}],
)
self.assertSequenceEqual(
TestObject.objects.filter(
pk__in=TestObject.objects.extra(select={"extra": 1}).values("pk")
),
[obj],
)
self.assertEqual(
list(TestObject.objects.values("pk").extra(select={"extra": 1})),
[{"pk": obj.pk}],
)
self.assertSequenceEqual(
TestObject.objects.filter(
pk__in=TestObject.objects.values("pk").extra(select={"extra": 1})
),
[obj],
)
self.assertSequenceEqual(
TestObject.objects.filter(pk=obj.pk)
| TestObject.objects.extra(where=["id > %s"], params=[obj.pk]),
[obj],
)
def test_regression_17877(self):
"""
Extra WHERE clauses get correctly ANDed, even when they
contain OR operations.
"""
# Test Case 1: should appear in queryset.
t1 = TestObject.objects.create(first="a", second="a", third="a")
# Test Case 2: should appear in queryset.
t2 = TestObject.objects.create(first="b", second="a", third="a")
# Test Case 3: should not appear in queryset, bug case.
t = TestObject(first="a", second="a", third="b")
t.save()
# Test Case 4: should not appear in queryset.
t = TestObject(first="b", second="a", third="b")
t.save()
# Test Case 5: should not appear in queryset.
t = TestObject(first="b", second="b", third="a")
t.save()
# Test Case 6: should not appear in queryset, bug case.
t = TestObject(first="a", second="b", third="b")
t.save()
self.assertCountEqual(
TestObject.objects.extra(
where=["first = 'a' OR second = 'a'", "third = 'a'"],
),
[t1, t2],
)
def test_extra_values_distinct_ordering(self):
t1 = TestObject.objects.create(first="a", second="a", third="a")
t2 = TestObject.objects.create(first="a", second="b", third="b")
qs = (
TestObject.objects.extra(select={"second_extra": "second"})
.values_list("id", flat=True)
.distinct()
)
self.assertSequenceEqual(qs.order_by("second_extra"), [t1.pk, t2.pk])
self.assertSequenceEqual(qs.order_by("-second_extra"), [t2.pk, t1.pk])
# Note: the extra ordering must appear in select clause, so we get two
# non-distinct results here (this is on purpose, see #7070).
# Extra select doesn't appear in result values.
self.assertSequenceEqual(
qs.order_by("-second_extra").values_list("first"), [("a",), ("a",)]
)
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/no_models/__init__.py | tests/no_models/__init__.py | python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false | |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/no_models/tests.py | tests/no_models/tests.py | from django.apps import apps
from django.test import SimpleTestCase
class NoModelTests(SimpleTestCase):
def test_no_models(self):
"""It's possible to load an app with no models.py file."""
app_config = apps.get_app_config("no_models")
self.assertIsNone(app_config.models_module)
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/reserved_names/models.py | tests/reserved_names/models.py | """
Using SQL reserved names
Need to use a reserved SQL name as a column name or table name? Need to include
a hyphen in a column or table name? No problem. Django quotes names
appropriately behind the scenes, so your database won't complain about
reserved-name usage.
"""
from django.db import models
class Thing(models.Model):
when = models.CharField(max_length=1, primary_key=True)
join = models.CharField(max_length=1)
like = models.CharField(max_length=1)
drop = models.CharField(max_length=1)
alter = models.CharField(max_length=1)
having = models.CharField(max_length=1)
where = models.DateField(max_length=1)
has_hyphen = models.CharField(max_length=1, db_column="has-hyphen")
class Meta:
db_table = "select"
def __str__(self):
return self.when
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/reserved_names/__init__.py | tests/reserved_names/__init__.py | python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false | |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/reserved_names/tests.py | tests/reserved_names/tests.py | import datetime
from django.test import TestCase
from .models import Thing
class ReservedNameTests(TestCase):
def generate(self):
day1 = datetime.date(2005, 1, 1)
Thing.objects.create(
when="a",
join="b",
like="c",
drop="d",
alter="e",
having="f",
where=day1,
has_hyphen="h",
)
day2 = datetime.date(2006, 2, 2)
Thing.objects.create(
when="h",
join="i",
like="j",
drop="k",
alter="l",
having="m",
where=day2,
)
def test_simple(self):
day1 = datetime.date(2005, 1, 1)
t = Thing.objects.create(
when="a",
join="b",
like="c",
drop="d",
alter="e",
having="f",
where=day1,
has_hyphen="h",
)
self.assertEqual(t.when, "a")
day2 = datetime.date(2006, 2, 2)
u = Thing.objects.create(
when="h",
join="i",
like="j",
drop="k",
alter="l",
having="m",
where=day2,
)
self.assertEqual(u.when, "h")
def test_order_by(self):
self.generate()
things = [t.when for t in Thing.objects.order_by("when")]
self.assertEqual(things, ["a", "h"])
def test_fields(self):
self.generate()
v = Thing.objects.get(pk="a")
self.assertEqual(v.join, "b")
self.assertEqual(v.where, datetime.date(year=2005, month=1, day=1))
def test_dates(self):
self.generate()
resp = Thing.objects.dates("where", "year")
self.assertEqual(
list(resp),
[
datetime.date(2005, 1, 1),
datetime.date(2006, 1, 1),
],
)
def test_month_filter(self):
self.generate()
self.assertEqual(Thing.objects.filter(where__month=1)[0].when, "a")
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/sitemaps_tests/test_https.py | tests/sitemaps_tests/test_https.py | from datetime import date
from django.test import override_settings
from .base import SitemapTestsBase
@override_settings(ROOT_URLCONF="sitemaps_tests.urls.https")
class HTTPSSitemapTests(SitemapTestsBase):
protocol = "https"
def test_secure_sitemap_index(self):
"A secure sitemap index can be rendered"
response = self.client.get("/secure/index.xml")
expected_content = """<?xml version="1.0" encoding="UTF-8"?>
<sitemapindex xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">
<sitemap><loc>%s/secure/sitemap-simple.xml</loc><lastmod>%s</lastmod></sitemap>
</sitemapindex>
""" % (
self.base_url,
date.today(),
)
self.assertXMLEqual(response.text, expected_content)
def test_secure_sitemap_section(self):
"A secure sitemap section can be rendered"
response = self.client.get("/secure/sitemap-simple.xml")
expected_content = (
'<?xml version="1.0" encoding="UTF-8"?>\n'
'<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9" '
'xmlns:xhtml="http://www.w3.org/1999/xhtml">\n'
"<url><loc>%s/location/</loc><lastmod>%s</lastmod>"
"<changefreq>never</changefreq><priority>0.5</priority></url>\n"
"</urlset>"
) % (
self.base_url,
date.today(),
)
self.assertXMLEqual(response.text, expected_content)
@override_settings(SECURE_PROXY_SSL_HEADER=False)
class HTTPSDetectionSitemapTests(SitemapTestsBase):
extra = {"wsgi.url_scheme": "https"}
def test_sitemap_index_with_https_request(self):
"A sitemap index requested in HTTPS is rendered with HTTPS links"
response = self.client.get("/simple/index.xml", **self.extra)
expected_content = """<?xml version="1.0" encoding="UTF-8"?>
<sitemapindex xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">
<sitemap><loc>%s/simple/sitemap-simple.xml</loc><lastmod>%s</lastmod></sitemap>
</sitemapindex>
""" % (
self.base_url.replace("http://", "https://"),
date.today(),
)
self.assertXMLEqual(response.text, expected_content)
def test_sitemap_section_with_https_request(self):
"A sitemap section requested in HTTPS is rendered with HTTPS links"
response = self.client.get("/simple/sitemap-simple.xml", **self.extra)
expected_content = (
'<?xml version="1.0" encoding="UTF-8"?>\n'
'<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9" '
'xmlns:xhtml="http://www.w3.org/1999/xhtml">\n'
"<url><loc>%s/location/</loc><lastmod>%s</lastmod>"
"<changefreq>never</changefreq><priority>0.5</priority></url>\n"
"</urlset>"
) % (
self.base_url.replace("http://", "https://"),
date.today(),
)
self.assertXMLEqual(response.text, expected_content)
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/sitemaps_tests/test_http.py | tests/sitemaps_tests/test_http.py | import os
from datetime import date
from django.contrib.sitemaps import Sitemap
from django.contrib.sites.models import Site
from django.core.exceptions import ImproperlyConfigured
from django.test import modify_settings, override_settings
from django.utils import translation
from django.utils.formats import localize
from .base import SitemapTestsBase
from .models import I18nTestModel, TestModel
class HTTPSitemapTests(SitemapTestsBase):
use_sitemap_err_msg = (
"To use sitemaps, either enable the sites framework or pass a "
"Site/RequestSite object in your view."
)
def test_simple_sitemap_index(self):
"A simple sitemap index can be rendered"
response = self.client.get("/simple/index.xml")
expected_content = """<?xml version="1.0" encoding="UTF-8"?>
<sitemapindex xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">
<sitemap><loc>%s/simple/sitemap-simple.xml</loc><lastmod>%s</lastmod></sitemap>
</sitemapindex>
""" % (
self.base_url,
date.today(),
)
self.assertXMLEqual(response.text, expected_content)
def test_sitemap_not_callable(self):
"""A sitemap may not be callable."""
response = self.client.get("/simple-not-callable/index.xml")
expected_content = """<?xml version="1.0" encoding="UTF-8"?>
<sitemapindex xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">
<sitemap><loc>%s/simple/sitemap-simple.xml</loc><lastmod>%s</lastmod></sitemap>
</sitemapindex>
""" % (
self.base_url,
date.today(),
)
self.assertXMLEqual(response.text, expected_content)
def test_paged_sitemap(self):
"""A sitemap may have multiple pages."""
response = self.client.get("/simple-paged/index.xml")
expected_content = """<?xml version="1.0" encoding="UTF-8"?>
<sitemapindex xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">
<sitemap><loc>{0}/simple/sitemap-simple.xml</loc><lastmod>{1}</lastmod></sitemap><sitemap><loc>{0}/simple/sitemap-simple.xml?p=2</loc><lastmod>{1}</lastmod></sitemap>
</sitemapindex>
""".format(
self.base_url, date.today()
)
self.assertXMLEqual(response.text, expected_content)
@override_settings(
TEMPLATES=[
{
"BACKEND": "django.template.backends.django.DjangoTemplates",
"DIRS": [os.path.join(os.path.dirname(__file__), "templates")],
}
]
)
def test_simple_sitemap_custom_lastmod_index(self):
"A simple sitemap index can be rendered with a custom template"
response = self.client.get("/simple/custom-lastmod-index.xml")
expected_content = """<?xml version="1.0" encoding="UTF-8"?>
<!-- This is a customized template -->
<sitemapindex xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">
<sitemap><loc>%s/simple/sitemap-simple.xml</loc><lastmod>%s</lastmod></sitemap>
</sitemapindex>
""" % (
self.base_url,
date.today(),
)
self.assertXMLEqual(response.text, expected_content)
def test_simple_sitemap_section(self):
"A simple sitemap section can be rendered"
response = self.client.get("/simple/sitemap-simple.xml")
expected_content = (
'<?xml version="1.0" encoding="UTF-8"?>\n'
'<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9" '
'xmlns:xhtml="http://www.w3.org/1999/xhtml">\n'
"<url><loc>%s/location/</loc><lastmod>%s</lastmod>"
"<changefreq>never</changefreq><priority>0.5</priority></url>\n"
"</urlset>"
) % (
self.base_url,
date.today(),
)
self.assertXMLEqual(response.text, expected_content)
def test_no_section(self):
response = self.client.get("/simple/sitemap-simple2.xml")
self.assertEqual(
str(response.context["exception"]),
"No sitemap available for section: 'simple2'",
)
self.assertEqual(response.status_code, 404)
def test_empty_page(self):
response = self.client.get("/simple/sitemap-simple.xml?p=0")
self.assertEqual(str(response.context["exception"]), "Page 0 empty")
self.assertEqual(response.status_code, 404)
def test_page_not_int(self):
response = self.client.get("/simple/sitemap-simple.xml?p=test")
self.assertEqual(str(response.context["exception"]), "No page 'test'")
self.assertEqual(response.status_code, 404)
def test_simple_sitemap(self):
"A simple sitemap can be rendered"
response = self.client.get("/simple/sitemap.xml")
expected_content = (
'<?xml version="1.0" encoding="UTF-8"?>\n'
'<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9" '
'xmlns:xhtml="http://www.w3.org/1999/xhtml">\n'
"<url><loc>%s/location/</loc><lastmod>%s</lastmod>"
"<changefreq>never</changefreq><priority>0.5</priority></url>\n"
"</urlset>"
) % (
self.base_url,
date.today(),
)
self.assertXMLEqual(response.text, expected_content)
@override_settings(
TEMPLATES=[
{
"BACKEND": "django.template.backends.django.DjangoTemplates",
"DIRS": [os.path.join(os.path.dirname(__file__), "templates")],
}
]
)
def test_simple_custom_sitemap(self):
"A simple sitemap can be rendered with a custom template"
response = self.client.get("/simple/custom-sitemap.xml")
expected_content = """<?xml version="1.0" encoding="UTF-8"?>
<!-- This is a customized template -->
<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">
<url><loc>%s/location/</loc><lastmod>%s</lastmod><changefreq>never</changefreq><priority>0.5</priority></url>
</urlset>
""" % (
self.base_url,
date.today(),
)
self.assertXMLEqual(response.text, expected_content)
def test_sitemap_last_modified(self):
"Last-Modified header is set correctly"
response = self.client.get("/lastmod/sitemap.xml")
self.assertEqual(
response.headers["Last-Modified"], "Wed, 13 Mar 2013 10:00:00 GMT"
)
def test_sitemap_last_modified_date(self):
"""
The Last-Modified header should be support dates (without time).
"""
response = self.client.get("/lastmod/date-sitemap.xml")
self.assertEqual(
response.headers["Last-Modified"], "Wed, 13 Mar 2013 00:00:00 GMT"
)
def test_sitemap_last_modified_tz(self):
"""
The Last-Modified header should be converted from timezone aware dates
to GMT.
"""
response = self.client.get("/lastmod/tz-sitemap.xml")
self.assertEqual(
response.headers["Last-Modified"], "Wed, 13 Mar 2013 15:00:00 GMT"
)
def test_sitemap_last_modified_missing(self):
"Last-Modified header is missing when sitemap has no lastmod"
response = self.client.get("/generic/sitemap.xml")
self.assertFalse(response.has_header("Last-Modified"))
def test_sitemap_last_modified_mixed(self):
"Last-Modified header is omitted when lastmod not on all items"
response = self.client.get("/lastmod-mixed/sitemap.xml")
self.assertFalse(response.has_header("Last-Modified"))
def test_sitemaps_lastmod_mixed_ascending_last_modified_missing(self):
"""
The Last-Modified header is omitted when lastmod isn't found in all
sitemaps. Test sitemaps are sorted by lastmod in ascending order.
"""
response = self.client.get("/lastmod-sitemaps/mixed-ascending.xml")
self.assertFalse(response.has_header("Last-Modified"))
def test_sitemaps_lastmod_mixed_descending_last_modified_missing(self):
"""
The Last-Modified header is omitted when lastmod isn't found in all
sitemaps. Test sitemaps are sorted by lastmod in descending order.
"""
response = self.client.get("/lastmod-sitemaps/mixed-descending.xml")
self.assertFalse(response.has_header("Last-Modified"))
def test_sitemaps_lastmod_ascending(self):
"""
The Last-Modified header is set to the most recent sitemap lastmod.
Test sitemaps are sorted by lastmod in ascending order.
"""
response = self.client.get("/lastmod-sitemaps/ascending.xml")
self.assertEqual(
response.headers["Last-Modified"], "Sat, 20 Apr 2013 05:00:00 GMT"
)
def test_sitemaps_lastmod_descending(self):
"""
The Last-Modified header is set to the most recent sitemap lastmod.
Test sitemaps are sorted by lastmod in descending order.
"""
response = self.client.get("/lastmod-sitemaps/descending.xml")
self.assertEqual(
response.headers["Last-Modified"], "Sat, 20 Apr 2013 05:00:00 GMT"
)
def test_sitemap_get_latest_lastmod_none(self):
"""
sitemapindex.lastmod is omitted when Sitemap.lastmod is
callable and Sitemap.get_latest_lastmod is not implemented
"""
response = self.client.get("/lastmod/get-latest-lastmod-none-sitemap.xml")
self.assertNotContains(response, "<lastmod>")
def test_sitemap_get_latest_lastmod(self):
"""
sitemapindex.lastmod is included when Sitemap.lastmod is
attribute and Sitemap.get_latest_lastmod is implemented
"""
response = self.client.get("/lastmod/get-latest-lastmod-sitemap.xml")
self.assertContains(response, "<lastmod>2013-03-13T10:00:00</lastmod>")
def test_sitemap_latest_lastmod_timezone(self):
"""
lastmod datestamp shows timezones if Sitemap.get_latest_lastmod
returns an aware datetime.
"""
response = self.client.get("/lastmod/latest-lastmod-timezone-sitemap.xml")
self.assertContains(response, "<lastmod>2013-03-13T10:00:00-05:00</lastmod>")
def test_localized_priority(self):
"""The priority value should not be localized."""
with translation.override("fr"):
self.assertEqual("0,3", localize(0.3))
# Priorities aren't rendered in localized format.
response = self.client.get("/simple/sitemap.xml")
self.assertContains(response, "<priority>0.5</priority>")
self.assertContains(response, "<lastmod>%s</lastmod>" % date.today())
@modify_settings(INSTALLED_APPS={"remove": "django.contrib.sites"})
def test_requestsite_sitemap(self):
# Hitting the flatpages sitemap without the sites framework installed
# doesn't raise an exception.
response = self.client.get("/simple/sitemap.xml")
expected_content = (
'<?xml version="1.0" encoding="UTF-8"?>\n'
'<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9" '
'xmlns:xhtml="http://www.w3.org/1999/xhtml">\n'
"<url><loc>http://testserver/location/</loc><lastmod>%s</lastmod>"
"<changefreq>never</changefreq><priority>0.5</priority></url>\n"
"</urlset>"
) % date.today()
self.assertXMLEqual(response.text, expected_content)
def test_sitemap_get_urls_no_site_1(self):
"""
Check we get ImproperlyConfigured if we don't pass a site object to
Sitemap.get_urls and no Site objects exist
"""
Site.objects.all().delete()
with self.assertRaisesMessage(ImproperlyConfigured, self.use_sitemap_err_msg):
Sitemap().get_urls()
@modify_settings(INSTALLED_APPS={"remove": "django.contrib.sites"})
def test_sitemap_get_urls_no_site_2(self):
"""
Check we get ImproperlyConfigured when we don't pass a site object to
Sitemap.get_urls if Site objects exists, but the sites framework is not
actually installed.
"""
with self.assertRaisesMessage(ImproperlyConfigured, self.use_sitemap_err_msg):
Sitemap().get_urls()
def test_sitemap_item(self):
"""
Check to make sure that the raw item is included with each
Sitemap.get_url() url result.
"""
test_sitemap = Sitemap()
test_sitemap.items = TestModel.objects.order_by("pk").all
def is_testmodel(url):
return isinstance(url["item"], TestModel)
item_in_url_info = all(map(is_testmodel, test_sitemap.get_urls()))
self.assertTrue(item_in_url_info)
def test_cached_sitemap_index(self):
"""
A cached sitemap index can be rendered (#2713).
"""
response = self.client.get("/cached/index.xml")
expected_content = """<?xml version="1.0" encoding="UTF-8"?>
<sitemapindex xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">
<sitemap><loc>%s/cached/sitemap-simple.xml</loc><lastmod>%s</lastmod></sitemap>
</sitemapindex>
""" % (
self.base_url,
date.today(),
)
self.assertXMLEqual(response.text, expected_content)
def test_x_robots_sitemap(self):
response = self.client.get("/simple/index.xml")
self.assertEqual(response.headers["X-Robots-Tag"], "noindex, noodp, noarchive")
response = self.client.get("/simple/sitemap.xml")
self.assertEqual(response.headers["X-Robots-Tag"], "noindex, noodp, noarchive")
def test_empty_sitemap(self):
response = self.client.get("/empty/sitemap.xml")
self.assertEqual(response.status_code, 200)
@override_settings(LANGUAGES=(("en", "English"), ("pt", "Portuguese")))
def test_simple_i18n_sitemap_index(self):
"""
A simple i18n sitemap index can be rendered, without logging variable
lookup errors.
"""
with self.assertNoLogs("django.template", "DEBUG"):
response = self.client.get("/simple/i18n.xml")
expected_content = (
'<?xml version="1.0" encoding="UTF-8"?>\n'
'<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9" '
'xmlns:xhtml="http://www.w3.org/1999/xhtml">\n'
"<url><loc>{0}/en/i18n/testmodel/{1}/</loc><changefreq>never</changefreq>"
"<priority>0.5</priority></url><url><loc>{0}/pt/i18n/testmodel/{1}/</loc>"
"<changefreq>never</changefreq><priority>0.5</priority></url>\n"
"</urlset>"
).format(self.base_url, self.i18n_model.pk)
self.assertXMLEqual(response.text, expected_content)
@override_settings(LANGUAGES=(("en", "English"), ("pt", "Portuguese")))
def test_alternate_i18n_sitemap_index(self):
"""
A i18n sitemap with alternate/hreflang links can be rendered.
"""
response = self.client.get("/alternates/i18n.xml")
url, pk = self.base_url, self.i18n_model.pk
expected_urls = f"""
<url><loc>{url}/en/i18n/testmodel/{pk}/</loc><changefreq>never</changefreq><priority>0.5</priority>
<xhtml:link rel="alternate" hreflang="en" href="{url}/en/i18n/testmodel/{pk}/"/>
<xhtml:link rel="alternate" hreflang="pt" href="{url}/pt/i18n/testmodel/{pk}/"/>
</url>
<url><loc>{url}/pt/i18n/testmodel/{pk}/</loc><changefreq>never</changefreq><priority>0.5</priority>
<xhtml:link rel="alternate" hreflang="en" href="{url}/en/i18n/testmodel/{pk}/"/>
<xhtml:link rel="alternate" hreflang="pt" href="{url}/pt/i18n/testmodel/{pk}/"/>
</url>
""".replace(
"\n", ""
)
expected_content = (
f'<?xml version="1.0" encoding="UTF-8"?>\n'
f'<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9" '
f'xmlns:xhtml="http://www.w3.org/1999/xhtml">\n'
f"{expected_urls}\n"
f"</urlset>"
)
self.assertXMLEqual(response.text, expected_content)
@override_settings(
LANGUAGES=(("en", "English"), ("pt", "Portuguese"), ("es", "Spanish"))
)
def test_alternate_i18n_sitemap_limited(self):
"""
A i18n sitemap index with limited languages can be rendered.
"""
response = self.client.get("/limited/i18n.xml")
url, pk = self.base_url, self.i18n_model.pk
expected_urls = f"""
<url><loc>{url}/en/i18n/testmodel/{pk}/</loc><changefreq>never</changefreq><priority>0.5</priority>
<xhtml:link rel="alternate" hreflang="en" href="{url}/en/i18n/testmodel/{pk}/"/>
<xhtml:link rel="alternate" hreflang="es" href="{url}/es/i18n/testmodel/{pk}/"/>
</url>
<url><loc>{url}/es/i18n/testmodel/{pk}/</loc><changefreq>never</changefreq><priority>0.5</priority>
<xhtml:link rel="alternate" hreflang="en" href="{url}/en/i18n/testmodel/{pk}/"/>
<xhtml:link rel="alternate" hreflang="es" href="{url}/es/i18n/testmodel/{pk}/"/>
</url>
""".replace(
"\n", ""
)
expected_content = (
f'<?xml version="1.0" encoding="UTF-8"?>\n'
f'<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9" '
f'xmlns:xhtml="http://www.w3.org/1999/xhtml">\n'
f"{expected_urls}\n"
f"</urlset>"
)
self.assertXMLEqual(response.text, expected_content)
@override_settings(LANGUAGES=(("en", "English"), ("pt", "Portuguese")))
def test_alternate_i18n_sitemap_xdefault(self):
"""
A i18n sitemap index with x-default can be rendered.
"""
response = self.client.get("/x-default/i18n.xml")
url, pk = self.base_url, self.i18n_model.pk
expected_urls = f"""
<url><loc>{url}/en/i18n/testmodel/{pk}/</loc><changefreq>never</changefreq><priority>0.5</priority>
<xhtml:link rel="alternate" hreflang="en" href="{url}/en/i18n/testmodel/{pk}/"/>
<xhtml:link rel="alternate" hreflang="pt" href="{url}/pt/i18n/testmodel/{pk}/"/>
<xhtml:link rel="alternate" hreflang="x-default" href="{url}/i18n/testmodel/{pk}/"/>
</url>
<url><loc>{url}/pt/i18n/testmodel/{pk}/</loc><changefreq>never</changefreq><priority>0.5</priority>
<xhtml:link rel="alternate" hreflang="en" href="{url}/en/i18n/testmodel/{pk}/"/>
<xhtml:link rel="alternate" hreflang="pt" href="{url}/pt/i18n/testmodel/{pk}/"/>
<xhtml:link rel="alternate" hreflang="x-default" href="{url}/i18n/testmodel/{pk}/"/>
</url>
""".replace(
"\n", ""
)
expected_content = (
f'<?xml version="1.0" encoding="UTF-8"?>\n'
f'<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9" '
f'xmlns:xhtml="http://www.w3.org/1999/xhtml">\n'
f"{expected_urls}\n"
f"</urlset>"
)
self.assertXMLEqual(response.text, expected_content)
@override_settings(LANGUAGES=(("en", "English"), ("pt", "Portuguese")))
def test_language_for_item_i18n_sitemap(self):
"""
A i18n sitemap index in which item can be chosen to be displayed for a
lang or not.
"""
only_pt = I18nTestModel.objects.create(name="Only for PT")
response = self.client.get("/item-by-lang/i18n.xml")
url, pk, only_pt_pk = self.base_url, self.i18n_model.pk, only_pt.pk
expected_urls = (
f"<url><loc>{url}/en/i18n/testmodel/{pk}/</loc>"
f"<changefreq>never</changefreq><priority>0.5</priority></url>"
f"<url><loc>{url}/pt/i18n/testmodel/{pk}/</loc>"
f"<changefreq>never</changefreq><priority>0.5</priority></url>"
f"<url><loc>{url}/pt/i18n/testmodel/{only_pt_pk}/</loc>"
f"<changefreq>never</changefreq><priority>0.5</priority></url>"
)
expected_content = (
f'<?xml version="1.0" encoding="UTF-8"?>\n'
f'<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9" '
f'xmlns:xhtml="http://www.w3.org/1999/xhtml">\n'
f"{expected_urls}\n"
f"</urlset>"
)
self.assertXMLEqual(response.text, expected_content)
@override_settings(LANGUAGES=(("en", "English"), ("pt", "Portuguese")))
def test_alternate_language_for_item_i18n_sitemap(self):
"""
A i18n sitemap index in which item can be chosen to be displayed for a
lang or not.
"""
only_pt = I18nTestModel.objects.create(name="Only for PT")
response = self.client.get("/item-by-lang-alternates/i18n.xml")
url, pk, only_pt_pk = self.base_url, self.i18n_model.pk, only_pt.pk
expected_urls = (
f"<url><loc>{url}/en/i18n/testmodel/{pk}/</loc>"
f"<changefreq>never</changefreq><priority>0.5</priority>"
f'<xhtml:link rel="alternate" '
f'hreflang="en" href="{url}/en/i18n/testmodel/{pk}/"/>'
f'<xhtml:link rel="alternate" '
f'hreflang="pt" href="{url}/pt/i18n/testmodel/{pk}/"/>'
f'<xhtml:link rel="alternate" '
f'hreflang="x-default" href="{url}/i18n/testmodel/{pk}/"/></url>'
f"<url><loc>{url}/pt/i18n/testmodel/{pk}/</loc>"
f"<changefreq>never</changefreq><priority>0.5</priority>"
f'<xhtml:link rel="alternate" '
f'hreflang="en" href="{url}/en/i18n/testmodel/{pk}/"/>'
f'<xhtml:link rel="alternate" '
f'hreflang="pt" href="{url}/pt/i18n/testmodel/{pk}/"/>'
f'<xhtml:link rel="alternate" '
f'hreflang="x-default" href="{url}/i18n/testmodel/{pk}/"/></url>'
f"<url><loc>{url}/pt/i18n/testmodel/{only_pt_pk}/</loc>"
f"<changefreq>never</changefreq><priority>0.5</priority>"
f'<xhtml:link rel="alternate" '
f'hreflang="pt" href="{url}/pt/i18n/testmodel/{only_pt_pk}/"/></url>'
)
expected_content = (
f'<?xml version="1.0" encoding="UTF-8"?>\n'
f'<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9" '
f'xmlns:xhtml="http://www.w3.org/1999/xhtml">\n'
f"{expected_urls}\n"
f"</urlset>"
)
self.assertXMLEqual(response.text, expected_content)
def test_sitemap_without_entries(self):
response = self.client.get("/sitemap-without-entries/sitemap.xml")
expected_content = (
'<?xml version="1.0" encoding="UTF-8"?>\n'
'<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9" '
'xmlns:xhtml="http://www.w3.org/1999/xhtml">\n\n'
"</urlset>"
)
self.assertXMLEqual(response.text, expected_content)
def test_callable_sitemod_partial(self):
"""
Not all items have `lastmod`. Therefore the `Last-Modified` header
is not set by the detail or index sitemap view.
"""
index_response = self.client.get("/callable-lastmod-partial/index.xml")
sitemap_response = self.client.get("/callable-lastmod-partial/sitemap.xml")
self.assertNotIn("Last-Modified", index_response)
self.assertNotIn("Last-Modified", sitemap_response)
expected_content_index = """<?xml version="1.0" encoding="UTF-8"?>
<sitemapindex xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">
<sitemap><loc>http://example.com/simple/sitemap-callable-lastmod.xml</loc></sitemap>
</sitemapindex>
"""
expected_content_sitemap = (
'<?xml version="1.0" encoding="UTF-8"?>\n'
'<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9" '
'xmlns:xhtml="http://www.w3.org/1999/xhtml">\n'
"<url><loc>http://example.com/location/</loc>"
"<lastmod>2013-03-13</lastmod></url><url>"
"<loc>http://example.com/location/</loc></url>\n"
"</urlset>"
)
self.assertXMLEqual(index_response.text, expected_content_index)
self.assertXMLEqual(sitemap_response.text, expected_content_sitemap)
def test_callable_sitemod_full(self):
"""
All items in the sitemap have `lastmod`. The `Last-Modified` header
is set for the detail and index sitemap view.
"""
index_response = self.client.get("/callable-lastmod-full/index.xml")
sitemap_response = self.client.get("/callable-lastmod-full/sitemap.xml")
self.assertEqual(
index_response.headers["Last-Modified"], "Thu, 13 Mar 2014 10:00:00 GMT"
)
self.assertEqual(
sitemap_response.headers["Last-Modified"], "Thu, 13 Mar 2014 10:00:00 GMT"
)
expected_content_index = """<?xml version="1.0" encoding="UTF-8"?>
<sitemapindex xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">
<sitemap><loc>http://example.com/simple/sitemap-callable-lastmod.xml</loc><lastmod>2014-03-13T10:00:00</lastmod></sitemap>
</sitemapindex>
"""
expected_content_sitemap = (
'<?xml version="1.0" encoding="UTF-8"?>\n'
'<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9" '
'xmlns:xhtml="http://www.w3.org/1999/xhtml">\n'
"<url><loc>http://example.com/location/</loc>"
"<lastmod>2013-03-13</lastmod></url>"
"<url><loc>http://example.com/location/</loc>"
"<lastmod>2014-03-13</lastmod></url>\n"
"</urlset>"
)
self.assertXMLEqual(index_response.text, expected_content_index)
self.assertXMLEqual(sitemap_response.text, expected_content_sitemap)
def test_callable_sitemod_no_items(self):
index_response = self.client.get("/callable-lastmod-no-items/index.xml")
self.assertNotIn("Last-Modified", index_response)
expected_content_index = """<?xml version="1.0" encoding="UTF-8"?>
<sitemapindex xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">
<sitemap><loc>http://example.com/simple/sitemap-callable-lastmod.xml</loc></sitemap>
</sitemapindex>
"""
self.assertXMLEqual(index_response.text, expected_content_index)
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/sitemaps_tests/models.py | tests/sitemaps_tests/models.py | from django.db import models
from django.urls import reverse
class TestModel(models.Model):
name = models.CharField(max_length=100)
lastmod = models.DateTimeField(null=True)
def get_absolute_url(self):
return "/testmodel/%s/" % self.id
class I18nTestModel(models.Model):
name = models.CharField(max_length=100)
def get_absolute_url(self):
return reverse("i18n_testmodel", args=[self.id])
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/sitemaps_tests/__init__.py | tests/sitemaps_tests/__init__.py | python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false | |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/sitemaps_tests/base.py | tests/sitemaps_tests/base.py | from django.apps import apps
from django.contrib.sites.models import Site
from django.core.cache import cache
from django.test import TestCase, modify_settings, override_settings
from .models import I18nTestModel, TestModel
@modify_settings(INSTALLED_APPS={"append": "django.contrib.sitemaps"})
@override_settings(ROOT_URLCONF="sitemaps_tests.urls.http")
class SitemapTestsBase(TestCase):
protocol = "http"
sites_installed = apps.is_installed("django.contrib.sites")
domain = "example.com" if sites_installed else "testserver"
@classmethod
def setUpTestData(cls):
# Create an object for sitemap content.
TestModel.objects.create(name="Test Object")
cls.i18n_model = I18nTestModel.objects.create(name="Test Object")
def setUp(self):
self.base_url = "%s://%s" % (self.protocol, self.domain)
cache.clear()
@classmethod
def setUpClass(cls):
super().setUpClass()
# This cleanup is necessary because contrib.sites cache
# makes tests interfere with each other, see #11505
Site.objects.clear_cache()
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/sitemaps_tests/test_generic.py | tests/sitemaps_tests/test_generic.py | from datetime import datetime
from django.contrib.sitemaps import GenericSitemap
from django.test import override_settings
from .base import SitemapTestsBase
from .models import TestModel
@override_settings(ABSOLUTE_URL_OVERRIDES={})
class GenericViewsSitemapTests(SitemapTestsBase):
def test_generic_sitemap_attributes(self):
datetime_value = datetime.now()
queryset = TestModel.objects.all()
generic_sitemap = GenericSitemap(
info_dict={
"queryset": queryset,
"date_field": datetime_value,
},
priority=0.6,
changefreq="monthly",
protocol="https",
)
attr_values = (
("date_field", datetime_value),
("priority", 0.6),
("changefreq", "monthly"),
("protocol", "https"),
)
for attr_name, expected_value in attr_values:
with self.subTest(attr_name=attr_name):
self.assertEqual(getattr(generic_sitemap, attr_name), expected_value)
self.assertCountEqual(generic_sitemap.queryset, queryset)
def test_generic_sitemap(self):
"A minimal generic sitemap can be rendered"
response = self.client.get("/generic/sitemap.xml")
expected = ""
for pk in TestModel.objects.values_list("id", flat=True):
expected += "<url><loc>%s/testmodel/%s/</loc></url>" % (self.base_url, pk)
expected_content = (
'<?xml version="1.0" encoding="UTF-8"?>\n'
'<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9" '
'xmlns:xhtml="http://www.w3.org/1999/xhtml">\n'
"%s\n"
"</urlset>"
) % expected
self.assertXMLEqual(response.text, expected_content)
def test_generic_sitemap_lastmod(self):
test_model = TestModel.objects.first()
TestModel.objects.update(lastmod=datetime(2013, 3, 13, 10, 0, 0))
response = self.client.get("/generic-lastmod/sitemap.xml")
expected_content = (
'<?xml version="1.0" encoding="UTF-8"?>\n'
'<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9" '
'xmlns:xhtml="http://www.w3.org/1999/xhtml">\n'
"<url><loc>%s/testmodel/%s/</loc><lastmod>2013-03-13</lastmod></url>\n"
"</urlset>"
) % (
self.base_url,
test_model.pk,
)
self.assertXMLEqual(response.text, expected_content)
self.assertEqual(
response.headers["Last-Modified"], "Wed, 13 Mar 2013 10:00:00 GMT"
)
def test_get_protocol_defined_in_constructor(self):
for protocol in ["http", "https"]:
with self.subTest(protocol=protocol):
sitemap = GenericSitemap({"queryset": None}, protocol=protocol)
self.assertEqual(sitemap.get_protocol(), protocol)
def test_get_protocol_passed_as_argument(self):
sitemap = GenericSitemap({"queryset": None})
for protocol in ["http", "https"]:
with self.subTest(protocol=protocol):
self.assertEqual(sitemap.get_protocol(protocol), protocol)
def test_get_protocol_default(self):
sitemap = GenericSitemap({"queryset": None})
self.assertEqual(sitemap.get_protocol(), "https")
def test_generic_sitemap_index(self):
TestModel.objects.update(lastmod=datetime(2013, 3, 13, 10, 0, 0))
response = self.client.get("/generic-lastmod/index.xml")
expected_content = """<?xml version="1.0" encoding="UTF-8"?>
<sitemapindex xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">
<sitemap><loc>http://example.com/simple/sitemap-generic.xml</loc><lastmod>2013-03-13T10:00:00</lastmod></sitemap>
</sitemapindex>"""
self.assertXMLEqual(response.text, expected_content)
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/sitemaps_tests/urls/http.py | tests/sitemaps_tests/urls/http.py | from datetime import date, datetime
from django.conf.urls.i18n import i18n_patterns
from django.contrib.sitemaps import GenericSitemap, Sitemap, views
from django.http import HttpResponse
from django.urls import path
from django.utils import timezone
from django.views.decorators.cache import cache_page
from ..models import I18nTestModel, TestModel
class SimpleSitemap(Sitemap):
changefreq = "never"
priority = 0.5
location = "/location/"
lastmod = date.today()
def items(self):
return [object()]
class SimplePagedSitemap(Sitemap):
lastmod = date.today()
def items(self):
return [object() for x in range(Sitemap.limit + 1)]
class SimpleI18nSitemap(Sitemap):
changefreq = "never"
priority = 0.5
i18n = True
def items(self):
return I18nTestModel.objects.order_by("pk").all()
class AlternatesI18nSitemap(SimpleI18nSitemap):
alternates = True
class LimitedI18nSitemap(AlternatesI18nSitemap):
languages = ["en", "es"]
class XDefaultI18nSitemap(AlternatesI18nSitemap):
x_default = True
class ItemByLangSitemap(SimpleI18nSitemap):
def get_languages_for_item(self, item):
if item.name == "Only for PT":
return ["pt"]
return super().get_languages_for_item(item)
class ItemByLangAlternatesSitemap(AlternatesI18nSitemap):
x_default = True
def get_languages_for_item(self, item):
if item.name == "Only for PT":
return ["pt"]
return super().get_languages_for_item(item)
class EmptySitemap(Sitemap):
changefreq = "never"
priority = 0.5
location = "/location/"
class FixedLastmodSitemap(SimpleSitemap):
lastmod = datetime(2013, 3, 13, 10, 0, 0)
class FixedLastmodMixedSitemap(Sitemap):
changefreq = "never"
priority = 0.5
location = "/location/"
loop = 0
def items(self):
o1 = TestModel()
o1.lastmod = datetime(2013, 3, 13, 10, 0, 0)
o2 = TestModel()
return [o1, o2]
class FixedNewerLastmodSitemap(SimpleSitemap):
lastmod = datetime(2013, 4, 20, 5, 0, 0)
class DateSiteMap(SimpleSitemap):
lastmod = date(2013, 3, 13)
class TimezoneSiteMap(SimpleSitemap):
lastmod = datetime(2013, 3, 13, 10, 0, 0, tzinfo=timezone.get_fixed_timezone(-300))
class CallableLastmodPartialSitemap(Sitemap):
"""Not all items have `lastmod`."""
location = "/location/"
def items(self):
o1 = TestModel()
o1.lastmod = datetime(2013, 3, 13, 10, 0, 0)
o2 = TestModel()
return [o1, o2]
def lastmod(self, obj):
return obj.lastmod
class CallableLastmodFullSitemap(Sitemap):
"""All items have `lastmod`."""
location = "/location/"
def items(self):
o1 = TestModel()
o1.lastmod = datetime(2013, 3, 13, 10, 0, 0)
o2 = TestModel()
o2.lastmod = datetime(2014, 3, 13, 10, 0, 0)
return [o1, o2]
def lastmod(self, obj):
return obj.lastmod
class CallableLastmodNoItemsSitemap(Sitemap):
location = "/location/"
def items(self):
return []
def lastmod(self, obj):
return obj.lastmod
class GetLatestLastmodNoneSiteMap(Sitemap):
changefreq = "never"
priority = 0.5
location = "/location/"
def items(self):
return [object()]
def lastmod(self, obj):
return datetime(2013, 3, 13, 10, 0, 0)
def get_latest_lastmod(self):
return None
class GetLatestLastmodSiteMap(SimpleSitemap):
def get_latest_lastmod(self):
return datetime(2013, 3, 13, 10, 0, 0)
def testmodelview(request, id):
return HttpResponse()
simple_sitemaps = {
"simple": SimpleSitemap,
}
simple_i18n_sitemaps = {
"i18n": SimpleI18nSitemap,
}
alternates_i18n_sitemaps = {
"i18n-alternates": AlternatesI18nSitemap,
}
limited_i18n_sitemaps = {
"i18n-limited": LimitedI18nSitemap,
}
xdefault_i18n_sitemaps = {
"i18n-xdefault": XDefaultI18nSitemap,
}
item_by_lang_i18n_sitemaps = {
"i18n-item-by-lang": ItemByLangSitemap,
}
item_by_lang_alternates_i18n_sitemaps = {
"i18n-item-by-lang-alternates": ItemByLangAlternatesSitemap,
}
simple_sitemaps_not_callable = {
"simple": SimpleSitemap(),
}
simple_sitemaps_paged = {
"simple": SimplePagedSitemap,
}
empty_sitemaps = {
"empty": EmptySitemap,
}
fixed_lastmod_sitemaps = {
"fixed-lastmod": FixedLastmodSitemap,
}
fixed_lastmod_mixed_sitemaps = {
"fixed-lastmod-mixed": FixedLastmodMixedSitemap,
}
sitemaps_lastmod_mixed_ascending = {
"no-lastmod": EmptySitemap,
"lastmod": FixedLastmodSitemap,
}
sitemaps_lastmod_mixed_descending = {
"lastmod": FixedLastmodSitemap,
"no-lastmod": EmptySitemap,
}
sitemaps_lastmod_ascending = {
"date": DateSiteMap,
"datetime": FixedLastmodSitemap,
"datetime-newer": FixedNewerLastmodSitemap,
}
sitemaps_lastmod_descending = {
"datetime-newer": FixedNewerLastmodSitemap,
"datetime": FixedLastmodSitemap,
"date": DateSiteMap,
}
generic_sitemaps = {
"generic": GenericSitemap({"queryset": TestModel.objects.order_by("pk").all()}),
}
get_latest_lastmod_none_sitemaps = {
"get-latest-lastmod-none": GetLatestLastmodNoneSiteMap,
}
get_latest_lastmod_sitemaps = {
"get-latest-lastmod": GetLatestLastmodSiteMap,
}
latest_lastmod_timezone_sitemaps = {
"latest-lastmod-timezone": TimezoneSiteMap,
}
generic_sitemaps_lastmod = {
"generic": GenericSitemap(
{
"queryset": TestModel.objects.order_by("pk").all(),
"date_field": "lastmod",
}
),
}
callable_lastmod_partial_sitemap = {
"callable-lastmod": CallableLastmodPartialSitemap,
}
callable_lastmod_full_sitemap = {
"callable-lastmod": CallableLastmodFullSitemap,
}
callable_lastmod_no_items_sitemap = {
"callable-lastmod": CallableLastmodNoItemsSitemap,
}
urlpatterns = [
path("simple/index.xml", views.index, {"sitemaps": simple_sitemaps}),
path("simple-paged/index.xml", views.index, {"sitemaps": simple_sitemaps_paged}),
path(
"simple-not-callable/index.xml",
views.index,
{"sitemaps": simple_sitemaps_not_callable},
),
path(
"simple/custom-lastmod-index.xml",
views.index,
{
"sitemaps": simple_sitemaps,
"template_name": "custom_sitemap_lastmod_index.xml",
},
),
path(
"simple/sitemap-<section>.xml",
views.sitemap,
{"sitemaps": simple_sitemaps},
name="django.contrib.sitemaps.views.sitemap",
),
path(
"simple/sitemap.xml",
views.sitemap,
{"sitemaps": simple_sitemaps},
name="django.contrib.sitemaps.views.sitemap",
),
path(
"simple/i18n.xml",
views.sitemap,
{"sitemaps": simple_i18n_sitemaps},
name="django.contrib.sitemaps.views.sitemap",
),
path(
"alternates/i18n.xml",
views.sitemap,
{"sitemaps": alternates_i18n_sitemaps},
name="django.contrib.sitemaps.views.sitemap",
),
path(
"limited/i18n.xml",
views.sitemap,
{"sitemaps": limited_i18n_sitemaps},
name="django.contrib.sitemaps.views.sitemap",
),
path(
"x-default/i18n.xml",
views.sitemap,
{"sitemaps": xdefault_i18n_sitemaps},
name="django.contrib.sitemaps.views.sitemap",
),
path(
"simple/custom-sitemap.xml",
views.sitemap,
{"sitemaps": simple_sitemaps, "template_name": "custom_sitemap.xml"},
name="django.contrib.sitemaps.views.sitemap",
),
path(
"empty/sitemap.xml",
views.sitemap,
{"sitemaps": empty_sitemaps},
name="django.contrib.sitemaps.views.sitemap",
),
path(
"lastmod/sitemap.xml",
views.sitemap,
{"sitemaps": fixed_lastmod_sitemaps},
name="django.contrib.sitemaps.views.sitemap",
),
path(
"lastmod-mixed/sitemap.xml",
views.sitemap,
{"sitemaps": fixed_lastmod_mixed_sitemaps},
name="django.contrib.sitemaps.views.sitemap",
),
path(
"lastmod/date-sitemap.xml",
views.sitemap,
{"sitemaps": {"date-sitemap": DateSiteMap}},
name="django.contrib.sitemaps.views.sitemap",
),
path(
"lastmod/tz-sitemap.xml",
views.sitemap,
{"sitemaps": {"tz-sitemap": TimezoneSiteMap}},
name="django.contrib.sitemaps.views.sitemap",
),
path(
"lastmod-sitemaps/mixed-ascending.xml",
views.sitemap,
{"sitemaps": sitemaps_lastmod_mixed_ascending},
name="django.contrib.sitemaps.views.sitemap",
),
path(
"lastmod-sitemaps/mixed-descending.xml",
views.sitemap,
{"sitemaps": sitemaps_lastmod_mixed_descending},
name="django.contrib.sitemaps.views.sitemap",
),
path(
"lastmod-sitemaps/ascending.xml",
views.sitemap,
{"sitemaps": sitemaps_lastmod_ascending},
name="django.contrib.sitemaps.views.sitemap",
),
path(
"item-by-lang/i18n.xml",
views.sitemap,
{"sitemaps": item_by_lang_i18n_sitemaps},
name="django.contrib.sitemaps.views.sitemap",
),
path(
"item-by-lang-alternates/i18n.xml",
views.sitemap,
{"sitemaps": item_by_lang_alternates_i18n_sitemaps},
name="django.contrib.sitemaps.views.sitemap",
),
path(
"lastmod-sitemaps/descending.xml",
views.sitemap,
{"sitemaps": sitemaps_lastmod_descending},
name="django.contrib.sitemaps.views.sitemap",
),
path(
"lastmod/get-latest-lastmod-none-sitemap.xml",
views.index,
{"sitemaps": get_latest_lastmod_none_sitemaps},
name="django.contrib.sitemaps.views.index",
),
path(
"lastmod/get-latest-lastmod-sitemap.xml",
views.index,
{"sitemaps": get_latest_lastmod_sitemaps},
name="django.contrib.sitemaps.views.index",
),
path(
"lastmod/latest-lastmod-timezone-sitemap.xml",
views.index,
{"sitemaps": latest_lastmod_timezone_sitemaps},
name="django.contrib.sitemaps.views.index",
),
path(
"generic/sitemap.xml",
views.sitemap,
{"sitemaps": generic_sitemaps},
name="django.contrib.sitemaps.views.sitemap",
),
path(
"generic-lastmod/sitemap.xml",
views.sitemap,
{"sitemaps": generic_sitemaps_lastmod},
name="django.contrib.sitemaps.views.sitemap",
),
path(
"cached/index.xml",
cache_page(1)(views.index),
{"sitemaps": simple_sitemaps, "sitemap_url_name": "cached_sitemap"},
),
path(
"cached/sitemap-<section>.xml",
cache_page(1)(views.sitemap),
{"sitemaps": simple_sitemaps},
name="cached_sitemap",
),
path(
"sitemap-without-entries/sitemap.xml",
views.sitemap,
{"sitemaps": {}},
name="django.contrib.sitemaps.views.sitemap",
),
path(
"callable-lastmod-partial/index.xml",
views.index,
{"sitemaps": callable_lastmod_partial_sitemap},
),
path(
"callable-lastmod-partial/sitemap.xml",
views.sitemap,
{"sitemaps": callable_lastmod_partial_sitemap},
),
path(
"callable-lastmod-full/index.xml",
views.index,
{"sitemaps": callable_lastmod_full_sitemap},
),
path(
"callable-lastmod-full/sitemap.xml",
views.sitemap,
{"sitemaps": callable_lastmod_full_sitemap},
),
path(
"callable-lastmod-no-items/index.xml",
views.index,
{"sitemaps": callable_lastmod_no_items_sitemap},
),
path(
"generic-lastmod/index.xml",
views.index,
{"sitemaps": generic_sitemaps_lastmod},
name="django.contrib.sitemaps.views.index",
),
]
urlpatterns += i18n_patterns(
path("i18n/testmodel/<int:id>/", testmodelview, name="i18n_testmodel"),
)
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/sitemaps_tests/urls/https.py | tests/sitemaps_tests/urls/https.py | from django.contrib.sitemaps import views
from django.urls import path
from .http import SimpleSitemap
class HTTPSSitemap(SimpleSitemap):
protocol = "https"
secure_sitemaps = {
"simple": HTTPSSitemap,
}
urlpatterns = [
path("secure/index.xml", views.index, {"sitemaps": secure_sitemaps}),
path(
"secure/sitemap-<section>.xml",
views.sitemap,
{"sitemaps": secure_sitemaps},
name="django.contrib.sitemaps.views.sitemap",
),
]
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/sitemaps_tests/urls/__init__.py | tests/sitemaps_tests/urls/__init__.py | python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false | |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/generic_inline_admin/admin.py | tests/generic_inline_admin/admin.py | from django.contrib import admin
from django.contrib.contenttypes.admin import GenericTabularInline
from .models import Category, Contact, Episode, EpisodePermanent, Media, PhoneNumber
site = admin.AdminSite(name="admin")
class MediaInline(GenericTabularInline):
model = Media
class EpisodeAdmin(admin.ModelAdmin):
inlines = [
MediaInline,
]
class PhoneNumberInline(GenericTabularInline):
model = PhoneNumber
class MediaPermanentInline(GenericTabularInline):
model = Media
can_delete = False
site.register(Episode, EpisodeAdmin)
site.register(Contact, inlines=[PhoneNumberInline])
site.register(Category)
site.register(EpisodePermanent, inlines=[MediaPermanentInline])
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/generic_inline_admin/models.py | tests/generic_inline_admin/models.py | from django.contrib.contenttypes.fields import GenericForeignKey, GenericRelation
from django.contrib.contenttypes.models import ContentType
from django.db import models
class Episode(models.Model):
name = models.CharField(max_length=100)
length = models.CharField(max_length=100, blank=True)
author = models.CharField(max_length=100, blank=True)
class Media(models.Model):
"""
Media that can associated to any object.
"""
content_type = models.ForeignKey(ContentType, models.CASCADE)
object_id = models.PositiveIntegerField()
content_object = GenericForeignKey()
url = models.URLField()
description = models.CharField(max_length=100, blank=True)
keywords = models.CharField(max_length=100, blank=True)
def __str__(self):
return self.url
#
# Generic inline with unique_together
#
class Category(models.Model):
name = models.CharField(max_length=50)
class PhoneNumber(models.Model):
content_type = models.ForeignKey(ContentType, models.CASCADE)
object_id = models.PositiveIntegerField()
content_object = GenericForeignKey("content_type", "object_id")
phone_number = models.CharField(max_length=30)
category = models.ForeignKey(Category, models.SET_NULL, null=True, blank=True)
class Meta:
unique_together = (
(
"content_type",
"object_id",
"phone_number",
),
)
class Contact(models.Model):
name = models.CharField(max_length=50)
phone_numbers = GenericRelation(PhoneNumber, related_query_name="phone_numbers")
#
# Generic inline with can_delete=False
#
class EpisodePermanent(Episode):
pass
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/generic_inline_admin/__init__.py | tests/generic_inline_admin/__init__.py | python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false | |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/generic_inline_admin/tests.py | tests/generic_inline_admin/tests.py | from django.contrib import admin
from django.contrib.admin.sites import AdminSite
from django.contrib.auth.models import User
from django.contrib.contenttypes.admin import GenericTabularInline
from django.contrib.contenttypes.models import ContentType
from django.forms.formsets import DEFAULT_MAX_NUM
from django.forms.models import ModelForm
from django.test import RequestFactory, SimpleTestCase, TestCase, override_settings
from django.urls import reverse
from .admin import MediaInline, MediaPermanentInline
from .admin import site as admin_site
from .models import Category, Episode, EpisodePermanent, Media, PhoneNumber
class TestDataMixin:
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="super@example.com"
)
@override_settings(ROOT_URLCONF="generic_inline_admin.urls")
class GenericAdminViewTest(TestDataMixin, TestCase):
def setUp(self):
self.client.force_login(self.superuser)
e = Episode.objects.create(name="This Week in Django")
self.episode_pk = e.pk
m = Media(content_object=e, url="http://example.com/podcast.mp3")
m.save()
self.mp3_media_pk = m.pk
m = Media(content_object=e, url="http://example.com/logo.png")
m.save()
self.png_media_pk = m.pk
def test_basic_add_GET(self):
"""
A smoke test to ensure GET on the add_view works.
"""
response = self.client.get(reverse("admin:generic_inline_admin_episode_add"))
self.assertEqual(response.status_code, 200)
def test_basic_edit_GET(self):
"""
A smoke test to ensure GET on the change_view works.
"""
response = self.client.get(
reverse(
"admin:generic_inline_admin_episode_change", args=(self.episode_pk,)
)
)
self.assertEqual(response.status_code, 200)
def test_basic_add_POST(self):
"""
A smoke test to ensure POST on add_view works.
"""
post_data = {
"name": "This Week in Django",
# inline data
"generic_inline_admin-media-content_type-object_id-TOTAL_FORMS": "1",
"generic_inline_admin-media-content_type-object_id-INITIAL_FORMS": "0",
"generic_inline_admin-media-content_type-object_id-MAX_NUM_FORMS": "0",
}
response = self.client.post(
reverse("admin:generic_inline_admin_episode_add"), post_data
)
self.assertEqual(response.status_code, 302) # redirect somewhere
def test_basic_edit_POST(self):
"""
A smoke test to ensure POST on edit_view works.
"""
prefix = "generic_inline_admin-media-content_type-object_id"
post_data = {
"name": "This Week in Django",
# inline data
f"{prefix}-TOTAL_FORMS": "3",
f"{prefix}-INITIAL_FORMS": "2",
f"{prefix}-MAX_NUM_FORMS": "0",
f"{prefix}-0-id": str(self.mp3_media_pk),
f"{prefix}-0-url": "http://example.com/podcast.mp3",
f"{prefix}-1-id": str(self.png_media_pk),
f"{prefix}-1-url": "http://example.com/logo.png",
f"{prefix}-2-id": "",
f"{prefix}-2-url": "",
}
url = reverse(
"admin:generic_inline_admin_episode_change", args=(self.episode_pk,)
)
response = self.client.post(url, post_data)
self.assertEqual(response.status_code, 302) # redirect somewhere
@override_settings(ROOT_URLCONF="generic_inline_admin.urls")
class GenericInlineAdminParametersTest(TestDataMixin, TestCase):
    """
    extra/max_num/min_num (and their get_*() hooks) on a generic inline
    control the number of forms rendered on the admin change view.
    """

    factory = RequestFactory()

    def setUp(self):
        self.client.force_login(self.superuser)

    def _create_object(self, model):
        """
        Create a model with an attached Media object via GFK. We can't
        load content via a fixture (since the GenericForeignKey relies on
        content type IDs, which will vary depending on what other tests
        have been run), thus we do it here.
        """
        e = model.objects.create(name="This Week in Django")
        Media.objects.create(content_object=e, url="http://example.com/podcast.mp3")
        return e

    def _get_formset(self, inline_class):
        """
        Build an Episode ModelAdmin configured with only `inline_class`,
        render the change view for a freshly created Episode, and return
        the inline formset from the response context.
        """
        modeladmin = admin.ModelAdmin(Episode, admin_site)
        modeladmin.inlines = [inline_class]
        e = self._create_object(Episode)
        request = self.factory.get(
            reverse("admin:generic_inline_admin_episode_change", args=(e.pk,))
        )
        request.user = User(username="super", is_superuser=True)
        response = modeladmin.changeform_view(request, object_id=str(e.pk))
        return response.context_data["inline_admin_formsets"][0].formset

    def test_no_param(self):
        """
        With one initial form, extra (default) at 3, there should be 4 forms.
        """
        e = self._create_object(Episode)
        response = self.client.get(
            reverse("admin:generic_inline_admin_episode_change", args=(e.pk,))
        )
        formset = response.context["inline_admin_formsets"][0].formset
        self.assertEqual(formset.total_form_count(), 4)
        self.assertEqual(formset.initial_form_count(), 1)

    def test_extra_param(self):
        """
        With extra=0, there should be one form.
        """

        class ExtraInline(GenericTabularInline):
            model = Media
            extra = 0

        formset = self._get_formset(ExtraInline)
        self.assertEqual(formset.total_form_count(), 1)
        self.assertEqual(formset.initial_form_count(), 1)

    def test_max_num_param(self):
        """
        With extra=5 and max_num=2, there should be only 2 forms.
        """

        class MaxNumInline(GenericTabularInline):
            model = Media
            extra = 5
            max_num = 2

        formset = self._get_formset(MaxNumInline)
        self.assertEqual(formset.total_form_count(), 2)
        self.assertEqual(formset.initial_form_count(), 1)

    def test_min_num_param(self):
        """
        With extra=3 and min_num=2, there should be five forms.
        """

        class MinNumInline(GenericTabularInline):
            model = Media
            extra = 3
            min_num = 2

        formset = self._get_formset(MinNumInline)
        self.assertEqual(formset.total_form_count(), 5)
        self.assertEqual(formset.initial_form_count(), 1)

    def test_get_extra(self):
        # get_extra() overrides the class-level extra attribute.
        class GetExtraInline(GenericTabularInline):
            model = Media
            extra = 4

            def get_extra(self, request, obj):
                return 2

        formset = self._get_formset(GetExtraInline)
        self.assertEqual(formset.extra, 2)

    def test_get_min_num(self):
        # get_min_num() overrides the class-level min_num attribute.
        class GetMinNumInline(GenericTabularInline):
            model = Media
            min_num = 5

            def get_min_num(self, request, obj):
                return 2

        formset = self._get_formset(GetMinNumInline)
        self.assertEqual(formset.min_num, 2)

    def test_get_max_num(self):
        # get_max_num() overrides the class-level max_num attribute.
        class GetMaxNumInline(GenericTabularInline):
            model = Media
            extra = 5

            def get_max_num(self, request, obj):
                return 2

        formset = self._get_formset(GetMaxNumInline)
        self.assertEqual(formset.max_num, 2)
@override_settings(ROOT_URLCONF="generic_inline_admin.urls")
class GenericInlineAdminWithUniqueTogetherTest(TestDataMixin, TestCase):
    """Smoke tests for add/delete admin views with a PhoneNumber generic inline."""

    def setUp(self):
        self.client.force_login(self.superuser)

    def test_add(self):
        category_id = Category.objects.create(name="male").pk
        prefix = "generic_inline_admin-phonenumber-content_type-object_id"
        post_data = {
            "name": "John Doe",
            # inline data: one new PhoneNumber row
            f"{prefix}-TOTAL_FORMS": "1",
            f"{prefix}-INITIAL_FORMS": "0",
            f"{prefix}-MAX_NUM_FORMS": "0",
            f"{prefix}-0-id": "",
            f"{prefix}-0-phone_number": "555-555-5555",
            f"{prefix}-0-category": str(category_id),
        }
        response = self.client.get(reverse("admin:generic_inline_admin_contact_add"))
        self.assertEqual(response.status_code, 200)
        response = self.client.post(
            reverse("admin:generic_inline_admin_contact_add"), post_data
        )
        self.assertEqual(response.status_code, 302)  # redirect somewhere

    def test_delete(self):
        from .models import Contact

        c = Contact.objects.create(name="foo")
        PhoneNumber.objects.create(
            object_id=c.id,
            content_type=ContentType.objects.get_for_model(Contact),
            phone_number="555-555-5555",
        )
        response = self.client.post(
            reverse("admin:generic_inline_admin_contact_delete", args=[c.pk])
        )
        # The delete confirmation page should render rather than error out.
        self.assertContains(response, "Are you sure you want to delete")
@override_settings(ROOT_URLCONF="generic_inline_admin.urls")
class NoInlineDeletionTest(SimpleTestCase):
    """can_delete disabled on an inline must propagate to its formset class."""

    def test_no_deletion(self):
        dummy_request = object()
        permanent_inline = MediaPermanentInline(EpisodePermanent, admin_site)
        formset_class = permanent_inline.get_formset(dummy_request)
        self.assertFalse(formset_class.can_delete)
class MockRequest:
    # Minimal stand-in for an HttpRequest; attributes are assigned as needed.
    pass
class MockSuperUser:
    # Stand-in user object that grants every permission check.
    def has_perm(self, perm, obj=None):
        return True
# Module-level superuser request shared by the tests below.
request = MockRequest()
request.user = MockSuperUser()
@override_settings(ROOT_URLCONF="generic_inline_admin.urls")
class GenericInlineModelAdminTest(SimpleTestCase):
    """Unit tests for GenericInlineModelAdmin form/formset construction."""

    def setUp(self):
        self.site = AdminSite()

    def test_get_formset_kwargs(self):
        media_inline = MediaInline(Media, AdminSite())
        # Create a formset with default arguments
        formset = media_inline.get_formset(request)
        self.assertEqual(formset.max_num, DEFAULT_MAX_NUM)
        self.assertIs(formset.can_order, False)
        # Create a formset with custom keyword arguments
        formset = media_inline.get_formset(request, max_num=100, can_order=True)
        self.assertEqual(formset.max_num, 100)
        self.assertIs(formset.can_order, True)

    def test_custom_form_meta_exclude_with_readonly(self):
        """
        The custom ModelForm's `Meta.exclude` is respected when
        used in conjunction with `GenericInlineModelAdmin.readonly_fields`
        and when no `ModelAdmin.exclude` is defined.
        """

        class MediaForm(ModelForm):
            class Meta:
                model = Media
                exclude = ["url"]

        class MediaInline(GenericTabularInline):
            readonly_fields = ["description"]
            form = MediaForm
            model = Media

        class EpisodeAdmin(admin.ModelAdmin):
            inlines = [MediaInline]

        ma = EpisodeAdmin(Episode, self.site)
        # Both "url" (form exclude) and "description" (readonly) are omitted.
        self.assertEqual(
            list(list(ma.get_formsets_with_inlines(request))[0][0]().forms[0].fields),
            ["keywords", "id", "DELETE"],
        )

    def test_custom_form_meta_exclude(self):
        """
        The custom ModelForm's `Meta.exclude` is respected by
        `GenericInlineModelAdmin.get_formset`, and overridden if
        `ModelAdmin.exclude` or `GenericInlineModelAdmin.exclude` are defined.
        Refs #15907.
        """

        # First with `GenericInlineModelAdmin` -----------------
        class MediaForm(ModelForm):
            class Meta:
                model = Media
                exclude = ["url"]

        class MediaInline(GenericTabularInline):
            exclude = ["description"]
            form = MediaForm
            model = Media

        class EpisodeAdmin(admin.ModelAdmin):
            inlines = [MediaInline]

        ma = EpisodeAdmin(Episode, self.site)
        # The inline's exclude wins: "url" reappears, "description" is gone.
        self.assertEqual(
            list(list(ma.get_formsets_with_inlines(request))[0][0]().forms[0].fields),
            ["url", "keywords", "id", "DELETE"],
        )

        # Then, only with `ModelForm` -----------------
        class MediaInline(GenericTabularInline):
            form = MediaForm
            model = Media

        class EpisodeAdmin(admin.ModelAdmin):
            inlines = [MediaInline]

        ma = EpisodeAdmin(Episode, self.site)
        # With no inline exclude, the form's Meta.exclude ("url") applies.
        self.assertEqual(
            list(list(ma.get_formsets_with_inlines(request))[0][0]().forms[0].fields),
            ["description", "keywords", "id", "DELETE"],
        )

    def test_get_fieldsets(self):
        # get_fieldsets is called when figuring out form fields.
        # Refs #18681.
        class MediaForm(ModelForm):
            class Meta:
                model = Media
                fields = "__all__"

        class MediaInline(GenericTabularInline):
            form = MediaForm
            model = Media
            can_delete = False

            def get_fieldsets(self, request, obj=None):
                return [(None, {"fields": ["url", "description"]})]

        ma = MediaInline(Media, self.site)
        form = ma.get_formset(None).form
        self.assertEqual(form._meta.fields, ["url", "description"])

    def test_get_formsets_with_inlines_returns_tuples(self):
        """
        get_formsets_with_inlines() returns the correct tuples.
        """

        class MediaForm(ModelForm):
            class Meta:
                model = Media
                exclude = ["url"]

        class MediaInline(GenericTabularInline):
            form = MediaForm
            model = Media

        class AlternateInline(GenericTabularInline):
            form = MediaForm
            model = Media

        class EpisodeAdmin(admin.ModelAdmin):
            inlines = [AlternateInline, MediaInline]

        ma = EpisodeAdmin(Episode, self.site)
        inlines = ma.get_inline_instances(request)
        # Formsets must pair up with their inlines in declaration order.
        for (formset, inline), other_inline in zip(
            ma.get_formsets_with_inlines(request), inlines
        ):
            self.assertIsInstance(formset, other_inline.get_formset(request).__class__)

    def test_get_inline_instances_override_get_inlines(self):
        class MediaInline(GenericTabularInline):
            model = Media

        class AlternateInline(GenericTabularInline):
            model = Media

        class EpisodeAdmin(admin.ModelAdmin):
            inlines = (AlternateInline, MediaInline)

            def get_inlines(self, request, obj):
                # Select inlines based on a request attribute set by the test.
                if hasattr(request, "name"):
                    if request.name == "alternate":
                        return self.inlines[:1]
                    elif request.name == "media":
                        return self.inlines[1:2]
                return []

        ma = EpisodeAdmin(Episode, self.site)
        self.assertEqual(ma.get_inlines(request, None), [])
        self.assertEqual(ma.get_inline_instances(request), [])
        for name, inline_class in (
            ("alternate", AlternateInline),
            ("media", MediaInline),
        ):
            request.name = name
            self.assertEqual(ma.get_inlines(request, None), (inline_class,))
            self.assertEqual(type(ma.get_inline_instances(request)[0]), inline_class)
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/generic_inline_admin/urls.py | tests/generic_inline_admin/urls.py | from django.urls import path
from . import admin
urlpatterns = [
    # Mount the test AdminSite under the prefix used by reverse() in the tests.
    path("generic_inline_admin/admin/", admin.site.urls),
]
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/serializers/test_data.py | tests/serializers/test_data.py | """
A test spanning all the capabilities of all the serializers.
This class defines sample data and a dynamically generated
test case that is capable of testing the capabilities of
the serializers. This includes all valid data values, plus
forward, backwards and self references.
"""
import datetime
import decimal
import uuid
from collections import namedtuple
from django.core import serializers
from django.db import connection, models
from django.test import TestCase, skipIfDBFeature, skipUnlessDBFeature
from .models import (
Anchor,
AutoNowDateTimeData,
BigIntegerData,
BinaryData,
BooleanData,
BooleanPKData,
CharData,
CharPKData,
DateData,
DatePKData,
DateTimeData,
DateTimePKData,
DecimalData,
DecimalPKData,
EmailData,
EmailPKData,
ExplicitInheritBaseModel,
FileData,
FilePathData,
FilePathPKData,
FKData,
FKDataToField,
FKDataToO2O,
FKSelfData,
FKToUUID,
FloatData,
FloatPKData,
GenericData,
GenericIPAddressData,
GenericIPAddressPKData,
ImageData,
InheritAbstractModel,
InheritBaseModel,
IntegerData,
IntegerPKData,
Intermediate,
LengthModel,
M2MData,
M2MIntermediateData,
M2MSelfData,
ModifyingSaveData,
O2OData,
PositiveBigIntegerData,
PositiveIntegerData,
PositiveIntegerPKData,
PositiveSmallIntegerData,
PositiveSmallIntegerPKData,
SlugData,
SlugPKData,
SmallData,
SmallPKData,
Tag,
TextData,
TextPKData,
TimeData,
TimePKData,
UniqueAnchor,
UUIDData,
UUIDDefaultData,
)
from .tests import register_tests
# A set of functions that can be used to recreate
# test data objects of various kinds.
# The save method is a raw base model save, to make
# sure that the data in the database matches the
# exact test case.
def data_create(pk, klass, data):
    """Create a klass instance with the given pk and data via a raw save."""
    obj = klass(id=pk)
    obj.data = data
    models.Model.save_base(obj, raw=True)
    return [obj]
def generic_create(pk, klass, data):
    """Create an instance whose data is data[0]; remaining items become tags."""
    obj = klass(id=pk)
    obj.data = data[0]
    models.Model.save_base(obj, raw=True)
    for tag_value in data[1:]:
        obj.tags.create(data=tag_value)
    return [obj]
def fk_create(pk, klass, data):
    """Create an instance with its FK assigned by raw id (data may be None)."""
    obj = klass(id=pk)
    obj.data_id = data
    models.Model.save_base(obj, raw=True)
    return [obj]
def m2m_create(pk, klass, data):
    """
    Create an instance with the given pk and assign the list of related pks
    in `data` to its many-to-many field.
    """
    instance = klass(id=pk)
    models.Model.save_base(instance, raw=True)
    # Related managers have no set_base() method; assign the m2m targets
    # with RelatedManager.set() once the instance row exists.
    instance.data.set(data)
    return [instance]
def im2m_create(pk, klass, data):
    """Create only the owning instance; intermediate rows are made by im_create."""
    instance = klass(id=pk)
    models.Model.save_base(instance, raw=True)
    return [instance]
def im_create(pk, klass, data):
    """Create an intermediate-model row from a dict of left/right (and extra)."""
    obj = klass(id=pk)
    obj.right_id = data["right"]
    obj.left_id = data["left"]
    if "extra" in data:
        obj.extra = data["extra"]
    models.Model.save_base(obj, raw=True)
    return [obj]
def o2o_create(pk, klass, data):
    # pk is unused (O2OData rows pass pk=None); the one-to-one relation is
    # assigned via the raw data_id attribute instead.
    instance = klass()
    instance.data_id = data
    models.Model.save_base(instance, raw=True)
    return [instance]
def pk_create(pk, klass, data):
    # pk is unused here; *PKData instances are later looked up by their data
    # value (see pk_compare).
    instance = klass()
    instance.data = data
    models.Model.save_base(instance, raw=True)
    return [instance]
def inherited_create(pk, klass, data):
    """Create a child instance plus the parent rows created along with it."""
    instance = klass(id=pk, **data)
    # This isn't a raw save because:
    # 1) we're testing inheritance, not field behavior, so none
    # of the field values need to be protected.
    # 2) saving the child class and having the parent created
    # automatically is easier than manually creating both.
    models.Model.save(instance)
    created = [instance]
    for klass in instance._meta.parents:
        created.append(klass.objects.get(id=pk))
    return created
# A set of functions that can be used to compare
# test data objects of various kinds
def data_compare(testcase, pk, klass, data):
    """Assert that the stored instance's data equals the expected value."""
    instance = klass.objects.get(id=pk)
    if klass == BinaryData and data is not None:
        # Normalize both sides to bytes so the comparison doesn't depend on
        # the exact binary type (e.g. memoryview vs bytes) round-tripped.
        testcase.assertEqual(
            bytes(data),
            bytes(instance.data),
            "Objects with PK=%d not equal; expected '%s' (%s), got '%s' (%s)"
            % (
                pk,
                repr(bytes(data)),
                type(data),
                repr(bytes(instance.data)),
                type(instance.data),
            ),
        )
    else:
        testcase.assertEqual(
            data,
            instance.data,
            "Objects with PK=%d not equal; expected '%s' (%s), got '%s' (%s)"
            % (
                pk,
                data,
                type(data),
                instance,
                type(instance.data),
            ),
        )
def generic_compare(testcase, pk, klass, data):
    """Check the instance's data and its tag values (in id order)."""
    obj = klass.objects.get(id=pk)
    testcase.assertEqual(data[0], obj.data)
    tag_values = [tag.data for tag in obj.tags.order_by("id")]
    testcase.assertEqual(data[1:], tag_values)
def fk_compare(testcase, pk, klass, data):
    """Check that the stored raw FK id matches the expected value."""
    obj = klass.objects.get(id=pk)
    testcase.assertEqual(data, obj.data_id)
def m2m_compare(testcase, pk, klass, data):
    """Check the related pks on the m2m field, in id order."""
    obj = klass.objects.get(id=pk)
    related_ids = [related.id for related in obj.data.order_by("id")]
    testcase.assertEqual(data, related_ids)
def im2m_compare(testcase, pk, klass, data):
    """Check only that the owning instance exists."""
    klass.objects.get(id=pk)
    # actually nothing else to check, the instance just should exist
def im_compare(testcase, pk, klass, data):
    """Check the left/right ids (and optional extra) on an intermediate row."""
    instance = klass.objects.get(id=pk)
    testcase.assertEqual(data["left"], instance.left_id)
    testcase.assertEqual(data["right"], instance.right_id)
    if "extra" in data:
        testcase.assertEqual(data["extra"], instance.extra)
    else:
        # No explicit value given: presumably the field default — see models.py.
        testcase.assertEqual("doesn't matter", instance.extra)
def o2o_compare(testcase, pk, klass, data):
    # Lookup is by data rather than pk — o2o rows are created with pk=None
    # (see o2o_create), so the pk argument here is unused.
    instance = klass.objects.get(data=data)
    testcase.assertEqual(data, instance.data_id)
def pk_compare(testcase, pk, klass, data):
    """Check an instance looked up by its data value (the pk argument is unused)."""
    obj = klass.objects.get(data=data)
    testcase.assertEqual(data, obj.data)
def inherited_compare(testcase, pk, klass, data):
    """Check every attribute supplied in data on the inherited instance."""
    obj = klass.objects.get(id=pk)
    for attr, expected in data.items():
        testcase.assertEqual(expected, getattr(obj, attr))
# Define some test helpers. Each has a pair of functions: one to create objects
# and one to make assertions against objects of a particular type.
TestHelper = namedtuple("TestHelper", ["create_object", "compare_object"])
data_obj = TestHelper(data_create, data_compare)
generic_obj = TestHelper(generic_create, generic_compare)
fk_obj = TestHelper(fk_create, fk_compare)
m2m_obj = TestHelper(m2m_create, m2m_compare)
im2m_obj = TestHelper(im2m_create, im2m_compare)
im_obj = TestHelper(im_create, im_compare)
o2o_obj = TestHelper(o2o_create, o2o_compare)
pk_obj = TestHelper(pk_create, pk_compare)
inherited_obj = TestHelper(inherited_create, inherited_compare)
# Shared UUID so the UUIDData, FKToUUID, and UUIDDefaultData rows below relate.
uuid_obj = uuid.uuid4()
test_data = [
    # Format: (test helper, PK value, Model Class, data)
    # "Post reference" rows point at anchor rows that appear earlier in this
    # list; "pre reference" rows point at anchors that appear later (forward
    # references the serializers must handle).
    (data_obj, 1, BinaryData, memoryview(b"\x05\xfd\x00")),
    (data_obj, 5, BooleanData, True),
    (data_obj, 6, BooleanData, False),
    (data_obj, 7, BooleanData, None),
    (data_obj, 10, CharData, "Test Char Data"),
    (data_obj, 11, CharData, ""),
    (data_obj, 12, CharData, "None"),
    (data_obj, 13, CharData, "null"),
    (data_obj, 14, CharData, "NULL"),
    # (We use something that will fit into a latin1 database encoding here,
    # because that is still the default used on many system setups.)
    (data_obj, 16, CharData, "\xa5"),
    (data_obj, 20, DateData, datetime.date(2006, 6, 16)),
    (data_obj, 21, DateData, None),
    (data_obj, 30, DateTimeData, datetime.datetime(2006, 6, 16, 10, 42, 37)),
    (data_obj, 31, DateTimeData, None),
    (data_obj, 40, EmailData, "hovercraft@example.com"),
    (data_obj, 42, EmailData, ""),
    (data_obj, 50, FileData, "file:///foo/bar/whiz.txt"),
    # (data_obj, 51, FileData, None),
    (data_obj, 52, FileData, ""),
    (data_obj, 60, FilePathData, "/foo/bar/whiz.txt"),
    (data_obj, 62, FilePathData, ""),
    (data_obj, 70, DecimalData, decimal.Decimal("12.345")),
    (data_obj, 71, DecimalData, decimal.Decimal("-12.345")),
    (data_obj, 72, DecimalData, decimal.Decimal("0.0")),
    (data_obj, 73, DecimalData, None),
    (data_obj, 74, FloatData, 12.345),
    (data_obj, 75, FloatData, -12.345),
    (data_obj, 76, FloatData, 0.0),
    (data_obj, 77, FloatData, None),
    (data_obj, 80, IntegerData, 123456789),
    (data_obj, 81, IntegerData, -123456789),
    (data_obj, 82, IntegerData, 0),
    (data_obj, 83, IntegerData, None),
    (data_obj, 95, GenericIPAddressData, "fe80:1424:2223:6cff:fe8a:2e8a:2151:abcd"),
    (data_obj, 96, GenericIPAddressData, None),
    (data_obj, 110, PositiveBigIntegerData, 9223372036854775807),
    (data_obj, 111, PositiveBigIntegerData, None),
    (data_obj, 120, PositiveIntegerData, 123456789),
    (data_obj, 121, PositiveIntegerData, None),
    (data_obj, 130, PositiveSmallIntegerData, 12),
    (data_obj, 131, PositiveSmallIntegerData, None),
    (data_obj, 140, SlugData, "this-is-a-slug"),
    (data_obj, 142, SlugData, ""),
    (data_obj, 150, SmallData, 12),
    (data_obj, 151, SmallData, -12),
    (data_obj, 152, SmallData, 0),
    (data_obj, 153, SmallData, None),
    (
        data_obj,
        160,
        TextData,
        """This is a long piece of text.
It contains line breaks.
Several of them.
The end.""",
    ),
    (data_obj, 161, TextData, ""),
    (data_obj, 170, TimeData, datetime.time(10, 42, 37)),
    (data_obj, 171, TimeData, None),
    (generic_obj, 200, GenericData, ["Generic Object 1", "tag1", "tag2"]),
    (generic_obj, 201, GenericData, ["Generic Object 2", "tag2", "tag3"]),
    (data_obj, 300, Anchor, "Anchor 1"),
    (data_obj, 301, Anchor, "Anchor 2"),
    (data_obj, 302, UniqueAnchor, "UAnchor 1"),
    (fk_obj, 400, FKData, 300),  # Post reference
    (fk_obj, 401, FKData, 500),  # Pre reference
    (fk_obj, 402, FKData, None),  # Empty reference
    (m2m_obj, 410, M2MData, []),  # Empty set
    (m2m_obj, 411, M2MData, [300, 301]),  # Post reference
    (m2m_obj, 412, M2MData, [500, 501]),  # Pre reference
    (m2m_obj, 413, M2MData, [300, 301, 500, 501]),  # Pre and Post reference
    (o2o_obj, None, O2OData, 300),  # Post reference
    (o2o_obj, None, O2OData, 500),  # Pre reference
    (fk_obj, 430, FKSelfData, 431),  # Pre reference
    (fk_obj, 431, FKSelfData, 430),  # Post reference
    (fk_obj, 432, FKSelfData, None),  # Empty reference
    (m2m_obj, 440, M2MSelfData, []),
    (m2m_obj, 441, M2MSelfData, []),
    (m2m_obj, 442, M2MSelfData, [440, 441]),
    (m2m_obj, 443, M2MSelfData, [445, 446]),
    (m2m_obj, 444, M2MSelfData, [440, 441, 445, 446]),
    (m2m_obj, 445, M2MSelfData, []),
    (m2m_obj, 446, M2MSelfData, []),
    (fk_obj, 450, FKDataToField, "UAnchor 1"),
    (fk_obj, 451, FKDataToField, "UAnchor 2"),
    (fk_obj, 452, FKDataToField, None),
    (fk_obj, 460, FKDataToO2O, 300),
    (im2m_obj, 470, M2MIntermediateData, None),
    # testing post- and pre-references and extra fields
    (im_obj, 480, Intermediate, {"right": 300, "left": 470}),
    (im_obj, 481, Intermediate, {"right": 300, "left": 490}),
    (im_obj, 482, Intermediate, {"right": 500, "left": 470}),
    (im_obj, 483, Intermediate, {"right": 500, "left": 490}),
    (im_obj, 484, Intermediate, {"right": 300, "left": 470, "extra": "extra"}),
    (im_obj, 485, Intermediate, {"right": 300, "left": 490, "extra": "extra"}),
    (im_obj, 486, Intermediate, {"right": 500, "left": 470, "extra": "extra"}),
    (im_obj, 487, Intermediate, {"right": 500, "left": 490, "extra": "extra"}),
    (im2m_obj, 490, M2MIntermediateData, []),
    (data_obj, 500, Anchor, "Anchor 3"),
    (data_obj, 501, Anchor, "Anchor 4"),
    (data_obj, 502, UniqueAnchor, "UAnchor 2"),
    (pk_obj, 601, BooleanPKData, True),
    (pk_obj, 602, BooleanPKData, False),
    (pk_obj, 610, CharPKData, "Test Char PKData"),
    (pk_obj, 620, DatePKData, datetime.date(2006, 6, 16)),
    (pk_obj, 630, DateTimePKData, datetime.datetime(2006, 6, 16, 10, 42, 37)),
    (pk_obj, 640, EmailPKData, "hovercraft@example.com"),
    (pk_obj, 660, FilePathPKData, "/foo/bar/whiz.txt"),
    (pk_obj, 670, DecimalPKData, decimal.Decimal("12.345")),
    (pk_obj, 671, DecimalPKData, decimal.Decimal("-12.345")),
    (pk_obj, 672, DecimalPKData, decimal.Decimal("0.0")),
    (pk_obj, 673, FloatPKData, 12.345),
    (pk_obj, 674, FloatPKData, -12.345),
    (pk_obj, 675, FloatPKData, 0.0),
    (pk_obj, 680, IntegerPKData, 123456789),
    (pk_obj, 681, IntegerPKData, -123456789),
    (pk_obj, 682, IntegerPKData, 0),
    (pk_obj, 695, GenericIPAddressPKData, "fe80:1424:2223:6cff:fe8a:2e8a:2151:abcd"),
    (pk_obj, 720, PositiveIntegerPKData, 123456789),
    (pk_obj, 730, PositiveSmallIntegerPKData, 12),
    (pk_obj, 740, SlugPKData, "this-is-a-slug"),
    (pk_obj, 750, SmallPKData, 12),
    (pk_obj, 751, SmallPKData, -12),
    (pk_obj, 752, SmallPKData, 0),
    (pk_obj, 770, TimePKData, datetime.time(10, 42, 37)),
    (pk_obj, 791, UUIDData, uuid_obj),
    (fk_obj, 792, FKToUUID, uuid_obj),
    (pk_obj, 793, UUIDDefaultData, uuid_obj),
    (data_obj, 800, AutoNowDateTimeData, datetime.datetime(2006, 6, 16, 10, 42, 37)),
    (data_obj, 810, ModifyingSaveData, 42),
    (inherited_obj, 900, InheritAbstractModel, {"child_data": 37, "parent_data": 42}),
    (
        inherited_obj,
        910,
        ExplicitInheritBaseModel,
        {"child_data": 37, "parent_data": 42},
    ),
    (inherited_obj, 920, InheritBaseModel, {"child_data": 37, "parent_data": 42}),
    (data_obj, 1000, BigIntegerData, 9223372036854775807),
    (data_obj, 1001, BigIntegerData, -9223372036854775808),
    (data_obj, 1002, BigIntegerData, 0),
    (data_obj, 1003, BigIntegerData, None),
    (data_obj, 1004, LengthModel, 0),
    (data_obj, 1005, LengthModel, 1),
]

# ImageData may be None when image support is unavailable — see models.py.
if ImageData is not None:
    test_data.extend(
        [
            (data_obj, 86, ImageData, "file:///foo/bar/whiz.png"),
            # (data_obj, 87, ImageData, None),
            (data_obj, 88, ImageData, ""),
        ]
    )
class SerializerDataTests(TestCase):
    # Test methods are attached dynamically by the register_tests() calls at
    # the bottom of this module; the module-level functions below are the
    # test bodies.
    pass
def assert_serializer(self, format, data):
    """
    Create the objects described by `data`, round-trip them through the given
    serializer format, and verify values survive and no extra rows appear.
    """
    # Create all the objects defined in the test data.
    objects = []
    for test_helper, pk, model, data_value in data:
        with connection.constraint_checks_disabled():
            objects.extend(test_helper.create_object(pk, model, data_value))
    # Get a count of the number of objects created for each model class.
    instance_counts = {}
    for _, _, model, _ in data:
        if model not in instance_counts:
            instance_counts[model] = model.objects.count()
    # Add the generic tagged objects to the object list.
    objects.extend(Tag.objects.all())
    # Serialize the test database.
    serialized_data = serializers.serialize(format, objects, indent=2)
    for obj in serializers.deserialize(format, serialized_data):
        obj.save()
    # Assert that the deserialized data is the same as the original source.
    for test_helper, pk, model, data_value in data:
        with self.subTest(model=model, data_value=data_value):
            test_helper.compare_object(self, pk, model, data_value)
    # Assert no new objects were created.
    for model, count in instance_counts.items():
        with self.subTest(model=model, count=count):
            self.assertEqual(count, model.objects.count())
def serializerTest(self, format):
    """Round-trip the full test_data set through the given format."""
    assert_serializer(self, format, test_data)
@skipUnlessDBFeature("allows_auto_pk_0")
def serializerTestPK0(self, format):
    """Round-trip an object whose auto primary key is 0, plus an FK to it."""
    # FK to an object with PK of 0. This won't work on MySQL without the
    # NO_AUTO_VALUE_ON_ZERO SQL mode since it won't let you create an object
    # with an autoincrement primary key of 0.
    data = [
        (data_obj, 0, Anchor, "Anchor 0"),
        (fk_obj, 1, FKData, 0),
    ]
    assert_serializer(self, format, data)
@skipIfDBFeature("interprets_empty_strings_as_nulls")
def serializerTestNullValueStingField(self, format):
    """Round-trip None values stored in string-like fields."""
    data = [
        (data_obj, 1, BinaryData, None),
        (data_obj, 2, CharData, None),
        (data_obj, 3, EmailData, None),
        (data_obj, 4, FilePathData, None),
        (data_obj, 5, SlugData, None),
        (data_obj, 6, TextData, None),
    ]
    assert_serializer(self, format, data)
@skipUnlessDBFeature("supports_index_on_text_field")
def serializerTestTextFieldPK(self, format):
    """Round-trip a model whose primary key is a multi-line text field."""
    data = [
        (
            pk_obj,
            1,
            TextPKData,
            """This is a long piece of text.
It contains line breaks.
Several of them.
The end.""",
        ),
    ]
    assert_serializer(self, format, data)
# Attach the scenarios above to SerializerDataTests — presumably one generated
# test method per available serializer format (see register_tests in .tests).
register_tests(SerializerDataTests, "test_%s_serializer", serializerTest)
register_tests(SerializerDataTests, "test_%s_serializer_pk_0", serializerTestPK0)
register_tests(
    SerializerDataTests,
    "test_%s_serializer_null_value_string_field",
    serializerTestNullValueStingField,
)
register_tests(
    SerializerDataTests,
    "test_%s_serializer_text_field_pk",
    serializerTestTextFieldPK,
)
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/serializers/test_natural.py | tests/serializers/test_natural.py | from django.core import serializers
from django.db import connection
from django.test import TestCase
from .models import (
Child,
FKAsPKNoNaturalKey,
FKDataNaturalKey,
FKToNaturalKeyWithNullable,
NaturalKeyAnchor,
NaturalKeyThing,
NaturalKeyWithNullableField,
NaturalPKWithDefault,
PostToOptOutSubclassUser,
SubclassNaturalKeyOptOutUser,
)
from .tests import register_tests
class NaturalKeySerializerTests(TestCase):
    # Test methods are presumably attached dynamically via register_tests()
    # (imported above); the module-level functions below are the test bodies.
    pass
def natural_key_serializer_test(self, format):
    """Round-trip FK data serialized with use_natural_foreign_keys."""
    # Create all the objects defined in the test data
    with connection.constraint_checks_disabled():
        objects = [
            NaturalKeyAnchor.objects.create(id=1100, data="Natural Key Anghor"),
            FKDataNaturalKey.objects.create(id=1101, data_id=1100),
            FKDataNaturalKey.objects.create(id=1102, data_id=None),
        ]
    # Serialize the test database
    serialized_data = serializers.serialize(
        format, objects, indent=2, use_natural_foreign_keys=True
    )
    for obj in serializers.deserialize(format, serialized_data):
        obj.save()
    # Assert that the deserialized data is the same
    # as the original source
    for obj in objects:
        instance = obj.__class__.objects.get(id=obj.pk)
        self.assertEqual(
            obj.data,
            instance.data,
            "Objects with PK=%d not equal; expected '%s' (%s), got '%s' (%s)"
            % (
                obj.pk,
                obj.data,
                type(obj.data),
                instance,
                type(instance.data),
            ),
        )
def natural_key_test(self, format):
    """
    With natural primary keys, deserialization restores the pk of rows still
    present in the database and leaves pk=None for rows that are missing.
    """
    book1 = {
        "data": "978-1590597255",
        "title": "The Definitive Guide to Django: Web Development Done Right",
    }
    book2 = {"data": "978-1590599969", "title": "Practical Django Projects"}
    # Create the books.
    adrian = NaturalKeyAnchor.objects.create(**book1)
    james = NaturalKeyAnchor.objects.create(**book2)
    # Serialize the books.
    string_data = serializers.serialize(
        format,
        NaturalKeyAnchor.objects.all(),
        indent=2,
        use_natural_foreign_keys=True,
        use_natural_primary_keys=True,
    )
    # Delete one book (to prove that the natural key generation will only
    # restore the primary keys of books found in the database via the
    # get_natural_key manager method).
    james.delete()
    # Deserialize and test.
    books = list(serializers.deserialize(format, string_data))
    self.assertCountEqual(
        [(book.object.title, book.object.pk) for book in books],
        [
            (book1["title"], adrian.pk),
            (book2["title"], None),
        ],
    )
def natural_pk_mti_test(self, format):
    """
    If serializing objects in a multi-table inheritance relationship using
    natural primary keys, the natural foreign key for the parent is output in
    the fields of the child so it's possible to relate the child to the parent
    when deserializing.
    """
    child_1 = Child.objects.create(parent_data="1", child_data="1")
    child_2 = Child.objects.create(parent_data="2", child_data="2")
    string_data = serializers.serialize(
        format,
        [child_1.parent_ptr, child_2.parent_ptr, child_2, child_1],
        use_natural_foreign_keys=True,
        use_natural_primary_keys=True,
    )
    child_1.delete()
    child_2.delete()
    for obj in serializers.deserialize(format, string_data):
        obj.save()
    # Both children must have been recreated from the serialized stream.
    children = Child.objects.all()
    self.assertEqual(len(children), 2)
    for child in children:
        # If it's possible to find the superclass from the subclass and it's
        # the correct superclass, it's working.
        self.assertEqual(child.child_data, child.parent_data)
def forward_ref_fk_test(self, format):
    """
    Mutually-referencing FKs deserialize correctly when forward references
    are deferred via handle_forward_references and saved afterwards.
    """
    t1 = NaturalKeyThing.objects.create(key="t1")
    t2 = NaturalKeyThing.objects.create(key="t2", other_thing=t1)
    t1.other_thing = t2
    t1.save()
    string_data = serializers.serialize(
        format,
        [t1, t2],
        use_natural_primary_keys=True,
        use_natural_foreign_keys=True,
    )
    NaturalKeyThing.objects.all().delete()
    objs_with_deferred_fields = []
    for obj in serializers.deserialize(
        format, string_data, handle_forward_references=True
    ):
        obj.save()
        if obj.deferred_fields:
            objs_with_deferred_fields.append(obj)
    # Resolve the deferred forward references now that all rows exist.
    for obj in objs_with_deferred_fields:
        obj.save_deferred_fields()
    t1 = NaturalKeyThing.objects.get(key="t1")
    t2 = NaturalKeyThing.objects.get(key="t2")
    self.assertEqual(t1.other_thing, t2)
    self.assertEqual(t2.other_thing, t1)
def forward_ref_fk_with_error_test(self, format):
    """
    A deferred FK whose target was never serialized raises
    DeserializationError when the deferred fields are saved.
    """
    t1 = NaturalKeyThing.objects.create(key="t1")
    t2 = NaturalKeyThing.objects.create(key="t2", other_thing=t1)
    t1.other_thing = t2
    t1.save()
    # Only t1 is serialized, so its reference to t2 can never resolve.
    string_data = serializers.serialize(
        format,
        [t1],
        use_natural_primary_keys=True,
        use_natural_foreign_keys=True,
    )
    NaturalKeyThing.objects.all().delete()
    objs_with_deferred_fields = []
    for obj in serializers.deserialize(
        format, string_data, handle_forward_references=True
    ):
        obj.save()
        if obj.deferred_fields:
            objs_with_deferred_fields.append(obj)
    obj = objs_with_deferred_fields[0]
    msg = "NaturalKeyThing matching query does not exist"
    with self.assertRaisesMessage(serializers.base.DeserializationError, msg):
        obj.save_deferred_fields()
def forward_ref_m2m_test(self, format):
    """
    Forward-referenced m2m targets deserialize correctly when deferred via
    handle_forward_references and saved afterwards.
    """
    t1 = NaturalKeyThing.objects.create(key="t1")
    t2 = NaturalKeyThing.objects.create(key="t2")
    t3 = NaturalKeyThing.objects.create(key="t3")
    t1.other_things.set([t2, t3])
    string_data = serializers.serialize(
        format,
        [t1, t2, t3],
        use_natural_primary_keys=True,
        use_natural_foreign_keys=True,
    )
    NaturalKeyThing.objects.all().delete()
    objs_with_deferred_fields = []
    for obj in serializers.deserialize(
        format, string_data, handle_forward_references=True
    ):
        obj.save()
        if obj.deferred_fields:
            objs_with_deferred_fields.append(obj)
    # Resolve the deferred forward references now that all rows exist.
    for obj in objs_with_deferred_fields:
        obj.save_deferred_fields()
    t1 = NaturalKeyThing.objects.get(key="t1")
    t2 = NaturalKeyThing.objects.get(key="t2")
    t3 = NaturalKeyThing.objects.get(key="t3")
    self.assertCountEqual(t1.other_things.all(), [t2, t3])
def forward_ref_m2m_with_error_test(self, format):
t1 = NaturalKeyThing.objects.create(key="t1")
t2 = NaturalKeyThing.objects.create(key="t2")
t3 = NaturalKeyThing.objects.create(key="t3")
t1.other_things.set([t2, t3])
t1.save()
string_data = serializers.serialize(
format,
[t1, t2],
use_natural_primary_keys=True,
use_natural_foreign_keys=True,
)
NaturalKeyThing.objects.all().delete()
objs_with_deferred_fields = []
for obj in serializers.deserialize(
format, string_data, handle_forward_references=True
):
obj.save()
if obj.deferred_fields:
objs_with_deferred_fields.append(obj)
obj = objs_with_deferred_fields[0]
msg = "NaturalKeyThing matching query does not exist"
with self.assertRaisesMessage(serializers.base.DeserializationError, msg):
obj.save_deferred_fields()
def pk_with_default(self, format):
"""
The deserializer works with natural keys when the primary key has a default
value.
"""
obj = NaturalPKWithDefault.objects.create(name="name")
string_data = serializers.serialize(
format,
NaturalPKWithDefault.objects.all(),
use_natural_foreign_keys=True,
use_natural_primary_keys=True,
)
objs = list(serializers.deserialize(format, string_data))
self.assertEqual(len(objs), 1)
self.assertEqual(objs[0].object.pk, obj.pk)
def fk_as_pk_natural_key_not_called(self, format):
"""
The deserializer doesn't rely on natural keys when a model has a custom
primary key that is a ForeignKey.
"""
o1 = NaturalKeyAnchor.objects.create(data="978-1590599969")
o2 = FKAsPKNoNaturalKey.objects.create(pk_fk=o1)
serialized_data = serializers.serialize(format, [o1, o2])
deserialized_objects = list(serializers.deserialize(format, serialized_data))
self.assertEqual(len(deserialized_objects), 2)
for obj in deserialized_objects:
self.assertEqual(obj.object.pk, o1.pk)
def natural_key_opt_out_test(self, format):
"""
When a subclass of AbstractBaseUser opts out of natural key serialization
by returning an empty tuple, both FK and M2M relations serialize as
integer PKs and can be deserialized without error.
"""
user1 = SubclassNaturalKeyOptOutUser.objects.create(email="user1@example.com")
user2 = SubclassNaturalKeyOptOutUser.objects.create(email="user2@example.com")
post = PostToOptOutSubclassUser.objects.create(
author=user1, title="Post 2 (Subclass Opt-out)"
)
post.subscribers.add(user1, user2)
user_data = serializers.serialize(format, [user1], use_natural_primary_keys=True)
post_data = serializers.serialize(format, [post], use_natural_foreign_keys=True)
list(serializers.deserialize(format, user_data))
deserialized_posts = list(serializers.deserialize(format, post_data))
post_obj = deserialized_posts[0].object
self.assertEqual(user1.email, post_obj.author.email)
self.assertEqual(
sorted([user1.email, user2.email]),
sorted(post_obj.subscribers.values_list("email", flat=True)),
)
def nullable_natural_key_fk_test(self, format):
target_with_none = NaturalKeyWithNullableField.objects.create(
name="test_none",
optional_id=None,
)
target_with_value = NaturalKeyWithNullableField.objects.create(
name="test_value",
optional_id="some_id",
)
fk_to_none = FKToNaturalKeyWithNullable.objects.create(
ref=target_with_none,
data="points_to_none",
)
fk_to_value = FKToNaturalKeyWithNullable.objects.create(
ref=target_with_value,
data="points_to_value",
)
objects = [target_with_none, target_with_value, fk_to_none, fk_to_value]
serialized = serializers.serialize(
format,
objects,
use_natural_foreign_keys=True,
use_natural_primary_keys=True,
)
objs = list(serializers.deserialize(format, serialized))
self.assertEqual(objs[2].object.ref_id, target_with_none.pk)
self.assertEqual(objs[3].object.ref_id, target_with_value.pk)
def nullable_natural_key_m2m_test(self, format):
target_with_none = NaturalKeyWithNullableField.objects.create(
name="test_none",
optional_id=None,
)
target_with_value = NaturalKeyWithNullableField.objects.create(
name="test_value",
optional_id="some_id",
)
m2m_obj = FKToNaturalKeyWithNullable.objects.create(data="m2m_test")
m2m_obj.refs.set([target_with_none, target_with_value])
objects = [target_with_none, target_with_value, m2m_obj]
serialized = serializers.serialize(
format,
objects,
use_natural_foreign_keys=True,
use_natural_primary_keys=True,
)
objs = list(serializers.deserialize(format, serialized))
self.assertCountEqual(
objs[2].m2m_data["refs"],
[target_with_none.pk, target_with_value.pk],
)
# Dynamically register tests for each serializer
register_tests(
NaturalKeySerializerTests,
"test_%s_natural_key_serializer",
natural_key_serializer_test,
)
register_tests(
NaturalKeySerializerTests, "test_%s_serializer_natural_keys", natural_key_test
)
register_tests(
NaturalKeySerializerTests, "test_%s_serializer_natural_pks_mti", natural_pk_mti_test
)
register_tests(
NaturalKeySerializerTests, "test_%s_forward_references_fks", forward_ref_fk_test
)
register_tests(
NaturalKeySerializerTests,
"test_%s_forward_references_fk_errors",
forward_ref_fk_with_error_test,
)
register_tests(
NaturalKeySerializerTests, "test_%s_forward_references_m2ms", forward_ref_m2m_test
)
register_tests(
NaturalKeySerializerTests,
"test_%s_forward_references_m2m_errors",
forward_ref_m2m_with_error_test,
)
register_tests(NaturalKeySerializerTests, "test_%s_pk_with_default", pk_with_default)
register_tests(
NaturalKeySerializerTests,
"test_%s_fk_as_pk_natural_key_not_called",
fk_as_pk_natural_key_not_called,
)
register_tests(
NaturalKeySerializerTests,
"test_%s_natural_key_opt_out",
natural_key_opt_out_test,
)
register_tests(
NaturalKeySerializerTests,
"test_%s_nullable_natural_key_fk",
nullable_natural_key_fk_test,
)
register_tests(
NaturalKeySerializerTests,
"test_%s_nullable_natural_key_m2m",
nullable_natural_key_m2m_test,
)
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/serializers/test_jsonl.py | tests/serializers/test_jsonl.py | import decimal
import json
import re
from django.core import serializers
from django.core.serializers.base import DeserializationError
from django.db import models
from django.test import TestCase, TransactionTestCase
from django.test.utils import isolate_apps
from .models import Score
from .tests import SerializersTestBase, SerializersTransactionTestBase
class JsonlSerializerTestCase(SerializersTestBase, TestCase):
serializer_name = "jsonl"
pkless_str = [
'{"pk": null,"model": "serializers.category","fields": {"name": "Reference"}}',
'{"model": "serializers.category","fields": {"name": "Non-fiction"}}',
]
pkless_str = "\n".join([s.replace("\n", "") for s in pkless_str])
mapping_ordering_str = (
'{"model": "serializers.article","pk": %(article_pk)s,'
'"fields": {'
'"author": %(author_pk)s,'
'"headline": "Poker has no place on ESPN",'
'"pub_date": "2006-06-16T11:00:00",'
'"categories": [%(first_category_pk)s,%(second_category_pk)s],'
'"meta_data": [],'
'"topics": []}}\n'
)
@staticmethod
def _validate_output(serial_str):
try:
for line in serial_str.split("\n"):
if line:
json.loads(line)
except Exception:
return False
else:
return True
@staticmethod
def _get_pk_values(serial_str):
serial_list = [json.loads(line) for line in serial_str.split("\n") if line]
return [obj_dict["pk"] for obj_dict in serial_list]
@staticmethod
def _get_field_values(serial_str, field_name):
serial_list = [json.loads(line) for line in serial_str.split("\n") if line]
return [
obj_dict["fields"][field_name]
for obj_dict in serial_list
if field_name in obj_dict["fields"]
]
def test_no_indentation(self):
s = serializers.jsonl.Serializer()
json_data = s.serialize([Score(score=5.0), Score(score=6.0)], indent=2)
for line in json_data.splitlines():
self.assertIsNone(re.search(r".+,\s*$", line))
@isolate_apps("serializers")
def test_custom_encoder(self):
class ScoreDecimal(models.Model):
score = models.DecimalField()
class CustomJSONEncoder(json.JSONEncoder):
def default(self, o):
if isinstance(o, decimal.Decimal):
return str(o)
return super().default(o)
s = serializers.jsonl.Serializer()
json_data = s.serialize(
[ScoreDecimal(score=decimal.Decimal(1.0))],
cls=CustomJSONEncoder,
)
self.assertIn('"fields": {"score": "1"}', json_data)
def test_json_deserializer_exception(self):
with self.assertRaises(DeserializationError):
for obj in serializers.deserialize("jsonl", """[{"pk":1}"""):
pass
def test_helpful_error_message_invalid_pk(self):
"""
If there is an invalid primary key, the error message contains the
model associated with it.
"""
test_string = (
'{"pk": "badpk","model": "serializers.player",'
'"fields": {"name": "Bob","rank": 1,"team": "Team"}}'
)
with self.assertRaisesMessage(
DeserializationError, "(serializers.player:pk=badpk)"
):
list(serializers.deserialize("jsonl", test_string))
def test_helpful_error_message_invalid_field(self):
"""
If there is an invalid field value, the error message contains the
model associated with it.
"""
test_string = (
'{"pk": "1","model": "serializers.player",'
'"fields": {"name": "Bob","rank": "invalidint","team": "Team"}}'
)
expected = "(serializers.player:pk=1) field_value was 'invalidint'"
with self.assertRaisesMessage(DeserializationError, expected):
list(serializers.deserialize("jsonl", test_string))
def test_helpful_error_message_for_foreign_keys(self):
"""
Invalid foreign keys with a natural key throws a helpful error message,
such as what the failing key is.
"""
test_string = (
'{"pk": 1, "model": "serializers.category",'
'"fields": {'
'"name": "Unknown foreign key",'
'"meta_data": ["doesnotexist","metadata"]}}'
)
key = ["doesnotexist", "metadata"]
expected = "(serializers.category:pk=1) field_value was '%r'" % key
with self.assertRaisesMessage(DeserializationError, expected):
list(serializers.deserialize("jsonl", test_string))
def test_helpful_error_message_for_many2many_non_natural(self):
"""
Invalid many-to-many keys throws a helpful error message.
"""
test_strings = [
"""{
"pk": 1,
"model": "serializers.article",
"fields": {
"author": 1,
"headline": "Unknown many to many",
"pub_date": "2014-09-15T10:35:00",
"categories": [1, "doesnotexist"]
}
}""",
"""{
"pk": 1,
"model": "serializers.author",
"fields": {"name": "Agnes"}
}""",
"""{
"pk": 1,
"model": "serializers.category",
"fields": {"name": "Reference"}
}""",
]
test_string = "\n".join([s.replace("\n", "") for s in test_strings])
expected = "(serializers.article:pk=1) field_value was 'doesnotexist'"
with self.assertRaisesMessage(DeserializationError, expected):
list(serializers.deserialize("jsonl", test_string))
def test_helpful_error_message_for_many2many_natural1(self):
"""
Invalid many-to-many keys throws a helpful error message where one of a
list of natural keys is invalid.
"""
test_strings = [
"""{
"pk": 1,
"model": "serializers.categorymetadata",
"fields": {"kind": "author","name": "meta1","value": "Agnes"}
}""",
"""{
"pk": 1,
"model": "serializers.article",
"fields": {
"author": 1,
"headline": "Unknown many to many",
"pub_date": "2014-09-15T10:35:00",
"meta_data": [
["author", "meta1"],
["doesnotexist", "meta1"],
["author", "meta1"]
]
}
}""",
"""{
"pk": 1,
"model": "serializers.author",
"fields": {"name": "Agnes"}
}""",
]
test_string = "\n".join([s.replace("\n", "") for s in test_strings])
key = ["doesnotexist", "meta1"]
expected = "(serializers.article:pk=1) field_value was '%r'" % key
with self.assertRaisesMessage(DeserializationError, expected):
for obj in serializers.deserialize("jsonl", test_string):
obj.save()
def test_helpful_error_message_for_many2many_natural2(self):
"""
Invalid many-to-many keys throws a helpful error message where a
natural many-to-many key has only a single value.
"""
test_strings = [
"""{
"pk": 1,
"model": "serializers.article",
"fields": {
"author": 1,
"headline": "Unknown many to many",
"pub_date": "2014-09-15T10:35:00",
"meta_data": [1, "doesnotexist"]
}
}""",
"""{
"pk": 1,
"model": "serializers.categorymetadata",
"fields": {"kind": "author","name": "meta1","value": "Agnes"}
}""",
"""{
"pk": 1,
"model": "serializers.author",
"fields": {"name": "Agnes"}
}""",
]
test_string = "\n".join([s.replace("\n", "") for s in test_strings])
expected = "(serializers.article:pk=1) field_value was 'doesnotexist'"
with self.assertRaisesMessage(DeserializationError, expected):
for obj in serializers.deserialize("jsonl", test_string, ignore=False):
obj.save()
def test_helpful_error_message_for_many2many_not_iterable(self):
"""
Not iterable many-to-many field value throws a helpful error message.
"""
test_string = (
'{"pk": 1,"model": "serializers.m2mdata","fields": {"data": null}}'
)
expected = "(serializers.m2mdata:pk=1) field_value was 'None'"
with self.assertRaisesMessage(DeserializationError, expected):
next(serializers.deserialize("jsonl", test_string, ignore=False))
class JsonSerializerTransactionTestCase(
SerializersTransactionTestBase, TransactionTestCase
):
serializer_name = "jsonl"
fwd_ref_str = [
"""{
"pk": 1,
"model": "serializers.article",
"fields": {
"headline": "Forward references pose no problem",
"pub_date": "2006-06-16T15:00:00",
"categories": [1],
"author": 1
}
}""",
"""{
"pk": 1,
"model": "serializers.category",
"fields": {"name": "Reference"}
}""",
"""{
"pk": 1,
"model": "serializers.author",
"fields": {"name": "Agnes"}
}""",
]
fwd_ref_str = "\n".join([s.replace("\n", "") for s in fwd_ref_str])
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/serializers/test_json.py | tests/serializers/test_json.py | import datetime
import decimal
import json
import re
from django.core import serializers
from django.core.serializers.base import DeserializationError
from django.core.serializers.json import DjangoJSONEncoder
from django.db import models
from django.test import SimpleTestCase, TestCase, TransactionTestCase
from django.test.utils import isolate_apps
from django.utils.translation import gettext_lazy, override
from .models import Score
from .tests import SerializersTestBase, SerializersTransactionTestBase
class JsonSerializerTestCase(SerializersTestBase, TestCase):
serializer_name = "json"
pkless_str = """[
{
"pk": null,
"model": "serializers.category",
"fields": {"name": "Reference"}
}, {
"model": "serializers.category",
"fields": {"name": "Non-fiction"}
}]"""
mapping_ordering_str = """[
{
"model": "serializers.article",
"pk": %(article_pk)s,
"fields": {
"author": %(author_pk)s,
"headline": "Poker has no place on ESPN",
"pub_date": "2006-06-16T11:00:00",
"categories": [
%(first_category_pk)s,
%(second_category_pk)s
],
"meta_data": [],
"topics": []
}
}
]
"""
@staticmethod
def _validate_output(serial_str):
try:
json.loads(serial_str)
except Exception:
return False
else:
return True
@staticmethod
def _get_pk_values(serial_str):
serial_list = json.loads(serial_str)
return [obj_dict["pk"] for obj_dict in serial_list]
@staticmethod
def _get_field_values(serial_str, field_name):
serial_list = json.loads(serial_str)
return [
obj_dict["fields"][field_name]
for obj_dict in serial_list
if field_name in obj_dict["fields"]
]
def test_indentation_whitespace(self):
s = serializers.json.Serializer()
json_data = s.serialize([Score(score=5.0), Score(score=6.0)], indent=2)
for line in json_data.splitlines():
if re.search(r".+,\s*$", line):
self.assertEqual(line, line.rstrip())
@isolate_apps("serializers")
def test_custom_encoder(self):
class ScoreDecimal(models.Model):
score = models.DecimalField()
class CustomJSONEncoder(json.JSONEncoder):
def default(self, o):
if isinstance(o, decimal.Decimal):
return str(o)
return super().default(o)
s = serializers.json.Serializer()
json_data = s.serialize(
[ScoreDecimal(score=decimal.Decimal(1.0))], cls=CustomJSONEncoder
)
self.assertIn('"fields": {"score": "1"}', json_data)
def test_json_deserializer_exception(self):
with self.assertRaises(DeserializationError):
for obj in serializers.deserialize("json", """[{"pk":1}"""):
pass
def test_helpful_error_message_invalid_pk(self):
"""
If there is an invalid primary key, the error message should contain
the model associated with it.
"""
test_string = """[{
"pk": "badpk",
"model": "serializers.player",
"fields": {
"name": "Bob",
"rank": 1,
"team": "Team"
}
}]"""
with self.assertRaisesMessage(
DeserializationError, "(serializers.player:pk=badpk)"
):
list(serializers.deserialize("json", test_string))
def test_helpful_error_message_invalid_field(self):
"""
If there is an invalid field value, the error message should contain
the model associated with it.
"""
test_string = """[{
"pk": "1",
"model": "serializers.player",
"fields": {
"name": "Bob",
"rank": "invalidint",
"team": "Team"
}
}]"""
expected = "(serializers.player:pk=1) field_value was 'invalidint'"
with self.assertRaisesMessage(DeserializationError, expected):
list(serializers.deserialize("json", test_string))
def test_helpful_error_message_for_foreign_keys(self):
"""
Invalid foreign keys with a natural key should throw a helpful error
message, such as what the failing key is.
"""
test_string = """[{
"pk": 1,
"model": "serializers.category",
"fields": {
"name": "Unknown foreign key",
"meta_data": [
"doesnotexist",
"metadata"
]
}
}]"""
key = ["doesnotexist", "metadata"]
expected = "(serializers.category:pk=1) field_value was '%r'" % key
with self.assertRaisesMessage(DeserializationError, expected):
list(serializers.deserialize("json", test_string))
def test_helpful_error_message_for_many2many_non_natural(self):
"""
Invalid many-to-many keys should throw a helpful error message.
"""
test_string = """[{
"pk": 1,
"model": "serializers.article",
"fields": {
"author": 1,
"headline": "Unknown many to many",
"pub_date": "2014-09-15T10:35:00",
"categories": [1, "doesnotexist"]
}
}, {
"pk": 1,
"model": "serializers.author",
"fields": {
"name": "Agnes"
}
}, {
"pk": 1,
"model": "serializers.category",
"fields": {
"name": "Reference"
}
}]"""
expected = "(serializers.article:pk=1) field_value was 'doesnotexist'"
with self.assertRaisesMessage(DeserializationError, expected):
list(serializers.deserialize("json", test_string))
def test_helpful_error_message_for_many2many_natural1(self):
"""
Invalid many-to-many keys should throw a helpful error message.
This tests the code path where one of a list of natural keys is
invalid.
"""
test_string = """[{
"pk": 1,
"model": "serializers.categorymetadata",
"fields": {
"kind": "author",
"name": "meta1",
"value": "Agnes"
}
}, {
"pk": 1,
"model": "serializers.article",
"fields": {
"author": 1,
"headline": "Unknown many to many",
"pub_date": "2014-09-15T10:35:00",
"meta_data": [
["author", "meta1"],
["doesnotexist", "meta1"],
["author", "meta1"]
]
}
}, {
"pk": 1,
"model": "serializers.author",
"fields": {
"name": "Agnes"
}
}]"""
key = ["doesnotexist", "meta1"]
expected = "(serializers.article:pk=1) field_value was '%r'" % key
with self.assertRaisesMessage(DeserializationError, expected):
for obj in serializers.deserialize("json", test_string):
obj.save()
def test_helpful_error_message_for_many2many_natural2(self):
"""
Invalid many-to-many keys should throw a helpful error message. This
tests the code path where a natural many-to-many key has only a single
value.
"""
test_string = """[{
"pk": 1,
"model": "serializers.article",
"fields": {
"author": 1,
"headline": "Unknown many to many",
"pub_date": "2014-09-15T10:35:00",
"meta_data": [1, "doesnotexist"]
}
}, {
"pk": 1,
"model": "serializers.categorymetadata",
"fields": {
"kind": "author",
"name": "meta1",
"value": "Agnes"
}
}, {
"pk": 1,
"model": "serializers.author",
"fields": {
"name": "Agnes"
}
}]"""
expected = "(serializers.article:pk=1) field_value was 'doesnotexist'"
with self.assertRaisesMessage(DeserializationError, expected):
for obj in serializers.deserialize("json", test_string, ignore=False):
obj.save()
def test_helpful_error_message_for_many2many_not_iterable(self):
"""
Not iterable many-to-many field value throws a helpful error message.
"""
test_string = """[{
"pk": 1,
"model": "serializers.m2mdata",
"fields": {"data": null}
}]"""
expected = "(serializers.m2mdata:pk=1) field_value was 'None'"
with self.assertRaisesMessage(DeserializationError, expected):
next(serializers.deserialize("json", test_string, ignore=False))
class JsonSerializerTransactionTestCase(
SerializersTransactionTestBase, TransactionTestCase
):
serializer_name = "json"
fwd_ref_str = """[
{
"pk": 1,
"model": "serializers.article",
"fields": {
"headline": "Forward references pose no problem",
"pub_date": "2006-06-16T15:00:00",
"categories": [1],
"author": 1
}
},
{
"pk": 1,
"model": "serializers.category",
"fields": {
"name": "Reference"
}
},
{
"pk": 1,
"model": "serializers.author",
"fields": {
"name": "Agnes"
}
}]"""
class DjangoJSONEncoderTests(SimpleTestCase):
def test_lazy_string_encoding(self):
self.assertEqual(
json.dumps({"lang": gettext_lazy("French")}, cls=DjangoJSONEncoder),
'{"lang": "French"}',
)
with override("fr"):
self.assertEqual(
json.dumps({"lang": gettext_lazy("French")}, cls=DjangoJSONEncoder),
'{"lang": "Fran\\u00e7ais"}',
)
def test_timedelta(self):
duration = datetime.timedelta(days=1, hours=2, seconds=3)
self.assertEqual(
json.dumps({"duration": duration}, cls=DjangoJSONEncoder),
'{"duration": "P1DT02H00M03S"}',
)
duration = datetime.timedelta(0)
self.assertEqual(
json.dumps({"duration": duration}, cls=DjangoJSONEncoder),
'{"duration": "P0DT00H00M00S"}',
)
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/serializers/test_yaml.py | tests/serializers/test_yaml.py | import importlib
import unittest
from io import StringIO
from django.core import management, serializers
from django.core.serializers.base import DeserializationError
from django.test import SimpleTestCase, TestCase, TransactionTestCase
from .models import Author
from .tests import SerializersTestBase, SerializersTransactionTestBase
try:
import yaml
HAS_YAML = True
except ImportError:
HAS_YAML = False
YAML_IMPORT_ERROR_MESSAGE = r"No module named yaml"
class YamlImportModuleMock:
"""Provides a wrapped import_module function to simulate yaml ImportError
In order to run tests that verify the behavior of the YAML serializer
when run on a system that has yaml installed (like the django CI server),
mock import_module, so that it raises an ImportError when the yaml
serializer is being imported. The importlib.import_module() call is
being made in the serializers.register_serializer().
Refs: #12756
"""
def __init__(self):
self._import_module = importlib.import_module
def import_module(self, module_path):
if module_path == serializers.BUILTIN_SERIALIZERS["yaml"]:
raise ImportError(YAML_IMPORT_ERROR_MESSAGE)
return self._import_module(module_path)
class NoYamlSerializerTestCase(SimpleTestCase):
"""Not having pyyaml installed provides a misleading error
Refs: #12756
"""
@classmethod
def setUpClass(cls):
"""Removes imported yaml and stubs importlib.import_module"""
super().setUpClass()
cls._import_module_mock = YamlImportModuleMock()
importlib.import_module = cls._import_module_mock.import_module
# clear out cached serializers to emulate yaml missing
serializers._serializers = {}
@classmethod
def tearDownClass(cls):
"""Puts yaml back if necessary"""
super().tearDownClass()
importlib.import_module = cls._import_module_mock._import_module
# clear out cached serializers to clean out BadSerializer instances
serializers._serializers = {}
def test_serializer_pyyaml_error_message(self):
"""Using yaml serializer without pyyaml raises ImportError"""
jane = Author(name="Jane")
with self.assertRaises(ImportError):
serializers.serialize("yaml", [jane])
def test_deserializer_pyyaml_error_message(self):
"""Using yaml deserializer without pyyaml raises ImportError"""
with self.assertRaises(ImportError):
serializers.deserialize("yaml", "")
def test_dumpdata_pyyaml_error_message(self):
"""Calling dumpdata produces an error when yaml package missing"""
with self.assertRaisesMessage(
management.CommandError, YAML_IMPORT_ERROR_MESSAGE
):
management.call_command("dumpdata", format="yaml")
@unittest.skipUnless(HAS_YAML, "No yaml library detected")
class YamlSerializerTestCase(SerializersTestBase, TestCase):
serializer_name = "yaml"
pkless_str = """- model: serializers.category
pk: null
fields:
name: Reference
- model: serializers.category
fields:
name: Non-fiction"""
mapping_ordering_str = (
"""- model: serializers.article
pk: %(article_pk)s
fields:
author: %(author_pk)s
headline: Poker has no place on ESPN
pub_date: 2006-06-16 11:00:00
categories:"""
+ (
" [%(first_category_pk)s, %(second_category_pk)s]"
if HAS_YAML and yaml.__version__ < "5.1"
else "\n - %(first_category_pk)s\n - %(second_category_pk)s"
)
+ """
meta_data: []
topics: []
"""
)
@staticmethod
def _validate_output(serial_str):
try:
yaml.safe_load(StringIO(serial_str))
except Exception:
return False
else:
return True
@staticmethod
def _get_pk_values(serial_str):
ret_list = []
stream = StringIO(serial_str)
for obj_dict in yaml.safe_load(stream):
ret_list.append(obj_dict["pk"])
return ret_list
@staticmethod
def _get_field_values(serial_str, field_name):
ret_list = []
stream = StringIO(serial_str)
for obj_dict in yaml.safe_load(stream):
if "fields" in obj_dict and field_name in obj_dict["fields"]:
field_value = obj_dict["fields"][field_name]
# yaml.safe_load will return non-string objects for some
# of the fields we are interested in, this ensures that
# everything comes back as a string
if isinstance(field_value, str):
ret_list.append(field_value)
else:
ret_list.append(str(field_value))
return ret_list
def test_yaml_deserializer_exception(self):
with self.assertRaises(DeserializationError):
for obj in serializers.deserialize("yaml", "{"):
pass
@unittest.skipUnless(HAS_YAML, "No yaml library detected")
class YamlSerializerTransactionTestCase(
SerializersTransactionTestBase, TransactionTestCase
):
serializer_name = "yaml"
fwd_ref_str = """- model: serializers.article
pk: 1
fields:
headline: Forward references pose no problem
pub_date: 2006-06-16 15:00:00
categories: [1]
author: 1
- model: serializers.category
pk: 1
fields:
name: Reference
- model: serializers.author
pk: 1
fields:
name: Agnes"""
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/serializers/test_deserialization.py | tests/serializers/test_deserialization.py | import json
import time
import unittest
from django.core.serializers.base import DeserializationError, DeserializedObject
from django.core.serializers.json import Deserializer as JsonDeserializer
from django.core.serializers.jsonl import Deserializer as JsonlDeserializer
from django.core.serializers.python import Deserializer
from django.core.serializers.xml_serializer import Deserializer as XMLDeserializer
from django.db import models
from django.test import SimpleTestCase
from django.test.utils import garbage_collect
from .models import Author
try:
import yaml # NOQA
HAS_YAML = True
except ImportError:
HAS_YAML = False
class TestDeserializer(SimpleTestCase):
def setUp(self):
self.object_list = [
{"pk": 1, "model": "serializers.author", "fields": {"name": "Jane"}},
{"pk": 2, "model": "serializers.author", "fields": {"name": "Joe"}},
]
self.deserializer = Deserializer(self.object_list)
self.jane = Author(name="Jane", pk=1)
self.joe = Author(name="Joe", pk=2)
def test_deserialized_object_repr(self):
deserial_obj = DeserializedObject(obj=self.jane)
self.assertEqual(
repr(deserial_obj), "<DeserializedObject: serializers.Author(pk=1)>"
)
def test_next_functionality(self):
first_item = next(self.deserializer)
self.assertEqual(first_item.object, self.jane)
second_item = next(self.deserializer)
self.assertEqual(second_item.object, self.joe)
with self.assertRaises(StopIteration):
next(self.deserializer)
def test_invalid_model_identifier(self):
invalid_object_list = [
{"pk": 1, "model": "serializers.author2", "fields": {"name": "Jane"}}
]
self.deserializer = Deserializer(invalid_object_list)
with self.assertRaises(DeserializationError):
next(self.deserializer)
deserializer = Deserializer(object_list=[])
with self.assertRaises(StopIteration):
next(deserializer)
def test_custom_deserializer(self):
class CustomDeserializer(Deserializer):
@staticmethod
def _get_model_from_node(model_identifier):
return Author
deserializer = CustomDeserializer(self.object_list)
result = next(iter(deserializer))
deserialized_object = result.object
self.assertEqual(
self.jane,
deserialized_object,
)
def test_empty_object_list(self):
deserializer = Deserializer(object_list=[])
with self.assertRaises(StopIteration):
next(deserializer)
def test_json_bytes_input(self):
test_string = json.dumps(self.object_list)
stream = test_string.encode("utf-8")
deserializer = JsonDeserializer(stream_or_string=stream)
first_item = next(deserializer)
second_item = next(deserializer)
self.assertEqual(first_item.object, self.jane)
self.assertEqual(second_item.object, self.joe)
def test_jsonl_bytes_input(self):
test_string = """
{"pk": 1, "model": "serializers.author", "fields": {"name": "Jane"}}
{"pk": 2, "model": "serializers.author", "fields": {"name": "Joe"}}
{"pk": 3, "model": "serializers.author", "fields": {"name": "John"}}
{"pk": 4, "model": "serializers.author", "fields": {"name": "Smith"}}"""
stream = test_string.encode("utf-8")
deserializer = JsonlDeserializer(stream_or_string=stream)
first_item = next(deserializer)
second_item = next(deserializer)
self.assertEqual(first_item.object, self.jane)
self.assertEqual(second_item.object, self.joe)
@unittest.skipUnless(HAS_YAML, "No yaml library detected")
def test_yaml_bytes_input(self):
from django.core.serializers.pyyaml import Deserializer as YamlDeserializer
test_string = """- pk: 1
model: serializers.author
fields:
name: Jane
- pk: 2
model: serializers.author
fields:
name: Joe
- pk: 3
model: serializers.author
fields:
name: John
- pk: 4
model: serializers.author
fields:
name: Smith
"""
stream = test_string.encode("utf-8")
deserializer = YamlDeserializer(stream_or_string=stream)
first_item = next(deserializer)
second_item = next(deserializer)
self.assertEqual(first_item.object, self.jane)
self.assertEqual(second_item.object, self.joe)
def test_crafted_xml_performance(self):
"""The time to process invalid inputs is not quadratic."""
def build_crafted_xml(depth, leaf_text_len):
nested_open = "<nested>" * depth
nested_close = "</nested>" * depth
leaf = "x" * leaf_text_len
field_content = f"{nested_open}{leaf}{nested_close}"
return f"""
<django-objects version="1.0">
<object model="contenttypes.contenttype" pk="1">
<field name="app_label">{field_content}</field>
<field name="model">m</field>
</object>
</django-objects>
"""
def deserialize(crafted_xml):
iterator = XMLDeserializer(crafted_xml)
garbage_collect()
start_time = time.perf_counter()
result = list(iterator)
end_time = time.perf_counter()
self.assertEqual(len(result), 1)
self.assertIsInstance(result[0].object, models.Model)
return end_time - start_time
def assertFactor(label, params, factor=2):
factors = []
prev_time = None
for depth, length in params:
crafted_xml = build_crafted_xml(depth, length)
elapsed = deserialize(crafted_xml)
if prev_time is not None:
factors.append(elapsed / prev_time)
prev_time = elapsed
with self.subTest(label):
# Assert based on the average factor to reduce test flakiness.
self.assertLessEqual(sum(factors) / len(factors), factor)
assertFactor(
"varying depth, varying length",
[(50, 2000), (100, 4000), (200, 8000), (400, 16000), (800, 32000)],
2,
)
assertFactor("constant depth, varying length", [(100, 1), (100, 1000)], 2)
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/serializers/test_xml.py | tests/serializers/test_xml.py | from xml.dom import minidom
from django.core import serializers
from django.core.serializers.xml_serializer import DTDForbidden
from django.test import TestCase, TransactionTestCase
from .tests import SerializersTestBase, SerializersTransactionTestBase
class XmlSerializerTestCase(SerializersTestBase, TestCase):
    """XML instantiation of the shared serializer suite, plus XML-only tests."""

    serializer_name = "xml"
    pkless_str = """<?xml version="1.0" encoding="utf-8"?>
<django-objects version="1.0">
  <object model="serializers.category">
    <field type="CharField" name="name">Reference</field>
  </object>
  <object model="serializers.category">
    <field type="CharField" name="name">Non-fiction</field>
  </object>
</django-objects>"""
    mapping_ordering_str = """<?xml version="1.0" encoding="utf-8"?>
<django-objects version="1.0">
  <object model="serializers.article" pk="%(article_pk)s">
    <field name="author" rel="ManyToOneRel" to="serializers.author">%(author_pk)s</field>
    <field name="headline" type="CharField">Poker has no place on ESPN</field>
    <field name="pub_date" type="DateTimeField">2006-06-16T11:00:00</field>
    <field name="categories" rel="ManyToManyRel" to="serializers.category"><object pk="%(first_category_pk)s"></object><object pk="%(second_category_pk)s"></object></field>
    <field name="meta_data" rel="ManyToManyRel" to="serializers.categorymetadata"></field>
    <field name="topics" rel="ManyToManyRel" to="serializers.topic"></field>
  </object>
</django-objects>"""  # NOQA

    @staticmethod
    def _validate_output(serial_str):
        # Output is valid if minidom can parse it at all.
        try:
            minidom.parseString(serial_str)
        except Exception:
            return False
        else:
            return True

    @staticmethod
    def _get_pk_values(serial_str):
        # Collect the pk attribute of every <object> element, in order.
        ret_list = []
        dom = minidom.parseString(serial_str)
        fields = dom.getElementsByTagName("object")
        for field in fields:
            ret_list.append(field.getAttribute("pk"))
        return ret_list

    @staticmethod
    def _get_field_values(serial_str, field_name):
        # Concatenated text content of every <field> named `field_name`.
        ret_list = []
        dom = minidom.parseString(serial_str)
        fields = dom.getElementsByTagName("field")
        for field in fields:
            if field.getAttribute("name") == field_name:
                temp = []
                for child in field.childNodes:
                    temp.append(child.nodeValue)
                ret_list.append("".join(temp))
        return ret_list

    def test_control_char_failure(self):
        """
        Serializing control characters with XML should fail as those characters
        are not supported in the XML 1.0 standard (except HT, LF, CR).
        """
        self.a1.headline = "This contains \u0001 control \u0011 chars"
        msg = "Article.headline (pk:%s) contains unserializable characters" % self.a1.pk
        with self.assertRaisesMessage(ValueError, msg):
            serializers.serialize(self.serializer_name, [self.a1])
        self.a1.headline = "HT \u0009, LF \u000a, and CR \u000d are allowed"
        self.assertIn(
            "HT \t, LF \n, and CR \r are allowed",
            serializers.serialize(self.serializer_name, [self.a1]),
        )

    def test_no_dtd(self):
        """
        The XML deserializer shouldn't allow a DTD.

        This is the most straightforward way to prevent all entity definitions
        and avoid both external entities and entity-expansion attacks.
        """
        xml = (
            '<?xml version="1.0" standalone="no"?>'
            '<!DOCTYPE example SYSTEM "http://example.com/example.dtd">'
        )
        with self.assertRaises(DTDForbidden):
            next(serializers.deserialize("xml", xml))
class XmlSerializerTransactionTestCase(
    SerializersTransactionTestBase, TransactionTestCase
):
    """Forward-reference handling (shared base) driven with XML input."""

    serializer_name = "xml"
    # Article references author/category objects that appear AFTER it.
    fwd_ref_str = """<?xml version="1.0" encoding="utf-8"?>
<django-objects version="1.0">
    <object pk="1" model="serializers.article">
        <field to="serializers.author" name="author" rel="ManyToOneRel">1</field>
        <field type="CharField" name="headline">Forward references pose no problem</field>
        <field type="DateTimeField" name="pub_date">2006-06-16T15:00:00</field>
        <field to="serializers.category" name="categories" rel="ManyToManyRel">
            <object pk="1"></object>
        </field>
        <field to="serializers.categorymetadata" name="meta_data" rel="ManyToManyRel"></field>
    </object>
    <object pk="1" model="serializers.author">
        <field type="CharField" name="name">Agnes</field>
    </object>
    <object pk="1" model="serializers.category">
        <field type="CharField" name="name">Reference</field></object>
</django-objects>"""  # NOQA
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/serializers/__init__.py | tests/serializers/__init__.py | python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false | |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/serializers/tests.py | tests/serializers/tests.py | from datetime import datetime
from functools import partialmethod
from io import StringIO
from unittest import mock, skipIf
from django.core import serializers
from django.core.serializers import SerializerDoesNotExist
from django.core.serializers.base import ProgressBar
from django.db import connection, transaction
from django.db.models import Prefetch
from django.http import HttpResponse
from django.test import SimpleTestCase, override_settings, skipUnlessDBFeature
from django.test.utils import Approximate
from .models import (
Actor,
Article,
Author,
AuthorProfile,
BaseModel,
Category,
CategoryMetaData,
Child,
ComplexModel,
Movie,
Player,
ProxyBaseModel,
ProxyProxyBaseModel,
Score,
Team,
)
@override_settings(
    SERIALIZATION_MODULES={
        "json2": "django.core.serializers.json",
    }
)
class SerializerRegistrationTests(SimpleTestCase):
    """Registration/unregistration of serializer formats."""

    def setUp(self):
        # Swap in an empty registry so each test exercises (re)population;
        # the real registry is restored in tearDown.
        self.old_serializers = serializers._serializers
        serializers._serializers = {}

    def tearDown(self):
        serializers._serializers = self.old_serializers

    def test_register(self):
        "Registering a new serializer populates the full registry. Refs #14823"
        serializers.register_serializer("json3", "django.core.serializers.json")

        public_formats = serializers.get_public_serializer_formats()
        self.assertIn("json3", public_formats)
        self.assertIn("json2", public_formats)
        self.assertIn("xml", public_formats)

    def test_unregister(self):
        """
        Unregistering a serializer doesn't cause the registry to be
        repopulated.
        """
        serializers.unregister_serializer("xml")
        serializers.register_serializer("json3", "django.core.serializers.json")

        public_formats = serializers.get_public_serializer_formats()
        self.assertNotIn("xml", public_formats)
        self.assertIn("json3", public_formats)

    def test_unregister_unknown_serializer(self):
        # Unregistering a format that was never registered is an error.
        with self.assertRaises(SerializerDoesNotExist):
            serializers.unregister_serializer("nonsense")

    def test_builtin_serializers(self):
        "Requesting a list of serializer formats populates the registry"
        all_formats = set(serializers.get_serializer_formats())
        public_formats = set(serializers.get_public_serializer_formats())

        self.assertIn("xml", all_formats)
        self.assertIn("xml", public_formats)
        self.assertIn("json2", all_formats)
        self.assertIn("json2", public_formats)
        self.assertIn("python", all_formats)
        # "python" is internal-only and must not be publicly advertised.
        self.assertNotIn("python", public_formats)

    def test_get_unknown_serializer(self):
        """
        #15889: get_serializer('nonsense') raises a SerializerDoesNotExist
        """
        with self.assertRaises(SerializerDoesNotExist):
            serializers.get_serializer("nonsense")

        with self.assertRaises(KeyError):
            serializers.get_serializer("nonsense")

        # SerializerDoesNotExist is instantiated with the nonexistent format
        with self.assertRaisesMessage(SerializerDoesNotExist, "nonsense"):
            serializers.get_serializer("nonsense")

    def test_get_unknown_deserializer(self):
        # Same contract as get_serializer for the deserializer lookup.
        with self.assertRaises(SerializerDoesNotExist):
            serializers.get_deserializer("nonsense")
class SerializersTestBase:
    """
    Format-agnostic serializer test mixin.

    Concrete subclasses set ``serializer_name`` and provide the
    ``_validate_output``/``_get_pk_values``/``_get_field_values`` helpers
    plus the sample strings (``pkless_str``, ``mapping_ordering_str``).
    """

    serializer_name = None  # Set by subclasses to the serialization format name

    @classmethod
    def setUpTestData(cls):
        # Two authors with one article each; both articles share the Op-Ed
        # category so m2m and prefetch behavior can be exercised.
        sports = Category.objects.create(name="Sports")
        music = Category.objects.create(name="Music")
        op_ed = Category.objects.create(name="Op-Ed")

        cls.joe = Author.objects.create(name="Joe")
        cls.jane = Author.objects.create(name="Jane")

        cls.a1 = Article(
            author=cls.jane,
            headline="Poker has no place on ESPN",
            pub_date=datetime(2006, 6, 16, 11, 00),
        )
        cls.a1.save()
        cls.a1.categories.set([sports, op_ed])

        cls.a2 = Article(
            author=cls.joe,
            headline="Time to reform copyright",
            pub_date=datetime(2006, 6, 16, 13, 00, 11, 345),
        )
        cls.a2.save()
        cls.a2.categories.set([music, op_ed])

    def test_serialize(self):
        """Basic serialization works."""
        serial_str = serializers.serialize(self.serializer_name, Article.objects.all())
        self.assertTrue(self._validate_output(serial_str))

    def test_serializer_roundtrip(self):
        """Serialized content can be deserialized."""
        serial_str = serializers.serialize(self.serializer_name, Article.objects.all())
        models = list(serializers.deserialize(self.serializer_name, serial_str))
        self.assertEqual(len(models), 2)

    def test_serialize_to_stream(self):
        """Serializing to a stream produces the same bytes as to a string."""
        obj = ComplexModel(field1="first", field2="second", field3="third")
        obj.save_base(raw=True)

        # Serialize the test database to a stream
        for stream in (StringIO(), HttpResponse()):
            serializers.serialize(self.serializer_name, [obj], indent=2, stream=stream)

            # Serialize normally for a comparison
            string_data = serializers.serialize(self.serializer_name, [obj], indent=2)

            # The two are the same
            if isinstance(stream, StringIO):
                self.assertEqual(string_data, stream.getvalue())
            else:
                self.assertEqual(string_data, stream.text)

    def test_serialize_specific_fields(self):
        """Only the fields named in ``fields`` are serialized."""
        obj = ComplexModel(field1="first", field2="second", field3="third")
        obj.save_base(raw=True)

        # Serialize then deserialize the test database
        serialized_data = serializers.serialize(
            self.serializer_name, [obj], indent=2, fields=("field1", "field3")
        )
        result = next(serializers.deserialize(self.serializer_name, serialized_data))

        # The deserialized object contains data in only the serialized fields.
        self.assertEqual(result.object.field1, "first")
        self.assertEqual(result.object.field2, "")
        self.assertEqual(result.object.field3, "third")

    def test_altering_serialized_output(self):
        """
        The ability to create new objects by modifying serialized content.
        """
        old_headline = "Poker has no place on ESPN"
        new_headline = "Poker has no place on television"
        serial_str = serializers.serialize(self.serializer_name, Article.objects.all())
        serial_str = serial_str.replace(old_headline, new_headline)
        models = list(serializers.deserialize(self.serializer_name, serial_str))

        # Prior to saving, old headline is in place
        self.assertTrue(Article.objects.filter(headline=old_headline))
        self.assertFalse(Article.objects.filter(headline=new_headline))

        for model in models:
            model.save()

        # After saving, new headline is in place
        self.assertTrue(Article.objects.filter(headline=new_headline))
        self.assertFalse(Article.objects.filter(headline=old_headline))

    def test_one_to_one_as_pk(self):
        """
        If you use your own primary key field (such as a OneToOneField), it
        doesn't appear in the serialized field list - it replaces the pk
        identifier.
        """
        AuthorProfile.objects.create(
            author=self.joe, date_of_birth=datetime(1970, 1, 1)
        )
        serial_str = serializers.serialize(
            self.serializer_name, AuthorProfile.objects.all()
        )
        self.assertFalse(self._get_field_values(serial_str, "author"))

        for obj in serializers.deserialize(self.serializer_name, serial_str):
            self.assertEqual(obj.object.pk, self.joe.pk)

    def test_serialize_field_subset(self):
        """Output can be restricted to a subset of fields"""
        valid_fields = ("headline", "pub_date")
        invalid_fields = ("author", "categories")
        serial_str = serializers.serialize(
            self.serializer_name, Article.objects.all(), fields=valid_fields
        )
        for field_name in invalid_fields:
            self.assertFalse(self._get_field_values(serial_str, field_name))

        for field_name in valid_fields:
            self.assertTrue(self._get_field_values(serial_str, field_name))

    def test_serialize_unicode_roundtrip(self):
        """Unicode makes the roundtrip intact"""
        actor_name = "Za\u017c\u00f3\u0142\u0107"
        movie_title = "G\u0119\u015bl\u0105 ja\u017a\u0144"
        ac = Actor(name=actor_name)
        mv = Movie(title=movie_title, actor=ac)
        ac.save()
        mv.save()

        serial_str = serializers.serialize(self.serializer_name, [mv])
        self.assertEqual(self._get_field_values(serial_str, "title")[0], movie_title)
        self.assertEqual(self._get_field_values(serial_str, "actor")[0], actor_name)

        obj_list = list(serializers.deserialize(self.serializer_name, serial_str))
        mv_obj = obj_list[0].object
        self.assertEqual(mv_obj.title, movie_title)

    def test_unicode_serialization(self):
        """Non-ASCII data survives serialization and deserialization."""
        unicode_name = "יוניקוד"
        data = serializers.serialize(self.serializer_name, [Author(name=unicode_name)])
        self.assertIn(unicode_name, data)
        objs = list(serializers.deserialize(self.serializer_name, data))
        self.assertEqual(objs[0].object.name, unicode_name)

    def test_serialize_progressbar(self):
        """A progress bar is emitted when progress_output is supplied."""
        fake_stdout = StringIO()
        serializers.serialize(
            self.serializer_name,
            Article.objects.all(),
            progress_output=fake_stdout,
            object_count=Article.objects.count(),
        )
        self.assertTrue(
            fake_stdout.getvalue().endswith(
                "[" + "." * ProgressBar.progress_width + "]\n"
            )
        )

    def test_serialize_superfluous_queries(self):
        """Ensure no superfluous queries are made when serializing ForeignKeys

        #17602
        """
        ac = Actor(name="Actor name")
        ac.save()
        mv = Movie(title="Movie title", actor_id=ac.pk)
        mv.save()

        with self.assertNumQueries(0):
            serializers.serialize(self.serializer_name, [mv])

    def test_serialize_prefetch_related_m2m(self):
        # One query for the Article table, one for each prefetched m2m
        # field, and one extra one for the nested prefetch for the Topics
        # that have a relationship to the Category.
        with self.assertNumQueries(5):
            serializers.serialize(
                self.serializer_name,
                Article.objects.prefetch_related(
                    "meta_data",
                    "topics",
                    Prefetch(
                        "categories",
                        queryset=Category.objects.prefetch_related("topic_set"),
                    ),
                ),
            )
        # One query for the Article table, and three m2m queries for each
        # article.
        with self.assertNumQueries(7):
            serializers.serialize(self.serializer_name, Article.objects.all())

    def test_serialize_prefetch_related_m2m_with_natural_keys(self):
        # One query for the Article table, one for each prefetched m2m
        # field, and a query to get the categories for each Article (two in
        # total).
        with self.assertNumQueries(5):
            serializers.serialize(
                self.serializer_name,
                Article.objects.prefetch_related(
                    Prefetch(
                        "meta_data",
                        queryset=CategoryMetaData.objects.prefetch_related(
                            "category_set"
                        ),
                    ),
                    "topics",
                ),
                use_natural_foreign_keys=True,
            )

    def test_serialize_with_null_pk(self):
        """
        Serialized data with no primary key results
        in a model instance with no id
        """
        category = Category(name="Reference")
        serial_str = serializers.serialize(self.serializer_name, [category])
        pk_value = self._get_pk_values(serial_str)[0]
        self.assertFalse(pk_value)

        cat_obj = list(serializers.deserialize(self.serializer_name, serial_str))[
            0
        ].object
        self.assertIsNone(cat_obj.id)

    def test_float_serialization(self):
        """Float values serialize and deserialize intact"""
        sc = Score(score=3.4)
        sc.save()
        serial_str = serializers.serialize(self.serializer_name, [sc])
        deserial_objs = list(serializers.deserialize(self.serializer_name, serial_str))
        self.assertEqual(deserial_objs[0].object.score, Approximate(3.4, places=1))

    def test_deferred_field_serialization(self):
        """Deferred fields don't break serialization of the instance."""
        author = Author.objects.create(name="Victor Hugo")
        author = Author.objects.defer("name").get(pk=author.pk)
        serial_str = serializers.serialize(self.serializer_name, [author])
        deserial_objs = list(serializers.deserialize(self.serializer_name, serial_str))
        self.assertIsInstance(deserial_objs[0].object, Author)

    def test_custom_field_serialization(self):
        """Custom fields serialize and deserialize intact"""
        team_str = "Spartak Moskva"
        player = Player()
        player.name = "Soslan Djanaev"
        player.rank = 1
        player.team = Team(team_str)
        player.save()
        serial_str = serializers.serialize(self.serializer_name, Player.objects.all())
        team = self._get_field_values(serial_str, "team")
        self.assertTrue(team)
        self.assertEqual(team[0], team_str)

        deserial_objs = list(serializers.deserialize(self.serializer_name, serial_str))
        self.assertEqual(
            deserial_objs[0].object.team.to_string(), player.team.to_string()
        )

    def test_pre_1000ad_date(self):
        """Year values before 1000AD are properly formatted"""
        # Regression for #12524 -- dates before 1000AD get prefixed
        # 0's on the year
        a = Article.objects.create(
            author=self.jane,
            headline="Nobody remembers the early years",
            pub_date=datetime(1, 2, 3, 4, 5, 6),
        )
        serial_str = serializers.serialize(self.serializer_name, [a])
        date_values = self._get_field_values(serial_str, "pub_date")
        self.assertEqual(date_values[0].replace("T", " "), "0001-02-03 04:05:06")

    def test_pkless_serialized_strings(self):
        """
        Serialized strings without PKs can be turned into models
        """
        deserial_objs = list(
            serializers.deserialize(self.serializer_name, self.pkless_str)
        )
        for obj in deserial_objs:
            self.assertFalse(obj.object.id)
            obj.save()
        # 3 categories from setUpTestData plus the 2 just deserialized.
        self.assertEqual(Category.objects.count(), 5)

    def test_deterministic_mapping_ordering(self):
        """
        Mapping such as fields should be deterministically ordered. (#24558)
        """
        output = serializers.serialize(self.serializer_name, [self.a1], indent=2)
        categories = self.a1.categories.values_list("pk", flat=True)
        self.assertEqual(
            output,
            self.mapping_ordering_str
            % {
                "article_pk": self.a1.pk,
                "author_pk": self.a1.author_id,
                "first_category_pk": categories[0],
                "second_category_pk": categories[1],
            },
        )

    def test_deserialize_force_insert(self):
        """
        Deserialized content can be saved with force_insert as a parameter.
        """
        serial_str = serializers.serialize(self.serializer_name, [self.a1])
        deserial_obj = list(serializers.deserialize(self.serializer_name, serial_str))[
            0
        ]
        with mock.patch("django.db.models.Model") as mock_model:
            deserial_obj.save(force_insert=False)
            mock_model.save_base.assert_called_with(
                deserial_obj.object, raw=True, using=None, force_insert=False
            )

    @skipUnlessDBFeature("can_defer_constraint_checks")
    def test_serialize_proxy_model(self):
        """Proxy models serialize identically to their concrete base."""
        BaseModel.objects.create(parent_data=1)
        base_objects = BaseModel.objects.all()
        proxy_objects = ProxyBaseModel.objects.all()
        proxy_proxy_objects = ProxyProxyBaseModel.objects.all()
        base_data = serializers.serialize("json", base_objects)
        proxy_data = serializers.serialize("json", proxy_objects)
        proxy_proxy_data = serializers.serialize("json", proxy_proxy_objects)
        self.assertEqual(base_data, proxy_data.replace("proxy", ""))
        self.assertEqual(base_data, proxy_proxy_data.replace("proxy", ""))

    def test_serialize_inherited_fields(self):
        """Fields defined on an MTI parent aren't emitted for the child."""
        child_1 = Child.objects.create(parent_data="a", child_data="b")
        child_2 = Child.objects.create(parent_data="c", child_data="d")
        child_1.parent_m2m.add(child_2)
        child_data = serializers.serialize(self.serializer_name, [child_1, child_2])
        self.assertEqual(self._get_field_values(child_data, "parent_m2m"), [])
        self.assertEqual(self._get_field_values(child_data, "parent_data"), [])

    def test_serialize_only_pk(self):
        # Without natural keys, related m2m queries fetch only the pk column.
        with self.assertNumQueries(7) as ctx:
            serializers.serialize(
                self.serializer_name,
                Article.objects.all(),
                use_natural_foreign_keys=False,
            )

        categories_sql = ctx[1]["sql"]
        self.assertNotIn(connection.ops.quote_name("meta_data_id"), categories_sql)
        meta_data_sql = ctx[2]["sql"]
        self.assertNotIn(connection.ops.quote_name("kind"), meta_data_sql)
        topics_data_sql = ctx[3]["sql"]
        self.assertNotIn(connection.ops.quote_name("category_id"), topics_data_sql)

    def test_serialize_no_only_pk_with_natural_keys(self):
        with self.assertNumQueries(7) as ctx:
            serializers.serialize(
                self.serializer_name,
                Article.objects.all(),
                use_natural_foreign_keys=True,
            )

        categories_sql = ctx[1]["sql"]
        self.assertNotIn(connection.ops.quote_name("meta_data_id"), categories_sql)
        # CategoryMetaData has natural_key().
        meta_data_sql = ctx[2]["sql"]
        self.assertIn(connection.ops.quote_name("kind"), meta_data_sql)
        topics_data_sql = ctx[3]["sql"]
        self.assertNotIn(connection.ops.quote_name("category_id"), topics_data_sql)
class SerializerAPITests(SimpleTestCase):
    def test_stream_class(self):
        """A custom ``stream_class`` is instantiated and used for output."""

        class File:
            # Minimal write-only stream that records everything written.
            def __init__(self):
                self.lines = []

            def write(self, line):
                self.lines.append(line)

            def getvalue(self):
                return "".join(self.lines)

        class Serializer(serializers.json.Serializer):
            stream_class = File

        serializer = Serializer()
        data = serializer.serialize([Score(id=1, score=3.4)])
        self.assertIs(serializer.stream_class, File)
        self.assertIsInstance(serializer.stream, File)
        self.assertEqual(
            data,
            '[{"model": "serializers.score", "pk": 1, "fields": {"score": 3.4}}]\n',
        )
class SerializersTransactionTestBase:
    """Transaction-requiring tests; subclasses supply ``fwd_ref_str``."""

    available_apps = ["serializers"]

    @skipUnlessDBFeature("supports_forward_references")
    def test_forward_refs(self):
        """
        Objects ids can be referenced before they are
        defined in the serialization data.
        """
        # The deserialization process needs to run in a transaction in order
        # to test forward reference handling.
        with transaction.atomic():
            objs = serializers.deserialize(self.serializer_name, self.fwd_ref_str)
            with connection.constraint_checks_disabled():
                for obj in objs:
                    obj.save()

        for model_cls in (Category, Author, Article):
            self.assertEqual(model_cls.objects.count(), 1)
        art_obj = Article.objects.all()[0]
        self.assertEqual(art_obj.categories.count(), 1)
        self.assertEqual(art_obj.author.name, "Agnes")
def register_tests(test_class, method_name, test_func, exclude=()):
    """
    Dynamically create serializer tests to ensure that all registered
    serializers are automatically tested.

    For every registered format (except "geojson" and those in ``exclude``),
    attach ``test_func`` to ``test_class`` under ``method_name % format``,
    skipping it when the format's backing library isn't installed.
    """
    for fmt in serializers.get_serializer_formats():
        if fmt in exclude or fmt == "geojson":
            continue
        # Skip the generated test when the serializer couldn't be loaded.
        skip_missing_lib = skipIf(
            isinstance(serializers.get_serializer(fmt), serializers.BadSerializer),
            "The Python library for the %s serializer is not installed." % fmt,
        )
        method = partialmethod(skip_missing_lib(test_func), fmt)
        setattr(test_class, method_name % fmt, method)
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/serializers/models/natural.py | tests/serializers/models/natural.py | """Models for test_natural.py"""
import uuid
from django.contrib.auth.base_user import AbstractBaseUser
from django.db import models
class NaturalKeyAnchorManager(models.Manager):
    def get_by_natural_key(self, data):
        # The unique ``data`` value is the natural key.
        return self.get(data=data)


class NaturalKeyAnchor(models.Model):
    # Target for foreign keys serialized via natural keys.
    data = models.CharField(max_length=100, unique=True)
    title = models.CharField(max_length=100, null=True)

    objects = NaturalKeyAnchorManager()

    def natural_key(self):
        return (self.data,)


class FKDataNaturalKey(models.Model):
    # Nullable FK to the natural-key anchor.
    data = models.ForeignKey(NaturalKeyAnchor, models.SET_NULL, null=True)
class NaturalKeyThing(models.Model):
    # Self-referential FK and M2M, both resolvable via the ``key`` natural key.
    key = models.CharField(max_length=100, unique=True)
    other_thing = models.ForeignKey(
        "NaturalKeyThing", on_delete=models.CASCADE, null=True
    )
    other_things = models.ManyToManyField(
        "NaturalKeyThing", related_name="thing_m2m_set"
    )

    class Manager(models.Manager):
        def get_by_natural_key(self, key):
            return self.get(key=key)

    objects = Manager()

    def natural_key(self):
        return (self.key,)

    def __str__(self):
        return self.key


class NaturalPKWithDefault(models.Model):
    # UUID primary key with a default, plus a natural key on ``name``.
    id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
    name = models.CharField(max_length=100, unique=True)

    class Manager(models.Manager):
        def get_by_natural_key(self, name):
            return self.get(name=name)

    objects = Manager()

    def natural_key(self):
        return (self.name,)
class FKAsPKNoNaturalKeyManager(models.Manager):
    def get_by_natural_key(self, *args, **kwargs):
        return super().get_by_natural_key(*args, **kwargs)


class FKAsPKNoNaturalKey(models.Model):
    # FK used as the primary key; natural_key() must never be invoked.
    pk_fk = models.ForeignKey(
        NaturalKeyAnchor, on_delete=models.CASCADE, primary_key=True
    )

    objects = FKAsPKNoNaturalKeyManager()

    def natural_key(self):
        raise NotImplementedError("This method was not expected to be called.")


class SubclassNaturalKeyOptOutUser(AbstractBaseUser):
    # Opts out of the inherited natural key by returning an empty tuple.
    email = models.EmailField(unique=False, null=True, blank=True)
    USERNAME_FIELD = "email"

    def natural_key(self):
        return ()


class PostToOptOutSubclassUser(models.Model):
    # FK and M2M pointing at the opted-out user model.
    author = models.ForeignKey(SubclassNaturalKeyOptOutUser, on_delete=models.CASCADE)
    title = models.CharField(max_length=100)
    subscribers = models.ManyToManyField(
        SubclassNaturalKeyOptOutUser, related_name="subscribed_posts", blank=True
    )
class NaturalKeyWithNullableFieldManager(models.Manager):
    def get_by_natural_key(self, name, optional_id):
        return self.get(name=name, optional_id=optional_id)


class NaturalKeyWithNullableField(models.Model):
    # Compound natural key in which one component may be NULL.
    name = models.CharField(max_length=100)
    optional_id = models.CharField(max_length=100, null=True, blank=True)

    objects = NaturalKeyWithNullableFieldManager()

    class Meta:
        unique_together = [["name", "optional_id"]]

    def natural_key(self):
        return (self.name, self.optional_id)


class FKToNaturalKeyWithNullable(models.Model):
    # FK and M2M referencing the nullable-component natural key above.
    ref = models.ForeignKey(
        NaturalKeyWithNullableField, on_delete=models.CASCADE, null=True
    )
    refs = models.ManyToManyField(
        NaturalKeyWithNullableField, related_name="m2m_referrers"
    )
    data = models.CharField(max_length=100, default="")
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/serializers/models/multi_table.py | tests/serializers/models/multi_table.py | from django.db import models
class ParentManager(models.Manager):
    def get_by_natural_key(self, parent_data):
        return self.get(parent_data=parent_data)


class Parent(models.Model):
    # Multi-table-inheritance base with a symmetrical self-m2m.
    parent_data = models.CharField(max_length=30, unique=True)
    parent_m2m = models.ManyToManyField("self")

    objects = ParentManager()

    def natural_key(self):
        return (self.parent_data,)


class Child(Parent):
    # MTI child; parent fields live in Parent's table via the implicit link.
    child_data = models.CharField(max_length=30, unique=True)
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/serializers/models/__init__.py | tests/serializers/models/__init__.py | from .base import * # NOQA
from .data import * # NOQA
from .multi_table import * # NOQA
from .natural import * # NOQA
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/serializers/models/base.py | tests/serializers/models/base.py | """
Serialization
``django.core.serializers`` provides interfaces to converting Django
``QuerySet`` objects to and from "flat" data (i.e. strings).
"""
from decimal import Decimal
from django.db import models
class CategoryMetaDataManager(models.Manager):
    def get_by_natural_key(self, kind, name):
        return self.get(kind=kind, name=name)


class CategoryMetaData(models.Model):
    kind = models.CharField(max_length=10)
    name = models.CharField(max_length=10)
    value = models.CharField(max_length=10)
    objects = CategoryMetaDataManager()

    class Meta:
        unique_together = (("kind", "name"),)

    def __str__(self):
        return "[%s:%s]=%s" % (self.kind, self.name, self.value)

    def natural_key(self):
        # Mirrors the unique_together constraint above.
        return (self.kind, self.name)


class Category(models.Model):
    name = models.CharField(max_length=20)
    meta_data = models.ForeignKey(
        CategoryMetaData, models.SET_NULL, null=True, default=None
    )

    class Meta:
        ordering = ("name",)

    def __str__(self):
        return self.name
class Author(models.Model):
    name = models.CharField(max_length=20)

    class Meta:
        ordering = ("name",)

    def __str__(self):
        return self.name


class TopicManager(models.Manager):
    def get_queryset(self):
        # Default manager always joins the category.
        return super().get_queryset().select_related("category")


class Topic(models.Model):
    name = models.CharField(max_length=255)
    category = models.ForeignKey(Category, models.CASCADE)
    objects = TopicManager()
class Article(models.Model):
    author = models.ForeignKey(Author, models.CASCADE)
    headline = models.CharField(max_length=50)
    pub_date = models.DateTimeField()
    categories = models.ManyToManyField(Category)
    meta_data = models.ManyToManyField(CategoryMetaData)
    topics = models.ManyToManyField(Topic)

    class Meta:
        ordering = ("pub_date",)

    def __str__(self):
        return self.headline


class AuthorProfile(models.Model):
    # OneToOneField doubling as the primary key.
    author = models.OneToOneField(Author, models.CASCADE, primary_key=True)
    date_of_birth = models.DateField()

    def __str__(self):
        return "Profile of %s" % self.author


class Actor(models.Model):
    # Non-integer (CharField) primary key.
    name = models.CharField(max_length=20, primary_key=True)

    class Meta:
        ordering = ("name",)

    def __str__(self):
        return self.name


class Movie(models.Model):
    actor = models.ForeignKey(Actor, models.CASCADE)
    title = models.CharField(max_length=50)
    price = models.DecimalField(max_digits=6, decimal_places=2, default=Decimal("0.00"))

    class Meta:
        ordering = ("title",)

    def __str__(self):
        return self.title


class Score(models.Model):
    score = models.FloatField()
class Team:
    """Plain (non-model) value object wrapping a team title.

    ``__str__`` is deliberately unusable so that serialization code is
    forced to go through :meth:`to_string` instead of ``str()``.
    """

    def __init__(self, title):
        # Keep the raw value; it is only stringified in to_string().
        self.title = title

    def to_string(self):
        """Return the title coerced to ``str``."""
        value = self.title
        return str(value)

    def __str__(self):
        raise NotImplementedError("Not so simple")
class TeamField(models.CharField):
    # Custom field persisting a Team value object as its title string.

    def __init__(self):
        super().__init__(max_length=100)

    def get_db_prep_save(self, value, connection):
        # Only the title text is stored in the database.
        return str(value.title)

    def to_python(self, value):
        if isinstance(value, Team):
            return value
        return Team(value)

    def from_db_value(self, value, expression, connection):
        return Team(value)

    def value_to_string(self, obj):
        # Serializers must use to_string() because Team.__str__ raises.
        return self.value_from_object(obj).to_string()

    def deconstruct(self):
        name, path, args, kwargs = super().deconstruct()
        # max_length is hard-coded in __init__, so keep it out of migrations.
        del kwargs["max_length"]
        return name, path, args, kwargs
class Player(models.Model):
    name = models.CharField(max_length=50)
    rank = models.IntegerField()
    team = TeamField()  # custom field exercising serializer value_to_string

    def __str__(self):
        return "%s (%d) playing for %s" % (self.name, self.rank, self.team.to_string())


class BaseModel(models.Model):
    parent_data = models.IntegerField()


class ProxyBaseModel(BaseModel):
    class Meta:
        proxy = True


class ProxyProxyBaseModel(ProxyBaseModel):
    # Proxy of a proxy.
    class Meta:
        proxy = True


class ComplexModel(models.Model):
    # Three independent fields for partial-field serialization tests.
    field1 = models.CharField(max_length=10)
    field2 = models.CharField(max_length=10)
    field3 = models.CharField(max_length=10)
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/serializers/models/data.py | tests/serializers/models/data.py | """
******** Models for test_data.py ***********
The following classes are for testing basic data marshalling, including
NULL values, where allowed.
The basic idea is to have a model for each Django data type.
"""
import uuid
from django.contrib.contenttypes.fields import GenericForeignKey, GenericRelation
from django.contrib.contenttypes.models import ContentType
from django.db import models
from .base import BaseModel
try:
    from PIL import Image  # NOQA
except ImportError:
    # Pillow unavailable: tests check for None and skip ImageData coverage.
    ImageData = None
else:

    class ImageData(models.Model):
        data = models.ImageField(null=True)
# One model per basic Django field type. Every field is nullable so the
# serializers can be round-tripped with both real values and NULL.
class BinaryData(models.Model):
    data = models.BinaryField(null=True)


class BooleanData(models.Model):
    data = models.BooleanField(default=False, null=True)


class CharData(models.Model):
    data = models.CharField(max_length=30, null=True)


class DateData(models.Model):
    data = models.DateField(null=True)


class DateTimeData(models.Model):
    data = models.DateTimeField(null=True)


class DecimalData(models.Model):
    data = models.DecimalField(null=True, decimal_places=3, max_digits=5)


class EmailData(models.Model):
    data = models.EmailField(null=True)


class FileData(models.Model):
    data = models.FileField(null=True)


class FilePathData(models.Model):
    data = models.FilePathField(null=True)


class FloatData(models.Model):
    data = models.FloatField(null=True)


class IntegerData(models.Model):
    data = models.IntegerField(null=True)


class BigIntegerData(models.Model):
    data = models.BigIntegerField(null=True)


class GenericIPAddressData(models.Model):
    data = models.GenericIPAddressField(null=True)


class PositiveBigIntegerData(models.Model):
    data = models.PositiveBigIntegerField(null=True)


class PositiveIntegerData(models.Model):
    data = models.PositiveIntegerField(null=True)


class PositiveSmallIntegerData(models.Model):
    data = models.PositiveSmallIntegerField(null=True)


class SlugData(models.Model):
    data = models.SlugField(null=True)


class SmallData(models.Model):
    data = models.SmallIntegerField(null=True)


class TextData(models.Model):
    data = models.TextField(null=True)


class TimeData(models.Model):
    data = models.TimeField(null=True)
class Tag(models.Model):
    """A tag on an item."""

    data = models.SlugField()
    # Generic foreign key: content_type + object_id together identify the
    # tagged row; content_object is the combined accessor.
    content_type = models.ForeignKey(ContentType, models.CASCADE)
    object_id = models.PositiveIntegerField()
    content_object = GenericForeignKey()

    class Meta:
        ordering = ["data"]


class GenericData(models.Model):
    """Model carrying a reverse generic relation to Tag."""

    data = models.CharField(max_length=30)
    tags = GenericRelation(Tag)
# The following test classes are all for validation
# of related objects; in particular, forward, backward,
# and self references.
class Anchor(models.Model):
    """This is a model that can be used as
    something for other models to point at"""

    data = models.CharField(max_length=30)

    class Meta:
        ordering = ("id",)


class UniqueAnchor(models.Model):
    """This is a model that can be used as
    something for other models to point at"""

    # unique=True so it can be the target of a to_field foreign key below.
    data = models.CharField(unique=True, max_length=30)


class FKData(models.Model):
    data = models.ForeignKey(Anchor, models.SET_NULL, null=True)


class M2MData(models.Model):
    data = models.ManyToManyField(Anchor)


class O2OData(models.Model):
    # One to one field can't be null here, since it is a PK.
    data = models.OneToOneField(Anchor, models.CASCADE, primary_key=True)


class FKSelfData(models.Model):
    # Self-referential foreign key.
    data = models.ForeignKey("self", models.CASCADE, null=True)


class M2MSelfData(models.Model):
    # Self-referential, non-symmetrical many-to-many.
    data = models.ManyToManyField("self", symmetrical=False)


class FKDataToField(models.Model):
    # FK targeting a non-PK unique column via to_field.
    data = models.ForeignKey(UniqueAnchor, models.SET_NULL, null=True, to_field="data")


class FKDataToO2O(models.Model):
    data = models.ForeignKey(O2OData, models.SET_NULL, null=True)


class M2MIntermediateData(models.Model):
    # M2M routed through an explicit intermediate model.
    data = models.ManyToManyField(Anchor, through="Intermediate")


class Intermediate(models.Model):
    left = models.ForeignKey(M2MIntermediateData, models.CASCADE)
    right = models.ForeignKey(Anchor, models.CASCADE)
    extra = models.CharField(max_length=30, blank=True, default="doesn't matter")
# The following test classes are for validating the
# deserialization of objects that use a user-defined
# field as the primary key.
# Some of these data types have been commented out
# because they can't be used as a primary key on one
# or all database backends.
# One model per field type used as a primary key, for deserialization tests
# of user-defined PKs.
class BooleanPKData(models.Model):
    data = models.BooleanField(primary_key=True, default=False)


class CharPKData(models.Model):
    data = models.CharField(max_length=30, primary_key=True)


class DatePKData(models.Model):
    data = models.DateField(primary_key=True)


class DateTimePKData(models.Model):
    data = models.DateTimeField(primary_key=True)


class DecimalPKData(models.Model):
    data = models.DecimalField(primary_key=True, decimal_places=3, max_digits=5)


class EmailPKData(models.Model):
    data = models.EmailField(primary_key=True)


class FilePathPKData(models.Model):
    data = models.FilePathField(primary_key=True)


class FloatPKData(models.Model):
    data = models.FloatField(primary_key=True)


class IntegerPKData(models.Model):
    data = models.IntegerField(primary_key=True)


class GenericIPAddressPKData(models.Model):
    data = models.GenericIPAddressField(primary_key=True)


class PositiveIntegerPKData(models.Model):
    data = models.PositiveIntegerField(primary_key=True)


class PositiveSmallIntegerPKData(models.Model):
    data = models.PositiveSmallIntegerField(primary_key=True)


class SlugPKData(models.Model):
    data = models.SlugField(primary_key=True)


class SmallPKData(models.Model):
    data = models.SmallIntegerField(primary_key=True)


class TextPKData(models.Model):
    data = models.TextField(primary_key=True)

    class Meta:
        # Not every backend can index (and therefore PK) a text column.
        required_db_features = ["supports_index_on_text_field"]


class TimePKData(models.Model):
    data = models.TimeField(primary_key=True)


class UUIDData(models.Model):
    data = models.UUIDField(primary_key=True)


class UUIDDefaultData(models.Model):
    data = models.UUIDField(primary_key=True, default=uuid.uuid4)


class FKToUUID(models.Model):
    # FK whose target PK is a UUID.
    data = models.ForeignKey(UUIDData, models.CASCADE)
# Tests for handling fields with pre_save functions, or
# models with save functions that modify data
class AutoNowDateTimeData(models.Model):
    # auto_now=True: the field's pre_save() stamps the value on every save.
    data = models.DateTimeField(null=True, auto_now=True)


class ModifyingSaveData(models.Model):
    data = models.IntegerField(null=True)

    def save(self, *args, **kwargs):
        """
        A save method that modifies the data in the object.
        A user-defined save() method isn't called when objects are deserialized
        (#4459).
        """
        # Overwrite the assigned value so tests can detect whether save()
        # ran (it must not run during deserialization).
        self.data = 666
        super().save(*args, **kwargs)
# Tests for serialization of models using inheritance.
# Regression for #7202, #7350
class AbstractBaseModel(models.Model):
    parent_data = models.IntegerField()

    class Meta:
        abstract = True


class InheritAbstractModel(AbstractBaseModel):
    """Concrete child of an abstract base (single table)."""

    child_data = models.IntegerField()


class InheritBaseModel(BaseModel):
    """Multi-table child with an implicit parent link."""

    child_data = models.IntegerField()


class ExplicitInheritBaseModel(BaseModel):
    """Multi-table child with an explicitly declared parent link."""

    parent = models.OneToOneField(BaseModel, models.CASCADE, parent_link=True)
    child_data = models.IntegerField()


class LengthModel(models.Model):
    data = models.IntegerField()

    def __len__(self):
        # Deliberately quirky: instance truthiness/len() depends on the
        # stored integer, exercising serializer handling of such models.
        return self.data
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/expressions_window/models.py | tests/expressions_window/models.py | from django.db import models
class Classification(models.Model):
    code = models.CharField(max_length=10)


class Employee(models.Model):
    """Row fixture for window-function tests (partitioned by department)."""

    name = models.CharField(max_length=40, blank=False, null=False)
    salary = models.PositiveIntegerField()
    department = models.CharField(max_length=40, blank=False, null=False)
    hire_date = models.DateField(blank=False, null=False)
    age = models.IntegerField(blank=False, null=False)
    classification = models.ForeignKey(
        "Classification", on_delete=models.CASCADE, null=True
    )
    bonus = models.DecimalField(decimal_places=2, max_digits=15, null=True)


class PastEmployeeDepartment(models.Model):
    """Reverse multi-valued relation used to test windows over joins."""

    employee = models.ForeignKey(
        Employee, related_name="past_departments", on_delete=models.CASCADE
    )
    department = models.CharField(max_length=40, blank=False, null=False)


class Detail(models.Model):
    value = models.JSONField()

    class Meta:
        # Only defined on backends with JSON support.
        required_db_features = {"supports_json_field"}
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/expressions_window/__init__.py | tests/expressions_window/__init__.py | python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false | |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/expressions_window/tests.py | tests/expressions_window/tests.py | import datetime
from decimal import Decimal
from unittest import mock
from django.core.exceptions import FieldError
from django.db import NotSupportedError, connection
from django.db.models import (
Avg,
Case,
Count,
F,
IntegerField,
Max,
Min,
OuterRef,
Q,
RowRange,
Subquery,
Sum,
Value,
ValueRange,
When,
Window,
WindowFrame,
WindowFrameExclusion,
)
from django.db.models.fields.json import KeyTextTransform, KeyTransform
from django.db.models.functions import (
Cast,
CumeDist,
DenseRank,
ExtractYear,
FirstValue,
Lag,
LastValue,
Lead,
NthValue,
Ntile,
PercentRank,
Rank,
RowNumber,
Upper,
)
from django.db.models.lookups import Exact
from django.test import SimpleTestCase, TestCase, skipIfDBFeature, skipUnlessDBFeature
from django.test.utils import CaptureQueriesContext
from .models import Classification, Detail, Employee, PastEmployeeDepartment
@skipUnlessDBFeature("supports_over_clause")
class WindowFunctionTests(TestCase):
@classmethod
def setUpTestData(cls):
classification = Classification.objects.create()
Employee.objects.bulk_create(
[
Employee(
name=e[0],
salary=e[1],
department=e[2],
hire_date=e[3],
age=e[4],
bonus=Decimal(e[1]) / 400,
classification=classification,
)
for e in [
("Jones", 45000, "Accounting", datetime.datetime(2005, 11, 1), 20),
(
"Williams",
37000,
"Accounting",
datetime.datetime(2009, 6, 1),
20,
),
("Jenson", 45000, "Accounting", datetime.datetime(2008, 4, 1), 20),
("Adams", 50000, "Accounting", datetime.datetime(2013, 7, 1), 50),
("Smith", 55000, "Sales", datetime.datetime(2007, 6, 1), 30),
("Brown", 53000, "Sales", datetime.datetime(2009, 9, 1), 30),
("Johnson", 40000, "Marketing", datetime.datetime(2012, 3, 1), 30),
("Smith", 38000, "Marketing", datetime.datetime(2009, 10, 1), 20),
("Wilkinson", 60000, "IT", datetime.datetime(2011, 3, 1), 40),
("Moore", 34000, "IT", datetime.datetime(2013, 8, 1), 40),
("Miller", 100000, "Management", datetime.datetime(2005, 6, 1), 40),
("Johnson", 80000, "Management", datetime.datetime(2005, 7, 1), 50),
]
]
)
employees = list(Employee.objects.order_by("pk"))
PastEmployeeDepartment.objects.bulk_create(
[
PastEmployeeDepartment(employee=employees[6], department="Sales"),
PastEmployeeDepartment(employee=employees[10], department="IT"),
]
)
def test_dense_rank(self):
tests = [
ExtractYear(F("hire_date")).asc(),
F("hire_date__year").asc(),
"hire_date__year",
]
for order_by in tests:
with self.subTest(order_by=order_by):
qs = Employee.objects.annotate(
rank=Window(expression=DenseRank(), order_by=order_by),
)
self.assertQuerySetEqual(
qs,
[
("Jones", 45000, "Accounting", datetime.date(2005, 11, 1), 1),
("Miller", 100000, "Management", datetime.date(2005, 6, 1), 1),
("Johnson", 80000, "Management", datetime.date(2005, 7, 1), 1),
("Smith", 55000, "Sales", datetime.date(2007, 6, 1), 2),
("Jenson", 45000, "Accounting", datetime.date(2008, 4, 1), 3),
("Smith", 38000, "Marketing", datetime.date(2009, 10, 1), 4),
("Brown", 53000, "Sales", datetime.date(2009, 9, 1), 4),
("Williams", 37000, "Accounting", datetime.date(2009, 6, 1), 4),
("Wilkinson", 60000, "IT", datetime.date(2011, 3, 1), 5),
("Johnson", 40000, "Marketing", datetime.date(2012, 3, 1), 6),
("Moore", 34000, "IT", datetime.date(2013, 8, 1), 7),
("Adams", 50000, "Accounting", datetime.date(2013, 7, 1), 7),
],
lambda entry: (
entry.name,
entry.salary,
entry.department,
entry.hire_date,
entry.rank,
),
ordered=False,
)
def test_department_salary(self):
qs = Employee.objects.annotate(
department_sum=Window(
expression=Sum("salary"),
partition_by=F("department"),
order_by=[F("hire_date").asc()],
)
).order_by("department", "department_sum")
self.assertQuerySetEqual(
qs,
[
("Jones", "Accounting", 45000, 45000),
("Jenson", "Accounting", 45000, 90000),
("Williams", "Accounting", 37000, 127000),
("Adams", "Accounting", 50000, 177000),
("Wilkinson", "IT", 60000, 60000),
("Moore", "IT", 34000, 94000),
("Miller", "Management", 100000, 100000),
("Johnson", "Management", 80000, 180000),
("Smith", "Marketing", 38000, 38000),
("Johnson", "Marketing", 40000, 78000),
("Smith", "Sales", 55000, 55000),
("Brown", "Sales", 53000, 108000),
],
lambda entry: (
entry.name,
entry.department,
entry.salary,
entry.department_sum,
),
)
def test_rank(self):
"""
Rank the employees based on the year they're were hired. Since there
are multiple employees hired in different years, this will contain
gaps.
"""
qs = Employee.objects.annotate(
rank=Window(
expression=Rank(),
order_by=F("hire_date__year").asc(),
)
)
self.assertQuerySetEqual(
qs,
[
("Jones", 45000, "Accounting", datetime.date(2005, 11, 1), 1),
("Miller", 100000, "Management", datetime.date(2005, 6, 1), 1),
("Johnson", 80000, "Management", datetime.date(2005, 7, 1), 1),
("Smith", 55000, "Sales", datetime.date(2007, 6, 1), 4),
("Jenson", 45000, "Accounting", datetime.date(2008, 4, 1), 5),
("Smith", 38000, "Marketing", datetime.date(2009, 10, 1), 6),
("Brown", 53000, "Sales", datetime.date(2009, 9, 1), 6),
("Williams", 37000, "Accounting", datetime.date(2009, 6, 1), 6),
("Wilkinson", 60000, "IT", datetime.date(2011, 3, 1), 9),
("Johnson", 40000, "Marketing", datetime.date(2012, 3, 1), 10),
("Moore", 34000, "IT", datetime.date(2013, 8, 1), 11),
("Adams", 50000, "Accounting", datetime.date(2013, 7, 1), 11),
],
lambda entry: (
entry.name,
entry.salary,
entry.department,
entry.hire_date,
entry.rank,
),
ordered=False,
)
def test_row_number(self):
"""
The row number window function computes the number based on the order
in which the tuples were inserted. Depending on the backend,
Oracle requires an ordering-clause in the Window expression.
"""
qs = Employee.objects.annotate(
row_number=Window(
expression=RowNumber(),
order_by=F("pk").asc(),
)
).order_by("pk")
self.assertQuerySetEqual(
qs,
[
("Jones", "Accounting", 1),
("Williams", "Accounting", 2),
("Jenson", "Accounting", 3),
("Adams", "Accounting", 4),
("Smith", "Sales", 5),
("Brown", "Sales", 6),
("Johnson", "Marketing", 7),
("Smith", "Marketing", 8),
("Wilkinson", "IT", 9),
("Moore", "IT", 10),
("Miller", "Management", 11),
("Johnson", "Management", 12),
],
lambda entry: (entry.name, entry.department, entry.row_number),
)
def test_row_number_no_ordering(self):
"""
The row number window function computes the number based on the order
in which the tuples were inserted.
"""
# Add a default ordering for consistent results across databases.
qs = Employee.objects.annotate(
row_number=Window(
expression=RowNumber(),
)
).order_by("pk")
self.assertQuerySetEqual(
qs,
[
("Jones", "Accounting", 1),
("Williams", "Accounting", 2),
("Jenson", "Accounting", 3),
("Adams", "Accounting", 4),
("Smith", "Sales", 5),
("Brown", "Sales", 6),
("Johnson", "Marketing", 7),
("Smith", "Marketing", 8),
("Wilkinson", "IT", 9),
("Moore", "IT", 10),
("Miller", "Management", 11),
("Johnson", "Management", 12),
],
lambda entry: (entry.name, entry.department, entry.row_number),
)
def test_avg_salary_department(self):
qs = Employee.objects.annotate(
avg_salary=Window(
expression=Avg("salary"),
order_by=F("department").asc(),
partition_by="department",
)
).order_by("department", "-salary", "name")
self.assertQuerySetEqual(
qs,
[
("Adams", 50000, "Accounting", 44250.00),
("Jenson", 45000, "Accounting", 44250.00),
("Jones", 45000, "Accounting", 44250.00),
("Williams", 37000, "Accounting", 44250.00),
("Wilkinson", 60000, "IT", 47000.00),
("Moore", 34000, "IT", 47000.00),
("Miller", 100000, "Management", 90000.00),
("Johnson", 80000, "Management", 90000.00),
("Johnson", 40000, "Marketing", 39000.00),
("Smith", 38000, "Marketing", 39000.00),
("Smith", 55000, "Sales", 54000.00),
("Brown", 53000, "Sales", 54000.00),
],
transform=lambda row: (
row.name,
row.salary,
row.department,
row.avg_salary,
),
)
def test_lag(self):
"""
Compute the difference between an employee's salary and the next
highest salary in the employee's department. Return None if the
employee has the lowest salary.
"""
qs = Employee.objects.annotate(
lag=Window(
expression=Lag(expression="salary", offset=1),
partition_by=F("department"),
order_by=[F("salary").asc(), F("name").asc()],
)
).order_by("department", F("salary").asc(), F("name").asc())
self.assertQuerySetEqual(
qs,
[
("Williams", 37000, "Accounting", None),
("Jenson", 45000, "Accounting", 37000),
("Jones", 45000, "Accounting", 45000),
("Adams", 50000, "Accounting", 45000),
("Moore", 34000, "IT", None),
("Wilkinson", 60000, "IT", 34000),
("Johnson", 80000, "Management", None),
("Miller", 100000, "Management", 80000),
("Smith", 38000, "Marketing", None),
("Johnson", 40000, "Marketing", 38000),
("Brown", 53000, "Sales", None),
("Smith", 55000, "Sales", 53000),
],
transform=lambda row: (row.name, row.salary, row.department, row.lag),
)
def test_lag_decimalfield(self):
qs = Employee.objects.annotate(
lag=Window(
expression=Lag(expression="bonus", offset=1),
partition_by=F("department"),
order_by=[F("bonus").asc(), F("name").asc()],
)
).order_by("department", F("bonus").asc(), F("name").asc())
self.assertQuerySetEqual(
qs,
[
("Williams", 92.5, "Accounting", None),
("Jenson", 112.5, "Accounting", 92.5),
("Jones", 112.5, "Accounting", 112.5),
("Adams", 125, "Accounting", 112.5),
("Moore", 85, "IT", None),
("Wilkinson", 150, "IT", 85),
("Johnson", 200, "Management", None),
("Miller", 250, "Management", 200),
("Smith", 95, "Marketing", None),
("Johnson", 100, "Marketing", 95),
("Brown", 132.5, "Sales", None),
("Smith", 137.5, "Sales", 132.5),
],
transform=lambda row: (row.name, row.bonus, row.department, row.lag),
)
def test_order_by_decimalfield(self):
qs = Employee.objects.annotate(
rank=Window(expression=Rank(), order_by="bonus")
).order_by("-bonus", "id")
self.assertQuerySetEqual(
qs,
[
("Miller", 250.0, 12),
("Johnson", 200.0, 11),
("Wilkinson", 150.0, 10),
("Smith", 137.5, 9),
("Brown", 132.5, 8),
("Adams", 125.0, 7),
("Jones", 112.5, 5),
("Jenson", 112.5, 5),
("Johnson", 100.0, 4),
("Smith", 95.0, 3),
("Williams", 92.5, 2),
("Moore", 85.0, 1),
],
transform=lambda row: (row.name, float(row.bonus), row.rank),
)
def test_first_value(self):
qs = Employee.objects.annotate(
first_value=Window(
expression=FirstValue("salary"),
partition_by=F("department"),
order_by=F("hire_date").asc(),
)
).order_by("department", "hire_date")
self.assertQuerySetEqual(
qs,
[
("Jones", 45000, "Accounting", datetime.date(2005, 11, 1), 45000),
("Jenson", 45000, "Accounting", datetime.date(2008, 4, 1), 45000),
("Williams", 37000, "Accounting", datetime.date(2009, 6, 1), 45000),
("Adams", 50000, "Accounting", datetime.date(2013, 7, 1), 45000),
("Wilkinson", 60000, "IT", datetime.date(2011, 3, 1), 60000),
("Moore", 34000, "IT", datetime.date(2013, 8, 1), 60000),
("Miller", 100000, "Management", datetime.date(2005, 6, 1), 100000),
("Johnson", 80000, "Management", datetime.date(2005, 7, 1), 100000),
("Smith", 38000, "Marketing", datetime.date(2009, 10, 1), 38000),
("Johnson", 40000, "Marketing", datetime.date(2012, 3, 1), 38000),
("Smith", 55000, "Sales", datetime.date(2007, 6, 1), 55000),
("Brown", 53000, "Sales", datetime.date(2009, 9, 1), 55000),
],
lambda row: (
row.name,
row.salary,
row.department,
row.hire_date,
row.first_value,
),
)
def test_last_value(self):
qs = Employee.objects.annotate(
last_value=Window(
expression=LastValue("hire_date"),
partition_by=F("department"),
order_by=F("hire_date").asc(),
)
)
self.assertQuerySetEqual(
qs,
[
(
"Adams",
"Accounting",
datetime.date(2013, 7, 1),
50000,
datetime.date(2013, 7, 1),
),
(
"Jenson",
"Accounting",
datetime.date(2008, 4, 1),
45000,
datetime.date(2008, 4, 1),
),
(
"Jones",
"Accounting",
datetime.date(2005, 11, 1),
45000,
datetime.date(2005, 11, 1),
),
(
"Williams",
"Accounting",
datetime.date(2009, 6, 1),
37000,
datetime.date(2009, 6, 1),
),
(
"Moore",
"IT",
datetime.date(2013, 8, 1),
34000,
datetime.date(2013, 8, 1),
),
(
"Wilkinson",
"IT",
datetime.date(2011, 3, 1),
60000,
datetime.date(2011, 3, 1),
),
(
"Miller",
"Management",
datetime.date(2005, 6, 1),
100000,
datetime.date(2005, 6, 1),
),
(
"Johnson",
"Management",
datetime.date(2005, 7, 1),
80000,
datetime.date(2005, 7, 1),
),
(
"Johnson",
"Marketing",
datetime.date(2012, 3, 1),
40000,
datetime.date(2012, 3, 1),
),
(
"Smith",
"Marketing",
datetime.date(2009, 10, 1),
38000,
datetime.date(2009, 10, 1),
),
(
"Brown",
"Sales",
datetime.date(2009, 9, 1),
53000,
datetime.date(2009, 9, 1),
),
(
"Smith",
"Sales",
datetime.date(2007, 6, 1),
55000,
datetime.date(2007, 6, 1),
),
],
transform=lambda row: (
row.name,
row.department,
row.hire_date,
row.salary,
row.last_value,
),
ordered=False,
)
def test_function_list_of_values(self):
qs = (
Employee.objects.annotate(
lead=Window(
expression=Lead(expression="salary"),
order_by=[F("hire_date").asc(), F("name").desc()],
partition_by="department",
)
)
.values_list("name", "salary", "department", "hire_date", "lead")
.order_by("department", F("hire_date").asc(), F("name").desc())
)
self.assertNotIn("GROUP BY", str(qs.query))
self.assertSequenceEqual(
qs,
[
("Jones", 45000, "Accounting", datetime.date(2005, 11, 1), 45000),
("Jenson", 45000, "Accounting", datetime.date(2008, 4, 1), 37000),
("Williams", 37000, "Accounting", datetime.date(2009, 6, 1), 50000),
("Adams", 50000, "Accounting", datetime.date(2013, 7, 1), None),
("Wilkinson", 60000, "IT", datetime.date(2011, 3, 1), 34000),
("Moore", 34000, "IT", datetime.date(2013, 8, 1), None),
("Miller", 100000, "Management", datetime.date(2005, 6, 1), 80000),
("Johnson", 80000, "Management", datetime.date(2005, 7, 1), None),
("Smith", 38000, "Marketing", datetime.date(2009, 10, 1), 40000),
("Johnson", 40000, "Marketing", datetime.date(2012, 3, 1), None),
("Smith", 55000, "Sales", datetime.date(2007, 6, 1), 53000),
("Brown", 53000, "Sales", datetime.date(2009, 9, 1), None),
],
)
def test_min_department(self):
"""An alternative way to specify a query for FirstValue."""
qs = Employee.objects.annotate(
min_salary=Window(
expression=Min("salary"),
partition_by=F("department"),
order_by=[F("salary").asc(), F("name").asc()],
)
).order_by("department", "salary", "name")
self.assertQuerySetEqual(
qs,
[
("Williams", "Accounting", 37000, 37000),
("Jenson", "Accounting", 45000, 37000),
("Jones", "Accounting", 45000, 37000),
("Adams", "Accounting", 50000, 37000),
("Moore", "IT", 34000, 34000),
("Wilkinson", "IT", 60000, 34000),
("Johnson", "Management", 80000, 80000),
("Miller", "Management", 100000, 80000),
("Smith", "Marketing", 38000, 38000),
("Johnson", "Marketing", 40000, 38000),
("Brown", "Sales", 53000, 53000),
("Smith", "Sales", 55000, 53000),
],
lambda row: (row.name, row.department, row.salary, row.min_salary),
)
def test_max_per_year(self):
"""
Find the maximum salary awarded in the same year as the
employee was hired, regardless of the department.
"""
qs = Employee.objects.annotate(
max_salary_year=Window(
expression=Max("salary"),
order_by=ExtractYear("hire_date").asc(),
partition_by=ExtractYear("hire_date"),
)
).order_by(ExtractYear("hire_date"), "salary")
self.assertQuerySetEqual(
qs,
[
("Jones", "Accounting", 45000, 2005, 100000),
("Johnson", "Management", 80000, 2005, 100000),
("Miller", "Management", 100000, 2005, 100000),
("Smith", "Sales", 55000, 2007, 55000),
("Jenson", "Accounting", 45000, 2008, 45000),
("Williams", "Accounting", 37000, 2009, 53000),
("Smith", "Marketing", 38000, 2009, 53000),
("Brown", "Sales", 53000, 2009, 53000),
("Wilkinson", "IT", 60000, 2011, 60000),
("Johnson", "Marketing", 40000, 2012, 40000),
("Moore", "IT", 34000, 2013, 50000),
("Adams", "Accounting", 50000, 2013, 50000),
],
lambda row: (
row.name,
row.department,
row.salary,
row.hire_date.year,
row.max_salary_year,
),
)
def test_cume_dist(self):
"""
Compute the cumulative distribution for the employees based on the
salary in increasing order. Equal to rank/total number of rows (12).
"""
qs = Employee.objects.annotate(
cume_dist=Window(
expression=CumeDist(),
order_by=F("salary").asc(),
)
).order_by("salary", "name")
# Round result of cume_dist because Oracle uses greater precision.
self.assertQuerySetEqual(
qs,
[
("Moore", "IT", 34000, 0.0833333333),
("Williams", "Accounting", 37000, 0.1666666667),
("Smith", "Marketing", 38000, 0.25),
("Johnson", "Marketing", 40000, 0.3333333333),
("Jenson", "Accounting", 45000, 0.5),
("Jones", "Accounting", 45000, 0.5),
("Adams", "Accounting", 50000, 0.5833333333),
("Brown", "Sales", 53000, 0.6666666667),
("Smith", "Sales", 55000, 0.75),
("Wilkinson", "IT", 60000, 0.8333333333),
("Johnson", "Management", 80000, 0.9166666667),
("Miller", "Management", 100000, 1),
],
lambda row: (
row.name,
row.department,
row.salary,
round(row.cume_dist, 10),
),
)
def test_nthvalue(self):
qs = Employee.objects.annotate(
nth_value=Window(
expression=NthValue(expression="salary", nth=2),
order_by=[F("hire_date").asc(), F("name").desc()],
partition_by=F("department"),
)
).order_by("department", "hire_date", "name")
self.assertQuerySetEqual(
qs,
[
("Jones", "Accounting", datetime.date(2005, 11, 1), 45000, None),
("Jenson", "Accounting", datetime.date(2008, 4, 1), 45000, 45000),
("Williams", "Accounting", datetime.date(2009, 6, 1), 37000, 45000),
("Adams", "Accounting", datetime.date(2013, 7, 1), 50000, 45000),
("Wilkinson", "IT", datetime.date(2011, 3, 1), 60000, None),
("Moore", "IT", datetime.date(2013, 8, 1), 34000, 34000),
("Miller", "Management", datetime.date(2005, 6, 1), 100000, None),
("Johnson", "Management", datetime.date(2005, 7, 1), 80000, 80000),
("Smith", "Marketing", datetime.date(2009, 10, 1), 38000, None),
("Johnson", "Marketing", datetime.date(2012, 3, 1), 40000, 40000),
("Smith", "Sales", datetime.date(2007, 6, 1), 55000, None),
("Brown", "Sales", datetime.date(2009, 9, 1), 53000, 53000),
],
lambda row: (
row.name,
row.department,
row.hire_date,
row.salary,
row.nth_value,
),
)
def test_lead(self):
"""
Determine what the next person hired in the same department makes.
Because the dataset is ambiguous, the name is also part of the
ordering clause. No default is provided, so None/NULL should be
returned.
"""
qs = Employee.objects.annotate(
lead=Window(
expression=Lead(expression="salary"),
order_by=[F("hire_date").asc(), F("name").desc()],
partition_by="department",
)
).order_by("department", F("hire_date").asc(), F("name").desc())
self.assertQuerySetEqual(
qs,
[
("Jones", 45000, "Accounting", datetime.date(2005, 11, 1), 45000),
("Jenson", 45000, "Accounting", datetime.date(2008, 4, 1), 37000),
("Williams", 37000, "Accounting", datetime.date(2009, 6, 1), 50000),
("Adams", 50000, "Accounting", datetime.date(2013, 7, 1), None),
("Wilkinson", 60000, "IT", datetime.date(2011, 3, 1), 34000),
("Moore", 34000, "IT", datetime.date(2013, 8, 1), None),
("Miller", 100000, "Management", datetime.date(2005, 6, 1), 80000),
("Johnson", 80000, "Management", datetime.date(2005, 7, 1), None),
("Smith", 38000, "Marketing", datetime.date(2009, 10, 1), 40000),
("Johnson", 40000, "Marketing", datetime.date(2012, 3, 1), None),
("Smith", 55000, "Sales", datetime.date(2007, 6, 1), 53000),
("Brown", 53000, "Sales", datetime.date(2009, 9, 1), None),
],
transform=lambda row: (
row.name,
row.salary,
row.department,
row.hire_date,
row.lead,
),
)
def test_lead_offset(self):
"""
Determine what the person hired after someone makes. Due to
ambiguity, the name is also included in the ordering.
"""
qs = Employee.objects.annotate(
lead=Window(
expression=Lead("salary", offset=2),
partition_by="department",
order_by=F("hire_date").asc(),
)
)
self.assertQuerySetEqual(
qs,
[
("Jones", 45000, "Accounting", datetime.date(2005, 11, 1), 37000),
("Jenson", 45000, "Accounting", datetime.date(2008, 4, 1), 50000),
("Williams", 37000, "Accounting", datetime.date(2009, 6, 1), None),
("Adams", 50000, "Accounting", datetime.date(2013, 7, 1), None),
("Wilkinson", 60000, "IT", datetime.date(2011, 3, 1), None),
("Moore", 34000, "IT", datetime.date(2013, 8, 1), None),
("Johnson", 80000, "Management", datetime.date(2005, 7, 1), None),
("Miller", 100000, "Management", datetime.date(2005, 6, 1), None),
("Smith", 38000, "Marketing", datetime.date(2009, 10, 1), None),
("Johnson", 40000, "Marketing", datetime.date(2012, 3, 1), None),
("Smith", 55000, "Sales", datetime.date(2007, 6, 1), None),
("Brown", 53000, "Sales", datetime.date(2009, 9, 1), None),
],
transform=lambda row: (
row.name,
row.salary,
row.department,
row.hire_date,
row.lead,
),
ordered=False,
)
@skipUnlessDBFeature("supports_default_in_lead_lag")
def test_lead_default(self):
qs = Employee.objects.annotate(
lead_default=Window(
expression=Lead(expression="salary", offset=5, default=60000),
partition_by=F("department"),
order_by=F("department").asc(),
)
)
self.assertEqual(
list(qs.values_list("lead_default", flat=True).distinct()), [60000]
)
def test_ntile(self):
"""
Compute the group for each of the employees across the entire company,
based on how high the salary is for them. There are twelve employees
so it divides evenly into four groups.
"""
qs = Employee.objects.annotate(
ntile=Window(
expression=Ntile(num_buckets=4),
order_by="-salary",
)
).order_by("ntile", "-salary", "name")
self.assertQuerySetEqual(
qs,
[
("Miller", "Management", 100000, 1),
("Johnson", "Management", 80000, 1),
("Wilkinson", "IT", 60000, 1),
("Smith", "Sales", 55000, 2),
("Brown", "Sales", 53000, 2),
("Adams", "Accounting", 50000, 2),
("Jenson", "Accounting", 45000, 3),
("Jones", "Accounting", 45000, 3),
("Johnson", "Marketing", 40000, 3),
("Smith", "Marketing", 38000, 4),
("Williams", "Accounting", 37000, 4),
("Moore", "IT", 34000, 4),
],
lambda x: (x.name, x.department, x.salary, x.ntile),
)
def test_percent_rank(self):
"""
Calculate the percentage rank of the employees across the entire
company based on salary and name (in case of ambiguity).
"""
qs = Employee.objects.annotate(
percent_rank=Window(
expression=PercentRank(),
order_by=[F("salary").asc(), F("name").asc()],
)
).order_by("percent_rank")
# Round to account for precision differences among databases.
self.assertQuerySetEqual(
qs,
[
("Moore", "IT", 34000, 0.0),
("Williams", "Accounting", 37000, 0.0909090909),
("Smith", "Marketing", 38000, 0.1818181818),
("Johnson", "Marketing", 40000, 0.2727272727),
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | true |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/urlpatterns/path_urls.py | tests/urlpatterns/path_urls.py | from django.urls import include, path, re_path
from . import views
# URLconf exercising path()/re_path() features: literal routes, typed and
# untyped converters, extra view kwargs, includes, and optional regex groups.
urlpatterns = [
    path("articles/2003/", views.empty_view, name="articles-2003"),
    # <int:...> converters capture and coerce numeric path segments.
    path("articles/<int:year>/", views.empty_view, name="articles-year"),
    path(
        "articles/<int:year>/<int:month>/", views.empty_view, name="articles-year-month"
    ),
    path(
        "articles/<int:year>/<int:month>/<int:day>/",
        views.empty_view,
        name="articles-year-month-day",
    ),
    # Routes passing extra keyword arguments through to the view.
    path("books/2007/", views.empty_view, {"extra": True}, name="books-2007"),
    path(
        "books/<int:year>/<int:month>/<int:day>/",
        views.empty_view,
        {"extra": True},
        name="books-year-month-day",
    ),
    path("users/", views.empty_view, name="users"),
    # Untyped parameter: the matched text is passed through as a string.
    path("users/<id>/", views.empty_view, name="user-with-id"),
    path("included_urls/", include("urlpatterns.included_urls")),
    re_path(r"^regex/(?P<pk>[0-9]+)/$", views.empty_view, name="regex"),
    # Optional named groups: arg2 (and arg1 below) may be absent from a match.
    re_path(
        r"^regex_optional/(?P<arg1>\d+)/(?:(?P<arg2>\d+)/)?",
        views.empty_view,
        name="regex_optional",
    ),
    re_path(
        r"^regex_only_optional/(?:(?P<arg1>\d+)/)?",
        views.empty_view,
        name="regex_only_optional",
    ),
    # Include with extra kwargs applied to every route of the included conf.
    path("", include("urlpatterns.more_urls"), {"sub-extra": False}),
    # <path:...> consumes the remainder of the URL, slashes included.
    path("<lang>/<path:url>/", views.empty_view, name="lang-and-path"),
]
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/urlpatterns/converters.py | tests/urlpatterns/converters.py | import base64
class Base64Converter:
    """URL path converter that round-trips base64-encoded byte strings."""

    # Standard base64 alphabet followed by up to two '=' padding characters.
    regex = r"[a-zA-Z0-9+/]*={0,2}"

    def to_python(self, value):
        """Decode the captured URL text into raw bytes."""
        return base64.b64decode(value)

    def to_url(self, value):
        """Encode raw bytes into text suitable for URL reversing."""
        encoded = base64.b64encode(value)
        return encoded.decode("ascii")
class DynamicConverter:
    """Converter whose to_python()/to_url() behavior is supplied per-test
    via the register_to_python()/register_to_url() hooks."""

    # Class-level slots so a registration affects every instance.
    _dynamic_to_python = None
    _dynamic_to_url = None

    @property
    def regex(self):
        return r"[0-9a-zA-Z]+"

    @regex.setter
    def regex(self, value):
        # Bug fix: a property setter is called with the assigned value.
        # Without the `value` parameter, `instance.regex = x` raised
        # TypeError (wrong argument count) instead of this intended guard.
        raise Exception("You can't modify the regular expression.")

    def to_python(self, value):
        # Dispatch through the class so registrations are shared.
        return type(self)._dynamic_to_python(value)

    def to_url(self, value):
        return type(self)._dynamic_to_url(value)

    @classmethod
    def register_to_python(cls, value):
        cls._dynamic_to_python = value

    @classmethod
    def register_to_url(cls, value):
        cls._dynamic_to_url = value
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/urlpatterns/views.py | tests/urlpatterns/views.py | from django.http import HttpResponse
def empty_view(request, *args, **kwargs):
    """Accept any captured arguments and return an empty 200 response."""
    response = HttpResponse()
    return response
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/urlpatterns/path_base64_urls.py | tests/urlpatterns/path_base64_urls.py | from django.urls import include, path, register_converter
from . import converters, views
# The "<base64:...>" converter must be registered before any path() below
# that references it is constructed.
register_converter(converters.Base64Converter, "base64")

# Innermost layer, included under a second-level instance namespace below.
subsubpatterns = [
    path("<base64:last_value>/", views.empty_view, name="subsubpattern-base64"),
]

# Middle layer: a plain route plus an include() that carries both an
# application namespace and an explicit instance namespace.
subpatterns = [
    path("<base64:value>/", views.empty_view, name="subpattern-base64"),
    path(
        "<base64:value>/",
        include(
            (subsubpatterns, "second-layer-namespaced-base64"), "instance-ns-base64"
        ),
    ),
]

urlpatterns = [
    path("base64/<base64:value>/", views.empty_view, name="base64"),
    path("base64/<base64:base>/subpatterns/", include(subpatterns)),
    path(
        "base64/<base64:base>/namespaced/", include((subpatterns, "namespaced-base64"))
    ),
]
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/urlpatterns/test_resolvers.py | tests/urlpatterns/test_resolvers.py | from django.test import SimpleTestCase
from django.test.utils import override_settings
from django.urls.resolvers import RegexPattern, RoutePattern, get_resolver
from django.utils.translation import gettext_lazy as _
from . import views
class RegexPatternTests(SimpleTestCase):
    def test_str(self):
        # str() must force a lazily translated regex to its plain pattern.
        pattern = RegexPattern(_("^translated/$"))
        self.assertEqual(str(pattern), "^translated/$")
class RoutePatternTests(SimpleTestCase):
    def test_str(self):
        pattern = RoutePattern(_("translated/"))
        self.assertEqual(str(pattern), "translated/")

    def test_has_converters(self):
        # (route, expected converter count) — plain and lazy strings alike.
        cases = [
            ("translated/", 0),
            (_("translated/"), 0),
            ("translated/<int:foo>", 1),
            (_("translated/<int:foo>"), 1),
        ]
        for route, expected_count in cases:
            self.assertEqual(len(RoutePattern(route).converters), expected_count)

    def test_match_lazy_route_without_converters(self):
        result = RoutePattern(_("test/")).match("test/child/")
        self.assertEqual(result, ("child/", (), {}))

    def test_match_lazy_route_endpoint(self):
        result = RoutePattern(_("test/"), is_endpoint=True).match("test/")
        self.assertEqual(result, ("", (), {}))

    def test_match_lazy_route_with_converters(self):
        result = RoutePattern(_("test/<int:pk>/")).match("test/123/child/")
        self.assertEqual(result, ("child/", (), {"pk": 123}))
class ResolverCacheTests(SimpleTestCase):
    @override_settings(ROOT_URLCONF="urlpatterns.path_urls")
    def test_resolver_cache_default__root_urlconf(self):
        # get_resolver() with no argument must hand back the identical
        # cached object built for settings.ROOT_URLCONF; any other URLconf
        # gets a distinct resolver.
        default = get_resolver()
        self.assertIs(default, get_resolver("urlpatterns.path_urls"))
        self.assertIsNot(default, get_resolver("urlpatterns.path_dynamic_urls"))
class ResolverLazyIncludeTests(SimpleTestCase):
    def test_lazy_route_resolves(self):
        resolver = get_resolver("urlpatterns.lazy_path_urls")
        cases = (
            ("/lazy/test-me/", "lazy"),
            ("/included_urls/extra/test/", "inner-extra"),
        )
        for url_path, expected_name in cases:
            with self.subTest(name=expected_name):
                resolved = resolver.resolve(url_path)
                self.assertEqual(resolved.func, views.empty_view)
                self.assertEqual(resolved.url_name, expected_name)
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/urlpatterns/converter_urls.py | tests/urlpatterns/converter_urls.py | from django.urls import path
from . import views
urlpatterns = [
path("{x}/<{x}:{x}>/".format(x=name), views.empty_view, name=name)
for name in ("int", "path", "slug", "str", "uuid")
]
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/urlpatterns/lazy_path_urls.py | tests/urlpatterns/lazy_path_urls.py | from django.urls import include, path
from django.utils.translation import gettext_lazy as _
from . import views
# Both routes use gettext_lazy strings to exercise lazy route resolution.
urlpatterns = [
    path(_("included_urls/"), include("urlpatterns.included_urls")),
    path(_("lazy/<slug:slug>/"), views.empty_view, name="lazy"),
]
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/urlpatterns/included_urls.py | tests/urlpatterns/included_urls.py | from django.urls import include, path
from . import views
urlpatterns = [
    # Captures one untyped segment (default <str> converter).
    path("extra/<extra>/", views.empty_view, name="inner-extra"),
    # Empty-prefix include to test nesting without adding a path segment.
    path("", include("urlpatterns.more_urls")),
]
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/urlpatterns/__init__.py | tests/urlpatterns/__init__.py | python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false | |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/urlpatterns/path_same_name_urls.py | tests/urlpatterns/path_same_name_urls.py | from django.urls import path, re_path, register_converter
from . import converters, views
# Converter whose to_url() can be made to raise, used by the last two routes.
register_converter(converters.DynamicConverter, "to_url_value_error")

# Every name below is deliberately shared by several patterns: reverse()
# must fall through to the first pattern whose arguments actually match.
urlpatterns = [
    # Different number of arguments.
    path("number_of_args/0/", views.empty_view, name="number_of_args"),
    path("number_of_args/1/<value>/", views.empty_view, name="number_of_args"),
    # Different names of the keyword arguments.
    path("kwargs_names/a/<a>/", views.empty_view, name="kwargs_names"),
    path("kwargs_names/b/<b>/", views.empty_view, name="kwargs_names"),
    # Different path converters.
    path("converter/path/<path:value>/", views.empty_view, name="converter"),
    path("converter/str/<str:value>/", views.empty_view, name="converter"),
    path("converter/slug/<slug:value>/", views.empty_view, name="converter"),
    path("converter/int/<int:value>/", views.empty_view, name="converter"),
    path("converter/uuid/<uuid:value>/", views.empty_view, name="converter"),
    # Different regular expressions.
    re_path(r"^regex/uppercase/([A-Z]+)/", views.empty_view, name="regex"),
    re_path(r"^regex/lowercase/([a-z]+)/", views.empty_view, name="regex"),
    # converter.to_url() raises ValueError (no match).
    path(
        "converter_to_url/int/<value>/",
        views.empty_view,
        name="converter_to_url",
    ),
    path(
        "converter_to_url/tiny_int/<to_url_value_error:value>/",
        views.empty_view,
        name="converter_to_url",
    ),
]
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/urlpatterns/tests.py | tests/urlpatterns/tests.py | import string
import uuid
from django.core.exceptions import ImproperlyConfigured
from django.test import SimpleTestCase
from django.test.utils import override_settings
from django.urls import (
NoReverseMatch,
Resolver404,
path,
re_path,
register_converter,
resolve,
reverse,
)
from django.urls.converters import REGISTERED_CONVERTERS, IntConverter
from django.views import View
from .converters import Base64Converter, DynamicConverter
from .views import empty_view
# kwargs shared by the nested base64 include() test cases below.
included_kwargs = {"base": b"hello", "value": b"world"}
converter_test_data = (
    # ('url', ('url_name', 'app_name', {kwargs})),
    # aGVsbG8= is 'hello' encoded in base64.
    # d29ybGQ= is 'world' encoded in base64.
    ("/base64/aGVsbG8=/", ("base64", "", {"value": b"hello"})),
    (
        "/base64/aGVsbG8=/subpatterns/d29ybGQ=/",
        ("subpattern-base64", "", included_kwargs),
    ),
    (
        "/base64/aGVsbG8=/namespaced/d29ybGQ=/",
        ("subpattern-base64", "namespaced-base64", included_kwargs),
    ),
)
@override_settings(ROOT_URLCONF="urlpatterns.path_urls")
class SimplifiedURLTests(SimpleTestCase):
    """resolve()/reverse() behavior for path() and re_path() patterns."""

    def test_path_lookup_without_parameters(self):
        match = resolve("/articles/2003/")
        self.assertEqual(match.url_name, "articles-2003")
        self.assertEqual(match.args, ())
        self.assertEqual(match.kwargs, {})
        self.assertEqual(match.route, "articles/2003/")
        self.assertEqual(match.captured_kwargs, {})
        self.assertEqual(match.extra_kwargs, {})

    def test_path_lookup_with_typed_parameters(self):
        match = resolve("/articles/2015/")
        self.assertEqual(match.url_name, "articles-year")
        self.assertEqual(match.args, ())
        self.assertEqual(match.kwargs, {"year": 2015})
        self.assertEqual(match.route, "articles/<int:year>/")
        self.assertEqual(match.captured_kwargs, {"year": 2015})
        self.assertEqual(match.extra_kwargs, {})

    def test_path_lookup_with_multiple_parameters(self):
        match = resolve("/articles/2015/04/12/")
        self.assertEqual(match.url_name, "articles-year-month-day")
        self.assertEqual(match.args, ())
        self.assertEqual(match.kwargs, {"year": 2015, "month": 4, "day": 12})
        self.assertEqual(match.route, "articles/<int:year>/<int:month>/<int:day>/")
        self.assertEqual(match.captured_kwargs, {"year": 2015, "month": 4, "day": 12})
        self.assertEqual(match.extra_kwargs, {})

    def test_path_lookup_with_multiple_parameters_and_extra_kwarg(self):
        match = resolve("/books/2015/04/12/")
        self.assertEqual(match.url_name, "books-year-month-day")
        self.assertEqual(match.args, ())
        self.assertEqual(
            match.kwargs, {"year": 2015, "month": 4, "day": 12, "extra": True}
        )
        self.assertEqual(match.route, "books/<int:year>/<int:month>/<int:day>/")
        self.assertEqual(match.captured_kwargs, {"year": 2015, "month": 4, "day": 12})
        self.assertEqual(match.extra_kwargs, {"extra": True})

    def test_path_lookup_with_extra_kwarg(self):
        match = resolve("/books/2007/")
        self.assertEqual(match.url_name, "books-2007")
        self.assertEqual(match.args, ())
        self.assertEqual(match.kwargs, {"extra": True})
        self.assertEqual(match.route, "books/2007/")
        self.assertEqual(match.captured_kwargs, {})
        self.assertEqual(match.extra_kwargs, {"extra": True})

    def test_two_variable_at_start_of_path_pattern(self):
        match = resolve("/en/foo/")
        self.assertEqual(match.url_name, "lang-and-path")
        self.assertEqual(match.kwargs, {"lang": "en", "url": "foo"})
        self.assertEqual(match.route, "<lang>/<path:url>/")
        self.assertEqual(match.captured_kwargs, {"lang": "en", "url": "foo"})
        self.assertEqual(match.extra_kwargs, {})

    def test_re_path(self):
        match = resolve("/regex/1/")
        self.assertEqual(match.url_name, "regex")
        self.assertEqual(match.kwargs, {"pk": "1"})
        self.assertEqual(match.route, "^regex/(?P<pk>[0-9]+)/$")
        self.assertEqual(match.captured_kwargs, {"pk": "1"})
        self.assertEqual(match.extra_kwargs, {})

    def test_re_path_with_optional_parameter(self):
        for url, kwargs in (
            ("/regex_optional/1/2/", {"arg1": "1", "arg2": "2"}),
            ("/regex_optional/1/", {"arg1": "1"}),
        ):
            with self.subTest(url=url):
                match = resolve(url)
                self.assertEqual(match.url_name, "regex_optional")
                self.assertEqual(match.kwargs, kwargs)
                self.assertEqual(
                    match.route,
                    r"^regex_optional/(?P<arg1>\d+)/(?:(?P<arg2>\d+)/)?",
                )
                self.assertEqual(match.captured_kwargs, kwargs)
                self.assertEqual(match.extra_kwargs, {})

    def test_re_path_with_missing_optional_parameter(self):
        match = resolve("/regex_only_optional/")
        self.assertEqual(match.url_name, "regex_only_optional")
        self.assertEqual(match.kwargs, {})
        self.assertEqual(match.args, ())
        self.assertEqual(
            match.route,
            r"^regex_only_optional/(?:(?P<arg1>\d+)/)?",
        )
        self.assertEqual(match.captured_kwargs, {})
        self.assertEqual(match.extra_kwargs, {})

    def test_path_lookup_with_inclusion(self):
        match = resolve("/included_urls/extra/something/")
        self.assertEqual(match.url_name, "inner-extra")
        self.assertEqual(match.route, "included_urls/extra/<extra>/")

    def test_path_lookup_with_empty_string_inclusion(self):
        match = resolve("/more/99/")
        self.assertEqual(match.url_name, "inner-more")
        self.assertEqual(match.route, r"^more/(?P<extra>\w+)/$")
        # The extra kwarg from the include() merges into kwargs but stays
        # out of captured_kwargs.
        self.assertEqual(match.kwargs, {"extra": "99", "sub-extra": True})
        self.assertEqual(match.captured_kwargs, {"extra": "99"})
        self.assertEqual(match.extra_kwargs, {"sub-extra": True})

    def test_path_lookup_with_double_inclusion(self):
        match = resolve("/included_urls/more/some_value/")
        self.assertEqual(match.url_name, "inner-more")
        self.assertEqual(match.route, r"included_urls/more/(?P<extra>\w+)/$")

    def test_path_reverse_without_parameter(self):
        url = reverse("articles-2003")
        self.assertEqual(url, "/articles/2003/")

    def test_path_reverse_with_parameter(self):
        url = reverse(
            "articles-year-month-day", kwargs={"year": 2015, "month": 4, "day": 12}
        )
        self.assertEqual(url, "/articles/2015/4/12/")

    @override_settings(ROOT_URLCONF="urlpatterns.path_base64_urls")
    def test_converter_resolve(self):
        for url, (url_name, app_name, kwargs) in converter_test_data:
            with self.subTest(url=url):
                match = resolve(url)
                self.assertEqual(match.url_name, url_name)
                self.assertEqual(match.app_name, app_name)
                self.assertEqual(match.kwargs, kwargs)

    @override_settings(ROOT_URLCONF="urlpatterns.path_base64_urls")
    def test_converter_reverse(self):
        for expected, (url_name, app_name, kwargs) in converter_test_data:
            if app_name:
                url_name = "%s:%s" % (app_name, url_name)
            with self.subTest(url=url_name):
                url = reverse(url_name, kwargs=kwargs)
                self.assertEqual(url, expected)

    @override_settings(ROOT_URLCONF="urlpatterns.path_base64_urls")
    def test_converter_reverse_with_second_layer_instance_namespace(self):
        kwargs = included_kwargs.copy()
        kwargs["last_value"] = b"world"
        url = reverse("instance-ns-base64:subsubpattern-base64", kwargs=kwargs)
        self.assertEqual(url, "/base64/aGVsbG8=/subpatterns/d29ybGQ=/d29ybGQ=/")

    def test_path_inclusion_is_matchable(self):
        match = resolve("/included_urls/extra/something/")
        self.assertEqual(match.url_name, "inner-extra")
        self.assertEqual(match.kwargs, {"extra": "something"})

    def test_path_inclusion_is_reversible(self):
        url = reverse("inner-extra", kwargs={"extra": "something"})
        self.assertEqual(url, "/included_urls/extra/something/")

    def test_invalid_kwargs(self):
        msg = "kwargs argument must be a dict, but got str."
        with self.assertRaisesMessage(TypeError, msg):
            path("hello/", empty_view, "name")
        with self.assertRaisesMessage(TypeError, msg):
            re_path("^hello/$", empty_view, "name")

    def test_invalid_converter(self):
        msg = "URL route 'foo/<nonexistent:var>/' uses invalid converter 'nonexistent'."
        with self.assertRaisesMessage(ImproperlyConfigured, msg):
            path("foo/<nonexistent:var>/", empty_view)

    def test_warning_override_default_converter(self):
        msg = "Converter 'int' is already registered."
        with self.assertRaisesMessage(ValueError, msg):
            register_converter(IntConverter, "int")

    def test_warning_override_converter(self):
        msg = "Converter 'base64' is already registered."
        try:
            with self.assertRaisesMessage(ValueError, msg):
                register_converter(Base64Converter, "base64")
                register_converter(Base64Converter, "base64")
        finally:
            # Don't leak the registered converter into other tests.
            REGISTERED_CONVERTERS.pop("base64", None)

    def test_invalid_view(self):
        msg = "view must be a callable or a list/tuple in the case of include()."
        with self.assertRaisesMessage(TypeError, msg):
            path("articles/", "invalid_view")

    def test_invalid_view_instance(self):
        class EmptyCBV(View):
            pass

        msg = "view must be a callable, pass EmptyCBV.as_view(), not EmptyCBV()."
        with self.assertRaisesMessage(TypeError, msg):
            path("foo", EmptyCBV())

    def test_whitespace_in_route(self):
        msg = "URL route %r cannot contain whitespace in angle brackets <…>"
        for whitespace in string.whitespace:
            with self.subTest(repr(whitespace)):
                route = "space/<int:num>/extra/<str:%stest>" % whitespace
                with self.assertRaisesMessage(ImproperlyConfigured, msg % route):
                    path(route, empty_view)
        # Whitespaces are valid in paths.
        p = path("space%s/<int:num>/" % string.whitespace, empty_view)
        match = p.resolve("space%s/1/" % string.whitespace)
        self.assertEqual(match.kwargs, {"num": 1})

    def test_path_trailing_newlines(self):
        tests = [
            "/articles/2003/\n",
            "/articles/2010/\n",
            "/en/foo/\n",
            "/included_urls/extra/\n",
            "/regex/1/\n",
            "/users/1/\n",
        ]
        for url in tests:
            with self.subTest(url=url), self.assertRaises(Resolver404):
                resolve(url)
@override_settings(ROOT_URLCONF="urlpatterns.converter_urls")
class ConverterTests(SimpleTestCase):
    """Round-trip resolve()/reverse() through each built-in path converter."""

    def test_matching_urls(self):
        def no_converter(x):
            return x

        # (url name, suffixes to try, expected to_python conversion).
        test_data = (
            ("int", {"0", "1", "01", 1234567890}, int),
            ("str", {"abcxyz"}, no_converter),
            ("path", {"allows.ANY*characters"}, no_converter),
            ("slug", {"abcxyz-ABCXYZ_01234567890"}, no_converter),
            ("uuid", {"39da9369-838e-4750-91a5-f7805cd82839"}, uuid.UUID),
        )
        for url_name, url_suffixes, converter in test_data:
            for url_suffix in url_suffixes:
                url = "/%s/%s/" % (url_name, url_suffix)
                with self.subTest(url=url):
                    match = resolve(url)
                    self.assertEqual(match.url_name, url_name)
                    self.assertEqual(match.kwargs, {url_name: converter(url_suffix)})
                    # reverse() works with string parameters.
                    string_kwargs = {url_name: url_suffix}
                    self.assertEqual(reverse(url_name, kwargs=string_kwargs), url)
                    # reverse() also works with native types (int, UUID, etc.).
                    if converter is not no_converter:
                        # The converted value might be different for int (a
                        # leading zero is lost in the conversion).
                        converted_value = match.kwargs[url_name]
                        converted_url = "/%s/%s/" % (url_name, converted_value)
                        self.assertEqual(
                            reverse(url_name, kwargs={url_name: converted_value}),
                            converted_url,
                        )

    def test_nonmatching_urls(self):
        test_data = (
            ("int", {"-1", "letters"}),
            ("str", {"", "/"}),
            ("path", {""}),
            ("slug", {"", "stars*notallowed"}),
            (
                "uuid",
                {
                    "",
                    "9da9369-838e-4750-91a5-f7805cd82839",
                    "39da9369-838-4750-91a5-f7805cd82839",
                    "39da9369-838e-475-91a5-f7805cd82839",
                    "39da9369-838e-4750-91a-f7805cd82839",
                    "39da9369-838e-4750-91a5-f7805cd8283",
                },
            ),
        )
        for url_name, url_suffixes in test_data:
            for url_suffix in url_suffixes:
                url = "/%s/%s/" % (url_name, url_suffix)
                with self.subTest(url=url), self.assertRaises(Resolver404):
                    resolve(url)
@override_settings(ROOT_URLCONF="urlpatterns.path_same_name_urls")
class SameNameTests(SimpleTestCase):
    """reverse() must try same-named patterns until one's arguments match."""

    def test_matching_urls_same_name(self):
        @DynamicConverter.register_to_url
        def requires_tiny_int(value):
            # Forces a fall-through to the next same-named pattern for
            # values above 5.
            if value > 5:
                raise ValueError
            return value

        # (url name, [(args, kwargs, expected url suffix), ...]).
        tests = [
            (
                "number_of_args",
                [
                    ([], {}, "0/"),
                    ([1], {}, "1/1/"),
                ],
            ),
            (
                "kwargs_names",
                [
                    ([], {"a": 1}, "a/1/"),
                    ([], {"b": 1}, "b/1/"),
                ],
            ),
            (
                "converter",
                [
                    (["a/b"], {}, "path/a/b/"),
                    (["a b"], {}, "str/a%20b/"),
                    (["a-b"], {}, "slug/a-b/"),
                    (["2"], {}, "int/2/"),
                    (
                        ["39da9369-838e-4750-91a5-f7805cd82839"],
                        {},
                        "uuid/39da9369-838e-4750-91a5-f7805cd82839/",
                    ),
                ],
            ),
            (
                "regex",
                [
                    (["ABC"], {}, "uppercase/ABC/"),
                    (["abc"], {}, "lowercase/abc/"),
                ],
            ),
            (
                "converter_to_url",
                [
                    ([6], {}, "int/6/"),
                    ([1], {}, "tiny_int/1/"),
                ],
            ),
        ]
        for url_name, cases in tests:
            for args, kwargs, url_suffix in cases:
                expected_url = "/%s/%s" % (url_name, url_suffix)
                with self.subTest(url=expected_url):
                    self.assertEqual(
                        reverse(url_name, args=args, kwargs=kwargs),
                        expected_url,
                    )
class ParameterRestrictionTests(SimpleTestCase):
    """Captured parameter names must be valid Python identifiers."""

    def test_integer_parameter_name_causes_exception(self):
        route, param = "hello/<int:1>/", "1"
        expected = (
            "URL route %r uses parameter name %r which isn't a valid Python "
            "identifier." % (route, param)
        )
        with self.assertRaisesMessage(ImproperlyConfigured, expected):
            path(route, lambda r: None)

    def test_non_identifier_parameter_name_causes_exception(self):
        route, param = "b/<int:book.id>/", "book.id"
        expected = (
            "URL route %r uses parameter name %r which isn't a valid Python "
            "identifier." % (route, param)
        )
        with self.assertRaisesMessage(ImproperlyConfigured, expected):
            path(route, lambda r: None)

    def test_allows_non_ascii_but_valid_identifiers(self):
        # \u0394 is "GREEK CAPITAL LETTER DELTA", a valid identifier.
        delta = "\u0394"
        pattern = path("hello/<str:%s>/" % delta, lambda r: None)
        resolved = pattern.resolve("hello/1/")
        self.assertEqual(resolved.kwargs, {delta: "1"})
@override_settings(ROOT_URLCONF="urlpatterns.path_dynamic_urls")
class ConversionExceptionTests(SimpleTestCase):
    """How are errors in Converter.to_python() and to_url() handled?"""

    def test_resolve_value_error_means_no_match(self):
        # ValueError from to_python() is treated as "pattern didn't match".
        @DynamicConverter.register_to_python
        def raises_value_error(value):
            raise ValueError()

        with self.assertRaises(Resolver404):
            resolve("/dynamic/abc/")

    def test_resolve_type_error_propagates(self):
        # Any other exception must bubble up to the caller unchanged.
        @DynamicConverter.register_to_python
        def raises_type_error(value):
            raise TypeError("This type error propagates.")

        with self.assertRaisesMessage(TypeError, "This type error propagates."):
            resolve("/dynamic/abc/")

    def test_reverse_value_error_means_no_match(self):
        # ValueError from to_url() means this pattern can't be reversed.
        @DynamicConverter.register_to_url
        def raises_value_error(value):
            raise ValueError

        with self.assertRaises(NoReverseMatch):
            reverse("dynamic", kwargs={"value": object()})

    def test_reverse_type_error_propagates(self):
        @DynamicConverter.register_to_url
        def raises_type_error(value):
            raise TypeError("This type error propagates.")

        with self.assertRaisesMessage(TypeError, "This type error propagates."):
            reverse("dynamic", kwargs={"value": object()})
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/urlpatterns/more_urls.py | tests/urlpatterns/more_urls.py | from django.urls import re_path
from . import views
urlpatterns = [
    re_path(
        r"^more/(?P<extra>\w+)/$",
        views.empty_view,
        # Extra kwarg with a non-identifier name; merged into match.kwargs.
        {"sub-extra": True},
        name="inner-more",
    ),
]
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/urlpatterns/path_dynamic_urls.py | tests/urlpatterns/path_dynamic_urls.py | from django.urls import path, register_converter
from . import converters, views
# Route whose conversion behavior is injected per-test via DynamicConverter.
register_converter(converters.DynamicConverter, "dynamic")

urlpatterns = [
    path("dynamic/<dynamic:value>/", views.empty_view, name="dynamic"),
]
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/model_inheritance_regress/models.py | tests/model_inheritance_regress/models.py | import datetime
from django.db import models
class Place(models.Model):
    name = models.CharField(max_length=50)
    address = models.CharField(max_length=80)

    class Meta:
        ordering = ("name",)


# Multi-table inheritance chain: Place -> Restaurant -> ItalianRestaurant.
class Restaurant(Place):
    serves_hot_dogs = models.BooleanField(default=False)
    serves_pizza = models.BooleanField(default=False)


class ItalianRestaurant(Restaurant):
    serves_gnocchi = models.BooleanField(default=False)


class ParkingLot(Place):
    # An explicit link to the parent (we can control the attribute name).
    parent = models.OneToOneField(
        Place, models.CASCADE, primary_key=True, parent_link=True
    )
    capacity = models.IntegerField()


class ParkingLot3(Place):
    # The parent_link connector need not be the pk on the model.
    primary_key = models.AutoField(primary_key=True)
    parent = models.OneToOneField(Place, models.CASCADE, parent_link=True)


class ParkingLot4(models.Model):
    # Test parent_link connector can be discovered in abstract classes.
    parent = models.OneToOneField(Place, models.CASCADE, parent_link=True)

    class Meta:
        abstract = True


# Same bases in both MRO orders.
class ParkingLot4A(ParkingLot4, Place):
    pass


class ParkingLot4B(Place, ParkingLot4):
    pass
class Supplier(models.Model):
    name = models.CharField(max_length=50)
    restaurant = models.ForeignKey(Restaurant, models.CASCADE)


class Wholesaler(Supplier):
    # Child model holding an additional FK back to its own concrete parent.
    retailer = models.ForeignKey(
        Supplier, models.CASCADE, related_name="wholesale_supplier"
    )


class Parent(models.Model):
    # Callable default, evaluated on each save.
    created = models.DateTimeField(default=datetime.datetime.now)


class Child(Parent):
    name = models.CharField(max_length=10)


class SelfRefParent(models.Model):
    parent_data = models.IntegerField()
    self_data = models.ForeignKey("self", models.SET_NULL, null=True)


class SelfRefChild(SelfRefParent):
    child_data = models.IntegerField()
class Article(models.Model):
    headline = models.CharField(max_length=100)
    pub_date = models.DateTimeField()

    class Meta:
        ordering = ("-pub_date", "headline")


class ArticleWithAuthor(Article):
    author = models.CharField(max_length=100)


class M2MBase(models.Model):
    articles = models.ManyToManyField(Article)


class M2MChild(M2MBase):
    name = models.CharField(max_length=50)


# Abstract model sandwiched between two concrete models.
class Evaluation(Article):
    quality = models.IntegerField()

    class Meta:
        abstract = True


class QualityControl(Evaluation):
    assignee = models.CharField(max_length=50)


class BaseM(models.Model):
    base_name = models.CharField(max_length=100)


class DerivedM(BaseM):
    # Child declares its own explicit primary key.
    customPK = models.IntegerField(primary_key=True)
    derived_name = models.CharField(max_length=100)


class AuditBase(models.Model):
    planned_date = models.DateField()

    class Meta:
        abstract = True
        verbose_name_plural = "Audits"


# Meta inheritance keeps abstract=True through an intermediate class.
class CertificationAudit(AuditBase):
    class Meta(AuditBase.Meta):
        abstract = True


class InternalCertificationAudit(CertificationAudit):
    auditing_dept = models.CharField(max_length=20)
# Abstract classes don't get m2m tables autocreated.
class Person(models.Model):
    name = models.CharField(max_length=100)

    class Meta:
        ordering = ("name",)


class AbstractEvent(models.Model):
    name = models.CharField(max_length=100)
    # %(class)s gives each concrete subclass its own reverse accessor name.
    attendees = models.ManyToManyField(Person, related_name="%(class)s_set")

    class Meta:
        abstract = True
        ordering = ("name",)


class BirthdayParty(AbstractEvent):
    pass


class BachelorParty(AbstractEvent):
    pass


class MessyBachelorParty(BachelorParty):
    pass
# Check concrete -> abstract -> concrete inheritance
class SearchableLocation(models.Model):
    keywords = models.CharField(max_length=255)


class Station(SearchableLocation):
    name = models.CharField(max_length=128)

    class Meta:
        abstract = True


class BusStation(Station):
    inbound = models.BooleanField(default=False)


class TrainStation(Station):
    zone = models.IntegerField()


class User(models.Model):
    username = models.CharField(max_length=30, unique=True)


class Profile(User):
    # Child with its own AutoField pk alongside the implicit parent link.
    profile_id = models.AutoField(primary_key=True)
    extra = models.CharField(max_length=30, blank=True)


# Check concrete + concrete -> concrete -> concrete
class Politician(models.Model):
    politician_id = models.AutoField(primary_key=True)
    title = models.CharField(max_length=50)


class Congressman(Person, Politician):
    state = models.CharField(max_length=2)


class Senator(Congressman):
    pass
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/model_inheritance_regress/__init__.py | tests/model_inheritance_regress/__init__.py | python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false | |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/model_inheritance_regress/tests.py | tests/model_inheritance_regress/tests.py | """
Regression tests for Model inheritance behavior.
"""
import datetime
from operator import attrgetter
from unittest import expectedFailure
from django import forms
from django.db.models import FETCH_PEERS
from django.test import TestCase
from .models import (
ArticleWithAuthor,
BachelorParty,
BirthdayParty,
BusStation,
Child,
Congressman,
DerivedM,
InternalCertificationAudit,
ItalianRestaurant,
M2MChild,
MessyBachelorParty,
ParkingLot,
ParkingLot3,
ParkingLot4A,
ParkingLot4B,
Person,
Place,
Politician,
Profile,
QualityControl,
Restaurant,
SelfRefChild,
SelfRefParent,
Senator,
Supplier,
TrainStation,
User,
Wholesaler,
)
class ModelInheritanceTest(TestCase):
def test_model_inheritance(self):
# Regression for #7350, #7202
# When you create a Parent object with a specific reference to an
# existent child instance, saving the Parent doesn't duplicate the
# child. This behavior is only activated during a raw save - it is
# mostly relevant to deserialization, but any sort of CORBA style
# 'narrow()' API would require a similar approach.
# Create a child-parent-grandparent chain
place1 = Place(name="Guido's House of Pasta", address="944 W. Fullerton")
place1.save_base(raw=True)
restaurant = Restaurant(
place_ptr=place1,
serves_hot_dogs=True,
serves_pizza=False,
)
restaurant.save_base(raw=True)
italian_restaurant = ItalianRestaurant(
restaurant_ptr=restaurant, serves_gnocchi=True
)
italian_restaurant.save_base(raw=True)
# Create a child-parent chain with an explicit parent link
place2 = Place(name="Main St", address="111 Main St")
place2.save_base(raw=True)
park = ParkingLot(parent=place2, capacity=100)
park.save_base(raw=True)
# No extra parent objects have been created.
places = list(Place.objects.all())
self.assertEqual(places, [place1, place2])
dicts = list(Restaurant.objects.values("name", "serves_hot_dogs"))
self.assertEqual(
dicts, [{"name": "Guido's House of Pasta", "serves_hot_dogs": True}]
)
dicts = list(
ItalianRestaurant.objects.values(
"name", "serves_hot_dogs", "serves_gnocchi"
)
)
self.assertEqual(
dicts,
[
{
"name": "Guido's House of Pasta",
"serves_gnocchi": True,
"serves_hot_dogs": True,
}
],
)
dicts = list(ParkingLot.objects.values("name", "capacity"))
self.assertEqual(
dicts,
[
{
"capacity": 100,
"name": "Main St",
}
],
)
# You can also update objects when using a raw save.
place1.name = "Guido's All New House of Pasta"
place1.save_base(raw=True)
restaurant.serves_hot_dogs = False
restaurant.save_base(raw=True)
italian_restaurant.serves_gnocchi = False
italian_restaurant.save_base(raw=True)
place2.name = "Derelict lot"
place2.save_base(raw=True)
park.capacity = 50
park.save_base(raw=True)
# No extra parent objects after an update, either.
places = list(Place.objects.all())
self.assertEqual(places, [place2, place1])
self.assertEqual(places[0].name, "Derelict lot")
self.assertEqual(places[1].name, "Guido's All New House of Pasta")
dicts = list(Restaurant.objects.values("name", "serves_hot_dogs"))
self.assertEqual(
dicts,
[
{
"name": "Guido's All New House of Pasta",
"serves_hot_dogs": False,
}
],
)
dicts = list(
ItalianRestaurant.objects.values(
"name", "serves_hot_dogs", "serves_gnocchi"
)
)
self.assertEqual(
dicts,
[
{
"name": "Guido's All New House of Pasta",
"serves_gnocchi": False,
"serves_hot_dogs": False,
}
],
)
dicts = list(ParkingLot.objects.values("name", "capacity"))
self.assertEqual(
dicts,
[
{
"capacity": 50,
"name": "Derelict lot",
}
],
)
# If you try to raw_save a parent attribute onto a child object,
# the attribute will be ignored.
italian_restaurant.name = "Lorenzo's Pasta Hut"
italian_restaurant.save_base(raw=True)
# Note that the name has not changed
# - name is an attribute of Place, not ItalianRestaurant
dicts = list(
ItalianRestaurant.objects.values(
"name", "serves_hot_dogs", "serves_gnocchi"
)
)
self.assertEqual(
dicts,
[
{
"name": "Guido's All New House of Pasta",
"serves_gnocchi": False,
"serves_hot_dogs": False,
}
],
)
def test_issue_7105(self):
# Regressions tests for #7105: dates() queries should be able to use
# fields from the parent model as easily as the child.
Child.objects.create(
name="child", created=datetime.datetime(2008, 6, 26, 17, 0, 0)
)
datetimes = list(Child.objects.datetimes("created", "month"))
self.assertEqual(datetimes, [datetime.datetime(2008, 6, 1, 0, 0)])
def test_issue_7276(self):
# Regression test for #7276: calling delete() on a model with
# multi-table inheritance should delete the associated rows from any
# ancestor tables, as well as any descendent objects.
place1 = Place(name="Guido's House of Pasta", address="944 W. Fullerton")
place1.save_base(raw=True)
restaurant = Restaurant(
place_ptr=place1,
serves_hot_dogs=True,
serves_pizza=False,
)
restaurant.save_base(raw=True)
italian_restaurant = ItalianRestaurant(
restaurant_ptr=restaurant, serves_gnocchi=True
)
italian_restaurant.save_base(raw=True)
ident = ItalianRestaurant.objects.all()[0].id
self.assertEqual(Place.objects.get(pk=ident), place1)
Restaurant.objects.create(
name="a",
address="xx",
serves_hot_dogs=True,
serves_pizza=False,
)
# This should delete both Restaurants, plus the related places, plus
# the ItalianRestaurant.
Restaurant.objects.all().delete()
with self.assertRaises(Place.DoesNotExist):
Place.objects.get(pk=ident)
with self.assertRaises(ItalianRestaurant.DoesNotExist):
ItalianRestaurant.objects.get(pk=ident)
def test_issue_6755(self):
"""
Regression test for #6755
"""
r = Restaurant(serves_pizza=False, serves_hot_dogs=False)
r.save()
self.assertEqual(r.id, r.place_ptr_id)
orig_id = r.id
r = Restaurant(place_ptr_id=orig_id, serves_pizza=True, serves_hot_dogs=False)
r.save()
self.assertEqual(r.id, orig_id)
self.assertEqual(r.id, r.place_ptr_id)
def test_issue_11764(self):
"""
Regression test for #11764
"""
wholesalers = list(Wholesaler.objects.select_related())
self.assertEqual(wholesalers, [])
def test_issue_7853(self):
"""
Regression test for #7853
If the parent class has a self-referential link, make sure that any
updates to that link via the child update the right table.
"""
obj = SelfRefChild.objects.create(child_data=37, parent_data=42)
obj.delete()
def test_get_next_previous_by_date(self):
"""
Regression tests for #8076
get_(next/previous)_by_date should work
"""
c1 = ArticleWithAuthor(
headline="ArticleWithAuthor 1",
author="Person 1",
pub_date=datetime.datetime(2005, 8, 1, 3, 0),
)
c1.save()
c2 = ArticleWithAuthor(
headline="ArticleWithAuthor 2",
author="Person 2",
pub_date=datetime.datetime(2005, 8, 1, 10, 0),
)
c2.save()
c3 = ArticleWithAuthor(
headline="ArticleWithAuthor 3",
author="Person 3",
pub_date=datetime.datetime(2005, 8, 2),
)
c3.save()
self.assertEqual(c1.get_next_by_pub_date(), c2)
self.assertEqual(c2.get_next_by_pub_date(), c3)
with self.assertRaises(ArticleWithAuthor.DoesNotExist):
c3.get_next_by_pub_date()
self.assertEqual(c3.get_previous_by_pub_date(), c2)
self.assertEqual(c2.get_previous_by_pub_date(), c1)
with self.assertRaises(ArticleWithAuthor.DoesNotExist):
c1.get_previous_by_pub_date()
def test_inherited_fields(self):
"""
Regression test for #8825 and #9390
Make sure all inherited fields (esp. m2m fields, in this case) appear
on the child class.
"""
m2mchildren = list(M2MChild.objects.filter(articles__isnull=False))
self.assertEqual(m2mchildren, [])
# Ordering should not include any database column more than once (this
# is most likely to occur naturally with model inheritance, so we
# check it here). Regression test for #9390. This necessarily pokes at
# the SQL string for the query, since the duplicate problems are only
# apparent at that late stage.
qs = ArticleWithAuthor.objects.order_by("pub_date", "pk")
sql = qs.query.get_compiler(qs.db).as_sql()[0]
fragment = sql[sql.find("ORDER BY") :]
pos = fragment.find("pub_date")
self.assertEqual(fragment.find("pub_date", pos + 1), -1)
    def test_queryset_update_on_parent_model(self):
        """
        Regression test for #10362
        It is possible to call update() and only change a field in
        an ancestor model.
        """
        article = ArticleWithAuthor.objects.create(
            author="fred",
            headline="Hey there!",
            pub_date=datetime.datetime(2009, 3, 1, 8, 0, 0),
        )
        # Updating via a filter on a child-table field (author) ...
        update = ArticleWithAuthor.objects.filter(author="fred").update(
            headline="Oh, no!"
        )
        self.assertEqual(update, 1)
        # ... and via a filter on the primary key both report one row changed.
        update = ArticleWithAuthor.objects.filter(pk=article.pk).update(
            headline="Oh, no!"
        )
        self.assertEqual(update, 1)
        # NOTE(review): the DerivedM assertions below exercise custom-pk
        # inheritance and look unrelated to the update() regression above —
        # possibly a separate test method whose `def` line was lost; confirm
        # against upstream history.
        derivedm1 = DerivedM.objects.create(
            customPK=44,
            base_name="b1",
            derived_name="d1",
        )
        self.assertEqual(derivedm1.customPK, 44)
        self.assertEqual(derivedm1.base_name, "b1")
        self.assertEqual(derivedm1.derived_name, "d1")
        derivedms = list(DerivedM.objects.all())
        self.assertEqual(derivedms, [derivedm1])
    def test_use_explicit_o2o_to_parent_as_pk(self):
        """
        The connector from child to parent need not be the pk on the child.
        """
        # ParkingLot3 declares its own pk field ...
        self.assertEqual(ParkingLot3._meta.pk.name, "primary_key")
        # ... while the child->parent link is a separate, non-pk field.
        self.assertEqual(ParkingLot3._meta.get_ancestor_link(Place).name, "parent")
def test_use_explicit_o2o_to_parent_from_abstract_model(self):
self.assertEqual(ParkingLot4A._meta.pk.name, "parent")
ParkingLot4A.objects.create(
name="Parking4A",
address="21 Jump Street",
)
self.assertEqual(ParkingLot4B._meta.pk.name, "parent")
ParkingLot4A.objects.create(
name="Parking4B",
address="21 Jump Street",
)
def test_all_fields_from_abstract_base_class(self):
"""
Regression tests for #7588
"""
# All fields from an ABC, including those inherited non-abstractly
# should be available on child classes (#7588). Creating this instance
# should work without error.
QualityControl.objects.create(
headline="Problems in Django",
pub_date=datetime.datetime.now(),
quality=10,
assignee="adrian",
)
def test_abstract_base_class_m2m_relation_inheritance(self):
# many-to-many relations defined on an abstract base class are
# correctly inherited (and created) on the child class.
p1 = Person.objects.create(name="Alice")
p2 = Person.objects.create(name="Bob")
p3 = Person.objects.create(name="Carol")
p4 = Person.objects.create(name="Dave")
birthday = BirthdayParty.objects.create(name="Birthday party for Alice")
birthday.attendees.set([p1, p3])
bachelor = BachelorParty.objects.create(name="Bachelor party for Bob")
bachelor.attendees.set([p2, p4])
parties = list(p1.birthdayparty_set.all())
self.assertEqual(parties, [birthday])
parties = list(p1.bachelorparty_set.all())
self.assertEqual(parties, [])
parties = list(p2.bachelorparty_set.all())
self.assertEqual(parties, [bachelor])
# A subclass of a subclass of an abstract model doesn't get its own
# accessor.
self.assertFalse(hasattr(p2, "messybachelorparty_set"))
# ... but it does inherit the m2m from its parent
messy = MessyBachelorParty.objects.create(name="Bachelor party for Dave")
messy.attendees.set([p4])
messy_parent = messy.bachelorparty_ptr
parties = list(p4.bachelorparty_set.all())
self.assertEqual(parties, [bachelor, messy_parent])
def test_abstract_verbose_name_plural_inheritance(self):
"""
verbose_name_plural correctly inherited from ABC if inheritance chain
includes an abstract model.
"""
# Regression test for #11369: verbose_name_plural should be inherited
# from an ABC even when there are one or more intermediate
# abstract models in the inheritance chain, for consistency with
# verbose_name.
self.assertEqual(InternalCertificationAudit._meta.verbose_name_plural, "Audits")
def test_inherited_nullable_exclude(self):
obj = SelfRefChild.objects.create(child_data=37, parent_data=42)
self.assertQuerySetEqual(
SelfRefParent.objects.exclude(self_data=72), [obj.pk], attrgetter("pk")
)
self.assertQuerySetEqual(
SelfRefChild.objects.exclude(self_data=72), [obj.pk], attrgetter("pk")
)
def test_concrete_abstract_concrete_pk(self):
"""
Primary key set correctly with concrete->abstract->concrete
inheritance.
"""
# Regression test for #13987: Primary key is incorrectly determined
# when more than one model has a concrete->abstract->concrete
# inheritance hierarchy.
self.assertEqual(
len(
[field for field in BusStation._meta.local_fields if field.primary_key]
),
1,
)
self.assertEqual(
len(
[
field
for field in TrainStation._meta.local_fields
if field.primary_key
]
),
1,
)
self.assertIs(BusStation._meta.pk.model, BusStation)
self.assertIs(TrainStation._meta.pk.model, TrainStation)
def test_inherited_unique_field_with_form(self):
"""
A model which has different primary key for the parent model passes
unique field checking correctly (#17615).
"""
class ProfileForm(forms.ModelForm):
class Meta:
model = Profile
fields = "__all__"
User.objects.create(username="user_only")
p = Profile.objects.create(username="user_with_profile")
form = ProfileForm(
{"username": "user_with_profile", "extra": "hello"}, instance=p
)
self.assertTrue(form.is_valid())
def test_inheritance_joins(self):
# Test for #17502 - check that filtering through two levels of
# inheritance chain doesn't generate extra joins.
qs = ItalianRestaurant.objects.all()
self.assertEqual(str(qs.query).count("JOIN"), 2)
qs = ItalianRestaurant.objects.filter(name="foo")
self.assertEqual(str(qs.query).count("JOIN"), 2)
    @expectedFailure
    def test_inheritance_values_joins(self):
        """Known limitation: values_list() still joins the middle table."""
        # It would be nice (but not too important) to skip the middle join in
        # this case. Skipping is possible as nothing from the middle model is
        # used in the qs and top contains direct pointer to the bottom model.
        qs = ItalianRestaurant.objects.values_list("serves_gnocchi").filter(name="foo")
        self.assertEqual(str(qs.query).count("JOIN"), 1)
def test_issue_21554(self):
senator = Senator.objects.create(name="John Doe", title="X", state="Y")
senator = Senator.objects.get(pk=senator.pk)
self.assertEqual(senator.name, "John Doe")
self.assertEqual(senator.title, "X")
self.assertEqual(senator.state, "Y")
def test_inheritance_resolve_columns(self):
Restaurant.objects.create(
name="Bobs Cafe",
address="Somewhere",
serves_pizza=True,
serves_hot_dogs=True,
)
p = Place.objects.select_related("restaurant")[0]
self.assertIsInstance(p.restaurant.serves_pizza, bool)
def test_inheritance_select_related(self):
# Regression test for #7246
r1 = Restaurant.objects.create(
name="Nobu", serves_hot_dogs=True, serves_pizza=False
)
r2 = Restaurant.objects.create(
name="Craft", serves_hot_dogs=False, serves_pizza=True
)
Supplier.objects.create(name="John", restaurant=r1)
Supplier.objects.create(name="Jane", restaurant=r2)
self.assertQuerySetEqual(
Supplier.objects.order_by("name").select_related(),
[
"Jane",
"John",
],
attrgetter("name"),
)
jane = Supplier.objects.order_by("name").select_related("restaurant")[0]
self.assertEqual(jane.restaurant.name, "Craft")
def test_filter_with_parent_fk(self):
r = Restaurant.objects.create()
s = Supplier.objects.create(restaurant=r)
# The mismatch between Restaurant and Place is intentional (#28175).
self.assertSequenceEqual(
Supplier.objects.filter(restaurant__in=Place.objects.all()), [s]
)
def test_ptr_accessor_assigns_state(self):
r = Restaurant.objects.create()
self.assertIs(r.place_ptr._state.adding, False)
self.assertEqual(r.place_ptr._state.db, "default")
def test_related_filtering_query_efficiency_ticket_15844(self):
r = Restaurant.objects.create(
name="Guido's House of Pasta",
address="944 W. Fullerton",
serves_hot_dogs=True,
serves_pizza=False,
)
s = Supplier.objects.create(restaurant=r)
with self.assertNumQueries(1):
self.assertSequenceEqual(Supplier.objects.filter(restaurant=r), [s])
with self.assertNumQueries(1):
self.assertSequenceEqual(r.supplier_set.all(), [s])
    def test_queries_on_parent_access(self):
        """
        Accessing parent objects only queries when a needed field was
        deferred at load time.
        """
        italian_restaurant = ItalianRestaurant.objects.create(
            name="Guido's House of Pasta",
            address="944 W. Fullerton",
            serves_hot_dogs=True,
            serves_pizza=False,
            serves_gnocchi=True,
        )
        # No queries are made when accessing the parent objects.
        italian_restaurant = ItalianRestaurant.objects.get(pk=italian_restaurant.pk)
        with self.assertNumQueries(0):
            restaurant = italian_restaurant.restaurant_ptr
            self.assertEqual(restaurant.place_ptr.restaurant, restaurant)
            self.assertEqual(restaurant.italianrestaurant, italian_restaurant)
        # One query is made when accessing the parent objects when the instance
        # is deferred.
        italian_restaurant = ItalianRestaurant.objects.only("serves_gnocchi").get(
            pk=italian_restaurant.pk
        )
        with self.assertNumQueries(1):
            restaurant = italian_restaurant.restaurant_ptr
            self.assertEqual(restaurant.place_ptr.restaurant, restaurant)
            self.assertEqual(restaurant.italianrestaurant, italian_restaurant)
        # No queries are made when accessing the parent objects when the
        # instance has deferred a field not present in the parent table.
        italian_restaurant = ItalianRestaurant.objects.defer("serves_gnocchi").get(
            pk=italian_restaurant.pk
        )
        with self.assertNumQueries(0):
            restaurant = italian_restaurant.restaurant_ptr
            self.assertEqual(restaurant.place_ptr.restaurant, restaurant)
            self.assertEqual(restaurant.italianrestaurant, italian_restaurant)
def test_parent_access_copies_fetch_mode(self):
italian_restaurant = ItalianRestaurant.objects.create(
name="Mom's Spaghetti",
address="2131 Woodward Ave",
serves_hot_dogs=False,
serves_pizza=False,
serves_gnocchi=True,
)
# No queries are made when accessing the parent objects.
italian_restaurant = ItalianRestaurant.objects.fetch_mode(FETCH_PEERS).get(
pk=italian_restaurant.pk
)
restaurant = italian_restaurant.restaurant_ptr
self.assertEqual(restaurant._state.fetch_mode, FETCH_PEERS)
    def test_id_field_update_on_ancestor_change(self):
        """Reassigning a parent link keeps the child's pk and id in sync."""
        place1 = Place.objects.create(name="House of Pasta", address="944 Fullerton")
        place2 = Place.objects.create(name="House of Pizza", address="954 Fullerton")
        place3 = Place.objects.create(name="Burger house", address="964 Fullerton")
        restaurant1 = Restaurant.objects.create(
            place_ptr=place1,
            serves_hot_dogs=True,
            serves_pizza=False,
        )
        restaurant2 = Restaurant.objects.create(
            place_ptr=place2,
            serves_hot_dogs=True,
            serves_pizza=False,
        )
        italian_restaurant = ItalianRestaurant.objects.create(
            restaurant_ptr=restaurant1,
            serves_gnocchi=True,
        )
        # Changing the parent of a restaurant changes the restaurant's ID & PK.
        restaurant1.place_ptr = place3
        self.assertEqual(restaurant1.pk, place3.pk)
        self.assertEqual(restaurant1.id, place3.id)
        self.assertEqual(restaurant1.pk, restaurant1.id)
        # Clearing the parent link clears pk and id as well.
        restaurant1.place_ptr = None
        self.assertIsNone(restaurant1.pk)
        self.assertIsNone(restaurant1.id)
        # Changing the parent of an italian restaurant changes the restaurant's
        # ID & PK.
        italian_restaurant.restaurant_ptr = restaurant2
        self.assertEqual(italian_restaurant.pk, restaurant2.pk)
        self.assertEqual(italian_restaurant.id, restaurant2.id)
        self.assertEqual(italian_restaurant.pk, italian_restaurant.id)
        italian_restaurant.restaurant_ptr = None
        self.assertIsNone(italian_restaurant.pk)
        self.assertIsNone(italian_restaurant.id)
def test_create_new_instance_with_pk_equals_none(self):
p1 = Profile.objects.create(username="john")
p2 = User.objects.get(pk=p1.user_ptr_id).profile
# Create a new profile by setting pk = None.
p2.pk = None
p2.user_ptr_id = None
p2.username = "bill"
p2.save()
self.assertEqual(Profile.objects.count(), 2)
self.assertEqual(User.objects.get(pk=p1.user_ptr_id).username, "john")
def test_create_new_instance_with_pk_equals_none_multi_inheritance(self):
c1 = Congressman.objects.create(state="PA", name="John", title="senator 1")
c2 = Person.objects.get(pk=c1.pk).congressman
# Create a new congressman by setting pk = None.
c2.pk = None
c2.id = None
c2.politician_ptr_id = None
c2.name = "Bill"
c2.title = "senator 2"
c2.save()
self.assertEqual(Congressman.objects.count(), 2)
self.assertEqual(Person.objects.get(pk=c1.pk).name, "John")
self.assertEqual(
Politician.objects.get(pk=c1.politician_ptr_id).title,
"senator 1",
)
    def test_mti_update_parent_through_child(self):
        """update() on a child queryset can set a parent-table field."""
        Politician.objects.create()
        Congressman.objects.create()
        Congressman.objects.update(title="senator 1")
        self.assertEqual(Congressman.objects.get().title, "senator 1")
    def test_mti_update_grand_parent_through_child(self):
        """update() on a grandchild queryset can set a grandparent field."""
        Politician.objects.create()
        Senator.objects.create()
        Senator.objects.update(title="senator 1")
        self.assertEqual(Senator.objects.get().title, "senator 1")
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/asgi/__init__.py | tests/asgi/__init__.py | python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false | |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/asgi/tests.py | tests/asgi/tests.py | import asyncio
import sys
import tempfile
import threading
import time
from pathlib import Path
from unittest.mock import patch
from asgiref.sync import sync_to_async
from asgiref.testing import ApplicationCommunicator
from django.contrib.staticfiles.handlers import ASGIStaticFilesHandler
from django.core.asgi import get_asgi_application
from django.core.exceptions import RequestDataTooBig
from django.core.handlers.asgi import ASGIHandler, ASGIRequest
from django.core.signals import request_finished, request_started
from django.db import close_old_connections
from django.http import HttpResponse, StreamingHttpResponse
from django.test import (
AsyncRequestFactory,
SimpleTestCase,
ignore_warnings,
modify_settings,
override_settings,
)
from django.test.utils import captured_stderr
from django.urls import path
from django.utils.http import http_date
from django.views.decorators.csrf import csrf_exempt
from .urls import sync_waiter, test_filename
# Directory of static assets used by the ASGIStaticFilesHandler tests below.
TEST_STATIC_ROOT = Path(__file__).parent / "project" / "static"
class SignalHandler:
    """Helper class to track threads and kwargs when signals are dispatched."""
    def __init__(self):
        super().__init__()
        # One dict per dispatch: the thread it ran on and the signal kwargs.
        self.calls = []
    def __call__(self, signal, **kwargs):
        record = {"thread": threading.current_thread(), "kwargs": kwargs}
        self.calls.append(record)
@override_settings(ROOT_URLCONF="asgi.urls")
class ASGITest(SimpleTestCase):
async_request_factory = AsyncRequestFactory()
    def setUp(self):
        # Detach close_old_connections from request_started for the duration
        # of the test; reconnect it during cleanup.
        request_started.disconnect(close_old_connections)
        self.addCleanup(request_started.connect, close_old_connections)
    async def test_get_asgi_application(self):
        """
        get_asgi_application() returns a functioning ASGI callable.
        """
        application = get_asgi_application()
        # Construct HTTP request.
        scope = self.async_request_factory._base_scope(path="/")
        communicator = ApplicationCommunicator(application, scope)
        await communicator.send_input({"type": "http.request"})
        # Read the response.
        response_start = await communicator.receive_output()
        self.assertEqual(response_start["type"], "http.response.start")
        self.assertEqual(response_start["status"], 200)
        # Compare headers as a set: their order is not significant.
        self.assertEqual(
            set(response_start["headers"]),
            {
                (b"Content-Length", b"12"),
                (b"Content-Type", b"text/html; charset=utf-8"),
            },
        )
        response_body = await communicator.receive_output()
        self.assertEqual(response_body["type"], "http.response.body")
        self.assertEqual(response_body["body"], b"Hello World!")
        # Allow response.close() to finish.
        await communicator.wait()
    async def test_asgi_cookies(self):
        """Set-Cookie headers produced by the view reach the ASGI response."""
        application = get_asgi_application()
        scope = self.async_request_factory._base_scope(path="/cookie/")
        communicator = ApplicationCommunicator(application, scope)
        await communicator.send_input({"type": "http.request"})
        response_start = await communicator.receive_output()
        self.assertIn((b"Set-Cookie", b"key=value; Path=/"), response_start["headers"])
        # Allow response.close() to finish.
        await communicator.wait()
# Python's file API is not async compatible. A third-party library such
# as https://github.com/Tinche/aiofiles allows passing the file to
# FileResponse as an async iterator. With a sync iterator
# StreamingHTTPResponse triggers a warning when iterating the file.
# assertWarnsMessage is not async compatible, so ignore_warnings for the
# test.
    @ignore_warnings(module="django.http.response")
    async def test_file_response(self):
        """
        Makes sure that FileResponse works over ASGI.
        """
        application = get_asgi_application()
        # Construct HTTP request.
        scope = self.async_request_factory._base_scope(path="/file/")
        communicator = ApplicationCommunicator(application, scope)
        await communicator.send_input({"type": "http.request"})
        # Get the file content.
        with open(test_filename, "rb") as test_file:
            test_file_contents = test_file.read()
        # Read the response.
        with captured_stderr():
            response_start = await communicator.receive_output()
        self.assertEqual(response_start["type"], "http.response.start")
        self.assertEqual(response_start["status"], 200)
        headers = response_start["headers"]
        self.assertEqual(len(headers), 3)
        # Expected headers for the served file: length, type, disposition.
        expected_headers = {
            b"Content-Length": str(len(test_file_contents)).encode("ascii"),
            b"Content-Type": b"text/x-python",
            b"Content-Disposition": b'inline; filename="urls.py"',
        }
        for key, value in headers:
            try:
                self.assertEqual(value, expected_headers[key])
            except AssertionError:
                # Windows registry may not be configured with correct
                # mimetypes.
                if sys.platform == "win32" and key == b"Content-Type":
                    self.assertEqual(value, b"text/plain")
                else:
                    raise
        # Warning ignored here.
        response_body = await communicator.receive_output()
        self.assertEqual(response_body["type"], "http.response.body")
        self.assertEqual(response_body["body"], test_file_contents)
        # Allow response.close() to finish.
        await communicator.wait()
    @modify_settings(INSTALLED_APPS={"append": "django.contrib.staticfiles"})
    @override_settings(
        STATIC_URL="static/",
        STATIC_ROOT=TEST_STATIC_ROOT,
        STATICFILES_DIRS=[TEST_STATIC_ROOT],
        STATICFILES_FINDERS=[
            "django.contrib.staticfiles.finders.FileSystemFinder",
        ],
    )
    async def test_static_file_response(self):
        """ASGIStaticFilesHandler serves a static file with full headers."""
        application = ASGIStaticFilesHandler(get_asgi_application())
        # Construct HTTP request.
        scope = self.async_request_factory._base_scope(path="/static/file.txt")
        communicator = ApplicationCommunicator(application, scope)
        await communicator.send_input({"type": "http.request"})
        # Get the file content.
        file_path = TEST_STATIC_ROOT / "file.txt"
        with open(file_path, "rb") as test_file:
            test_file_contents = test_file.read()
        # Read the response.
        stat = file_path.stat()
        response_start = await communicator.receive_output()
        self.assertEqual(response_start["type"], "http.response.start")
        self.assertEqual(response_start["status"], 200)
        # Last-Modified is derived from the file's mtime on disk.
        self.assertEqual(
            set(response_start["headers"]),
            {
                (b"Content-Length", str(len(test_file_contents)).encode("ascii")),
                (b"Content-Type", b"text/plain"),
                (b"Content-Disposition", b'inline; filename="file.txt"'),
                (b"Last-Modified", http_date(stat.st_mtime).encode("ascii")),
            },
        )
        response_body = await communicator.receive_output()
        self.assertEqual(response_body["type"], "http.response.body")
        self.assertEqual(response_body["body"], test_file_contents)
        # Allow response.close() to finish.
        await communicator.wait()
    async def test_headers(self):
        """
        Duplicate request headers reach the handler; the /meta/ view echoes
        both referer values joined with a comma.
        """
        application = get_asgi_application()
        communicator = ApplicationCommunicator(
            application,
            self.async_request_factory._base_scope(
                path="/meta/",
                headers=[
                    [b"content-type", b"text/plain; charset=utf-8"],
                    [b"content-length", b"77"],
                    [b"referer", b"Scotland"],
                    [b"referer", b"Wales"],
                ],
            ),
        )
        await communicator.send_input({"type": "http.request"})
        response_start = await communicator.receive_output()
        self.assertEqual(response_start["type"], "http.response.start")
        self.assertEqual(response_start["status"], 200)
        self.assertEqual(
            set(response_start["headers"]),
            {
                (b"Content-Length", b"19"),
                (b"Content-Type", b"text/plain; charset=utf-8"),
            },
        )
        response_body = await communicator.receive_output()
        self.assertEqual(response_body["type"], "http.response.body")
        self.assertEqual(response_body["body"], b"From Scotland,Wales")
        # Allow response.close() to finish
        await communicator.wait()
    async def test_post_body(self):
        """The POST request body is readable by the view and echoed back."""
        application = get_asgi_application()
        scope = self.async_request_factory._base_scope(
            method="POST",
            path="/post/",
            query_string="echo=1",
        )
        communicator = ApplicationCommunicator(application, scope)
        await communicator.send_input({"type": "http.request", "body": b"Echo!"})
        response_start = await communicator.receive_output()
        self.assertEqual(response_start["type"], "http.response.start")
        self.assertEqual(response_start["status"], 200)
        response_body = await communicator.receive_output()
        self.assertEqual(response_body["type"], "http.response.body")
        self.assertEqual(response_body["body"], b"Echo!")
    async def test_create_request_error(self):
        """request_finished is sent once even if request creation raises."""
        # Track request_finished signal.
        signal_handler = SignalHandler()
        request_finished.connect(signal_handler)
        self.addCleanup(request_finished.disconnect, signal_handler)
        # Request class that always fails creation with RequestDataTooBig.
        class TestASGIRequest(ASGIRequest):
            def __init__(self, scope, body_file):
                super().__init__(scope, body_file)
                raise RequestDataTooBig()
        # Handler to use the custom request class.
        class TestASGIHandler(ASGIHandler):
            request_class = TestASGIRequest
        application = TestASGIHandler()
        scope = self.async_request_factory._base_scope(path="/not-important/")
        communicator = ApplicationCommunicator(application, scope)
        # Initiate request.
        await communicator.send_input({"type": "http.request"})
        # Give response.close() time to finish.
        await communicator.wait()
        # The signal fired exactly once, and not on the test's own thread.
        self.assertEqual(len(signal_handler.calls), 1)
        self.assertNotEqual(
            signal_handler.calls[0]["thread"], threading.current_thread()
        )
    async def test_cancel_post_request_with_sync_processing(self):
        """
        The request.body object should be available and readable in view
        code, even if the ASGIHandler cancels processing part way through.
        """
        loop = asyncio.get_event_loop()
        # Events to monitor the view processing from the parent test code.
        view_started_event = asyncio.Event()
        view_finished_event = asyncio.Event()
        # Record received request body or exceptions raised in the test view
        outcome = []
        # This view will run in a new thread because it is wrapped in
        # sync_to_async. The view consumes the POST body data after a short
        # delay. The test will cancel the request using http.disconnect during
        # the delay, but because this is a sync view the code runs to
        # completion. There should be no exceptions raised inside the view
        # code.
        @csrf_exempt
        @sync_to_async
        def post_view(request):
            try:
                loop.call_soon_threadsafe(view_started_event.set)
                time.sleep(0.1)
                # Do something to read request.body after pause
                outcome.append({"request_body": request.body})
                return HttpResponse("ok")
            except Exception as e:
                outcome.append({"exception": e})
            finally:
                loop.call_soon_threadsafe(view_finished_event.set)
        # Request class to use the view.
        class TestASGIRequest(ASGIRequest):
            urlconf = (path("post/", post_view),)
        # Handler to use request class.
        class TestASGIHandler(ASGIHandler):
            request_class = TestASGIRequest
        application = TestASGIHandler()
        scope = self.async_request_factory._base_scope(
            method="POST",
            path="/post/",
        )
        communicator = ApplicationCommunicator(application, scope)
        await communicator.send_input({"type": "http.request", "body": b"Body data!"})
        # Wait until the view code has started, then send http.disconnect.
        await view_started_event.wait()
        await communicator.send_input({"type": "http.disconnect"})
        # Wait until view code has finished.
        await view_finished_event.wait()
        # No response is produced after the disconnect.
        with self.assertRaises(asyncio.TimeoutError):
            await communicator.receive_output()
        self.assertEqual(outcome, [{"request_body": b"Body data!"}])
    async def test_untouched_request_body_gets_closed(self):
        """A POST whose body is never read still completes (204, empty body)."""
        application = get_asgi_application()
        scope = self.async_request_factory._base_scope(method="POST", path="/post/")
        communicator = ApplicationCommunicator(application, scope)
        await communicator.send_input({"type": "http.request"})
        response_start = await communicator.receive_output()
        self.assertEqual(response_start["type"], "http.response.start")
        self.assertEqual(response_start["status"], 204)
        response_body = await communicator.receive_output()
        self.assertEqual(response_body["type"], "http.response.body")
        self.assertEqual(response_body["body"], b"")
        # Allow response.close() to finish
        await communicator.wait()
    async def test_get_query_string(self):
        """Query strings are accepted as both bytes and str in the scope."""
        application = get_asgi_application()
        for query_string in (b"name=Andrew", "name=Andrew"):
            with self.subTest(query_string=query_string):
                scope = self.async_request_factory._base_scope(
                    path="/",
                    query_string=query_string,
                )
                communicator = ApplicationCommunicator(application, scope)
                await communicator.send_input({"type": "http.request"})
                response_start = await communicator.receive_output()
                self.assertEqual(response_start["type"], "http.response.start")
                self.assertEqual(response_start["status"], 200)
                response_body = await communicator.receive_output()
                self.assertEqual(response_body["type"], "http.response.body")
                self.assertEqual(response_body["body"], b"Hello Andrew!")
                # Allow response.close() to finish
                await communicator.wait()
    async def test_disconnect(self):
        """http.disconnect before any request body produces no response."""
        application = get_asgi_application()
        scope = self.async_request_factory._base_scope(path="/")
        communicator = ApplicationCommunicator(application, scope)
        await communicator.send_input({"type": "http.disconnect"})
        with self.assertRaises(asyncio.TimeoutError):
            await communicator.receive_output()
    async def test_disconnect_both_return(self):
        """A disconnect arriving as the response completes doesn't hang."""
        # Force both the disconnect listener and the task that sends the
        # response to finish at the same time.
        application = get_asgi_application()
        scope = self.async_request_factory._base_scope(path="/")
        communicator = ApplicationCommunicator(application, scope)
        await communicator.send_input({"type": "http.request", "body": b"some body"})
        # Fetch response headers (this yields to asyncio and causes
        # ASGHandler.send_response() to dump the body of the response in the
        # queue).
        await communicator.receive_output()
        # Fetch response body (there's already some data queued up, so this
        # doesn't actually yield to the event loop, it just succeeds
        # instantly).
        await communicator.receive_output()
        # Send disconnect at the same time that response finishes (this just
        # puts some info in a queue, it doesn't have to yield to the event
        # loop).
        await communicator.send_input({"type": "http.disconnect"})
        # Waiting for the communicator _does_ yield to the event loop, since
        # ASGIHandler.send_response() is still waiting to do response.close().
        # It so happens that there are enough remaining yield points in both
        # tasks that they both finish while the loop is running.
        await communicator.wait()
    async def test_disconnect_with_body(self):
        """http.disconnect right after the body suppresses the response."""
        application = get_asgi_application()
        scope = self.async_request_factory._base_scope(path="/")
        communicator = ApplicationCommunicator(application, scope)
        await communicator.send_input({"type": "http.request", "body": b"some body"})
        await communicator.send_input({"type": "http.disconnect"})
        with self.assertRaises(asyncio.TimeoutError):
            await communicator.receive_output()
    async def test_assert_in_listen_for_disconnect(self):
        """An unknown ASGI message type after the body raises AssertionError."""
        application = get_asgi_application()
        scope = self.async_request_factory._base_scope(path="/")
        communicator = ApplicationCommunicator(application, scope)
        await communicator.send_input({"type": "http.request"})
        await communicator.send_input({"type": "http.not_a_real_message"})
        msg = "Invalid ASGI message after request body: http.not_a_real_message"
        with self.assertRaisesMessage(AssertionError, msg):
            await communicator.wait()
    async def test_delayed_disconnect_with_body(self):
        """Disconnect sent after the body yields no output from /delayed_hello/."""
        application = get_asgi_application()
        scope = self.async_request_factory._base_scope(path="/delayed_hello/")
        communicator = ApplicationCommunicator(application, scope)
        await communicator.send_input({"type": "http.request", "body": b"some body"})
        await communicator.send_input({"type": "http.disconnect"})
        with self.assertRaises(asyncio.TimeoutError):
            await communicator.receive_output()
    async def test_wrong_connection_type(self):
        """Non-HTTP scope types are rejected with a descriptive ValueError."""
        application = get_asgi_application()
        scope = self.async_request_factory._base_scope(path="/", type="other")
        communicator = ApplicationCommunicator(application, scope)
        await communicator.send_input({"type": "http.request"})
        msg = "Django can only handle ASGI/HTTP connections, not other."
        with self.assertRaisesMessage(ValueError, msg):
            await communicator.receive_output()
    async def test_non_unicode_query_string(self):
        """An undecodable query string yields a 400 response with empty body."""
        application = get_asgi_application()
        scope = self.async_request_factory._base_scope(path="/", query_string=b"\xff")
        communicator = ApplicationCommunicator(application, scope)
        await communicator.send_input({"type": "http.request"})
        response_start = await communicator.receive_output()
        self.assertEqual(response_start["type"], "http.response.start")
        self.assertEqual(response_start["status"], 400)
        response_body = await communicator.receive_output()
        self.assertEqual(response_body["type"], "http.response.body")
        self.assertEqual(response_body["body"], b"")
    async def test_request_lifecycle_signals_dispatched_with_thread_sensitive(self):
        """request_started and request_finished run on the same thread."""
        # Track request_started and request_finished signals.
        signal_handler = SignalHandler()
        request_started.connect(signal_handler)
        self.addCleanup(request_started.disconnect, signal_handler)
        request_finished.connect(signal_handler)
        self.addCleanup(request_finished.disconnect, signal_handler)
        # Perform a basic request.
        application = get_asgi_application()
        scope = self.async_request_factory._base_scope(path="/")
        communicator = ApplicationCommunicator(application, scope)
        await communicator.send_input({"type": "http.request"})
        response_start = await communicator.receive_output()
        self.assertEqual(response_start["type"], "http.response.start")
        self.assertEqual(response_start["status"], 200)
        response_body = await communicator.receive_output()
        self.assertEqual(response_body["type"], "http.response.body")
        self.assertEqual(response_body["body"], b"Hello World!")
        # Give response.close() time to finish.
        await communicator.wait()
        # AsyncToSync should have executed the signals in the same thread.
        self.assertEqual(len(signal_handler.calls), 2)
        request_started_call, request_finished_call = signal_handler.calls
        self.assertEqual(
            request_started_call["thread"], request_finished_call["thread"]
        )
    async def test_concurrent_async_uses_multiple_thread_pools(self):
        """
        Two overlapping requests to the barrier-synchronized /wait/ view must
        be served on two distinct threads (one executor per request).
        """
        sync_waiter.active_threads.clear()
        # Send 2 requests concurrently
        application = get_asgi_application()
        scope = self.async_request_factory._base_scope(path="/wait/")
        communicators = []
        for _ in range(2):
            communicators.append(ApplicationCommunicator(application, scope))
            await communicators[-1].send_input({"type": "http.request"})
        # Each request must complete with a status code of 200
        # If requests aren't scheduled concurrently, the barrier in the
        # sync_wait view will time out, resulting in a 500 status code.
        for communicator in communicators:
            response_start = await communicator.receive_output()
            self.assertEqual(response_start["type"], "http.response.start")
            self.assertEqual(response_start["status"], 200)
            response_body = await communicator.receive_output()
            self.assertEqual(response_body["type"], "http.response.body")
            self.assertEqual(response_body["body"], b"Hello World!")
            # Give response.close() time to finish.
            await communicator.wait()
        # The requests should have scheduled on different threads. Note
        # active_threads is a set (a thread can only appear once), therefore
        # length is a sufficient check.
        self.assertEqual(len(sync_waiter.active_threads), 2)
        sync_waiter.active_threads.clear()
    async def test_asyncio_cancel_error(self):
        """
        A client disconnect before the view responds cancels the view
        coroutine; request_finished still fires exactly once, off the event
        loop thread. Without a disconnect, the cycle completes normally.
        """
        view_started = asyncio.Event()
        # Flag to check if the view was cancelled.
        view_did_cancel = False
        # Track request_finished signal.
        signal_handler = SignalHandler()
        request_finished.connect(signal_handler)
        self.addCleanup(request_finished.disconnect, signal_handler)
        # A view that will listen for the cancelled error.
        async def view(request):
            nonlocal view_did_cancel
            view_started.set()
            try:
                await asyncio.sleep(0.1)
                return HttpResponse("Hello World!")
            except asyncio.CancelledError:
                # Set the flag.
                view_did_cancel = True
                raise
        # Request class to use the view.
        class TestASGIRequest(ASGIRequest):
            urlconf = (path("cancel/", view),)
        # Handler to use request class.
        class TestASGIHandler(ASGIHandler):
            request_class = TestASGIRequest
        # Request cycle should complete since no disconnect was sent.
        application = TestASGIHandler()
        scope = self.async_request_factory._base_scope(path="/cancel/")
        communicator = ApplicationCommunicator(application, scope)
        await communicator.send_input({"type": "http.request"})
        response_start = await communicator.receive_output()
        self.assertEqual(response_start["type"], "http.response.start")
        self.assertEqual(response_start["status"], 200)
        response_body = await communicator.receive_output()
        self.assertEqual(response_body["type"], "http.response.body")
        self.assertEqual(response_body["body"], b"Hello World!")
        # Give response.close() time to finish.
        await communicator.wait()
        self.assertIs(view_did_cancel, False)
        # Exactly one call to request_finished handler.
        self.assertEqual(len(signal_handler.calls), 1)
        handler_call = signal_handler.calls.pop()
        # It was NOT on the async thread.
        self.assertNotEqual(handler_call["thread"], threading.current_thread())
        # The signal sender is the handler class.
        self.assertEqual(handler_call["kwargs"], {"sender": TestASGIHandler})
        view_started.clear()
        # Request cycle with a disconnect before the view can respond.
        application = TestASGIHandler()
        scope = self.async_request_factory._base_scope(path="/cancel/")
        communicator = ApplicationCommunicator(application, scope)
        await communicator.send_input({"type": "http.request"})
        # Let the view actually start.
        await view_started.wait()
        # Disconnect the client.
        await communicator.send_input({"type": "http.disconnect"})
        # The handler should not send a response.
        with self.assertRaises(asyncio.TimeoutError):
            await communicator.receive_output()
        await communicator.wait()
        self.assertIs(view_did_cancel, True)
        # Exactly one call to request_finished handler.
        self.assertEqual(len(signal_handler.calls), 1)
        handler_call = signal_handler.calls.pop()
        # It was NOT on the async thread.
        self.assertNotEqual(handler_call["thread"], threading.current_thread())
        # The signal sender is the handler class.
        self.assertEqual(handler_call["kwargs"], {"sender": TestASGIHandler})
    async def test_asyncio_streaming_cancel_error(self):
        """
        A disconnect during a streaming response cancels the streaming
        generator; request_finished still fires exactly once, off the event
        loop thread.
        """
        # Similar to test_asyncio_cancel_error(), but during a streaming
        # response.
        view_did_cancel = False
        # Track request_finished signals.
        signal_handler = SignalHandler()
        request_finished.connect(signal_handler)
        self.addCleanup(request_finished.disconnect, signal_handler)
        async def streaming_response():
            nonlocal view_did_cancel
            try:
                await asyncio.sleep(0.2)
                yield b"Hello World!"
            except asyncio.CancelledError:
                # Set the flag.
                view_did_cancel = True
                raise
        async def view(request):
            return StreamingHttpResponse(streaming_response())
        class TestASGIRequest(ASGIRequest):
            urlconf = (path("cancel/", view),)
        class TestASGIHandler(ASGIHandler):
            request_class = TestASGIRequest
        # With no disconnect, the request cycle should complete in the same
        # manner as the non-streaming response.
        application = TestASGIHandler()
        scope = self.async_request_factory._base_scope(path="/cancel/")
        communicator = ApplicationCommunicator(application, scope)
        await communicator.send_input({"type": "http.request"})
        response_start = await communicator.receive_output()
        self.assertEqual(response_start["type"], "http.response.start")
        self.assertEqual(response_start["status"], 200)
        response_body = await communicator.receive_output()
        self.assertEqual(response_body["type"], "http.response.body")
        self.assertEqual(response_body["body"], b"Hello World!")
        await communicator.wait()
        self.assertIs(view_did_cancel, False)
        # Exactly one call to request_finished handler.
        self.assertEqual(len(signal_handler.calls), 1)
        handler_call = signal_handler.calls.pop()
        # It was NOT on the async thread.
        self.assertNotEqual(handler_call["thread"], threading.current_thread())
        # The signal sender is the handler class.
        self.assertEqual(handler_call["kwargs"], {"sender": TestASGIHandler})
        # Request cycle with a disconnect.
        application = TestASGIHandler()
        scope = self.async_request_factory._base_scope(path="/cancel/")
        communicator = ApplicationCommunicator(application, scope)
        await communicator.send_input({"type": "http.request"})
        response_start = await communicator.receive_output()
        # Fetch the start of response so streaming can begin
        self.assertEqual(response_start["type"], "http.response.start")
        self.assertEqual(response_start["status"], 200)
        await asyncio.sleep(0.1)
        # Now disconnect the client.
        await communicator.send_input({"type": "http.disconnect"})
        # This time the handler should not send a response.
        with self.assertRaises(asyncio.TimeoutError):
            await communicator.receive_output()
        await communicator.wait()
        self.assertIs(view_did_cancel, True)
        # Exactly one call to request_finished handler.
        self.assertEqual(len(signal_handler.calls), 1)
        handler_call = signal_handler.calls.pop()
        # It was NOT on the async thread.
        self.assertNotEqual(handler_call["thread"], threading.current_thread())
        # The signal sender is the handler class.
        self.assertEqual(handler_call["kwargs"], {"sender": TestASGIHandler})
    async def test_streaming(self):
        """
        A streaming response arrives as multiple http.response.body messages
        (one per yielded chunk, then a final terminator) and produces no
        further output afterwards.
        """
        scope = self.async_request_factory._base_scope(
            path="/streaming/", query_string=b"sleep=0.001"
        )
        application = get_asgi_application()
        communicator = ApplicationCommunicator(application, scope)
        await communicator.send_input({"type": "http.request"})
        # Fetch http.response.start.
        await communicator.receive_output(timeout=1)
        # Fetch the 'first' and 'last'.
        first_response = await communicator.receive_output(timeout=1)
        self.assertEqual(first_response["body"], b"first\n")
        second_response = await communicator.receive_output(timeout=1)
        self.assertEqual(second_response["body"], b"last\n")
        # Fetch the rest of the response so that coroutines are cleaned up.
        await communicator.receive_output(timeout=1)
        with self.assertRaises(asyncio.TimeoutError):
            await communicator.receive_output(timeout=1)
    async def test_streaming_disconnect(self):
        """
        Disconnecting during the stream's inter-chunk sleep stops the
        stream: the second chunk is never sent.
        """
        scope = self.async_request_factory._base_scope(
            path="/streaming/", query_string=b"sleep=0.1"
        )
        application = get_asgi_application()
        communicator = ApplicationCommunicator(application, scope)
        await communicator.send_input({"type": "http.request"})
        await communicator.receive_output(timeout=1)
        first_response = await communicator.receive_output(timeout=1)
        self.assertEqual(first_response["body"], b"first\n")
        # Disconnect the client.
        await communicator.send_input({"type": "http.disconnect"})
        # 'last\n' isn't sent.
        with self.assertRaises(asyncio.TimeoutError):
            await communicator.receive_output(timeout=0.2)
async def test_read_body_thread(self):
"""Write runs on correct thread depending on rollover."""
handler = ASGIHandler()
loop_thread = threading.current_thread()
called_threads = []
def write_wrapper(data):
called_threads.append(threading.current_thread())
return original_write(data)
# In-memory write (no rollover expected).
in_memory_chunks = [
{"type": "http.request", "body": b"small", "more_body": False}
]
async def receive():
return in_memory_chunks.pop(0)
with tempfile.SpooledTemporaryFile(max_size=1024, mode="w+b") as temp_file:
original_write = temp_file.write
with (
patch(
"django.core.handlers.asgi.tempfile.SpooledTemporaryFile",
return_value=temp_file,
),
patch.object(temp_file, "write", side_effect=write_wrapper),
):
await handler.read_body(receive)
# Write was called in the event loop thread.
self.assertIn(loop_thread, called_threads)
# Clear thread log before next test.
called_threads.clear()
# Rollover to disk (write should occur in a threadpool thread).
rolled_chunks = [
{"type": "http.request", "body": b"A" * 16, "more_body": True},
{"type": "http.request", "body": b"B" * 16, "more_body": False},
]
async def receive_rolled():
return rolled_chunks.pop(0)
with (
override_settings(FILE_UPLOAD_MAX_MEMORY_SIZE=10),
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | true |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/asgi/urls.py | tests/asgi/urls.py | import asyncio
import threading
import time
from django.http import FileResponse, HttpResponse, StreamingHttpResponse
from django.urls import path
from django.views.decorators.csrf import csrf_exempt
def hello(request):
    """Greet the caller, honoring an optional ``name`` query parameter."""
    who = request.GET.get("name") or "World"
    return HttpResponse("Hello %s!" % who)
def hello_with_delay(request):
    """Identical to hello(), but sleeps one second before responding."""
    time.sleep(1)
    name = request.GET.get("name") or "World"
    return HttpResponse(f"Hello {name}!")
def hello_meta(request):
    """
    Echo the request's Referer header ("From <referer>", empty fallback)
    using the request's own Content-Type.
    """
    # Parenthesize the fallback: "%" binds tighter than "or", so the prior
    # expression evaluated as ("From %s" % referer) or "" — the "" default
    # was unreachable and a missing Referer rendered as "From None".
    return HttpResponse(
        "From %s" % (request.META.get("HTTP_REFERER") or ""),
        content_type=request.META.get("CONTENT_TYPE"),
    )
def hello_cookie(request):
    """Respond with "Hello World!" and set a sample cookie."""
    resp = HttpResponse("Hello World!")
    resp.set_cookie("key", "value")
    return resp
def sync_waiter(request):
    # Rendezvous view proving that concurrent requests run on distinct
    # threads: each request records its thread, then waits on a 2-party
    # barrier. If a second request never arrives, the 0.5s timeout breaks
    # the barrier (the test expects a 500 in that case). The lock, barrier,
    # and active_threads attributes are attached to the function object
    # right after this definition.
    with sync_waiter.lock:
        sync_waiter.active_threads.add(threading.current_thread())
    sync_waiter.barrier.wait(timeout=0.5)
    return hello(request)
@csrf_exempt
def post_echo(request):
    """Echo the request body when ?echo= is truthy; otherwise reply 204."""
    if not request.GET.get("echo"):
        return HttpResponse(status=204)
    return HttpResponse(request.body)
# Shared state for the sync_waiter view, attached to the function object:
# the set of threads that entered the view, a lock guarding that set, and a
# 2-party barrier both concurrent requests must reach.
sync_waiter.active_threads = set()
sync_waiter.lock = threading.Lock()
sync_waiter.barrier = threading.Barrier(2)
async def streaming_inner(sleep_time):
    # Async generator: emit one chunk, pause for sleep_time seconds, then
    # emit a final chunk. The pause gives tests a window to disconnect
    # mid-stream.
    yield b"first\n"
    await asyncio.sleep(sleep_time)
    yield b"last\n"
async def streaming_view(request):
    """Stream two chunks, sleeping ?sleep= seconds between them."""
    delay = float(request.GET["sleep"])
    return StreamingHttpResponse(streaming_inner(delay))
# The /file/ view below serves this module's own source file.
test_filename = __file__
urlpatterns = [
    path("", hello),
    path("cookie/", hello_cookie),
    path("file/", lambda x: FileResponse(open(test_filename, "rb"))),
    path("meta/", hello_meta),
    path("post/", post_echo),
    path("wait/", sync_waiter),
    path("delayed_hello/", hello_with_delay),
    path("streaming/", streaming_view),
]
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/apps/models.py | tests/apps/models.py | from django.apps.registry import Apps
from django.db import models
# We're testing app registry presence on load, so this is handy.
# An isolated registry, separate from the default django.apps.apps, that
# contains only the local "apps" application.
new_apps = Apps(["apps"])
class TotallyNormal(models.Model):
    # Registered with the default (global) app registry.
    name = models.CharField(max_length=255)
class SoAlternative(models.Model):
    # Registered with the isolated `new_apps` registry (via Meta.apps)
    # instead of the default one.
    name = models.CharField(max_length=255)
    class Meta:
        apps = new_apps
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/apps/__init__.py | tests/apps/__init__.py | python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false | |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/apps/tests.py | tests/apps/tests.py | import os
from unittest.mock import patch
import django
from django.apps import AppConfig, apps
from django.apps.registry import Apps
from django.contrib.admin.models import LogEntry
from django.core.exceptions import AppRegistryNotReady, ImproperlyConfigured
from django.db import connections, models
from django.test import (
SimpleTestCase,
TransactionTestCase,
override_settings,
skipUnlessDBFeature,
)
from django.test.utils import extend_sys_path, isolate_apps
from django.utils.functional import cached_property
from .models import SoAlternative, TotallyNormal, new_apps
from .one_config_app.apps import OneConfig
from .two_configs_one_default_app.apps import TwoConfig
# Small list with a variety of cases for tests that iterate on installed apps.
# Intentionally not in alphabetical order to check if the order is preserved.
SOME_INSTALLED_APPS = [
    "apps.apps.MyAdmin",
    "apps.apps.MyAuth",
    "django.contrib.contenttypes",
    "django.contrib.sessions",
    "django.contrib.messages",
    "django.contrib.staticfiles",
]
# Same list, with the two custom AppConfig entries replaced by the dotted
# names of the apps they configure.
SOME_INSTALLED_APPS_NAMES = [
    "django.contrib.admin",
    "django.contrib.auth",
] + SOME_INSTALLED_APPS[2:]
# Directory containing this test module (used by the namespace-package tests).
HERE = os.path.dirname(__file__)
class AppsTests(SimpleTestCase):
    """Tests for the main app registry (django.apps.apps) and Apps class."""
    def test_singleton_main(self):
        """
        Only one main registry can exist.
        """
        with self.assertRaises(RuntimeError):
            Apps(installed_apps=None)
    def test_ready(self):
        """
        Tests the ready property of the main registry.
        """
        # The main app registry is always ready when the tests run.
        self.assertIs(apps.ready, True)
        # Non-main app registries are populated in __init__.
        self.assertIs(Apps().ready, True)
        # The condition is set when apps are ready
        self.assertIs(apps.ready_event.is_set(), True)
        self.assertIs(Apps().ready_event.is_set(), True)
    def test_bad_app_config(self):
        """
        Tests when INSTALLED_APPS contains an incorrect app config.
        """
        msg = "'apps.apps.BadConfig' must supply a name attribute."
        with self.assertRaisesMessage(ImproperlyConfigured, msg):
            with self.settings(INSTALLED_APPS=["apps.apps.BadConfig"]):
                pass
    def test_not_an_app_config(self):
        """
        Tests when INSTALLED_APPS contains a class that isn't an app config.
        """
        msg = "'apps.apps.NotAConfig' isn't a subclass of AppConfig."
        with self.assertRaisesMessage(ImproperlyConfigured, msg):
            with self.settings(INSTALLED_APPS=["apps.apps.NotAConfig"]):
                pass
    def test_no_such_app(self):
        """
        Tests when INSTALLED_APPS contains an app that doesn't exist, either
        directly or via an app config.
        """
        with self.assertRaises(ImportError):
            with self.settings(INSTALLED_APPS=["there is no such app"]):
                pass
        msg = (
            "Cannot import 'there is no such app'. Check that "
            "'apps.apps.NoSuchApp.name' is correct."
        )
        with self.assertRaisesMessage(ImproperlyConfigured, msg):
            with self.settings(INSTALLED_APPS=["apps.apps.NoSuchApp"]):
                pass
    def test_no_such_app_config(self):
        """A missing AppConfig class raises ImportError with a clear message."""
        msg = "Module 'apps' does not contain a 'NoSuchConfig' class."
        with self.assertRaisesMessage(ImportError, msg):
            with self.settings(INSTALLED_APPS=["apps.NoSuchConfig"]):
                pass
    def test_no_such_app_config_with_choices(self):
        """The ImportError lists the AppConfig classes the module does contain."""
        msg = (
            "Module 'apps.apps' does not contain a 'NoSuchConfig' class. "
            "Choices are: 'BadConfig', 'ModelPKAppsConfig', 'MyAdmin', "
            "'MyAuth', 'NoSuchApp', 'PlainAppsConfig', 'RelabeledAppsConfig'."
        )
        with self.assertRaisesMessage(ImportError, msg):
            with self.settings(INSTALLED_APPS=["apps.apps.NoSuchConfig"]):
                pass
    def test_no_config_app(self):
        """Load an app that doesn't provide an AppConfig class."""
        with self.settings(INSTALLED_APPS=["apps.no_config_app"]):
            config = apps.get_app_config("no_config_app")
        self.assertIsInstance(config, AppConfig)
    def test_one_config_app(self):
        """Load an app that provides an AppConfig class."""
        with self.settings(INSTALLED_APPS=["apps.one_config_app"]):
            config = apps.get_app_config("one_config_app")
        self.assertIsInstance(config, OneConfig)
    def test_two_configs_app(self):
        """Load an app that provides two AppConfig classes."""
        with self.settings(INSTALLED_APPS=["apps.two_configs_app"]):
            config = apps.get_app_config("two_configs_app")
        self.assertIsInstance(config, AppConfig)
    def test_two_default_configs_app(self):
        """Load an app that provides two default AppConfig classes."""
        msg = (
            "'apps.two_default_configs_app.apps' declares more than one "
            "default AppConfig: 'TwoConfig', 'TwoConfigBis'."
        )
        with self.assertRaisesMessage(RuntimeError, msg):
            with self.settings(INSTALLED_APPS=["apps.two_default_configs_app"]):
                pass
    def test_two_configs_one_default_app(self):
        """
        Load an app that provides two AppConfig classes, one being the default.
        """
        with self.settings(INSTALLED_APPS=["apps.two_configs_one_default_app"]):
            config = apps.get_app_config("two_configs_one_default_app")
        self.assertIsInstance(config, TwoConfig)
    @override_settings(INSTALLED_APPS=SOME_INSTALLED_APPS)
    def test_get_app_configs(self):
        """
        Tests apps.get_app_configs().
        """
        app_configs = apps.get_app_configs()
        self.assertEqual(
            [app_config.name for app_config in app_configs], SOME_INSTALLED_APPS_NAMES
        )
    @override_settings(INSTALLED_APPS=SOME_INSTALLED_APPS)
    def test_get_app_config(self):
        """
        Tests apps.get_app_config().
        """
        app_config = apps.get_app_config("admin")
        self.assertEqual(app_config.name, "django.contrib.admin")
        app_config = apps.get_app_config("staticfiles")
        self.assertEqual(app_config.name, "django.contrib.staticfiles")
        with self.assertRaises(LookupError):
            apps.get_app_config("admindocs")
        msg = "No installed app with label 'django.contrib.auth'. Did you mean 'myauth'"
        with self.assertRaisesMessage(LookupError, msg):
            apps.get_app_config("django.contrib.auth")
    @override_settings(INSTALLED_APPS=SOME_INSTALLED_APPS)
    def test_is_installed(self):
        """
        Tests apps.is_installed().
        """
        self.assertIs(apps.is_installed("django.contrib.admin"), True)
        self.assertIs(apps.is_installed("django.contrib.auth"), True)
        self.assertIs(apps.is_installed("django.contrib.staticfiles"), True)
        self.assertIs(apps.is_installed("django.contrib.admindocs"), False)
    @override_settings(INSTALLED_APPS=SOME_INSTALLED_APPS)
    def test_get_model(self):
        """
        Tests apps.get_model().
        """
        self.assertEqual(apps.get_model("admin", "LogEntry"), LogEntry)
        with self.assertRaises(LookupError):
            apps.get_model("admin", "LogExit")
        # App label is case-sensitive, Model name is case-insensitive.
        self.assertEqual(apps.get_model("admin", "loGentrY"), LogEntry)
        with self.assertRaises(LookupError):
            apps.get_model("Admin", "LogEntry")
        # A single argument is accepted.
        self.assertEqual(apps.get_model("admin.LogEntry"), LogEntry)
        with self.assertRaises(LookupError):
            apps.get_model("admin.LogExit")
        with self.assertRaises(ValueError):
            apps.get_model("admin_LogEntry")
    @override_settings(INSTALLED_APPS=SOME_INSTALLED_APPS)
    def test_clear_cache(self):
        """clear_cache() empties the lru caches on the registry."""
        # Set cache.
        self.assertIsNone(apps.get_swappable_settings_name("admin.LogEntry"))
        apps.get_models()
        apps.clear_cache()
        self.assertEqual(apps.get_swappable_settings_name.cache_info().currsize, 0)
        self.assertEqual(apps.get_models.cache_info().currsize, 0)
    @override_settings(INSTALLED_APPS=SOME_INSTALLED_APPS)
    def test_cached_properties_cleared_after_cache_clear(self):
        """clear_cache() also expires cached_property values on model Options."""
        opts = apps.get_model("admin", "LogEntry")._meta
        cached_properties = [
            name
            for name, attr in models.options.Options.__dict__.items()
            if isinstance(attr, cached_property)
        ]
        # Access each cached property to populate the cache.
        for attr_name in cached_properties:
            getattr(opts, attr_name)
            self.assertIn(attr_name, opts.__dict__)
        apps.clear_cache()
        for attr_name in cached_properties:
            with self.subTest(property=attr_name):
                self.assertNotIn(attr_name, opts.__dict__)
    @override_settings(INSTALLED_APPS=["apps.apps.RelabeledAppsConfig"])
    def test_relabeling(self):
        """An AppConfig may install an app under an alternative label."""
        self.assertEqual(apps.get_app_config("relabeled").name, "apps")
    def test_duplicate_labels(self):
        """Installing two apps with the same label is rejected."""
        with self.assertRaisesMessage(
            ImproperlyConfigured, "Application labels aren't unique"
        ):
            with self.settings(INSTALLED_APPS=["apps.apps.PlainAppsConfig", "apps"]):
                pass
    def test_duplicate_names(self):
        """Installing the same app twice (under different labels) is rejected."""
        with self.assertRaisesMessage(
            ImproperlyConfigured, "Application names aren't unique"
        ):
            with self.settings(
                INSTALLED_APPS=["apps.apps.RelabeledAppsConfig", "apps"]
            ):
                pass
    def test_import_exception_is_not_masked(self):
        """
        App discovery should preserve stack traces. Regression test for #22920.
        """
        with self.assertRaisesMessage(ImportError, "Oops"):
            with self.settings(INSTALLED_APPS=["import_error_package"]):
                pass
    def test_models_py(self):
        """
        The models in the models.py file were loaded correctly.
        """
        self.assertEqual(apps.get_model("apps", "TotallyNormal"), TotallyNormal)
        with self.assertRaises(LookupError):
            apps.get_model("apps", "SoAlternative")
        with self.assertRaises(LookupError):
            new_apps.get_model("apps", "TotallyNormal")
        self.assertEqual(new_apps.get_model("apps", "SoAlternative"), SoAlternative)
    def test_models_not_loaded(self):
        """
        apps.get_models() raises an exception if apps.models_ready isn't True.
        """
        apps.models_ready = False
        try:
            # The cache must be cleared to trigger the exception.
            apps.get_models.cache_clear()
            with self.assertRaisesMessage(
                AppRegistryNotReady, "Models aren't loaded yet."
            ):
                apps.get_models()
        finally:
            apps.models_ready = True
    def test_dynamic_load(self):
        """
        Makes a new model at runtime and ensures it goes into the right place.
        """
        old_models = list(apps.get_app_config("apps").get_models())
        # Construct a new model in a new app registry
        body = {}
        new_apps = Apps(["apps"])
        meta_contents = {
            "app_label": "apps",
            "apps": new_apps,
        }
        meta = type("Meta", (), meta_contents)
        body["Meta"] = meta
        body["__module__"] = TotallyNormal.__module__
        temp_model = type("SouthPonies", (models.Model,), body)
        # Make sure it appeared in the right place!
        self.assertEqual(list(apps.get_app_config("apps").get_models()), old_models)
        with self.assertRaises(LookupError):
            apps.get_model("apps", "SouthPonies")
        self.assertEqual(new_apps.get_model("apps", "SouthPonies"), temp_model)
    def test_model_clash(self):
        """
        Test for behavior when two models clash in the app registry.
        """
        new_apps = Apps(["apps"])
        meta_contents = {
            "app_label": "apps",
            "apps": new_apps,
        }
        body = {}
        body["Meta"] = type("Meta", (), meta_contents)
        body["__module__"] = TotallyNormal.__module__
        type("SouthPonies", (models.Model,), body)
        # When __name__ and __module__ match we assume the module
        # was reloaded and issue a warning. This use-case is
        # useful for REPL. Refs #23621.
        body = {}
        body["Meta"] = type("Meta", (), meta_contents)
        body["__module__"] = TotallyNormal.__module__
        msg = (
            "Model 'apps.southponies' was already registered. "
            "Reloading models is not advised as it can lead to inconsistencies, "
            "most notably with related models."
        )
        with self.assertRaisesMessage(RuntimeWarning, msg):
            type("SouthPonies", (models.Model,), body)
        # If it doesn't appear to be a reloaded module then we expect
        # a RuntimeError.
        body = {}
        body["Meta"] = type("Meta", (), meta_contents)
        body["__module__"] = TotallyNormal.__module__ + ".whatever"
        with self.assertRaisesMessage(
            RuntimeError, "Conflicting 'southponies' models in application 'apps':"
        ):
            type("SouthPonies", (models.Model,), body)
    def test_get_containing_app_config_apps_not_ready(self):
        """
        apps.get_containing_app_config() should raise an exception if
        apps.apps_ready isn't True.
        """
        apps.apps_ready = False
        try:
            with self.assertRaisesMessage(
                AppRegistryNotReady, "Apps aren't loaded yet"
            ):
                apps.get_containing_app_config("foo")
        finally:
            apps.apps_ready = True
    @isolate_apps("apps", kwarg_name="apps")
    def test_lazy_model_operation(self, apps):
        """
        Tests apps.lazy_model_operation().
        """
        model_classes = []
        initial_pending = set(apps._pending_operations)
        def test_func(*models):
            model_classes[:] = models
        class LazyA(models.Model):
            pass
        # Test models appearing twice, and models appearing consecutively
        model_keys = [
            ("apps", model_name)
            for model_name in ["lazya", "lazyb", "lazyb", "lazyc", "lazya"]
        ]
        apps.lazy_model_operation(test_func, *model_keys)
        # LazyModelA shouldn't be waited on since it's already registered,
        # and LazyModelC shouldn't be waited on until LazyModelB exists.
        self.assertEqual(
            set(apps._pending_operations) - initial_pending, {("apps", "lazyb")}
        )
        # Multiple operations can wait on the same model
        apps.lazy_model_operation(test_func, ("apps", "lazyb"))
        class LazyB(models.Model):
            pass
        self.assertEqual(model_classes, [LazyB])
        # Now we are just waiting on LazyModelC.
        self.assertEqual(
            set(apps._pending_operations) - initial_pending, {("apps", "lazyc")}
        )
        class LazyC(models.Model):
            pass
        # Everything should be loaded - make sure the callback was executed
        # properly.
        self.assertEqual(model_classes, [LazyA, LazyB, LazyB, LazyC, LazyA])
class Stub:
    """Attribute bag: every keyword argument becomes an instance attribute."""
    def __init__(self, **kwargs):
        for key, value in kwargs.items():
            setattr(self, key, value)
class AppConfigTests(SimpleTestCase):
    """Unit tests for AppConfig class."""
    def test_path_set_explicitly(self):
        """If subclass sets path as class attr, no module attributes needed."""
        class MyAppConfig(AppConfig):
            path = "foo"
        ac = MyAppConfig("label", Stub())
        self.assertEqual(ac.path, "foo")
    def test_explicit_path_overrides(self):
        """If path set as class attr, overrides __path__ and __file__."""
        class MyAppConfig(AppConfig):
            path = "foo"
        ac = MyAppConfig("label", Stub(__path__=["a"], __file__="b/__init__.py"))
        self.assertEqual(ac.path, "foo")
    def test_dunder_path(self):
        """
        If single element in __path__, use it (in preference to __file__).
        """
        ac = AppConfig("label", Stub(__path__=["a"], __file__="b/__init__.py"))
        self.assertEqual(ac.path, "a")
    def test_no_dunder_path_fallback_to_dunder_file(self):
        """If there is no __path__ attr, use __file__."""
        ac = AppConfig("label", Stub(__file__="b/__init__.py"))
        self.assertEqual(ac.path, "b")
    def test_empty_dunder_path_fallback_to_dunder_file(self):
        """If the __path__ attr is empty, use __file__ if set."""
        ac = AppConfig("label", Stub(__path__=[], __file__="b/__init__.py"))
        self.assertEqual(ac.path, "b")
    def test_multiple_dunder_path_fallback_to_dunder_file(self):
        """If the __path__ attr is length>1, use __file__ if set."""
        ac = AppConfig("label", Stub(__path__=["a", "b"], __file__="c/__init__.py"))
        self.assertEqual(ac.path, "c")
    def test_no_dunder_path_or_dunder_file(self):
        """If there is no __path__ or __file__, raise ImproperlyConfigured."""
        with self.assertRaises(ImproperlyConfigured):
            AppConfig("label", Stub())
    def test_empty_dunder_path_no_dunder_file(self):
        """If the __path__ attr is empty and there is no __file__, raise."""
        with self.assertRaises(ImproperlyConfigured):
            AppConfig("label", Stub(__path__=[]))
    def test_multiple_dunder_path_no_dunder_file(self):
        """If the __path__ attr is length>1 and there is no __file__, raise."""
        with self.assertRaises(ImproperlyConfigured):
            AppConfig("label", Stub(__path__=["a", "b"]))
    def test_duplicate_dunder_path_no_dunder_file(self):
        """
        If the __path__ attr contains duplicate paths and there is no
        __file__, the duplicates should be deduplicated (#25246).
        """
        ac = AppConfig("label", Stub(__path__=["a", "a"]))
        self.assertEqual(ac.path, "a")
    def test_repr(self):
        """repr() shows the config's label."""
        ac = AppConfig("label", Stub(__path__=["a"]))
        self.assertEqual(repr(ac), "<AppConfig: label>")
    def test_invalid_label(self):
        """A label that isn't a Python identifier raises ImproperlyConfigured."""
        class MyAppConfig(AppConfig):
            label = "invalid.label"
        msg = "The app label 'invalid.label' is not a valid Python identifier."
        with self.assertRaisesMessage(ImproperlyConfigured, msg):
            MyAppConfig("test_app", Stub())
    @override_settings(
        INSTALLED_APPS=["apps.apps.ModelPKAppsConfig"],
        DEFAULT_AUTO_FIELD="django.db.models.SmallAutoField",
    )
    def test_app_default_auto_field(self):
        """An app-level default_auto_field overrides the global setting."""
        apps_config = apps.get_app_config("apps")
        self.assertEqual(
            apps_config.default_auto_field,
            "django.db.models.BigAutoField",
        )
        self.assertIs(apps_config._is_default_auto_field_overridden, True)
    @override_settings(
        INSTALLED_APPS=["apps.apps.PlainAppsConfig"],
        DEFAULT_AUTO_FIELD="django.db.models.SmallAutoField",
    )
    def test_default_auto_field_setting(self):
        """Without an app-level override, DEFAULT_AUTO_FIELD applies."""
        apps_config = apps.get_app_config("apps")
        self.assertEqual(
            apps_config.default_auto_field,
            "django.db.models.SmallAutoField",
        )
        self.assertIs(apps_config._is_default_auto_field_overridden, False)
class NamespacePackageAppTests(SimpleTestCase):
    """Apps implemented as PEP 420 namespace packages."""
    # We need nsapp to be top-level so our multiple-paths tests can add another
    # location for it (if it's inside a normal package with an __init__.py,
    # that isn't possible). In order to avoid cluttering the already-full
    # tests/ dir (which is on sys.path), we add these new entries to sys.path
    # temporarily.
    base_location = os.path.join(HERE, "namespace_package_base")
    other_location = os.path.join(HERE, "namespace_package_other_base")
    app_path = os.path.join(base_location, "nsapp")
    def test_single_path(self):
        """
        A Py3.3+ namespace package can be an app if it has only one path.
        """
        with extend_sys_path(self.base_location):
            with self.settings(INSTALLED_APPS=["nsapp"]):
                app_config = apps.get_app_config("nsapp")
                self.assertEqual(app_config.path, self.app_path)
    def test_multiple_paths(self):
        """
        A Py3.3+ namespace package with multiple locations cannot be an app.
        (Because then we wouldn't know where to load its templates, static
        assets, etc. from.)
        """
        # Temporarily add two directories to sys.path that both contain
        # components of the "nsapp" package.
        with extend_sys_path(self.base_location, self.other_location):
            with self.assertRaises(ImproperlyConfigured):
                with self.settings(INSTALLED_APPS=["nsapp"]):
                    pass
    def test_multiple_paths_explicit_path(self):
        """
        Multiple locations are ok only if app-config has explicit path.
        """
        # Temporarily add two directories to sys.path that both contain
        # components of the "nsapp" package.
        with extend_sys_path(self.base_location, self.other_location):
            with self.settings(INSTALLED_APPS=["nsapp.apps.NSAppConfig"]):
                app_config = apps.get_app_config("nsapp")
                self.assertEqual(app_config.path, self.app_path)
class QueryPerformingAppTests(TransactionTestCase):
    """
    Running database queries during app initialization emits a RuntimeWarning
    but still returns the query results.
    """
    available_apps = ["apps"]
    databases = {"default", "other"}
    expected_msg = (
        "Accessing the database during app initialization is discouraged. To fix this "
        "warning, avoid executing queries in AppConfig.ready() or when your app "
        "modules are imported."
    )
    def test_query_default_database_using_model(self):
        query_results = self.run_setup("QueryDefaultDatabaseModelAppConfig")
        self.assertSequenceEqual(query_results, [("new name",)])
    def test_query_other_database_using_model(self):
        query_results = self.run_setup("QueryOtherDatabaseModelAppConfig")
        self.assertSequenceEqual(query_results, [("new name",)])
    def test_query_default_database_using_cursor(self):
        query_results = self.run_setup("QueryDefaultDatabaseCursorAppConfig")
        self.assertSequenceEqual(query_results, [(42,)])
    def test_query_other_database_using_cursor(self):
        query_results = self.run_setup("QueryOtherDatabaseCursorAppConfig")
        self.assertSequenceEqual(query_results, [(42,)])
    def test_query_many_default_database_using_cursor(self):
        self.run_setup("QueryDefaultDatabaseCursorManyAppConfig")
    def test_query_many_other_database_using_cursor(self):
        self.run_setup("QueryOtherDatabaseCursorManyAppConfig")
    @skipUnlessDBFeature("create_test_procedure_without_params_sql")
    def test_query_default_database_using_stored_procedure(self):
        connection = connections["default"]
        with connection.cursor() as cursor:
            cursor.execute(connection.features.create_test_procedure_without_params_sql)
        try:
            self.run_setup("QueryDefaultDatabaseStoredProcedureAppConfig")
        finally:
            with connection.schema_editor() as editor:
                editor.remove_procedure("test_procedure")
    @skipUnlessDBFeature("create_test_procedure_without_params_sql")
    def test_query_other_database_using_stored_procedure(self):
        connection = connections["other"]
        with connection.cursor() as cursor:
            cursor.execute(connection.features.create_test_procedure_without_params_sql)
        try:
            self.run_setup("QueryOtherDatabaseStoredProcedureAppConfig")
        finally:
            with connection.schema_editor() as editor:
                editor.remove_procedure("test_procedure")
    def run_setup(self, app_config_name):
        """
        Re-run django.setup() with only the given app config installed and
        return the queries it performed, asserting the RuntimeWarning fired.
        Registry and settings state is restored afterwards.
        """
        custom_settings = override_settings(
            INSTALLED_APPS=[f"apps.query_performing_app.apps.{app_config_name}"]
        )
        custom_settings.enable()
        old_stored_app_configs = apps.stored_app_configs
        apps.stored_app_configs = []
        try:
            with patch.multiple(apps, ready=False, loading=False, app_configs={}):
                with self.assertWarnsMessage(RuntimeWarning, self.expected_msg):
                    django.setup()
                app_config = apps.get_app_config("query_performing_app")
                return app_config.query_results
        finally:
            # Plain attribute assignment: setattr() with a constant attribute
            # name was an unnecessary indirection (ruff B010).
            apps.stored_app_configs = old_stored_app_configs
            custom_settings.disable()
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/apps/apps.py | tests/apps/apps.py | from django.apps import AppConfig
class MyAdmin(AppConfig):
    # Alternative config for django.contrib.admin with an overridden verbose_name.
    name = "django.contrib.admin"
    verbose_name = "Admin sweet admin."
class MyAuth(AppConfig):
    # Config for django.contrib.auth that relabels the app to "myauth" and
    # overrides its verbose_name.
    name = "django.contrib.auth"
    label = "myauth"
    verbose_name = "All your password are belong to us."
# Deliberately invalid AppConfig fixture.
class BadConfig(AppConfig):
    """This class doesn't supply the mandatory 'name' attribute."""
class NotAConfig:
    # Has a ``name`` attribute but is not an AppConfig subclass.
    name = "apps"
class NoSuchApp(AppConfig):
    # ``name`` does not refer to an importable module.
    name = "there is no such app"
class PlainAppsConfig(AppConfig):
    # Minimal valid config for the "apps" package.
    name = "apps"
class RelabeledAppsConfig(AppConfig):
    # Valid config for "apps" with a custom app label.
    name = "apps"
    label = "relabeled"
class ModelPKAppsConfig(AppConfig):
    # Config for "apps" that overrides the default auto primary-key field.
    name = "apps"
    default_auto_field = "django.db.models.BigAutoField"
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/apps/two_configs_one_default_app/__init__.py | tests/apps/two_configs_one_default_app/__init__.py | python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false | |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/apps/two_configs_one_default_app/apps.py | tests/apps/two_configs_one_default_app/apps.py | from django.apps import AppConfig
class TwoConfig(AppConfig):
    # Marked as the default of the two configs in this module.
    default = True
    name = "apps.two_configs_one_default_app"
class TwoConfigAlt(AppConfig):
    # Secondary, non-default config for the same app.
    name = "apps.two_configs_one_default_app"
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/apps/no_config_app/__init__.py | tests/apps/no_config_app/__init__.py | python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false | |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/apps/query_performing_app/__init__.py | tests/apps/query_performing_app/__init__.py | python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false | |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/apps/query_performing_app/apps.py | tests/apps/query_performing_app/apps.py | from django.apps import AppConfig
from django.db import connections
class BaseAppConfig(AppConfig):
    # Base for app configs that run a database query during ready();
    # subclasses set ``database`` and implement _perform_query().
    name = "apps.query_performing_app"
    database = "default"

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Results collected by _perform_query(); read by the tests.
        self.query_results = []

    def ready(self):
        # Reset before each query so repeated setup() calls don't accumulate.
        self.query_results = []
        self._perform_query()

    def _perform_query(self):
        # Subclasses must run their query here and populate query_results.
        raise NotImplementedError
class ModelQueryAppConfig(BaseAppConfig):
    # Runs ORM queries (update_or_create + values_list) during ready().
    def _perform_query(self):
        # Imported lazily: models are not loadable at module-import time.
        from ..models import TotallyNormal

        queryset = TotallyNormal.objects.using(self.database)
        queryset.update_or_create(name="new name")
        self.query_results = list(queryset.values_list("name"))
class QueryDefaultDatabaseModelAppConfig(ModelQueryAppConfig):
    # Run the model query against the "default" database alias.
    database = "default"
class QueryOtherDatabaseModelAppConfig(ModelQueryAppConfig):
    # Run the model query against the "other" database alias.
    database = "other"
class CursorQueryAppConfig(BaseAppConfig):
    # Runs a raw SELECT through a low-level cursor during ready().
    def _perform_query(self):
        connection = connections[self.database]
        with connection.cursor() as cursor:
            # bare_select_suffix makes a bare "SELECT 42" valid on backends
            # that require a FROM clause.
            cursor.execute("SELECT 42" + connection.features.bare_select_suffix)
            self.query_results = cursor.fetchall()
class QueryDefaultDatabaseCursorAppConfig(CursorQueryAppConfig):
    # Run the cursor query against the "default" database alias.
    database = "default"
class QueryOtherDatabaseCursorAppConfig(CursorQueryAppConfig):
    # Run the cursor query against the "other" database alias.
    database = "other"
class CursorQueryManyAppConfig(BaseAppConfig):
    # Runs cursor.executemany() inserts during ready(); collects no rows.
    def _perform_query(self):
        # Imported lazily: models are not loadable at module-import time.
        from ..models import TotallyNormal

        connection = connections[self.database]
        table_meta = TotallyNormal._meta
        with connection.cursor() as cursor:
            # Table/column names come from model metadata and are converted/
            # quoted by the backend, so this %-interpolation builds only
            # trusted identifiers; the row values use placeholders (%%s).
            cursor.executemany(
                "INSERT INTO %s (%s) VALUES(%%s)"
                % (
                    connection.introspection.identifier_converter(table_meta.db_table),
                    connection.ops.quote_name(table_meta.get_field("name").column),
                ),
                [("test name 1",), ("test name 2",)],
            )
        self.query_results = []
class QueryDefaultDatabaseCursorManyAppConfig(CursorQueryManyAppConfig):
    # Run the executemany() inserts against the "default" database alias.
    database = "default"
class QueryOtherDatabaseCursorManyAppConfig(CursorQueryManyAppConfig):
    # Run the executemany() inserts against the "other" database alias.
    database = "other"
class StoredProcedureQueryAppConfig(BaseAppConfig):
    # Calls the "test_procedure" stored procedure during ready();
    # collects no rows.
    def _perform_query(self):
        with connections[self.database].cursor() as cursor:
            cursor.callproc("test_procedure")
        self.query_results = []
class QueryDefaultDatabaseStoredProcedureAppConfig(StoredProcedureQueryAppConfig):
    # Call the stored procedure on the "default" database alias.
    database = "default"
class QueryOtherDatabaseStoredProcedureAppConfig(StoredProcedureQueryAppConfig):
    # Call the stored procedure on the "other" database alias.
    database = "other"
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/apps/two_default_configs_app/__init__.py | tests/apps/two_default_configs_app/__init__.py | python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false | |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/apps/two_default_configs_app/apps.py | tests/apps/two_default_configs_app/apps.py | from django.apps import AppConfig
class TwoConfig(AppConfig):
    # First of two configs in this module that both claim default = True.
    default = True
    name = "apps.two_default_configs_app"
class TwoConfigBis(AppConfig):
    # Second config also claiming default = True for the same app.
    default = True
    name = "apps.two_default_configs_app"
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/apps/namespace_package_base/nsapp/apps.py | tests/apps/namespace_package_base/nsapp/apps.py | import os
from django.apps import AppConfig
class NSAppConfig(AppConfig):
    # Config for the "nsapp" namespace package; it supplies an explicit
    # filesystem ``path`` since namespace packages may span multiple
    # locations.
    default = False
    name = "nsapp"
    path = os.path.dirname(__file__)
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/apps/one_config_app/__init__.py | tests/apps/one_config_app/__init__.py | python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false | |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/apps/one_config_app/apps.py | tests/apps/one_config_app/apps.py | from django.apps import AppConfig
class OneConfig(AppConfig):
    # The only config declared in apps.one_config_app.
    name = "apps.one_config_app"
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/apps/two_configs_app/__init__.py | tests/apps/two_configs_app/__init__.py | python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false | |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/apps/two_configs_app/apps.py | tests/apps/two_configs_app/apps.py | from django.apps import AppConfig
class TwoConfig(AppConfig):
    # First of two configs for this app; neither is marked default.
    name = "apps.two_configs_app"
class TwoConfigBis(AppConfig):
    # Second config for the same app; neither is marked default.
    name = "apps.two_configs_app"
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/postgres_tests/test_trigram.py | tests/postgres_tests/test_trigram.py | from django.db.models import F, Value
from django.db.models.functions import Concat
from . import PostgreSQLTestCase
from .models import CharFieldModel, TextFieldModel
try:
from django.contrib.postgres.search import (
TrigramDistance,
TrigramSimilarity,
TrigramStrictWordDistance,
TrigramStrictWordSimilarity,
TrigramWordDistance,
TrigramWordSimilarity,
)
except ImportError:
pass
class TrigramTest(PostgreSQLTestCase):
    """Tests for trigram similarity/distance lookups and expressions.

    ``Model`` is overridden by the TextField subclass below to re-run the
    same tests against a different field type.
    """

    Model = CharFieldModel

    @classmethod
    def setUpTestData(cls):
        cls.Model.objects.bulk_create(
            [
                cls.Model(field="Matthew"),
                cls.Model(field="Cat sat on mat."),
                cls.Model(field="Dog sat on rug."),
            ]
        )

    def test_trigram_search(self):
        # __trigram_similar matches despite the "Mathew" misspelling.
        self.assertQuerySetEqual(
            self.Model.objects.filter(field__trigram_similar="Mathew"),
            ["Matthew"],
            transform=lambda instance: instance.field,
        )

    def test_trigram_word_search(self):
        obj = self.Model.objects.create(
            field="Gumby rides on the path of Middlesbrough",
        )
        # Word similarity matches both a misspelling and a prefix.
        self.assertSequenceEqual(
            self.Model.objects.filter(field__trigram_word_similar="Middlesborough"),
            [obj],
        )
        self.assertSequenceEqual(
            self.Model.objects.filter(field__trigram_word_similar="Middle"),
            [obj],
        )

    def test_trigram_strict_word_search_matched(self):
        obj = self.Model.objects.create(
            field="Gumby rides on the path of Middlesbrough",
        )
        # Unlike test_trigram_word_search, the strict variant does not
        # match the bare "Middle" prefix.
        self.assertSequenceEqual(
            self.Model.objects.filter(
                field__trigram_strict_word_similar="Middlesborough"
            ),
            [obj],
        )
        self.assertSequenceEqual(
            self.Model.objects.filter(field__trigram_strict_word_similar="Middle"),
            [],
        )

    def test_trigram_similarity(self):
        search = "Bat sat on cat."
        # Round result of similarity because PostgreSQL uses greater precision.
        self.assertQuerySetEqual(
            self.Model.objects.filter(
                field__trigram_similar=search,
            )
            .annotate(similarity=TrigramSimilarity("field", search))
            .order_by("-similarity"),
            [("Cat sat on mat.", 0.625), ("Dog sat on rug.", 0.333333)],
            transform=lambda instance: (instance.field, round(instance.similarity, 6)),
            ordered=True,
        )

    def test_trigram_word_similarity(self):
        search = "mat"
        self.assertSequenceEqual(
            self.Model.objects.filter(
                field__trigram_word_similar=search,
            )
            .annotate(
                word_similarity=TrigramWordSimilarity(search, "field"),
            )
            .values("field", "word_similarity")
            .order_by("-word_similarity"),
            [
                {"field": "Cat sat on mat.", "word_similarity": 1.0},
                {"field": "Matthew", "word_similarity": 0.75},
            ],
        )

    def test_trigram_strict_word_similarity(self):
        search = "matt"
        self.assertSequenceEqual(
            self.Model.objects.filter(field__trigram_word_similar=search)
            .annotate(word_similarity=TrigramStrictWordSimilarity(search, "field"))
            .values("field", "word_similarity")
            .order_by("-word_similarity"),
            [
                {"field": "Cat sat on mat.", "word_similarity": 0.5},
                {"field": "Matthew", "word_similarity": 0.44444445},
            ],
        )

    def test_trigram_similarity_alternate(self):
        # Round result of distance because PostgreSQL uses greater precision.
        self.assertQuerySetEqual(
            self.Model.objects.annotate(
                distance=TrigramDistance("field", "Bat sat on cat."),
            )
            .filter(distance__lte=0.7)
            .order_by("distance"),
            [("Cat sat on mat.", 0.375), ("Dog sat on rug.", 0.666667)],
            transform=lambda instance: (instance.field, round(instance.distance, 6)),
            ordered=True,
        )

    def test_trigram_word_similarity_alternate(self):
        self.assertSequenceEqual(
            self.Model.objects.annotate(
                word_distance=TrigramWordDistance("mat", "field"),
            )
            .filter(
                word_distance__lte=0.7,
            )
            .values("field", "word_distance")
            .order_by("word_distance"),
            [
                {"field": "Cat sat on mat.", "word_distance": 0},
                {"field": "Matthew", "word_distance": 0.25},
            ],
        )

    def test_trigram_strict_word_distance(self):
        self.assertSequenceEqual(
            self.Model.objects.annotate(
                word_distance=TrigramStrictWordDistance("matt", "field"),
            )
            .filter(word_distance__lte=0.7)
            .values("field", "word_distance")
            .order_by("word_distance"),
            [
                {"field": "Cat sat on mat.", "word_distance": 0.5},
                {"field": "Matthew", "word_distance": 0.5555556},
            ],
        )

    def test_trigram_concat_precedence(self):
        # __trigram_similar also works on a Concat() annotation.
        search_term = "im matthew"
        self.assertSequenceEqual(
            self.Model.objects.annotate(
                concat_result=Concat(
                    Value("I'm "),
                    F("field"),
                    output_field=self.Model._meta.get_field("field"),
                ),
            )
            .filter(concat_result__trigram_similar=search_term)
            .values("field"),
            [{"field": "Matthew"}],
        )
class TrigramTextFieldTest(TrigramTest):
    """
    TextField has the same behavior as CharField regarding trigram lookups.
    """

    # Re-run the whole TrigramTest suite against the TextField-backed model.
    Model = TextFieldModel
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/postgres_tests/test_app_installed_check.py | tests/postgres_tests/test_app_installed_check.py | from django.core import checks
from django.db import models
from django.test import modify_settings
from django.test.utils import isolate_apps
from . import PostgreSQLTestCase
from .fields import (
BigIntegerRangeField,
DateRangeField,
DateTimeRangeField,
DecimalRangeField,
HStoreField,
IntegerRangeField,
SearchVectorField,
)
from .models import IntegerArrayModel, NestedIntegerArrayModel, PostgreSQLModel
try:
from django.contrib.postgres.constraints import ExclusionConstraint
from django.contrib.postgres.fields.ranges import RangeOperators
from django.contrib.postgres.indexes import GinIndex, PostgresIndex
from django.contrib.postgres.search import SearchQueryField
except ImportError:
pass
@isolate_apps("postgres_tests")
class TestPostgresAppInstalledCheck(PostgreSQLTestCase):
    """Model checks must report postgres.E005 for postgres-specific fields,
    indexes, and constraints when 'django.contrib.postgres' is not in
    INSTALLED_APPS."""

    def _make_error(self, obj, klass_name):
        """Helper to create postgres.E005 error for specific objects."""
        return checks.Error(
            "'django.contrib.postgres' must be in INSTALLED_APPS in order to "
            f"use {klass_name}.",
            obj=obj,
            id="postgres.E005",
        )

    def assert_model_check_errors(self, model_class, expected_errors):
        """Assert no check errors while the app is installed, and exactly
        ``expected_errors`` after removing it from INSTALLED_APPS."""
        errors = model_class.check(databases=self.databases)
        self.assertEqual(errors, [])
        with modify_settings(INSTALLED_APPS={"remove": "django.contrib.postgres"}):
            errors = model_class.check(databases=self.databases)
            self.assertEqual(errors, expected_errors)

    def test_indexes(self):
        class IndexModel(PostgreSQLModel):
            field = models.IntegerField()

            class Meta:
                indexes = [
                    PostgresIndex(fields=["id"], name="postgres_index_test"),
                    GinIndex(fields=["field"], name="gin_index_test"),
                ]

        self.assert_model_check_errors(
            IndexModel,
            [
                self._make_error(IndexModel, "PostgresIndex"),
                self._make_error(IndexModel, "GinIndex"),
            ],
        )

    def test_exclusion_constraint(self):
        class ExclusionModel(PostgreSQLModel):
            value = models.IntegerField()

            class Meta:
                constraints = [
                    ExclusionConstraint(
                        name="exclude_equal",
                        expressions=[("value", RangeOperators.EQUAL)],
                    )
                ]

        self.assert_model_check_errors(
            ExclusionModel, [self._make_error(ExclusionModel, "ExclusionConstraint")]
        )

    def test_array_field(self):
        field = IntegerArrayModel._meta.get_field("field")
        self.assert_model_check_errors(
            IntegerArrayModel,
            [self._make_error(field, "ArrayField")],
        )

    def test_nested_array_field(self):
        """Inner ArrayField does not cause a postgres.E001 error."""
        field = NestedIntegerArrayModel._meta.get_field("field")
        self.assert_model_check_errors(
            NestedIntegerArrayModel,
            [
                self._make_error(field, "ArrayField"),
            ],
        )

    def test_hstore_field(self):
        class HStoreFieldModel(PostgreSQLModel):
            field = HStoreField()

        # Rebind ``field`` to the bound field instance for error comparison.
        field = HStoreFieldModel._meta.get_field("field")
        self.assert_model_check_errors(
            HStoreFieldModel,
            [
                self._make_error(field, "HStoreField"),
            ],
        )

    def test_range_fields(self):
        class RangeFieldsModel(PostgreSQLModel):
            int_range = IntegerRangeField()
            bigint_range = BigIntegerRangeField()
            decimal_range = DecimalRangeField()
            datetime_range = DateTimeRangeField()
            date_range = DateRangeField()

        # One E005 error per range field, in declaration order.
        expected_errors = [
            self._make_error(field, field.__class__.__name__)
            for field in [
                RangeFieldsModel._meta.get_field("int_range"),
                RangeFieldsModel._meta.get_field("bigint_range"),
                RangeFieldsModel._meta.get_field("decimal_range"),
                RangeFieldsModel._meta.get_field("datetime_range"),
                RangeFieldsModel._meta.get_field("date_range"),
            ]
        ]
        self.assert_model_check_errors(RangeFieldsModel, expected_errors)

    def test_search_vector_field(self):
        class SearchModel(PostgreSQLModel):
            search_vector = SearchVectorField()
            search_query = SearchQueryField()

        vector_field = SearchModel._meta.get_field("search_vector")
        query_field = SearchModel._meta.get_field("search_query")
        self.assert_model_check_errors(
            SearchModel,
            [
                self._make_error(vector_field, "SearchVectorField"),
                self._make_error(query_field, "SearchQueryField"),
            ],
        )
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/postgres_tests/test_operations.py | tests/postgres_tests/test_operations.py | import unittest
from migrations.test_base import OperationTestBase, OptimizerTestBase
from django.db import IntegrityError, NotSupportedError, connection, transaction
from django.db.migrations.operations import RemoveIndex, RenameIndex
from django.db.migrations.state import ProjectState
from django.db.migrations.writer import OperationWriter
from django.db.models import CheckConstraint, Index, Q, UniqueConstraint
from django.db.utils import ProgrammingError
from django.test import modify_settings, override_settings
from django.test.utils import CaptureQueriesContext
from . import PostgreSQLTestCase
try:
from django.contrib.postgres.indexes import BrinIndex, BTreeIndex
from django.contrib.postgres.operations import (
AddConstraintNotValid,
AddIndexConcurrently,
BloomExtension,
CreateCollation,
CreateExtension,
RemoveCollation,
RemoveIndexConcurrently,
ValidateConstraint,
)
except ImportError:
pass
@unittest.skipUnless(connection.vendor == "postgresql", "PostgreSQL specific tests.")
@modify_settings(INSTALLED_APPS={"append": "migrations"})
class AddIndexConcurrentlyTests(OptimizerTestBase, OperationTestBase):
    """Tests for the AddIndexConcurrently migration operation."""

    app_label = "test_add_concurrently"

    def test_requires_atomic_false(self):
        # The operation refuses to run inside a transaction.
        project_state = self.set_up_test_model(self.app_label)
        new_state = project_state.clone()
        operation = AddIndexConcurrently(
            "Pony",
            Index(fields=["pink"], name="pony_pink_idx"),
        )
        msg = (
            "The AddIndexConcurrently operation cannot be executed inside "
            "a transaction (set atomic = False on the migration)."
        )
        with self.assertRaisesMessage(NotSupportedError, msg):
            with connection.schema_editor(atomic=True) as editor:
                operation.database_forwards(
                    self.app_label, editor, project_state, new_state
                )

    def test_add(self):
        # Describe, state change, database forwards/backwards, deconstruct.
        project_state = self.set_up_test_model(self.app_label, index=False)
        table_name = "%s_pony" % self.app_label
        index = Index(fields=["pink"], name="pony_pink_idx")
        new_state = project_state.clone()
        operation = AddIndexConcurrently("Pony", index)
        self.assertEqual(
            operation.describe(),
            "Concurrently create index pony_pink_idx on field(s) pink of model Pony",
        )
        self.assertEqual(
            operation.formatted_description(),
            "+ Concurrently create index pony_pink_idx on field(s) pink of model Pony",
        )
        operation.state_forwards(self.app_label, new_state)
        self.assertEqual(
            len(new_state.models[self.app_label, "pony"].options["indexes"]), 1
        )
        self.assertIndexNotExists(table_name, ["pink"])
        # Add index.
        with connection.schema_editor(atomic=False) as editor:
            operation.database_forwards(
                self.app_label, editor, project_state, new_state
            )
        self.assertIndexExists(table_name, ["pink"])
        # Reversal.
        with connection.schema_editor(atomic=False) as editor:
            operation.database_backwards(
                self.app_label, editor, new_state, project_state
            )
        self.assertIndexNotExists(table_name, ["pink"])
        # Deconstruction.
        name, args, kwargs = operation.deconstruct()
        self.assertEqual(name, "AddIndexConcurrently")
        self.assertEqual(args, [])
        self.assertEqual(kwargs, {"model_name": "Pony", "index": index})

    def test_add_other_index_type(self):
        # Works with non-default index classes (BRIN here).
        project_state = self.set_up_test_model(self.app_label, index=False)
        table_name = "%s_pony" % self.app_label
        new_state = project_state.clone()
        operation = AddIndexConcurrently(
            "Pony",
            BrinIndex(fields=["pink"], name="pony_pink_brin_idx"),
        )
        self.assertIndexNotExists(table_name, ["pink"])
        # Add index.
        with connection.schema_editor(atomic=False) as editor:
            operation.database_forwards(
                self.app_label, editor, project_state, new_state
            )
        self.assertIndexExists(table_name, ["pink"], index_type="brin")
        # Reversal.
        with connection.schema_editor(atomic=False) as editor:
            operation.database_backwards(
                self.app_label, editor, new_state, project_state
            )
        self.assertIndexNotExists(table_name, ["pink"])

    def test_add_with_options(self):
        # Works with index options such as BTreeIndex's fillfactor.
        project_state = self.set_up_test_model(self.app_label, index=False)
        table_name = "%s_pony" % self.app_label
        new_state = project_state.clone()
        index = BTreeIndex(fields=["pink"], name="pony_pink_btree_idx", fillfactor=70)
        operation = AddIndexConcurrently("Pony", index)
        self.assertIndexNotExists(table_name, ["pink"])
        # Add index.
        with connection.schema_editor(atomic=False) as editor:
            operation.database_forwards(
                self.app_label, editor, project_state, new_state
            )
        self.assertIndexExists(table_name, ["pink"], index_type="btree")
        # Reversal.
        with connection.schema_editor(atomic=False) as editor:
            operation.database_backwards(
                self.app_label, editor, new_state, project_state
            )
        self.assertIndexNotExists(table_name, ["pink"])

    def test_reduce_add_remove_concurrently(self):
        # Add followed by RemoveIndex optimizes away entirely.
        self.assertOptimizesTo(
            [
                AddIndexConcurrently(
                    "Pony",
                    Index(fields=["pink"], name="pony_pink_idx"),
                ),
                RemoveIndex("Pony", "pony_pink_idx"),
            ],
            [],
        )

    def test_reduce_add_remove(self):
        # Add followed by RemoveIndexConcurrently optimizes away entirely.
        self.assertOptimizesTo(
            [
                AddIndexConcurrently(
                    "Pony",
                    Index(fields=["pink"], name="pony_pink_idx"),
                ),
                RemoveIndexConcurrently("Pony", "pony_pink_idx"),
            ],
            [],
        )

    def test_reduce_add_rename(self):
        # Add followed by RenameIndex collapses into one Add with the new name.
        self.assertOptimizesTo(
            [
                AddIndexConcurrently(
                    "Pony",
                    Index(fields=["pink"], name="pony_pink_idx"),
                ),
                RenameIndex(
                    "Pony",
                    old_name="pony_pink_idx",
                    new_name="pony_pink_index",
                ),
            ],
            [
                AddIndexConcurrently(
                    "Pony",
                    Index(fields=["pink"], name="pony_pink_index"),
                ),
            ],
        )
@unittest.skipUnless(connection.vendor == "postgresql", "PostgreSQL specific tests.")
@modify_settings(INSTALLED_APPS={"append": "migrations"})
class RemoveIndexConcurrentlyTests(OperationTestBase):
    """Tests for the RemoveIndexConcurrently migration operation."""

    app_label = "test_rm_concurrently"

    def test_requires_atomic_false(self):
        # The operation refuses to run inside a transaction.
        project_state = self.set_up_test_model(self.app_label, index=True)
        new_state = project_state.clone()
        operation = RemoveIndexConcurrently("Pony", "pony_pink_idx")
        msg = (
            "The RemoveIndexConcurrently operation cannot be executed inside "
            "a transaction (set atomic = False on the migration)."
        )
        with self.assertRaisesMessage(NotSupportedError, msg):
            with connection.schema_editor(atomic=True) as editor:
                operation.database_forwards(
                    self.app_label, editor, project_state, new_state
                )

    def test_remove(self):
        # Describe, state change, database forwards/backwards, deconstruct.
        project_state = self.set_up_test_model(self.app_label, index=True)
        table_name = "%s_pony" % self.app_label
        self.assertTableExists(table_name)
        new_state = project_state.clone()
        operation = RemoveIndexConcurrently("Pony", "pony_pink_idx")
        self.assertEqual(
            operation.describe(),
            "Concurrently remove index pony_pink_idx from Pony",
        )
        self.assertEqual(
            operation.formatted_description(),
            "- Concurrently remove index pony_pink_idx from Pony",
        )
        operation.state_forwards(self.app_label, new_state)
        self.assertEqual(
            len(new_state.models[self.app_label, "pony"].options["indexes"]), 0
        )
        self.assertIndexExists(table_name, ["pink"])
        # Remove index.
        with connection.schema_editor(atomic=False) as editor:
            operation.database_forwards(
                self.app_label, editor, project_state, new_state
            )
        self.assertIndexNotExists(table_name, ["pink"])
        # Reversal.
        with connection.schema_editor(atomic=False) as editor:
            operation.database_backwards(
                self.app_label, editor, new_state, project_state
            )
        self.assertIndexExists(table_name, ["pink"])
        # Deconstruction.
        name, args, kwargs = operation.deconstruct()
        self.assertEqual(name, "RemoveIndexConcurrently")
        self.assertEqual(args, [])
        self.assertEqual(kwargs, {"model_name": "Pony", "name": "pony_pink_idx"})
class NoMigrationRouter:
    """Database router that disallows migrations on every database."""

    def allow_migrate(self, db, app_label, **hints):
        return False
class MigrateWhenHinted:
    """Database router that permits migrations only when an ``a_hint``
    hint is supplied (its value is returned verbatim)."""

    def allow_migrate(self, db, app_label, **hints):
        # Absent hint means "not allowed".
        try:
            return hints["a_hint"]
        except KeyError:
            return False
@unittest.skipUnless(connection.vendor == "postgresql", "PostgreSQL specific tests.")
class CreateExtensionTests(PostgreSQLTestCase):
    """Tests for the CreateExtension (and BloomExtension) operations,
    including router-based allow_migrate handling."""

    app_label = "test_allow_create_extention"

    @override_settings(DATABASE_ROUTERS=[NoMigrationRouter()])
    def test_no_allow_migrate(self):
        # With a router that forbids migrations, no SQL runs either way.
        operation = CreateExtension("tablefunc")
        self.assertEqual(
            operation.formatted_description(), "+ Creates extension tablefunc"
        )
        project_state = ProjectState()
        new_state = project_state.clone()
        # Don't create an extension.
        with CaptureQueriesContext(connection) as captured_queries:
            with connection.schema_editor(atomic=False) as editor:
                operation.database_forwards(
                    self.app_label, editor, project_state, new_state
                )
        self.assertEqual(len(captured_queries), 0)
        # Reversal.
        with CaptureQueriesContext(connection) as captured_queries:
            with connection.schema_editor(atomic=False) as editor:
                operation.database_backwards(
                    self.app_label, editor, new_state, project_state
                )
        self.assertEqual(len(captured_queries), 0)

    def test_allow_migrate(self):
        operation = CreateExtension("tablefunc")
        self.assertEqual(
            operation.migration_name_fragment, "create_extension_tablefunc"
        )
        project_state = ProjectState()
        new_state = project_state.clone()
        # Create an extension.
        with CaptureQueriesContext(connection) as captured_queries:
            with connection.schema_editor(atomic=False) as editor:
                operation.database_forwards(
                    self.app_label, editor, project_state, new_state
                )
        self.assertEqual(len(captured_queries), 4)
        self.assertIn("CREATE EXTENSION IF NOT EXISTS", captured_queries[1]["sql"])
        # Reversal.
        with CaptureQueriesContext(connection) as captured_queries:
            with connection.schema_editor(atomic=False) as editor:
                operation.database_backwards(
                    self.app_label, editor, new_state, project_state
                )
        self.assertEqual(len(captured_queries), 2)
        self.assertIn("DROP EXTENSION IF EXISTS", captured_queries[1]["sql"])

    @override_settings(DATABASE_ROUTERS=[MigrateWhenHinted()])
    def test_allow_migrate_based_on_hints(self):
        # The router only allows the operation carrying the a_hint hint.
        operation_no_hints = CreateExtension("tablefunc")
        self.assertEqual(operation_no_hints.hints, {})
        operation_hints = CreateExtension("tablefunc", hints={"a_hint": True})
        self.assertEqual(operation_hints.hints, {"a_hint": True})
        project_state = ProjectState()
        new_state = project_state.clone()
        with (
            CaptureQueriesContext(connection) as captured_queries,
            connection.schema_editor(atomic=False) as editor,
        ):
            operation_no_hints.database_forwards(
                self.app_label, editor, project_state, new_state
            )
        self.assertEqual(len(captured_queries), 0)
        with (
            CaptureQueriesContext(connection) as captured_queries,
            connection.schema_editor(atomic=False) as editor,
        ):
            operation_no_hints.database_backwards(
                self.app_label, editor, project_state, new_state
            )
        self.assertEqual(len(captured_queries), 0)
        with (
            CaptureQueriesContext(connection) as captured_queries,
            connection.schema_editor(atomic=False) as editor,
        ):
            operation_hints.database_forwards(
                self.app_label, editor, project_state, new_state
            )
        self.assertEqual(len(captured_queries), 4)
        self.assertIn("CREATE EXTENSION IF NOT EXISTS", captured_queries[1]["sql"])
        with (
            CaptureQueriesContext(connection) as captured_queries,
            connection.schema_editor(atomic=False) as editor,
        ):
            operation_hints.database_backwards(
                self.app_label, editor, project_state, new_state
            )
        self.assertEqual(len(captured_queries), 2)
        self.assertIn("DROP EXTENSION IF EXISTS", captured_queries[1]["sql"])

    def test_create_existing_extension(self):
        operation = BloomExtension()
        self.assertEqual(operation.migration_name_fragment, "create_extension_bloom")
        project_state = ProjectState()
        new_state = project_state.clone()
        # Don't create an existing extension.
        with CaptureQueriesContext(connection) as captured_queries:
            with connection.schema_editor(atomic=False) as editor:
                operation.database_forwards(
                    self.app_label, editor, project_state, new_state
                )
        self.assertEqual(len(captured_queries), 3)
        self.assertIn("SELECT", captured_queries[0]["sql"])

    def test_drop_nonexistent_extension(self):
        operation = CreateExtension("tablefunc")
        project_state = ProjectState()
        new_state = project_state.clone()
        # Don't drop a nonexistent extension.
        with CaptureQueriesContext(connection) as captured_queries:
            with connection.schema_editor(atomic=False) as editor:
                operation.database_backwards(
                    self.app_label, editor, project_state, new_state
                )
        self.assertEqual(len(captured_queries), 1)
        self.assertIn("SELECT", captured_queries[0]["sql"])
@unittest.skipUnless(connection.vendor == "postgresql", "PostgreSQL specific tests.")
class CreateCollationTests(OptimizerTestBase, PostgreSQLTestCase):
app_label = "test_allow_create_collation"
    @override_settings(DATABASE_ROUTERS=[NoMigrationRouter()])
    def test_no_allow_migrate(self):
        """No collation SQL runs when the router disallows migrations."""
        operation = CreateCollation("C_test", locale="C")
        project_state = ProjectState()
        new_state = project_state.clone()
        # Don't create a collation.
        with CaptureQueriesContext(connection) as captured_queries:
            with connection.schema_editor(atomic=False) as editor:
                operation.database_forwards(
                    self.app_label, editor, project_state, new_state
                )
        self.assertEqual(len(captured_queries), 0)
        # Reversal.
        with CaptureQueriesContext(connection) as captured_queries:
            with connection.schema_editor(atomic=False) as editor:
                operation.database_backwards(
                    self.app_label, editor, new_state, project_state
                )
        self.assertEqual(len(captured_queries), 0)
    def test_create(self):
        """CreateCollation creates the collation, errors on duplicates, and
        drops it on reversal."""
        operation = CreateCollation("C_test", locale="C")
        self.assertEqual(operation.migration_name_fragment, "create_collation_c_test")
        self.assertEqual(operation.describe(), "Create collation C_test")
        self.assertEqual(operation.formatted_description(), "+ Create collation C_test")
        project_state = ProjectState()
        new_state = project_state.clone()
        # Create a collation.
        with CaptureQueriesContext(connection) as captured_queries:
            with connection.schema_editor(atomic=False) as editor:
                operation.database_forwards(
                    self.app_label, editor, project_state, new_state
                )
        self.assertEqual(len(captured_queries), 1)
        self.assertIn("CREATE COLLATION", captured_queries[0]["sql"])
        # Creating the same collation raises an exception.
        with self.assertRaisesMessage(ProgrammingError, "already exists"):
            with connection.schema_editor(atomic=True) as editor:
                operation.database_forwards(
                    self.app_label, editor, project_state, new_state
                )
        # Reversal.
        with CaptureQueriesContext(connection) as captured_queries:
            with connection.schema_editor(atomic=False) as editor:
                operation.database_backwards(
                    self.app_label, editor, new_state, project_state
                )
        self.assertEqual(len(captured_queries), 1)
        self.assertIn("DROP COLLATION", captured_queries[0]["sql"])
        # Deconstruction.
        name, args, kwargs = operation.deconstruct()
        self.assertEqual(name, "CreateCollation")
        self.assertEqual(args, [])
        self.assertEqual(kwargs, {"name": "C_test", "locale": "C"})
    def test_create_non_deterministic_collation(self):
        """
        A non-deterministic ICU collation is created and dropped with one
        query each, and deconstruct() round-trips the provider and
        deterministic kwargs.
        """
        operation = CreateCollation(
            "case_insensitive_test",
            "und-u-ks-level2",
            provider="icu",
            deterministic=False,
        )
        project_state = ProjectState()
        new_state = project_state.clone()
        # Create a collation.
        with CaptureQueriesContext(connection) as captured_queries:
            with connection.schema_editor(atomic=False) as editor:
                operation.database_forwards(
                    self.app_label, editor, project_state, new_state
                )
        self.assertEqual(len(captured_queries), 1)
        self.assertIn("CREATE COLLATION", captured_queries[0]["sql"])
        # Reversal.
        with CaptureQueriesContext(connection) as captured_queries:
            with connection.schema_editor(atomic=False) as editor:
                operation.database_backwards(
                    self.app_label, editor, new_state, project_state
                )
        self.assertEqual(len(captured_queries), 1)
        self.assertIn("DROP COLLATION", captured_queries[0]["sql"])
        # Deconstruction.
        name, args, kwargs = operation.deconstruct()
        self.assertEqual(name, "CreateCollation")
        self.assertEqual(args, [])
        self.assertEqual(
            kwargs,
            {
                "name": "case_insensitive_test",
                "locale": "und-u-ks-level2",
                "provider": "icu",
                "deterministic": False,
            },
        )
    def test_create_collation_alternate_provider(self):
        """
        CreateCollation with a non-default (ICU) provider creates and
        drops the collation with a single query in each direction.
        """
        operation = CreateCollation(
            "german_phonebook_test",
            provider="icu",
            locale="de-u-co-phonebk",
        )
        project_state = ProjectState()
        new_state = project_state.clone()
        # Create a collation.
        with CaptureQueriesContext(connection) as captured_queries:
            with connection.schema_editor(atomic=False) as editor:
                operation.database_forwards(
                    self.app_label, editor, project_state, new_state
                )
        self.assertEqual(len(captured_queries), 1)
        self.assertIn("CREATE COLLATION", captured_queries[0]["sql"])
        # Reversal.
        with CaptureQueriesContext(connection) as captured_queries:
            with connection.schema_editor(atomic=False) as editor:
                operation.database_backwards(
                    self.app_label, editor, new_state, project_state
                )
        self.assertEqual(len(captured_queries), 1)
        self.assertIn("DROP COLLATION", captured_queries[0]["sql"])
def test_writer(self):
operation = CreateCollation(
"sample_collation",
"und-u-ks-level2",
provider="icu",
deterministic=False,
)
buff, imports = OperationWriter(operation, indentation=0).serialize()
self.assertEqual(imports, {"import django.contrib.postgres.operations"})
self.assertEqual(
buff,
"django.contrib.postgres.operations.CreateCollation(\n"
" name='sample_collation',\n"
" locale='und-u-ks-level2',\n"
" provider='icu',\n"
" deterministic=False,\n"
"),",
)
def test_reduce_create_remove(self):
self.assertOptimizesTo(
[
CreateCollation(
"sample_collation",
"und-u-ks-level2",
provider="icu",
deterministic=False,
),
RemoveCollation(
"sample_collation",
# Different locale
"de-u-ks-level1",
),
],
[],
)
@unittest.skipUnless(connection.vendor == "postgresql", "PostgreSQL specific tests.")
class RemoveCollationTests(PostgreSQLTestCase):
    """Tests for the RemoveCollation migration operation."""

    app_label = "test_allow_remove_collation"
    @override_settings(DATABASE_ROUTERS=[NoMigrationRouter()])
    def test_no_allow_migrate(self):
        """When the router disallows migration, no queries are issued."""
        operation = RemoveCollation("C_test", locale="C")
        project_state = ProjectState()
        new_state = project_state.clone()
        # Don't create a collation.
        with CaptureQueriesContext(connection) as captured_queries:
            with connection.schema_editor(atomic=False) as editor:
                operation.database_forwards(
                    self.app_label, editor, project_state, new_state
                )
        self.assertEqual(len(captured_queries), 0)
        # Reversal.
        with CaptureQueriesContext(connection) as captured_queries:
            with connection.schema_editor(atomic=False) as editor:
                operation.database_backwards(
                    self.app_label, editor, new_state, project_state
                )
        self.assertEqual(len(captured_queries), 0)
    def test_remove(self):
        """
        RemoveCollation drops the collation with one query, raises on a
        nonexistent collation, reverses via CREATE COLLATION, and
        deconstructs to its constructor kwargs.
        """
        # First create the collation so there is something to remove.
        operation = CreateCollation("C_test", locale="C")
        project_state = ProjectState()
        new_state = project_state.clone()
        with connection.schema_editor(atomic=False) as editor:
            operation.database_forwards(
                self.app_label, editor, project_state, new_state
            )
        operation = RemoveCollation("C_test", locale="C")
        self.assertEqual(operation.migration_name_fragment, "remove_collation_c_test")
        self.assertEqual(operation.describe(), "Remove collation C_test")
        self.assertEqual(operation.formatted_description(), "- Remove collation C_test")
        project_state = ProjectState()
        new_state = project_state.clone()
        # Remove a collation.
        with CaptureQueriesContext(connection) as captured_queries:
            with connection.schema_editor(atomic=False) as editor:
                operation.database_forwards(
                    self.app_label, editor, project_state, new_state
                )
        self.assertEqual(len(captured_queries), 1)
        self.assertIn("DROP COLLATION", captured_queries[0]["sql"])
        # Removing a nonexistent collation raises an exception.
        with self.assertRaisesMessage(ProgrammingError, "does not exist"):
            with connection.schema_editor(atomic=True) as editor:
                operation.database_forwards(
                    self.app_label, editor, project_state, new_state
                )
        # Reversal.
        with CaptureQueriesContext(connection) as captured_queries:
            with connection.schema_editor(atomic=False) as editor:
                operation.database_backwards(
                    self.app_label, editor, new_state, project_state
                )
        self.assertEqual(len(captured_queries), 1)
        self.assertIn("CREATE COLLATION", captured_queries[0]["sql"])
        # Deconstruction.
        name, args, kwargs = operation.deconstruct()
        self.assertEqual(name, "RemoveCollation")
        self.assertEqual(args, [])
        self.assertEqual(kwargs, {"name": "C_test", "locale": "C"})
@unittest.skipUnless(connection.vendor == "postgresql", "PostgreSQL specific tests.")
@modify_settings(INSTALLED_APPS={"append": "migrations"})
class AddConstraintNotValidTests(OperationTestBase):
    """Tests for the AddConstraintNotValid migration operation."""

    app_label = "test_add_constraint_not_valid"
    def test_non_check_constraint_not_supported(self):
        """Only check constraints may be added NOT VALID."""
        constraint = UniqueConstraint(fields=["pink"], name="pony_pink_uniq")
        msg = "AddConstraintNotValid.constraint must be a check constraint."
        with self.assertRaisesMessage(TypeError, msg):
            AddConstraintNotValid(model_name="pony", constraint=constraint)
    def test_add(self):
        """
        A NOT VALID check constraint is tracked in state, enforced for new
        rows only (existing rows are not validated), reversible, and
        deconstructible.
        """
        table_name = f"{self.app_label}_pony"
        constraint_name = "pony_pink_gte_check"
        constraint = CheckConstraint(condition=Q(pink__gte=4), name=constraint_name)
        operation = AddConstraintNotValid("Pony", constraint=constraint)
        project_state, new_state = self.make_test_state(self.app_label, operation)
        self.assertEqual(
            operation.describe(),
            f"Create not valid constraint {constraint_name} on model Pony",
        )
        self.assertEqual(
            operation.formatted_description(),
            f"+ Create not valid constraint {constraint_name} on model Pony",
        )
        self.assertEqual(
            operation.migration_name_fragment,
            f"pony_{constraint_name}_not_valid",
        )
        self.assertEqual(
            len(new_state.models[self.app_label, "pony"].options["constraints"]),
            1,
        )
        self.assertConstraintNotExists(table_name, constraint_name)
        Pony = new_state.apps.get_model(self.app_label, "Pony")
        self.assertEqual(len(Pony._meta.constraints), 1)
        # This row violates the constraint but predates it, which is
        # exactly what NOT VALID permits.
        Pony.objects.create(pink=2, weight=1.0)
        # Add constraint.
        with connection.schema_editor(atomic=True) as editor:
            operation.database_forwards(
                self.app_label, editor, project_state, new_state
            )
        msg = f'check constraint "{constraint_name}"'
        with self.assertRaisesMessage(IntegrityError, msg), transaction.atomic():
            Pony.objects.create(pink=3, weight=1.0)
        self.assertConstraintExists(table_name, constraint_name)
        # Reversal.
        with connection.schema_editor(atomic=True) as editor:
            operation.database_backwards(
                self.app_label, editor, project_state, new_state
            )
        self.assertConstraintNotExists(table_name, constraint_name)
        Pony.objects.create(pink=3, weight=1.0)
        # Deconstruction.
        name, args, kwargs = operation.deconstruct()
        self.assertEqual(name, "AddConstraintNotValid")
        self.assertEqual(args, [])
        self.assertEqual(kwargs, {"model_name": "Pony", "constraint": constraint})
@unittest.skipUnless(connection.vendor == "postgresql", "PostgreSQL specific tests.")
@modify_settings(INSTALLED_APPS={"append": "migrations"})
class ValidateConstraintTests(OperationTestBase):
    """Tests for the ValidateConstraint migration operation."""

    app_label = "test_validate_constraint"
    def test_validate(self):
        """
        Validating a NOT VALID constraint fails while a violating row
        exists, succeeds once the row is fixed, and reverses as a no-op.
        """
        constraint_name = "pony_pink_gte_check"
        constraint = CheckConstraint(condition=Q(pink__gte=4), name=constraint_name)
        operation = AddConstraintNotValid("Pony", constraint=constraint)
        project_state, new_state = self.make_test_state(self.app_label, operation)
        Pony = new_state.apps.get_model(self.app_label, "Pony")
        # Pre-existing row that violates the (not yet validated) constraint.
        obj = Pony.objects.create(pink=2, weight=1.0)
        # Add constraint.
        with connection.schema_editor(atomic=True) as editor:
            operation.database_forwards(
                self.app_label, editor, project_state, new_state
            )
        project_state = new_state
        new_state = new_state.clone()
        operation = ValidateConstraint("Pony", name=constraint_name)
        operation.state_forwards(self.app_label, new_state)
        self.assertEqual(
            operation.describe(),
            f"Validate constraint {constraint_name} on model Pony",
        )
        self.assertEqual(
            operation.formatted_description(),
            f"~ Validate constraint {constraint_name} on model Pony",
        )
        self.assertEqual(
            operation.migration_name_fragment,
            f"pony_validate_{constraint_name}",
        )
        # Validate constraint.
        with connection.schema_editor(atomic=True) as editor:
            msg = f'check constraint "{constraint_name}"'
            with self.assertRaisesMessage(IntegrityError, msg):
                operation.database_forwards(
                    self.app_label, editor, project_state, new_state
                )
        # Fix the violating row; validation now succeeds.
        obj.pink = 5
        obj.save()
        with connection.schema_editor(atomic=True) as editor:
            operation.database_forwards(
                self.app_label, editor, project_state, new_state
            )
        # Reversal is a noop.
        with connection.schema_editor() as editor:
            with self.assertNumQueries(0):
                operation.database_backwards(
                    self.app_label, editor, new_state, project_state
                )
        # Deconstruction.
        name, args, kwargs = operation.deconstruct()
        self.assertEqual(name, "ValidateConstraint")
        self.assertEqual(args, [])
        self.assertEqual(kwargs, {"model_name": "Pony", "name": constraint_name})
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/postgres_tests/test_aggregates.py | tests/postgres_tests/test_aggregates.py | from django.db import transaction
from django.db.models import (
CharField,
F,
Func,
IntegerField,
JSONField,
OuterRef,
Q,
Subquery,
Value,
Window,
)
from django.db.models.fields.json import KeyTransform
from django.db.models.functions import Cast, Concat, LPad, Substr
from django.test.utils import Approximate
from django.utils import timezone
from django.utils.deprecation import RemovedInDjango70Warning
from . import PostgreSQLTestCase
from .models import AggregateTestModel, HotelReservation, Room, StatTestModel
try:
from django.contrib.postgres.aggregates import (
StringAgg, # RemovedInDjango70Warning.
)
from django.contrib.postgres.aggregates import (
ArrayAgg,
BitAnd,
BitOr,
BitXor,
BoolAnd,
BoolOr,
Corr,
CovarPop,
JSONBAgg,
RegrAvgX,
RegrAvgY,
RegrCount,
RegrIntercept,
RegrR2,
RegrSlope,
RegrSXX,
RegrSXY,
RegrSYY,
StatAggregate,
)
from django.contrib.postgres.fields import ArrayField
except ImportError:
pass # psycopg2 is not installed
class TestGeneralAggregate(PostgreSQLTestCase):
    """
    Tests for the general-purpose PostgreSQL aggregates (ArrayAgg, BitAnd,
    BitOr, BitXor, BoolAnd, BoolOr, JSONBAgg) and the deprecated StringAgg.

    Four fixture rows are created with Foo1..Foo4 char values; several
    tests rely on their insertion order and integer_field values.
    """

    @classmethod
    def setUpTestData(cls):
        cls.aggs = AggregateTestModel.objects.bulk_create(
            [
                AggregateTestModel(
                    boolean_field=True,
                    char_field="Foo1",
                    text_field="Text1",
                    integer_field=0,
                ),
                AggregateTestModel(
                    boolean_field=False,
                    char_field="Foo2",
                    text_field="Text2",
                    integer_field=1,
                    json_field={"lang": "pl"},
                ),
                AggregateTestModel(
                    boolean_field=False,
                    char_field="Foo4",
                    text_field="Text4",
                    integer_field=2,
                    json_field={"lang": "en"},
                ),
                AggregateTestModel(
                    boolean_field=True,
                    char_field="Foo3",
                    text_field="Text3",
                    integer_field=0,
                    json_field={"breed": "collie"},
                ),
            ]
        )
    def test_empty_result_set(self):
        """Aggregating an empty table or queryset yields None."""
        AggregateTestModel.objects.all().delete()
        tests = [
            ArrayAgg("char_field"),
            ArrayAgg("integer_field"),
            ArrayAgg("boolean_field"),
            BitAnd("integer_field"),
            BitOr("integer_field"),
            BoolAnd("boolean_field"),
            BoolOr("boolean_field"),
            JSONBAgg("integer_field"),
            BitXor("integer_field"),
        ]
        for aggregation in tests:
            with self.subTest(aggregation=aggregation):
                # Empty result with non-execution optimization.
                with self.assertNumQueries(0):
                    values = AggregateTestModel.objects.none().aggregate(
                        aggregation=aggregation,
                    )
                self.assertEqual(values, {"aggregation": None})
                # Empty result when query must be executed.
                with self.assertNumQueries(1):
                    values = AggregateTestModel.objects.aggregate(
                        aggregation=aggregation,
                    )
                self.assertEqual(values, {"aggregation": None})
    def test_default_argument(self):
        """The default kwarg replaces None on empty result sets."""
        AggregateTestModel.objects.all().delete()
        tests = [
            (ArrayAgg("char_field", default=["<empty>"]), ["<empty>"]),
            (ArrayAgg("integer_field", default=[0]), [0]),
            (ArrayAgg("boolean_field", default=[False]), [False]),
            (BitAnd("integer_field", default=0), 0),
            (BitOr("integer_field", default=0), 0),
            (BoolAnd("boolean_field", default=False), False),
            (BoolOr("boolean_field", default=False), False),
            (JSONBAgg("integer_field", default=["<empty>"]), ["<empty>"]),
            (
                JSONBAgg("integer_field", default=Value(["<empty>"], JSONField())),
                ["<empty>"],
            ),
            (BitXor("integer_field", default=0), 0),
        ]
        for aggregation, expected_result in tests:
            with self.subTest(aggregation=aggregation):
                # Empty result with non-execution optimization.
                with self.assertNumQueries(0):
                    values = AggregateTestModel.objects.none().aggregate(
                        aggregation=aggregation,
                    )
                self.assertEqual(values, {"aggregation": expected_result})
                # Empty result when query must be executed.
                with transaction.atomic(), self.assertNumQueries(1):
                    values = AggregateTestModel.objects.aggregate(
                        aggregation=aggregation,
                    )
                self.assertEqual(values, {"aggregation": expected_result})
    def test_array_agg_charfield(self):
        values = AggregateTestModel.objects.aggregate(arrayagg=ArrayAgg("char_field"))
        self.assertEqual(values, {"arrayagg": ["Foo1", "Foo2", "Foo4", "Foo3"]})
    def test_array_agg_charfield_order_by(self):
        """order_by accepts strings, F() expressions, and expression tuples."""
        order_by_test_cases = (
            (F("char_field").desc(), ["Foo4", "Foo3", "Foo2", "Foo1"]),
            (F("char_field").asc(), ["Foo1", "Foo2", "Foo3", "Foo4"]),
            (F("char_field"), ["Foo1", "Foo2", "Foo3", "Foo4"]),
            (
                [F("boolean_field"), F("char_field").desc()],
                ["Foo4", "Foo2", "Foo3", "Foo1"],
            ),
            (
                (F("boolean_field"), F("char_field").desc()),
                ["Foo4", "Foo2", "Foo3", "Foo1"],
            ),
            ("char_field", ["Foo1", "Foo2", "Foo3", "Foo4"]),
            ("-char_field", ["Foo4", "Foo3", "Foo2", "Foo1"]),
            (Concat("char_field", Value("@")), ["Foo1", "Foo2", "Foo3", "Foo4"]),
            (Concat("char_field", Value("@")).desc(), ["Foo4", "Foo3", "Foo2", "Foo1"]),
            (
                (
                    Substr("char_field", 1, 1),
                    F("integer_field"),
                    Substr("char_field", 4, 1).desc(),
                ),
                ["Foo3", "Foo1", "Foo2", "Foo4"],
            ),
        )
        for order_by, expected_output in order_by_test_cases:
            with self.subTest(order_by=order_by, expected_output=expected_output):
                values = AggregateTestModel.objects.aggregate(
                    arrayagg=ArrayAgg("char_field", order_by=order_by)
                )
                self.assertEqual(values, {"arrayagg": expected_output})
    def test_array_agg_integerfield(self):
        values = AggregateTestModel.objects.aggregate(
            arrayagg=ArrayAgg("integer_field")
        )
        self.assertEqual(values, {"arrayagg": [0, 1, 2, 0]})
    def test_array_agg_integerfield_order_by(self):
        values = AggregateTestModel.objects.aggregate(
            arrayagg=ArrayAgg("integer_field", order_by=F("integer_field").desc())
        )
        self.assertEqual(values, {"arrayagg": [2, 1, 0, 0]})
    def test_array_agg_booleanfield(self):
        values = AggregateTestModel.objects.aggregate(
            arrayagg=ArrayAgg("boolean_field")
        )
        self.assertEqual(values, {"arrayagg": [True, False, False, True]})
    def test_array_agg_booleanfield_order_by(self):
        order_by_test_cases = (
            (F("boolean_field").asc(), [False, False, True, True]),
            (F("boolean_field").desc(), [True, True, False, False]),
            (F("boolean_field"), [False, False, True, True]),
        )
        for order_by, expected_output in order_by_test_cases:
            with self.subTest(order_by=order_by, expected_output=expected_output):
                values = AggregateTestModel.objects.aggregate(
                    arrayagg=ArrayAgg("boolean_field", order_by=order_by)
                )
                self.assertEqual(values, {"arrayagg": expected_output})
    def test_array_agg_jsonfield(self):
        """ArrayAgg can aggregate a JSON key transform."""
        values = AggregateTestModel.objects.aggregate(
            arrayagg=ArrayAgg(
                KeyTransform("lang", "json_field"),
                filter=Q(json_field__lang__isnull=False),
            ),
        )
        self.assertEqual(values, {"arrayagg": ["pl", "en"]})
    def test_array_agg_jsonfield_order_by(self):
        values = AggregateTestModel.objects.aggregate(
            arrayagg=ArrayAgg(
                KeyTransform("lang", "json_field"),
                filter=Q(json_field__lang__isnull=False),
                order_by=KeyTransform("lang", "json_field"),
            ),
        )
        self.assertEqual(values, {"arrayagg": ["en", "pl"]})
    def test_array_agg_filter_and_order_by_params(self):
        """filter and order_by parameters compose in a single aggregate."""
        values = AggregateTestModel.objects.aggregate(
            arrayagg=ArrayAgg(
                "char_field",
                filter=Q(json_field__has_key="lang"),
                order_by=LPad(Cast("integer_field", CharField()), 2, Value("0")),
            )
        )
        self.assertEqual(values, {"arrayagg": ["Foo2", "Foo4"]})
    def test_array_agg_filter(self):
        values = AggregateTestModel.objects.aggregate(
            arrayagg=ArrayAgg("integer_field", filter=Q(integer_field__gt=0)),
        )
        self.assertEqual(values, {"arrayagg": [1, 2]})
    def test_array_agg_lookups(self):
        """Array lookups (e.g. __overlap) work on annotated ArrayAgg values."""
        aggr1 = AggregateTestModel.objects.create()
        aggr2 = AggregateTestModel.objects.create()
        StatTestModel.objects.create(related_field=aggr1, int1=1, int2=0)
        StatTestModel.objects.create(related_field=aggr1, int1=2, int2=0)
        StatTestModel.objects.create(related_field=aggr2, int1=3, int2=0)
        StatTestModel.objects.create(related_field=aggr2, int1=4, int2=0)
        qs = (
            StatTestModel.objects.values("related_field")
            .annotate(array=ArrayAgg("int1"))
            .filter(array__overlap=[2])
            .values_list("array", flat=True)
        )
        self.assertCountEqual(qs.get(), [1, 2])
    def test_array_agg_filter_index(self):
        """Index transforms (__0) apply to a filtered ArrayAgg annotation."""
        aggr1 = AggregateTestModel.objects.create(integer_field=1)
        aggr2 = AggregateTestModel.objects.create(integer_field=2)
        StatTestModel.objects.bulk_create(
            [
                StatTestModel(related_field=aggr1, int1=1, int2=0),
                StatTestModel(related_field=aggr1, int1=2, int2=1),
                StatTestModel(related_field=aggr2, int1=3, int2=0),
                StatTestModel(related_field=aggr2, int1=4, int2=1),
            ]
        )
        qs = (
            AggregateTestModel.objects.filter(pk__in=[aggr1.pk, aggr2.pk])
            .annotate(
                array=ArrayAgg("stattestmodel__int1", filter=Q(stattestmodel__int2=0))
            )
            .annotate(array_value=F("array__0"))
            .values_list("array_value", flat=True)
        )
        self.assertCountEqual(qs, [1, 3])
    def test_array_agg_filter_slice(self):
        """Slice transforms (__1_2) apply to a filtered ArrayAgg annotation."""
        aggr1 = AggregateTestModel.objects.create(integer_field=1)
        aggr2 = AggregateTestModel.objects.create(integer_field=2)
        StatTestModel.objects.bulk_create(
            [
                StatTestModel(related_field=aggr1, int1=1, int2=0),
                StatTestModel(related_field=aggr1, int1=2, int2=1),
                StatTestModel(related_field=aggr2, int1=3, int2=0),
                StatTestModel(related_field=aggr2, int1=4, int2=1),
                StatTestModel(related_field=aggr2, int1=5, int2=0),
            ]
        )
        qs = (
            AggregateTestModel.objects.filter(pk__in=[aggr1.pk, aggr2.pk])
            .annotate(
                array=ArrayAgg("stattestmodel__int1", filter=Q(stattestmodel__int2=0))
            )
            .annotate(array_value=F("array__1_2"))
            .values_list("array_value", flat=True)
        )
        self.assertCountEqual(qs, [[], [5]])
    def test_array_agg_with_empty_filter_and_default_values(self):
        for filter_value in ([-1], []):
            for default_value in ([], Value([])):
                with self.subTest(filter=filter_value, default=default_value):
                    queryset = AggregateTestModel.objects.annotate(
                        test_array_agg=ArrayAgg(
                            "stattestmodel__int1",
                            filter=Q(pk__in=filter_value),
                            default=default_value,
                        )
                    )
                    self.assertSequenceEqual(
                        queryset.values_list("test_array_agg", flat=True),
                        [[], [], [], []],
                    )
    def test_array_agg_with_order_by_outer_ref(self):
        # Only verifies the query compiles; no assertion needed.
        StatTestModel.objects.annotate(
            atm_ids=Subquery(
                AggregateTestModel.objects.annotate(
                    ids=ArrayAgg(
                        "id",
                        order_by=[OuterRef("int1")],
                    )
                ).values("ids")[:1]
            )
        )
    def test_bit_and_general(self):
        values = AggregateTestModel.objects.filter(integer_field__in=[0, 1]).aggregate(
            bitand=BitAnd("integer_field")
        )
        self.assertEqual(values, {"bitand": 0})
    def test_bit_and_on_only_true_values(self):
        values = AggregateTestModel.objects.filter(integer_field=1).aggregate(
            bitand=BitAnd("integer_field")
        )
        self.assertEqual(values, {"bitand": 1})
    def test_bit_and_on_only_false_values(self):
        values = AggregateTestModel.objects.filter(integer_field=0).aggregate(
            bitand=BitAnd("integer_field")
        )
        self.assertEqual(values, {"bitand": 0})
    def test_bit_or_general(self):
        values = AggregateTestModel.objects.filter(integer_field__in=[0, 1]).aggregate(
            bitor=BitOr("integer_field")
        )
        self.assertEqual(values, {"bitor": 1})
    def test_bit_or_on_only_true_values(self):
        values = AggregateTestModel.objects.filter(integer_field=1).aggregate(
            bitor=BitOr("integer_field")
        )
        self.assertEqual(values, {"bitor": 1})
    def test_bit_or_on_only_false_values(self):
        values = AggregateTestModel.objects.filter(integer_field=0).aggregate(
            bitor=BitOr("integer_field")
        )
        self.assertEqual(values, {"bitor": 0})
    def test_bit_xor_general(self):
        AggregateTestModel.objects.create(integer_field=3)
        values = AggregateTestModel.objects.filter(
            integer_field__in=[1, 3],
        ).aggregate(bitxor=BitXor("integer_field"))
        self.assertEqual(values, {"bitxor": 2})
    def test_bit_xor_on_only_true_values(self):
        values = AggregateTestModel.objects.filter(
            integer_field=1,
        ).aggregate(bitxor=BitXor("integer_field"))
        self.assertEqual(values, {"bitxor": 1})
    def test_bit_xor_on_only_false_values(self):
        values = AggregateTestModel.objects.filter(
            integer_field=0,
        ).aggregate(bitxor=BitXor("integer_field"))
        self.assertEqual(values, {"bitxor": 0})
    def test_bool_and_general(self):
        values = AggregateTestModel.objects.aggregate(booland=BoolAnd("boolean_field"))
        self.assertEqual(values, {"booland": False})
    def test_bool_and_q_object(self):
        values = AggregateTestModel.objects.aggregate(
            booland=BoolAnd(Q(integer_field__gt=2)),
        )
        self.assertEqual(values, {"booland": False})
    def test_bool_or_general(self):
        values = AggregateTestModel.objects.aggregate(boolor=BoolOr("boolean_field"))
        self.assertEqual(values, {"boolor": True})
    def test_bool_or_q_object(self):
        values = AggregateTestModel.objects.aggregate(
            boolor=BoolOr(Q(integer_field__gt=2)),
        )
        self.assertEqual(values, {"boolor": False})
    def test_orderable_agg_alternative_fields(self):
        # Ordering may reference a field other than the aggregated one.
        values = AggregateTestModel.objects.aggregate(
            arrayagg=ArrayAgg("integer_field", order_by=F("char_field").asc())
        )
        self.assertEqual(values, {"arrayagg": [0, 1, 0, 2]})
    def test_jsonb_agg(self):
        values = AggregateTestModel.objects.aggregate(jsonbagg=JSONBAgg("char_field"))
        self.assertEqual(values, {"jsonbagg": ["Foo1", "Foo2", "Foo4", "Foo3"]})
    def test_jsonb_agg_charfield_order_by(self):
        order_by_test_cases = (
            (F("char_field").desc(), ["Foo4", "Foo3", "Foo2", "Foo1"]),
            (F("char_field").asc(), ["Foo1", "Foo2", "Foo3", "Foo4"]),
            (F("char_field"), ["Foo1", "Foo2", "Foo3", "Foo4"]),
            ("char_field", ["Foo1", "Foo2", "Foo3", "Foo4"]),
            ("-char_field", ["Foo4", "Foo3", "Foo2", "Foo1"]),
            (Concat("char_field", Value("@")), ["Foo1", "Foo2", "Foo3", "Foo4"]),
            (Concat("char_field", Value("@")).desc(), ["Foo4", "Foo3", "Foo2", "Foo1"]),
        )
        for order_by, expected_output in order_by_test_cases:
            with self.subTest(order_by=order_by, expected_output=expected_output):
                values = AggregateTestModel.objects.aggregate(
                    jsonbagg=JSONBAgg("char_field", order_by=order_by),
                )
                self.assertEqual(values, {"jsonbagg": expected_output})
    def test_jsonb_agg_integerfield_order_by(self):
        values = AggregateTestModel.objects.aggregate(
            jsonbagg=JSONBAgg("integer_field", order_by=F("integer_field").desc()),
        )
        self.assertEqual(values, {"jsonbagg": [2, 1, 0, 0]})
    def test_jsonb_agg_booleanfield_order_by(self):
        order_by_test_cases = (
            (F("boolean_field").asc(), [False, False, True, True]),
            (F("boolean_field").desc(), [True, True, False, False]),
            (F("boolean_field"), [False, False, True, True]),
        )
        for order_by, expected_output in order_by_test_cases:
            with self.subTest(order_by=order_by, expected_output=expected_output):
                values = AggregateTestModel.objects.aggregate(
                    jsonbagg=JSONBAgg("boolean_field", order_by=order_by),
                )
                self.assertEqual(values, {"jsonbagg": expected_output})
    def test_jsonb_agg_jsonfield_order_by(self):
        values = AggregateTestModel.objects.aggregate(
            jsonbagg=JSONBAgg(
                KeyTransform("lang", "json_field"),
                filter=Q(json_field__lang__isnull=False),
                order_by=KeyTransform("lang", "json_field"),
            ),
        )
        self.assertEqual(values, {"jsonbagg": ["en", "pl"]})
    def test_jsonb_agg_key_index_transforms(self):
        """Key/index transforms can filter on a JSONBAgg annotation."""
        room101 = Room.objects.create(number=101)
        room102 = Room.objects.create(number=102)
        datetimes = [
            timezone.datetime(2018, 6, 20),
            timezone.datetime(2018, 6, 24),
            timezone.datetime(2018, 6, 28),
        ]
        HotelReservation.objects.create(
            datespan=(datetimes[0].date(), datetimes[1].date()),
            start=datetimes[0],
            end=datetimes[1],
            room=room102,
            requirements={"double_bed": True, "parking": True},
        )
        HotelReservation.objects.create(
            datespan=(datetimes[1].date(), datetimes[2].date()),
            start=datetimes[1],
            end=datetimes[2],
            room=room102,
            requirements={"double_bed": False, "sea_view": True, "parking": False},
        )
        HotelReservation.objects.create(
            datespan=(datetimes[0].date(), datetimes[2].date()),
            start=datetimes[0],
            end=datetimes[2],
            room=room101,
            requirements={"sea_view": False},
        )
        values = (
            Room.objects.annotate(
                requirements=JSONBAgg(
                    "hotelreservation__requirements",
                    order_by="-hotelreservation__start",
                )
            )
            .filter(requirements__0__sea_view=True)
            .values("number", "requirements")
        )
        self.assertSequenceEqual(
            values,
            [
                {
                    "number": 102,
                    "requirements": [
                        {"double_bed": False, "sea_view": True, "parking": False},
                        {"double_bed": True, "parking": True},
                    ],
                },
            ],
        )
    def test_array_agg_order_by_in_subquery(self):
        """The aggregate's order_by survives use inside a subquery."""
        stats = []
        for i, agg in enumerate(AggregateTestModel.objects.order_by("char_field")):
            stats.append(StatTestModel(related_field=agg, int1=i, int2=i + 1))
            stats.append(StatTestModel(related_field=agg, int1=i + 1, int2=i))
        StatTestModel.objects.bulk_create(stats)
        aggregate = ArrayAgg("stattestmodel__int1", order_by="-stattestmodel__int2")
        expected_result = [
            ("Foo1", [0, 1]),
            ("Foo2", [1, 2]),
            ("Foo3", [2, 3]),
            ("Foo4", [3, 4]),
        ]
        subquery = (
            AggregateTestModel.objects.filter(
                pk=OuterRef("pk"),
            )
            .annotate(agg=aggregate)
            .values("agg")
        )
        values = (
            AggregateTestModel.objects.annotate(
                agg=Subquery(subquery),
            )
            .order_by("char_field")
            .values_list("char_field", "agg")
        )
        self.assertEqual(list(values), expected_result)
    def test_string_agg_array_agg_filter_in_subquery(self):
        """A filtered ArrayAgg survives use inside a subquery."""
        StatTestModel.objects.bulk_create(
            [
                StatTestModel(related_field=self.aggs[0], int1=0, int2=5),
                StatTestModel(related_field=self.aggs[0], int1=1, int2=4),
                StatTestModel(related_field=self.aggs[0], int1=2, int2=3),
            ]
        )
        aggregate = ArrayAgg(
            "stattestmodel__int1",
            filter=Q(stattestmodel__int2__gt=3),
        )
        expected_result = [("Foo1", [0, 1]), ("Foo2", None)]
        subquery = (
            AggregateTestModel.objects.filter(
                pk=OuterRef("pk"),
            )
            .annotate(agg=aggregate)
            .values("agg")
        )
        values = (
            AggregateTestModel.objects.annotate(
                agg=Subquery(subquery),
            )
            .filter(
                char_field__in=["Foo1", "Foo2"],
            )
            .order_by("char_field")
            .values_list("char_field", "agg")
        )
        self.assertEqual(list(values), expected_result)
    def test_ordering_isnt_cleared_for_array_subquery(self):
        inner_qs = AggregateTestModel.objects.order_by("-integer_field")
        qs = AggregateTestModel.objects.annotate(
            integers=Func(
                Subquery(inner_qs.values("integer_field")),
                function="ARRAY",
                output_field=ArrayField(base_field=IntegerField()),
            ),
        )
        self.assertSequenceEqual(
            qs.first().integers,
            inner_qs.values_list("integer_field", flat=True),
        )
    def test_window(self):
        """ArrayAgg works as a window expression partitioned by a field."""
        self.assertCountEqual(
            AggregateTestModel.objects.annotate(
                integers=Window(
                    expression=ArrayAgg("char_field"),
                    partition_by=F("integer_field"),
                )
            ).values("integers", "char_field"),
            [
                {"integers": ["Foo1", "Foo3"], "char_field": "Foo1"},
                {"integers": ["Foo1", "Foo3"], "char_field": "Foo3"},
                {"integers": ["Foo2"], "char_field": "Foo2"},
                {"integers": ["Foo4"], "char_field": "Foo4"},
            ],
        )
    def test_values_list(self):
        tests = [ArrayAgg("integer_field"), JSONBAgg("integer_field")]
        for aggregation in tests:
            with self.subTest(aggregation=aggregation):
                results = AggregateTestModel.objects.annotate(
                    agg=aggregation
                ).values_list("agg")
                self.assertCountEqual(
                    results,
                    [([0],), ([1],), ([2],), ([0],)],
                )
    def test_string_agg_delimiter_deprecation(self):
        """A plain-string delimiter warns about the Django 7.0 change."""
        msg = (
            "delimiter: str will be resolved as a field reference instead "
            'of a string literal on Django 7.0. Pass `delimiter=Value("\'")` to '
            "preserve the previous behavior."
        )
        with self.assertWarnsMessage(RemovedInDjango70Warning, msg) as ctx:
            values = AggregateTestModel.objects.aggregate(
                stringagg=StringAgg("char_field", delimiter="'")
            )
            self.assertEqual(values, {"stringagg": "Foo1'Foo2'Foo4'Foo3"})
        self.assertEqual(ctx.filename, __file__)
    def test_string_agg_deprecation(self):
        """The postgres-specific StringAgg warns but still aggregates."""
        msg = (
            "The PostgreSQL specific StringAgg function is deprecated. Use "
            "django.db.models.aggregates.StringAgg instead."
        )
        with self.assertWarnsMessage(RemovedInDjango70Warning, msg) as ctx:
            values = AggregateTestModel.objects.aggregate(
                stringagg=StringAgg("char_field", delimiter=Value("'"))
            )
            self.assertEqual(values, {"stringagg": "Foo1'Foo2'Foo4'Foo3"})
        self.assertEqual(ctx.filename, __file__)
class TestAggregateDistinct(PostgreSQLTestCase):
    """distinct=True collapses duplicate values in ArrayAgg and JSONBAgg."""

    @classmethod
    def setUpTestData(cls):
        # Two identical rows plus one distinct row.
        for char_field in ("Foo", "Foo", "Bar"):
            AggregateTestModel.objects.create(char_field=char_field)
    def test_array_agg_distinct_false(self):
        result = AggregateTestModel.objects.aggregate(
            arrayagg=ArrayAgg("char_field", distinct=False)
        )
        self.assertEqual(sorted(result["arrayagg"]), ["Bar", "Foo", "Foo"])
    def test_array_agg_distinct_true(self):
        result = AggregateTestModel.objects.aggregate(
            arrayagg=ArrayAgg("char_field", distinct=True)
        )
        self.assertEqual(sorted(result["arrayagg"]), ["Bar", "Foo"])
    def test_jsonb_agg_distinct_false(self):
        result = AggregateTestModel.objects.aggregate(
            jsonbagg=JSONBAgg("char_field", distinct=False),
        )
        self.assertEqual(sorted(result["jsonbagg"]), ["Bar", "Foo", "Foo"])
    def test_jsonb_agg_distinct_true(self):
        result = AggregateTestModel.objects.aggregate(
            jsonbagg=JSONBAgg("char_field", distinct=True),
        )
        self.assertEqual(sorted(result["jsonbagg"]), ["Bar", "Foo"])
class TestStatisticsAggregate(PostgreSQLTestCase):
@classmethod
def setUpTestData(cls):
StatTestModel.objects.create(
int1=1,
int2=3,
related_field=AggregateTestModel.objects.create(integer_field=0),
)
StatTestModel.objects.create(
int1=2,
int2=2,
related_field=AggregateTestModel.objects.create(integer_field=1),
)
StatTestModel.objects.create(
int1=3,
int2=1,
related_field=AggregateTestModel.objects.create(integer_field=2),
)
# Tests for base class (StatAggregate)
def test_missing_arguments_raises_exception(self):
with self.assertRaisesMessage(ValueError, "Both y and x must be provided."):
StatAggregate(x=None, y=None)
def test_correct_source_expressions(self):
func = StatAggregate(x="test", y=13)
self.assertIsInstance(func.source_expressions[0], Value)
self.assertIsInstance(func.source_expressions[1], F)
    def test_alias_is_required(self):
        """Complex (multi-expression) aggregates must be given an alias."""
        class SomeFunc(StatAggregate):
            function = "TEST"
        with self.assertRaisesMessage(TypeError, "Complex aggregates require an alias"):
            StatTestModel.objects.aggregate(SomeFunc(y="int2", x="int1"))
# Test aggregates
    def test_empty_result_set(self):
        """
        Statistical aggregates on an empty table yield None, except
        RegrCount which yields 0.
        """
        StatTestModel.objects.all().delete()
        tests = [
            (Corr(y="int2", x="int1"), None),
            (CovarPop(y="int2", x="int1"), None),
            (CovarPop(y="int2", x="int1", sample=True), None),
            (RegrAvgX(y="int2", x="int1"), None),
            (RegrAvgY(y="int2", x="int1"), None),
            (RegrCount(y="int2", x="int1"), 0),
            (RegrIntercept(y="int2", x="int1"), None),
            (RegrR2(y="int2", x="int1"), None),
            (RegrSlope(y="int2", x="int1"), None),
            (RegrSXX(y="int2", x="int1"), None),
            (RegrSXY(y="int2", x="int1"), None),
            (RegrSYY(y="int2", x="int1"), None),
        ]
        for aggregation, expected_result in tests:
            with self.subTest(aggregation=aggregation):
                # Empty result with non-execution optimization.
                with self.assertNumQueries(0):
                    values = StatTestModel.objects.none().aggregate(
                        aggregation=aggregation,
                    )
                self.assertEqual(values, {"aggregation": expected_result})
                # Empty result when query must be executed.
                with self.assertNumQueries(1):
                    values = StatTestModel.objects.aggregate(
                        aggregation=aggregation,
                    )
                self.assertEqual(values, {"aggregation": expected_result})
def test_default_argument(self):
StatTestModel.objects.all().delete()
tests = [
(Corr(y="int2", x="int1", default=0), 0),
(CovarPop(y="int2", x="int1", default=0), 0),
(CovarPop(y="int2", x="int1", sample=True, default=0), 0),
(RegrAvgX(y="int2", x="int1", default=0), 0),
(RegrAvgY(y="int2", x="int1", default=0), 0),
# RegrCount() doesn't support the default argument.
(RegrIntercept(y="int2", x="int1", default=0), 0),
(RegrR2(y="int2", x="int1", default=0), 0),
(RegrSlope(y="int2", x="int1", default=0), 0),
(RegrSXX(y="int2", x="int1", default=0), 0),
(RegrSXY(y="int2", x="int1", default=0), 0),
(RegrSYY(y="int2", x="int1", default=0), 0),
]
for aggregation, expected_result in tests:
with self.subTest(aggregation=aggregation):
# Empty result with non-execution optimization.
with self.assertNumQueries(0):
values = StatTestModel.objects.none().aggregate(
aggregation=aggregation,
)
self.assertEqual(values, {"aggregation": expected_result})
# Empty result when query must be executed.
with self.assertNumQueries(1):
values = StatTestModel.objects.aggregate(
aggregation=aggregation,
)
self.assertEqual(values, {"aggregation": expected_result})
def test_corr_general(self):
values = StatTestModel.objects.aggregate(corr=Corr(y="int2", x="int1"))
self.assertEqual(values, {"corr": -1.0})
def test_covar_pop_general(self):
values = StatTestModel.objects.aggregate(covarpop=CovarPop(y="int2", x="int1"))
self.assertEqual(values, {"covarpop": Approximate(-0.66, places=1)})
def test_covar_pop_sample(self):
values = StatTestModel.objects.aggregate(
covarpop=CovarPop(y="int2", x="int1", sample=True)
)
self.assertEqual(values, {"covarpop": -1.0})
def test_regr_avgx_general(self):
values = StatTestModel.objects.aggregate(regravgx=RegrAvgX(y="int2", x="int1"))
self.assertEqual(values, {"regravgx": 2.0})
def test_regr_avgy_general(self):
values = StatTestModel.objects.aggregate(regravgy=RegrAvgY(y="int2", x="int1"))
self.assertEqual(values, {"regravgy": 2.0})
def test_regr_count_general(self):
values = StatTestModel.objects.aggregate(
regrcount=RegrCount(y="int2", x="int1")
)
self.assertEqual(values, {"regrcount": 3})
def test_regr_count_default(self):
msg = "RegrCount does not allow default."
with self.assertRaisesMessage(TypeError, msg):
RegrCount(y="int2", x="int1", default=0)
def test_regr_intercept_general(self):
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | true |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/postgres_tests/integration_settings.py | tests/postgres_tests/integration_settings.py | SECRET_KEY = "abcdefg"
INSTALLED_APPS = [
"django.contrib.postgres",
]
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/postgres_tests/test_functions.py | tests/postgres_tests/test_functions.py | import uuid
from datetime import datetime
from time import sleep
from django.contrib.postgres.functions import RandomUUID, TransactionNow
from . import PostgreSQLTestCase
from .models import NowTestModel, UUIDTestModel
class TestTransactionNow(PostgreSQLTestCase):
def test_transaction_now(self):
"""
The test case puts everything under a transaction, so two models
updated with a short gap should have the same time.
"""
m1 = NowTestModel.objects.create()
m2 = NowTestModel.objects.create()
NowTestModel.objects.filter(id=m1.id).update(when=TransactionNow())
sleep(0.1)
NowTestModel.objects.filter(id=m2.id).update(when=TransactionNow())
m1.refresh_from_db()
m2.refresh_from_db()
self.assertIsInstance(m1.when, datetime)
self.assertEqual(m1.when, m2.when)
class TestRandomUUID(PostgreSQLTestCase):
def test_random_uuid(self):
m1 = UUIDTestModel.objects.create()
m2 = UUIDTestModel.objects.create()
UUIDTestModel.objects.update(uuid=RandomUUID())
m1.refresh_from_db()
m2.refresh_from_db()
self.assertIsInstance(m1.uuid, uuid.UUID)
self.assertNotEqual(m1.uuid, m2.uuid)
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/postgres_tests/test_array.py | tests/postgres_tests/test_array.py | import decimal
import enum
import json
import unittest
import uuid
from django import forms
from django.contrib.admin.utils import display_for_field
from django.core import checks, exceptions, serializers, validators
from django.core.exceptions import FieldError
from django.core.management import call_command
from django.db import IntegrityError, connection, models
from django.db.models import JSONNull
from django.db.models.expressions import Exists, F, OuterRef, RawSQL, Value
from django.db.models.functions import Cast, JSONObject, Upper
from django.test import TransactionTestCase, override_settings, skipUnlessDBFeature
from django.test.utils import isolate_apps
from django.utils import timezone
from django.utils.deprecation import RemovedInDjango70Warning
from . import PostgreSQLSimpleTestCase, PostgreSQLTestCase, PostgreSQLWidgetTestCase
from .models import (
ArrayEnumModel,
ArrayFieldSubclass,
CharArrayModel,
DateTimeArrayModel,
IntegerArrayModel,
NestedIntegerArrayModel,
NullableIntegerArrayModel,
OtherTypesArrayModel,
PostgreSQLModel,
Tag,
WithSizeArrayModel,
)
try:
from django.contrib.postgres.aggregates import ArrayAgg
from django.contrib.postgres.expressions import ArraySubquery
from django.contrib.postgres.fields import ArrayField
from django.contrib.postgres.fields.array import IndexTransform, SliceTransform
from django.contrib.postgres.forms import (
SimpleArrayField,
SplitArrayField,
SplitArrayWidget,
)
from django.db.backends.postgresql.psycopg_any import NumericRange
except ImportError:
pass
@isolate_apps("postgres_tests")
class BasicTests(PostgreSQLSimpleTestCase):
def test_get_field_display(self):
class MyModel(PostgreSQLModel):
field = ArrayField(
models.CharField(max_length=16),
choices=[
["Media", [(["vinyl", "cd"], "Audio")]],
(("mp3", "mp4"), "Digital"),
],
)
tests = (
(["vinyl", "cd"], "Audio"),
(("mp3", "mp4"), "Digital"),
(("a", "b"), "('a', 'b')"),
(["c", "d"], "['c', 'd']"),
)
for value, display in tests:
with self.subTest(value=value, display=display):
instance = MyModel(field=value)
self.assertEqual(instance.get_field_display(), display)
def test_get_field_display_nested_array(self):
class MyModel(PostgreSQLModel):
field = ArrayField(
ArrayField(models.CharField(max_length=16)),
choices=[
[
"Media",
[([["vinyl", "cd"], ("x",)], "Audio")],
],
((["mp3"], ("mp4",)), "Digital"),
],
)
tests = (
([["vinyl", "cd"], ("x",)], "Audio"),
((["mp3"], ("mp4",)), "Digital"),
((("a", "b"), ("c",)), "(('a', 'b'), ('c',))"),
([["a", "b"], ["c"]], "[['a', 'b'], ['c']]"),
)
for value, display in tests:
with self.subTest(value=value, display=display):
instance = MyModel(field=value)
self.assertEqual(instance.get_field_display(), display)
class TestSaveLoad(PostgreSQLTestCase):
def test_integer(self):
instance = IntegerArrayModel(field=[1, 2, 3])
instance.save()
loaded = IntegerArrayModel.objects.get()
self.assertEqual(instance.field, loaded.field)
def test_char(self):
instance = CharArrayModel(field=["hello", "goodbye"])
instance.save()
loaded = CharArrayModel.objects.get()
self.assertEqual(instance.field, loaded.field)
def test_dates(self):
instance = DateTimeArrayModel(
datetimes=[timezone.now()],
dates=[timezone.now().date()],
times=[timezone.now().time()],
)
instance.save()
loaded = DateTimeArrayModel.objects.get()
self.assertEqual(instance.datetimes, loaded.datetimes)
self.assertEqual(instance.dates, loaded.dates)
self.assertEqual(instance.times, loaded.times)
def test_tuples(self):
instance = IntegerArrayModel(field=(1,))
instance.save()
loaded = IntegerArrayModel.objects.get()
self.assertSequenceEqual(instance.field, loaded.field)
def test_integers_passed_as_strings(self):
# This checks that get_prep_value is deferred properly
instance = IntegerArrayModel(field=["1"])
instance.save()
loaded = IntegerArrayModel.objects.get()
self.assertEqual(loaded.field, [1])
def test_default_null(self):
instance = NullableIntegerArrayModel()
instance.save()
loaded = NullableIntegerArrayModel.objects.get(pk=instance.pk)
self.assertIsNone(loaded.field)
self.assertEqual(instance.field, loaded.field)
def test_null_handling(self):
instance = NullableIntegerArrayModel(field=None)
instance.save()
loaded = NullableIntegerArrayModel.objects.get()
self.assertEqual(instance.field, loaded.field)
instance = IntegerArrayModel(field=None)
with self.assertRaises(IntegrityError):
instance.save()
def test_nested(self):
instance = NestedIntegerArrayModel(field=[[1, 2], [3, 4]])
instance.save()
loaded = NestedIntegerArrayModel.objects.get()
self.assertEqual(instance.field, loaded.field)
def test_other_array_types(self):
instance = OtherTypesArrayModel(
ips=["192.168.0.1", "::1"],
uuids=[uuid.uuid4()],
decimals=[decimal.Decimal(1.25), 1.75],
tags=[Tag(1), Tag(2), Tag(3)],
json=[{"a": 1}, {"b": 2}],
int_ranges=[NumericRange(10, 20), NumericRange(30, 40)],
bigint_ranges=[
NumericRange(7000000000, 10000000000),
NumericRange(50000000000, 70000000000),
],
)
instance.save()
loaded = OtherTypesArrayModel.objects.get()
self.assertEqual(instance.ips, loaded.ips)
self.assertEqual(instance.uuids, loaded.uuids)
self.assertEqual(instance.decimals, loaded.decimals)
self.assertEqual(instance.tags, loaded.tags)
self.assertEqual(instance.json, loaded.json)
self.assertEqual(instance.int_ranges, loaded.int_ranges)
self.assertEqual(instance.bigint_ranges, loaded.bigint_ranges)
def test_null_from_db_value_handling(self):
instance = OtherTypesArrayModel.objects.create(
ips=["192.168.0.1", "::1"],
uuids=[uuid.uuid4()],
decimals=[decimal.Decimal(1.25), 1.75],
tags=None,
)
instance.refresh_from_db()
self.assertIsNone(instance.tags)
self.assertEqual(instance.json, [])
self.assertIsNone(instance.int_ranges)
self.assertIsNone(instance.bigint_ranges)
def test_model_set_on_base_field(self):
instance = IntegerArrayModel()
field = instance._meta.get_field("field")
self.assertEqual(field.model, IntegerArrayModel)
self.assertEqual(field.base_field.model, IntegerArrayModel)
def test_nested_nullable_base_field(self):
instance = NullableIntegerArrayModel.objects.create(
field_nested=[[None, None], [None, None]],
)
self.assertEqual(instance.field_nested, [[None, None], [None, None]])
class TestQuerying(PostgreSQLTestCase):
@classmethod
def setUpTestData(cls):
cls.objs = NullableIntegerArrayModel.objects.bulk_create(
[
NullableIntegerArrayModel(order=1, field=[1]),
NullableIntegerArrayModel(order=2, field=[2]),
NullableIntegerArrayModel(order=3, field=[2, 3]),
NullableIntegerArrayModel(order=4, field=[20, 30, 40]),
NullableIntegerArrayModel(order=5, field=None),
]
)
def test_bulk_create_with_sized_arrayfield(self):
objs = WithSizeArrayModel.objects.bulk_create(
[
WithSizeArrayModel(field=[1, 2]),
WithSizeArrayModel(field=[3, 4]),
]
)
self.assertEqual(objs[0].field, [1, 2])
self.assertEqual(objs[1].field, [3, 4])
def test_empty_list(self):
NullableIntegerArrayModel.objects.create(field=[])
obj = (
NullableIntegerArrayModel.objects.annotate(
empty_array=models.Value(
[], output_field=ArrayField(models.IntegerField())
),
)
.filter(field=models.F("empty_array"))
.get()
)
self.assertEqual(obj.field, [])
self.assertEqual(obj.empty_array, [])
def test_exact(self):
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.filter(field__exact=[1]), self.objs[:1]
)
def test_exact_null_only_array(self):
obj = NullableIntegerArrayModel.objects.create(
field=[None], field_nested=[None, None]
)
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.filter(field__exact=[None]), [obj]
)
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.filter(field_nested__exact=[None, None]),
[obj],
)
def test_exact_null_only_nested_array(self):
obj1 = NullableIntegerArrayModel.objects.create(field_nested=[[None, None]])
obj2 = NullableIntegerArrayModel.objects.create(
field_nested=[[None, None], [None, None]],
)
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.filter(
field_nested__exact=[[None, None]],
),
[obj1],
)
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.filter(
field_nested__exact=[[None, None], [None, None]],
),
[obj2],
)
def test_exact_with_expression(self):
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.filter(field__exact=[Value(1)]),
self.objs[:1],
)
def test_exact_charfield(self):
instance = CharArrayModel.objects.create(field=["text"])
self.assertSequenceEqual(
CharArrayModel.objects.filter(field=["text"]), [instance]
)
def test_exact_nested(self):
instance = NestedIntegerArrayModel.objects.create(field=[[1, 2], [3, 4]])
self.assertSequenceEqual(
NestedIntegerArrayModel.objects.filter(field=[[1, 2], [3, 4]]), [instance]
)
def test_isnull(self):
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.filter(field__isnull=True), self.objs[-1:]
)
def test_gt(self):
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.filter(field__gt=[0]), self.objs[:4]
)
def test_lt(self):
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.filter(field__lt=[2]), self.objs[:1]
)
def test_in(self):
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.filter(field__in=[[1], [2]]),
self.objs[:2],
)
def test_in_subquery(self):
IntegerArrayModel.objects.create(field=[2, 3])
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.filter(
field__in=IntegerArrayModel.objects.values_list("field", flat=True)
),
self.objs[2:3],
)
@unittest.expectedFailure
def test_in_including_F_object(self):
# This test asserts that Array objects passed to filters can be
# constructed to contain F objects. This currently doesn't work as the
# psycopg mogrify method that generates the ARRAY() syntax is
# expecting literals, not column references (#27095).
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.filter(field__in=[[models.F("id")]]),
self.objs[:2],
)
def test_in_as_F_object(self):
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.filter(field__in=[models.F("field")]),
self.objs[:4],
)
def test_contained_by(self):
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.filter(field__contained_by=[1, 2]),
self.objs[:2],
)
def test_contained_by_including_F_object(self):
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.filter(
field__contained_by=[models.F("order"), 2]
),
self.objs[:3],
)
def test_contains(self):
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.filter(field__contains=[2]),
self.objs[1:3],
)
def test_contains_subquery(self):
IntegerArrayModel.objects.create(field=[2, 3])
inner_qs = IntegerArrayModel.objects.values_list("field", flat=True)
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.filter(field__contains=inner_qs[:1]),
self.objs[2:3],
)
inner_qs = IntegerArrayModel.objects.filter(field__contains=OuterRef("field"))
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.filter(Exists(inner_qs)),
self.objs[1:3],
)
def test_contains_including_expression(self):
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.filter(
field__contains=[2, Value(6) / Value(2)],
),
self.objs[2:3],
)
def test_icontains(self):
# Using the __icontains lookup with ArrayField is inefficient.
instance = CharArrayModel.objects.create(field=["FoO"])
self.assertSequenceEqual(
CharArrayModel.objects.filter(field__icontains="foo"), [instance]
)
def test_contains_charfield(self):
# Regression for #22907
self.assertSequenceEqual(
CharArrayModel.objects.filter(field__contains=["text"]), []
)
def test_contained_by_charfield(self):
self.assertSequenceEqual(
CharArrayModel.objects.filter(field__contained_by=["text"]), []
)
def test_overlap_charfield(self):
self.assertSequenceEqual(
CharArrayModel.objects.filter(field__overlap=["text"]), []
)
def test_overlap_charfield_including_expression(self):
obj_1 = CharArrayModel.objects.create(field=["TEXT", "lower text"])
obj_2 = CharArrayModel.objects.create(field=["lower text", "TEXT"])
CharArrayModel.objects.create(field=["lower text", "text"])
self.assertSequenceEqual(
CharArrayModel.objects.filter(
field__overlap=[
Upper(Value("text")),
"other",
]
),
[obj_1, obj_2],
)
def test_overlap_values(self):
qs = NullableIntegerArrayModel.objects.filter(order__lt=3)
self.assertCountEqual(
NullableIntegerArrayModel.objects.filter(
field__overlap=qs.values_list("field"),
),
self.objs[:3],
)
self.assertCountEqual(
NullableIntegerArrayModel.objects.filter(
field__overlap=qs.values("field"),
),
self.objs[:3],
)
def test_lookups_autofield_array(self):
qs = (
NullableIntegerArrayModel.objects.filter(
field__0__isnull=False,
)
.values("field__0")
.annotate(
arrayagg=ArrayAgg("id"),
)
.order_by("field__0")
)
tests = (
("contained_by", [self.objs[1].pk, self.objs[2].pk, 0], [2]),
("contains", [self.objs[2].pk], [2]),
("exact", [self.objs[3].pk], [20]),
("overlap", [self.objs[1].pk, self.objs[3].pk], [2, 20]),
)
for lookup, value, expected in tests:
with self.subTest(lookup=lookup):
self.assertSequenceEqual(
qs.filter(
**{"arrayagg__" + lookup: value},
).values_list("field__0", flat=True),
expected,
)
@skipUnlessDBFeature("allows_group_by_select_index")
def test_group_by_order_by_select_index(self):
with self.assertNumQueries(1) as ctx:
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.filter(
field__0__isnull=False,
)
.values("field__0")
.annotate(arrayagg=ArrayAgg("id"))
.order_by("field__0"),
[
{"field__0": 1, "arrayagg": [self.objs[0].pk]},
{"field__0": 2, "arrayagg": [self.objs[1].pk, self.objs[2].pk]},
{"field__0": 20, "arrayagg": [self.objs[3].pk]},
],
)
sql = ctx[0]["sql"]
self.assertIn("GROUP BY 1", sql)
self.assertIn("ORDER BY 1", sql)
def test_order_by_arrayagg_index(self):
qs = (
NullableIntegerArrayModel.objects.values("order")
.annotate(ids=ArrayAgg("id"))
.order_by("-ids__0")
)
self.assertQuerySetEqual(
qs, [{"order": obj.order, "ids": [obj.id]} for obj in reversed(self.objs)]
)
def test_index(self):
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.filter(field__0=2), self.objs[1:3]
)
def test_index_chained(self):
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.filter(field__0__lt=3), self.objs[0:3]
)
def test_index_nested(self):
instance = NestedIntegerArrayModel.objects.create(field=[[1, 2], [3, 4]])
self.assertSequenceEqual(
NestedIntegerArrayModel.objects.filter(field__0__0=1), [instance]
)
@unittest.expectedFailure
def test_index_used_on_nested_data(self):
instance = NestedIntegerArrayModel.objects.create(field=[[1, 2], [3, 4]])
self.assertSequenceEqual(
NestedIntegerArrayModel.objects.filter(field__0=[1, 2]), [instance]
)
def test_index_transform_expression(self):
expr = RawSQL("string_to_array(%s, ';')", ["1;2"])
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.filter(
field__0=Cast(
IndexTransform(1, models.IntegerField, expr),
output_field=models.IntegerField(),
),
),
self.objs[:1],
)
def test_index_annotation(self):
qs = NullableIntegerArrayModel.objects.annotate(second=models.F("field__1"))
self.assertCountEqual(
qs.values_list("second", flat=True),
[None, None, None, 3, 30],
)
def test_overlap(self):
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.filter(field__overlap=[1, 2]),
self.objs[0:3],
)
def test_len(self):
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.filter(field__len__lte=2), self.objs[0:3]
)
def test_len_empty_array(self):
obj = NullableIntegerArrayModel.objects.create(field=[])
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.filter(field__len=0), [obj]
)
def test_slice(self):
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.filter(field__0_1=[2]), self.objs[1:3]
)
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.filter(field__0_2=[2, 3]), self.objs[2:3]
)
def test_order_by_index(self):
more_objs = (
NullableIntegerArrayModel.objects.create(field=[1, 637]),
NullableIntegerArrayModel.objects.create(field=[2, 1]),
NullableIntegerArrayModel.objects.create(field=[3, -98123]),
NullableIntegerArrayModel.objects.create(field=[4, 2]),
)
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.order_by("field__1"),
[
more_objs[2],
more_objs[1],
more_objs[3],
self.objs[2],
self.objs[3],
more_objs[0],
self.objs[4],
self.objs[1],
self.objs[0],
],
)
@unittest.expectedFailure
def test_slice_nested(self):
instance = NestedIntegerArrayModel.objects.create(field=[[1, 2], [3, 4]])
self.assertSequenceEqual(
NestedIntegerArrayModel.objects.filter(field__0__0_1=[1]), [instance]
)
def test_slice_transform_expression(self):
expr = RawSQL("string_to_array(%s, ';')", ["9;2;3"])
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.filter(
field__0_2=SliceTransform(2, 3, expr)
),
self.objs[2:3],
)
def test_slice_annotation(self):
qs = NullableIntegerArrayModel.objects.annotate(
first_two=models.F("field__0_2"),
)
self.assertCountEqual(
qs.values_list("first_two", flat=True),
[None, [1], [2], [2, 3], [20, 30]],
)
def test_slicing_of_f_expressions(self):
tests = [
(F("field")[:2], [1, 2]),
(F("field")[2:], [3, 4]),
(F("field")[1:3], [2, 3]),
(F("field")[3], [4]),
(F("field")[:3][1:], [2, 3]), # Nested slicing.
(F("field")[:3][1], [2]), # Slice then index.
]
for expression, expected in tests:
with self.subTest(expression=expression, expected=expected):
instance = IntegerArrayModel.objects.create(field=[1, 2, 3, 4])
instance.field = expression
instance.save()
instance.refresh_from_db()
self.assertEqual(instance.field, expected)
def test_slicing_of_f_expressions_with_annotate(self):
IntegerArrayModel.objects.create(field=[1, 2, 3])
annotated = IntegerArrayModel.objects.annotate(
first_two=F("field")[:2],
after_two=F("field")[2:],
random_two=F("field")[1:3],
).get()
self.assertEqual(annotated.first_two, [1, 2])
self.assertEqual(annotated.after_two, [3])
self.assertEqual(annotated.random_two, [2, 3])
def test_slicing_of_f_expressions_with_len(self):
queryset = NullableIntegerArrayModel.objects.annotate(
subarray=F("field")[:1]
).filter(field__len=F("subarray__len"))
self.assertSequenceEqual(queryset, self.objs[:2])
def test_usage_in_subquery(self):
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.filter(
id__in=NullableIntegerArrayModel.objects.filter(field__len=3)
),
[self.objs[3]],
)
def test_enum_lookup(self):
class TestEnum(enum.Enum):
VALUE_1 = "value_1"
instance = ArrayEnumModel.objects.create(array_of_enums=[TestEnum.VALUE_1])
self.assertSequenceEqual(
ArrayEnumModel.objects.filter(array_of_enums__contains=[TestEnum.VALUE_1]),
[instance],
)
def test_unsupported_lookup(self):
msg = (
"Unsupported lookup '0_bar' for ArrayField or join on the field not "
"permitted."
)
with self.assertRaisesMessage(FieldError, msg):
list(NullableIntegerArrayModel.objects.filter(field__0_bar=[2]))
msg = (
"Unsupported lookup '0bar' for ArrayField or join on the field not "
"permitted."
)
with self.assertRaisesMessage(FieldError, msg):
list(NullableIntegerArrayModel.objects.filter(field__0bar=[2]))
def test_grouping_by_annotations_with_array_field_param(self):
value = models.Value([1], output_field=ArrayField(models.IntegerField()))
self.assertEqual(
NullableIntegerArrayModel.objects.annotate(
array_length=models.Func(
value,
1,
function="ARRAY_LENGTH",
output_field=models.IntegerField(),
),
)
.values("array_length")
.annotate(
count=models.Count("pk"),
)
.get()["array_length"],
1,
)
def test_filter_by_array_subquery(self):
inner_qs = NullableIntegerArrayModel.objects.filter(
field__len=models.OuterRef("field__len"),
).values("field")
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.alias(
same_sized_fields=ArraySubquery(inner_qs),
).filter(same_sized_fields__len__gt=1),
self.objs[0:2],
)
def test_annotated_array_subquery(self):
inner_qs = NullableIntegerArrayModel.objects.exclude(
pk=models.OuterRef("pk")
).values("order")
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.annotate(
sibling_ids=ArraySubquery(inner_qs),
)
.get(order=1)
.sibling_ids,
[2, 3, 4, 5],
)
def test_group_by_with_annotated_array_subquery(self):
inner_qs = NullableIntegerArrayModel.objects.exclude(
pk=models.OuterRef("pk")
).values("order")
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.annotate(
sibling_ids=ArraySubquery(inner_qs),
sibling_count=models.Max("sibling_ids__len"),
).values_list("sibling_count", flat=True),
[len(self.objs) - 1] * len(self.objs),
)
def test_annotated_ordered_array_subquery(self):
inner_qs = NullableIntegerArrayModel.objects.order_by("-order").values("order")
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.annotate(
ids=ArraySubquery(inner_qs),
)
.first()
.ids,
[5, 4, 3, 2, 1],
)
def test_annotated_array_subquery_with_json_objects(self):
inner_qs = NullableIntegerArrayModel.objects.exclude(
pk=models.OuterRef("pk")
).values(json=JSONObject(order="order", field="field"))
siblings_json = (
NullableIntegerArrayModel.objects.annotate(
siblings_json=ArraySubquery(inner_qs),
)
.values_list("siblings_json", flat=True)
.get(order=1)
)
self.assertSequenceEqual(
siblings_json,
[
{"field": [2], "order": 2},
{"field": [2, 3], "order": 3},
{"field": [20, 30, 40], "order": 4},
{"field": None, "order": 5},
],
)
class TestDateTimeExactQuerying(PostgreSQLTestCase):
@classmethod
def setUpTestData(cls):
now = timezone.now()
cls.datetimes = [now]
cls.dates = [now.date()]
cls.times = [now.time()]
cls.objs = [
DateTimeArrayModel.objects.create(
datetimes=cls.datetimes, dates=cls.dates, times=cls.times
),
]
def test_exact_datetimes(self):
self.assertSequenceEqual(
DateTimeArrayModel.objects.filter(datetimes=self.datetimes), self.objs
)
def test_exact_dates(self):
self.assertSequenceEqual(
DateTimeArrayModel.objects.filter(dates=self.dates), self.objs
)
def test_exact_times(self):
self.assertSequenceEqual(
DateTimeArrayModel.objects.filter(times=self.times), self.objs
)
class TestOtherTypesExactQuerying(PostgreSQLTestCase):
@classmethod
def setUpTestData(cls):
cls.ips = ["192.168.0.1", "::1"]
cls.uuids = [uuid.uuid4()]
cls.decimals = [decimal.Decimal(1.25), 1.75]
cls.tags = [Tag(1), Tag(2), Tag(3)]
cls.objs = [
OtherTypesArrayModel.objects.create(
ips=cls.ips,
uuids=cls.uuids,
decimals=cls.decimals,
tags=cls.tags,
)
]
def test_exact_ip_addresses(self):
self.assertSequenceEqual(
OtherTypesArrayModel.objects.filter(ips=self.ips), self.objs
)
def test_exact_uuids(self):
self.assertSequenceEqual(
OtherTypesArrayModel.objects.filter(uuids=self.uuids), self.objs
)
def test_exact_decimals(self):
self.assertSequenceEqual(
OtherTypesArrayModel.objects.filter(decimals=self.decimals), self.objs
)
def test_exact_tags(self):
self.assertSequenceEqual(
OtherTypesArrayModel.objects.filter(tags=self.tags), self.objs
)
@isolate_apps("postgres_tests")
class TestChecks(PostgreSQLSimpleTestCase):
def test_field_checks(self):
class MyModel(PostgreSQLModel):
field = ArrayField(models.CharField(max_length=-1))
model = MyModel()
errors = model.check()
self.assertEqual(len(errors), 1)
# The inner CharField has a non-positive max_length.
self.assertEqual(errors[0].id, "postgres.E001")
self.assertIn("max_length", errors[0].msg)
def test_base_field_check_kwargs(self):
passed_kwargs = None
class MyField(models.Field):
def check(self, **kwargs):
nonlocal passed_kwargs
passed_kwargs = kwargs
return []
class MyModel(PostgreSQLModel):
field = ArrayField(MyField())
self.assertEqual(MyModel.check(databases=["default"]), [])
self.assertEqual(
passed_kwargs,
{"databases": ["default"]},
"ArrayField.check kwargs should be passed to its base_field.",
)
def test_invalid_base_fields(self):
class MyModel(PostgreSQLModel):
field = ArrayField(
models.ManyToManyField("postgres_tests.IntegerArrayModel")
)
model = MyModel()
errors = model.check()
self.assertEqual(len(errors), 1)
self.assertEqual(errors[0].id, "postgres.E002")
def test_invalid_default(self):
class MyModel(PostgreSQLModel):
field = ArrayField(models.IntegerField(), default=[])
model = MyModel()
self.assertEqual(
model.check(),
[
checks.Warning(
msg=(
"ArrayField default should be a callable instead of an "
"instance so that it's not shared between all field "
"instances."
),
hint="Use a callable instead, e.g., use `list` instead of `[]`.",
obj=MyModel._meta.get_field("field"),
id="fields.E010",
)
],
)
def test_valid_default(self):
class MyModel(PostgreSQLModel):
field = ArrayField(models.IntegerField(), default=list)
model = MyModel()
self.assertEqual(model.check(), [])
def test_valid_default_none(self):
class MyModel(PostgreSQLModel):
field = ArrayField(models.IntegerField(), default=None)
model = MyModel()
self.assertEqual(model.check(), [])
def test_nested_field_checks(self):
"""
Nested ArrayFields are permitted.
"""
class MyModel(PostgreSQLModel):
field = ArrayField(ArrayField(models.CharField(max_length=-1)))
model = MyModel()
errors = model.check()
self.assertEqual(len(errors), 1)
# The inner CharField has a non-positive max_length.
self.assertEqual(errors[0].id, "postgres.E001")
self.assertIn("max_length", errors[0].msg)
def test_choices_tuple_list(self):
class MyModel(PostgreSQLModel):
field = ArrayField(
models.CharField(max_length=16),
choices=[
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | true |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/postgres_tests/models.py | tests/postgres_tests/models.py | from django.db import models
from .fields import (
ArrayField,
BigIntegerRangeField,
DateRangeField,
DateTimeRangeField,
DecimalRangeField,
EnumField,
HStoreField,
IntegerRangeField,
OffByOneField,
SearchVectorField,
)
class Tag:
def __init__(self, tag_id):
self.tag_id = tag_id
def __eq__(self, other):
return isinstance(other, Tag) and self.tag_id == other.tag_id
class TagField(models.SmallIntegerField):
def from_db_value(self, value, expression, connection):
if value is None:
return value
return Tag(int(value))
def to_python(self, value):
if isinstance(value, Tag):
return value
if value is None:
return value
return Tag(int(value))
def get_prep_value(self, value):
return value.tag_id
class PostgreSQLModel(models.Model):
class Meta:
abstract = True
required_db_vendor = "postgresql"
class IntegerArrayModel(PostgreSQLModel):
field = ArrayField(models.BigIntegerField(), default=list, blank=True)
class NullableIntegerArrayModel(PostgreSQLModel):
field = ArrayField(models.BigIntegerField(), blank=True, null=True)
field_nested = ArrayField(ArrayField(models.BigIntegerField(null=True)), null=True)
order = models.IntegerField(null=True)
class CharArrayModel(PostgreSQLModel):
field = ArrayField(models.CharField(max_length=10))
class DateTimeArrayModel(PostgreSQLModel):
datetimes = ArrayField(models.DateTimeField())
dates = ArrayField(models.DateField())
times = ArrayField(models.TimeField())
class WithSizeArrayModel(PostgreSQLModel):
field = ArrayField(models.FloatField(), size=3)
class NestedIntegerArrayModel(PostgreSQLModel):
field = ArrayField(ArrayField(models.IntegerField()))
class OtherTypesArrayModel(PostgreSQLModel):
ips = ArrayField(models.GenericIPAddressField(), default=list)
uuids = ArrayField(models.UUIDField(), default=list)
decimals = ArrayField(
models.DecimalField(max_digits=5, decimal_places=2), default=list
)
tags = ArrayField(TagField(), blank=True, null=True)
json = ArrayField(models.JSONField(default=dict), default=list, null=True)
int_ranges = ArrayField(IntegerRangeField(), blank=True, null=True)
bigint_ranges = ArrayField(BigIntegerRangeField(), blank=True, null=True)
class HStoreModel(PostgreSQLModel):
field = HStoreField(blank=True, null=True)
array_field = ArrayField(HStoreField(), null=True)
class ArrayEnumModel(PostgreSQLModel):
array_of_enums = ArrayField(EnumField(max_length=20))
class CharFieldModel(models.Model):
field = models.CharField(max_length=64)
class TextFieldModel(models.Model):
field = models.TextField()
class SmallAutoFieldModel(models.Model):
id = models.SmallAutoField(primary_key=True)
class BigAutoFieldModel(models.Model):
    # Explicit BigAutoField primary key.
    id = models.BigAutoField(primary_key=True)
# Scene/Character/Line models are used to test full text search. They're
# populated with content from Monty Python and the Holy Grail.
class Scene(models.Model):
    # A scene's text and its stage setting (see full-text-search note above).
    scene = models.TextField()
    setting = models.CharField(max_length=255)
class Character(models.Model):
    # A speaker referenced by Line rows.
    name = models.CharField(max_length=255)
class Line(PostgreSQLModel):
    scene = models.ForeignKey("Scene", models.CASCADE)
    character = models.ForeignKey("Character", models.CASCADE)
    dialogue = models.TextField(blank=True, null=True)
    # Precomputed tsvector plus a per-row text search configuration name.
    dialogue_search_vector = SearchVectorField(blank=True, null=True)
    dialogue_config = models.CharField(max_length=100, blank=True, null=True)
class LineSavedSearch(PostgreSQLModel):
    # A stored search query associated with a Line.
    line = models.ForeignKey("Line", models.CASCADE)
    query = models.CharField(max_length=100)
class RangesModel(PostgreSQLModel):
    # `ints` carries a database-level default range of (5, 10).
    ints = IntegerRangeField(blank=True, null=True, db_default=(5, 10))
    bigints = BigIntegerRangeField(blank=True, null=True)
    decimals = DecimalRangeField(blank=True, null=True)
    timestamps = DateTimeRangeField(blank=True, null=True)
    timestamps_inner = DateTimeRangeField(blank=True, null=True)
    # Stored with closed "[]" bounds instead of the field's usual default.
    timestamps_closed_bounds = DateTimeRangeField(
        blank=True,
        null=True,
        default_bounds="[]",
    )
    dates = DateRangeField(blank=True, null=True)
    dates_inner = DateRangeField(blank=True, null=True)
class RangeLookupsModel(PostgreSQLModel):
    # Scalar columns of each numeric/temporal type, for lookups against ranges.
    parent = models.ForeignKey(RangesModel, models.SET_NULL, blank=True, null=True)
    integer = models.IntegerField(blank=True, null=True)
    big_integer = models.BigIntegerField(blank=True, null=True)
    float = models.FloatField(blank=True, null=True)
    timestamp = models.DateTimeField(blank=True, null=True)
    date = models.DateField(blank=True, null=True)
    small_integer = models.SmallIntegerField(blank=True, null=True)
    decimal_field = models.DecimalField(
        max_digits=5, decimal_places=2, blank=True, null=True
    )
class ArrayFieldSubclass(ArrayField):
    def __init__(self, *args, **kwargs):
        # NOTE(review): all arguments are deliberately discarded — the base
        # field is always IntegerField, presumably to exercise subclass
        # deconstruction; confirm against the migration tests before changing.
        super().__init__(models.IntegerField())
class AggregateTestModel(PostgreSQLModel):
    """
    To test postgres-specific general aggregation functions
    """

    char_field = models.CharField(max_length=30, blank=True)
    text_field = models.TextField(blank=True)
    # Nullable columns allow aggregates to be exercised against missing data.
    integer_field = models.IntegerField(null=True)
    boolean_field = models.BooleanField(null=True)
    json_field = models.JSONField(null=True)
class StatTestModel(PostgreSQLModel):
    """
    To test postgres-specific aggregation functions for statistics
    """

    # Two integer samples per row, plus a link to the aggregate test model.
    int1 = models.IntegerField()
    int2 = models.IntegerField()
    related_field = models.ForeignKey(AggregateTestModel, models.SET_NULL, null=True)
class NowTestModel(models.Model):
    # Nullable timestamp defaulting to None (not auto_now).
    when = models.DateTimeField(null=True, default=None)
class UUIDTestModel(models.Model):
    # Nullable UUID with no generated default.
    uuid = models.UUIDField(default=None, null=True)
class Room(models.Model):
    # Unique room number, referenced by HotelReservation.
    number = models.IntegerField(unique=True)
class HotelReservation(PostgreSQLModel):
    room = models.ForeignKey("Room", on_delete=models.CASCADE)
    # Both a date range and explicit start/end timestamps are stored.
    datespan = DateRangeField()
    start = models.DateTimeField()
    end = models.DateTimeField()
    cancelled = models.BooleanField(default=False)
    requirements = models.JSONField(blank=True, null=True)
class OffByOneModel(PostgreSQLModel):
    # Column backed by OffByOneField, whose SQL placeholder adds 1 on write.
    one_off = OffByOneField()
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/postgres_tests/fields.py | tests/postgres_tests/fields.py | """
Indirection layer for PostgreSQL-specific fields, so the tests don't fail when
run with a backend other than PostgreSQL.
"""
import enum
from django.db import models
try:
    from django.contrib.postgres.fields import (
        ArrayField,
        BigIntegerRangeField,
        DateRangeField,
        DateTimeRangeField,
        DecimalRangeField,
        HStoreField,
        IntegerRangeField,
    )
    from django.contrib.postgres.search import SearchVector, SearchVectorField
except ImportError:
    # The PostgreSQL-specific fields are unavailable (e.g. the database
    # driver is not installed): substitute inert stand-ins so the test models
    # still import. Their tests are skipped on non-PostgreSQL backends anyway.

    class DummyArrayField(models.Field):
        def __init__(self, base_field, size=None, **kwargs):
            # base_field and size are accepted but ignored.
            super().__init__(**kwargs)

        def deconstruct(self):
            # Report fixed placeholder values so deconstruction stays stable.
            name, path, args, kwargs = super().deconstruct()
            kwargs.update(
                {
                    "base_field": "",
                    "size": 1,
                }
            )
            return name, path, args, kwargs

    class DummyContinuousRangeField(models.Field):
        def __init__(self, *args, default_bounds="[)", **kwargs):
            # default_bounds is accepted but ignored.
            super().__init__(**kwargs)

        def deconstruct(self):
            # Always reports "[)" bounds, mirroring the real field's kwargs.
            name, path, args, kwargs = super().deconstruct()
            kwargs["default_bounds"] = "[)"
            return name, path, args, kwargs

    ArrayField = DummyArrayField
    BigIntegerRangeField = models.Field
    DateRangeField = models.Field
    DateTimeRangeField = DummyContinuousRangeField
    DecimalRangeField = DummyContinuousRangeField
    HStoreField = models.Field
    IntegerRangeField = models.Field
    SearchVector = models.Expression
    SearchVectorField = models.Field
class EnumField(models.CharField):
    def get_prep_value(self, value):
        """Store enum members by their ``.value``; pass other inputs through."""
        if isinstance(value, enum.Enum):
            return value.value
        return value
class OffByOneField(models.IntegerField):
    # The SQL placeholder adds 1, so the stored value is one greater than the
    # value assigned in Python.
    def get_placeholder(self, value, compiler, connection):
        return "(%s + 1)"
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/postgres_tests/__init__.py | tests/postgres_tests/__init__.py | import unittest
from forms_tests.widget_tests.base import WidgetTest
from django.db import connection
from django.test import SimpleTestCase, TestCase, modify_settings
from django.utils.functional import cached_property
@unittest.skipUnless(connection.vendor == "postgresql", "PostgreSQL specific tests")
# To register type handlers and locate the widget's template.
@modify_settings(INSTALLED_APPS={"append": "django.contrib.postgres"})
class PostgreSQLSimpleTestCase(SimpleTestCase):
    """SimpleTestCase that only runs against the PostgreSQL backend."""

    pass
@unittest.skipUnless(connection.vendor == "postgresql", "PostgreSQL specific tests")
# To register type handlers and locate the widget's template.
@modify_settings(INSTALLED_APPS={"append": "django.contrib.postgres"})
class PostgreSQLTestCase(TestCase):
    """TestCase that only runs against the PostgreSQL backend."""

    @cached_property
    def default_text_search_config(self):
        # Cached per instance: the server setting won't change mid-test.
        with connection.cursor() as cursor:
            cursor.execute("SHOW default_text_search_config")
            row = cursor.fetchone()
            return row[0] if row else None

    def check_default_text_search_config(self):
        # Skip tests that depend on the 'english' search configuration.
        if self.default_text_search_config != "pg_catalog.english":
            self.skipTest("The default text search config is not 'english'.")
@unittest.skipUnless(connection.vendor == "postgresql", "PostgreSQL specific tests")
# To locate the widget's template.
@modify_settings(INSTALLED_APPS={"append": "django.contrib.postgres"})
class PostgreSQLWidgetTestCase(WidgetTest, PostgreSQLSimpleTestCase):
    """Widget test base class restricted to the PostgreSQL backend."""

    pass
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/postgres_tests/test_indexes.py | tests/postgres_tests/test_indexes.py | from django.contrib.postgres.indexes import (
BloomIndex,
BrinIndex,
BTreeIndex,
GinIndex,
GistIndex,
HashIndex,
OpClass,
PostgresIndex,
SpGistIndex,
)
from django.db import connection
from django.db.models import CharField, F, Index, Q
from django.db.models.functions import Cast, Collate, Length, Lower
from django.test.utils import register_lookup
from . import PostgreSQLSimpleTestCase, PostgreSQLTestCase
from .fields import SearchVector, SearchVectorField
from .models import CharFieldModel, IntegerArrayModel, Scene, TextFieldModel
class IndexTestMixin:
    """Checks shared by every PostgreSQL index class (set ``index_class``)."""

    def test_name_auto_generation(self):
        idx = self.index_class(fields=["field"])
        idx.set_name_with_model(CharFieldModel)
        pattern = rf"postgres_te_field_[0-9a-f]{{6}}_{self.index_class.suffix}"
        self.assertRegex(idx.name, pattern)

    def test_deconstruction_no_customization(self):
        idx_name = f"test_title_{self.index_class.suffix}"
        idx = self.index_class(fields=["title"], name=idx_name)
        path, args, kwargs = idx.deconstruct()
        self.assertEqual(
            path, f"django.contrib.postgres.indexes.{self.index_class.__name__}"
        )
        self.assertEqual(args, ())
        self.assertEqual(kwargs, {"fields": ["title"], "name": idx_name})

    def test_deconstruction_with_expressions_no_customization(self):
        idx_name = f"test_title_{self.index_class.suffix}"
        idx = self.index_class(Lower("title"), name=idx_name)
        path, args, kwargs = idx.deconstruct()
        self.assertEqual(
            path, f"django.contrib.postgres.indexes.{self.index_class.__name__}"
        )
        self.assertEqual(args, (Lower("title"),))
        self.assertEqual(kwargs, {"name": idx_name})
class BloomIndexTests(IndexTestMixin, PostgreSQLSimpleTestCase):
    """Construction, deconstruction, and argument validation of BloomIndex."""

    index_class = BloomIndex

    def test_suffix(self):
        self.assertEqual(BloomIndex.suffix, "bloom")

    def test_deconstruction(self):
        index = BloomIndex(fields=["title"], name="test_bloom", length=80, columns=[4])
        path, args, kwargs = index.deconstruct()
        self.assertEqual(path, "django.contrib.postgres.indexes.BloomIndex")
        self.assertEqual(args, ())
        self.assertEqual(
            kwargs,
            {
                "fields": ["title"],
                "name": "test_bloom",
                "length": 80,
                "columns": [4],
            },
        )

    def test_invalid_fields(self):
        msg = "Bloom indexes support a maximum of 32 fields."
        with self.assertRaisesMessage(ValueError, msg):
            BloomIndex(fields=["title"] * 33, name="test_bloom")

    def test_invalid_columns(self):
        msg = "BloomIndex.columns must be a list or tuple."
        with self.assertRaisesMessage(ValueError, msg):
            BloomIndex(fields=["title"], name="test_bloom", columns="x")
        msg = "BloomIndex.columns cannot have more values than fields."
        with self.assertRaisesMessage(ValueError, msg):
            BloomIndex(fields=["title"], name="test_bloom", columns=[4, 3])

    def test_invalid_columns_value(self):
        # Boundary values just outside the accepted 1..4095 range.
        msg = "BloomIndex.columns must contain integers from 1 to 4095."
        for length in (0, 4096):
            with self.subTest(length), self.assertRaisesMessage(ValueError, msg):
                BloomIndex(fields=["title"], name="test_bloom", columns=[length])

    def test_invalid_length(self):
        # Boundary values just outside the accepted 1..4096 range.
        msg = "BloomIndex.length must be None or an integer from 1 to 4096."
        for length in (0, 4097):
            with self.subTest(length), self.assertRaisesMessage(ValueError, msg):
                BloomIndex(fields=["title"], name="test_bloom", length=length)
class BrinIndexTests(IndexTestMixin, PostgreSQLSimpleTestCase):
    """Construction, deconstruction, and argument validation of BrinIndex."""

    index_class = BrinIndex

    def test_suffix(self):
        self.assertEqual(BrinIndex.suffix, "brin")

    def test_deconstruction(self):
        index = BrinIndex(
            fields=["title"],
            name="test_title_brin",
            autosummarize=True,
            pages_per_range=16,
        )
        path, args, kwargs = index.deconstruct()
        self.assertEqual(path, "django.contrib.postgres.indexes.BrinIndex")
        self.assertEqual(args, ())
        self.assertEqual(
            kwargs,
            {
                "fields": ["title"],
                "name": "test_title_brin",
                "autosummarize": True,
                "pages_per_range": 16,
            },
        )

    def test_invalid_pages_per_range(self):
        with self.assertRaisesMessage(
            ValueError, "pages_per_range must be None or a positive integer"
        ):
            BrinIndex(fields=["title"], name="test_title_brin", pages_per_range=0)
class BTreeIndexTests(IndexTestMixin, PostgreSQLSimpleTestCase):
    """Construction and deconstruction of BTreeIndex, with/without options."""

    index_class = BTreeIndex

    def test_suffix(self):
        self.assertEqual(BTreeIndex.suffix, "btree")

    def test_deconstruction(self):
        # Without storage parameters.
        index = BTreeIndex(fields=["title"], name="test_title_btree")
        path, args, kwargs = index.deconstruct()
        self.assertEqual(path, "django.contrib.postgres.indexes.BTreeIndex")
        self.assertEqual(args, ())
        self.assertEqual(kwargs, {"fields": ["title"], "name": "test_title_btree"})

        # With fillfactor and deduplicate_items.
        index = BTreeIndex(
            fields=["title"],
            name="test_title_btree",
            fillfactor=80,
            deduplicate_items=False,
        )
        path, args, kwargs = index.deconstruct()
        self.assertEqual(path, "django.contrib.postgres.indexes.BTreeIndex")
        self.assertEqual(args, ())
        self.assertEqual(
            kwargs,
            {
                "fields": ["title"],
                "name": "test_title_btree",
                "fillfactor": 80,
                "deduplicate_items": False,
            },
        )
class GinIndexTests(IndexTestMixin, PostgreSQLSimpleTestCase):
    """Construction and deconstruction of GinIndex."""

    index_class = GinIndex

    def test_suffix(self):
        self.assertEqual(GinIndex.suffix, "gin")

    def test_deconstruction(self):
        index = GinIndex(
            fields=["title"],
            name="test_title_gin",
            fastupdate=True,
            gin_pending_list_limit=128,
        )
        path, args, kwargs = index.deconstruct()
        self.assertEqual(path, "django.contrib.postgres.indexes.GinIndex")
        self.assertEqual(args, ())
        self.assertEqual(
            kwargs,
            {
                "fields": ["title"],
                "name": "test_title_gin",
                "fastupdate": True,
                "gin_pending_list_limit": 128,
            },
        )
class GistIndexTests(IndexTestMixin, PostgreSQLSimpleTestCase):
    """Construction and deconstruction of GistIndex."""

    index_class = GistIndex

    def test_suffix(self):
        self.assertEqual(GistIndex.suffix, "gist")

    def test_deconstruction(self):
        index = GistIndex(
            fields=["title"], name="test_title_gist", buffering=False, fillfactor=80
        )
        path, args, kwargs = index.deconstruct()
        self.assertEqual(path, "django.contrib.postgres.indexes.GistIndex")
        self.assertEqual(args, ())
        self.assertEqual(
            kwargs,
            {
                "fields": ["title"],
                "name": "test_title_gist",
                "buffering": False,
                "fillfactor": 80,
            },
        )
class HashIndexTests(IndexTestMixin, PostgreSQLSimpleTestCase):
    """Construction and deconstruction of HashIndex."""

    index_class = HashIndex

    def test_suffix(self):
        self.assertEqual(HashIndex.suffix, "hash")

    def test_deconstruction(self):
        idx = HashIndex(fields=["title"], name="test_title_hash", fillfactor=80)
        path, args, kwargs = idx.deconstruct()
        expected_kwargs = {
            "fields": ["title"],
            "name": "test_title_hash",
            "fillfactor": 80,
        }
        self.assertEqual(path, "django.contrib.postgres.indexes.HashIndex")
        self.assertEqual(args, ())
        self.assertEqual(kwargs, expected_kwargs)
class SpGistIndexTests(IndexTestMixin, PostgreSQLSimpleTestCase):
    """Construction and deconstruction of SpGistIndex."""

    index_class = SpGistIndex

    def test_suffix(self):
        self.assertEqual(SpGistIndex.suffix, "spgist")

    def test_deconstruction(self):
        idx = SpGistIndex(fields=["title"], name="test_title_spgist", fillfactor=80)
        path, args, kwargs = idx.deconstruct()
        expected_kwargs = {
            "fields": ["title"],
            "name": "test_title_spgist",
            "fillfactor": 80,
        }
        self.assertEqual(path, "django.contrib.postgres.indexes.SpGistIndex")
        self.assertEqual(args, ())
        self.assertEqual(kwargs, expected_kwargs)
class SchemaTests(PostgreSQLTestCase):
    # Returns (opclass name, index relation name) rows for a given index name.
    get_opclass_query = """
        SELECT opcname, c.relname FROM pg_opclass AS oc
        JOIN pg_index as i on oc.oid = ANY(i.indclass)
        JOIN pg_class as c on c.oid = i.indexrelid
        WHERE c.relname = %s
    """

    def get_constraints(self, table):
        """
        Get the indexes on the table using a new cursor.
        """
        with connection.cursor() as cursor:
            return connection.introspection.get_constraints(cursor, table)
    def test_gin_index(self):
        """A GinIndex can be added to and removed from a model."""
        # Ensure the table is there and doesn't have an index.
        self.assertNotIn(
            "field", self.get_constraints(IntegerArrayModel._meta.db_table)
        )
        # Add the index
        index_name = "integer_array_model_field_gin"
        index = GinIndex(fields=["field"], name=index_name)
        with connection.schema_editor() as editor:
            editor.add_index(IntegerArrayModel, index)
        constraints = self.get_constraints(IntegerArrayModel._meta.db_table)
        # Check gin index was added
        self.assertEqual(constraints[index_name]["type"], GinIndex.suffix)
        # Drop the index
        with connection.schema_editor() as editor:
            editor.remove_index(IntegerArrayModel, index)
        self.assertNotIn(
            index_name, self.get_constraints(IntegerArrayModel._meta.db_table)
        )
    def test_gin_fastupdate(self):
        """fastupdate=False is stored and introspected as fastupdate=off."""
        index_name = "integer_array_gin_fastupdate"
        index = GinIndex(fields=["field"], name=index_name, fastupdate=False)
        with connection.schema_editor() as editor:
            editor.add_index(IntegerArrayModel, index)
        constraints = self.get_constraints(IntegerArrayModel._meta.db_table)
        self.assertEqual(constraints[index_name]["type"], "gin")
        self.assertEqual(constraints[index_name]["options"], ["fastupdate=off"])
        with connection.schema_editor() as editor:
            editor.remove_index(IntegerArrayModel, index)
        self.assertNotIn(
            index_name, self.get_constraints(IntegerArrayModel._meta.db_table)
        )
    def test_partial_gin_index(self):
        """A GinIndex with a condition (partial index) can be created/dropped."""
        with register_lookup(CharField, Length):
            index_name = "char_field_gin_partial_idx"
            index = GinIndex(
                fields=["field"], name=index_name, condition=Q(field__length=40)
            )
            with connection.schema_editor() as editor:
                editor.add_index(CharFieldModel, index)
            constraints = self.get_constraints(CharFieldModel._meta.db_table)
            self.assertEqual(constraints[index_name]["type"], "gin")
            with connection.schema_editor() as editor:
                editor.remove_index(CharFieldModel, index)
            self.assertNotIn(
                index_name, self.get_constraints(CharFieldModel._meta.db_table)
            )
    def test_partial_gin_index_with_tablespace(self):
        """A partial GinIndex emits its db_tablespace in CREATE INDEX SQL."""
        with register_lookup(CharField, Length):
            index_name = "char_field_gin_partial_idx"
            index = GinIndex(
                fields=["field"],
                name=index_name,
                condition=Q(field__length=40),
                db_tablespace="pg_default",
            )
            with connection.schema_editor() as editor:
                editor.add_index(CharFieldModel, index)
                self.assertIn(
                    'TABLESPACE "pg_default" ',
                    str(index.create_sql(CharFieldModel, editor)),
                )
            constraints = self.get_constraints(CharFieldModel._meta.db_table)
            self.assertEqual(constraints[index_name]["type"], "gin")
            with connection.schema_editor() as editor:
                editor.remove_index(CharFieldModel, index)
            self.assertNotIn(
                index_name, self.get_constraints(CharFieldModel._meta.db_table)
            )
    def test_gin_parameters(self):
        """GIN storage parameters appear in the SQL and in introspection."""
        index_name = "integer_array_gin_params"
        index = GinIndex(
            fields=["field"],
            name=index_name,
            fastupdate=True,
            gin_pending_list_limit=64,
            db_tablespace="pg_default",
        )
        with connection.schema_editor() as editor:
            editor.add_index(IntegerArrayModel, index)
            self.assertIn(
                ") WITH (gin_pending_list_limit = 64, fastupdate = on) TABLESPACE",
                str(index.create_sql(IntegerArrayModel, editor)),
            )
        constraints = self.get_constraints(IntegerArrayModel._meta.db_table)
        self.assertEqual(constraints[index_name]["type"], "gin")
        self.assertEqual(
            constraints[index_name]["options"],
            ["gin_pending_list_limit=64", "fastupdate=on"],
        )
        with connection.schema_editor() as editor:
            editor.remove_index(IntegerArrayModel, index)
        self.assertNotIn(
            index_name, self.get_constraints(IntegerArrayModel._meta.db_table)
        )
def test_trigram_op_class_gin_index(self):
index_name = "trigram_op_class_gin"
index = GinIndex(OpClass(F("scene"), name="gin_trgm_ops"), name=index_name)
with connection.schema_editor() as editor:
editor.add_index(Scene, index)
with editor.connection.cursor() as cursor:
cursor.execute(self.get_opclass_query, [index_name])
self.assertCountEqual(cursor.fetchall(), [("gin_trgm_ops", index_name)])
constraints = self.get_constraints(Scene._meta.db_table)
self.assertIn(index_name, constraints)
self.assertIn(constraints[index_name]["type"], GinIndex.suffix)
with connection.schema_editor() as editor:
editor.remove_index(Scene, index)
self.assertNotIn(index_name, self.get_constraints(Scene._meta.db_table))
def test_cast_search_vector_gin_index(self):
index_name = "cast_search_vector_gin"
index = GinIndex(Cast("field", SearchVectorField()), name=index_name)
with connection.schema_editor() as editor:
editor.add_index(TextFieldModel, index)
sql = index.create_sql(TextFieldModel, editor)
table = TextFieldModel._meta.db_table
constraints = self.get_constraints(table)
self.assertIn(index_name, constraints)
self.assertIn(constraints[index_name]["type"], GinIndex.suffix)
self.assertIs(sql.references_column(table, "field"), True)
self.assertIn("::tsvector", str(sql))
with connection.schema_editor() as editor:
editor.remove_index(TextFieldModel, index)
self.assertNotIn(index_name, self.get_constraints(table))
    def test_bloom_index(self):
        """A BloomIndex can be added to and removed from a model."""
        index_name = "char_field_model_field_bloom"
        index = BloomIndex(fields=["field"], name=index_name)
        with connection.schema_editor() as editor:
            editor.add_index(CharFieldModel, index)
        constraints = self.get_constraints(CharFieldModel._meta.db_table)
        self.assertEqual(constraints[index_name]["type"], BloomIndex.suffix)
        with connection.schema_editor() as editor:
            editor.remove_index(CharFieldModel, index)
        self.assertNotIn(
            index_name, self.get_constraints(CharFieldModel._meta.db_table)
        )
    def test_bloom_parameters(self):
        """length and per-column bit counts are stored as index options."""
        index_name = "char_field_model_field_bloom_params"
        index = BloomIndex(fields=["field"], name=index_name, length=512, columns=[3])
        with connection.schema_editor() as editor:
            editor.add_index(CharFieldModel, index)
        constraints = self.get_constraints(CharFieldModel._meta.db_table)
        self.assertEqual(constraints[index_name]["type"], BloomIndex.suffix)
        self.assertEqual(constraints[index_name]["options"], ["length=512", "col1=3"])
        with connection.schema_editor() as editor:
            editor.remove_index(CharFieldModel, index)
        self.assertNotIn(
            index_name, self.get_constraints(CharFieldModel._meta.db_table)
        )
    def test_brin_index(self):
        """A BrinIndex can be created with pages_per_range and then dropped."""
        index_name = "char_field_model_field_brin"
        index = BrinIndex(fields=["field"], name=index_name, pages_per_range=4)
        with connection.schema_editor() as editor:
            editor.add_index(CharFieldModel, index)
        constraints = self.get_constraints(CharFieldModel._meta.db_table)
        self.assertEqual(constraints[index_name]["type"], BrinIndex.suffix)
        self.assertEqual(constraints[index_name]["options"], ["pages_per_range=4"])
        with connection.schema_editor() as editor:
            editor.remove_index(CharFieldModel, index)
        self.assertNotIn(
            index_name, self.get_constraints(CharFieldModel._meta.db_table)
        )
    def test_brin_parameters(self):
        """autosummarize=True is stored and introspected as autosummarize=on."""
        index_name = "char_field_brin_params"
        index = BrinIndex(fields=["field"], name=index_name, autosummarize=True)
        with connection.schema_editor() as editor:
            editor.add_index(CharFieldModel, index)
        constraints = self.get_constraints(CharFieldModel._meta.db_table)
        self.assertEqual(constraints[index_name]["type"], BrinIndex.suffix)
        self.assertEqual(constraints[index_name]["options"], ["autosummarize=on"])
        with connection.schema_editor() as editor:
            editor.remove_index(CharFieldModel, index)
        self.assertNotIn(
            index_name, self.get_constraints(CharFieldModel._meta.db_table)
        )
    def test_btree_index(self):
        """A BTreeIndex can be added to and removed from a model."""
        # Ensure the table is there and doesn't have an index.
        self.assertNotIn("field", self.get_constraints(CharFieldModel._meta.db_table))
        # Add the index.
        index_name = "char_field_model_field_btree"
        index = BTreeIndex(fields=["field"], name=index_name)
        with connection.schema_editor() as editor:
            editor.add_index(CharFieldModel, index)
        constraints = self.get_constraints(CharFieldModel._meta.db_table)
        # The index was added.
        self.assertEqual(constraints[index_name]["type"], BTreeIndex.suffix)
        # Drop the index.
        with connection.schema_editor() as editor:
            editor.remove_index(CharFieldModel, index)
        self.assertNotIn(
            index_name, self.get_constraints(CharFieldModel._meta.db_table)
        )
    def test_btree_parameters(self):
        """fillfactor and deduplicate_items are stored as index options."""
        index_name = "integer_array_btree_parameters"
        index = BTreeIndex(
            fields=["field"], name=index_name, fillfactor=80, deduplicate_items=False
        )
        with connection.schema_editor() as editor:
            editor.add_index(CharFieldModel, index)
        constraints = self.get_constraints(CharFieldModel._meta.db_table)
        self.assertEqual(constraints[index_name]["type"], BTreeIndex.suffix)
        self.assertEqual(
            constraints[index_name]["options"],
            ["fillfactor=80", "deduplicate_items=off"],
        )
        with connection.schema_editor() as editor:
            editor.remove_index(CharFieldModel, index)
        self.assertNotIn(
            index_name, self.get_constraints(CharFieldModel._meta.db_table)
        )
    def test_gist_index(self):
        """A GistIndex can be added to and removed from a model."""
        # Ensure the table is there and doesn't have an index.
        self.assertNotIn("field", self.get_constraints(CharFieldModel._meta.db_table))
        # Add the index.
        index_name = "char_field_model_field_gist"
        index = GistIndex(fields=["field"], name=index_name)
        with connection.schema_editor() as editor:
            editor.add_index(CharFieldModel, index)
        constraints = self.get_constraints(CharFieldModel._meta.db_table)
        # The index was added.
        self.assertEqual(constraints[index_name]["type"], GistIndex.suffix)
        # Drop the index.
        with connection.schema_editor() as editor:
            editor.remove_index(CharFieldModel, index)
        self.assertNotIn(
            index_name, self.get_constraints(CharFieldModel._meta.db_table)
        )
    def test_gist_parameters(self):
        """buffering and fillfactor are stored as index options."""
        index_name = "integer_array_gist_buffering"
        index = GistIndex(
            fields=["field"], name=index_name, buffering=True, fillfactor=80
        )
        with connection.schema_editor() as editor:
            editor.add_index(CharFieldModel, index)
        constraints = self.get_constraints(CharFieldModel._meta.db_table)
        self.assertEqual(constraints[index_name]["type"], GistIndex.suffix)
        self.assertEqual(
            constraints[index_name]["options"], ["buffering=on", "fillfactor=80"]
        )
        with connection.schema_editor() as editor:
            editor.remove_index(CharFieldModel, index)
        self.assertNotIn(
            index_name, self.get_constraints(CharFieldModel._meta.db_table)
        )
    def test_gist_include(self):
        """INCLUDE columns show up in the introspected column list."""
        index_name = "scene_gist_include_setting"
        index = GistIndex(name=index_name, fields=["scene"], include=["setting"])
        with connection.schema_editor() as editor:
            editor.add_index(Scene, index)
        constraints = self.get_constraints(Scene._meta.db_table)
        self.assertIn(index_name, constraints)
        self.assertEqual(constraints[index_name]["type"], GistIndex.suffix)
        self.assertEqual(constraints[index_name]["columns"], ["scene", "setting"])
        with connection.schema_editor() as editor:
            editor.remove_index(Scene, index)
        self.assertNotIn(index_name, self.get_constraints(Scene._meta.db_table))
def test_tsvector_op_class_gist_index(self):
index_name = "tsvector_op_class_gist"
index = GistIndex(
OpClass(
SearchVector("scene", "setting", config="english"),
name="tsvector_ops",
),
name=index_name,
)
with connection.schema_editor() as editor:
editor.add_index(Scene, index)
sql = index.create_sql(Scene, editor)
table = Scene._meta.db_table
constraints = self.get_constraints(table)
self.assertIn(index_name, constraints)
self.assertIn(constraints[index_name]["type"], GistIndex.suffix)
self.assertIs(sql.references_column(table, "scene"), True)
self.assertIs(sql.references_column(table, "setting"), True)
with connection.schema_editor() as editor:
editor.remove_index(Scene, index)
self.assertNotIn(index_name, self.get_constraints(table))
    def test_search_vector(self):
        """SearchVector generates IMMUTABLE SQL in order to be indexable."""
        index_name = "test_search_vector"
        # Includes a non-text column ("id") that SearchVector must cast.
        index = Index(SearchVector("id", "scene", config="english"), name=index_name)
        # Indexed function must be IMMUTABLE.
        with connection.schema_editor() as editor:
            editor.add_index(Scene, index)
        constraints = self.get_constraints(Scene._meta.db_table)
        self.assertIn(index_name, constraints)
        self.assertIs(constraints[index_name]["index"], True)
        with connection.schema_editor() as editor:
            editor.remove_index(Scene, index)
        self.assertNotIn(index_name, self.get_constraints(Scene._meta.db_table))
    def test_hash_index(self):
        """A HashIndex can be added to and removed from a model."""
        # Ensure the table is there and doesn't have an index.
        self.assertNotIn("field", self.get_constraints(CharFieldModel._meta.db_table))
        # Add the index.
        index_name = "char_field_model_field_hash"
        index = HashIndex(fields=["field"], name=index_name)
        with connection.schema_editor() as editor:
            editor.add_index(CharFieldModel, index)
        constraints = self.get_constraints(CharFieldModel._meta.db_table)
        # The index was added.
        self.assertEqual(constraints[index_name]["type"], HashIndex.suffix)
        # Drop the index.
        with connection.schema_editor() as editor:
            editor.remove_index(CharFieldModel, index)
        self.assertNotIn(
            index_name, self.get_constraints(CharFieldModel._meta.db_table)
        )
    def test_hash_parameters(self):
        """fillfactor is stored as a hash index option."""
        index_name = "integer_array_hash_fillfactor"
        index = HashIndex(fields=["field"], name=index_name, fillfactor=80)
        with connection.schema_editor() as editor:
            editor.add_index(CharFieldModel, index)
        constraints = self.get_constraints(CharFieldModel._meta.db_table)
        self.assertEqual(constraints[index_name]["type"], HashIndex.suffix)
        self.assertEqual(constraints[index_name]["options"], ["fillfactor=80"])
        with connection.schema_editor() as editor:
            editor.remove_index(CharFieldModel, index)
        self.assertNotIn(
            index_name, self.get_constraints(CharFieldModel._meta.db_table)
        )
    def test_spgist_index(self):
        """An SpGistIndex can be added to and removed from a model."""
        # Ensure the table is there and doesn't have an index.
        self.assertNotIn("field", self.get_constraints(TextFieldModel._meta.db_table))
        # Add the index.
        index_name = "text_field_model_field_spgist"
        index = SpGistIndex(fields=["field"], name=index_name)
        with connection.schema_editor() as editor:
            editor.add_index(TextFieldModel, index)
        constraints = self.get_constraints(TextFieldModel._meta.db_table)
        # The index was added.
        self.assertEqual(constraints[index_name]["type"], SpGistIndex.suffix)
        # Drop the index.
        with connection.schema_editor() as editor:
            editor.remove_index(TextFieldModel, index)
        self.assertNotIn(
            index_name, self.get_constraints(TextFieldModel._meta.db_table)
        )
    def test_spgist_parameters(self):
        """fillfactor is stored as an SP-GiST index option."""
        index_name = "text_field_model_spgist_fillfactor"
        index = SpGistIndex(fields=["field"], name=index_name, fillfactor=80)
        with connection.schema_editor() as editor:
            editor.add_index(TextFieldModel, index)
        constraints = self.get_constraints(TextFieldModel._meta.db_table)
        self.assertEqual(constraints[index_name]["type"], SpGistIndex.suffix)
        self.assertEqual(constraints[index_name]["options"], ["fillfactor=80"])
        with connection.schema_editor() as editor:
            editor.remove_index(TextFieldModel, index)
        self.assertNotIn(
            index_name, self.get_constraints(TextFieldModel._meta.db_table)
        )
    def test_spgist_include(self):
        """INCLUDE columns show up in the introspected column list."""
        index_name = "scene_spgist_include_setting"
        index = SpGistIndex(name=index_name, fields=["scene"], include=["setting"])
        with connection.schema_editor() as editor:
            editor.add_index(Scene, index)
        constraints = self.get_constraints(Scene._meta.db_table)
        self.assertIn(index_name, constraints)
        self.assertEqual(constraints[index_name]["type"], SpGistIndex.suffix)
        self.assertEqual(constraints[index_name]["columns"], ["scene", "setting"])
        with connection.schema_editor() as editor:
            editor.remove_index(Scene, index)
        self.assertNotIn(index_name, self.get_constraints(Scene._meta.db_table))
    def test_custom_suffix(self):
        """A PostgresIndex subclass can set its own suffix and access method."""

        class CustomSuffixIndex(PostgresIndex):
            suffix = "sfx"

            def create_sql(self, model, schema_editor, using="gin", **kwargs):
                return super().create_sql(model, schema_editor, using=using, **kwargs)

        index = CustomSuffixIndex(fields=["field"], name="custom_suffix_idx")
        self.assertEqual(index.suffix, "sfx")
        with connection.schema_editor() as editor:
            self.assertIn(
                " USING gin ",
                str(index.create_sql(CharFieldModel, editor)),
            )
    def test_custom_sql(self):
        """Overridden sql_create_index/sql_delete_index templates are used."""

        class CustomSQLIndex(PostgresIndex):
            sql_create_index = "SELECT 1"
            sql_delete_index = "SELECT 2"

            def create_sql(self, model, schema_editor, using="", **kwargs):
                kwargs.setdefault("sql", self.sql_create_index)
                return super().create_sql(model, schema_editor, using, **kwargs)

            def remove_sql(self, model, schema_editor, **kwargs):
                kwargs.setdefault("sql", self.sql_delete_index)
                return super().remove_sql(model, schema_editor, **kwargs)

        index = CustomSQLIndex(fields=["field"], name="custom_sql_idx")
        operations = [
            (index.create_sql, CustomSQLIndex.sql_create_index),
            (index.remove_sql, CustomSQLIndex.sql_delete_index),
        ]
        for operation, expected in operations:
            with self.subTest(operation=operation.__name__):
                with connection.schema_editor() as editor:
                    self.assertEqual(expected, str(operation(CharFieldModel, editor)))
    def test_op_class(self):
        """OpClass attaches the named operator class to an expression index."""
        index_name = "test_op_class"
        index = Index(
            OpClass(Lower("field"), name="text_pattern_ops"),
            name=index_name,
        )
        with connection.schema_editor() as editor:
            editor.add_index(TextFieldModel, index)
            with editor.connection.cursor() as cursor:
                cursor.execute(self.get_opclass_query, [index_name])
                self.assertCountEqual(
                    cursor.fetchall(), [("text_pattern_ops", index_name)]
                )
    def test_op_class_descending_collation(self):
        """OpClass composes with Collate and a descending ordering."""
        collation = connection.features.test_collations.get("non_default")
        if not collation:
            self.skipTest("This backend does not support case-insensitive collations.")
        index_name = "test_op_class_descending_collation"
        index = Index(
            Collate(
                OpClass(Lower("field"), name="text_pattern_ops").desc(nulls_last=True),
                collation=collation,
            ),
            name=index_name,
        )
        with connection.schema_editor() as editor:
            editor.add_index(TextFieldModel, index)
            self.assertIn(
                "COLLATE %s" % editor.quote_name(collation),
                str(index.create_sql(TextFieldModel, editor)),
            )
            with editor.connection.cursor() as cursor:
                cursor.execute(self.get_opclass_query, [index_name])
                self.assertCountEqual(
                    cursor.fetchall(), [("text_pattern_ops", index_name)]
                )
        table = TextFieldModel._meta.db_table
        constraints = self.get_constraints(table)
        self.assertIn(index_name, constraints)
        self.assertEqual(constraints[index_name]["orders"], ["DESC"])
        with connection.schema_editor() as editor:
            editor.remove_index(TextFieldModel, index)
        self.assertNotIn(index_name, self.get_constraints(table))
def test_op_class_descending_partial(self):
index_name = "test_op_class_descending_partial"
index = Index(
OpClass(Lower("field"), name="text_pattern_ops").desc(),
name=index_name,
condition=Q(field__contains="China"),
)
with connection.schema_editor() as editor:
editor.add_index(TextFieldModel, index)
with editor.connection.cursor() as cursor:
cursor.execute(self.get_opclass_query, [index_name])
self.assertCountEqual(cursor.fetchall(), [("text_pattern_ops", index_name)])
constraints = self.get_constraints(TextFieldModel._meta.db_table)
self.assertIn(index_name, constraints)
self.assertEqual(constraints[index_name]["orders"], ["DESC"])
def test_op_class_descending_partial_tablespace(self):
index_name = "test_op_class_descending_partial_tablespace"
index = Index(
OpClass(Lower("field").desc(), name="text_pattern_ops"),
name=index_name,
condition=Q(field__contains="China"),
db_tablespace="pg_default",
)
with connection.schema_editor() as editor:
editor.add_index(TextFieldModel, index)
self.assertIn(
'TABLESPACE "pg_default" ',
str(index.create_sql(TextFieldModel, editor)),
)
with editor.connection.cursor() as cursor:
cursor.execute(self.get_opclass_query, [index_name])
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | true |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/postgres_tests/test_apps.py | tests/postgres_tests/test_apps.py | import unittest
from decimal import Decimal
from django.db import connection
from django.db.backends.signals import connection_created
from django.db.migrations.writer import MigrationWriter
from django.test import TestCase
from django.test.utils import CaptureQueriesContext, modify_settings, override_settings
try:
from django.contrib.postgres.fields import (
DateRangeField,
DateTimeRangeField,
DecimalRangeField,
IntegerRangeField,
)
from django.contrib.postgres.signals import get_hstore_oids
from django.db.backends.postgresql.psycopg_any import (
DateRange,
DateTimeRange,
DateTimeTZRange,
NumericRange,
is_psycopg3,
)
except ImportError:
pass
@unittest.skipUnless(connection.vendor == "postgresql", "PostgreSQL specific tests")
class PostgresConfigTests(TestCase):
def test_install_app_no_warning(self):
# Clear cache to force queries when (re)initializing the
# "django.contrib.postgres" app.
get_hstore_oids.cache_clear()
with CaptureQueriesContext(connection) as captured_queries:
with override_settings(INSTALLED_APPS=["django.contrib.postgres"]):
pass
self.assertGreaterEqual(len(captured_queries), 1)
def test_register_type_handlers_connection(self):
from django.contrib.postgres.signals import register_type_handlers
self.assertNotIn(
register_type_handlers, connection_created._live_receivers(None)[0]
)
with modify_settings(INSTALLED_APPS={"append": "django.contrib.postgres"}):
self.assertIn(
register_type_handlers, connection_created._live_receivers(None)[0]
)
self.assertNotIn(
register_type_handlers, connection_created._live_receivers(None)[0]
)
def test_register_serializer_for_migrations(self):
tests = (
(DateRange(empty=True), DateRangeField),
(DateTimeRange(empty=True), DateRangeField),
(DateTimeTZRange(None, None, "[]"), DateTimeRangeField),
(NumericRange(Decimal("1.0"), Decimal("5.0"), "()"), DecimalRangeField),
(NumericRange(1, 10), IntegerRangeField),
)
def assertNotSerializable():
for default, test_field in tests:
with self.subTest(default=default):
field = test_field(default=default)
with self.assertRaisesMessage(
ValueError, "Cannot serialize: %s" % default.__class__.__name__
):
MigrationWriter.serialize(field)
assertNotSerializable()
import_name = "psycopg.types.range" if is_psycopg3 else "psycopg2.extras"
with self.modify_settings(INSTALLED_APPS={"append": "django.contrib.postgres"}):
for default, test_field in tests:
with self.subTest(default=default):
field = test_field(default=default)
serialized_field, imports = MigrationWriter.serialize(field)
self.assertEqual(
imports,
{
"import django.contrib.postgres.fields.ranges",
f"import {import_name}",
},
)
self.assertIn(
f"{field.__module__}.{field.__class__.__name__}"
f"(default={import_name}.{default!r})",
serialized_field,
)
assertNotSerializable()
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/postgres_tests/test_search.py | tests/postgres_tests/test_search.py | """
Test PostgreSQL full text search.
These tests use dialogue from the 1975 film Monty Python and the Holy Grail.
All text copyright Python (Monty) Pictures. Thanks to sacred-texts.com for the
transcript.
"""
from django.db import connection
from django.db.models import F, Value
from . import PostgreSQLSimpleTestCase, PostgreSQLTestCase
from .models import Character, Line, LineSavedSearch, Scene
try:
from django.contrib.postgres.search import (
Lexeme,
SearchConfig,
SearchHeadline,
SearchQuery,
SearchRank,
SearchVector,
quote_lexeme,
)
except ImportError:
pass
class GrailTestData:
@classmethod
def setUpTestData(cls):
cls.robin = Scene.objects.create(
scene="Scene 10", setting="The dark forest of Ewing"
)
cls.minstrel = Character.objects.create(name="Minstrel")
verses = [
(
"Bravely bold Sir Robin, rode forth from Camelot. "
"He was not afraid to die, o Brave Sir Robin. "
"He was not at all afraid to be killed in nasty ways. "
"Brave, brave, brave, brave Sir Robin"
),
(
"He was not in the least bit scared to be mashed into a pulp, "
"Or to have his eyes gouged out, and his elbows broken. "
"To have his kneecaps split, and his body burned away, "
"And his limbs all hacked and mangled, brave Sir Robin!"
),
(
"His head smashed in and his heart cut out, "
"And his liver removed and his bowels unplugged, "
"And his nostrils ripped and his bottom burned off,"
"And his --"
),
]
cls.verses = [
Line.objects.create(
scene=cls.robin,
character=cls.minstrel,
dialogue=verse,
)
for verse in verses
]
cls.verse0, cls.verse1, cls.verse2 = cls.verses
cls.witch_scene = Scene.objects.create(
scene="Scene 5", setting="Sir Bedemir's Castle"
)
bedemir = Character.objects.create(name="Bedemir")
crowd = Character.objects.create(name="Crowd")
witch = Character.objects.create(name="Witch")
duck = Character.objects.create(name="Duck")
cls.bedemir0 = Line.objects.create(
scene=cls.witch_scene,
character=bedemir,
dialogue="We shall use my larger scales!",
dialogue_config="english",
)
cls.bedemir1 = Line.objects.create(
scene=cls.witch_scene,
character=bedemir,
dialogue="Right, remove the supports!",
dialogue_config="english",
)
cls.duck = Line.objects.create(
scene=cls.witch_scene, character=duck, dialogue=None
)
cls.crowd = Line.objects.create(
scene=cls.witch_scene, character=crowd, dialogue="A witch! A witch!"
)
cls.witch = Line.objects.create(
scene=cls.witch_scene, character=witch, dialogue="It's a fair cop."
)
trojan_rabbit = Scene.objects.create(
scene="Scene 8", setting="The castle of Our Master Ruiz' de lu la Ramper"
)
guards = Character.objects.create(name="French Guards")
cls.french = Line.objects.create(
scene=trojan_rabbit,
character=guards,
dialogue="Oh. Un beau cadeau. Oui oui.",
dialogue_config="french",
)
class SimpleSearchTest(GrailTestData, PostgreSQLTestCase):
def test_simple(self):
searched = Line.objects.filter(dialogue__search="elbows")
self.assertSequenceEqual(searched, [self.verse1])
def test_non_exact_match(self):
self.check_default_text_search_config()
searched = Line.objects.filter(dialogue__search="hearts")
self.assertSequenceEqual(searched, [self.verse2])
def test_search_two_terms(self):
self.check_default_text_search_config()
searched = Line.objects.filter(dialogue__search="heart bowel")
self.assertSequenceEqual(searched, [self.verse2])
def test_search_two_terms_with_partial_match(self):
searched = Line.objects.filter(dialogue__search="Robin killed")
self.assertSequenceEqual(searched, [self.verse0])
def test_search_query_config(self):
searched = Line.objects.filter(
dialogue__search=SearchQuery("nostrils", config="simple"),
)
self.assertSequenceEqual(searched, [self.verse2])
def test_search_with_F_expression(self):
# Non-matching query.
LineSavedSearch.objects.create(line=self.verse1, query="hearts")
# Matching query.
match = LineSavedSearch.objects.create(line=self.verse1, query="elbows")
for query_expression in [F("query"), SearchQuery(F("query"))]:
with self.subTest(query_expression):
searched = LineSavedSearch.objects.filter(
line__dialogue__search=query_expression,
)
self.assertSequenceEqual(searched, [match])
class SearchVectorFieldTest(GrailTestData, PostgreSQLTestCase):
def test_existing_vector(self):
Line.objects.update(dialogue_search_vector=SearchVector("dialogue"))
searched = Line.objects.filter(
dialogue_search_vector=SearchQuery("Robin killed")
)
self.assertSequenceEqual(searched, [self.verse0])
def test_existing_vector_config_explicit(self):
Line.objects.update(dialogue_search_vector=SearchVector("dialogue"))
searched = Line.objects.filter(
dialogue_search_vector=SearchQuery("cadeaux", config="french")
)
self.assertSequenceEqual(searched, [self.french])
def test_single_coalesce_expression(self):
searched = Line.objects.annotate(search=SearchVector("dialogue")).filter(
search="cadeaux"
)
self.assertNotIn("COALESCE(COALESCE", str(searched.query))
def test_values_with_percent(self):
searched = Line.objects.annotate(
search=SearchVector(Value("This week everything is 10% off"))
).filter(search="10 % off")
self.assertEqual(len(searched), 9)
class SearchConfigTests(PostgreSQLSimpleTestCase):
def test_from_parameter(self):
self.assertIsNone(SearchConfig.from_parameter(None))
self.assertEqual(SearchConfig.from_parameter("foo"), SearchConfig("foo"))
self.assertEqual(
SearchConfig.from_parameter(SearchConfig("bar")), SearchConfig("bar")
)
class MultipleFieldsTest(GrailTestData, PostgreSQLTestCase):
def test_simple_on_dialogue(self):
searched = Line.objects.annotate(
search=SearchVector("scene__setting", "dialogue"),
).filter(search="elbows")
self.assertSequenceEqual(searched, [self.verse1])
def test_simple_on_scene(self):
searched = Line.objects.annotate(
search=SearchVector("scene__setting", "dialogue"),
).filter(search="Forest")
self.assertCountEqual(searched, self.verses)
def test_non_exact_match(self):
searched = Line.objects.annotate(
search=SearchVector("scene__setting", "dialogue"),
).filter(search="heart")
self.assertSequenceEqual(searched, [self.verse2])
def test_search_two_terms(self):
searched = Line.objects.annotate(
search=SearchVector("scene__setting", "dialogue"),
).filter(search="heart forest")
self.assertSequenceEqual(searched, [self.verse2])
def test_terms_adjacent(self):
searched = Line.objects.annotate(
search=SearchVector("character__name", "dialogue"),
).filter(search="minstrel")
self.assertCountEqual(searched, self.verses)
searched = Line.objects.annotate(
search=SearchVector("scene__setting", "dialogue"),
).filter(search="minstrelbravely")
self.assertSequenceEqual(searched, [])
def test_search_with_null(self):
searched = Line.objects.annotate(
search=SearchVector("scene__setting", "dialogue"),
).filter(search="bedemir")
self.assertCountEqual(
searched, [self.bedemir0, self.bedemir1, self.crowd, self.witch, self.duck]
)
def test_search_with_non_text(self):
searched = Line.objects.annotate(
search=SearchVector("id"),
).filter(search=str(self.crowd.id))
self.assertSequenceEqual(searched, [self.crowd])
def test_phrase_search(self):
line_qs = Line.objects.annotate(search=SearchVector("dialogue"))
searched = line_qs.filter(
search=SearchQuery("burned body his away", search_type="phrase")
)
self.assertSequenceEqual(searched, [])
searched = line_qs.filter(
search=SearchQuery("his body burned away", search_type="phrase")
)
self.assertSequenceEqual(searched, [self.verse1])
def test_phrase_search_with_config(self):
line_qs = Line.objects.annotate(
search=SearchVector("scene__setting", "dialogue", config="french"),
)
searched = line_qs.filter(
search=SearchQuery("cadeau beau un", search_type="phrase", config="french"),
)
self.assertSequenceEqual(searched, [])
searched = line_qs.filter(
search=SearchQuery("un beau cadeau", search_type="phrase", config="french"),
)
self.assertSequenceEqual(searched, [self.french])
def test_raw_search(self):
line_qs = Line.objects.annotate(search=SearchVector("dialogue"))
searched = line_qs.filter(search=SearchQuery("Robin", search_type="raw"))
self.assertCountEqual(searched, [self.verse0, self.verse1])
searched = line_qs.filter(
search=SearchQuery("Robin & !'Camelot'", search_type="raw")
)
self.assertSequenceEqual(searched, [self.verse1])
def test_raw_search_with_config(self):
line_qs = Line.objects.annotate(
search=SearchVector("dialogue", config="french")
)
searched = line_qs.filter(
search=SearchQuery(
"'cadeaux' & 'beaux'", search_type="raw", config="french"
),
)
self.assertSequenceEqual(searched, [self.french])
def test_web_search(self):
line_qs = Line.objects.annotate(search=SearchVector("dialogue"))
searched = line_qs.filter(
search=SearchQuery(
'"burned body" "split kneecaps"',
search_type="websearch",
),
)
self.assertSequenceEqual(searched, [])
searched = line_qs.filter(
search=SearchQuery(
'"body burned" "kneecaps split" -"nostrils"',
search_type="websearch",
),
)
self.assertSequenceEqual(searched, [self.verse1])
searched = line_qs.filter(
search=SearchQuery(
'"Sir Robin" ("kneecaps" OR "Camelot")',
search_type="websearch",
),
)
self.assertSequenceEqual(searched, [self.verse0, self.verse1])
def test_web_search_with_config(self):
line_qs = Line.objects.annotate(
search=SearchVector("scene__setting", "dialogue", config="french"),
)
searched = line_qs.filter(
search=SearchQuery(
"cadeau -beau", search_type="websearch", config="french"
),
)
self.assertSequenceEqual(searched, [])
searched = line_qs.filter(
search=SearchQuery("beau cadeau", search_type="websearch", config="french"),
)
self.assertSequenceEqual(searched, [self.french])
def test_bad_search_type(self):
with self.assertRaisesMessage(
ValueError, "Unknown search_type argument 'foo'."
):
SearchQuery("kneecaps", search_type="foo")
def test_config_query_explicit(self):
searched = Line.objects.annotate(
search=SearchVector("scene__setting", "dialogue", config="french"),
).filter(search=SearchQuery("cadeaux", config="french"))
self.assertSequenceEqual(searched, [self.french])
def test_config_query_implicit(self):
searched = Line.objects.annotate(
search=SearchVector("scene__setting", "dialogue", config="french"),
).filter(search="cadeaux")
self.assertSequenceEqual(searched, [self.french])
def test_config_from_field_explicit(self):
searched = Line.objects.annotate(
search=SearchVector(
"scene__setting", "dialogue", config=F("dialogue_config")
),
).filter(search=SearchQuery("cadeaux", config=F("dialogue_config")))
self.assertSequenceEqual(searched, [self.french])
def test_config_from_field_implicit(self):
searched = Line.objects.annotate(
search=SearchVector(
"scene__setting", "dialogue", config=F("dialogue_config")
),
).filter(search="cadeaux")
self.assertSequenceEqual(searched, [self.french])
class TestCombinations(GrailTestData, PostgreSQLTestCase):
def test_vector_add(self):
searched = Line.objects.annotate(
search=SearchVector("scene__setting") + SearchVector("character__name"),
).filter(search="bedemir")
self.assertCountEqual(
searched, [self.bedemir0, self.bedemir1, self.crowd, self.witch, self.duck]
)
def test_vector_add_multi(self):
searched = Line.objects.annotate(
search=(
SearchVector("scene__setting")
+ SearchVector("character__name")
+ SearchVector("dialogue")
),
).filter(search="bedemir")
self.assertCountEqual(
searched, [self.bedemir0, self.bedemir1, self.crowd, self.witch, self.duck]
)
def test_vector_combined_mismatch(self):
msg = (
"SearchVector can only be combined with other SearchVector "
"instances, got NoneType."
)
with self.assertRaisesMessage(TypeError, msg):
Line.objects.filter(dialogue__search=None + SearchVector("character__name"))
def test_combine_different_vector_configs(self):
self.check_default_text_search_config()
searched = Line.objects.annotate(
search=(
SearchVector("dialogue", config="english")
+ SearchVector("dialogue", config="french")
),
).filter(
search=SearchQuery("cadeaux", config="french") | SearchQuery("nostrils")
)
self.assertCountEqual(searched, [self.french, self.verse2])
def test_query_and(self):
searched = Line.objects.annotate(
search=SearchVector("scene__setting", "dialogue"),
).filter(search=SearchQuery("bedemir") & SearchQuery("scales"))
self.assertSequenceEqual(searched, [self.bedemir0])
def test_query_multiple_and(self):
searched = Line.objects.annotate(
search=SearchVector("scene__setting", "dialogue"),
).filter(
search=SearchQuery("bedemir")
& SearchQuery("scales")
& SearchQuery("nostrils")
)
self.assertSequenceEqual(searched, [])
searched = Line.objects.annotate(
search=SearchVector("scene__setting", "dialogue"),
).filter(
search=SearchQuery("shall") & SearchQuery("use") & SearchQuery("larger")
)
self.assertSequenceEqual(searched, [self.bedemir0])
def test_query_or(self):
searched = Line.objects.filter(
dialogue__search=SearchQuery("kneecaps") | SearchQuery("nostrils")
)
self.assertCountEqual(searched, [self.verse1, self.verse2])
def test_query_multiple_or(self):
searched = Line.objects.filter(
dialogue__search=SearchQuery("kneecaps")
| SearchQuery("nostrils")
| SearchQuery("Sir Robin")
)
self.assertCountEqual(searched, [self.verse1, self.verse2, self.verse0])
def test_query_invert(self):
searched = Line.objects.filter(
character=self.minstrel, dialogue__search=~SearchQuery("kneecaps")
)
self.assertCountEqual(searched, [self.verse0, self.verse2])
def test_combine_different_configs(self):
searched = Line.objects.filter(
dialogue__search=(
SearchQuery("cadeau", config="french")
| SearchQuery("nostrils", config="english")
)
)
self.assertCountEqual(searched, [self.french, self.verse2])
def test_combined_configs(self):
searched = Line.objects.filter(
dialogue__search=(
SearchQuery("nostrils", config="simple")
& SearchQuery("bowels", config="simple")
),
)
self.assertSequenceEqual(searched, [self.verse2])
def test_combine_raw_phrase(self):
self.check_default_text_search_config()
searched = Line.objects.filter(
dialogue__search=(
SearchQuery("burn:*", search_type="raw", config="simple")
| SearchQuery("rode forth from Camelot", search_type="phrase")
)
)
self.assertCountEqual(searched, [self.verse0, self.verse1, self.verse2])
def test_query_combined_mismatch(self):
msg = (
"SearchQuery can only be combined with other SearchQuery "
"instances, got NoneType."
)
with self.assertRaisesMessage(TypeError, msg):
Line.objects.filter(dialogue__search=None | SearchQuery("kneecaps"))
with self.assertRaisesMessage(TypeError, msg):
Line.objects.filter(dialogue__search=None & SearchQuery("kneecaps"))
class TestRankingAndWeights(GrailTestData, PostgreSQLTestCase):
def test_ranking(self):
searched = (
Line.objects.filter(character=self.minstrel)
.annotate(
rank=SearchRank(
SearchVector("dialogue"), SearchQuery("brave sir robin")
),
)
.order_by("rank")
)
self.assertSequenceEqual(searched, [self.verse2, self.verse1, self.verse0])
def test_rank_passing_untyped_args(self):
searched = (
Line.objects.filter(character=self.minstrel)
.annotate(
rank=SearchRank("dialogue", "brave sir robin"),
)
.order_by("rank")
)
self.assertSequenceEqual(searched, [self.verse2, self.verse1, self.verse0])
def test_weights_in_vector(self):
vector = SearchVector("dialogue", weight="A") + SearchVector(
"character__name", weight="D"
)
searched = (
Line.objects.filter(scene=self.witch_scene)
.annotate(
rank=SearchRank(vector, SearchQuery("witch")),
)
.order_by("-rank")[:2]
)
self.assertSequenceEqual(searched, [self.crowd, self.witch])
vector = SearchVector("dialogue", weight="D") + SearchVector(
"character__name", weight="A"
)
searched = (
Line.objects.filter(scene=self.witch_scene)
.annotate(
rank=SearchRank(vector, SearchQuery("witch")),
)
.order_by("-rank")[:2]
)
self.assertSequenceEqual(searched, [self.witch, self.crowd])
def test_ranked_custom_weights(self):
vector = SearchVector("dialogue", weight="D") + SearchVector(
"character__name", weight="A"
)
weights = [1.0, 0.0, 0.0, 0.5]
searched = (
Line.objects.filter(scene=self.witch_scene)
.annotate(
rank=SearchRank(vector, SearchQuery("witch"), weights=weights),
)
.order_by("-rank")[:2]
)
self.assertSequenceEqual(searched, [self.crowd, self.witch])
def test_ranking_chaining(self):
searched = (
Line.objects.filter(character=self.minstrel)
.annotate(
rank=SearchRank(
SearchVector("dialogue"), SearchQuery("brave sir robin")
),
)
.filter(rank__gt=0.3)
)
self.assertSequenceEqual(searched, [self.verse0])
def test_cover_density_ranking(self):
not_dense_verse = Line.objects.create(
scene=self.robin,
character=self.minstrel,
dialogue=(
"Bravely taking to his feet, he beat a very brave retreat. "
"A brave retreat brave Sir Robin."
),
)
searched = (
Line.objects.filter(character=self.minstrel)
.annotate(
rank=SearchRank(
SearchVector("dialogue"),
SearchQuery("brave robin"),
cover_density=True,
),
)
.order_by("rank", "-pk")
)
self.assertSequenceEqual(
searched,
[self.verse2, not_dense_verse, self.verse1, self.verse0],
)
def test_ranking_with_normalization(self):
short_verse = Line.objects.create(
scene=self.robin,
character=self.minstrel,
dialogue="A brave retreat brave Sir Robin.",
)
searched = (
Line.objects.filter(character=self.minstrel)
.annotate(
rank=SearchRank(
SearchVector("dialogue"),
SearchQuery("brave sir robin"),
# Divide the rank by the document length.
normalization=2,
),
)
.order_by("rank")
)
self.assertSequenceEqual(
searched,
[self.verse2, self.verse1, self.verse0, short_verse],
)
def test_ranking_with_masked_normalization(self):
short_verse = Line.objects.create(
scene=self.robin,
character=self.minstrel,
dialogue="A brave retreat brave Sir Robin.",
)
searched = (
Line.objects.filter(character=self.minstrel)
.annotate(
rank=SearchRank(
SearchVector("dialogue"),
SearchQuery("brave sir robin"),
# Divide the rank by the document length and by the number
# of unique words in document.
normalization=Value(2).bitor(Value(8)),
),
)
.order_by("rank")
)
self.assertSequenceEqual(
searched,
[self.verse2, self.verse1, self.verse0, short_verse],
)
class SearchQueryTests(PostgreSQLSimpleTestCase):
def test_str(self):
tests = (
(~SearchQuery("a"), "~SearchQuery(Value('a'))"),
(
(SearchQuery("a") | SearchQuery("b"))
& (SearchQuery("c") | SearchQuery("d")),
"((SearchQuery(Value('a')) || SearchQuery(Value('b'))) && "
"(SearchQuery(Value('c')) || SearchQuery(Value('d'))))",
),
(
SearchQuery("a") & (SearchQuery("b") | SearchQuery("c")),
"(SearchQuery(Value('a')) && (SearchQuery(Value('b')) || "
"SearchQuery(Value('c'))))",
),
(
(SearchQuery("a") | SearchQuery("b")) & SearchQuery("c"),
"((SearchQuery(Value('a')) || SearchQuery(Value('b'))) && "
"SearchQuery(Value('c')))",
),
(
SearchQuery("a")
& (SearchQuery("b") & (SearchQuery("c") | SearchQuery("d"))),
"(SearchQuery(Value('a')) && (SearchQuery(Value('b')) && "
"(SearchQuery(Value('c')) || SearchQuery(Value('d')))))",
),
)
for query, expected_str in tests:
with self.subTest(query=query):
self.assertEqual(str(query), expected_str)
class SearchHeadlineTests(GrailTestData, PostgreSQLTestCase):
def test_headline(self):
self.check_default_text_search_config()
searched = Line.objects.annotate(
headline=SearchHeadline(
F("dialogue"),
SearchQuery("brave sir robin"),
config=SearchConfig("english"),
),
).get(pk=self.verse0.pk)
self.assertEqual(
searched.headline,
"<b>Robin</b>. He was not at all afraid to be killed in nasty "
"ways. <b>Brave</b>, <b>brave</b>, <b>brave</b>, <b>brave</b> "
"<b>Sir</b> <b>Robin</b>",
)
def test_headline_untyped_args(self):
self.check_default_text_search_config()
searched = Line.objects.annotate(
headline=SearchHeadline("dialogue", "killed", config="english"),
).get(pk=self.verse0.pk)
self.assertEqual(
searched.headline,
"Robin. He was not at all afraid to be <b>killed</b> in nasty "
"ways. Brave, brave, brave, brave Sir Robin",
)
def test_headline_with_config(self):
searched = Line.objects.annotate(
headline=SearchHeadline(
"dialogue",
SearchQuery("cadeaux", config="french"),
config="french",
),
).get(pk=self.french.pk)
self.assertEqual(
searched.headline,
"Oh. Un beau <b>cadeau</b>. Oui oui.",
)
def test_headline_with_config_from_field(self):
searched = Line.objects.annotate(
headline=SearchHeadline(
"dialogue",
SearchQuery("cadeaux", config=F("dialogue_config")),
config=F("dialogue_config"),
),
).get(pk=self.french.pk)
self.assertEqual(
searched.headline,
"Oh. Un beau <b>cadeau</b>. Oui oui.",
)
def test_headline_separator_options(self):
searched = Line.objects.annotate(
headline=SearchHeadline(
"dialogue",
"brave sir robin",
start_sel="<span>",
stop_sel="</span>",
),
).get(pk=self.verse0.pk)
self.assertEqual(
searched.headline,
"<span>Robin</span>. He was not at all afraid to be killed in "
"nasty ways. <span>Brave</span>, <span>brave</span>, <span>brave"
"</span>, <span>brave</span> <span>Sir</span> <span>Robin</span>",
)
def test_headline_highlight_all_option(self):
self.check_default_text_search_config()
searched = Line.objects.annotate(
headline=SearchHeadline(
"dialogue",
SearchQuery("brave sir robin", config="english"),
highlight_all=True,
),
).get(pk=self.verse0.pk)
self.assertIn(
"<b>Bravely</b> bold <b>Sir</b> <b>Robin</b>, rode forth from "
"Camelot. He was not afraid to die, o ",
searched.headline,
)
def test_headline_short_word_option(self):
self.check_default_text_search_config()
searched = Line.objects.annotate(
headline=SearchHeadline(
"dialogue",
SearchQuery("Camelot", config="english"),
short_word=5,
min_words=8,
),
).get(pk=self.verse0.pk)
self.assertEqual(
searched.headline,
(
"<b>Camelot</b>. He was not afraid to die, o Brave Sir Robin. He "
"was not at all afraid"
),
)
def test_headline_fragments_words_options(self):
self.check_default_text_search_config()
searched = Line.objects.annotate(
headline=SearchHeadline(
"dialogue",
SearchQuery("brave sir robin", config="english"),
fragment_delimiter="...<br>",
max_fragments=4,
max_words=3,
min_words=1,
),
).get(pk=self.verse0.pk)
self.assertEqual(
searched.headline,
"<b>Sir</b> <b>Robin</b>, rode...<br>"
"<b>Brave</b> <b>Sir</b> <b>Robin</b>...<br>"
"<b>Brave</b>, <b>brave</b>, <b>brave</b>...<br>"
"<b>brave</b> <b>Sir</b> <b>Robin</b>",
)
class TestLexemes(GrailTestData, PostgreSQLTestCase):
def test_and(self):
searched = Line.objects.annotate(
search=SearchVector("scene__setting", "dialogue"),
).filter(search=SearchQuery(Lexeme("bedemir") & Lexeme("scales")))
self.assertSequenceEqual(searched, [self.bedemir0])
def test_multiple_and(self):
searched = Line.objects.annotate(
search=SearchVector("scene__setting", "dialogue"),
).filter(
search=SearchQuery(
Lexeme("bedemir") & Lexeme("scales") & Lexeme("nostrils")
)
)
self.assertSequenceEqual(searched, [])
searched = Line.objects.annotate(
search=SearchVector("scene__setting", "dialogue"),
).filter(search=SearchQuery(Lexeme("shall") & Lexeme("use") & Lexeme("larger")))
self.assertSequenceEqual(searched, [self.bedemir0])
def test_or(self):
searched = Line.objects.annotate(search=SearchVector("dialogue")).filter(
search=SearchQuery(Lexeme("kneecaps") | Lexeme("nostrils"))
)
self.assertCountEqual(searched, [self.verse1, self.verse2])
def test_multiple_or(self):
searched = Line.objects.annotate(search=SearchVector("dialogue")).filter(
search=SearchQuery(
Lexeme("kneecaps") | Lexeme("nostrils") | Lexeme("Sir Robin")
)
)
self.assertCountEqual(searched, [self.verse1, self.verse2, self.verse0])
def test_advanced(self):
"""
Combination of & and |
This is mainly helpful for checking the test_advanced_invert below
"""
searched = Line.objects.annotate(search=SearchVector("dialogue")).filter(
search=SearchQuery(
Lexeme("shall") & Lexeme("use") & Lexeme("larger") | Lexeme("nostrils")
)
)
self.assertCountEqual(searched, [self.bedemir0, self.verse2])
def test_invert(self):
searched = Line.objects.annotate(search=SearchVector("dialogue")).filter(
character=self.minstrel, search=SearchQuery(~Lexeme("kneecaps"))
)
self.assertCountEqual(searched, [self.verse0, self.verse2])
def test_advanced_invert(self):
"""
Inverting a query that uses a combination of & and |
should return the opposite of test_advanced.
"""
searched = Line.objects.annotate(search=SearchVector("dialogue")).filter(
search=SearchQuery(
~(
Lexeme("shall") & Lexeme("use") & Lexeme("larger")
| Lexeme("nostrils")
)
)
)
expected_result = Line.objects.exclude(
id__in=[self.bedemir0.id, self.verse2.id]
)
self.assertCountEqual(searched, expected_result)
def test_as_sql(self):
query = Line.objects.all().query
compiler = query.get_compiler(connection.alias)
tests = (
(Lexeme("a"), ("'a'",)),
(Lexeme("a", invert=True), ("!'a'",)),
(~Lexeme("a"), ("!'a'",)),
(Lexeme("a", prefix=True), ("'a':*",)),
(Lexeme("a", weight="D"), ("'a':D",)),
(Lexeme("a", invert=True, prefix=True, weight="D"), ("!'a':*D",)),
(Lexeme("a") | Lexeme("b") & ~Lexeme("c"), ("('a' | ('b' & !'c'))",)),
(
~(Lexeme("a") | Lexeme("b") & ~Lexeme("c")),
("(!'a' & (!'b' | 'c'))",),
),
)
for expression, expected_params in tests:
with self.subTest(expression=expression, expected_params=expected_params):
_, params = expression.as_sql(compiler, connection)
self.assertEqual(params, expected_params)
def test_quote_lexeme(self):
tests = (
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | true |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/postgres_tests/test_signals.py | tests/postgres_tests/test_signals.py | from django.db import connection
from . import PostgreSQLTestCase
try:
from django.contrib.postgres.signals import (
get_citext_oids,
get_hstore_oids,
register_type_handlers,
)
except ImportError:
pass # psycopg isn't installed.
class OIDTests(PostgreSQLTestCase):
def assertOIDs(self, oids):
self.assertIsInstance(oids, tuple)
self.assertGreater(len(oids), 0)
self.assertTrue(all(isinstance(oid, int) for oid in oids))
def test_hstore_cache(self):
get_hstore_oids(connection.alias)
with self.assertNumQueries(0):
get_hstore_oids(connection.alias)
def test_citext_cache(self):
get_citext_oids(connection.alias)
with self.assertNumQueries(0):
get_citext_oids(connection.alias)
def test_hstore_values(self):
oids, array_oids = get_hstore_oids(connection.alias)
self.assertOIDs(oids)
self.assertOIDs(array_oids)
def test_citext_values(self):
oids, citext_oids = get_citext_oids(connection.alias)
self.assertOIDs(oids)
self.assertOIDs(citext_oids)
def test_register_type_handlers_no_db(self):
"""Registering type handlers for the nodb connection does nothing."""
with connection._nodb_cursor() as cursor:
register_type_handlers(cursor.db)
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/postgres_tests/test_ranges.py | tests/postgres_tests/test_ranges.py | import datetime
import json
from decimal import Decimal
from django import forms
from django.core import exceptions, serializers
from django.db.models import DateField, DateTimeField, F, Func, Value
from django.http import QueryDict
from django.test import override_settings
from django.test.utils import isolate_apps
from django.utils import timezone
from . import PostgreSQLSimpleTestCase, PostgreSQLTestCase
from .models import (
BigAutoFieldModel,
PostgreSQLModel,
RangeLookupsModel,
RangesModel,
SmallAutoFieldModel,
)
try:
from django.contrib.postgres import fields as pg_fields
from django.contrib.postgres import forms as pg_forms
from django.contrib.postgres.validators import (
RangeMaxValueValidator,
RangeMinValueValidator,
)
from django.db.backends.postgresql.psycopg_any import (
DateRange,
DateTimeTZRange,
NumericRange,
)
except ImportError:
pass
@isolate_apps("postgres_tests")
class BasicTests(PostgreSQLSimpleTestCase):
    """Database-free checks of range-field options (choices, default_bounds)."""

    def test_get_field_display(self):
        """get_FOO_display() resolves grouped choices; unknown values echo back."""
        class Model(PostgreSQLModel):
            field = pg_fields.IntegerRangeField(
                choices=[
                    ["1-50", [((1, 25), "1-25"), ([26, 50], "26-50")]],
                    ((51, 100), "51-100"),
                ],
            )
        # (value, expected display); the last two are not in choices, so the
        # stringified value itself is expected.
        tests = (
            ((1, 25), "1-25"),
            ([26, 50], "26-50"),
            ((51, 100), "51-100"),
            ((1, 2), "(1, 2)"),
            ([1, 2], "[1, 2]"),
        )
        for value, display in tests:
            with self.subTest(value=value, display=display):
                instance = Model(field=value)
                self.assertEqual(instance.get_field_display(), display)
    def test_discrete_range_fields_unsupported_default_bounds(self):
        """Discrete range fields reject the default_bounds option."""
        discrete_range_types = [
            pg_fields.BigIntegerRangeField,
            pg_fields.IntegerRangeField,
            pg_fields.DateRangeField,
        ]
        for field_type in discrete_range_types:
            msg = f"Cannot use 'default_bounds' with {field_type.__name__}."
            with self.assertRaisesMessage(TypeError, msg):
                field_type(choices=[((51, 100), "51-100")], default_bounds="[]")
    def test_continuous_range_fields_default_bounds(self):
        """Continuous range fields accept and store default_bounds."""
        continuous_range_types = [
            pg_fields.DecimalRangeField,
            pg_fields.DateTimeRangeField,
        ]
        for field_type in continuous_range_types:
            field = field_type(choices=[((51, 100), "51-100")], default_bounds="[]")
            self.assertEqual(field.default_bounds, "[]")
    def test_invalid_default_bounds(self):
        """Anything but the four canonical bound strings raises ValueError."""
        tests = [")]", ")[", "](", "])", "([", "[(", "x", "", None]
        msg = "default_bounds must be one of '[)', '(]', '()', or '[]'."
        for invalid_bounds in tests:
            with self.assertRaisesMessage(ValueError, msg):
                pg_fields.DecimalRangeField(default_bounds=invalid_bounds)
    def test_deconstruct(self):
        """deconstruct() only emits default_bounds when it was set."""
        field = pg_fields.DecimalRangeField()
        *_, kwargs = field.deconstruct()
        self.assertEqual(kwargs, {})
        field = pg_fields.DecimalRangeField(default_bounds="[]")
        *_, kwargs = field.deconstruct()
        self.assertEqual(kwargs, {"default_bounds": "[]"})
class TestSaveLoad(PostgreSQLTestCase):
    """Round-trip tests: range values written via save() come back intact."""

    def test_all_fields(self):
        """Every range field type survives a save/reload cycle unchanged."""
        now = timezone.now()
        instance = RangesModel(
            ints=NumericRange(0, 10),
            bigints=NumericRange(10, 20),
            decimals=NumericRange(20, 30),
            timestamps=DateTimeTZRange(now - datetime.timedelta(hours=1), now),
            dates=DateRange(now.date() - datetime.timedelta(days=1), now.date()),
        )
        instance.save()
        loaded = RangesModel.objects.get()
        self.assertEqual(instance.ints, loaded.ints)
        self.assertEqual(instance.bigints, loaded.bigints)
        self.assertEqual(instance.decimals, loaded.decimals)
        self.assertEqual(instance.timestamps, loaded.timestamps)
        self.assertEqual(instance.dates, loaded.dates)
    def test_range_object(self):
        """A NumericRange instance round-trips as an equal value."""
        r = NumericRange(0, 10)
        instance = RangesModel(ints=r)
        instance.save()
        loaded = RangesModel.objects.get()
        self.assertEqual(r, loaded.ints)
    def test_tuple(self):
        """A plain 2-tuple is accepted and loads back as NumericRange(0, 10)."""
        instance = RangesModel(ints=(0, 10))
        instance.save()
        loaded = RangesModel.objects.get()
        self.assertEqual(NumericRange(0, 10), loaded.ints)
    def test_tuple_range_with_default_bounds(self):
        """Tuple input picks up each field's default bounds ('[]' vs '[)')."""
        range_ = (timezone.now(), timezone.now() + datetime.timedelta(hours=1))
        RangesModel.objects.create(timestamps_closed_bounds=range_, timestamps=range_)
        loaded = RangesModel.objects.get()
        self.assertEqual(
            loaded.timestamps_closed_bounds,
            DateTimeTZRange(range_[0], range_[1], "[]"),
        )
        self.assertEqual(
            loaded.timestamps,
            DateTimeTZRange(range_[0], range_[1], "[)"),
        )
    def test_range_object_boundaries(self):
        """Explicit '[]' bounds are preserved; the upper bound is included."""
        r = NumericRange(0, 10, "[]")
        instance = RangesModel(decimals=r)
        instance.save()
        loaded = RangesModel.objects.get()
        self.assertEqual(r, loaded.decimals)
        self.assertIn(10, loaded.decimals)
    def test_range_object_boundaries_range_with_default_bounds(self):
        """Explicit bounds on a range object override the field's default."""
        range_ = DateTimeTZRange(
            timezone.now(),
            timezone.now() + datetime.timedelta(hours=1),
            bounds="()",
        )
        RangesModel.objects.create(timestamps_closed_bounds=range_)
        loaded = RangesModel.objects.get()
        self.assertEqual(loaded.timestamps_closed_bounds, range_)
    def test_unbounded(self):
        """A range open at both ends round-trips."""
        r = NumericRange(None, None, "()")
        instance = RangesModel(decimals=r)
        instance.save()
        loaded = RangesModel.objects.get()
        self.assertEqual(r, loaded.decimals)
    def test_empty(self):
        """The special empty range round-trips."""
        r = NumericRange(empty=True)
        instance = RangesModel(ints=r)
        instance.save()
        loaded = RangesModel.objects.get()
        self.assertEqual(r, loaded.ints)
    def test_null(self):
        """None is stored as SQL NULL and loads back as None."""
        instance = RangesModel(ints=None)
        instance.save()
        loaded = RangesModel.objects.get()
        self.assertIsNone(loaded.ints)
    def test_model_set_on_base_field(self):
        """Both the range field and its base_field report the owning model."""
        instance = RangesModel()
        field = instance._meta.get_field("ints")
        self.assertEqual(field.model, RangesModel)
        self.assertEqual(field.base_field.model, RangesModel)
class TestRangeContainsLookup(PostgreSQLTestCase):
    """The __contains lookup on date/datetime range fields, with scalar,
    tuple, expression, and F() right-hand sides, for both naive and aware
    timestamps."""

    @classmethod
    def setUpTestData(cls):
        cls.timestamps = [
            datetime.datetime(year=2016, month=1, day=1),
            datetime.datetime(year=2016, month=1, day=2, hour=1),
            datetime.datetime(year=2016, month=1, day=2, hour=12),
            datetime.datetime(year=2016, month=1, day=3),
            datetime.datetime(year=2016, month=1, day=3, hour=1),
            datetime.datetime(year=2016, month=2, day=2),
        ]
        cls.aware_timestamps = [
            timezone.make_aware(timestamp) for timestamp in cls.timestamps
        ]
        cls.dates = [
            datetime.date(year=2016, month=1, day=1),
            datetime.date(year=2016, month=1, day=2),
            datetime.date(year=2016, month=1, day=3),
            datetime.date(year=2016, month=1, day=4),
            datetime.date(year=2016, month=2, day=2),
            datetime.date(year=2016, month=2, day=3),
        ]
        # Two matching objects: one with naive, one with aware timestamps.
        # The *_inner ranges sit strictly inside the outer ranges so F()
        # right-hand sides match as well.
        cls.obj = RangesModel.objects.create(
            dates=(cls.dates[0], cls.dates[3]),
            dates_inner=(cls.dates[1], cls.dates[2]),
            timestamps=(cls.timestamps[0], cls.timestamps[3]),
            timestamps_inner=(cls.timestamps[1], cls.timestamps[2]),
        )
        cls.aware_obj = RangesModel.objects.create(
            dates=(cls.dates[0], cls.dates[3]),
            dates_inner=(cls.dates[1], cls.dates[2]),
            timestamps=(cls.aware_timestamps[0], cls.aware_timestamps[3]),
            timestamps_inner=(cls.timestamps[1], cls.timestamps[2]),
        )
        # Objects that don't match any queries.
        for i in range(3, 4):
            RangesModel.objects.create(
                dates=(cls.dates[i], cls.dates[i + 1]),
                timestamps=(cls.timestamps[i], cls.timestamps[i + 1]),
            )
            RangesModel.objects.create(
                dates=(cls.dates[i], cls.dates[i + 1]),
                timestamps=(cls.aware_timestamps[i], cls.aware_timestamps[i + 1]),
            )
    def test_datetime_range_contains(self):
        """timestamps__contains accepts scalars, tuples, Value/Func, and F()."""
        filter_args = (
            self.timestamps[1],
            self.aware_timestamps[1],
            (self.timestamps[1], self.timestamps[2]),
            (self.aware_timestamps[1], self.aware_timestamps[2]),
            Value(self.dates[0]),
            Func(F("dates"), function="lower", output_field=DateTimeField()),
            F("timestamps_inner"),
        )
        for filter_arg in filter_args:
            with self.subTest(filter_arg=filter_arg):
                self.assertCountEqual(
                    RangesModel.objects.filter(**{"timestamps__contains": filter_arg}),
                    [self.obj, self.aware_obj],
                )
    def test_date_range_contains(self):
        """dates__contains accepts datetimes, tuples, Value/Func, and F()."""
        filter_args = (
            self.timestamps[1],
            (self.dates[1], self.dates[2]),
            Value(self.dates[0], output_field=DateField()),
            Func(F("timestamps"), function="lower", output_field=DateField()),
            F("dates_inner"),
        )
        for filter_arg in filter_args:
            with self.subTest(filter_arg=filter_arg):
                self.assertCountEqual(
                    RangesModel.objects.filter(**{"dates__contains": filter_arg}),
                    [self.obj, self.aware_obj],
                )
class TestQuerying(PostgreSQLTestCase):
    """Range-specific lookups (contains, overlap, adjacency, bound tests) on
    an IntegerRangeField fixture covering bounded, unbounded, empty, and NULL
    values. Expected results index into self.objs by creation order."""

    @classmethod
    def setUpTestData(cls):
        # objs[0]: [0, 10)  objs[1]: [5, 15)  objs[2]: (-inf, 0)
        # objs[3]: empty    objs[4]: NULL
        cls.objs = RangesModel.objects.bulk_create(
            [
                RangesModel(ints=NumericRange(0, 10)),
                RangesModel(ints=NumericRange(5, 15)),
                RangesModel(ints=NumericRange(None, 0)),
                RangesModel(ints=NumericRange(empty=True)),
                RangesModel(ints=None),
            ]
        )
    def test_exact(self):
        self.assertSequenceEqual(
            RangesModel.objects.filter(ints__exact=NumericRange(0, 10)),
            [self.objs[0]],
        )
    def test_isnull(self):
        self.assertSequenceEqual(
            RangesModel.objects.filter(ints__isnull=True),
            [self.objs[4]],
        )
    def test_isempty(self):
        self.assertSequenceEqual(
            RangesModel.objects.filter(ints__isempty=True),
            [self.objs[3]],
        )
    def test_contains(self):
        self.assertSequenceEqual(
            RangesModel.objects.filter(ints__contains=8),
            [self.objs[0], self.objs[1]],
        )
    def test_contains_range(self):
        self.assertSequenceEqual(
            RangesModel.objects.filter(ints__contains=NumericRange(3, 8)),
            [self.objs[0]],
        )
    def test_decimal_contains_range(self):
        """contains on decimal ranges honors inclusive/exclusive bounds."""
        decimals = RangesModel.objects.bulk_create(
            [
                RangesModel(decimals=NumericRange(None, 10)),
                RangesModel(decimals=NumericRange(10, None)),
                RangesModel(decimals=NumericRange(5, 15)),
                RangesModel(decimals=NumericRange(5, 15, "(]")),
            ]
        )
        for contains, objs in [
            (199, [decimals[1]]),
            (1, [decimals[0]]),
            # 15 is excluded by the default '[)' bounds but included by '(]'.
            (15, [decimals[1], decimals[3]]),
        ]:
            with self.subTest(decimal_contains=contains):
                self.assertSequenceEqual(
                    RangesModel.objects.filter(decimals__contains=contains), objs
                )
    def test_contained_by(self):
        # The empty range (objs[3]) is contained by every range.
        self.assertSequenceEqual(
            RangesModel.objects.filter(ints__contained_by=NumericRange(0, 20)),
            [self.objs[0], self.objs[1], self.objs[3]],
        )
    def test_overlap(self):
        self.assertSequenceEqual(
            RangesModel.objects.filter(ints__overlap=NumericRange(3, 8)),
            [self.objs[0], self.objs[1]],
        )
    def test_fully_lt(self):
        self.assertSequenceEqual(
            RangesModel.objects.filter(ints__fully_lt=NumericRange(5, 10)),
            [self.objs[2]],
        )
    def test_fully_gt(self):
        self.assertSequenceEqual(
            RangesModel.objects.filter(ints__fully_gt=NumericRange(5, 10)),
            [],
        )
    def test_not_lt(self):
        self.assertSequenceEqual(
            RangesModel.objects.filter(ints__not_lt=NumericRange(5, 10)),
            [self.objs[1]],
        )
    def test_not_gt(self):
        self.assertSequenceEqual(
            RangesModel.objects.filter(ints__not_gt=NumericRange(5, 10)),
            [self.objs[0], self.objs[2]],
        )
    def test_adjacent_to(self):
        self.assertSequenceEqual(
            RangesModel.objects.filter(ints__adjacent_to=NumericRange(0, 5)),
            [self.objs[1], self.objs[2]],
        )
    def test_startswith(self):
        self.assertSequenceEqual(
            RangesModel.objects.filter(ints__startswith=0),
            [self.objs[0]],
        )
    def test_endswith(self):
        self.assertSequenceEqual(
            RangesModel.objects.filter(ints__endswith=0),
            [self.objs[2]],
        )
    def test_startswith_chaining(self):
        """startswith is a transform, so further lookups (gte) chain onto it."""
        self.assertSequenceEqual(
            RangesModel.objects.filter(ints__startswith__gte=0),
            [self.objs[0], self.objs[1]],
        )
    def test_bound_type(self):
        """lower/upper_inc and lower/upper_inf classify each bound correctly."""
        decimals = RangesModel.objects.bulk_create(
            [
                RangesModel(decimals=NumericRange(None, 10)),
                RangesModel(decimals=NumericRange(10, None)),
                RangesModel(decimals=NumericRange(5, 15)),
                RangesModel(decimals=NumericRange(5, 15, "(]")),
            ]
        )
        tests = [
            ("lower_inc", True, [decimals[1], decimals[2]]),
            ("lower_inc", False, [decimals[0], decimals[3]]),
            ("lower_inf", True, [decimals[0]]),
            ("lower_inf", False, [decimals[1], decimals[2], decimals[3]]),
            ("upper_inc", True, [decimals[3]]),
            ("upper_inc", False, [decimals[0], decimals[1], decimals[2]]),
            ("upper_inf", True, [decimals[1]]),
            ("upper_inf", False, [decimals[0], decimals[2], decimals[3]]),
        ]
        for lookup, filter_arg, excepted_result in tests:
            with self.subTest(lookup=lookup, filter_arg=filter_arg):
                self.assertSequenceEqual(
                    RangesModel.objects.filter(**{"decimals__%s" % lookup: filter_arg}),
                    excepted_result,
                )
class TestQueryingWithRanges(PostgreSQLTestCase):
    """contained_by lookups on scalar (non-range) fields of every supported
    type, filtering rows whose scalar value falls inside a range literal or
    an F() reference to a related range field."""

    def test_date_range(self):
        objs = [
            RangeLookupsModel.objects.create(date="2015-01-01"),
            RangeLookupsModel.objects.create(date="2015-05-05"),
        ]
        self.assertSequenceEqual(
            RangeLookupsModel.objects.filter(
                date__contained_by=DateRange("2015-01-01", "2015-05-04")
            ),
            [objs[0]],
        )
    def test_date_range_datetime_field(self):
        """The __date transform on a DateTimeField chains with contained_by."""
        objs = [
            RangeLookupsModel.objects.create(timestamp="2015-01-01"),
            RangeLookupsModel.objects.create(timestamp="2015-05-05"),
        ]
        self.assertSequenceEqual(
            RangeLookupsModel.objects.filter(
                timestamp__date__contained_by=DateRange("2015-01-01", "2015-05-04")
            ),
            [objs[0]],
        )
    def test_datetime_range(self):
        objs = [
            RangeLookupsModel.objects.create(timestamp="2015-01-01T09:00:00"),
            RangeLookupsModel.objects.create(timestamp="2015-05-05T17:00:00"),
        ]
        self.assertSequenceEqual(
            RangeLookupsModel.objects.filter(
                timestamp__contained_by=DateTimeTZRange(
                    "2015-01-01T09:00", "2015-05-04T23:55"
                )
            ),
            [objs[0]],
        )
    def test_small_integer_field_contained_by(self):
        objs = [
            RangeLookupsModel.objects.create(small_integer=8),
            RangeLookupsModel.objects.create(small_integer=4),
            RangeLookupsModel.objects.create(small_integer=-1),
        ]
        self.assertSequenceEqual(
            RangeLookupsModel.objects.filter(
                small_integer__contained_by=NumericRange(4, 6)
            ),
            [objs[1]],
        )
    def test_integer_range(self):
        objs = [
            RangeLookupsModel.objects.create(integer=5),
            RangeLookupsModel.objects.create(integer=99),
            RangeLookupsModel.objects.create(integer=-1),
        ]
        self.assertSequenceEqual(
            RangeLookupsModel.objects.filter(integer__contained_by=NumericRange(1, 98)),
            [objs[0]],
        )
    def test_biginteger_range(self):
        objs = [
            RangeLookupsModel.objects.create(big_integer=5),
            RangeLookupsModel.objects.create(big_integer=99),
            RangeLookupsModel.objects.create(big_integer=-1),
        ]
        self.assertSequenceEqual(
            RangeLookupsModel.objects.filter(
                big_integer__contained_by=NumericRange(1, 98)
            ),
            [objs[0]],
        )
    def test_decimal_field_contained_by(self):
        objs = [
            RangeLookupsModel.objects.create(decimal_field=Decimal("1.33")),
            RangeLookupsModel.objects.create(decimal_field=Decimal("2.88")),
            RangeLookupsModel.objects.create(decimal_field=Decimal("99.17")),
        ]
        self.assertSequenceEqual(
            RangeLookupsModel.objects.filter(
                decimal_field__contained_by=NumericRange(
                    Decimal("1.89"), Decimal("7.91")
                ),
            ),
            [objs[1]],
        )
    def test_float_range(self):
        objs = [
            RangeLookupsModel.objects.create(float=5),
            RangeLookupsModel.objects.create(float=99),
            RangeLookupsModel.objects.create(float=-1),
        ]
        self.assertSequenceEqual(
            RangeLookupsModel.objects.filter(float__contained_by=NumericRange(1, 98)),
            [objs[0]],
        )
    def test_small_auto_field_contained_by(self):
        objs = SmallAutoFieldModel.objects.bulk_create(
            [SmallAutoFieldModel() for i in range(1, 5)]
        )
        # NumericRange's default bounds exclude the upper pk, hence objs[1:3].
        self.assertSequenceEqual(
            SmallAutoFieldModel.objects.filter(
                id__contained_by=NumericRange(objs[1].pk, objs[3].pk),
            ),
            objs[1:3],
        )
    def test_auto_field_contained_by(self):
        objs = RangeLookupsModel.objects.bulk_create(
            [RangeLookupsModel() for i in range(1, 5)]
        )
        self.assertSequenceEqual(
            RangeLookupsModel.objects.filter(
                id__contained_by=NumericRange(objs[1].pk, objs[3].pk),
            ),
            objs[1:3],
        )
    def test_big_auto_field_contained_by(self):
        objs = BigAutoFieldModel.objects.bulk_create(
            [BigAutoFieldModel() for i in range(1, 5)]
        )
        self.assertSequenceEqual(
            BigAutoFieldModel.objects.filter(
                id__contained_by=NumericRange(objs[1].pk, objs[3].pk),
            ),
            objs[1:3],
        )
    def test_f_ranges(self):
        """contained_by accepts an F() pointing at a related range field."""
        parent = RangesModel.objects.create(decimals=NumericRange(0, 10))
        objs = [
            RangeLookupsModel.objects.create(float=5, parent=parent),
            RangeLookupsModel.objects.create(float=99, parent=parent),
        ]
        self.assertSequenceEqual(
            RangeLookupsModel.objects.filter(float__contained_by=F("parent__decimals")),
            [objs[0]],
        )
    def test_exclude(self):
        objs = [
            RangeLookupsModel.objects.create(float=5),
            RangeLookupsModel.objects.create(float=99),
            RangeLookupsModel.objects.create(float=-1),
        ]
        self.assertSequenceEqual(
            RangeLookupsModel.objects.exclude(float__contained_by=NumericRange(0, 100)),
            [objs[2]],
        )
class TestSerialization(PostgreSQLSimpleTestCase):
    """JSON serialization round-trips for range fields: dumping produces the
    fixture below, and loading the fixture reconstructs equivalent ranges."""

    # Hand-written JSON fixture: range values are themselves JSON-encoded
    # strings with upper/lower/bounds keys; null fields stay null.
    test_data = (
        '[{"fields": {"ints": "{\\"upper\\": \\"10\\", \\"lower\\": \\"0\\", '
        '\\"bounds\\": \\"[)\\"}", "decimals": "{\\"empty\\": true}", '
        '"bigints": null, "timestamps": '
        '"{\\"upper\\": \\"2014-02-02T12:12:12+00:00\\", '
        '\\"lower\\": \\"2014-01-01T00:00:00+00:00\\", \\"bounds\\": \\"[)\\"}", '
        '"timestamps_inner": null, '
        '"timestamps_closed_bounds": "{\\"upper\\": \\"2014-02-02T12:12:12+00:00\\", '
        '\\"lower\\": \\"2014-01-01T00:00:00+00:00\\", \\"bounds\\": \\"()\\"}", '
        '"dates": "{\\"upper\\": \\"2014-02-02\\", \\"lower\\": \\"2014-01-01\\", '
        '\\"bounds\\": \\"[)\\"}", "dates_inner": null }, '
        '"model": "postgres_tests.rangesmodel", "pk": null}]'
    )
    # Bounds used by the fixture above (aware datetimes in UTC).
    lower_date = datetime.date(2014, 1, 1)
    upper_date = datetime.date(2014, 2, 2)
    lower_dt = datetime.datetime(2014, 1, 1, 0, 0, 0, tzinfo=datetime.UTC)
    upper_dt = datetime.datetime(2014, 2, 2, 12, 12, 12, tzinfo=datetime.UTC)
    def test_dumping(self):
        """Serializing an instance matches test_data, comparing the embedded
        range payloads as parsed JSON so key order doesn't matter."""
        instance = RangesModel(
            ints=NumericRange(0, 10),
            decimals=NumericRange(empty=True),
            timestamps=DateTimeTZRange(self.lower_dt, self.upper_dt),
            timestamps_closed_bounds=DateTimeTZRange(
                self.lower_dt,
                self.upper_dt,
                bounds="()",
            ),
            dates=DateRange(self.lower_date, self.upper_date),
        )
        data = serializers.serialize("json", [instance])
        dumped = json.loads(data)
        for field in ("ints", "dates", "timestamps", "timestamps_closed_bounds"):
            dumped[0]["fields"][field] = json.loads(dumped[0]["fields"][field])
        check = json.loads(self.test_data)
        for field in ("ints", "dates", "timestamps", "timestamps_closed_bounds"):
            check[0]["fields"][field] = json.loads(check[0]["fields"][field])
        self.assertEqual(dumped, check)
    def test_loading(self):
        """Deserializing test_data reconstructs every range value."""
        instance = list(serializers.deserialize("json", self.test_data))[0].object
        self.assertEqual(instance.ints, NumericRange(0, 10))
        self.assertEqual(instance.decimals, NumericRange(empty=True))
        self.assertIsNone(instance.bigints)
        self.assertEqual(instance.dates, DateRange(self.lower_date, self.upper_date))
        self.assertEqual(
            instance.timestamps, DateTimeTZRange(self.lower_dt, self.upper_dt)
        )
        self.assertEqual(
            instance.timestamps_closed_bounds,
            DateTimeTZRange(self.lower_dt, self.upper_dt, bounds="()"),
        )
    def test_serialize_range_with_null(self):
        """Half-open ranges (None at either end) survive a full round trip."""
        instance = RangesModel(ints=NumericRange(None, 10))
        data = serializers.serialize("json", [instance])
        new_instance = list(serializers.deserialize("json", data))[0].object
        self.assertEqual(new_instance.ints, NumericRange(None, 10))
        instance = RangesModel(ints=NumericRange(10, None))
        data = serializers.serialize("json", [instance])
        new_instance = list(serializers.deserialize("json", data))[0].object
        self.assertEqual(new_instance.ints, NumericRange(10, None))
class TestChecks(PostgreSQLSimpleTestCase):
    """System-check behavior for range-field options."""

    def test_choices_tuple_list(self):
        """Grouped choices mixing list and tuple literals pass field checks."""
        grouped_choices = [
            ["1-50", [((1, 25), "1-25"), ([26, 50], "26-50")]],
            ((51, 100), "51-100"),
        ]

        class Model(PostgreSQLModel):
            field = pg_fields.IntegerRangeField(choices=grouped_choices)

        self.assertEqual(Model._meta.get_field("field").check(), [])
class TestValidators(PostgreSQLSimpleTestCase):
    """RangeMaxValueValidator / RangeMinValueValidator accept in-bound ranges
    and reject out-of-bound or unbounded ones with the right message/code."""

    def test_max(self):
        validator = RangeMaxValueValidator(5)
        validator(NumericRange(0, 5))
        msg = "Ensure that the upper bound of the range is not greater than 5."
        with self.assertRaises(exceptions.ValidationError) as cm:
            validator(NumericRange(0, 10))
        self.assertEqual(cm.exception.messages[0], msg)
        self.assertEqual(cm.exception.code, "max_value")
        with self.assertRaisesMessage(exceptions.ValidationError, msg):
            validator(NumericRange(0, None))  # an unbounded upper end fails too
    def test_min(self):
        validator = RangeMinValueValidator(5)
        validator(NumericRange(10, 15))
        msg = "Ensure that the lower bound of the range is not less than 5."
        with self.assertRaises(exceptions.ValidationError) as cm:
            validator(NumericRange(0, 10))
        self.assertEqual(cm.exception.messages[0], msg)
        self.assertEqual(cm.exception.code, "min_value")
        with self.assertRaisesMessage(exceptions.ValidationError, msg):
            validator(NumericRange(None, 10))  # an unbounded lower end fails too
class TestFormField(PostgreSQLSimpleTestCase):
def test_valid_integer(self):
field = pg_forms.IntegerRangeField()
value = field.clean(["1", "2"])
self.assertEqual(value, NumericRange(1, 2))
def test_valid_decimal(self):
field = pg_forms.DecimalRangeField()
value = field.clean(["1.12345", "2.001"])
self.assertEqual(value, NumericRange(Decimal("1.12345"), Decimal("2.001")))
def test_valid_timestamps(self):
field = pg_forms.DateTimeRangeField()
value = field.clean(["01/01/2014 00:00:00", "02/02/2014 12:12:12"])
lower = datetime.datetime(2014, 1, 1, 0, 0, 0)
upper = datetime.datetime(2014, 2, 2, 12, 12, 12)
self.assertEqual(value, DateTimeTZRange(lower, upper))
def test_valid_dates(self):
field = pg_forms.DateRangeField()
value = field.clean(["01/01/2014", "02/02/2014"])
lower = datetime.date(2014, 1, 1)
upper = datetime.date(2014, 2, 2)
self.assertEqual(value, DateRange(lower, upper))
    def test_using_split_datetime_widget(self):
        """A DateTimeRangeField with SplitDateTimeField as base renders four
        inputs (date/time for each bound) and cleans them to one range."""
        class SplitDateTimeRangeField(pg_forms.DateTimeRangeField):
            base_field = forms.SplitDateTimeField
        class SplitForm(forms.Form):
            field = SplitDateTimeRangeField()
        form = SplitForm()
        self.assertHTMLEqual(
            str(form),
            """
            <div>
                <fieldset>
                    <legend>Field:</legend>
                    <input id="id_field_0_0" name="field_0_0" type="text">
                    <input id="id_field_0_1" name="field_0_1" type="text">
                    <input id="id_field_1_0" name="field_1_0" type="text">
                    <input id="id_field_1_1" name="field_1_1" type="text">
                </fieldset>
            </div>
            """,
        )
        form = SplitForm(
            {
                "field_0_0": "01/01/2014",
                "field_0_1": "00:00:00",
                "field_1_0": "02/02/2014",
                "field_1_1": "12:12:12",
            }
        )
        self.assertTrue(form.is_valid())
        lower = datetime.datetime(2014, 1, 1, 0, 0, 0)
        upper = datetime.datetime(2014, 2, 2, 12, 12, 12)
        self.assertEqual(form.cleaned_data["field"], DateTimeTZRange(lower, upper))
def test_none(self):
field = pg_forms.IntegerRangeField(required=False)
value = field.clean(["", ""])
self.assertIsNone(value)
    def test_datetime_form_as_table(self):
        """as_table() renders visible and hidden-initial inputs for a
        DateTimeRangeField, both empty and with bound data echoed back."""
        class DateTimeRangeForm(forms.Form):
            datetime_field = pg_forms.DateTimeRangeField(show_hidden_initial=True)
        form = DateTimeRangeForm()
        self.assertHTMLEqual(
            form.as_table(),
            """
            <tr><th>
            <label>Datetime field:</label>
            </th><td>
            <input type="text" name="datetime_field_0" id="id_datetime_field_0">
            <input type="text" name="datetime_field_1" id="id_datetime_field_1">
            <input type="hidden" name="initial-datetime_field_0"
            id="initial-id_datetime_field_0">
            <input type="hidden" name="initial-datetime_field_1"
            id="initial-id_datetime_field_1">
            </td></tr>
            """,
        )
        form = DateTimeRangeForm(
            {
                "datetime_field_0": "2010-01-01 11:13:00",
                "datetime_field_1": "2020-12-12 16:59:00",
            }
        )
        self.assertHTMLEqual(
            form.as_table(),
            """
            <tr><th>
            <label>Datetime field:</label>
            </th><td>
            <input type="text" name="datetime_field_0"
            value="2010-01-01 11:13:00" id="id_datetime_field_0">
            <input type="text" name="datetime_field_1"
            value="2020-12-12 16:59:00" id="id_datetime_field_1">
            <input type="hidden" name="initial-datetime_field_0"
            value="2010-01-01 11:13:00" id="initial-id_datetime_field_0">
            <input type="hidden" name="initial-datetime_field_1"
            value="2020-12-12 16:59:00" id="initial-id_datetime_field_1"></td></tr>
            """,
        )
    def test_datetime_form_initial_data(self):
        """has_changed() compares submitted data against the hidden-initial
        values: differing lower bound -> changed; identical -> unchanged."""
        class DateTimeRangeForm(forms.Form):
            datetime_field = pg_forms.DateTimeRangeField(show_hidden_initial=True)
        data = QueryDict(mutable=True)
        data.update(
            {
                "datetime_field_0": "2010-01-01 11:13:00",
                "datetime_field_1": "",
                "initial-datetime_field_0": "2010-01-01 10:12:00",
                "initial-datetime_field_1": "",
            }
        )
        form = DateTimeRangeForm(data=data)
        self.assertTrue(form.has_changed())
        data["initial-datetime_field_0"] = "2010-01-01 11:13:00"
        form = DateTimeRangeForm(data=data)
        self.assertFalse(form.has_changed())
    def test_rendering(self):
        """An IntegerRangeField renders as two number inputs in a fieldset."""
        class RangeForm(forms.Form):
            ints = pg_forms.IntegerRangeField()
        self.assertHTMLEqual(
            str(RangeForm()),
            """
        <div>
            <fieldset>
                <legend>Ints:</legend>
                <input id="id_ints_0" name="ints_0" type="number">
                <input id="id_ints_1" name="ints_1" type="number">
            </fieldset>
        </div>
        """,
        )
def test_integer_lower_bound_higher(self):
field = pg_forms.IntegerRangeField()
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean(["10", "2"])
self.assertEqual(
cm.exception.messages[0],
"The start of the range must not exceed the end of the range.",
)
self.assertEqual(cm.exception.code, "bound_ordering")
def test_integer_open(self):
field = pg_forms.IntegerRangeField()
value = field.clean(["", "0"])
self.assertEqual(value, NumericRange(None, 0))
def test_integer_incorrect_data_type(self):
field = pg_forms.IntegerRangeField()
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean("1")
self.assertEqual(cm.exception.messages[0], "Enter two whole numbers.")
self.assertEqual(cm.exception.code, "invalid")
def test_integer_invalid_lower(self):
field = pg_forms.IntegerRangeField()
msg = "Enter a whole number."
with self.assertRaisesMessage(exceptions.ValidationError, msg):
field.clean(["a", "2"])
def test_integer_invalid_upper(self):
field = pg_forms.IntegerRangeField()
msg = "Enter a whole number."
with self.assertRaisesMessage(exceptions.ValidationError, msg):
field.clean(["1", "b"])
def test_integer_required(self):
field = pg_forms.IntegerRangeField(required=True)
msg = "This field is required."
with self.assertRaisesMessage(exceptions.ValidationError, msg):
field.clean(["", ""])
value = field.clean([1, ""])
self.assertEqual(value, NumericRange(1, None))
    def test_decimal_lower_bound_higher(self):
        """A decimal start above the end raises a bound_ordering error."""
        field = pg_forms.DecimalRangeField()
        with self.assertRaises(exceptions.ValidationError) as cm:
            field.clean(["1.8", "1.6"])
        self.assertEqual(
            cm.exception.messages[0],
            "The start of the range must not exceed the end of the range.",
        )
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | true |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/postgres_tests/test_unaccent.py | tests/postgres_tests/test_unaccent.py | from django.db import connection
from . import PostgreSQLTestCase
from .models import CharFieldModel, TextFieldModel
class UnaccentTest(PostgreSQLTestCase):
    """The __unaccent transform: accent-insensitive matching on CharField,
    alone and chained with other lookups."""

    # Subclasses swap in a different field-bearing model (see
    # UnaccentTextFieldTest).
    Model = CharFieldModel
    @classmethod
    def setUpTestData(cls):
        cls.Model.objects.bulk_create(
            [
                cls.Model(field="àéÖ"),
                cls.Model(field="aeO"),
                cls.Model(field="aeo"),
            ]
        )
    def test_unaccent(self):
        # Case still matters: "aeo" is excluded.
        self.assertQuerySetEqual(
            self.Model.objects.filter(field__unaccent="aeO"),
            ["àéÖ", "aeO"],
            transform=lambda instance: instance.field,
            ordered=False,
        )
    def test_unaccent_chained(self):
        """
        Unaccent can be used chained with a lookup (which should be the case
        since unaccent implements the Transform API)
        """
        self.assertQuerySetEqual(
            self.Model.objects.filter(field__unaccent__iexact="aeO"),
            ["àéÖ", "aeO", "aeo"],
            transform=lambda instance: instance.field,
            ordered=False,
        )
        self.assertQuerySetEqual(
            self.Model.objects.filter(field__unaccent__endswith="éÖ"),
            ["àéÖ", "aeO"],
            transform=lambda instance: instance.field,
            ordered=False,
        )
    def test_unaccent_with_conforming_strings_off(self):
        """SQL is valid when standard_conforming_strings is off."""
        with connection.cursor() as cursor:
            cursor.execute("SHOW standard_conforming_strings")
            disable_conforming_strings = cursor.fetchall()[0][0] == "on"
            if disable_conforming_strings:
                cursor.execute("SET standard_conforming_strings TO off")
            try:
                self.assertQuerySetEqual(
                    self.Model.objects.filter(field__unaccent__endswith="éÖ"),
                    ["àéÖ", "aeO"],
                    transform=lambda instance: instance.field,
                    ordered=False,
                )
            finally:
                # Restore the session setting even if the assertion fails.
                if disable_conforming_strings:
                    cursor.execute("SET standard_conforming_strings TO on")
    def test_unaccent_accentuated_needle(self):
        # The search value itself is unaccented too before comparison.
        self.assertQuerySetEqual(
            self.Model.objects.filter(field__unaccent="aéÖ"),
            ["àéÖ", "aeO"],
            transform=lambda instance: instance.field,
            ordered=False,
        )
class UnaccentTextFieldTest(UnaccentTest):
    """
    TextField should have the exact same behavior as CharField
    regarding unaccent lookups.
    """

    # Reuses every test from UnaccentTest against a TextField-backed model.
    Model = TextFieldModel
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/postgres_tests/test_hstore.py | tests/postgres_tests/test_hstore.py | import json
from django.core import checks, exceptions, serializers
from django.db import connection
from django.db.models import F, OuterRef, Subquery
from django.db.models.expressions import RawSQL
from django.forms import Form
from django.test.utils import CaptureQueriesContext, isolate_apps
from . import PostgreSQLSimpleTestCase, PostgreSQLTestCase
from .models import HStoreModel, PostgreSQLModel
try:
from django.contrib.postgres import forms
from django.contrib.postgres.fields import HStoreField
from django.contrib.postgres.fields.hstore import KeyTransform
from django.contrib.postgres.validators import KeysValidator
except ImportError:
pass
class SimpleTests(PostgreSQLTestCase):
    """HStoreField save/load round trips, including NULL handling and the
    coercion of keys and values to strings."""

    def test_save_load_success(self):
        value = {"a": "b"}
        instance = HStoreModel(field=value)
        instance.save()
        reloaded = HStoreModel.objects.get()
        self.assertEqual(reloaded.field, value)
    def test_null(self):
        """A None field value round-trips as None (SQL NULL)."""
        instance = HStoreModel(field=None)
        instance.save()
        reloaded = HStoreModel.objects.get()
        self.assertIsNone(reloaded.field)
    def test_value_null(self):
        """A None *value* inside the mapping is preserved (hstore NULL value)."""
        value = {"a": None}
        instance = HStoreModel(field=value)
        instance.save()
        reloaded = HStoreModel.objects.get()
        self.assertEqual(reloaded.field, value)
    def test_key_val_cast_to_string(self):
        """Non-string keys and values are cast to strings on save, and lookup
        arguments are cast the same way."""
        value = {"a": 1, "b": "B", 2: "c", "ï": "ê"}
        expected_value = {"a": "1", "b": "B", "2": "c", "ï": "ê"}
        instance = HStoreModel.objects.create(field=value)
        instance = HStoreModel.objects.get()
        self.assertEqual(instance.field, expected_value)
        instance = HStoreModel.objects.get(field__a=1)
        self.assertEqual(instance.field, expected_value)
        instance = HStoreModel.objects.get(field__has_keys=[2, "a", "ï"])
        self.assertEqual(instance.field, expected_value)
    def test_array_field(self):
        """String coercion also applies element-wise inside an ArrayField of
        hstore values."""
        value = [
            {"a": 1, "b": "B", 2: "c", "ï": "ê"},
            {"a": 1, "b": "B", 2: "c", "ï": "ê"},
        ]
        expected_value = [
            {"a": "1", "b": "B", "2": "c", "ï": "ê"},
            {"a": "1", "b": "B", "2": "c", "ï": "ê"},
        ]
        instance = HStoreModel.objects.create(array_field=value)
        instance.refresh_from_db()
        self.assertEqual(instance.array_field, expected_value)
class TestQuerying(PostgreSQLTestCase):
@classmethod
def setUpTestData(cls):
cls.objs = HStoreModel.objects.bulk_create(
[
HStoreModel(field={"a": "b"}),
HStoreModel(field={"a": "b", "c": "d"}),
HStoreModel(field={"c": "d"}),
HStoreModel(field={}),
HStoreModel(field=None),
HStoreModel(field={"cat": "TigrOu", "breed": "birman"}),
HStoreModel(field={"cat": "minou", "breed": "ragdoll"}),
HStoreModel(field={"cat": "kitty", "breed": "Persian"}),
HStoreModel(field={"cat": "Kit Kat", "breed": "persian"}),
]
)
def test_exact(self):
self.assertSequenceEqual(
HStoreModel.objects.filter(field__exact={"a": "b"}), self.objs[:1]
)
def test_contained_by(self):
self.assertSequenceEqual(
HStoreModel.objects.filter(field__contained_by={"a": "b", "c": "d"}),
self.objs[:4],
)
def test_contains(self):
self.assertSequenceEqual(
HStoreModel.objects.filter(field__contains={"a": "b"}), self.objs[:2]
)
def test_in_generator(self):
def search():
yield {"a": "b"}
self.assertSequenceEqual(
HStoreModel.objects.filter(field__in=search()), self.objs[:1]
)
def test_has_key(self):
self.assertSequenceEqual(
HStoreModel.objects.filter(field__has_key="c"), self.objs[1:3]
)
def test_has_keys(self):
self.assertSequenceEqual(
HStoreModel.objects.filter(field__has_keys=["a", "c"]), self.objs[1:2]
)
def test_has_any_keys(self):
self.assertSequenceEqual(
HStoreModel.objects.filter(field__has_any_keys=["a", "c"]), self.objs[:3]
)
def test_key_transform(self):
self.assertSequenceEqual(
HStoreModel.objects.filter(field__a="b"), self.objs[:2]
)
def test_key_transform_raw_expression(self):
expr = RawSQL("%s::hstore", ["x => b, y => c"])
self.assertSequenceEqual(
HStoreModel.objects.filter(field__a=KeyTransform("x", expr)), self.objs[:2]
)
def test_key_transform_annotation(self):
qs = HStoreModel.objects.annotate(a=F("field__a"))
self.assertCountEqual(
qs.values_list("a", flat=True),
["b", "b", None, None, None, None, None, None, None],
)
def test_keys(self):
self.assertSequenceEqual(
HStoreModel.objects.filter(field__keys=["a"]), self.objs[:1]
)
def test_values(self):
self.assertSequenceEqual(
HStoreModel.objects.filter(field__values=["b"]), self.objs[:1]
)
    def test_field_chaining_contains(self):
        # String lookups can be chained after a key transform: field__<key>__<lookup>.
        self.assertSequenceEqual(
            HStoreModel.objects.filter(field__a__contains="b"), self.objs[:2]
        )

    def test_field_chaining_icontains(self):
        self.assertSequenceEqual(
            HStoreModel.objects.filter(field__cat__icontains="INo"),
            [self.objs[6]],
        )

    def test_field_chaining_startswith(self):
        self.assertSequenceEqual(
            HStoreModel.objects.filter(field__cat__startswith="kit"),
            [self.objs[7]],
        )

    def test_field_chaining_istartswith(self):
        self.assertSequenceEqual(
            HStoreModel.objects.filter(field__cat__istartswith="kit"),
            self.objs[7:],
        )

    def test_field_chaining_endswith(self):
        self.assertSequenceEqual(
            HStoreModel.objects.filter(field__cat__endswith="ou"),
            [self.objs[6]],
        )

    def test_field_chaining_iendswith(self):
        self.assertSequenceEqual(
            HStoreModel.objects.filter(field__cat__iendswith="ou"),
            self.objs[5:7],
        )

    def test_field_chaining_iexact(self):
        self.assertSequenceEqual(
            HStoreModel.objects.filter(field__breed__iexact="persian"),
            self.objs[7:],
        )

    def test_field_chaining_regex(self):
        self.assertSequenceEqual(
            HStoreModel.objects.filter(field__cat__regex=r"ou$"),
            [self.objs[6]],
        )

    def test_field_chaining_iregex(self):
        self.assertSequenceEqual(
            HStoreModel.objects.filter(field__cat__iregex=r"oU$"),
            self.objs[5:7],
        )
    def test_order_by_field(self):
        # Ordering by a key transform sorts on the key's (text) value.
        more_objs = (
            HStoreModel.objects.create(field={"g": "637"}),
            HStoreModel.objects.create(field={"g": "002"}),
            HStoreModel.objects.create(field={"g": "042"}),
            HStoreModel.objects.create(field={"g": "981"}),
        )
        self.assertSequenceEqual(
            HStoreModel.objects.filter(field__has_key="g").order_by("field__g"),
            [more_objs[1], more_objs[2], more_objs[0], more_objs[3]],
        )

    def test_keys_contains(self):
        # The keys transform supports array lookups such as contains.
        self.assertSequenceEqual(
            HStoreModel.objects.filter(field__keys__contains=["a"]), self.objs[:2]
        )

    def test_values_overlap(self):
        self.assertSequenceEqual(
            HStoreModel.objects.filter(field__values__overlap=["b", "d"]), self.objs[:3]
        )

    def test_key_isnull(self):
        # A key explicitly stored with a None value is still "isnull" — the
        # lookup tests the value, so both missing keys and NULL values match.
        obj = HStoreModel.objects.create(field={"a": None})
        self.assertSequenceEqual(
            HStoreModel.objects.filter(field__a__isnull=True),
            self.objs[2:9] + [obj],
        )
        self.assertSequenceEqual(
            HStoreModel.objects.filter(field__a__isnull=False), self.objs[:2]
        )

    def test_usage_in_subquery(self):
        self.assertSequenceEqual(
            HStoreModel.objects.filter(id__in=HStoreModel.objects.filter(field__a="b")),
            self.objs[:2],
        )

    def test_key_sql_injection(self):
        # Key names are escaped, not interpolated: a hostile key becomes a
        # quoted literal in the generated SQL rather than breaking out of it.
        with CaptureQueriesContext(connection) as queries:
            self.assertFalse(
                HStoreModel.objects.filter(
                    **{
                        "field__test' = 'a') OR 1 = 1 OR ('d": "x",
                    }
                ).exists()
            )
        self.assertIn(
            """."field" -> 'test'' = ''a'') OR 1 = 1 OR (''d') = 'x' """,
            queries[0]["sql"],
        )

    def test_obj_subquery_lookup(self):
        # A key transform can be applied to an hstore value produced by a
        # Subquery annotation.
        qs = HStoreModel.objects.annotate(
            value=Subquery(
                HStoreModel.objects.filter(pk=OuterRef("pk")).values("field")
            ),
        ).filter(value__a="b")
        self.assertSequenceEqual(qs, self.objs[:2])
@isolate_apps("postgres_tests")
class TestChecks(PostgreSQLSimpleTestCase):
    """System-check tests for HStoreField defaults."""

    def test_invalid_default(self):
        # A mutable instance default ({}) is shared between all field
        # instances, so the field check should emit fields.E010.
        class MyModel(PostgreSQLModel):
            field = HStoreField(default={})

        model = MyModel()
        self.assertEqual(
            model.check(),
            [
                checks.Warning(
                    msg=(
                        "HStoreField default should be a callable instead of an "
                        "instance so that it's not shared between all field "
                        "instances."
                    ),
                    hint="Use a callable instead, e.g., use `dict` instead of `{}`.",
                    obj=MyModel._meta.get_field("field"),
                    id="fields.E010",
                )
            ],
        )

    def test_valid_default(self):
        # A callable default (dict) produces a fresh value per instance and
        # passes the check.
        class MyModel(PostgreSQLModel):
            field = HStoreField(default=dict)

        self.assertEqual(MyModel().check(), [])
class TestSerialization(PostgreSQLSimpleTestCase):
    """Round-trip HStoreField (and arrays of it) through the JSON serializer."""

    # Pairs of (hstore field value, array-of-hstore field value), including
    # non-ASCII data to exercise ensure_ascii handling.
    field_values = [
        ({"a": "b"}, [{"a": "b"}, {"b": "a"}]),
        (
            {"все": "Трурль и Клапауций"},
            [{"Трурль": "Клапауций"}, {"Клапауций": "Трурль"}],
        ),
    ]

    @staticmethod
    def create_json_data(field_value, array_field_value):
        """Build the serialized fixture expected for the given field values.

        hstore values are serialized as JSON strings; array members are each
        JSON-encoded individually before the array itself is encoded.
        """
        fields = {
            "field": json.dumps(field_value, ensure_ascii=False),
            "array_field": json.dumps(
                [json.dumps(item, ensure_ascii=False) for item in array_field_value],
                ensure_ascii=False,
            ),
        }
        return json.dumps(
            [{"model": "postgres_tests.hstoremodel", "pk": None, "fields": fields}]
        )

    def test_dumping(self):
        # Serializing a model instance yields the expected JSON fixture.
        for field_value, array_field_value in self.field_values:
            with self.subTest(field_value=field_value, array_value=array_field_value):
                instance = HStoreModel(field=field_value, array_field=array_field_value)
                data = serializers.serialize("json", [instance])
                json_data = self.create_json_data(field_value, array_field_value)
                self.assertEqual(json.loads(data), json.loads(json_data))

    def test_loading(self):
        # Deserializing the fixture restores the original Python values.
        for field_value, array_field_value in self.field_values:
            with self.subTest(field_value=field_value, array_value=array_field_value):
                json_data = self.create_json_data(field_value, array_field_value)
                instance = list(serializers.deserialize("json", json_data))[0].object
                self.assertEqual(instance.field, field_value)
                self.assertEqual(instance.array_field, array_field_value)

    def test_roundtrip_with_null(self):
        # None values inside the hstore survive a serialize/deserialize cycle.
        for field_value in [
            {"a": "b", "c": None},
            {"Енеїда": "Ти знаєш, він який суціга", "Зефір": None},
        ]:
            with self.subTest(field_value=field_value):
                instance = HStoreModel(field=field_value)
                data = serializers.serialize("json", [instance])
                new_instance = list(serializers.deserialize("json", data))[0].object
                self.assertEqual(instance.field, new_instance.field)
class TestValidation(PostgreSQLSimpleTestCase):
    """Model-field validation for HStoreField values."""

    def test_not_a_string(self):
        # Non-string, non-None values are rejected with the "not_a_string" code.
        field = HStoreField()
        with self.assertRaises(exceptions.ValidationError) as cm:
            field.clean({"a": 1}, None)
        self.assertEqual(cm.exception.code, "not_a_string")
        self.assertEqual(
            cm.exception.message % cm.exception.params,
            "The value of “a” is not a string or null.",
        )

    def test_none_allowed_as_value(self):
        # None is a valid hstore value and passes through clean() unchanged.
        field = HStoreField()
        self.assertEqual(field.clean({"a": None}, None), {"a": None})
class TestFormField(PostgreSQLSimpleTestCase):
    """Behavior of the forms.HStoreField form field (JSON text -> dict)."""

    def test_valid(self):
        field = forms.HStoreField()
        value = field.clean('{"a": "b"}')
        self.assertEqual(value, {"a": "b"})

    def test_invalid_json(self):
        # Malformed JSON raises ValidationError with the "invalid_json" code.
        field = forms.HStoreField()
        with self.assertRaises(exceptions.ValidationError) as cm:
            field.clean('{"a": "b"')
        self.assertEqual(cm.exception.messages[0], "Could not load JSON data.")
        self.assertEqual(cm.exception.code, "invalid_json")

    def test_non_dict_json(self):
        # Valid JSON that isn't an object is rejected ("invalid_format").
        field = forms.HStoreField()
        msg = "Input must be a JSON dictionary."
        with self.assertRaisesMessage(exceptions.ValidationError, msg) as cm:
            field.clean('["a", "b", 1]')
        self.assertEqual(cm.exception.code, "invalid_format")

    def test_not_string_values(self):
        # Non-string values are coerced to strings on clean().
        field = forms.HStoreField()
        value = field.clean('{"a": 1}')
        self.assertEqual(value, {"a": "1"})

    def test_none_value(self):
        # JSON null is preserved as Python None (not stringified).
        field = forms.HStoreField()
        value = field.clean('{"a": null}')
        self.assertEqual(value, {"a": None})

    def test_empty(self):
        # An optional field cleans the empty string to an empty dict.
        field = forms.HStoreField(required=False)
        value = field.clean("")
        self.assertEqual(value, {})

    def test_model_field_formfield(self):
        # The model field's default form field is forms.HStoreField.
        model_field = HStoreField()
        form_field = model_field.formfield()
        self.assertIsInstance(form_field, forms.HStoreField)

    def test_field_has_changed(self):
        # has_changed() compares cleaned values, so equivalent JSON strings
        # and dict initials are treated as unchanged.
        class HStoreFormTest(Form):
            f1 = forms.HStoreField()

        form_w_hstore = HStoreFormTest()
        self.assertFalse(form_w_hstore.has_changed())

        form_w_hstore = HStoreFormTest({"f1": '{"a": 1}'})
        self.assertTrue(form_w_hstore.has_changed())

        form_w_hstore = HStoreFormTest({"f1": '{"a": 1}'}, initial={"f1": '{"a": 1}'})
        self.assertFalse(form_w_hstore.has_changed())

        form_w_hstore = HStoreFormTest({"f1": '{"a": 2}'}, initial={"f1": '{"a": 1}'})
        self.assertTrue(form_w_hstore.has_changed())

        form_w_hstore = HStoreFormTest({"f1": '{"a": 1}'}, initial={"f1": {"a": 1}})
        self.assertFalse(form_w_hstore.has_changed())

        form_w_hstore = HStoreFormTest({"f1": '{"a": 2}'}, initial={"f1": {"a": 1}})
        self.assertTrue(form_w_hstore.has_changed())

    def test_prepare_value(self):
        # prepare_value() renders the dict as JSON without ASCII-escaping.
        field = forms.HStoreField()
        self.assertEqual(
            field.prepare_value({"aira_maplayer": "Αρδευτικό δίκτυο"}),
            '{"aira_maplayer": "Αρδευτικό δίκτυο"}',
        )
class TestValidator(PostgreSQLSimpleTestCase):
    """Tests for django.contrib.postgres.validators.KeysValidator."""

    def test_simple_valid(self):
        # Non-strict mode: extra keys are allowed as long as required keys exist.
        validator = KeysValidator(keys=["a", "b"])
        validator({"a": "foo", "b": "bar", "c": "baz"})

    def test_missing_keys(self):
        validator = KeysValidator(keys=["a", "b"])
        with self.assertRaises(exceptions.ValidationError) as cm:
            validator({"a": "foo", "c": "baz"})
        self.assertEqual(cm.exception.messages[0], "Some keys were missing: b")
        self.assertEqual(cm.exception.code, "missing_keys")

    def test_strict_valid(self):
        # Strict mode: exactly the declared keys must be present.
        validator = KeysValidator(keys=["a", "b"], strict=True)
        validator({"a": "foo", "b": "bar"})

    def test_extra_keys(self):
        validator = KeysValidator(keys=["a", "b"], strict=True)
        with self.assertRaises(exceptions.ValidationError) as cm:
            validator({"a": "foo", "b": "bar", "c": "baz"})
        self.assertEqual(cm.exception.messages[0], "Some unknown keys were provided: c")
        self.assertEqual(cm.exception.code, "extra_keys")

    def test_custom_messages(self):
        # A custom message overrides only its own code; other codes keep the
        # default wording.
        messages = {
            "missing_keys": "Foobar",
        }
        validator = KeysValidator(keys=["a", "b"], strict=True, messages=messages)
        with self.assertRaises(exceptions.ValidationError) as cm:
            validator({"a": "foo", "c": "baz"})
        self.assertEqual(cm.exception.messages[0], "Foobar")
        self.assertEqual(cm.exception.code, "missing_keys")
        with self.assertRaises(exceptions.ValidationError) as cm:
            validator({"a": "foo", "b": "bar", "c": "baz"})
        self.assertEqual(cm.exception.messages[0], "Some unknown keys were provided: c")
        self.assertEqual(cm.exception.code, "extra_keys")

    def test_deconstruct(self):
        # deconstruct() must round-trip all constructor arguments for migrations.
        messages = {
            "missing_keys": "Foobar",
        }
        validator = KeysValidator(keys=["a", "b"], strict=True, messages=messages)
        path, args, kwargs = validator.deconstruct()
        self.assertEqual(path, "django.contrib.postgres.validators.KeysValidator")
        self.assertEqual(args, ())
        self.assertEqual(
            kwargs, {"keys": ["a", "b"], "strict": True, "messages": messages}
        )
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/postgres_tests/test_introspection.py | tests/postgres_tests/test_introspection.py | from io import StringIO
from django.core.management import call_command
from . import PostgreSQLTestCase
class InspectDBTests(PostgreSQLTestCase):
    """inspectdb should map PostgreSQL-specific column types to contrib fields."""

    def assertFieldsInModel(self, model, field_outputs):
        # Run inspectdb restricted to the given table and assert each expected
        # field declaration appears in the generated model source.
        out = StringIO()
        call_command(
            "inspectdb",
            table_name_filter=lambda tn: tn.startswith(model),
            stdout=out,
        )
        output = out.getvalue()
        for field_output in field_outputs:
            self.assertIn(field_output, output)

    def test_range_fields(self):
        self.assertFieldsInModel(
            "postgres_tests_rangesmodel",
            [
                "ints = django.contrib.postgres.fields.IntegerRangeField(blank=True, "
                "null=True)",
                "bigints = django.contrib.postgres.fields.BigIntegerRangeField("
                "blank=True, null=True)",
                "decimals = django.contrib.postgres.fields.DecimalRangeField("
                "blank=True, null=True)",
                "timestamps = django.contrib.postgres.fields.DateTimeRangeField("
                "blank=True, null=True)",
                "dates = django.contrib.postgres.fields.DateRangeField(blank=True, "
                "null=True)",
            ],
        )

    def test_hstore_field(self):
        # hstore introspection requires psycopg 3.2+; skip on older drivers.
        from django.db.backends.postgresql.base import psycopg_version

        if psycopg_version() < (3, 2):
            self.skipTest("psycopg 3.2+ is required.")
        self.assertFieldsInModel(
            "postgres_tests_hstoremodel",
            [
                "field = django.contrib.postgres.fields.HStoreField(blank=True, "
                "null=True)",
            ],
        )
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/postgres_tests/test_integration.py | tests/postgres_tests/test_integration.py | import os
import subprocess
import sys
from . import PostgreSQLSimpleTestCase
class PostgresIntegrationTests(PostgreSQLSimpleTestCase):
    """Smoke test: `django check` succeeds against the integration settings."""

    def test_check(self):
        # Run the check in a subprocess with a clean environment so the test
        # suite's own DJANGO_SETTINGS_MODULE doesn't leak in; PYTHONPATH is
        # pointed at the repository root so `django` is importable.
        test_environ = os.environ.copy()
        if "DJANGO_SETTINGS_MODULE" in test_environ:
            del test_environ["DJANGO_SETTINGS_MODULE"]
        test_environ["PYTHONPATH"] = os.path.join(os.path.dirname(__file__), "../../")
        result = subprocess.run(
            [
                sys.executable,
                "-m",
                "django",
                "check",
                "--settings",
                "integration_settings",
            ],
            stdout=subprocess.DEVNULL,
            stderr=subprocess.PIPE,
            cwd=os.path.dirname(__file__),
            env=test_environ,
            encoding="utf-8",
        )
        # Include stderr in the failure message to aid debugging.
        self.assertEqual(result.returncode, 0, msg=result.stderr)
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/postgres_tests/test_constraints.py | tests/postgres_tests/test_constraints.py | import datetime
from unittest import mock
from django.contrib.postgres.indexes import OpClass
from django.core.checks import Error
from django.core.exceptions import ValidationError
from django.db import IntegrityError, connection, transaction
from django.db.models import (
CASCADE,
CharField,
CheckConstraint,
DateField,
Deferrable,
F,
ForeignKey,
Func,
GeneratedField,
IntegerField,
Model,
Q,
UniqueConstraint,
)
from django.db.models.fields.json import KeyTextTransform
from django.db.models.functions import Cast, Left, Lower
from django.test import skipUnlessDBFeature
from django.test.utils import isolate_apps
from django.utils import timezone
from . import PostgreSQLTestCase
from .models import HotelReservation, IntegerArrayModel, RangesModel, Room, Scene
try:
from django.contrib.postgres.constraints import ExclusionConstraint
from django.contrib.postgres.fields import (
DateTimeRangeField,
IntegerRangeField,
RangeBoundary,
RangeOperators,
)
from django.db.backends.postgresql.psycopg_any import DateRange, NumericRange
except ImportError:
pass
class SchemaTests(PostgreSQLTestCase):
    """Schema-editor tests for check constraints and opclass-based unique
    constraints on PostgreSQL-specific fields."""

    # Maps an index relation name to the operator class(es) backing it, via
    # the pg_opclass/pg_index/pg_class catalogs.
    get_opclass_query = """
        SELECT opcname, c.relname FROM pg_opclass AS oc
        JOIN pg_index as i on oc.oid = ANY(i.indclass)
        JOIN pg_class as c on c.oid = i.indexrelid
        WHERE c.relname = %s
    """

    def get_constraints(self, table):
        """Get the constraints on the table using a new cursor."""
        with connection.cursor() as cursor:
            return connection.introspection.get_constraints(cursor, table)

    def test_check_constraint_range_value(self):
        # A contained_by check constraint is enforced by the database.
        constraint_name = "ints_between"
        self.assertNotIn(
            constraint_name, self.get_constraints(RangesModel._meta.db_table)
        )
        constraint = CheckConstraint(
            condition=Q(ints__contained_by=NumericRange(10, 30)),
            name=constraint_name,
        )
        with connection.schema_editor() as editor:
            editor.add_constraint(RangesModel, constraint)
        self.assertIn(constraint_name, self.get_constraints(RangesModel._meta.db_table))
        with self.assertRaises(IntegrityError), transaction.atomic():
            RangesModel.objects.create(ints=(20, 50))
        RangesModel.objects.create(ints=(10, 30))

    def test_check_constraint_array_contains(self):
        # Constraint.validate() enforces array __contains in Python too.
        constraint = CheckConstraint(
            condition=Q(field__contains=[1]),
            name="array_contains",
        )
        msg = f"Constraint “{constraint.name}” is violated."
        with self.assertRaisesMessage(ValidationError, msg):
            constraint.validate(IntegerArrayModel, IntegerArrayModel())
        constraint.validate(IntegerArrayModel, IntegerArrayModel(field=[1]))

    def test_check_constraint_array_length(self):
        constraint = CheckConstraint(
            condition=Q(field__len=1),
            name="array_length",
        )
        msg = f"Constraint “{constraint.name}” is violated."
        with self.assertRaisesMessage(ValidationError, msg):
            constraint.validate(IntegerArrayModel, IntegerArrayModel())
        constraint.validate(IntegerArrayModel, IntegerArrayModel(field=[1]))

    def test_check_constraint_daterange_contains(self):
        # A range-contains-range condition referencing another column (F()).
        constraint_name = "dates_contains"
        self.assertNotIn(
            constraint_name, self.get_constraints(RangesModel._meta.db_table)
        )
        constraint = CheckConstraint(
            condition=Q(dates__contains=F("dates_inner")),
            name=constraint_name,
        )
        with connection.schema_editor() as editor:
            editor.add_constraint(RangesModel, constraint)
        self.assertIn(constraint_name, self.get_constraints(RangesModel._meta.db_table))
        date_1 = datetime.date(2016, 1, 1)
        date_2 = datetime.date(2016, 1, 4)
        with self.assertRaises(IntegrityError), transaction.atomic():
            RangesModel.objects.create(
                dates=(date_1, date_2),
                dates_inner=(date_1, date_2.replace(day=5)),
            )
        RangesModel.objects.create(
            dates=(date_1, date_2),
            dates_inner=(date_1, date_2),
        )

    def test_check_constraint_datetimerange_contains(self):
        constraint_name = "timestamps_contains"
        self.assertNotIn(
            constraint_name, self.get_constraints(RangesModel._meta.db_table)
        )
        constraint = CheckConstraint(
            condition=Q(timestamps__contains=F("timestamps_inner")),
            name=constraint_name,
        )
        with connection.schema_editor() as editor:
            editor.add_constraint(RangesModel, constraint)
        self.assertIn(constraint_name, self.get_constraints(RangesModel._meta.db_table))
        datetime_1 = datetime.datetime(2016, 1, 1)
        datetime_2 = datetime.datetime(2016, 1, 2, 12)
        with self.assertRaises(IntegrityError), transaction.atomic():
            RangesModel.objects.create(
                timestamps=(datetime_1, datetime_2),
                timestamps_inner=(datetime_1, datetime_2.replace(hour=13)),
            )
        RangesModel.objects.create(
            timestamps=(datetime_1, datetime_2),
            timestamps_inner=(datetime_1, datetime_2),
        )

    def test_check_constraint_range_contains(self):
        constraint = CheckConstraint(
            condition=Q(ints__contains=(1, 5)),
            name="ints_contains",
        )
        msg = f"Constraint “{constraint.name}” is violated."
        with self.assertRaisesMessage(ValidationError, msg):
            constraint.validate(RangesModel, RangesModel(ints=(6, 10)))

    def test_check_constraint_range_lower_upper(self):
        # startswith/endswith on a range map to its lower/upper bounds.
        constraint = CheckConstraint(
            condition=Q(ints__startswith__gte=0) & Q(ints__endswith__lte=99),
            name="ints_range_lower_upper",
        )
        msg = f"Constraint “{constraint.name}” is violated."
        with self.assertRaisesMessage(ValidationError, msg):
            constraint.validate(RangesModel, RangesModel(ints=(-1, 20)))
        with self.assertRaisesMessage(ValidationError, msg):
            constraint.validate(RangesModel, RangesModel(ints=(0, 100)))
        constraint.validate(RangesModel, RangesModel(ints=(0, 99)))

    def test_check_constraint_range_lower_with_nulls(self):
        # A NULL range doesn't violate bound conditions (with or without an
        # explicit isnull escape hatch).
        constraint = CheckConstraint(
            condition=Q(ints__isnull=True) | Q(ints__startswith__gte=0),
            name="ints_optional_positive_range",
        )
        constraint.validate(RangesModel, RangesModel())
        constraint = CheckConstraint(
            condition=Q(ints__startswith__gte=0),
            name="ints_positive_range",
        )
        constraint.validate(RangesModel, RangesModel())

    def test_opclass(self):
        # opclasses on a UniqueConstraint end up on the backing index.
        constraint = UniqueConstraint(
            name="test_opclass",
            fields=["scene"],
            opclasses=["varchar_pattern_ops"],
        )
        with connection.schema_editor() as editor:
            editor.add_constraint(Scene, constraint)
        self.assertIn(constraint.name, self.get_constraints(Scene._meta.db_table))
        with editor.connection.cursor() as cursor:
            cursor.execute(self.get_opclass_query, [constraint.name])
            self.assertEqual(
                cursor.fetchall(),
                [("varchar_pattern_ops", constraint.name)],
            )
        # Drop the constraint.
        with connection.schema_editor() as editor:
            editor.remove_constraint(Scene, constraint)
        self.assertNotIn(constraint.name, self.get_constraints(Scene._meta.db_table))

    def test_opclass_multiple_columns(self):
        # Each column gets its own operator class, position-matched.
        constraint = UniqueConstraint(
            name="test_opclass_multiple",
            fields=["scene", "setting"],
            opclasses=["varchar_pattern_ops", "text_pattern_ops"],
        )
        with connection.schema_editor() as editor:
            editor.add_constraint(Scene, constraint)
        with editor.connection.cursor() as cursor:
            cursor.execute(self.get_opclass_query, [constraint.name])
            expected_opclasses = (
                ("varchar_pattern_ops", constraint.name),
                ("text_pattern_ops", constraint.name),
            )
            self.assertCountEqual(cursor.fetchall(), expected_opclasses)

    def test_opclass_partial(self):
        # opclasses work together with a partial (conditioned) constraint.
        constraint = UniqueConstraint(
            name="test_opclass_partial",
            fields=["scene"],
            opclasses=["varchar_pattern_ops"],
            condition=Q(setting__contains="Sir Bedemir's Castle"),
        )
        with connection.schema_editor() as editor:
            editor.add_constraint(Scene, constraint)
        with editor.connection.cursor() as cursor:
            cursor.execute(self.get_opclass_query, [constraint.name])
            self.assertCountEqual(
                cursor.fetchall(),
                [("varchar_pattern_ops", constraint.name)],
            )

    @skipUnlessDBFeature("supports_covering_indexes")
    def test_opclass_include(self):
        # INCLUDE columns don't get an opclass of their own.
        constraint = UniqueConstraint(
            name="test_opclass_include",
            fields=["scene"],
            opclasses=["varchar_pattern_ops"],
            include=["setting"],
        )
        with connection.schema_editor() as editor:
            editor.add_constraint(Scene, constraint)
        with editor.connection.cursor() as cursor:
            cursor.execute(self.get_opclass_query, [constraint.name])
            self.assertCountEqual(
                cursor.fetchall(),
                [("varchar_pattern_ops", constraint.name)],
            )

    @skipUnlessDBFeature("supports_expression_indexes")
    def test_opclass_func(self):
        # An expression constraint wrapped in OpClass enforces case-insensitive
        # uniqueness at the database level.
        constraint = UniqueConstraint(
            OpClass(Lower("scene"), name="text_pattern_ops"),
            name="test_opclass_func",
        )
        with connection.schema_editor() as editor:
            editor.add_constraint(Scene, constraint)
        constraints = self.get_constraints(Scene._meta.db_table)
        self.assertIs(constraints[constraint.name]["unique"], True)
        self.assertIn(constraint.name, constraints)
        with editor.connection.cursor() as cursor:
            cursor.execute(self.get_opclass_query, [constraint.name])
            self.assertEqual(
                cursor.fetchall(),
                [("text_pattern_ops", constraint.name)],
            )
        Scene.objects.create(scene="Scene 10", setting="The dark forest of Ewing")
        with self.assertRaises(IntegrityError), transaction.atomic():
            Scene.objects.create(scene="ScEnE 10", setting="Sir Bedemir's Castle")
        Scene.objects.create(scene="Scene 5", setting="Sir Bedemir's Castle")
        # Drop the constraint.
        with connection.schema_editor() as editor:
            editor.remove_constraint(Scene, constraint)
        self.assertNotIn(constraint.name, self.get_constraints(Scene._meta.db_table))
        Scene.objects.create(scene="ScEnE 10", setting="Sir Bedemir's Castle")

    def test_opclass_func_validate_constraints(self):
        # validate() applies the same case-insensitive rule in Python.
        constraint_name = "test_opclass_func_validate_constraints"
        constraint = UniqueConstraint(
            OpClass(Lower("scene"), name="text_pattern_ops"),
            name="test_opclass_func_validate_constraints",
        )
        Scene.objects.create(scene="First scene")
        # Non-unique scene.
        msg = f"Constraint “{constraint_name}” is violated."
        with self.assertRaisesMessage(ValidationError, msg):
            constraint.validate(Scene, Scene(scene="first Scene"))
        constraint.validate(Scene, Scene(scene="second Scene"))
class ExclusionConstraintTests(PostgreSQLTestCase):
    def get_constraints(self, table):
        """Get the constraints on the table using a new cursor."""
        with connection.cursor() as cursor:
            return connection.introspection.get_constraints(cursor, table)

    def test_invalid_condition(self):
        # condition must be a Q object, not an arbitrary expression.
        msg = "ExclusionConstraint.condition must be a Q instance."
        with self.assertRaisesMessage(ValueError, msg):
            ExclusionConstraint(
                index_type="GIST",
                name="exclude_invalid_condition",
                expressions=[(F("datespan"), RangeOperators.OVERLAPS)],
                condition=F("invalid"),
            )

    def test_invalid_index_type(self):
        # Only GiST and SP-GiST support exclusion constraints.
        msg = "Exclusion constraints only support GiST or SP-GiST indexes."
        with self.assertRaisesMessage(ValueError, msg):
            ExclusionConstraint(
                index_type="gin",
                name="exclude_invalid_index_type",
                expressions=[(F("datespan"), RangeOperators.OVERLAPS)],
            )

    def test_invalid_expressions(self):
        # Each expression must be an (expression, operator) 2-tuple.
        msg = "The expressions must be a list of 2-tuples."
        for expressions in (["foo"], [("foo",)], [("foo_1", "foo_2", "foo_3")]):
            with self.subTest(expressions), self.assertRaisesMessage(ValueError, msg):
                ExclusionConstraint(
                    index_type="GIST",
                    name="exclude_invalid_expressions",
                    expressions=expressions,
                )

    def test_empty_expressions(self):
        msg = "At least one expression is required to define an exclusion constraint."
        for empty_expressions in (None, []):
            with (
                self.subTest(empty_expressions),
                self.assertRaisesMessage(ValueError, msg),
            ):
                ExclusionConstraint(
                    index_type="GIST",
                    name="exclude_empty_expressions",
                    expressions=empty_expressions,
                )

    def test_invalid_deferrable(self):
        msg = "ExclusionConstraint.deferrable must be a Deferrable instance."
        with self.assertRaisesMessage(ValueError, msg):
            ExclusionConstraint(
                name="exclude_invalid_deferrable",
                expressions=[(F("datespan"), RangeOperators.OVERLAPS)],
                deferrable="invalid",
            )

    def test_invalid_include_type(self):
        msg = "ExclusionConstraint.include must be a list or tuple."
        with self.assertRaisesMessage(ValueError, msg):
            ExclusionConstraint(
                name="exclude_invalid_include",
                expressions=[(F("datespan"), RangeOperators.OVERLAPS)],
                include="invalid",
            )
    @isolate_apps("postgres_tests")
    def test_check(self):
        # System checks flag nonexistent (models.E012) and joined
        # (models.E041) field references in constraint expressions; valid
        # local fields and transforms pass silently.
        class Author(Model):
            name = CharField(max_length=255)
            alias = CharField(max_length=255)

            class Meta:
                app_label = "postgres_tests"

        class Book(Model):
            title = CharField(max_length=255)
            published_date = DateField()
            author = ForeignKey(Author, CASCADE)

            class Meta:
                app_label = "postgres_tests"
                constraints = [
                    ExclusionConstraint(
                        name="exclude_check",
                        expressions=[
                            (F("title"), RangeOperators.EQUAL),
                            (F("published_date__year"), RangeOperators.EQUAL),
                            ("published_date__month", RangeOperators.EQUAL),
                            (F("author__name"), RangeOperators.EQUAL),
                            ("author__alias", RangeOperators.EQUAL),
                            ("nonexistent", RangeOperators.EQUAL),
                        ],
                    )
                ]

        self.assertCountEqual(
            Book.check(databases=self.databases),
            [
                Error(
                    "'constraints' refers to the nonexistent field 'nonexistent'.",
                    obj=Book,
                    id="models.E012",
                ),
                Error(
                    "'constraints' refers to the joined field 'author__alias'.",
                    obj=Book,
                    id="models.E041",
                ),
                Error(
                    "'constraints' refers to the joined field 'author__name'.",
                    obj=Book,
                    id="models.E041",
                ),
            ],
        )
    def test_repr(self):
        # repr() includes only the options explicitly set on the constraint.
        constraint = ExclusionConstraint(
            name="exclude_overlapping",
            expressions=[
                (F("datespan"), RangeOperators.OVERLAPS),
                (F("room"), RangeOperators.EQUAL),
            ],
        )
        self.assertEqual(
            repr(constraint),
            "<ExclusionConstraint: index_type='GIST' expressions=["
            "(F(datespan), '&&'), (F(room), '=')] name='exclude_overlapping'>",
        )
        constraint = ExclusionConstraint(
            name="exclude_overlapping",
            expressions=[(F("datespan"), RangeOperators.ADJACENT_TO)],
            condition=Q(cancelled=False),
            index_type="SPGiST",
        )
        self.assertEqual(
            repr(constraint),
            "<ExclusionConstraint: index_type='SPGiST' expressions=["
            "(F(datespan), '-|-')] name='exclude_overlapping' "
            "condition=(AND: ('cancelled', False))>",
        )
        constraint = ExclusionConstraint(
            name="exclude_overlapping",
            expressions=[(F("datespan"), RangeOperators.ADJACENT_TO)],
            deferrable=Deferrable.IMMEDIATE,
        )
        self.assertEqual(
            repr(constraint),
            "<ExclusionConstraint: index_type='GIST' expressions=["
            "(F(datespan), '-|-')] name='exclude_overlapping' "
            "deferrable=Deferrable.IMMEDIATE>",
        )
        constraint = ExclusionConstraint(
            name="exclude_overlapping",
            expressions=[(F("datespan"), RangeOperators.ADJACENT_TO)],
            include=["cancelled", "room"],
        )
        self.assertEqual(
            repr(constraint),
            "<ExclusionConstraint: index_type='GIST' expressions=["
            "(F(datespan), '-|-')] name='exclude_overlapping' "
            "include=('cancelled', 'room')>",
        )
        constraint = ExclusionConstraint(
            name="exclude_overlapping",
            expressions=[
                (OpClass("datespan", name="range_ops"), RangeOperators.ADJACENT_TO),
            ],
        )
        self.assertEqual(
            repr(constraint),
            "<ExclusionConstraint: index_type='GIST' expressions=["
            "(OpClass(F(datespan), name=range_ops), '-|-')] "
            "name='exclude_overlapping'>",
        )
        constraint = ExclusionConstraint(
            name="exclude_overlapping",
            expressions=[(F("datespan"), RangeOperators.ADJACENT_TO)],
            violation_error_message="Overlapping must be excluded",
        )
        self.assertEqual(
            repr(constraint),
            "<ExclusionConstraint: index_type='GIST' expressions=["
            "(F(datespan), '-|-')] name='exclude_overlapping' "
            "violation_error_message='Overlapping must be excluded'>",
        )
        constraint = ExclusionConstraint(
            name="exclude_overlapping",
            expressions=[(F("datespan"), RangeOperators.ADJACENT_TO)],
            violation_error_code="overlapping_must_be_excluded",
        )
        self.assertEqual(
            repr(constraint),
            "<ExclusionConstraint: index_type='GIST' expressions=["
            "(F(datespan), '-|-')] name='exclude_overlapping' "
            "violation_error_code='overlapping_must_be_excluded'>",
        )
    def test_eq(self):
        # Equality accounts for expressions, condition, deferrable, include,
        # violation_error_message and violation_error_code.
        constraint_1 = ExclusionConstraint(
            name="exclude_overlapping",
            expressions=[
                (F("datespan"), RangeOperators.OVERLAPS),
                (F("room"), RangeOperators.EQUAL),
            ],
            condition=Q(cancelled=False),
        )
        constraint_2 = ExclusionConstraint(
            name="exclude_overlapping",
            expressions=[
                ("datespan", RangeOperators.OVERLAPS),
                ("room", RangeOperators.EQUAL),
            ],
        )
        constraint_3 = ExclusionConstraint(
            name="exclude_overlapping",
            expressions=[("datespan", RangeOperators.OVERLAPS)],
            condition=Q(cancelled=False),
        )
        constraint_4 = ExclusionConstraint(
            name="exclude_overlapping",
            expressions=[
                ("datespan", RangeOperators.OVERLAPS),
                ("room", RangeOperators.EQUAL),
            ],
            deferrable=Deferrable.DEFERRED,
        )
        constraint_5 = ExclusionConstraint(
            name="exclude_overlapping",
            expressions=[
                ("datespan", RangeOperators.OVERLAPS),
                ("room", RangeOperators.EQUAL),
            ],
            deferrable=Deferrable.IMMEDIATE,
        )
        constraint_6 = ExclusionConstraint(
            name="exclude_overlapping",
            expressions=[
                ("datespan", RangeOperators.OVERLAPS),
                ("room", RangeOperators.EQUAL),
            ],
            deferrable=Deferrable.IMMEDIATE,
            include=["cancelled"],
        )
        constraint_7 = ExclusionConstraint(
            name="exclude_overlapping",
            expressions=[
                ("datespan", RangeOperators.OVERLAPS),
                ("room", RangeOperators.EQUAL),
            ],
            include=["cancelled"],
        )
        constraint_10 = ExclusionConstraint(
            name="exclude_overlapping",
            expressions=[
                (F("datespan"), RangeOperators.OVERLAPS),
                (F("room"), RangeOperators.EQUAL),
            ],
            condition=Q(cancelled=False),
            violation_error_message="custom error",
        )
        constraint_11 = ExclusionConstraint(
            name="exclude_overlapping",
            expressions=[
                (F("datespan"), RangeOperators.OVERLAPS),
                (F("room"), RangeOperators.EQUAL),
            ],
            condition=Q(cancelled=False),
            violation_error_message="other custom error",
        )
        constraint_12 = ExclusionConstraint(
            name="exclude_overlapping",
            expressions=[
                (F("datespan"), RangeOperators.OVERLAPS),
                (F("room"), RangeOperators.EQUAL),
            ],
            condition=Q(cancelled=False),
            violation_error_code="custom_code",
            violation_error_message="other custom error",
        )
        self.assertEqual(constraint_1, constraint_1)
        # mock.ANY compares equal to anything.
        self.assertEqual(constraint_1, mock.ANY)
        self.assertNotEqual(constraint_1, constraint_2)
        self.assertNotEqual(constraint_1, constraint_3)
        self.assertNotEqual(constraint_1, constraint_4)
        self.assertNotEqual(constraint_1, constraint_10)
        self.assertNotEqual(constraint_2, constraint_3)
        self.assertNotEqual(constraint_2, constraint_4)
        self.assertNotEqual(constraint_2, constraint_7)
        self.assertNotEqual(constraint_4, constraint_5)
        self.assertNotEqual(constraint_5, constraint_6)
        self.assertNotEqual(constraint_1, object())
        self.assertNotEqual(constraint_10, constraint_11)
        self.assertNotEqual(constraint_11, constraint_12)
        self.assertEqual(constraint_10, constraint_10)
        self.assertEqual(constraint_12, constraint_12)
    def test_deconstruct(self):
        # deconstruct() round-trips the constraint for migration serialization;
        # only explicitly set options appear in kwargs.
        constraint = ExclusionConstraint(
            name="exclude_overlapping",
            expressions=[
                ("datespan", RangeOperators.OVERLAPS),
                ("room", RangeOperators.EQUAL),
            ],
        )
        path, args, kwargs = constraint.deconstruct()
        self.assertEqual(
            path, "django.contrib.postgres.constraints.ExclusionConstraint"
        )
        self.assertEqual(args, ())
        self.assertEqual(
            kwargs,
            {
                "name": "exclude_overlapping",
                "expressions": [
                    ("datespan", RangeOperators.OVERLAPS),
                    ("room", RangeOperators.EQUAL),
                ],
            },
        )

    def test_deconstruct_index_type(self):
        # A non-default index_type is preserved in the deconstructed kwargs.
        constraint = ExclusionConstraint(
            name="exclude_overlapping",
            index_type="SPGIST",
            expressions=[
                ("datespan", RangeOperators.OVERLAPS),
                ("room", RangeOperators.EQUAL),
            ],
        )
        path, args, kwargs = constraint.deconstruct()
        self.assertEqual(
            path, "django.contrib.postgres.constraints.ExclusionConstraint"
        )
        self.assertEqual(args, ())
        self.assertEqual(
            kwargs,
            {
                "name": "exclude_overlapping",
                "index_type": "SPGIST",
                "expressions": [
                    ("datespan", RangeOperators.OVERLAPS),
                    ("room", RangeOperators.EQUAL),
                ],
            },
        )

    def test_deconstruct_condition(self):
        constraint = ExclusionConstraint(
            name="exclude_overlapping",
            expressions=[
                ("datespan", RangeOperators.OVERLAPS),
                ("room", RangeOperators.EQUAL),
            ],
            condition=Q(cancelled=False),
        )
        path, args, kwargs = constraint.deconstruct()
        self.assertEqual(
            path, "django.contrib.postgres.constraints.ExclusionConstraint"
        )
        self.assertEqual(args, ())
        self.assertEqual(
            kwargs,
            {
                "name": "exclude_overlapping",
                "expressions": [
                    ("datespan", RangeOperators.OVERLAPS),
                    ("room", RangeOperators.EQUAL),
                ],
                "condition": Q(cancelled=False),
            },
        )

    def test_deconstruct_deferrable(self):
        constraint = ExclusionConstraint(
            name="exclude_overlapping",
            expressions=[("datespan", RangeOperators.OVERLAPS)],
            deferrable=Deferrable.DEFERRED,
        )
        path, args, kwargs = constraint.deconstruct()
        self.assertEqual(
            path, "django.contrib.postgres.constraints.ExclusionConstraint"
        )
        self.assertEqual(args, ())
        self.assertEqual(
            kwargs,
            {
                "name": "exclude_overlapping",
                "expressions": [("datespan", RangeOperators.OVERLAPS)],
                "deferrable": Deferrable.DEFERRED,
            },
        )

    def test_deconstruct_include(self):
        # include is normalized to a tuple when deconstructed.
        constraint = ExclusionConstraint(
            name="exclude_overlapping",
            expressions=[("datespan", RangeOperators.OVERLAPS)],
            include=["cancelled", "room"],
        )
        path, args, kwargs = constraint.deconstruct()
        self.assertEqual(
            path, "django.contrib.postgres.constraints.ExclusionConstraint"
        )
        self.assertEqual(args, ())
        self.assertEqual(
            kwargs,
            {
                "name": "exclude_overlapping",
                "expressions": [("datespan", RangeOperators.OVERLAPS)],
                "include": ("cancelled", "room"),
            },
        )
    def _test_range_overlaps(self, constraint):
        """Shared driver for the overlap-exclusion tests.

        Installs ``constraint`` on HotelReservation, then checks both the
        database-level enforcement (IntegrityError on save) and the Python-side
        ``constraint.validate()`` behavior, including its ``exclude`` handling.
        Callers pass a constraint excluding overlapping datespans for the same
        room with condition=Q(cancelled=False).
        """
        # Create exclusion constraint.
        self.assertNotIn(
            constraint.name, self.get_constraints(HotelReservation._meta.db_table)
        )
        with connection.schema_editor() as editor:
            editor.add_constraint(HotelReservation, constraint)
        self.assertIn(
            constraint.name, self.get_constraints(HotelReservation._meta.db_table)
        )
        # Add initial reservations.
        room101 = Room.objects.create(number=101)
        room102 = Room.objects.create(number=102)
        datetimes = [
            timezone.datetime(2018, 6, 20),
            timezone.datetime(2018, 6, 24),
            timezone.datetime(2018, 6, 26),
            timezone.datetime(2018, 6, 28),
            timezone.datetime(2018, 6, 29),
        ]
        reservation = HotelReservation.objects.create(
            datespan=DateRange(datetimes[0].date(), datetimes[1].date()),
            start=datetimes[0],
            end=datetimes[1],
            room=room102,
        )
        # validate() must pass for an already-saved row (no self-conflict).
        constraint.validate(HotelReservation, reservation)
        HotelReservation.objects.create(
            datespan=DateRange(datetimes[1].date(), datetimes[3].date()),
            start=datetimes[1],
            end=datetimes[3],
            room=room102,
        )
        # Cancelled reservation — expected to be ignored by the constraint's
        # condition, so the later overlapping-but-cancelled insert succeeds.
        HotelReservation.objects.create(
            datespan=DateRange(datetimes[3].date(), datetimes[4].date()),
            start=datetimes[3],
            end=datetimes[4],
            room=room102,
            cancelled=True,
        )
        # Overlap dates.
        with self.assertRaises(IntegrityError), transaction.atomic():
            reservation = HotelReservation(
                datespan=(datetimes[1].date(), datetimes[2].date()),
                start=datetimes[1],
                end=datetimes[2],
                room=room102,
            )
            # Python-side validation flags the conflict before save() does.
            msg = f"Constraint “{constraint.name}” is violated."
            with self.assertRaisesMessage(ValidationError, msg):
                constraint.validate(HotelReservation, reservation)
            reservation.save()
        # Valid range.
        other_valid_reservations = [
            # Other room.
            HotelReservation(
                datespan=(datetimes[1].date(), datetimes[2].date()),
                start=datetimes[1],
                end=datetimes[2],
                room=room101,
            ),
            # Cancelled reservation.
            HotelReservation(
                datespan=(datetimes[1].date(), datetimes[1].date()),
                start=datetimes[1],
                end=datetimes[2],
                room=room102,
                cancelled=True,
            ),
            # Other adjacent dates.
            HotelReservation(
                datespan=(datetimes[3].date(), datetimes[4].date()),
                start=datetimes[3],
                end=datetimes[4],
                room=room102,
            ),
        ]
        for reservation in other_valid_reservations:
            constraint.validate(HotelReservation, reservation)
        HotelReservation.objects.bulk_create(other_valid_reservations)
        # Excluded fields.
        # Excluding a field referenced by the constraint makes validate() skip
        # the check entirely, even for an instance that would conflict.
        constraint.validate(
            HotelReservation,
            HotelReservation(
                datespan=(datetimes[1].date(), datetimes[2].date()),
                start=datetimes[1],
                end=datetimes[2],
                room=room102,
            ),
            exclude={"room"},
        )
        constraint.validate(
            HotelReservation,
            HotelReservation(
                datespan=(datetimes[1].date(), datetimes[2].date()),
                start=datetimes[1],
                end=datetimes[2],
                room=room102,
            ),
            exclude={"datespan", "start", "end", "room"},
        )
        # Constraints with excluded fields in condition are ignored.
        constraint.validate(
            HotelReservation,
            HotelReservation(
                datespan=(datetimes[1].date(), datetimes[2].date()),
                start=datetimes[1],
                end=datetimes[2],
                room=room102,
            ),
            exclude={"cancelled"},
        )
def test_range_overlaps_custom(self):
class TsTzRange(Func):
function = "TSTZRANGE"
output_field = DateTimeRangeField()
constraint = ExclusionConstraint(
name="exclude_overlapping_reservations_custom_opclass",
expressions=[
(
OpClass(TsTzRange("start", "end", RangeBoundary()), "range_ops"),
RangeOperators.OVERLAPS,
),
(OpClass("room", "gist_int8_ops"), RangeOperators.EQUAL),
],
condition=Q(cancelled=False),
)
self._test_range_overlaps(constraint)
def test_range_overlaps(self):
constraint = ExclusionConstraint(
name="exclude_overlapping_reservations",
expressions=[
(F("datespan"), RangeOperators.OVERLAPS),
("room", RangeOperators.EQUAL),
],
condition=Q(cancelled=False),
)
self._test_range_overlaps(constraint)
def test_range_adjacent(self):
constraint_name = "ints_adjacent"
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | true |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/postgres_tests/test_bulk_update.py | tests/postgres_tests/test_bulk_update.py | from datetime import date
from . import PostgreSQLTestCase
from .models import (
HStoreModel,
IntegerArrayModel,
NestedIntegerArrayModel,
NullableIntegerArrayModel,
OffByOneModel,
OtherTypesArrayModel,
RangesModel,
)
try:
from django.db.backends.postgresql.psycopg_any import DateRange, NumericRange
except ImportError:
pass # psycopg isn't installed.
class BulkSaveTests(PostgreSQLTestCase):
    """Bulk ORM operations against PostgreSQL-specific field types."""

    def test_bulk_update(self):
        # (model, field name, value at creation, value written via bulk_update)
        cases = [
            (IntegerArrayModel, "field", [], [1, 2, 3]),
            (NullableIntegerArrayModel, "field", [1, 2, 3], None),
            (NestedIntegerArrayModel, "field", [], [[1, 2, 3]]),
            (HStoreModel, "field", {}, {1: 2}),
            (RangesModel, "ints", None, NumericRange(lower=1, upper=10)),
            (
                RangesModel,
                "dates",
                None,
                DateRange(lower=date.today(), upper=date.today()),
            ),
            (OtherTypesArrayModel, "ips", [], ["1.2.3.4"]),
            (OtherTypesArrayModel, "json", [], [{"a": "b"}]),
        ]
        for model_cls, field_name, initial_value, updated_value in cases:
            with self.subTest(model=model_cls, field=field_name):
                objs = model_cls.objects.bulk_create(
                    model_cls(**{field_name: initial_value}) for _ in range(20)
                )
                for obj in objs:
                    setattr(obj, field_name, updated_value)
                model_cls.objects.bulk_update(objs, [field_name])
                self.assertSequenceEqual(
                    model_cls.objects.filter(**{field_name: updated_value}), objs
                )

    def test_bulk_create(self):
        # Rows created with one_off=0 are expected to read back as 1.
        OffByOneModel.objects.bulk_create(
            OffByOneModel(one_off=0) for _ in range(20)
        )
        self.assertSequenceEqual(
            [obj.one_off for obj in OffByOneModel.objects.all()], [1] * 20
        )
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/postgres_tests/array_index_migrations/0001_initial.py | tests/postgres_tests/array_index_migrations/0001_initial.py | import django.contrib.postgres.fields
from django.db import migrations, models
class Migration(migrations.Migration):
    """Initial migration: create a model with db_index'd ArrayFields."""
    dependencies = []
    operations = [
        migrations.CreateModel(
            name="CharTextArrayIndexModel",
            fields=[
                (
                    "id",
                    models.BigAutoField(
                        verbose_name="ID",
                        serialize=False,
                        auto_created=True,
                        primary_key=True,
                    ),
                ),
                # Indexed array of short strings, capped at 100 elements.
                (
                    "char",
                    django.contrib.postgres.fields.ArrayField(
                        models.CharField(max_length=10), db_index=True, size=100
                    ),
                ),
                ("char2", models.CharField(max_length=11, db_index=True)),
                # Indexed array of unbounded text values (no size limit).
                (
                    "text",
                    django.contrib.postgres.fields.ArrayField(
                        models.TextField(), db_index=True
                    ),
                ),
            ],
            options={},
            bases=(models.Model,),
        ),
    ]
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/postgres_tests/array_index_migrations/__init__.py | tests/postgres_tests/array_index_migrations/__init__.py | python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false | |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/postgres_tests/array_default_migrations/0001_initial.py | tests/postgres_tests/array_default_migrations/0001_initial.py | import django.contrib.postgres.fields
from django.db import migrations, models
class Migration(migrations.Migration):
    """Initial migration: create a model with a plain integer ArrayField."""
    dependencies = []
    operations = [
        migrations.CreateModel(
            name="IntegerArrayDefaultModel",
            fields=[
                (
                    "id",
                    models.BigAutoField(
                        verbose_name="ID",
                        serialize=False,
                        auto_created=True,
                        primary_key=True,
                    ),
                ),
                (
                    "field",
                    django.contrib.postgres.fields.ArrayField(models.IntegerField()),
                ),
            ],
            options={},
            bases=(models.Model,),
        ),
    ]
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/postgres_tests/array_default_migrations/__init__.py | tests/postgres_tests/array_default_migrations/__init__.py | python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false | |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/postgres_tests/array_default_migrations/0002_integerarraymodel_field_2.py | tests/postgres_tests/array_default_migrations/0002_integerarraymodel_field_2.py | import django.contrib.postgres.fields
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add a second integer ArrayField with a one-off default."""
    dependencies = [
        ("postgres_tests", "0001_initial"),
    ]
    operations = [
        migrations.AddField(
            model_name="integerarraydefaultmodel",
            name="field_2",
            field=django.contrib.postgres.fields.ArrayField(
                models.IntegerField(), default=[]
            ),
            # default=[] only populates existing rows during this migration;
            # it is not kept as the field's default afterwards.
            preserve_default=False,
        ),
    ]
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/postgres_tests/migrations/0001_setup_extensions.py | tests/postgres_tests/migrations/0001_setup_extensions.py | from unittest import mock
from django.db import migrations
# Fall back to mocks when the postgres operations can't be imported
# (presumably when psycopg isn't installed — matches the ImportError
# handling used elsewhere in these tests), so this migration module can
# still be imported on non-PostgreSQL setups.
try:
    from django.contrib.postgres.operations import (
        BloomExtension,
        BtreeGinExtension,
        BtreeGistExtension,
        CITextExtension,
        CreateExtension,
        HStoreExtension,
        TrigramExtension,
        UnaccentExtension,
    )
except ImportError:
    BloomExtension = mock.Mock()
    BtreeGinExtension = mock.Mock()
    BtreeGistExtension = mock.Mock()
    CITextExtension = mock.Mock()
    CreateExtension = mock.Mock()
    HStoreExtension = mock.Mock()
    TrigramExtension = mock.Mock()
    UnaccentExtension = mock.Mock()
class Migration(migrations.Migration):
    """Install the PostgreSQL extensions used by the test suite."""
    operations = [
        BloomExtension(),
        BtreeGinExtension(),
        BtreeGistExtension(),
        CITextExtension(),
        # Ensure CreateExtension quotes extension names by creating one with a
        # dash in its name.
        CreateExtension("uuid-ossp"),
        HStoreExtension(),
        TrigramExtension(),
        UnaccentExtension(),
    ]
| python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
django/django | https://github.com/django/django/blob/3201a895cba335000827b28768a7b7105c81b415/tests/postgres_tests/migrations/__init__.py | tests/postgres_tests/migrations/__init__.py | python | BSD-3-Clause | 3201a895cba335000827b28768a7b7105c81b415 | 2026-01-04T14:38:15.489092Z | false |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.